file_name | prefix | suffix | middle
---|---|---|---|
next_ssg.rs | use easy_error::{bail, Error};
use fxhash::FxHashSet;
use std::mem::take;
use swc_common::pass::{Repeat, Repeated};
use swc_common::DUMMY_SP;
use swc_ecmascript::ast::*;
use swc_ecmascript::utils::ident::IdentLike;
use swc_ecmascript::visit::FoldWith;
use swc_ecmascript::{
utils::{Id, HANDLER},
visit::{noop_fold_type, Fold},
};
/// Note: This pass requires running `resolver` **before** running this.
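///
/// Illustrative sketch of the intended effect (assumed input, not a real
/// test fixture):
///
/// ```js
/// // before
/// export async function getStaticProps() { /* ... */ }
/// export default function Page() { /* ... */ }
///
/// // after
/// export var __N_SSG = true;
/// export default function Page() { /* ... */ }
/// ```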
pub fn next_ssg() -> impl Fold {
Repeat::new(NextSsg {
state: Default::default(),
in_lhs_of_var: false,
})
}
/// State of the transforms. Shared by the analyzer and the transform.
#[derive(Debug, Default)]
struct State {
/// Identifiers referenced by non-data function code.
///
/// Cleared before running each pass, because we drop ast nodes between the
/// passes.
refs_from_other: FxHashSet<Id>,
/// Identifiers referenced by data functions or derivatives.
///
/// Preserved between runs, because we should remember derivatives of data
/// functions as the data function itself is already removed.
refs_from_data_fn: FxHashSet<Id>,
cur_declaring: FxHashSet<Id>,
is_prerenderer: bool,
is_server_props: bool,
done: bool,
should_run_again: bool,
}
impl State {
fn is_data_identifier(&mut self, i: &Ident) -> Result<bool, Error> {
let ssg_exports = &["getStaticProps", "getStaticPaths", "getServerSideProps"];
if ssg_exports.contains(&&*i.sym) {
if &*i.sym == "getServerSideProps" {
if self.is_prerenderer {
HANDLER.with(|handler| {
handler
.struct_span_err(
i.span,
"You can not use getStaticProps or getStaticPaths with \
getServerSideProps. To use SSG, please remove getServerSideProps",
)
.emit()
});
bail!("both ssg and ssr functions present");
}
self.is_server_props = true;
} else {
if self.is_server_props {
HANDLER.with(|handler| {
handler
.struct_span_err(
i.span,
"You can not use getStaticProps or getStaticPaths with \
getServerSideProps. To use SSG, please remove getServerSideProps",
)
.emit()
});
bail!("both ssg and ssr functions present");
}
self.is_prerenderer = true;
}
Ok(true)
} else {
Ok(false)
}
}
}
struct Analyzer<'a> {
state: &'a mut State,
in_lhs_of_var: bool,
in_data_fn: bool,
}
impl Analyzer<'_> {
fn add_ref(&mut self, id: Id) {
tracing::trace!("add_ref({}{:?}, data = {})", id.0, id.1, self.in_data_fn);
if self.in_data_fn {
self.state.refs_from_data_fn.insert(id);
} else {
if self.state.cur_declaring.contains(&id) {
return;
}
self.state.refs_from_other.insert(id);
}
}
}
impl Fold for Analyzer<'_> {
// This is important for reducing binary sizes.
noop_fold_type!();
fn fold_binding_ident(&mut self, i: BindingIdent) -> BindingIdent {
if !self.in_lhs_of_var || self.in_data_fn {
self.add_ref(i.id.to_id());
}
i
}
fn fold_export_named_specifier(&mut self, s: ExportNamedSpecifier) -> ExportNamedSpecifier {
if let ModuleExportName::Ident(id) = &s.orig {
self.add_ref(id.to_id());
}
s
}
fn fold_expr(&mut self, e: Expr) -> Expr {
let e = e.fold_children_with(self);
match &e {
Expr::Ident(i) => {
self.add_ref(i.to_id());
}
_ => {}
}
e
}
fn fold_jsx_element(&mut self, jsx: JSXElement) -> JSXElement {
fn get_leftmost_id_member_expr(e: &JSXMemberExpr) -> Id {
match &e.obj {
JSXObject::Ident(i) => i.to_id(),
JSXObject::JSXMemberExpr(e) => get_leftmost_id_member_expr(e),
}
}
match &jsx.opening.name {
JSXElementName::Ident(i) => {
self.add_ref(i.to_id());
}
JSXElementName::JSXMemberExpr(e) => {
self.add_ref(get_leftmost_id_member_expr(e));
}
_ => {}
}
jsx.fold_children_with(self)
}
fn fold_fn_decl(&mut self, f: FnDecl) -> FnDecl {
let old_in_data = self.in_data_fn;
self.state.cur_declaring.insert(f.ident.to_id());
if let Ok(is_data_identifier) = self.state.is_data_identifier(&f.ident) {
self.in_data_fn |= is_data_identifier;
} else {
return f;
}
tracing::trace!(
"ssg: Handling `{}{:?}`; in_data_fn = {:?}",
f.ident.sym,
f.ident.span.ctxt,
self.in_data_fn
);
let f = f.fold_children_with(self);
self.state.cur_declaring.remove(&f.ident.to_id());
self.in_data_fn = old_in_data;
f
}
fn fold_fn_expr(&mut self, f: FnExpr) -> FnExpr {
let f = f.fold_children_with(self);
if let Some(id) = &f.ident {
self.add_ref(id.to_id());
}
f
}
/// Drops [ExportDecl] if all specifiers are removed.
fn fold_module_item(&mut self, s: ModuleItem) -> ModuleItem {
match s {
ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(e)) if !e.specifiers.is_empty() => {
let e = e.fold_with(self);
if e.specifiers.is_empty() {
return ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }));
}
return ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(e));
}
_ => {}
};
// Visit children to ensure that all references are added to the scope.
let s = s.fold_children_with(self);
match &s {
ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(e)) => match &e.decl {
Decl::Fn(f) => {
// Drop getStaticProps.
if let Ok(is_data_identifier) = self.state.is_data_identifier(&f.ident) {
if is_data_identifier {
return ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }));
}
} else {
return s;
}
}
Decl::Var(d) => {
if d.decls.is_empty() {
return ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }));
}
}
_ => {}
},
_ => {}
}
s
}
fn fold_named_export(&mut self, mut n: NamedExport) -> NamedExport {
if n.src.is_some() {
n.specifiers = n.specifiers.fold_with(self);
}
n
}
fn fold_prop(&mut self, p: Prop) -> Prop {
let p = p.fold_children_with(self);
match &p {
Prop::Shorthand(i) => {
self.add_ref(i.to_id());
}
_ => {}
}
p
}
fn fold_var_declarator(&mut self, mut v: VarDeclarator) -> VarDeclarator {
let old_in_data = self.in_data_fn;
match &v.name {
Pat::Ident(name) => {
if let Ok(is_data_identifier) = self.state.is_data_identifier(&name.id) {
if is_data_identifier {
self.in_data_fn = true;
}
} else {
return v;
}
}
_ => {}
}
let old_in_lhs_of_var = self.in_lhs_of_var;
self.in_lhs_of_var = true;
v.name = v.name.fold_with(self);
self.in_lhs_of_var = false;
v.init = v.init.fold_with(self);
self.in_lhs_of_var = old_in_lhs_of_var;
self.in_data_fn = old_in_data;
v
}
}
/// Actual implementation of the transform.
struct NextSsg {
state: State,
in_lhs_of_var: bool,
}
impl NextSsg {
fn should_remove(&self, id: Id) -> bool {
self.state.refs_from_data_fn.contains(&id) && !self.state.refs_from_other.contains(&id)
}
/// Mark identifiers in `n` as a candidate for removal.
fn mark_as_candidate<N>(&mut self, n: N) -> N
where
N: for<'aa> FoldWith<Analyzer<'aa>>,
{
tracing::debug!("mark_as_candidate");
// The analyzer never changes `in_data_fn` back to false, so all identifiers in `n` will
// be marked as referenced from a data function.
let mut v = Analyzer {
state: &mut self.state,
in_lhs_of_var: false,
in_data_fn: true,
};
let n = n.fold_with(&mut v);
self.state.should_run_again = true;
n
}
}
impl Repeated for NextSsg {
fn changed(&self) -> bool {
self.state.should_run_again
}
fn reset(&mut self) {
self.state.refs_from_other.clear();
self.state.cur_declaring.clear();
self.state.should_run_again = false;
}
}
/// `VisitMut` is faster than [Fold], but we use [Fold] because it's much easier
/// to read.
///
/// Note: We don't implement `fold_script` because next.js doesn't use it.
impl Fold for NextSsg {
// This is important for reducing binary sizes.
noop_fold_type!();
fn fold_import_decl(&mut self, mut i: ImportDecl) -> ImportDecl {
// Imports for side effects.
if i.specifiers.is_empty() {
return i;
}
i.specifiers.retain(|s| match s {
ImportSpecifier::Named(ImportNamedSpecifier { local, .. })
| ImportSpecifier::Default(ImportDefaultSpecifier { local, .. })
| ImportSpecifier::Namespace(ImportStarAsSpecifier { local, .. }) => {
if self.should_remove(local.to_id()) {
tracing::trace!(
"Dropping import `{}{:?}` because it should be removed",
local.sym,
local.span.ctxt
);
self.state.should_run_again = true;
false
} else {
true
}
}
});
i
}
fn fold_module(&mut self, mut m: Module) -> Module {
tracing::info!("ssg: Start");
{
// Fill the state.
let mut v = Analyzer {
state: &mut self.state,
in_lhs_of_var: false,
in_data_fn: false,
};
m = m.fold_with(&mut v);
}
// TODO: Use better detection logic
// if !self.state.is_prerenderer && !self.state.is_server_props {
// return m;
// }
m.fold_children_with(self)
}
fn fold_module_item(&mut self, i: ModuleItem) -> ModuleItem {
match i {
ModuleItem::ModuleDecl(ModuleDecl::Import(i)) => {
let is_for_side_effect = i.specifiers.is_empty();
let i = i.fold_with(self);
if !is_for_side_effect && i.specifiers.is_empty() {
return ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }));
}
return ModuleItem::ModuleDecl(ModuleDecl::Import(i));
}
_ => {}
}
let i = i.fold_children_with(self);
match &i {
ModuleItem::ModuleDecl(ModuleDecl::ExportNamed(e)) if e.specifiers.is_empty() => {
return ModuleItem::Stmt(Stmt::Empty(EmptyStmt { span: DUMMY_SP }))
}
_ => {}
}
i
}
fn fold_module_items(&mut self, mut items: Vec<ModuleItem>) -> Vec<ModuleItem> {
items = items.fold_children_with(self);
// Drop nodes.
items.retain(|s| match s {
ModuleItem::Stmt(Stmt::Empty(..)) => false,
_ => true,
});
if !self.state.done
&& !self.state.should_run_again
&& (self.state.is_prerenderer || self.state.is_server_props)
{
self.state.done = true;
if items.iter().any(|s| s.is_module_decl()) {
let mut var = Some(VarDeclarator {
span: DUMMY_SP,
name: Pat::Ident(
Ident::new(
if self.state.is_prerenderer {
"__N_SSG".into()
} else {
"__N_SSP".into()
},
DUMMY_SP,
)
.into(),
),
init: Some(Box::new(Expr::Lit(Lit::Bool(Bool {
span: DUMMY_SP,
value: true,
})))),
definite: Default::default(),
});
let mut new = Vec::with_capacity(items.len() + 1);
for item in take(&mut items) {
match &item {
ModuleItem::ModuleDecl(
ModuleDecl::ExportNamed(..)
| ModuleDecl::ExportDecl(..)
| ModuleDecl::ExportDefaultDecl(..)
| ModuleDecl::ExportDefaultExpr(..),
) => {
if let Some(var) = var.take() {
new.push(ModuleItem::ModuleDecl(ModuleDecl::ExportDecl(
ExportDecl {
span: DUMMY_SP,
decl: Decl::Var(VarDecl {
span: DUMMY_SP,
kind: VarDeclKind::Var,
declare: Default::default(),
decls: vec![var],
}),
},
)))
}
}
_ => {}
}
new.push(item);
}
return new;
}
}
items
}
fn fold_named_export(&mut self, mut n: NamedExport) -> NamedExport {
n.specifiers = n.specifiers.fold_with(self);
n.specifiers.retain(|s| {
let preserve = match s {
ExportSpecifier::Namespace(ExportNamespaceSpecifier {
name: ModuleExportName::Ident(exported),
..
})
| ExportSpecifier::Default(ExportDefaultSpecifier { exported, .. })
| ExportSpecifier::Named(ExportNamedSpecifier {
exported: Some(ModuleExportName::Ident(exported)),
..
}) => self
.state
.is_data_identifier(&exported)
.map(|is_data_identifier| !is_data_identifier),
ExportSpecifier::Named(ExportNamedSpecifier {
orig: ModuleExportName::Ident(orig),
..
}) => self
.state
.is_data_identifier(&orig)
.map(|is_data_identifier| !is_data_identifier),
_ => Ok(true),
};
match preserve {
Ok(false) => {
tracing::trace!("Dropping an export specifier because it's a data identifier");
match s {
ExportSpecifier::Named(ExportNamedSpecifier {
orig: ModuleExportName::Ident(orig),
..
}) => {
self.state.should_run_again = true;
self.state.refs_from_data_fn.insert(orig.to_id());
}
_ => {}
}
false
}
Ok(true) => true,
Err(_) => false,
}
});
n
}
/// This methods returns [Pat::Invalid] if the pattern should be removed.
fn fold_pat(&mut self, mut p: Pat) -> Pat {
p = p.fold_children_with(self);
if self.in_lhs_of_var {
match &mut p {
Pat::Ident(name) => {
if self.should_remove(name.id.to_id()) {
self.state.should_run_again = true;
tracing::trace!(
"Dropping var `{}{:?}` because it should be removed",
name.id.sym,
name.id.span.ctxt
);
return Pat::Invalid(Invalid { span: DUMMY_SP });
}
}
Pat::Array(arr) => {
if !arr.elems.is_empty() {
arr.elems.retain(|e| match e {
Some(Pat::Invalid(..)) => return false,
_ => true,
});
if arr.elems.is_empty() {
return Pat::Invalid(Invalid { span: DUMMY_SP });
}
}
}
Pat::Object(obj) => {
if !obj.props.is_empty() {
obj.props = take(&mut obj.props)
.into_iter()
.filter_map(|prop| match prop {
ObjectPatProp::KeyValue(prop) => {
if prop.value.is_invalid() {
None
} else {
Some(ObjectPatProp::KeyValue(prop))
}
}
ObjectPatProp::Assign(prop) => {
if self.should_remove(prop.key.to_id()) {
self.mark_as_candidate(prop.value);
None
} else {
Some(ObjectPatProp::Assign(prop))
}
}
ObjectPatProp::Rest(prop) => {
if prop.arg.is_invalid() {
None
} else {
Some(ObjectPatProp::Rest(prop))
}
}
})
.collect();
if obj.props.is_empty() {
return Pat::Invalid(Invalid { span: DUMMY_SP });
}
}
}
Pat::Rest(rest) => {
if rest.arg.is_invalid() {
return Pat::Invalid(Invalid { span: DUMMY_SP });
}
}
_ => {}
}
}
p
}
fn fold_stmt(&mut self, mut s: Stmt) -> Stmt {
match s {
Stmt::Decl(Decl::Fn(f)) => {
if self.should_remove(f.ident.to_id()) {
self.mark_as_candidate(f.function);
return Stmt::Empty(EmptyStmt { span: DUMMY_SP });
}
s = Stmt::Decl(Decl::Fn(f));
}
_ => {}
}
let s = s.fold_children_with(self);
match s {
Stmt::Decl(Decl::Var(v)) if v.decls.is_empty() => {
return Stmt::Empty(EmptyStmt { span: DUMMY_SP });
}
_ => {}
}
s
}
/// This method makes the `name` of a [VarDeclarator] [Pat::Invalid] if it
/// should be removed.
fn fold_var_declarator(&mut self, mut d: VarDeclarator) -> VarDeclarator {
let old = self.in_lhs_of_var;
self.in_lhs_of_var = true;
let name = d.name.fold_with(self);
self.in_lhs_of_var = false;
if name.is_invalid() {
d.init = self.mark_as_candidate(d.init);
}
let init = d.init.fold_with(self);
self.in_lhs_of_var = old;
VarDeclarator { name, init, ..d }
}
fn fold_var_declarators(&mut self, mut decls: Vec<VarDeclarator>) -> Vec<VarDeclarator> {
decls = decls.fold_children_with(self);
decls.retain(|d| !d.name.is_invalid());
decls
}
}
ca.py | import ssl
import socket
import OpenSSL
import sqlite3
import signal
from functools import wraps
import requests
from multiprocessing import Process, Value
TIMEOUT = Value('i', 5)
cMax = Value('i', 2)
ca_num = Value('i', 0)
class TimeoutException(Exception):
    pass
def deadline(timeout, *args):
def decorate(f):
def handler(signum, frame):
raise TimeoutException()  # when the alarm signal is handled, raise the exception
@wraps(f)
def new_f(*args):
signal.signal(signal.SIGALRM, handler)  # link the SIGALRM signal to the handler
signal.alarm(timeout)  # set an alarm for `timeout` seconds
res = f(*args)
signal.alarm(0)  # cancel the alarm
return res
return new_f
return decorate
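# Illustrative usage (assumed, not part of the original script): a decorated
# function raises TimeoutException once it exceeds the given number of seconds.
#
#   @deadline(5)
#   def slow_lookup(host):
#       ...
#
#   slow_lookup("example.com")  # raises TimeoutException after 5 seconds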
@deadline(TIMEOUT.value)
def get_certificate(host, port=443, timeout=10):
context = ssl.create_default_context()
context.set_ciphers('DEFAULT:@SECLEVEL=1')
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
conn = socket.create_connection((host, port))
sock = context.wrap_socket(conn, server_hostname=host)
sock.settimeout(timeout)
try:
der_cert = sock.getpeercert(True)
finally:
sock.close()
return ssl.DER_cert_to_PEM_cert(der_cert)
@deadline(60)
def url_direct(user):
user = 'http://' + user
user = requests.get(user).url.split('/')[2]
return user
@deadline(60)
def url_with_header(user):
user = 'http://' + user
user = requests.head(user).headers['location'].split('/')[2]
return user
def get_url(user, counter, error):
try:
user = url_direct(user)
except TimeoutException:
print(" Impossible to get url (TimeoutException) from ", user)
cur.execute("INSERT INTO errors VALUES (?, ?, ?)", (user, user.split('.')[len(user.split('.'))-1], error))
counter = cMax.value-1
except:
try:
user = url_with_header(user)
except TimeoutException:
print(" Impossible to get url (TimeoutException) from ", user)
cur.execute("INSERT INTO errors VALUES (?, ?, ?)", (user, user.split('.')[len(user.split('.'))-1], error))
counter = cMax.value-1
except:
print(" Impossible to get url from ", user)
cur.execute("INSERT INTO errors VALUES (?, ?, ?)", (user, user.split('.')[len(user.split('.'))-1], error))
counter = cMax.value-1
return user, counter
def processus(user):
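# Retry scheme (as written below): attempt the TLS fetch up to cMax times,
# chasing HTTP redirects between attempts to find the real host; on the final
# timeout, raise the shared TIMEOUT to 60 seconds and reset the counter for
# one more round of attempts.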
counter = 0
ok = False
while ok == False:
try:
certificate = get_certificate(user)
x509 = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, certificate)
provider = x509.get_issuer().organizationName
cur.execute("INSERT INTO ca VALUES (?, ?, ?)", (user, provider, ca_num.value))
print(user, ": ", provider)
ok = True
except TimeoutException as e:
if (counter == cMax.value-1):
if (TIMEOUT.value != 60):
TIMEOUT.value = 60
counter -= counter
else:
cur.execute("INSERT INTO errors VALUES (?, ?, ?)", (user, user.split('.')[len(user.split('.'))-1], repr(e)))
else:
user, counter = get_url(user, counter, repr(e))
print(" ", repr(e), user)
ok = False
counter += 1
except Exception as e:
if (counter == cMax.value-1):
cur.execute("INSERT INTO errors VALUES (?, ?, ?)", (user, user.split('.')[len(user.split('.'))-1], repr(e)))
else:
user, counter = get_url(user, counter, repr(e))
print(" ", repr(e), user)
ok = False
counter += 1
finally:
con.commit()
ca_num.value += 1
if counter == cMax.value:
ok = True
con = sqlite3.connect('ca-providers.db')
cur = con.cursor()
try:
cur.execute("CREATE TABLE ca (ca_user, ca_provider, ca_num)")
except sqlite3.OperationalError:
cur.execute("DELETE FROM ca")
try:
cur.execute("CREATE TABLE errors (user, extension, error)")
except sqlite3.OperationalError:
cur.execute("DELETE FROM errors")
con.commit()
debut = 0
with open("list1m2020.csv", "r") as f:
for line in f:
user = line.split()[0]
p = Process(target=processus, args=(user,))
p.start()
p.join()
if (TIMEOUT.value != 5):
TIMEOUT.value = 5
con.close()
tae.rs | // Copyright (C) 2017-2019 Baidu, Inc. All Rights Reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Baidu, Inc., nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//! Trust Platform Service Functions
//!
//! The sgx_tservice library provides the following functions that allow an ISV
//! to use platform services and get platform services security property.
//!
use sgx_types::*;
///
/// rsgx_create_pse_session creates a session with the PSE.
///
/// # Description
///
/// An Intel(R) SGX enclave first calls rsgx_create_pse_session() in the process to request platform service.
///
/// It's suggested that the caller should wait (typically several seconds to tens of seconds) and retry
/// this API if SGX_ERROR_BUSY is returned.
///
/// # Requirements
///
/// Header: sgx_tae_service.edl
///
/// Library: libsgx_tservice.a
///
/// # Errors
///
/// **SGX_ERROR_SERVICE_UNAVAILABLE**
///
/// The AE service did not respond or the requested service is not supported.
///
/// **SGX_ERROR_SERVICE_TIMEOUT**
///
/// A request to the AE service timed out.
///
/// **SGX_ERROR_BUSY**
///
/// The requested service is temporarily not available.
///
/// **SGX_ERROR_OUT_OF_MEMORY**
///
/// Not enough memory is available to complete this operation.
///
/// **SGX_ERROR_NETWORK_FAILURE**
///
/// Network connecting or proxy setting issue was encountered.
///
/// **SGX_ERROR_OUT_OF_EPC**
///
/// There is not enough EPC memory to load one of the Architecture Enclaves needed to complete this operation.
///
/// **SGX_ERROR_UPDATE_NEEDED**
///
/// Intel(R) SGX needs to be updated.
///
/// **SGX_ERROR_UNEXPECTED**
///
/// Indicates an unexpected error occurred.
///
pub fn rsgx_create_pse_session() -> SgxError {
let ret = unsafe { sgx_create_pse_session() };
match ret {
sgx_status_t::SGX_SUCCESS => Ok(()),
_ => Err(ret),
}
}
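// Illustrative caller sketch (assumed, following the retry advice above):
//
//   loop {
//       match rsgx_create_pse_session() {
//           Ok(()) => break,
//           Err(sgx_status_t::SGX_ERROR_BUSY) => { /* wait, then retry */ }
//           Err(e) => panic!("create_pse_session failed: {:?}", e),
//       }
//   }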
///
/// rsgx_close_pse_session closes a session created by rsgx_create_pse_session.
///
/// # Description
///
/// An Intel(R) SGX enclave calls rsgx_close_pse_session() when there is no need to request platform service.
///
/// # Requirements
///
/// Header: sgx_tae_service.edl
///
/// Library: libsgx_tservice.a
///
/// # Errors
///
/// **SGX_ERROR_SERVICE_UNAVAILABLE**
///
/// The AE service did not respond or the requested service is not supported.
///
/// **SGX_ERROR_SERVICE_TIMEOUT**
///
/// A request to the AE service timed out.
///
/// **SGX_ERROR_UNEXPECTED**
///
/// Indicates an unexpected error occurred.
///
pub fn rsgx_close_pse_session() -> SgxError {
let ret = unsafe { sgx_close_pse_session() };
match ret {
sgx_status_t::SGX_SUCCESS => Ok(()),
_ => Err(ret),
}
}
///
/// rsgx_get_ps_sec_prop gets a data structure describing the security property of the platform service.
///
/// # Description
///
/// Gets a data structure that describes the security property of the platform service.
///
/// The caller should call rsgx_create_pse_session to establish a session with the platform service enclave
/// before calling this API.
///
/// # Requirements
///
/// Header: sgx_tae_service.edl
///
/// Library: libsgx_tservice.a
///
/// # Return value
///
/// The security property descriptor of the platform service
///
/// # Errors
///
/// **SGX_ERROR_INVALID_PARAMETER**
///
/// Any of the pointers is invalid.
///
/// **SGX_ERROR_AE_SESSION_INVALID**
///
/// Session is not created or has been closed by architectural enclave service.
///
pub fn rsgx_get_ps_sec_prop() -> SgxResult<sgx_ps_sec_prop_desc_t> {
let mut security_property: sgx_ps_sec_prop_desc_t = Default::default();
let ret = unsafe { sgx_get_ps_sec_prop(&mut security_property as * mut sgx_ps_sec_prop_desc_t) };
match ret {
sgx_status_t::SGX_SUCCESS => Ok(security_property),
_ => Err(ret),
}
}
///
/// rsgx_get_ps_sec_prop_ex gets a data structure describing the security property of the platform service.
///
/// # Description
///
/// Gets a data structure that describes the security property of the platform service.
///
/// The caller should call rsgx_create_pse_session to establish a session with the platform service enclave
/// before calling this API.
///
/// # Requirements
///
/// Header: sgx_tae_service.edl
///
/// Library: libsgx_tservice.a
///
/// # Return value
///
/// The security property descriptor of the platform service
///
/// # Errors
///
/// **SGX_ERROR_INVALID_PARAMETER**
///
/// Any of the pointers is invalid.
///
/// **SGX_ERROR_AE_SESSION_INVALID**
///
/// Session is not created or has been closed by architectural enclave service.
///
pub fn rsgx_get_ps_sec_prop_ex() -> SgxResult<sgx_ps_sec_prop_desc_ex_t> {
let mut security_property: sgx_ps_sec_prop_desc_ex_t = Default::default();
let ret = unsafe { sgx_get_ps_sec_prop_ex(&mut security_property as * mut sgx_ps_sec_prop_desc_ex_t) };
match ret {
sgx_status_t::SGX_SUCCESS => Ok(security_property),
_ => Err(ret),
}
}
background.js | /* Copyright (c) 2019 Parallax Inc., All Rights Reserved. */
// Register listeners to create app window upon application launch and
// to close active serial ports upon application termination
chrome.app.runtime.onLaunched.addListener(function() {
chrome.app.window.create('index.html', {
id: "BlocklyProp-Launcher",
innerBounds: {
width: 500,
height: 414
}, state: "normal",
resizable: false
}, function(win) {
win.onClosed.addListener(closeSerialPorts);
win.onClosed.addListener(closeServer);
});
});
function closeSerialPorts() {
// Close this app's active serial ports
chrome.serial.getConnections(function(activeConnections) {
activeConnections.forEach(function(port) {
chrome.serial.disconnect(port.connectionId, function() {});
});
});
}
function closeServer() {
// Close this app's active server(s)
chrome.sockets.tcpServer.getSockets(function (socketInfos) {
socketInfos.forEach(function(v) {chrome.sockets.tcpServer.close(v.socketId)});
});
}
generator.py | import torch
import torch.nn as nn
class Generator(nn.Module):
def __init__(self):
super().__init__()
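# Transposed-convolution stack: maps an (N, 100, 1, 1) latent vector through
# spatial sizes 1 -> 4 -> 8 -> 16 -> 32 -> 64 -> 128, producing an
# (N, 3, 128, 128) image squashed to [-1, 1] by the final Tanh.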
self.conv_block = nn.Sequential(
nn.ConvTranspose2d(100, 512, 4, 1, 0),
nn.BatchNorm2d(512),
nn.ReLU(True),
nn.ConvTranspose2d(512, 256, 4, 2, 1),
nn.BatchNorm2d(256),
nn.ReLU(True),
nn.ConvTranspose2d(256, 128, 4, 2, 1),
nn.BatchNorm2d(128),
nn.ReLU(True),
nn.ConvTranspose2d(128, 64, 4, 2, 1),
nn.BatchNorm2d(64),
nn.ReLU(True),
nn.ConvTranspose2d(64, 3, 4, 2, 1),
nn.BatchNorm2d(3),
nn.ReLU(True),
nn.ConvTranspose2d(3, 3, 4, 2, 1),
nn.Tanh(),
)
def forward(self, x):
x = self.conv_block(x)
return x
if __name__ == '__main__':
img = torch.randn(1, 100, 1, 1)
gen = Generator()
print(gen(img).shape)
migrate-users.py | #!/usr/bin/env python
from __future__ import print_function
import argparse
from datetime import datetime
import sys
import json
import yaml
import urllib
import requests
import time
import re
# import logging
# import httplib as http_client
# http_client.HTTPConnection.debuglevel = 1
# logging.basicConfig()
# logging.getLogger().setLevel(logging.DEBUG)
# requests_log = logging.getLogger("requests.packages.urllib3")
# requests_log.setLevel(logging.DEBUG)
# requests_log.propagate = True
# If you're running into M_NOT_JSON issues: python-requests strips the body of the request on 301 redirects. Make sure you're using the direct url of your homeserver.
# See https://github.com/requests/requests/issues/2590
def get_appservice_token(reg):
with open(reg, "r") as f:
reg_yaml = yaml.load(f)
return reg_yaml["as_token"]
def get_users(homeserver, room_id, token, user_prefix, name_suffix):
res = requests.get(homeserver + "/_matrix/client/r0/rooms/" + urllib.quote(room_id) + "/joined_members?access_token=" + token)
joined = res.json().get("joined", None)
user_ids = [user_id for user_id in joined if user_id.startswith(user_prefix) and (joined.get(user_id).get("display_name") or "").endswith(name_suffix) ]
return { uid: joined.get(uid).get("display_name") for uid in user_ids }
def get_rooms(homeserver, token):
res = requests.get(homeserver + "/_matrix/client/r0/joined_rooms?access_token=" + token).json()
room_ids = []
for room_id in res["joined_rooms"]:
room_ids.append(room_id)
return room_ids
def migrate_displayname(uid, oldname, suffix, homeserver, token):
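# e.g. oldname "alice (irc.freenode.net)" with suffix "(irc.freenode.net)"
# becomes newname "alice".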
newname = re.sub(re.escape(suffix)+'$', "", oldname).rstrip()
print("Migrating %s from %s to %s" % (uid, oldname, newname))
headers = { 'Content-Type': 'application/json' }
res = requests.put(homeserver + "/_matrix/client/r0/profile/" + urllib.quote(uid) + "/displayname?access_token=" + token + "&user_id=" + urllib.quote(uid),
data = json.dumps({ 'displayname': newname }), headers=headers)
if res.json():
print(res.json())
if 'M_NOT_JSON' in str(res.json()):
print("python-requests strips the body of the request on 301 redirects (https://github.com/requests/requests/issues/2590). Make sure you're using the direct url of your homeserver.")
def main(registration, homeserver, prefix, suffix):
token = get_appservice_token(registration)
if not token:
raise Exception("Cannot read as_token from registration file")
rooms = get_rooms(homeserver, token)
per_room_users = [get_users(homeserver, room, token, prefix, suffix) for room in rooms]
merged_users = { k: v for d in per_room_users for k,v in d.items() }
for uid, display in merged_users.iteritems():
migrate_displayname(uid, display, suffix, homeserver, token)
time.sleep(0.1)
if __name__ == "__main__":
parser = argparse.ArgumentParser("Remove (ircserver) suffix from users")
parser.add_argument("-r", "--registration", help="The path to the AS registration file", required=True)
parser.add_argument("-u", "--url", help="Base homeserver URL eg 'https://matrix.org'", required=True)
parser.add_argument("-p", "--prefix", help="User prefix to determine which users to check. E.g. @freenode_", required=True)
parser.add_argument("-s", "--suffix", help="Suffix to remove. E.g. (irc.freenode.net)", required=True)
args = parser.parse_args()
if not args.registration or not args.url or not args.prefix or not args.suffix:
parser.print_help()
sys.exit(1)
if args.prefix[0] != "@":
parser.print_help()
print("--prefix must start with '@'")
sys.exit(1)
main(registration=args.registration, homeserver=args.url, prefix=args.prefix, suffix=args.suffix)
main.ts | import * as core from '@actions/core'
import {checkReference, checkReferenceAndSetForSHA} from "./guardian";
import {getRequiredEnvironmentVariable} from "./utils";
import {context} from "@actions/github";
async function run(): Promise<void> {
try {
await _runImpl()
} catch (error) {
core.setFailed(error.message)
}
}
async function _runImpl(): Promise<void> {
let GITHUB_REPOSITORY = getRequiredEnvironmentVariable('GITHUB_REPOSITORY');
let [owner, name] = GITHUB_REPOSITORY.split('/');
core.info(`Handling '${context.payload.action}' action for ${context.eventName} event for ${context.ref}@${context.sha}.`);
// can't rely on GITHUB_REF because of https://github.community/t5/GitHub-Actions/check-run-check-suite-events-always-run-workflow-against-latest/m-p/41537/highlight/true#M4656
if (context.eventName == "pull_request") {
let pr = context.payload.pull_request?.number;
let sha = context.payload.pull_request?.head?.sha;
let ref = context.payload.pull_request?.base?.ref;
core.info(`Checking reference ${ref} for PR #${pr} and SHA ${sha}.`);
await checkReferenceAndSetForSHA(owner, name, ref, sha);
} else if (context.eventName == "check_suite") {
await checkReference(owner, name, `refs/heads/${context.payload.check_suite?.head_branch}`);
} else if (context.eventName == "repository_dispatch") {
let GITHUB_SHA = getRequiredEnvironmentVariable('GITHUB_SHA');
core.info(`Checking branch ${context.payload.repository_dispatch?.branch}`);
core.info(`Checking GITHUB_SHA : ${GITHUB_SHA}`);
await checkReferenceAndSetForSHA(owner, name, `refs/heads/${context.payload.repository_dispatch?.branch}`, GITHUB_SHA);
} else {
core.warning(`Don't know how to process '${context.eventName}' event!`);
}
}
run();
main.rs | fn main() {
println!("Hello, world!");
println!("{:?}", Solution::powerful_integers(2,1,10));
}
pub struct Solution{}
use std::collections::HashSet;
impl Solution {
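// LeetCode 970 "Powerful Integers": collect every distinct value x^i + y^j
// (i, j >= 0) that is <= bound, e.g. powerful_integers(2, 3, 10) yields
// {2, 3, 4, 5, 7, 9, 10} in some order.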
pub fn powerful_integers(x: i32, y: i32, bound: i32) -> Vec<i32> {
if bound < 2 {
return vec![];
}
let mut set = HashSet::new();
let mut temp = 2;
let mut i = 0;
let mut j = 0;
while temp <= bound {
while temp <= bound {
set.insert(temp);
temp = x.pow(i) + y.pow(j);
j += 1;
if y == 1 && j > 1 {
break;
}
}
i += 1;
j = 0;
temp = x.pow(i) + y.pow(j);
if x == 1 && i > 1 {
break;
}
}
let res = set.iter().map(|x| *x).collect();
res
}
}
/*
Result:
Accepted
Runtime: 0 ms, beats 100.00% of Rust submissions
Memory usage: 2 MB, beats 100.00% of Rust submissions
*/
url.go | // Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package apmhttputil
import (
"net"
"net/http"
"strings"
"github.com/Beeketing/apm-agent-go/internal/apmstrings"
"github.com/Beeketing/apm-agent-go/model"
)
// RequestURL returns a model.URL for req.
//
// If req contains an absolute URI, the values will be split and
// sanitized, but no further processing performed. For all other
// requests (i.e. most server-side requests), we reconstruct the
// URL based on various proxy forwarding headers and other request
// attributes.
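//
// Illustrative example: a server-side request for "/search?q=x" behind a
// proxy that sets X-Forwarded-Host "example.com:8443" and X-Forwarded-Proto
// "https" yields Hostname "example.com", Port "8443", Protocol "https".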
func RequestURL(req *http.Request, forwarded *ForwardedHeader) model.URL {
out := model.URL{
Path: truncateString(req.URL.Path),
Search: truncateString(req.URL.RawQuery),
Hash: truncateString(req.URL.Fragment),
}
if req.URL.Host != "" {
// Absolute URI: client-side or proxy request, so ignore the
// headers.
hostname, port := splitHost(req.URL.Host)
out.Hostname = truncateString(hostname)
out.Port = truncateString(port)
out.Protocol = truncateString(req.URL.Scheme)
return out
}
// This is a server-side request URI, which contains only the path.
// We synthesize the full URL by extracting the host and protocol
// from headers, or inferring from other properties.
var fullHost string
if forwarded != nil && forwarded.Host != "" {
fullHost = forwarded.Host
out.Protocol = truncateString(forwarded.Proto)
} else if xfh := req.Header.Get("X-Forwarded-Host"); xfh != "" {
fullHost = xfh
} else {
fullHost = req.Host
}
hostname, port := splitHost(fullHost)
out.Hostname = truncateString(hostname)
out.Port = truncateString(port)
// Protocol might be extracted from the Forwarded header. If it's not,
// look for various other headers.
if out.Protocol == "" {
if proto := req.Header.Get("X-Forwarded-Proto"); proto != "" {
out.Protocol = truncateString(proto)
} else if proto := req.Header.Get("X-Forwarded-Protocol"); proto != "" {
out.Protocol = truncateString(proto)
} else if proto := req.Header.Get("X-Url-Scheme"); proto != "" {
out.Protocol = truncateString(proto)
} else if req.Header.Get("Front-End-Https") == "on" {
out.Protocol = "https"
} else if req.Header.Get("X-Forwarded-Ssl") == "on" {
out.Protocol = "https"
} else if req.TLS != nil {
out.Protocol = "https"
} else {
// Assume http otherwise.
out.Protocol = "http"
}
}
return out
}
func splitHost(in string) (host, port string) {
if strings.LastIndexByte(in, ':') == -1 {
// In the common (relative to other "errors") case that
// there is no colon, we can avoid allocations by not
// calling SplitHostPort.
return in, ""
}
host, port, err := net.SplitHostPort(in)
if err != nil {
return in, ""
}
return host, port
}
func truncateString(s string) string {
// At the time of writing, all length limits are 1024.
s, _ = apmstrings.Truncate(s, 1024)
return s
}
popup.rs | use super::{construct_list_widget, help, Frame};
use crate::state::*;
use tui::{layout::*, widgets::*};
/// renders a popup (if any) to handle a command or show additional information
/// depending on the current popup state.
///
/// The function returns a rectangle area to render the main layout
/// and a boolean `is_active` determining whether the focus is **not** placed on the popup.
pub fn render_popup(frame: &mut Frame, state: &SharedState, rect: Rect) -> (Rect, bool) {
let ui = state.ui.lock();
match ui.popup {
None => (rect, true),
Some(ref popup) => match popup {
PopupState::Search { query } => {
let chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Min(0), Constraint::Length(3)].as_ref())
.split(rect);
let widget = Paragraph::new(format!("/{}", query)).block(
Block::default()
.borders(Borders::ALL)
.title(ui.theme.block_title_with_style("Search")),
);
frame.render_widget(widget, chunks[1]);
(chunks[0], true)
}
PopupState::CommandHelp { .. } => {
let chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Length(7), Constraint::Min(0)].as_ref())
.split(rect);
drop(ui);
help::render_commands_help_popup(frame, state, chunks[1]);
(chunks[0], false)
}
PopupState::ActionList(item, _) => {
let items = item
.actions()
.iter()
.map(|a| (format!("{:?}", a), false))
.collect();
drop(ui);
let rect = render_list_popup(frame, state, rect, "Actions", items, 7);
(rect, false)
}
PopupState::DeviceList { .. } => {
let player = state.player.read();
let current_device_id = match player.playback {
Some(ref playback) => playback.device.id.as_deref().unwrap_or_default(),
None => "",
};
let items = player
.devices
.iter()
.map(|d| (format!("{} | {}", d.name, d.id), current_device_id == d.id))
.collect();
drop(ui);
let rect = render_list_popup(frame, state, rect, "Devices", items, 5);
(rect, false)
}
PopupState::ThemeList(themes, ..) => {
let items = themes.iter().map(|t| (t.name.clone(), false)).collect();
drop(ui);
let rect = render_list_popup(frame, state, rect, "Themes", items, 7);
(rect, false)
}
PopupState::UserPlaylistList(action, _) => {
let data = state.data.read();
let playlists = match action {
PlaylistPopupAction::Browse => data.user_data.playlists.iter().collect(),
PlaylistPopupAction::AddTrack(_) => data.user_data.playlists_created_by_user(),
};
let items = playlists
.into_iter()
.map(|p| (p.name.clone(), false))
.collect();
drop(ui);
let rect = render_list_popup(frame, state, rect, "User Playlists", items, 10);
(rect, false)
}
PopupState::UserFollowedArtistList { .. } => {
let items = state
.data
.read()
.user_data
.followed_artists
.iter()
.map(|a| (a.name.clone(), false))
.collect();
drop(ui);
let rect = render_list_popup(frame, state, rect, "User Followed Artists", items, 7);
(rect, false)
}
PopupState::UserSavedAlbumList { .. } => {
let items = state
.data
.read()
.user_data
.saved_albums
.iter()
.map(|a| (a.name.clone(), false))
.collect();
drop(ui);
let rect = render_list_popup(frame, state, rect, "User Saved Albums", items, 7);
(rect, false)
}
PopupState::ArtistList(artists, ..) => {
let items = artists.iter().map(|a| (a.name.clone(), false)).collect();
drop(ui);
let rect = render_list_popup(frame, state, rect, "Artists", items, 5);
(rect, false)
}
},
}
}
/// a helper function to render a list popup
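///
/// It splits `rect` vertically, reserves `length` rows at the bottom for the
/// popup list, and returns the remaining top area for the caller to render into.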
fn render_list_popup(
frame: &mut Frame,
state: &SharedState,
rect: Rect,
title: &'static str,
items: Vec<(String, bool)>,
length: u16,
) -> Rect {
let chunks = Layout::default()
.direction(Direction::Vertical)
.constraints([Constraint::Min(0), Constraint::Length(length)].as_ref())
.split(rect);
let widget = construct_list_widget(state, items, title, true, None);
frame.render_stateful_widget(
widget,
chunks[1],
state
.ui
.lock()
.popup
.as_mut()
.unwrap()
.list_state_mut()
.unwrap(),
);
chunks[0]
}
ext-options.js | ace.define("ace/ext/menu_tools/overlay_page",["require","exports","module","ace/lib/dom"], function(require, exports, module) {
'use strict';
var dom = require("../../lib/dom");
var cssText = "#ace_settingsmenu, #kbshortcutmenu {\
background-color: #F7F7F7;\
color: black;\
box-shadow: -5px 4px 5px rgba(126, 126, 126, 0.55);\
padding: 1em 0.5em 2em 1em;\
overflow: auto;\
position: absolute;\
margin: 0;\
bottom: 0;\
right: 0;\
top: 0;\
z-index: 9991;\
cursor: default;\
}\
.ace_dark #ace_settingsmenu, .ace_dark #kbshortcutmenu {\
box-shadow: -20px 10px 25px rgba(126, 126, 126, 0.25);\
background-color: rgba(255, 255, 255, 0.6);\
color: black;\
}\
.ace_optionsMenuEntry:hover {\
background-color: rgba(100, 100, 100, 0.1);\
transition: all 0.3s\
}\
.ace_closeButton {\
background: rgba(245, 146, 146, 0.5);\
border: 1px solid #F48A8A;\
border-radius: 50%;\
padding: 7px;\
position: absolute;\
right: -8px;\
top: -8px;\
z-index: 100000;\
}\
.ace_closeButton{\
background: rgba(245, 146, 146, 0.9);\
}\
.ace_optionsMenuKey {\
color: darkslateblue;\
font-weight: bold;\
}\
.ace_optionsMenuCommand {\
color: darkcyan;\
font-weight: normal;\
}\
.ace_optionsMenuEntry input, .ace_optionsMenuEntry button {\
vertical-align: middle;\
}\
.ace_optionsMenuEntry button[ace_selected_button=true] {\
background: #e7e7e7;\
box-shadow: 1px 0px 2px 0px #adadad inset;\
border-color: #adadad;\
}\
.ace_optionsMenuEntry button {\
background: white;\
border: 1px solid lightgray;\
margin: 0px;\
}\
.ace_optionsMenuEntry button:hover{\
background: #f0f0f0;\
}";
dom.importCssString(cssText);
module.exports.overlayPage = function overlayPage(editor, contentElement, top, right, bottom, left) {
top = top ? 'top: ' + top + ';' : '';
bottom = bottom ? 'bottom: ' + bottom + ';' : '';
right = right ? 'right: ' + right + ';' : '';
left = left ? 'left: ' + left + ';' : '';
var closer = document.createElement('div');
var contentContainer = document.createElement('div');
function documentEscListener(e) {
if (e.keyCode === 27) {
closer.click();
}
}
closer.style.cssText = 'margin: 0; padding: 0; ' +
'position: fixed; top:0; bottom:0; left:0; right:0;' +
'z-index: 9990; ' +
'background-color: rgba(0, 0, 0, 0.3);';
closer.addEventListener('click', function() {
document.removeEventListener('keydown', documentEscListener);
closer.parentNode.removeChild(closer);
editor.focus();
closer = null;
});
document.addEventListener('keydown', documentEscListener);
contentContainer.style.cssText = top + right + bottom + left;
contentContainer.addEventListener('click', function(e) {
e.stopPropagation();
});
var wrapper = dom.createElement("div");
wrapper.style.position = "relative";
var closeButton = dom.createElement("div");
closeButton.className = "ace_closeButton";
closeButton.addEventListener('click', function() {
closer.click();
});
wrapper.appendChild(closeButton);
contentContainer.appendChild(wrapper);
contentContainer.appendChild(contentElement);
closer.appendChild(contentContainer);
document.body.appendChild(closer);
editor.blur();
};
});
ace.define("ace/ext/modelist",["require","exports","module"], function(require, exports, module) {
"use strict";
var modes = [];
function getModeForPath(path) {
var mode = modesByName.text;
var fileName = path.split(/[\/\\]/).pop();
for (var i = 0; i < modes.length; i++) {
if (modes[i].supportsFile(fileName)) {
mode = modes[i];
break;
}
}
return mode;
}
var Mode = function(name, caption, extensions) {
this.name = name;
this.caption = caption;
this.mode = "ace/mode/" + name;
this.extensions = extensions;
var re;
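// e.g. (illustrative) "js|jsm|jsx" has no "^", so it compiles to
// /^.*\.(js|jsm|jsx)$/gi, while "^Makefile|make" compiles to
// /^Makefile$|^.*\.make$/gi.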
if (/\^/.test(extensions)) {
re = extensions.replace(/\|(\^)?/g, function(a, b){
return "$|" + (b ? "^" : "^.*\\.");
}) + "$";
} else {
re = "^.*\\.(" + extensions + ")$";
}
this.extRe = new RegExp(re, "gi");
};
Mode.prototype.supportsFile = function(filename) {
return filename.match(this.extRe);
};
var supportedModes = {
ABAP: ["abap"],
ABC: ["abc"],
ActionScript:["as"],
ADA: ["ada|adb"],
Apache_Conf: ["^htaccess|^htgroups|^htpasswd|^conf|htaccess|htgroups|htpasswd"],
AsciiDoc: ["asciidoc|adoc"],
ASL: ["dsl|asl"],
Assembly_x86:["asm|a"],
AutoHotKey: ["ahk"],
Apex: ["apex|cls|trigger|tgr"],
BatchFile: ["bat|cmd"],
Bro: ["bro"],
C_Cpp: ["cpp|c|cc|cxx|h|hh|hpp|ino"],
C9Search: ["c9search_results"],
Cirru: ["cirru|cr"],
Clojure: ["clj|cljs"],
Cobol: ["CBL|COB"],
coffee: ["coffee|cf|cson|^Cakefile"],
ColdFusion: ["cfm"],
CSharp: ["cs"],
Csound_Document: ["csd"],
Csound_Orchestra: ["orc"],
Csound_Score: ["sco"],
CSS: ["css"],
Curly: ["curly"],
D: ["d|di"],
Dart: ["dart"],
Diff: ["diff|patch"],
Dockerfile: ["^Dockerfile"],
Dot: ["dot"],
Drools: ["drl"],
Edifact: ["edi"],
Eiffel: ["e|ge"],
EJS: ["ejs"],
Elixir: ["ex|exs"],
Elm: ["elm"],
Erlang: ["erl|hrl"],
Forth: ["frt|fs|ldr|fth|4th"],
Fortran: ["f|f90"],
FSharp: ["fsi|fs|ml|mli|fsx|fsscript"],
FSL: ["fsl"],
FTL: ["ftl"],
Gcode: ["gcode"],
Gherkin: ["feature"],
Gitignore: ["^.gitignore"],
Glsl: ["glsl|frag|vert"],
Gobstones: ["gbs"],
golang: ["go"],
GraphQLSchema: ["gql"],
Groovy: ["groovy"],
HAML: ["haml"],
Handlebars: ["hbs|handlebars|tpl|mustache"],
Haskell: ["hs"],
Haskell_Cabal: ["cabal"],
haXe: ["hx"],
Hjson: ["hjson"],
HTML: ["html|htm|xhtml|vue|we|wpy"],
HTML_Elixir: ["eex|html.eex"],
HTML_Ruby: ["erb|rhtml|html.erb"],
INI: ["ini|conf|cfg|prefs"],
Io: ["io"],
Jack: ["jack"],
Jade: ["jade|pug"],
Java: ["java"],
JavaScript: ["js|jsm|jsx"],
JSON: ["json"],
JSONiq: ["jq"],
JSP: ["jsp"],
JSSM: ["jssm|jssm_state"],
JSX: ["jsx"],
Julia: ["jl"],
Kotlin: ["kt|kts"],
LaTeX: ["tex|latex|ltx|bib"],
LESS: ["less"],
Liquid: ["liquid"],
Lisp: ["lisp"],
LiveScript: ["ls"],
LogiQL: ["logic|lql"],
LSL: ["lsl"],
Lua: ["lua"],
LuaPage: ["lp"],
Lucene: ["lucene"],
Makefile: ["^Makefile|^GNUmakefile|^makefile|^OCamlMakefile|make"],
Markdown: ["md|markdown"],
Mask: ["mask"],
MATLAB: ["matlab"],
Maze: ["mz"],
MEL: ["mel"],
MIXAL: ["mixal"],
MUSHCode: ["mc|mush"],
MySQL: ["mysql"],
Nix: ["nix"],
NSIS: ["nsi|nsh"],
ObjectiveC: ["m|mm"],
OCaml: ["ml|mli"],
Pascal: ["pas|p"],
Perl: ["pl|pm"],
Perl6: ["p6|pl6|pm6"],
pgSQL: ["pgsql"],
PHP_Laravel_blade: ["blade.php"],
PHP: ["php|inc|phtml|shtml|php3|php4|php5|phps|phpt|aw|ctp|module"],
Puppet: ["epp|pp"],
Pig: ["pig"],
Powershell: ["ps1"],
Praat: ["praat|praatscript|psc|proc"],
Prolog: ["plg|prolog"],
Properties: ["properties"],
Protobuf: ["proto"],
Python: ["py"],
R: ["r"],
Razor: ["cshtml|asp"],
RDoc: ["Rd"],
Red: ["red|reds"],
RHTML: ["Rhtml"],
RST: ["rst"],
Ruby: ["rb|ru|gemspec|rake|^Guardfile|^Rakefile|^Gemfile"],
Rust: ["rs"],
SASS: ["sass"],
SCAD: ["scad"],
Scala: ["scala"],
Scheme: ["scm|sm|rkt|oak|scheme"],
SCSS: ["scss"],
SH: ["sh|bash|^.bashrc"],
SJS: ["sjs"],
Slim: ["slim|skim"],
Smarty: ["smarty|tpl"],
snippets: ["snippets"],
Soy_Template:["soy"],
Space: ["space"],
SQL: ["sql"],
SQLServer: ["sqlserver"],
Stylus: ["styl|stylus"],
SVG: ["svg"],
Swift: ["swift"],
Tcl: ["tcl"],
Terraform: ["tf", "tfvars", "terragrunt"],
Tex: ["tex"],
Text: ["txt"],
Textile: ["textile"],
Toml: ["toml"],
TSX: ["tsx"],
Twig: ["latte|twig|swig"],
Typescript: ["ts|typescript|str"],
Vala: ["vala"],
VBScript: ["vbs|vb"],
Velocity: ["vm"],
Verilog: ["v|vh|sv|svh"],
VHDL: ["vhd|vhdl"],
Visualforce: ["vfp|component|page"],
Wollok: ["wlk|wpgm|wtest"],
XML: ["xml|rdf|rss|wsdl|xslt|atom|mathml|mml|xul|xbl|xaml"],
XQuery: ["xq"],
YAML: ["yaml|yml"],
Django: ["html"]
};
var nameOverrides = {
ObjectiveC: "Objective-C",
CSharp: "C#",
golang: "Go",
C_Cpp: "C and C++",
Csound_Document: "Csound Document",
Csound_Orchestra: "Csound",
Csound_Score: "Csound Score",
coffee: "CoffeeScript",
HTML_Ruby: "HTML (Ruby)",
HTML_Elixir: "HTML (Elixir)",
FTL: "FreeMarker",
PHP_Laravel_blade: "PHP (Blade Template)",
Perl6: "Perl 6",
AutoHotKey: "AutoHotkey / AutoIt"
};
var modesByName = {};
for (var name in supportedModes) {
var data = supportedModes[name];
var displayName = (nameOverrides[name] || name).replace(/_/g, " ");
var filename = name.toLowerCase();
var mode = new Mode(filename, displayName, data[0]);
modesByName[filename] = mode;
modes.push(mode);
}
module.exports = {
getModeForPath: getModeForPath,
modes: modes,
modesByName: modesByName
};
});
ace.define("ace/ext/themelist",["require","exports","module"], function(require, exports, module) {
"use strict";
var themeData = [
["Chrome" ],
["Clouds" ],
["Crimson Editor" ],
["Dawn" ],
["Dreamweaver" ],
["Eclipse" ],
["GitHub" ],
["IPlastic" ],
["Solarized Light"],
["TextMate" ],
["Tomorrow" ],
["XCode" ],
["Kuroir"],
["KatzenMilch"],
["SQL Server" ,"sqlserver" , "light"],
["Ambiance" ,"ambiance" , "dark"],
["Chaos" ,"chaos" , "dark"],
["Clouds Midnight" ,"clouds_midnight" , "dark"],
["Dracula" ,"" , "dark"],
["Cobalt" ,"cobalt" , "dark"],
["Gruvbox" ,"gruvbox" , "dark"],
["Green on Black" ,"gob" , "dark"],
["idle Fingers" ,"idle_fingers" , "dark"],
["krTheme" ,"kr_theme" , "dark"],
["Merbivore" ,"merbivore" , "dark"],
["Merbivore Soft" ,"merbivore_soft" , "dark"],
["Mono Industrial" ,"mono_industrial" , "dark"],
["Monokai" ,"monokai" , "dark"],
["Pastel on dark" ,"pastel_on_dark" , "dark"],
["Solarized Dark" ,"solarized_dark" , "dark"],
["Terminal" ,"terminal" , "dark"],
["Tomorrow Night" ,"tomorrow_night" , "dark"],
["Tomorrow Night Blue" ,"tomorrow_night_blue" , "dark"],
["Tomorrow Night Bright","tomorrow_night_bright" , "dark"],
["Tomorrow Night 80s" ,"tomorrow_night_eighties" , "dark"],
["Twilight" ,"twilight" , "dark"],
["Vibrant Ink" ,"vibrant_ink" , "dark"]
];
exports.themesByName = {};
exports.themes = themeData.map(function(data) {
var name = data[1] || data[0].replace(/ /g, "_").toLowerCase();
var theme = {
caption: data[0],
theme: "ace/theme/" + name,
isDark: data[2] == "dark",
name: name
};
exports.themesByName[name] = theme;
return theme;
});
});
ace.define("ace/ext/options",["require","exports","module","ace/ext/menu_tools/overlay_page","ace/lib/dom","ace/lib/oop","ace/lib/event_emitter","ace/ext/modelist","ace/ext/themelist"], function(require, exports, module) {
"use strict";
var overlayPage = require('./menu_tools/overlay_page').overlayPage;
var dom = require("../lib/dom");
var oop = require("../lib/oop");
var EventEmitter = require("../lib/event_emitter").EventEmitter;
var buildDom = dom.buildDom;
var modelist = require("./modelist");
var themelist = require("./themelist");
var themes = { Bright: [], Dark: [] };
themelist.themes.forEach(function(x) {
themes[x.isDark ? "Dark" : "Bright"].push({ caption: x.caption, value: x.theme });
});
var modes = modelist.modes.map(function(x){
return { caption: x.caption, value: x.mode };
});
var optionGroups = {
Main: {
Mode: {
path: "mode",
type: "select",
items: modes
},
Theme: {
path: "theme",
type: "select",
items: themes
},
"Keybinding": {
type: "buttonBar",
path: "keyboardHandler",
items: [
{ caption : "Ace", value : null },
{ caption : "Vim", value : "ace/keyboard/vim" },
{ caption : "Emacs", value : "ace/keyboard/emacs" },
{ caption : "Sublime", value : "ace/keyboard/sublime" }
]
},
"Font Size": {
path: "fontSize",
type: "number",
defaultValue: 12,
defaults: [
{caption: "12px", value: 12},
{caption: "24px", value: 24}
]
},
"Soft Wrap": {
type: "buttonBar",
path: "wrap",
items: [
{ caption : "Off", value : "off" },
{ caption : "View", value : "free" },
{ caption : "margin", value : "printMargin" },
{ caption : "40", value : "40" }
]
},
"Cursor Style": {
path: "cursorStyle",
items: [
{ caption : "Ace", value : "ace" },
{ caption : "Slim", value : "slim" },
{ caption : "Smooth", value : "smooth" },
{ caption : "Smooth And Slim", value : "smooth slim" },
{ caption : "Wide", value : "wide" }
]
},
"Folding": {
path: "foldStyle",
items: [
{ caption : "Manual", value : "manual" },
{ caption : "Mark begin", value : "markbegin" },
{ caption : "Mark begin and end", value : "markbeginend" }
]
},
"Soft Tabs": [{
path: "useSoftTabs"
}, {
path: "tabSize",
type: "number",
values: [2, 3, 4, 8, 16]
}],
"Overscroll": {
type: "buttonBar",
path: "scrollPastEnd",
items: [
{ caption : "None", value : 0 },
{ caption : "Half", value : 0.5 },
{ caption : "Full", value : 1 }
]
}
},
More: {
"Atomic soft tabs": {
path: "navigateWithinSoftTabs"
},
"Enable Behaviours": {
path: "behavioursEnabled"
},
"Full Line Selection": {
type: "checkbox",
values: "text|line",
path: "selectionStyle"
},
"Highlight Active Line": {
path: "highlightActiveLine"
},
"Show Invisibles": {
path: "showInvisibles"
},
"Show Indent Guides": {
path: "displayIndentGuides"
},
"Persistent Scrollbar": [{
path: "hScrollBarAlwaysVisible"
}, {
path: "vScrollBarAlwaysVisible"
}],
"Animate scrolling": {
path: "animatedScroll"
},
"Show Gutter": {
path: "showGutter"
},
"Show Line Numbers": {
path: "showLineNumbers"
},
"Relative Line Numbers": {
path: "relativeLineNumbers"
},
"Fixed Gutter Width": {
path: "fixedWidthGutter"
},
"Show Print Margin": [{
path: "showPrintMargin"
}, {
type: "number",
path: "printMarginColumn"
}],
"Indented Soft Wrap": {
path: "indentedSoftWrap"
},
"Highlight selected word": {
path: "highlightSelectedWord"
},
"Fade Fold Widgets": {
path: "fadeFoldWidgets"
},
"Use textarea for IME": {
path: "useTextareaForIME"
},
"Merge Undo Deltas": {
path: "mergeUndoDeltas",
items: [
{ caption : "Always", value : "always" },
{ caption : "Never", value : "false" },
{ caption : "Timed", value : "true" }
]
},
"Elastic Tabstops": {
path: "useElasticTabstops"
},
"Incremental Search": {
path: "useIncrementalSearch"
},
"Read-only": {
path: "readOnly"
},
"Copy without selection": {
path: "copyWithEmptySelection"
},
"Live Autocompletion": {
path: "enableLiveAutocompletion"
}
}
};
var OptionPanel = function(editor, element) {
this.editor = editor;
this.container = element || document.createElement("div");
this.groups = [];
this.options = {};
};
(function() {
oop.implement(this, EventEmitter);
this.add = function(config) {
if (config.Main)
oop.mixin(optionGroups.Main, config.Main);
if (config.More)
oop.mixin(optionGroups.More, config.More);
};
this.render = function() {
this.container.innerHTML = "";
buildDom(["table", {id: "controls"},
this.renderOptionGroup(optionGroups.Main),
["tr", null, ["td", {colspan: 2},
["table", {id: "more-controls"},
this.renderOptionGroup(optionGroups.More)
]
]]
], this.container);
};
this.renderOptionGroup = function(group) {
return Object.keys(group).map(function(key, i) {
var item = group[key];
if (!item.position)
item.position = i / 10000;
if (!item.label)
item.label = key;
return item;
}).sort(function(a, b) {
return a.position - b.position;
}).map(function(item) {
return this.renderOption(item.label, item);
}, this);
};
this.renderOptionControl = function(key, option) {
var self = this;
if (Array.isArray(option)) {
return option.map(function(x) {
return self.renderOptionControl(key, x);
});
}
var control;
var value = self.getOption(option);
if (option.values && option.type != "checkbox") {
if (typeof option.values == "string")
option.values = option.values.split("|");
option.items = option.values.map(function(v) {
return { value: v, name: v };
});
}
if (option.type == "buttonBar") {
control = ["div", option.items.map(function(item) {
return ["button", {
value: item.value,
ace_selected_button: value == item.value,
onclick: function() {
self.setOption(option, item.value);
var nodes = this.parentNode.querySelectorAll("[ace_selected_button]");
for (var i = 0; i < nodes.length; i++) {
nodes[i].removeAttribute("ace_selected_button");
}
this.setAttribute("ace_selected_button", true);
}
}, item.desc || item.caption || item.name];
})];
} else if (option.type == "number") {
control = ["input", {type: "number", value: value || option.defaultValue, style:"width:3em", oninput: function() {
self.setOption(option, parseInt(this.value));
}}];
if (option.defaults) {
control = [control, option.defaults.map(function(item) {
return ["button", {onclick: function() {
var input = this.parentNode.firstChild;
input.value = item.value;
input.oninput();
}}, item.caption];
})];
}
} else if (option.items) {
var buildItems = function(items) {
return items.map(function(item) {
return ["option", { value: item.value || item.name }, item.desc || item.caption || item.name];
});
};
var items = Array.isArray(option.items)
? buildItems(option.items)
: Object.keys(option.items).map(function(key) {
return ["optgroup", {"label": key}, buildItems(option.items[key])];
});
control = ["select", { id: key, value: value, onchange: function() {
self.setOption(option, this.value);
} }, items];
} else {
if (typeof option.values == "string")
option.values = option.values.split("|");
if (option.values) value = value == option.values[1];
control = ["input", { type: "checkbox", id: key, checked: value || null, onchange: function() {
var value = this.checked;
if (option.values) value = option.values[value ? 1 : 0];
self.setOption(option, value);
}}];
if (option.type == "checkedNumber") {
control = [control, []];
}
}
return control;
};
this.renderOption = function(key, option) {
if (option.path && !option.onchange && !this.editor.$options[option.path])
return;
this.options[option.path] = option;
var safeKey = "-" + option.path;
var control = this.renderOptionControl(safeKey, option);
return ["tr", {class: "ace_optionsMenuEntry"}, ["td",
["label", {for: safeKey}, key]
], ["td", control]];
};
this.setOption = function(option, value) {
if (typeof option == "string")
option = this.options[option];
if (value == "false") value = false;
if (value == "true") value = true;
if (value == "null") value = null;
if (value == "undefined") value = undefined;
if (typeof value == "string" && parseFloat(value).toString() == value)
value = parseFloat(value);
if (option.onchange)
option.onchange(value);
else if (option.path)
this.editor.setOption(option.path, value);
this._signal("setOption", {name: option.path, value: value});
};
this.getOption = function(option) {
if (option.getValue)
return option.getValue();
return this.editor.getOption(option.path);
};
}).call(OptionPanel.prototype);
exports.OptionPanel = OptionPanel;
}); (function() {
ace.require(["ace/ext/options"], function(m) {
if (typeof module == "object" && typeof exports == "object" && module) {
module.exports = m;
}
});
})();
telemetry_vppcalls.go | // Copyright (c) 2019 Cisco and/or its affiliates.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package vpp2001
import (
"context"
"fmt"
"regexp"
"strconv"
"strings"
govppapi "git.fd.io/govpp.git/api"
vpevppcalls "github.com/ligato/vpp-agent/plugins/govppmux/vppcalls"
"github.com/ligato/vpp-agent/plugins/govppmux/vppcalls/vpp2001"
"github.com/ligato/vpp-agent/plugins/telemetry/vppcalls"
"github.com/ligato/vpp-agent/plugins/vpp/binapi/vpp2001/memclnt"
"github.com/ligato/vpp-agent/plugins/vpp/binapi/vpp2001/vpe"
)
func init() |
type TelemetryHandler struct {
ch govppapi.Channel
stats govppapi.StatsProvider
vpe vpevppcalls.VpeVppAPI
}
func NewTelemetryVppHandler(ch govppapi.Channel, stats govppapi.StatsProvider) *TelemetryHandler {
vpeHandler := vpp2001.NewVpeHandler(ch)
return &TelemetryHandler{ch, stats, vpeHandler}
}
var (
// Regular expression to parse output from `show memory`
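// Capture groups 1-9 map, in order, to: thread ID, thread name, size,
// pages, page size, total, used, free and trimmable (see getMemoryCLI).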
memoryRe = regexp.MustCompile(
`Thread\s+(\d+)\s+(\w+).?\s+` +
`virtual memory start 0x[0-9a-f]+, size ([\dkmg\.]+), ([\dkmg\.]+) pages, page size ([\dkmg\.]+)\s+` +
`(?:page information not available.*\s+)*` +
`(?:(?:\s+(?:numa [\d]+|not mapped|unknown): [\dkmg\.]+ pages, [\dkmg\.]+\s+)*\s+)*` +
`\s+total: ([\dkmgKMG\.]+), used: ([\dkmgKMG\.]+), free: ([\dkmgKMG\.]+), trimmable: ([\dkmgKMG\.]+)`,
)
)
// GetMemory retrieves `show memory` info.
func (h *TelemetryHandler) GetMemory(ctx context.Context) (*vppcalls.MemoryInfo, error) {
return h.getMemoryCLI(ctx)
}
func (h *TelemetryHandler) getMemoryCLI(ctx context.Context) (*vppcalls.MemoryInfo, error) {
data, err := h.vpe.RunCli("show memory main-heap")
if err != nil {
return nil, err
}
input := string(data)
threadMatches := memoryRe.FindAllStringSubmatch(input, -1)
if len(threadMatches) == 0 && input != "" {
return nil, fmt.Errorf("invalid memory input: %q", input)
}
var threads []vppcalls.MemoryThread
for _, matches := range threadMatches {
fields := matches[1:]
if len(fields) != 9 {
return nil, fmt.Errorf("invalid memory data %v for thread: %q", fields, matches[0])
}
id, err := strconv.ParseUint(fields[0], 10, 64)
if err != nil {
return nil, err
}
thread := &vppcalls.MemoryThread{
ID: uint(id),
Name: fields[1],
Size: strToUint64(fields[2]),
Pages: strToUint64(fields[3]),
PageSize: strToUint64(fields[4]),
Total: strToUint64(fields[5]),
Used: strToUint64(fields[6]),
Free: strToUint64(fields[7]),
Reclaimed: strToUint64(fields[8]),
}
threads = append(threads, *thread)
}
info := &vppcalls.MemoryInfo{
Threads: threads,
}
return info, nil
}
func (h *TelemetryHandler) GetInterfaceStats(context.Context) (*govppapi.InterfaceStats, error) {
stats, err := h.stats.GetInterfaceStats()
if err != nil {
return nil, err
}
return stats, nil
}
var (
// Regular expression to parse output from `show node counters`
nodeCountersRe = regexp.MustCompile(`^\s+(\d+)\s+([\w-\/]+)\s+(.+)$`)
)
// GetNodeCounters retrieves node counters info.
func (h *TelemetryHandler) GetNodeCounters(ctx context.Context) (*vppcalls.NodeCounterInfo, error) {
if h.stats == nil {
return h.getNodeCountersCLI()
}
return h.getNodeCountersStats()
}
// getNodeCountersStats retrieves node counters info via the stats provider.
func (h *TelemetryHandler) getNodeCountersStats() (*vppcalls.NodeCounterInfo, error) {
errStats, err := h.stats.GetErrorStats()
if err != nil {
return nil, err
} else if errStats == nil {
return nil, nil
}
var counters []vppcalls.NodeCounter
for _, c := range errStats.Errors {
node, reason := SplitErrorName(c.CounterName)
counters = append(counters, vppcalls.NodeCounter{
Value: c.Value,
Node: node,
Name: reason,
})
}
info := &vppcalls.NodeCounterInfo{
Counters: counters,
}
return info, nil
}
// getNodeCountersCLI retrieves node counters info via the VPP CLI.
func (h *TelemetryHandler) getNodeCountersCLI() (*vppcalls.NodeCounterInfo, error) {
data, err := h.vpe.RunCli("show node counters")
if err != nil {
return nil, err
}
var counters []vppcalls.NodeCounter
for i, line := range strings.Split(string(data), "\n") {
// Skip empty lines
if strings.TrimSpace(line) == "" {
continue
}
// Check first line
if i == 0 {
fields := strings.Fields(line)
// Verify header
if len(fields) != 3 || fields[0] != "Count" {
return nil, fmt.Errorf("invalid header for `show node counters` received: %q", line)
}
continue
}
// Parse lines using regexp
matches := nodeCountersRe.FindStringSubmatch(line)
if len(matches)-1 != 3 {
return nil, fmt.Errorf("parsing failed for `show node counters` line: %q", line)
}
fields := matches[1:]
counters = append(counters, vppcalls.NodeCounter{
Value: strToUint64(fields[0]),
Node: fields[1],
Name: fields[2],
})
}
info := &vppcalls.NodeCounterInfo{
Counters: counters,
}
return info, nil
}
var (
// Regular expression to parse output from `show runtime`
runtimeRe = regexp.MustCompile(`(?:-+\n)?(?:Thread (\d+) (\w+)(?: \(lcore \d+\))?\n)?` +
`Time ([0-9\.e-]+), average vectors/node ([0-9\.e-]+), last (\d+) main loops ([0-9\.e-]+) per node ([0-9\.e-]+)\s+` +
`vector rates in ([0-9\.e-]+), out ([0-9\.e-]+), drop ([0-9\.e-]+), punt ([0-9\.e-]+)\n` +
`\s+Name\s+State\s+Calls\s+Vectors\s+Suspends\s+Clocks\s+Vectors/Call\s+(?:Perf Ticks\s+)?` +
`((?:[\w-:\.]+\s+\w+(?:[ -]\w+)*\s+\d+\s+\d+\s+\d+\s+[0-9\.e-]+\s+[0-9\.e-]+\s+)+)`)
runtimeItemsRe = regexp.MustCompile(`([\w-:\.]+)\s+(\w+(?:[ -]\w+)*)\s+(\d+)\s+(\d+)\s+(\d+)\s+([0-9\.e-]+)\s+([0-9\.e-]+)\s+`)
)
// GetRuntimeInfo retrieves `show runtime` info.
func (h *TelemetryHandler) GetRuntimeInfo(ctx context.Context) (*vppcalls.RuntimeInfo, error) {
if h.stats == nil {
return h.getRuntimeInfoCLI()
}
return h.getRuntimeInfoStats()
}
// getRuntimeInfoStats retrieves `show runtime` info via the stats provider.
func (h *TelemetryHandler) getRuntimeInfoStats() (*vppcalls.RuntimeInfo, error) {
nodeStats, err := h.stats.GetNodeStats()
if err != nil {
return nil, err
} else if nodeStats == nil {
return nil, nil
}
var threads []vppcalls.RuntimeThread
thread := vppcalls.RuntimeThread{
Name: "ALL",
}
for _, node := range nodeStats.Nodes {
vpc := 0.0
if node.Vectors != 0 && node.Calls != 0 {
vpc = float64(node.Vectors) / float64(node.Calls)
}
thread.Items = append(thread.Items, vppcalls.RuntimeItem{
Index: uint(node.NodeIndex),
Name: node.NodeName,
Calls: node.Calls,
Vectors: node.Vectors,
Suspends: node.Suspends,
Clocks: float64(node.Clocks),
VectorsPerCall: vpc,
})
}
threads = append(threads, thread)
info := &vppcalls.RuntimeInfo{
Threads: threads,
}
return info, nil
}
// getRuntimeInfoCLI retrieves `show runtime` info via the VPP CLI.
func (h *TelemetryHandler) getRuntimeInfoCLI() (*vppcalls.RuntimeInfo, error) {
data, err := h.vpe.RunCli("show runtime")
if err != nil {
return nil, err
}
input := string(data)
threadMatches := runtimeRe.FindAllStringSubmatch(input, -1)
if len(threadMatches) == 0 && input != "" {
return nil, fmt.Errorf("invalid runtime input: %q", input)
}
var threads []vppcalls.RuntimeThread
for _, matches := range threadMatches {
fields := matches[1:]
if len(fields) != 12 {
return nil, fmt.Errorf("invalid runtime data for thread (len=%v): %q", len(fields), matches[0])
}
thread := vppcalls.RuntimeThread{
ID: uint(strToUint64(fields[0])),
Name: fields[1],
Time: strToFloat64(fields[2]),
AvgVectorsPerNode: strToFloat64(fields[3]),
LastMainLoops: strToUint64(fields[4]),
VectorsPerMainLoop: strToFloat64(fields[5]),
VectorLengthPerNode: strToFloat64(fields[6]),
VectorRatesIn: strToFloat64(fields[7]),
VectorRatesOut: strToFloat64(fields[8]),
VectorRatesDrop: strToFloat64(fields[9]),
VectorRatesPunt: strToFloat64(fields[10]),
}
itemMatches := runtimeItemsRe.FindAllStringSubmatch(fields[11], -1)
for _, matches := range itemMatches {
fields := matches[1:]
if len(fields) != 7 {
return nil, fmt.Errorf("invalid runtime data for thread item: %q", matches[0])
}
thread.Items = append(thread.Items, vppcalls.RuntimeItem{
Name: fields[0],
State: fields[1],
Calls: strToUint64(fields[2]),
Vectors: strToUint64(fields[3]),
Suspends: strToUint64(fields[4]),
Clocks: strToFloat64(fields[5]),
VectorsPerCall: strToFloat64(fields[6]),
})
}
threads = append(threads, thread)
}
info := &vppcalls.RuntimeInfo{
Threads: threads,
}
return info, nil
}
var (
// Regular expression to parse output from `show buffers`
buffersRe = regexp.MustCompile(
`^(\w+(?:[ \-]\w+)*)\s+(\d+)\s+(\d+)\s+(\d+)\s+(\d+)\s+([\dkmg\.]+)\s+([\dkmg\.]+)\s+([\dkmg\.]+)\s+([\dkmg\.]+)(?:\s+)?$`,
)
)
// GetBuffersInfo retrieves buffers info from VPP.
func (h *TelemetryHandler) GetBuffersInfo(ctx context.Context) (*vppcalls.BuffersInfo, error) {
if h.stats == nil {
return h.getBuffersInfoCLI()
}
return h.getBuffersInfoStats()
}
func (h *TelemetryHandler) getBuffersInfoStats() (*vppcalls.BuffersInfo, error) {
bufStats, err := h.stats.GetBufferStats()
if err != nil {
return nil, err
} else if bufStats == nil {
return nil, nil
}
var items []vppcalls.BuffersItem
for _, c := range bufStats.Buffer {
items = append(items, vppcalls.BuffersItem{
Name: c.PoolName,
Alloc: uint64(c.Used),
Free: uint64(c.Available),
//Cached: c.Cached,
})
}
info := &vppcalls.BuffersInfo{
Items: items,
}
return info, nil
}
func (h *TelemetryHandler) getBuffersInfoCLI() (*vppcalls.BuffersInfo, error) {
data, err := h.vpe.RunCli("show buffers")
if err != nil {
return nil, err
}
var items []vppcalls.BuffersItem
for i, line := range strings.Split(string(data), "\n") {
// Skip empty lines
if strings.TrimSpace(line) == "" {
continue
}
// Check first line
if i == 0 {
fields := strings.Fields(line)
// Verify header
if len(fields) != 11 || fields[0] != "Pool" {
return nil, fmt.Errorf("invalid header for `show buffers` received: %q", line)
}
continue
}
// Parse lines using regexp
matches := buffersRe.FindStringSubmatch(line)
if len(matches)-1 != 9 {
return nil, fmt.Errorf("parsing failed (%d matches) for `show buffers` line: %q", len(matches), line)
}
fields := matches[1:]
items = append(items, vppcalls.BuffersItem{
//ThreadID: uint(strToUint64(fields[0])),
Name: fields[0],
Index: uint(strToUint64(fields[1])),
Size: strToUint64(fields[3]),
Alloc: strToUint64(fields[7]),
Free: strToUint64(fields[5]),
//NumAlloc: strToUint64(fields[6]),
//NumFree: strToUint64(fields[7]),
})
}
info := &vppcalls.BuffersInfo{
Items: items,
}
return info, nil
}
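// strToFloat64 parses values carrying VPP unit suffixes by rewriting k/K,
// m/M and g/G into exponents first, e.g. "1.5k" parses to 1500 and "2M" to
// 2e6; unparsable input yields 0 rather than an error.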
func strToFloat64(s string) float64 {
// Rewrite unit suffixes (k/K thousands, m/M millions, g/G billions) into exponents so strconv can parse the value
s = strings.Replace(s, "k", "e3", 1)
s = strings.Replace(s, "K", "e3", 1)
s = strings.Replace(s, "m", "e6", 1)
s = strings.Replace(s, "M", "e6", 1)
s = strings.Replace(s, "g", "e9", 1)
s = strings.Replace(s, "G", "e9", 1)
num, err := strconv.ParseFloat(s, 64)
if err != nil {
return 0
}
return num
}
func strToUint64(s string) uint64 {
return uint64(strToFloat64(s))
}
var (
errorNameLikeMemifRe = regexp.MustCompile(`^[A-Za-z0-9-]+([0-9]+\/[0-9]+|pg\/stream)`)
errorNameLikeGigabitRe = regexp.MustCompile(`^[A-Za-z0-9]+[0-9a-f]+(\/[0-9a-f]+){2}`)
)
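// SplitErrorName splits an error counter name into its node and reason
// parts, e.g. "memif1/0/rx packets" yields node "memif1/0" and reason
// "rx packets", while a plain "node/reason" pair splits at its only slash.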
func SplitErrorName(str string) (node, reason string) {
parts := strings.Split(str, "/")
switch len(parts) {
case 1:
return parts[0], ""
case 2:
return parts[0], parts[1]
case 3:
if strings.Contains(parts[1], " ") {
return parts[0], strings.Join(parts[1:], "/")
}
if errorNameLikeMemifRe.MatchString(str) {
return strings.Join(parts[:2], "/"), parts[2]
}
default:
if strings.Contains(parts[2], " ") {
return strings.Join(parts[:2], "/"), strings.Join(parts[2:], "/")
}
if errorNameLikeGigabitRe.MatchString(str) {
return strings.Join(parts[:3], "/"), strings.Join(parts[3:], "/")
}
}
return strings.Join(parts[:len(parts)-1], "/"), parts[len(parts)-1]
}
| {
var msgs []govppapi.Message
msgs = append(msgs, memclnt.AllMessages()...)
msgs = append(msgs, vpe.AllMessages()...)
vppcalls.Versions["20.01"] = vppcalls.HandlerVersion{
Msgs: msgs,
New: func(ch govppapi.Channel, stats govppapi.StatsProvider) vppcalls.TelemetryVppAPI {
return NewTelemetryVppHandler(ch, stats)
},
}
} |
reduce.py | #!/usr/bin/python3
from sudoku.cells import *
from sudoku.regions import *
class ReduceError(BaseException):
|
# Remove all used values from a cell
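# e.g. reduceCellByValues([0, 1, 2], used) with only used[1] truthy returns
# [0, 2]; an empty result signals a contradiction, hence ReduceError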
def reduceCellByValues(cell, used):
reduced = []
for value in cell:
if not used[value]: reduced.append(value)
if not len(reduced): raise ReduceError
return reduced
# Remove all illegal values from a cell by region
def reduceCellByRegion(cells, c, region):
cell = cells[c]
if len(cell) == 1: return cell
return reduceCellByValues(cell, findRegionValues(cells, region))
# Apply the row reduction rule
def reduceRow(cells, c):
cells[c] = reduceCellByRegion(cells, c, iRows[row(c)])
return cells
# Apply the column reduction rule
def reduceCol(cells, c):
cells[c] = reduceCellByRegion(cells, c, iCols[col(c)])
return cells
# Apply the box reduction rule
def reduceBox(cells, c):
cells[c] = reduceCellByRegion(cells, c, iBoxes[box(c)])
return cells
# Apply the king's move reduction rule
# Any two cells separated by a king's move cannot contain the same digit
def reduceKing(cells, c):
cells[c] = reduceCellByRegion(cells, c, iKings[c])
return cells
# Apply the knight's move reduction rule
# Any two cells separated by a knight's move cannot contain the same digit
def reduceKnight(cells, c):
cells[c] = reduceCellByRegion(cells, c, iKnights[c])
return cells
# Apply the adjacency move reduction rule:
# Any two orthogonally adjacent cells cannot contain consecutive digits
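# e.g. a solved cell holding value 4 removes candidates 3 and 5 from each
# orthogonal neighbour (values are 0-based here, apparently digits 1-9)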
def reduceAdjacent(cells, c):
cell = cells[c]
if len(cell) == 1:
value = cell[0]
used = [0 for v in range(9)]
if value > 0: used[value-1] = 1
if value < 8: used[value+1] = 1
if hasLeft(c):
l = leftOf(c)
cells[l] = reduceCellByValues(cells[l], used)
if hasRight(c):
r = rightOf(c)
cells[r] = reduceCellByValues(cells[r], used)
if hasAbove(c):
a = aboveOf(c)
cells[a] = reduceCellByValues(cells[a], used)
if hasBelow(c):
b = belowOf(c)
cells[b] = reduceCellByValues(cells[b], used)
return cells
| pass |
typescript.ts | import { SyntaxNode } from "web-tree-sitter";
import { TextEditor } from "vscode";
import { getPojoMatchers } from "./getPojoMatchers";
import {
cascadingMatcher,
delimitedMatcher,
hasType,
possiblyWrappedNode,
simpleSelectionExtractor,
getNodeWithLeadingDelimiter,
childNodeMatcher,
} from "../nodeMatchers";
import { NodeMatcher, ScopeType } from "../Types";
import { getDeclarationNode, getValueNode } from "../treeSitterUtils";
// TODO figure out how to properly use super types
// Generated by the following command:
// > curl https://raw.githubusercontent.com/tree-sitter/tree-sitter-typescript/4c20b54771e4b390ee058af2930feb2cd55f2bf8/typescript/src/node-types.json \
// | jq '.[] | select(.type == "primary_expression" or .type == "expression") | [.subtypes[].type]'
const EXPRESSION_TYPES = [
"array",
"arrow_function",
"as_expression",
"assignment_expression",
"augmented_assignment_expression",
"await_expression",
"binary_expression",
"call_expression",
"class",
"false",
"function",
"generator_function",
"identifier",
"import",
"internal_module",
"member_expression",
"meta_property",
"new_expression",
"non_null_expression",
"null",
"number",
"object",
"parenthesized_expression",
"primary_expression",
"regex", | "string",
"subscript_expression",
"super",
"template_string",
"ternary_expression",
"this",
"true",
"type_assertion",
"unary_expression",
"undefined",
"update_expression",
"yield_expression",
];
function isExpression(node: SyntaxNode) {
return EXPRESSION_TYPES.includes(node.type);
}
// Generated by the following command:
// > curl https://raw.githubusercontent.com/tree-sitter/tree-sitter-typescript/4c20b54771e4b390ee058af2930feb2cd55f2bf8/typescript/src/node-types.json \
// | jq '[.[] | select(.type == "statement" or .type == "declaration") | .subtypes[].type]'
const STATEMENT_TYPES = [
"abstract_class_declaration",
"ambient_declaration",
"break_statement",
"class_declaration",
"continue_statement",
"debugger_statement",
"declaration",
"do_statement",
"empty_statement",
"enum_declaration",
"export_statement",
"expression_statement",
"for_in_statement",
"for_statement",
"function_declaration",
"function_signature",
"generator_function_declaration",
"if_statement",
"import_alias",
"import_statement",
"interface_declaration",
"internal_module",
"labeled_statement",
"lexical_declaration",
"module",
"return_statement",
"statement_block",
"switch_statement",
"throw_statement",
"try_statement",
"type_alias_declaration",
"variable_declaration",
"while_statement",
"with_statement",
];
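// Matches a node of one of the given types, optionally unwrapped from an
// enclosing export statement, e.g. both `class Foo {}` and
// `export class Foo {}` for "class_declaration".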
function possiblyExportedDeclaration(...typeNames: string[]): NodeMatcher {
return possiblyWrappedNode(
(node) => node.type === "export_statement",
(node) => typeNames.includes(node.type),
(node) => [getDeclarationNode(node), getValueNode(node)]
);
}
const isNamedArrowFunction = (node: SyntaxNode) => {
if (node.type !== "lexical_declaration" || node.namedChildCount !== 1) {
return false;
}
const child = node.firstNamedChild!;
return (
child.type === "variable_declarator" &&
getValueNode(child)!.type === "arrow_function"
);
};
export const getTypeNode = (node: SyntaxNode) => {
const typeAnnotationNode = node.children.find((child) =>
["type_annotation", "opting_type_annotation"].includes(child.type)
);
return typeAnnotationNode?.lastChild ?? null;
};
const nodeMatchers: Record<ScopeType, NodeMatcher> = {
...getPojoMatchers(
["object"],
["array"],
(node) => isExpression(node) || node.type === "spread_element"
),
ifStatement: hasType("if_statement"),
class: possiblyExportedDeclaration("class_declaration", "class"),
statement: possiblyExportedDeclaration(...STATEMENT_TYPES),
arrowFunction: hasType("arrow_function"),
functionCall: hasType("call_expression", "new_expression"),
type: cascadingMatcher(
// Typed parameters, properties, and functions
childNodeMatcher(getTypeNode, getNodeWithLeadingDelimiter),
// Type alias/interface declarations
possiblyExportedDeclaration(
"type_alias_declaration",
"interface_declaration"
)
),
argumentOrParameter: delimitedMatcher(
(node) =>
(node.parent?.type === "arguments" &&
(isExpression(node) || node.type === "spread_element")) ||
node.type === "optional_parameter" ||
node.type === "required_parameter",
(node) => node.type === "," || node.type === "(" || node.type === ")",
", "
),
namedFunction: cascadingMatcher(
// Simple case, eg
// function foo() {}
possiblyExportedDeclaration("function_declaration", "method_definition"),
// Class property defined as field definition with arrow
// eg:
// class Foo {
// bar = () => "hello";
// }
(editor: TextEditor, node: SyntaxNode) =>
node.type === "public_field_definition" &&
getValueNode(node)!.type === "arrow_function"
? simpleSelectionExtractor(node)
: null,
// eg:
// const foo = () => "hello"
possiblyWrappedNode(
(node) => node.type === "export_statement",
isNamedArrowFunction,
(node) => [getDeclarationNode(node)]
)
),
comment: hasType("comment"),
};
export default nodeMatchers; | |
model.py | from __future__ import division
import os
import time
from glob import glob
import tensorflow as tf
import numpy as np
from six.moves import xrange
from ops import *
from utils import *
class pix2pix(object):
def __init__(self, sess, image_size=256,
batch_size=1, sample_size=1, output_size=256,
gf_dim=64, df_dim=64, L1_lambda=100,
input_c_dim=3, output_c_dim=3, dataset_name='facades',
checkpoint_dir=None, sample_dir=None):
"""
Args:
sess: TensorFlow session
batch_size: The size of batch. Should be specified before training.
output_size: (optional) The resolution in pixels of the images. [256]
gf_dim: (optional) Dimension of gen filters in first conv layer. [64]
df_dim: (optional) Dimension of discrim filters in first conv layer. [64]
input_c_dim: (optional) Dimension of input image color. For grayscale input, set to 1. [3]
output_c_dim: (optional) Dimension of output image color. For grayscale input, set to 1. [3]
"""
self.sess = sess
self.is_grayscale = (input_c_dim == 1)
self.batch_size = batch_size
self.image_size = image_size
self.sample_size = sample_size
self.output_size = output_size
self.gf_dim = gf_dim
self.df_dim = df_dim
self.input_c_dim = input_c_dim
self.output_c_dim = output_c_dim
self.L1_lambda = L1_lambda
# batch normalization : deals with poor initialization helps gradient flow
self.d_bn1 = batch_norm(name='d_bn1')
self.d_bn2 = batch_norm(name='d_bn2')
self.d_bn3 = batch_norm(name='d_bn3')
self.g_bn_e2 = batch_norm(name='g_bn_e2')
self.g_bn_e3 = batch_norm(name='g_bn_e3')
self.g_bn_e4 = batch_norm(name='g_bn_e4')
self.g_bn_e5 = batch_norm(name='g_bn_e5')
self.g_bn_e6 = batch_norm(name='g_bn_e6')
self.g_bn_e7 = batch_norm(name='g_bn_e7')
self.g_bn_e8 = batch_norm(name='g_bn_e8')
self.g_bn_d1 = batch_norm(name='g_bn_d1')
self.g_bn_d2 = batch_norm(name='g_bn_d2')
self.g_bn_d3 = batch_norm(name='g_bn_d3')
self.g_bn_d4 = batch_norm(name='g_bn_d4')
self.g_bn_d5 = batch_norm(name='g_bn_d5')
self.g_bn_d6 = batch_norm(name='g_bn_d6')
self.g_bn_d7 = batch_norm(name='g_bn_d7')
self.dataset_name = dataset_name
self.checkpoint_dir = checkpoint_dir
self.build_model()
def build_model(self):
self.real_data = tf.placeholder(tf.float32,
[self.batch_size, self.image_size, self.image_size,
self.input_c_dim + self.output_c_dim],
name='real_A_and_B_images')
self.real_B = self.real_data[:, :, :, :self.input_c_dim]
self.real_A = self.real_data[:, :, :, self.input_c_dim:self.input_c_dim + self.output_c_dim]
self.fake_B = self.generator(self.real_A)
self.real_AB = tf.concat([self.real_A, self.real_B], 3)
self.fake_AB = tf.concat([self.real_A, self.fake_B], 3)
self.D, self.D_logits = self.discriminator(self.real_AB, reuse=False)
self.D_, self.D_logits_ = self.discriminator(self.fake_AB, reuse=True)
self.fake_B_sample = self.sampler(self.real_A)
self.d_sum = tf.summary.histogram("d", self.D)
self.d__sum = tf.summary.histogram("d_", self.D_)
self.fake_B_sum = tf.summary.image("fake_B", self.fake_B)
self.d_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits, labels=tf.ones_like(self.D)))
self.d_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.zeros_like(self.D_)))
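# Generator objective: fool the discriminator on the fake (A, fake_B) pair,
# plus an L1 reconstruction term between real_B and fake_B weighted by
# L1_lambda (100 by default).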
self.g_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=self.D_logits_, labels=tf.ones_like(self.D_))) \
+ self.L1_lambda * tf.reduce_mean(tf.abs(self.real_B - self.fake_B))
self.d_loss_real_sum = tf.summary.scalar("d_loss_real", self.d_loss_real)
self.d_loss_fake_sum = tf.summary.scalar("d_loss_fake", self.d_loss_fake)
self.d_loss = self.d_loss_real + self.d_loss_fake
self.g_loss_sum = tf.summary.scalar("g_loss", self.g_loss)
self.d_loss_sum = tf.summary.scalar("d_loss", self.d_loss)
t_vars = tf.trainable_variables()
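# Variables are partitioned between the two optimizers purely by the
# 'd_' / 'g_' name prefixes assigned when the layers were created.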
self.d_vars = [var for var in t_vars if 'd_' in var.name]
self.g_vars = [var for var in t_vars if 'g_' in var.name]
self.saver = tf.train.Saver()
def load_random_samples(self):
|
def sample_model(self, sample_dir, epoch, idx):
sample_images = self.load_random_samples()
samples, d_loss, g_loss = self.sess.run(
[self.fake_B_sample, self.d_loss, self.g_loss],
feed_dict={self.real_data: sample_images}
)
save_images(samples, [self.batch_size, 1],
'./{}/train_{:02d}_{:04d}.png'.format(sample_dir, epoch, idx))
print("[Sample] d_loss: {:.8f}, g_loss: {:.8f}".format(d_loss, g_loss))
def train(self, args):
"""Train pix2pix"""
d_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \
.minimize(self.d_loss, var_list=self.d_vars)
g_optim = tf.train.AdamOptimizer(args.lr, beta1=args.beta1) \
.minimize(self.g_loss, var_list=self.g_vars)
init_op = tf.global_variables_initializer()
self.sess.run(init_op)
self.g_sum = tf.summary.merge([self.d__sum,
self.fake_B_sum, self.d_loss_fake_sum, self.g_loss_sum])
self.d_sum = tf.summary.merge([self.d_sum, self.d_loss_real_sum, self.d_loss_sum])
self.writer = tf.summary.FileWriter("./logs", self.sess.graph)
counter = 1
start_time = time.time()
if self.load(self.checkpoint_dir):
print(" [*] Load SUCCESS")
else:
print(" [!] Load failed...")
for epoch in xrange(args.epoch):
data = glob('./datasets/{}/train/*.jpg'.format(self.dataset_name))
#np.random.shuffle(data)
batch_idxs = min(len(data), args.train_size) // self.batch_size
for idx in xrange(0, batch_idxs):
batch_files = data[idx*self.batch_size:(idx+1)*self.batch_size]
batch = [load_data(batch_file) for batch_file in batch_files]
if (self.is_grayscale):
batch_images = np.array(batch).astype(np.float32)[:, :, :, None]
else:
batch_images = np.array(batch).astype(np.float32)
# Update D network
_, summary_str = self.sess.run([d_optim, self.d_sum],
feed_dict={ self.real_data: batch_images })
self.writer.add_summary(summary_str, counter)
# Update G network
_, summary_str = self.sess.run([g_optim, self.g_sum],
feed_dict={ self.real_data: batch_images })
self.writer.add_summary(summary_str, counter)
# Run g_optim twice to make sure that d_loss does not go to zero (different from paper)
_, summary_str = self.sess.run([g_optim, self.g_sum],
feed_dict={ self.real_data: batch_images })
self.writer.add_summary(summary_str, counter)
errD_fake = self.d_loss_fake.eval({self.real_data: batch_images})
errD_real = self.d_loss_real.eval({self.real_data: batch_images})
errG = self.g_loss.eval({self.real_data: batch_images})
counter += 1
print("Epoch: [%2d] [%4d/%4d] time: %4.4f, d_loss: %.8f, g_loss: %.8f" \
% (epoch, idx, batch_idxs,
time.time() - start_time, errD_fake+errD_real, errG))
if np.mod(counter, 100) == 1:
self.sample_model(args.sample_dir, epoch, idx)
if np.mod(counter, 500) == 2:
self.save(args.checkpoint_dir, counter)
def discriminator(self, image, y=None, reuse=False):
with tf.variable_scope("discriminator") as scope:
# image is 256 x 256 x (input_c_dim + output_c_dim)
if reuse:
tf.get_variable_scope().reuse_variables()
else:
assert tf.get_variable_scope().reuse == False
h0 = lrelu(conv2d(image, self.df_dim, name='d_h0_conv'))
# h0 is (128 x 128 x self.df_dim)
h1 = lrelu(self.d_bn1(conv2d(h0, self.df_dim*2, name='d_h1_conv')))
# h1 is (64 x 64 x self.df_dim*2)
h2 = lrelu(self.d_bn2(conv2d(h1, self.df_dim*4, name='d_h2_conv')))
# h2 is (32 x 32 x self.df_dim*4)
h3 = lrelu(self.d_bn3(conv2d(h2, self.df_dim*8, d_h=1, d_w=1, name='d_h3_conv')))
# h3 is (16 x 16 x self.df_dim*8)
h4 = linear(tf.reshape(h3, [self.batch_size, -1]), 1, 'd_h3_lin')
return tf.nn.sigmoid(h4), h4
def generator(self, image, y=None):
with tf.variable_scope("generator") as scope:
s = self.output_size
s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128)
# image is (256 x 256 x input_c_dim)
e1 = conv2d(image, self.gf_dim, name='g_e1_conv')
# e1 is (128 x 128 x self.gf_dim)
e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv'))
# e2 is (64 x 64 x self.gf_dim*2)
e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv'))
# e3 is (32 x 32 x self.gf_dim*4)
e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv'))
# e4 is (16 x 16 x self.gf_dim*8)
e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv'))
# e5 is (8 x 8 x self.gf_dim*8)
e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv'))
# e6 is (4 x 4 x self.gf_dim*8)
e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv'))
# e7 is (2 x 2 x self.gf_dim*8)
e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv'))
# e8 is (1 x 1 x self.gf_dim*8)
self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8),
[self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True)
d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5)
d1 = tf.concat([d1, e7], 3)
# d1 is (2 x 2 x self.gf_dim*8*2)
self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1),
[self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True)
d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5)
d2 = tf.concat([d2, e6], 3)
# d2 is (4 x 4 x self.gf_dim*8*2)
self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2),
[self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True)
d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5)
d3 = tf.concat([d3, e5], 3)
# d3 is (8 x 8 x self.gf_dim*8*2)
self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3),
[self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True)
d4 = self.g_bn_d4(self.d4)
d4 = tf.concat([d4, e4], 3)
# d4 is (16 x 16 x self.gf_dim*8*2)
self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4),
[self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True)
d5 = self.g_bn_d5(self.d5)
d5 = tf.concat([d5, e3], 3)
# d5 is (32 x 32 x self.gf_dim*4*2)
self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5),
[self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True)
d6 = self.g_bn_d6(self.d6)
d6 = tf.concat([d6, e2], 3)
# d6 is (64 x 64 x self.gf_dim*2*2)
self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6),
[self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True)
d7 = self.g_bn_d7(self.d7)
d7 = tf.concat([d7, e1], 3)
# d7 is (128 x 128 x self.gf_dim*1*2)
self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7),
[self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True)
# d8 is (256 x 256 x output_c_dim)
return tf.nn.tanh(self.d8)
def sampler(self, image, y=None):
with tf.variable_scope("generator") as scope:
scope.reuse_variables()
s = self.output_size
s2, s4, s8, s16, s32, s64, s128 = int(s/2), int(s/4), int(s/8), int(s/16), int(s/32), int(s/64), int(s/128)
# image is (256 x 256 x input_c_dim)
e1 = conv2d(image, self.gf_dim, name='g_e1_conv')
# e1 is (128 x 128 x self.gf_dim)
e2 = self.g_bn_e2(conv2d(lrelu(e1), self.gf_dim*2, name='g_e2_conv'))
# e2 is (64 x 64 x self.gf_dim*2)
e3 = self.g_bn_e3(conv2d(lrelu(e2), self.gf_dim*4, name='g_e3_conv'))
# e3 is (32 x 32 x self.gf_dim*4)
e4 = self.g_bn_e4(conv2d(lrelu(e3), self.gf_dim*8, name='g_e4_conv'))
# e4 is (16 x 16 x self.gf_dim*8)
e5 = self.g_bn_e5(conv2d(lrelu(e4), self.gf_dim*8, name='g_e5_conv'))
# e5 is (8 x 8 x self.gf_dim*8)
e6 = self.g_bn_e6(conv2d(lrelu(e5), self.gf_dim*8, name='g_e6_conv'))
# e6 is (4 x 4 x self.gf_dim*8)
e7 = self.g_bn_e7(conv2d(lrelu(e6), self.gf_dim*8, name='g_e7_conv'))
# e7 is (2 x 2 x self.gf_dim*8)
e8 = self.g_bn_e8(conv2d(lrelu(e7), self.gf_dim*8, name='g_e8_conv'))
# e8 is (1 x 1 x self.gf_dim*8)
self.d1, self.d1_w, self.d1_b = deconv2d(tf.nn.relu(e8),
[self.batch_size, s128, s128, self.gf_dim*8], name='g_d1', with_w=True)
d1 = tf.nn.dropout(self.g_bn_d1(self.d1), 0.5)
d1 = tf.concat([d1, e7], 3)
# d1 is (2 x 2 x self.gf_dim*8*2)
self.d2, self.d2_w, self.d2_b = deconv2d(tf.nn.relu(d1),
[self.batch_size, s64, s64, self.gf_dim*8], name='g_d2', with_w=True)
d2 = tf.nn.dropout(self.g_bn_d2(self.d2), 0.5)
d2 = tf.concat([d2, e6], 3)
# d2 is (4 x 4 x self.gf_dim*8*2)
self.d3, self.d3_w, self.d3_b = deconv2d(tf.nn.relu(d2),
[self.batch_size, s32, s32, self.gf_dim*8], name='g_d3', with_w=True)
d3 = tf.nn.dropout(self.g_bn_d3(self.d3), 0.5)
d3 = tf.concat([d3, e5], 3)
# d3 is (8 x 8 x self.gf_dim*8*2)
self.d4, self.d4_w, self.d4_b = deconv2d(tf.nn.relu(d3),
[self.batch_size, s16, s16, self.gf_dim*8], name='g_d4', with_w=True)
d4 = self.g_bn_d4(self.d4)
d4 = tf.concat([d4, e4], 3)
# d4 is (16 x 16 x self.gf_dim*8*2)
self.d5, self.d5_w, self.d5_b = deconv2d(tf.nn.relu(d4),
[self.batch_size, s8, s8, self.gf_dim*4], name='g_d5', with_w=True)
d5 = self.g_bn_d5(self.d5)
d5 = tf.concat([d5, e3], 3)
# d5 is (32 x 32 x self.gf_dim*4*2)
self.d6, self.d6_w, self.d6_b = deconv2d(tf.nn.relu(d5),
[self.batch_size, s4, s4, self.gf_dim*2], name='g_d6', with_w=True)
d6 = self.g_bn_d6(self.d6)
d6 = tf.concat([d6, e2], 3)
# d6 is (64 x 64 x self.gf_dim*2*2)
self.d7, self.d7_w, self.d7_b = deconv2d(tf.nn.relu(d6),
[self.batch_size, s2, s2, self.gf_dim], name='g_d7', with_w=True)
d7 = self.g_bn_d7(self.d7)
d7 = tf.concat([d7, e1], 3)
# d7 is (128 x 128 x self.gf_dim*1*2)
self.d8, self.d8_w, self.d8_b = deconv2d(tf.nn.relu(d7),
[self.batch_size, s, s, self.output_c_dim], name='g_d8', with_w=True)
# d8 is (256 x 256 x output_c_dim)
return tf.nn.tanh(self.d8)
def save(self, checkpoint_dir, step):
model_name = "pix2pix.model"
model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size)
checkpoint_dir = os.path.join(checkpoint_dir, model_dir)
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
self.saver.save(self.sess,
os.path.join(checkpoint_dir, model_name),
global_step=step)
def load(self, checkpoint_dir):
print(" [*] Reading checkpoint...")
model_dir = "%s_%s_%s" % (self.dataset_name, self.batch_size, self.output_size)
checkpoint_dir = os.path.join(checkpoint_dir, model_dir)
ckpt = tf.train.get_checkpoint_state(checkpoint_dir)
if ckpt and ckpt.model_checkpoint_path:
ckpt_name = os.path.basename(ckpt.model_checkpoint_path)
self.saver.restore(self.sess, os.path.join(checkpoint_dir, ckpt_name))
return True
else:
return False
def test(self, args):
"""Test pix2pix"""
init_op = tf.global_variables_initializer()
self.sess.run(init_op)
sample_files = glob('./datasets/{}/val/*.jpg'.format(self.dataset_name))
# sort testing input
n = [int(i) for i in map(lambda x: x.split('/')[-1].split('.jpg')[0], sample_files)]
sample_files = [x for (y, x) in sorted(zip(n, sample_files))]
# load testing input
print("Loading testing images ...")
sample = [load_data(sample_file, is_test=True) for sample_file in sample_files]
if (self.is_grayscale):
sample_images = np.array(sample).astype(np.float32)[:, :, :, None]
else:
sample_images = np.array(sample).astype(np.float32)
sample_images = [sample_images[i:i+self.batch_size]
for i in xrange(0, len(sample_images), self.batch_size)]
sample_images = np.array(sample_images)
print(sample_images.shape)
start_time = time.time()
if self.load(self.checkpoint_dir):
print(" [*] Load SUCCESS")
else:
print(" [!] Load failed...")
for i, sample_image in enumerate(sample_images):
idx = i+1
print("sampling image ", idx)
samples = self.sess.run(
self.fake_B_sample,
feed_dict={self.real_data: sample_image}
)
save_images(samples, [self.batch_size, 1],
'./{}/test_{:04d}.png'.format(args.test_dir, idx))
| data = np.random.choice(glob('./datasets/{}/val/*.jpg'.format(self.dataset_name)), self.batch_size)
sample = [load_data(sample_file) for sample_file in data]
if (self.is_grayscale):
sample_images = np.array(sample).astype(np.float32)[:, :, :, None]
else:
sample_images = np.array(sample).astype(np.float32)
return sample_images |
moleprop.py | import os
import time
import math
import numpy as np
import torch
# torch.multiprocessing.set_start_method('spawn')
torch.multiprocessing.set_start_method('forkserver', force=True)
import torch.nn as nn
from torch.utils.data import DataLoader
from tqdm import tqdm
from argparse import Namespace
from typing import List
from dglt.data.dataset.molecular import MoleculeDataset
from dglt.data.transformer.scaler import StandardScaler
from dglt.data.transformer.collator import MolCollator
from dglt.data.dataset.utils import get_data, get_data_from_smiles
from dglt.utils import load_args, load_checkpoint, load_scalers
from deploy import get_newest_train_args
from third_party.dimorphite_dl.acid_base import mol_cls
class MoleProp(object):
"""Molecular Properties Prediction Service"""
def __init__(self, checkpoint_dir, debug=print):
self.debug_ = debug
self.checkpoint_paths_ = []
for root, _, files in os.walk(checkpoint_dir):
for fname in files:
if fname.endswith('.pt'):
self.checkpoint_paths_.append(os.path.join(root, fname))
def load_model(self, args: Namespace):
"""
Load checkpoints
:param args: Arguments.
:return:
"""
self.scaler_, self.features_scaler_ = load_scalers(self.checkpoint_paths_[0])
self.train_args = load_args(self.checkpoint_paths_[0])
self.args_ = args
for key, value in vars(self.train_args).items():
if not hasattr(self.args_, key):
setattr(self.args_, key, value)
# update args with newest training args
newest_train_args = get_newest_train_args()
for key, value in vars(newest_train_args).items():
if not hasattr(args, key):
setattr(args, key, value)
if args.features_path:
args.features_path = None
args.features_generator = ['rdkit_2d_normalized']
self.models_ = []
for checkpoint_path in tqdm(self.checkpoint_paths_, total=len(self.checkpoint_paths_)):
self.models_.append(load_checkpoint(checkpoint_path, cuda=self.args_.cuda, current_args=self.args_))
def inference(self,
model: nn.Module,
data: MoleculeDataset,
args,
batch_size: int,
shared_dict,
scaler: StandardScaler = None
) -> List[List[float]]:
"""
Do inference
:param model: model.
:param data: input data.
:param args: Arguments.
:param batch_size: batch size.
:param shared_dict: shared_dict of model.
:param scaler: scaler of input data.
:return: prediction of molecular properties.
"""
# model.share_memory()
model.eval()
args.bond_drop_rate = 0
preds = []
iter_count = 0
mol_collator = MolCollator(args=args, shared_dict=shared_dict)
mol_loader = DataLoader(data, batch_size=batch_size, shuffle=False, num_workers=0, collate_fn=mol_collator)
for i, item in enumerate(mol_loader):
smiles_batch, batch, features_batch, mask, _ = item
with torch.no_grad():
batch_preds = model(batch, features_batch)
iter_count += args.batch_size
batch_preds = batch_preds.data.cpu().numpy()
if scaler is not None:
batch_preds = scaler.inverse_transform(batch_preds)
batch_preds = batch_preds.tolist()
preds.extend(batch_preds)
return preds
def postprocessing(self, task: str = None, smiles: List[str] = None, preds: np.ndarray = None):
if task == 'caco2':
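# (10**x - 1) / 10 apparently inverts a log10(10*y + 1) transform
# applied to the caco2 training targets (an inference from the code)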
for i in range(preds.shape[0]):
if preds[i] is not None:
for j in range(len(preds[i])):
preds[i][j] = (math.pow(10, preds[i][j]) - 1) / 10
elif task == 'pka':
acid_base = mol_cls(smiles)
preds[acid_base == None] = np.nan
preds = np.column_stack((preds, np.array(acid_base, dtype=float)))
elif task == 'ppb':
preds[preds > 1] = 1
preds[preds < 0] = 0
return preds
def | (self, task: str = None, smiles: List[str] = None):
"""
Predict molecular properties.
:param smiles: input data.
:return: molecular properties.
"""
self.debug_('Loading data')
tic = time.time()
self.args_.max_workers = 30
if smiles is not None:
test_data = get_data_from_smiles(smiles=smiles, skip_invalid_smiles=True, args=self.args_)
else:
test_data = get_data(path=self.args_.input_file, args=self.args_,
use_compound_names=self.args_.use_compound_names,
skip_invalid_smiles=True)
toc = time.time()
self.debug_('loading data: {}s'.format(toc - tic))
self.debug_('Validating SMILES')
tic = time.time()
valid_indices = [i for i in range(len(test_data)) if test_data[i].mol is not None]
full_data = test_data
test_data = MoleculeDataset([test_data[i] for i in valid_indices])
# Edge case if empty list of smiles is provided
if len(test_data) == 0:
return [None] * len(full_data)
# Normalize features
if self.train_args.features_scaling:
test_data.normalize_features(self.features_scaler_)
sum_preds = np.zeros((len(test_data), self.args_.num_tasks))
toc = time.time()
self.debug_('validating smiles: {}s'.format(toc - tic))
self.debug_(f'Predicting...')
tic = time.time()
shared_dict = {}
for model in self.models_:
model_preds = self.inference(
model=model,
data=test_data,
batch_size=self.args_.batch_size,
scaler=self.scaler_,
shared_dict=shared_dict,
args=self.args_
)
sum_preds += np.array(model_preds)
toc = time.time()
self.debug_('predicting: {}s'.format(toc - tic))
avg_preds = sum_preds / len(self.checkpoint_paths_)
avg_preds = self.postprocessing(task=task, smiles=smiles, preds=avg_preds)
avg_preds = avg_preds.tolist()
assert len(test_data) == len(avg_preds)
test_smiles = test_data.smiles()
res = {}
for i in range(len(avg_preds)):
res[test_smiles[i]] = avg_preds[i]
return {'task': task, 'task_score': res}
| predict |
duplicate.rs | // Copyright 2014 Pierre Talbot (IRCAM)
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
pub use std::collections::HashMap;
use middle::analysis::ast::*;
use partial::Partial::*;
use rust;
pub fn rule_duplicate<'a, 'b>(mut grammar: AGrammar<'a, 'b>, rules: Vec<Rule>) -> Partial<AGrammar<'a, 'b>>
{
DuplicateItem::analyse(&grammar, rules.into_iter(), String::from("rule"))
.map(move |rules| {
grammar.rules = rules.into_iter().map(|x| x.1).collect();
grammar
})
}
pub fn rust_functions_duplicate<'a, 'b>(mut grammar: AGrammar<'a, 'b>,
items: Vec<RItem>) -> Partial<AGrammar<'a, 'b>>
{
let mut functions = vec![];
let mut others = vec![];
for item in items {
if let &rust::ItemKind::Fn(..) = &item.node {
functions.push(item);
}
else {
others.push(item);
}
}
DuplicateItem::analyse(&grammar, functions.into_iter(), String::from("rust function"))
.map(move |functions| {
grammar.rust_functions = functions.into_iter().collect();
grammar.rust_items = others;
grammar
})
}
struct DuplicateItem<'a: 'c, 'b: 'a, 'c, Item>
{
grammar: &'c AGrammar<'a, 'b>,
items: Vec<(Ident, Item)>,
has_duplicate: bool,
what_is_duplicated: String
}
impl<'a, 'b, 'c, Item> DuplicateItem<'a, 'b, 'c, Item> where
Item: ItemIdent + ItemSpan
{
pub fn analyse<ItemIter>(grammar: &'c AGrammar<'a, 'b>, iter: ItemIter, item_kind: String)
-> Partial<Vec<(Ident, Item)>> where
ItemIter: Iterator<Item=Item>
{
DuplicateItem {
grammar: grammar,
items: vec![],
has_duplicate: false,
what_is_duplicated: item_kind
}.populate(iter)
.make()
}
fn populate<ItemIter: Iterator<Item=Item>>(mut self, iter: ItemIter)
-> DuplicateItem<'a, 'b, 'c, Item>
{
for item in iter {
let ident = item.ident();
if self.items.iter().any(|&(id,_)| id == ident) |
else {
self.items.push((ident, item));
}
}
self
}
fn duplicate_items(&self, pre: &Item, current: Item) {
self.grammar.multi_locations_err(vec![
(current.span(),
format!("duplicate definition of {} with name `{}`", self.what_is_duplicated, current.ident())),
(pre.span(),
format!("previous definition of `{}` here", pre.ident()))]
);
}
fn make(self) -> Partial<Vec<(Ident, Item)>> {
if self.has_duplicate {
Fake(self.items)
} else {
Value(self.items)
}
}
}
| {
let &(_, ref dup_item) = self.items.iter().find(|&&(id,_)| id == ident).unwrap();
self.duplicate_items(dup_item, item);
self.has_duplicate = true;
} |
contactSearchStatus.js | import Enum from '../../lib/Enum';
| export default new Enum(['searching', 'idle'], 'contactSearchStatus'); |
|
client.rs | //! The client module wraps the interactions between the client and the server
use crate::{responses, requests};
use crate::shared;
use serde::{Serialize, Deserialize};
use tokio::sync::Mutex;
use std::sync::Arc;
use reqwest::header::{HeaderName, HeaderValue};
use reqwest::{Method, Url, StatusCode};
use std::str::FromStr;
use tokio::time::Duration;
use std::fmt::{Debug, Formatter};
use std::collections::HashMap;
use crate::errors::SpaceTradersClientError;
/// HttpClient is a thread-safe rate-limited space traders client
pub type HttpClient = Arc<Mutex<SpaceTradersClient>>;
/// Allow the user to tie into the request lifecycle and do things with the request, response, and/or error coming back
pub type PostRequestHook = fn(
method: &str,
url: &str,
request_body: Option<&str>,
response_status_code: Option<u16>,
response_headers: Option<&HashMap<String, String>>,
response_body: Option<&str>,
error: Option<&SpaceTradersClientError>,
);
/// SpaceTradersClient wraps the actual reqwest client and adds rate-limiting support
#[derive(Clone)]
pub struct SpaceTradersClient {
client: reqwest::Client,
post_request_hook: Option<PostRequestHook>,
}
impl Debug for SpaceTradersClient {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SpaceTradersClient")
.field("client", &self.client)
.finish()
}
}
/// SpaceTradersClientRequest wraps all the parameters sent to the spacetraders client
#[derive(Serialize)]
pub struct SpaceTradersClientRequest {
method: String,
url: String,
request_headers: HashMap<String, String>,
request_text: String,
}
/// SpaceTradersClientRequest wraps all the parameters received from the spacetraders API
#[derive(Serialize)]
pub struct SpaceTradersClientResponse {
status_code: u16,
response_headers: HashMap<String, String>,
response_text: String,
}
impl SpaceTradersClient {
fn new(proxy: Option<String>) -> Self {
let mut client_builder = reqwest::ClientBuilder::new();
if let Some(proxy) = proxy {
client_builder = client_builder.proxy(reqwest::Proxy::all(proxy).unwrap());
}
Self {
client: client_builder.build().unwrap(),
post_request_hook: None,
}
}
fn set_post_request_hook(&mut self, hook: PostRequestHook) {
self.post_request_hook = Some(hook);
}
async fn execute_request(
&self, method: &str, url: &str, body: Option<&str>, token: Option<&str>,
) -> Result<SpaceTradersClientResponse, SpaceTradersClientError> {
let mut request_builder = self.client.request(
Method::from_str(&method).unwrap(),
Url::parse(url).unwrap(),
);
if let Some(token) = token {
request_builder = request_builder.header(
HeaderName::from_lowercase(b"authorization").unwrap(),
HeaderValue::from_str(&format!("Bearer {}", &token)).unwrap(),
);
}
if let Some(body) = body {
request_builder = request_builder.header(HeaderName::from_lowercase(b"content-type").unwrap(), HeaderValue::from_static("application/json"));
request_builder = request_builder.body(body.to_owned());
}
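// Up to three attempts in total: a 429 waits out the server's retry-after
// hint, a 500 retries after a fixed two second pause, and any other
// response (or a 401) is returned to the caller immediately.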
let mut attempts = 0;
let request = request_builder.build().unwrap();
loop {
attempts += 1;
if attempts > 3 {
return Err(SpaceTradersClientError::TooManyRetries);
}
match self.client.execute(request.try_clone().unwrap()).await {
Ok(response) => {
let response_headers = response.headers()
.iter().fold(HashMap::new(), |mut acc, (h, v)| {
acc.insert(h.to_string(), v.to_str().unwrap().to_string());
acc
});
let response_status = response.status();
let response_text = response.text().await?;
if let Some(post_request_hook) = self.post_request_hook {
post_request_hook(
method, url, body,
Some(response_status.as_u16()),
Some(&response_headers),
Some(&response_text),
None,
);
}
// Check if the response was a throttle exception (status 429 means we have been rate limited)
if response_status == 429 {
let retry_after: f64 = response_headers
.get("retry-after").unwrap_or(&"1.0".to_string())
.parse().unwrap_or(1.0);
// If it was a throttle then wait based on the retry-after response headers
log::warn!("Rate limited... waiting for {} seconds before trying again. Request: \"{} {}\"", retry_after, request.method(), request.url());
tokio::time::sleep(Duration::from_secs_f64(retry_after)).await;
continue;
} else if response_status == 401 {
return Err(SpaceTradersClientError::Unauthorized);
} else if response_status == 500 {
// If there was an internal server error then try the request again in 2 seconds
log::error!("Caught internal server error retrying in 2 seconds. {}", response_text);
tokio::time::sleep(Duration::from_secs(2)).await;
continue;
} else {
return Ok(
SpaceTradersClientResponse {
status_code: response_status.as_u16(),
response_headers,
response_text,
}
);
}
}
Err(e) => {
let space_traders_client_error = SpaceTradersClientError::Http(e);
if let Some(post_request_hook) = self.post_request_hook {
post_request_hook(
method, url, body,
None, None, None,
Some(&space_traders_client_error)
);
}
return Err(space_traders_client_error);
}
}
}
}
}
/// Get a rate-limited http client that is safe to use across threads and won't break rate-limiting
pub fn get_http_client(proxy: Option<String>) -> HttpClient {
Arc::new(Mutex::new(SpaceTradersClient::new(proxy)))
}
/// Get a rate-limited http client, with post receive hook, that is safe to use across threads and
/// won't break rate-limiting
pub fn get_http_client_with_hook(proxy: Option<String>, hook: PostRequestHook) -> HttpClient {
let mut client = SpaceTradersClient::new(proxy);
client.set_post_request_hook(hook);
Arc::new(Mutex::new(client))
}
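// Hypothetical usage sketch (names are illustrative; the signature follows
// PostRequestHook above):
//
// fn log_hook(method: &str, url: &str, _req: Option<&str>, status: Option<u16>,
//             _hdrs: Option<&HashMap<String, String>>, _resp: Option<&str>,
//             err: Option<&SpaceTradersClientError>) {
//     log::info!("{} {} -> {:?} (errored: {})", method, url, status, err.is_some());
// }
// let client = get_http_client_with_hook(None, log_hook);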
/// Parse a response string into the type represented by T
/// If the `response_text` cannot be parsed into type T then it is assumed that an error
/// occurred and a shared::ErrorMessage will be returned
///
/// # Arguments
///
/// * `response_text` - A string containing the JSON response to be parsed
fn parse_response<'a, T: Deserialize<'a>>(response_text: &'a str) -> Result<T, SpaceTradersClientError> {
match serde_json::from_str::<T>(&response_text) {
Ok(o) => Ok(o),
Err(e) => {
log::error!("Error processing type {:?}: {}", std::any::type_name::<T>(), e);
log::error!("Error response: {}", &response_text);
match serde_json::from_str::<shared::ErrorMessage>(&response_text) {
Ok(error_message) => Err(SpaceTradersClientError::ApiError(error_message)),
Err(e) => Err(SpaceTradersClientError::JsonParse(e)),
}
}
}
}
/// Claim a username and get a token
///
/// # Arguments
///
/// * `username` - A string containing the username to get a token for
pub async fn claim_username(http_client: HttpClient, username: String) -> Result<responses::ClaimUsername, SpaceTradersClientError> {
let http_client = http_client.lock().await;
let response = http_client.execute_request(
"POST",
&format!("https://api.spacetraders.io/users/{}/token", username),
Some("{\"message\":\"this body doesn't actually matter\"}"),
None,
)
.await?;
parse_response::<responses::ClaimUsername>(&response.response_text)
}
/// Get the status of the game API.
pub async fn get_game_status(http_client: HttpClient) -> Result<responses::GameStatus, SpaceTradersClientError> {
let http_client = http_client.lock().await;
let response = http_client.execute_request(
"GET",
"https://api.spacetraders.io/game/status",
None,
None,
)
.await?;
if response.status_code == StatusCode::SERVICE_UNAVAILABLE.as_u16() {
return Err(SpaceTradersClientError::ServiceUnavailable);
}
parse_response::<responses::GameStatus>(&response.response_text)
}
/// Get the users current ip address
pub async fn get_my_ip_address(http_client: HttpClient) -> Result<responses::MyIpAddress, SpaceTradersClientError> {
let http_client = http_client.lock().await;
let response = http_client.execute_request(
"GET",
"https://api.ipify.org?format=json",
None,
None,
)
.await?;
parse_response::<responses::MyIpAddress>(&response.response_text)
}
/// A SpaceTraders client that is associated to a specific username
#[derive(Debug, Clone)]
pub struct Client {
http_client: HttpClient,
/// The user's username
pub username: String,
/// The user's access token
pub token: String,
}
impl Client {
/// Create a new Client bound to a username and access token
///
/// # Arguments
///
/// * `username` - A string containing the username of the current player
/// * `token` - A string containing the access token for the username provided
pub fn new(http_client: HttpClient, username: String, token: String) -> Client {
Client {
http_client,
username,
token,
}
}
//////////////////////////////////////////////
///// ACCOUNT
//////////////////////////////////////////////
/// Get all information about the current user
pub async fn get_my_info(&self) -> Result<responses::UserInfo, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
"https://api.spacetraders.io/my/account",
None,
Some(&self.token),
)
.await?;
parse_response::<responses::UserInfo>(&response.response_text)
}
//////////////////////////////////////////////
///// FLIGHT PLANS
//////////////////////////////////////////////
/// Get the current details of a flight plan
///
/// # Arguments | let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
&format!("https://api.spacetraders.io/my/flight-plans/{}", flight_plan_id),
None,
Some(&self.token),
)
.await?;
parse_response::<responses::FlightPlan>(&response.response_text)
}
/// Create a flight plan.
///
/// # Arguments
///
/// * `ship_id` - A string containing the ship_id to create the flight plan for
/// * `destination` - A string containing the location to send the ship to
pub async fn create_flight_plan(&self, ship_id: String, destination: String) -> Result<responses::FlightPlan, SpaceTradersClientError> {
let flight_plan_request = requests::FlightPlanRequest {
ship_id: ship_id.clone(),
destination: destination.clone(),
};
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"POST",
"https://api.spacetraders.io/my/flight-plans",
Some(&serde_json::to_string(&flight_plan_request).unwrap()),
Some(&self.token),
)
.await?;
parse_response::<responses::FlightPlan>(&response.response_text)
}
//////////////////////////////////////////////
///// LEADERBOARD
//////////////////////////////////////////////
// TODO: leaderboard/networth
//////////////////////////////////////////////
///// LOANS
//////////////////////////////////////////////
/// Get any loans taken out by the current user
pub async fn get_my_loans(&self) -> Result<responses::LoanInfo, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
"https://api.spacetraders.io/my/loans",
None,
Some(&self.token),
)
.await?;
parse_response::<responses::LoanInfo>(&response.response_text)
}
/// Pay off a loan completely
///
/// # Arguments
///
/// * `loan_id` - A string containing the loan_id of the loan to pay off
pub async fn pay_off_loan(&self, loan_id: &str) -> Result<responses::PayLoanResponse, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"PUT",
&format!("https://api.spacetraders.io/my/loans/{}", loan_id),
Some("{\"message\":\"this body doesn't actually matter\"}"),
Some(&self.token),
)
.await?;
parse_response::<responses::PayLoanResponse>(&response.response_text)
}
/// Request a new loan
///
/// # Arguments
///
/// * `loan_type` - A LoanType with the type of loan being requested for the current user
pub async fn request_new_loan(&self, loan_type: shared::LoanType) -> Result<responses::RequestLoan, SpaceTradersClientError> {
let request_new_loan_request = requests::RequestNewLoanRequest {
loan_type
};
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"POST",
"https://api.spacetraders.io/my/loans",
Some(&serde_json::to_string(&request_new_loan_request).unwrap()),
Some(&self.token),
)
.await?;
parse_response::<responses::RequestLoan>(&response.response_text)
}
//////////////////////////////////////////////
///// LOCATIONS
//////////////////////////////////////////////
/// Get location info about a specific location
///
/// # Arguments
///
/// * `location_symbol` - A string containing the location name to get info about
pub async fn get_location_info(&self, location_symbol: String) -> Result<responses::LocationInfo, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
&format!("https://api.spacetraders.io/locations/{}", location_symbol),
None,
Some(&self.token),
)
.await?;
parse_response::<responses::LocationInfo>(&response.response_text)
}
// TODO:
// /// Get all the locations in a particular system
// ///
// /// # Arguments
// ///
// /// * `system_symbol` - A string containing the system name to get the locations from
// /// * `location_type` - An optional LocationType if you want to filter the locations by type
// pub async fn get_locations_in_system(&self, system_symbol: String) -> Result<responses::AvailableLocations, SpaceTradersClientError> {
// let http_client = self.http_client.lock().await;
// let response = http_client.execute_request(
// "GET",
// &format!("https://api.spacetraders.io/game/systems/{}/locations", system_symbol),
// None,
// Some(&self.token),
// )
// .await?;
//
// parse_response::<responses::AvailableLocations>(&response.response_text)
// }
/// Get the marketplace data about a location.
///
/// # Note
///
/// You must have a ship docked at the location in order to get its marketplace data
///
/// # Arguments
///
/// * `location_symbol` - A string containing the name of the location to get marketplace data for
pub async fn get_location_marketplace(&self, location_symbol: &str) -> Result<responses::LocationMarketplace, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
&format!("https://api.spacetraders.io/locations/{}/marketplace", location_symbol),
None,
Some(&self.token),
)
.await?;
parse_response::<responses::LocationMarketplace>(&response.response_text)
}
// TODO: Get Ships at a location
//////////////////////////////////////////////
///// PURCHASE ORDERS
//////////////////////////////////////////////
/// Create a purchase order to transfer goods from a location to your ship
///
/// # Arguments
///
/// * `ship_id` - A string containing the id of the ship you'd like to transfer the goods into
/// * `good` - A Good enum containing the type of good you'd like to transfer
/// * `quantity` - An i32 containing the quantity of good you'd like transferred
pub async fn create_purchase_order(&self, ship_id: String, good: shared::Good, quantity: i32) -> Result<responses::PurchaseOrder, SpaceTradersClientError> {
let purchase_order_request = requests::PurchaseOrderRequest {
ship_id: ship_id.clone(),
good,
quantity,
};
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"POST",
"https://api.spacetraders.io/my/purchase-orders",
Some(&serde_json::to_string(&purchase_order_request).unwrap()),
Some(&self.token),
)
.await?;
parse_response::<responses::PurchaseOrder>(&response.response_text)
}
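    // A minimal usage sketch (client construction is not shown in this file,
    // and `shared::Good::Fuel` is an assumed variant):
    //
    //   let order = client
    //       .create_purchase_order("my-ship-id".to_string(), shared::Good::Fuel, 20)
    //       .await?;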
//////////////////////////////////////////////
///// SELL ORDERS
//////////////////////////////////////////////
    /// Create a sell order to transfer goods from your ship to a location. Your ship will
    /// automatically sell the goods to whatever location it is docked at
    ///
    /// # Arguments
    ///
    /// * `ship_id` - A string containing the id of the ship you'd like to transfer the goods from
    /// * `good` - A Good enum containing the type of good you'd like to transfer
    /// * `quantity` - An i32 containing the quantity of the good you'd like transferred
pub async fn create_sell_order(&self, ship_id: String, good: shared::Good, quantity: i32) -> Result<responses::PurchaseOrder, SpaceTradersClientError> {
let sell_order_request = requests::SellOrderRequest {
ship_id: ship_id.clone(),
good,
quantity,
};
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"POST",
"https://api.spacetraders.io/my/sell-orders",
Some(&serde_json::to_string(&sell_order_request).unwrap()),
Some(&self.token),
)
.await?;
parse_response::<responses::PurchaseOrder>(&response.response_text)
}
//////////////////////////////////////////////
///// SHIPS
//////////////////////////////////////////////
    /// Add a ship to the user's inventory by purchasing it
///
/// # Arguments
///
/// * `location_symbol` - A string containing the location you'd like to purchase the ship from
/// * `ship_type` - A string containing the type of ship you'd like to purchase
pub async fn purchase_ship(&self, location_symbol: String, ship_type: String) -> Result<responses::PurchaseShip, SpaceTradersClientError> {
let purchase_ship_request = requests::PurchaseShipRequest {
location: location_symbol.clone(),
ship_type: ship_type.clone(),
};
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"POST",
"https://api.spacetraders.io/my/ships",
Some(&serde_json::to_string(&purchase_ship_request).unwrap()),
Some(&self.token),
)
.await?;
parse_response::<responses::PurchaseShip>(&response.response_text)
}
/// Get info about a specific ship for the current user
///
/// # Arguments
///
/// * `ship_id` - A string id of the ship you'd like info about
pub async fn get_my_ship(&self, ship_id: &str) -> Result<responses::MyShip, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
&format!("https://api.spacetraders.io/my/ships/{}", ship_id),
None,
Some(&self.token),
)
.await?;
parse_response::<responses::MyShip>(&response.response_text)
}
/// Get all your ships
pub async fn get_my_ships(&self) -> Result<responses::MyShips, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
"https://api.spacetraders.io/my/ships",
None,
Some(&self.token),
)
.await?;
parse_response::<responses::MyShips>(&response.response_text)
}
/// Jettison cargo from a ship
pub async fn jettison_cargo(&self, ship_id: &str, good: shared::Good, quantity: i32) -> Result<responses::JettisonCargo, SpaceTradersClientError> {
let jettison_cargo_request = requests::JettisonCargo {
good,
quantity,
};
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"POST",
&format!("https://api.spacetraders.io/my/ships/{}/jettison", ship_id),
Some(&serde_json::to_string(&jettison_cargo_request).unwrap()),
Some(&self.token),
)
.await?;
parse_response::<responses::JettisonCargo>(&response.response_text)
}
// TODO: Scrap your ship for credits
// TODO: Transfer cargo between ships
//////////////////////////////////////////////
///// STRUCTURES
//////////////////////////////////////////////
// TODO: Create a new structure
// TODO: Deposit goods to a structure you own
// TODO: Deposit goods to a structure
// TODO: See specific structure
// TODO: Transfer goods from your structure to a ship
// TODO: Use to see a specific structure
// TODO: Use to see all of your structures
//////////////////////////////////////////////
///// SYSTEMS
//////////////////////////////////////////////
// TODO: This endpoint should probably be "Get a list of all available ships in the system."
/// Get all ships that are available for sale
pub async fn get_ships_for_sale(&self) -> Result<responses::ShipsForSale, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
"https://api.spacetraders.io/game/ships",
None,
Some(&self.token),
)
.await?;
parse_response::<responses::ShipsForSale>(&response.response_text)
}
// TODO: Get all active flight plans in the system.
// TODO: Get info on a system's docked ships
// TODO: Get location info for a system
// TODO: Get systems info
// TODO: I'm not sure which endpoint this is supposed to be converted to
/// Get information about all systems
pub async fn get_systems_info(&self) -> Result<responses::SystemsInfo, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
"https://api.spacetraders.io/game/systems",
None,
Some(&self.token),
)
.await?;
parse_response::<responses::SystemsInfo>(&response.response_text)
}
//////////////////////////////////////////////
///// TYPES
//////////////////////////////////////////////
// TODO: Get available goods
/// Get all available loans
pub async fn get_available_loans(&self) -> Result<responses::AvailableLoans, SpaceTradersClientError> {
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"GET",
"https://api.spacetraders.io/types/loans",
None,
Some(&self.token),
)
.await?;
parse_response::<responses::AvailableLoans>(&response.response_text)
}
// TODO: Get available structures
// TODO: Get info on available ships
//////////////////////////////////////////////
///// WARP JUMP
//////////////////////////////////////////////
/// Attempt a warp jump
pub async fn attempt_warp_jump(&self, ship_id: String) -> Result<responses::FlightPlan, SpaceTradersClientError> {
let warp_jump_request = requests::WarpJump {
ship_id
};
let http_client = self.http_client.lock().await;
let response = http_client.execute_request(
"POST",
"https://api.spacetraders.io/my/warp-jumps",
Some(&serde_json::to_string(&warp_jump_request).unwrap()),
Some(&self.token),
)
.await?;
parse_response::<responses::FlightPlan>(&response.response_text)
}
} | ///
/// * `flight_plan_id` - A string containing the flight plan id
pub async fn get_flight_plan(&self, flight_plan_id: String) -> Result<responses::FlightPlan, SpaceTradersClientError> { |
encrypt.go | package encrypt
import (
"bytes"
"crypto/cipher"
"crypto/des"
)
func DesEncryption(key, plainText []byte) ([]byte, error) |
// DesDecryption reverses DesEncryption; note that the key also serves as the
// CBC IV, mirroring the encryption side.
func DesDecryption(key, cipherText []byte) ([]byte, error) {
if len(key) > 8 {
key = key[:8]
} else if len(key) < 8 {
key = PKCS5Padding(key, 8)
}
block, err := des.NewCipher(key)
if err != nil {
return nil, err
}
blockMode := cipher.NewCBCDecrypter(block, key)
origData := make([]byte, len(cipherText))
blockMode.CryptBlocks(origData, cipherText)
origData = PKCS5UnPadding(origData)
return origData, nil
}
// PKCS5Padding pads src to a multiple of blockSize; each pad byte stores the
// pad length (PKCS#5/PKCS#7 style).
func PKCS5Padding(src []byte, blockSize int) []byte {
padding := blockSize - len(src)%blockSize
padBytes := bytes.Repeat([]byte{byte(padding)}, padding)
return append(src, padBytes...)
}
// PKCS5UnPadding strips the padding added by PKCS5Padding; it assumes src is
// non-empty and well formed.
func PKCS5UnPadding(src []byte) []byte {
length := len(src)
unPadding := int(src[length-1])
return src[:(length - unPadding)]
}
| {
if len(key) > 8 {
key = key[:8]
} else if len(key) < 8 {
key = PKCS5Padding(key, 8)
}
block, err := des.NewCipher(key)
if err != nil {
return nil, err
}
blockSize := block.BlockSize()
origData := PKCS5Padding(plainText, blockSize)
	// Note: the key is reused as the CBC IV; a random per-message IV would be
	// the stronger choice.
	blockMode := cipher.NewCBCEncrypter(block, key)
encryptBytes := make([]byte, len(origData))
blockMode.CryptBlocks(encryptBytes, origData)
return encryptBytes, nil
} |
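// A minimal round-trip sketch (a 6-byte key is padded to 8 bytes by the logic
// above; error handling elided):
//
//	ct, _ := DesEncryption([]byte("secret"), []byte("hello world"))
//	pt, _ := DesDecryption([]byte("secret"), ct) // pt == []byte("hello world")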
archive.py | import re
from anchore_engine.apis.authorization import (
ActionBoundPermission,
Permission,
RequestingAccountValue,
get_authorizer,
)
from anchore_engine.apis.context import ApiRequestContextProxy
from anchore_engine.apis.exceptions import BadRequest
from anchore_engine.clients.services import internal_client_for
from anchore_engine.clients.services.catalog import CatalogClient
from anchore_engine.common.helpers import make_response_error
from anchore_engine.configuration.localconfig import (
ADMIN_ACCOUNT_NAME,
GLOBAL_RESOURCE_DOMAIN,
)
authorizer = get_authorizer()
digest_regex = re.compile("^sha256:[abcdef0-9]+$")
def handle_proxy_response(resp):
    if issubclass(resp.__class__, Exception):
if hasattr(resp, "httpcode"):
return make_response_error(resp, in_httpcode=resp.httpcode), resp.httpcode
else:
return make_response_error(resp, in_httpcode=500), 500
else:
return resp, 200
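# A minimal sketch of the helper's contract (the BadRequest example assumes the
# exception carries an `httpcode` attribute):
#
#   handle_proxy_response({"archives": []})  -> ({"archives": []}, 200)
#   handle_proxy_response(BadRequest(...))   -> (error body, BadRequest.httpcode)
#   handle_proxy_response(RuntimeError("x")) -> (error body, 500)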
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def | ():
"""
GET /archives
:return: JSON object for archive summary
"""
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
return handle_proxy_response(client.list_archives())
except Exception as ex:
return handle_proxy_response(ex)
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_analysis_archive_rules(system_global=True):
"""
GET /archives/rules
:return:
"""
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
return handle_proxy_response(
client.list_analysis_archive_rules(system_global=system_global)
)
except Exception as ex:
return handle_proxy_response(ex)
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def create_analysis_archive_rule(rule):
"""
POST /archives/rules
:param rule: the rule's json object definition
:return:
"""
# Permission check on the system_global field, only admins
if rule.get("system_global"):
perm = Permission(GLOBAL_RESOURCE_DOMAIN, "createArchiveTransitionRule", "*")
# Will raise exception if unauthorized
authorizer.authorize(ApiRequestContextProxy.identity(), [perm])
# Validation for max_images_per_account
if (
not rule.get("system_global")
and rule.get("max_images_per_account", None) is not None
):
raise BadRequest(
"Cannot set max_images_per_account on a rule that isn't system_global", {}
)
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
return handle_proxy_response(client.add_analysis_archive_rule(rule))
except Exception as ex:
return handle_proxy_response(ex)
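# An illustrative payload for the endpoint above (values are hypothetical;
# max_images_per_account is only legal together with system_global):
#
#   {
#       "system_global": True,
#       "max_images_per_account": 100,
#   }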
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def delete_analysis_archive_rule(ruleId):
"""
DELETE /archives/rules/{ruleId}
:param ruleId:
:return:
"""
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
resp1 = handle_proxy_response(client.delete_analysis_archive_rule(ruleId))
if resp1[1] == 404 and ApiRequestContextProxy.namespace() != ADMIN_ACCOUNT_NAME:
# Yes, this is a bit ugly
# Get the rule, check if a global rule and adjust error code appropriately
try:
c2 = internal_client_for(CatalogClient, ADMIN_ACCOUNT_NAME)
r2 = c2.get_analysis_archive_rule(ruleId)
if r2 and r2.get("system_global", False):
return (
make_response_error(
"Non-admins cannot modify/delete system global rules",
in_httpcode=403,
),
403,
)
            except Exception:
                pass
return resp1
except Exception as ex:
return handle_proxy_response(ex)
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_analysis_archive_rule(ruleId):
"""
GET /archives/rules/{ruleId}
:param ruleId:
:return:
"""
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
resp1 = handle_proxy_response(client.get_analysis_archive_rule(ruleId))
if resp1[1] == 404 and ApiRequestContextProxy.namespace() != ADMIN_ACCOUNT_NAME:
# Yes, this is a bit ugly
# Get the rule, check if a global rule
try:
c2 = internal_client_for(CatalogClient, ADMIN_ACCOUNT_NAME)
r2 = handle_proxy_response(c2.get_analysis_archive_rule(ruleId))
if r2 and r2[1] == 200 and r2[0].get("system_global", False):
# Allow it
return handle_proxy_response(r2)
            except Exception:
                pass
return resp1
except Exception as ex:
return handle_proxy_response(ex)
# @authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
# def get_analysis_archive_rule_history(ruleId):
# """
#
# GET /archives/rules/{ruleId}/history
#
# :param ruleId:
# :return: list of events for the rule
# """
# client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
# try:
# resp1 = handle_proxy_response(client.get_analysis_archive_rule_history(ruleId))
# if resp1[1] == 404 and ApiRequestContextProxy.namespace() != ADMIN_ACCOUNT_NAME:
# # Yes, this is a bit ugly
# # Get the rule, check if a global rule and adjust error code appropriately
# try:
# c2 = internal_client_for(CatalogClient, ADMIN_ACCOUNT_NAME)
# r2 = handle_proxy_response(c2.get_analysis_archive_rule(ruleId))
# if r2 and r2[1] == 200 and r2[0].get('system_global', False):
# return make_response_error('Non-admins cannot modify/delete system global rules', in_httpcode=403), 403
# except Exception as ex:
# pass
# return resp1
# except Exception as ex:
# return handle_proxy_response(ex)
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def list_analysis_archive():
"""
GET /archives/images
:return: array of archivedimage json objects
"""
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
return handle_proxy_response(client.list_archived_analyses())
except Exception as ex:
return handle_proxy_response(ex)
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def archive_image_analysis(imageReferences):
"""
POST /archives/images
:param imageReferences: list of json object that reference images to archive
:return:
"""
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
return handle_proxy_response(client.archive_analyses(imageReferences))
except Exception as ex:
return handle_proxy_response(ex)
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def get_archived_analysis(imageDigest):
"""
GET /archives/images/{imageDigest}
:param imageDigest:
:return:
"""
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
return handle_proxy_response(client.get_archived_analysis(imageDigest))
except Exception as ex:
return handle_proxy_response(ex)
@authorizer.requires([ActionBoundPermission(domain=RequestingAccountValue())])
def delete_archived_analysis(imageDigest):
"""
DELETE /archives/images/{imageDigest}
:param imageDigest:
:return:
"""
client = internal_client_for(CatalogClient, ApiRequestContextProxy.namespace())
try:
return handle_proxy_response(client.delete_archived_analysis(imageDigest))
except Exception as e:
return handle_proxy_response(e)
| list_archives |
camera.rs | pub use crate::ray::Ray;
use crate::vec3::random_in_unit_disk;
pub use crate::vec3::Point3;
pub use crate::vec3::Vec3;
pub fn degrees_to_radians(degrees: f64) -> f64 |
#[derive(Copy, Clone)]
pub struct Camera {
pub origin: Point3,
pub lower_left_corner: Point3,
pub horizontal: Vec3,
pub vertical: Vec3,
pub u: Vec3,
pub v: Vec3,
pub w: Vec3,
pub lens_radius: f64,
}
impl Camera {
pub fn new(
vfov: f64,
aspect_ratio: f64,
look_from: Point3,
look_at: Point3,
vup: Vec3,
aperture: f64,
focus_dist: f64,
) -> Self {
let theta = degrees_to_radians(vfov);
let h = (theta / 2.0).tan();
let viewport_height = 2.0 * h;
let viewport_width = aspect_ratio * viewport_height;
        let w = (look_from - look_at).unit();
        let u = Vec3::cross(vup, w).unit();
        let v = Vec3::cross(w, u);
        Self {
            origin: look_from,
            horizontal: u * viewport_width * focus_dist,
            vertical: v * viewport_height * focus_dist,
            lower_left_corner: look_from
                - u * viewport_width / 2.0 * focus_dist
                - v * viewport_height / 2.0 * focus_dist
                - w * focus_dist,
            u,
            v,
            w,
lens_radius: aperture / 2.0,
}
}
pub fn get_ray(&self, s: f64, t: f64) -> Ray {
        let rd = random_in_unit_disk() * self.lens_radius;
let offset = self.u * rd.x + self.v * rd.y;
Ray::new(
self.origin + offset,
self.lower_left_corner + self.horizontal * s + self.vertical * t - self.origin - offset,
)
}
}
| {
degrees * std::f64::consts::PI / 180.0
} |
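// A minimal usage sketch (assumes `Point3::new` and `Vec3::new` constructors,
// which live in vec3 and are not shown here):
//
//   let cam = Camera::new(
//       90.0,                        // vfov in degrees
//       16.0 / 9.0,                  // aspect_ratio
//       Point3::new(0.0, 0.0, 0.0),  // look_from
//       Point3::new(0.0, 0.0, -1.0), // look_at
//       Vec3::new(0.0, 1.0, 0.0),    // vup
//       0.1,                         // aperture
//       1.0,                         // focus_dist
//   );
//   let ray = cam.get_ray(0.5, 0.5); // ray through the centre of the viewport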
player.ts | import {Chart, difficultyShorthandOrder} from "./meta"
export type LampType = "NO_PLAY" | "ASSIST_CLEAR" | "EASY_CLEAR" | "CLEAR" | "HARD_CLEAR" | "EX_HARD_CLEAR"
| "FULL_COMBO" | "FAILED"
export type ServiceType = "573" | "Arcana"
export function lampOrder(lamp: LampType): number {
const lamps: LampType[] = ["NO_PLAY", "FAILED", "ASSIST_CLEAR", "EASY_CLEAR", "CLEAR", "HARD_CLEAR", "EX_HARD_CLEAR", "FULL_COMBO"]
return lamps.indexOf(lamp)
}
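// Concrete values implied by the ordering above:
//   lampOrder("NO_PLAY") === 0, lampOrder("CLEAR") === 4, lampOrder("FULL_COMBO") === 7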
export interface PlayerBest {
readonly chart: Chart
readonly server: ServiceType
readonly lamp: LampType
readonly ex_score: number
readonly miss_count?: number
readonly timestamp: Date
}
// undefined sorts last; strict checks keep falsy values such as 0 comparable
function cmp<T>(v1: T | undefined, v2: T | undefined): -1 | 0 | 1 {
    if (v1 === undefined) {
        return 1
    } else if (v2 === undefined) {
        return -1
    } else {
        return v1 === v2 ? 0 : (v1 < v2 ? -1 : 1)
    }
}
type FieldMapper<T> = (t: T) => string | number | undefined;
class ChainedCmp<T> {
private readonly criterionMappers: FieldMapper<T>[]
private readonly i1: T;
private readonly i2: T;
constructor(i1: T, i2: T) {
this.i1 = i1
this.i2 = i2
this.criterionMappers = []
}
cmp(): -1 | 0 | 1 {
        const mapper = this.criterionMappers.shift()
if (!mapper) {
return 0
} else {
const v1 = mapper(this.i1)
const v2 = mapper(this.i2)
return v1 === v2 ? this.cmp() : cmp(v1, v2)
}
}
compare(mapper: FieldMapper<T>): this {
this.criterionMappers.push(mapper)
return this
}
}
export function playerBestCmp(b1: PlayerBest, b2: PlayerBest): -1 | 0 | 1 {
return new ChainedCmp(b1, b2)
.compare(best => difficultyShorthandOrder(best))
.compare(best => best.chart.level)
.compare(best => best.chart.music.folder)
.compare(best => lampOrder(best.lamp))
.compare(best => best.ex_score)
.compare(best => best.miss_count)
.compare(best => best.chart.music.title.toLocaleLowerCase()) | } | .compare(best => best.chart.music.artist)
.compare(best => best.chart.music.genre)
.compare(best => best.server)
.cmp() |
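// A minimal usage sketch (the PlayerBest values come from elsewhere; only the
// sort call is shown):
//
//   const ranked = [...bests].sort(playerBestCmp)
//   // orders by difficulty, level, folder, lamp, EX score, miss count,
//   // title, artist, genre, then server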
metric_service.pb.go | //
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1-devel
// protoc v3.15.6
// source: metric_service.proto
package ai_flow
import (
context "context"
_ "github.com/grpc-ecosystem/grpc-gateway/third_party/googleapis/google/api"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type MetricNameRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
MetricName string `protobuf:"bytes,1,opt,name=metric_name,json=metricName,proto3" json:"metric_name,omitempty"`
}
func (x *MetricNameRequest) Reset() {
*x = MetricNameRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MetricNameRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MetricNameRequest) ProtoMessage() {}
func (x *MetricNameRequest) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MetricNameRequest.ProtoReflect.Descriptor instead.
func (*MetricNameRequest) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{0}
}
func (x *MetricNameRequest) GetMetricName() string {
if x != nil {
return x.MetricName
}
return ""
}
type UuidRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Uuid int64 `protobuf:"varint,1,opt,name=uuid,proto3" json:"uuid,omitempty"`
}
func (x *UuidRequest) Reset() {
*x = UuidRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *UuidRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*UuidRequest) ProtoMessage() {}
func (x *UuidRequest) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use UuidRequest.ProtoReflect.Descriptor instead.
func (*UuidRequest) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{1}
}
func (x *UuidRequest) GetUuid() int64 {
if x != nil {
return x.Uuid
}
return 0
}
type MetricMetaRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
MetricMeta *MetricMetaProto `protobuf:"bytes,1,opt,name=metric_meta,json=metricMeta,proto3" json:"metric_meta,omitempty"`
}
func (x *MetricMetaRequest) Reset() {
*x = MetricMetaRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MetricMetaRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MetricMetaRequest) ProtoMessage() {}
func (x *MetricMetaRequest) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MetricMetaRequest.ProtoReflect.Descriptor instead.
func (*MetricMetaRequest) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{2}
}
func (x *MetricMetaRequest) GetMetricMeta() *MetricMetaProto {
if x != nil {
return x.MetricMeta
}
return nil
}
type MetricMetaResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
ReturnCode int64 `protobuf:"varint,1,opt,name=return_code,json=returnCode,proto3" json:"return_code,omitempty"`
ReturnMsg string `protobuf:"bytes,2,opt,name=return_msg,json=returnMsg,proto3" json:"return_msg,omitempty"`
MetricMeta *MetricMetaProto `protobuf:"bytes,3,opt,name=metric_meta,json=metricMeta,proto3" json:"metric_meta,omitempty"`
}
func (x *MetricMetaResponse) Reset() {
*x = MetricMetaResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MetricMetaResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MetricMetaResponse) ProtoMessage() {}
func (x *MetricMetaResponse) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MetricMetaResponse.ProtoReflect.Descriptor instead.
func (*MetricMetaResponse) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{3}
}
func (x *MetricMetaResponse) GetReturnCode() int64 {
if x != nil {
return x.ReturnCode
}
return 0
}
func (x *MetricMetaResponse) GetReturnMsg() string {
if x != nil {
return x.ReturnMsg
}
return ""
}
func (x *MetricMetaResponse) GetMetricMeta() *MetricMetaProto {
if x != nil {
return x.MetricMeta
}
return nil
}
type MetricSummaryRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
MetricSummary *MetricSummaryProto `protobuf:"bytes,1,opt,name=metric_summary,json=metricSummary,proto3" json:"metric_summary,omitempty"`
}
func (x *MetricSummaryRequest) Reset() {
*x = MetricSummaryRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MetricSummaryRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MetricSummaryRequest) ProtoMessage() {}
func (x *MetricSummaryRequest) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MetricSummaryRequest.ProtoReflect.Descriptor instead.
func (*MetricSummaryRequest) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{4}
}
func (x *MetricSummaryRequest) GetMetricSummary() *MetricSummaryProto {
if x != nil {
return x.MetricSummary
}
return nil
}
type MetricSummaryResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
ReturnCode int64 `protobuf:"varint,1,opt,name=return_code,json=returnCode,proto3" json:"return_code,omitempty"`
ReturnMsg string `protobuf:"bytes,2,opt,name=return_msg,json=returnMsg,proto3" json:"return_msg,omitempty"`
MetricSummary *MetricSummaryProto `protobuf:"bytes,3,opt,name=metric_summary,json=metricSummary,proto3" json:"metric_summary,omitempty"`
}
func (x *MetricSummaryResponse) Reset() {
*x = MetricSummaryResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *MetricSummaryResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*MetricSummaryResponse) ProtoMessage() {}
func (x *MetricSummaryResponse) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use MetricSummaryResponse.ProtoReflect.Descriptor instead.
func (*MetricSummaryResponse) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{5}
}
func (x *MetricSummaryResponse) GetReturnCode() int64 {
if x != nil {
return x.ReturnCode
}
return 0
}
func (x *MetricSummaryResponse) GetReturnMsg() string {
if x != nil {
return x.ReturnMsg
}
return ""
}
func (x *MetricSummaryResponse) GetMetricSummary() *MetricSummaryProto {
if x != nil {
return x.MetricSummary
}
return nil
}
type ListDatasetMetricMetasRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
DatasetName string `protobuf:"bytes,1,opt,name=dataset_name,json=datasetName,proto3" json:"dataset_name,omitempty"`
ProjectName *wrapperspb.StringValue `protobuf:"bytes,2,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"`
}
func (x *ListDatasetMetricMetasRequest) Reset() {
*x = ListDatasetMetricMetasRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListDatasetMetricMetasRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListDatasetMetricMetasRequest) ProtoMessage() {}
func (x *ListDatasetMetricMetasRequest) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListDatasetMetricMetasRequest.ProtoReflect.Descriptor instead.
func (*ListDatasetMetricMetasRequest) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{6}
}
func (x *ListDatasetMetricMetasRequest) GetDatasetName() string {
if x != nil {
return x.DatasetName
}
return ""
}
func (x *ListDatasetMetricMetasRequest) GetProjectName() *wrapperspb.StringValue {
if x != nil {
return x.ProjectName
}
return nil
}
type ListModelMetricMetasRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
ModelName string `protobuf:"bytes,1,opt,name=model_name,json=modelName,proto3" json:"model_name,omitempty"`
ProjectName *wrapperspb.StringValue `protobuf:"bytes,2,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"`
}
func (x *ListModelMetricMetasRequest) Reset() {
*x = ListModelMetricMetasRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListModelMetricMetasRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListModelMetricMetasRequest) ProtoMessage() {}
func (x *ListModelMetricMetasRequest) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListModelMetricMetasRequest.ProtoReflect.Descriptor instead.
func (*ListModelMetricMetasRequest) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{7}
}
func (x *ListModelMetricMetasRequest) GetModelName() string {
if x != nil {
return x.ModelName
}
return ""
}
func (x *ListModelMetricMetasRequest) GetProjectName() *wrapperspb.StringValue {
if x != nil {
return x.ProjectName
}
return nil
}
type ListMetricMetasResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
ReturnCode int64 `protobuf:"varint,1,opt,name=return_code,json=returnCode,proto3" json:"return_code,omitempty"`
ReturnMsg string `protobuf:"bytes,2,opt,name=return_msg,json=returnMsg,proto3" json:"return_msg,omitempty"`
MetricMetas []*MetricMetaProto `protobuf:"bytes,3,rep,name=metric_metas,json=metricMetas,proto3" json:"metric_metas,omitempty"`
}
func (x *ListMetricMetasResponse) Reset() {
*x = ListMetricMetasResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[8]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListMetricMetasResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListMetricMetasResponse) ProtoMessage() {}
func (x *ListMetricMetasResponse) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[8]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListMetricMetasResponse.ProtoReflect.Descriptor instead.
func (*ListMetricMetasResponse) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{8}
}
func (x *ListMetricMetasResponse) GetReturnCode() int64 {
if x != nil {
return x.ReturnCode
}
return 0
}
func (x *ListMetricMetasResponse) GetReturnMsg() string {
if x != nil {
return x.ReturnMsg
}
return ""
}
func (x *ListMetricMetasResponse) GetMetricMetas() []*MetricMetaProto {
if x != nil {
return x.MetricMetas
}
return nil
}
type ListMetricSummariesRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
MetricName *wrapperspb.StringValue `protobuf:"bytes,1,opt,name=metric_name,json=metricName,proto3" json:"metric_name,omitempty"`
MetricKey *wrapperspb.StringValue `protobuf:"bytes,2,opt,name=metric_key,json=metricKey,proto3" json:"metric_key,omitempty"`
ModelVersion *wrapperspb.Int32Value `protobuf:"bytes,3,opt,name=model_version,json=modelVersion,proto3" json:"model_version,omitempty"`
StartTime *wrapperspb.Int64Value `protobuf:"bytes,4,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
EndTime *wrapperspb.Int64Value `protobuf:"bytes,5,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
}
func (x *ListMetricSummariesRequest) Reset() {
*x = ListMetricSummariesRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[9]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListMetricSummariesRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListMetricSummariesRequest) ProtoMessage() {}
func (x *ListMetricSummariesRequest) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[9]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListMetricSummariesRequest.ProtoReflect.Descriptor instead.
func (*ListMetricSummariesRequest) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{9}
}
func (x *ListMetricSummariesRequest) GetMetricName() *wrapperspb.StringValue {
if x != nil {
return x.MetricName
}
return nil
}
func (x *ListMetricSummariesRequest) GetMetricKey() *wrapperspb.StringValue {
if x != nil {
return x.MetricKey
}
return nil
}
func (x *ListMetricSummariesRequest) GetModelVersion() *wrapperspb.Int32Value {
if x != nil {
return x.ModelVersion
}
return nil
}
func (x *ListMetricSummariesRequest) GetStartTime() *wrapperspb.Int64Value {
if x != nil {
return x.StartTime
}
return nil
}
func (x *ListMetricSummariesRequest) GetEndTime() *wrapperspb.Int64Value {
if x != nil {
return x.EndTime
}
return nil
}
type ListMetricSummariesResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
ReturnCode int64 `protobuf:"varint,1,opt,name=return_code,json=returnCode,proto3" json:"return_code,omitempty"`
ReturnMsg string `protobuf:"bytes,2,opt,name=return_msg,json=returnMsg,proto3" json:"return_msg,omitempty"`
MetricSummaries []*MetricSummaryProto `protobuf:"bytes,3,rep,name=metric_summaries,json=metricSummaries,proto3" json:"metric_summaries,omitempty"`
}
func (x *ListMetricSummariesResponse) Reset() {
*x = ListMetricSummariesResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_metric_service_proto_msgTypes[10]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ListMetricSummariesResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ListMetricSummariesResponse) ProtoMessage() {}
func (x *ListMetricSummariesResponse) ProtoReflect() protoreflect.Message {
mi := &file_metric_service_proto_msgTypes[10]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ListMetricSummariesResponse.ProtoReflect.Descriptor instead.
func (*ListMetricSummariesResponse) Descriptor() ([]byte, []int) {
return file_metric_service_proto_rawDescGZIP(), []int{10}
}
func (x *ListMetricSummariesResponse) GetReturnCode() int64 {
if x != nil {
return x.ReturnCode
}
return 0
}
func (x *ListMetricSummariesResponse) GetReturnMsg() string {
if x != nil {
return x.ReturnMsg
}
return ""
}
func (x *ListMetricSummariesResponse) GetMetricSummaries() []*MetricSummaryProto {
if x != nil {
return x.MetricSummaries
}
return nil
}
var File_metric_service_proto protoreflect.FileDescriptor
var file_metric_service_proto_rawDesc = []byte{
0x0a, 0x14, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x07, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x1a,
0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f,
0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x77,
0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0d, 0x6d,
0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x34, 0x0a, 0x11,
0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6e, 0x61, 0x6d, 0x65,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4e, 0x61,
0x6d, 0x65, 0x22, 0x21, 0x0a, 0x0b, 0x55, 0x75, 0x69, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52,
0x04, 0x75, 0x75, 0x69, 0x64, 0x22, 0x4e, 0x0a, 0x11, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d,
0x65, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x39, 0x0a, 0x0b, 0x6d, 0x65,
0x74, 0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32,
0x18, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63,
0x4d, 0x65, 0x74, 0x61, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x72, 0x69,
0x63, 0x4d, 0x65, 0x74, 0x61, 0x22, 0x8f, 0x01, 0x0a, 0x12, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63,
0x4d, 0x65, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b,
0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
0x03, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x1d, 0x0a,
0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x6d, 0x73, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28,
0x09, 0x52, 0x09, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x4d, 0x73, 0x67, 0x12, 0x39, 0x0a, 0x0b,
0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x18, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72,
0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x0a, 0x6d, 0x65, 0x74,
0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x22, 0x5a, 0x0a, 0x14, 0x4d, 0x65, 0x74, 0x72, 0x69,
0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x42, 0x0a, 0x0e, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72,
0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f,
0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x50,
0x72, 0x6f, 0x74, 0x6f, 0x52, 0x0d, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d,
0x61, 0x72, 0x79, 0x22, 0x9b, 0x01, 0x0a, 0x15, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75,
0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1f, 0x0a,
0x0b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01,
0x28, 0x03, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x43, 0x6f, 0x64, 0x65, 0x12, 0x1d,
0x0a, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x6d, 0x73, 0x67, 0x18, 0x02, 0x20, 0x01,
0x28, 0x09, 0x52, 0x09, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x4d, 0x73, 0x67, 0x12, 0x42, 0x0a,
0x0e, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18,
0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e,
0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x50, 0x72, 0x6f,
0x74, 0x6f, 0x52, 0x0d, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72,
0x79, 0x22, 0x83, 0x01, 0x0a, 0x1d, 0x4c, 0x69, 0x73, 0x74, 0x44, 0x61, 0x74, 0x61, 0x73, 0x65,
0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x73, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x64, 0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x5f, 0x6e,
0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x73,
0x65, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63,
0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53,
0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x6a,
0x65, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x7d, 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x4d,
0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x73, 0x52,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x5f,
0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x6f, 0x64, 0x65,
0x6c, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x3f, 0x0a, 0x0c, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74,
0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74,
0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x6a, 0x65,
0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x96, 0x01, 0x0a, 0x17, 0x4c, 0x69, 0x73, 0x74, 0x4d,
0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x73, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x63, 0x6f, 0x64,
0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x43,
0x6f, 0x64, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x6d, 0x73,
0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x4d,
0x73, 0x67, 0x12, 0x3b, 0x0a, 0x0c, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c,
0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x50, 0x72, 0x6f,
0x74, 0x6f, 0x52, 0x0b, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x73, 0x22,
0xce, 0x02, 0x0a, 0x1a, 0x4c, 0x69, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75,
0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x3d,
0x0a, 0x0b, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75,
0x65, 0x52, 0x0a, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x3b, 0x0a,
0x0a, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x1c, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x62, 0x75, 0x66, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52,
0x09, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x40, 0x0a, 0x0d, 0x6d, 0x6f,
0x64, 0x65, 0x6c, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x62, 0x75, 0x66, 0x2e, 0x49, 0x6e, 0x74, 0x33, 0x32, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0c,
0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3a, 0x0a, 0x0a,
0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b,
0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62,
0x75, 0x66, 0x2e, 0x49, 0x6e, 0x74, 0x36, 0x34, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, 0x73,
0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x36, 0x0a, 0x08, 0x65, 0x6e, 0x64, 0x5f,
0x74, 0x69, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x49, 0x6e, 0x74,
0x36, 0x34, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d, 0x65,
0x22, 0xa5, 0x01, 0x0a, 0x1b, 0x4c, 0x69, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53,
0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x63, 0x6f, 0x64, 0x65, 0x18,
0x01, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x43, 0x6f, 0x64,
0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x5f, 0x6d, 0x73, 0x67, 0x18,
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x72, 0x65, 0x74, 0x75, 0x72, 0x6e, 0x4d, 0x73, 0x67,
0x12, 0x46, 0x0a, 0x10, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61,
0x72, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x61, 0x69, 0x5f,
0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61,
0x72, 0x79, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x52, 0x0f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53,
0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x32, 0x97, 0x0b, 0x0a, 0x0d, 0x4d, 0x65, 0x74,
0x72, 0x69, 0x63, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x7b, 0x0a, 0x12, 0x72, 0x65,
0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61,
0x12, 0x1a, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69,
0x63, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x61,
0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74,
0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2c, 0x82, 0xd3, 0xe4, 0x93, 0x02,
0x26, 0x22, 0x21, 0x2f, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69,
0x63, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x2f, 0x63, 0x72,
0x65, 0x61, 0x74, 0x65, 0x3a, 0x01, 0x2a, 0x12, 0x79, 0x0a, 0x10, 0x75, 0x70, 0x64, 0x61, 0x74,
0x65, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x1a, 0x2e, 0x61, 0x69,
0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61,
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f,
0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x26, 0x22, 0x21, 0x2f, 0x61,
0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2f, 0x6d, 0x65, 0x74,
0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x2f, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3a,
0x01, 0x2a, 0x12, 0x6f, 0x0a, 0x10, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x72,
0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x1a, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77,
0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65,
0x73, 0x74, 0x1a, 0x11, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2c, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x26, 0x22, 0x21, 0x2f,
0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2f, 0x6d, 0x65,
0x74, 0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x2f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65,
0x3a, 0x01, 0x2a, 0x12, 0x70, 0x0a, 0x0d, 0x67, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63,
0x4d, 0x65, 0x74, 0x61, 0x12, 0x1a, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d,
0x65, 0x74, 0x72, 0x69, 0x63, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x1a, 0x1b, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69,
0x63, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x26, 0x82,
0xd3, 0xe4, 0x93, 0x02, 0x20, 0x12, 0x1e, 0x2f, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d,
0x65, 0x74, 0x72, 0x69, 0x63, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74,
0x61, 0x2f, 0x67, 0x65, 0x74, 0x12, 0x93, 0x01, 0x0a, 0x16, 0x6c, 0x69, 0x73, 0x74, 0x44, 0x61,
0x74, 0x61, 0x73, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61, 0x73,
0x12, 0x26, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x44,
0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74, 0x61,
0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c,
0x6f, 0x77, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65, 0x74,
0x61, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2f, 0x82, 0xd3, 0xe4, 0x93,
0x02, 0x29, 0x12, 0x27, 0x2f, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65, 0x74, 0x72,
0x69, 0x63, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74, 0x61, 0x2f, 0x64,
0x61, 0x74, 0x61, 0x73, 0x65, 0x74, 0x2f, 0x6c, 0x69, 0x73, 0x74, 0x12, 0x8d, 0x01, 0x0a, 0x14,
0x6c, 0x69, 0x73, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d,
0x65, 0x74, 0x61, 0x73, 0x12, 0x24, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4c,
0x69, 0x73, 0x74, 0x4d, 0x6f, 0x64, 0x65, 0x6c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d, 0x65,
0x74, 0x61, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x20, 0x2e, 0x61, 0x69, 0x5f,
0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x4d,
0x65, 0x74, 0x61, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2d, 0x82, 0xd3,
0xe4, 0x93, 0x02, 0x27, 0x12, 0x25, 0x2f, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65,
0x74, 0x72, 0x69, 0x63, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x6d, 0x65, 0x74, 0x61,
0x2f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2f, 0x6c, 0x69, 0x73, 0x74, 0x12, 0x87, 0x01, 0x0a, 0x15,
0x72, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75,
0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x1d, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e,
0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d,
0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x29, 0x22, 0x24, 0x2f, 0x61,
0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2f, 0x6d, 0x65, 0x74,
0x72, 0x69, 0x63, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x2f, 0x63, 0x72, 0x65, 0x61,
0x74, 0x65, 0x3a, 0x01, 0x2a, 0x12, 0x85, 0x01, 0x0a, 0x13, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65,
0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x1d, 0x2e,
0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75,
0x6d, 0x6d, 0x61, 0x72, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61,
0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d,
0x6d, 0x61, 0x72, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2f, 0x82, 0xd3,
0xe4, 0x93, 0x02, 0x29, 0x22, 0x24, 0x2f, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65,
0x74, 0x72, 0x69, 0x63, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x75, 0x6d, 0x6d,
0x61, 0x72, 0x79, 0x2f, 0x75, 0x70, 0x64, 0x61, 0x74, 0x65, 0x3a, 0x01, 0x2a, 0x12, 0x6f, 0x0a,
0x13, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d,
0x6d, 0x61, 0x72, 0x79, 0x12, 0x14, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x55,
0x75, 0x69, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x11, 0x2e, 0x61, 0x69, 0x5f,
0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2f, 0x82,
0xd3, 0xe4, 0x93, 0x02, 0x29, 0x22, 0x24, 0x2f, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d,
0x65, 0x74, 0x72, 0x69, 0x63, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x75, 0x6d,
0x6d, 0x61, 0x72, 0x79, 0x2f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x3a, 0x01, 0x2a, 0x12, 0x73,
0x0a, 0x10, 0x67, 0x65, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61,
0x72, 0x79, 0x12, 0x14, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x55, 0x75, 0x69,
0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c,
0x6f, 0x77, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23,
0x12, 0x21, 0x2f, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63,
0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x2f,
0x67, 0x65, 0x74, 0x12, 0x8c, 0x01, 0x0a, 0x13, 0x6c, 0x69, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72,
0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x12, 0x23, 0x2e, 0x61, 0x69,
0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63,
0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
0x1a, 0x24, 0x2e, 0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x4d,
0x65, 0x74, 0x72, 0x69, 0x63, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x69, 0x65, 0x73, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x2a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x24, 0x12, 0x22,
0x2f, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77, 0x2f, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2f, 0x6d,
0x65, 0x74, 0x72, 0x69, 0x63, 0x5f, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x2f, 0x6c, 0x69,
0x73, 0x74, 0x42, 0x29, 0x0a, 0x17, 0x6f, 0x72, 0x67, 0x2e, 0x61, 0x69, 0x66, 0x6c, 0x6f, 0x77,
0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x5a, 0x08, 0x2f,
0x61, 0x69, 0x5f, 0x66, 0x6c, 0x6f, 0x77, 0x88, 0x01, 0x01, 0x90, 0x01, 0x01, 0x62, 0x06, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_metric_service_proto_rawDescOnce sync.Once
file_metric_service_proto_rawDescData = file_metric_service_proto_rawDesc
)
func file_metric_service_proto_rawDescGZIP() []byte {
file_metric_service_proto_rawDescOnce.Do(func() {
file_metric_service_proto_rawDescData = protoimpl.X.CompressGZIP(file_metric_service_proto_rawDescData)
})
return file_metric_service_proto_rawDescData
}
var file_metric_service_proto_msgTypes = make([]protoimpl.MessageInfo, 11)
var file_metric_service_proto_goTypes = []interface{}{
(*MetricNameRequest)(nil), // 0: ai_flow.MetricNameRequest
(*UuidRequest)(nil), // 1: ai_flow.UuidRequest
(*MetricMetaRequest)(nil), // 2: ai_flow.MetricMetaRequest
(*MetricMetaResponse)(nil), // 3: ai_flow.MetricMetaResponse
(*MetricSummaryRequest)(nil), // 4: ai_flow.MetricSummaryRequest
(*MetricSummaryResponse)(nil), // 5: ai_flow.MetricSummaryResponse
(*ListDatasetMetricMetasRequest)(nil), // 6: ai_flow.ListDatasetMetricMetasRequest
(*ListModelMetricMetasRequest)(nil), // 7: ai_flow.ListModelMetricMetasRequest
(*ListMetricMetasResponse)(nil), // 8: ai_flow.ListMetricMetasResponse
(*ListMetricSummariesRequest)(nil), // 9: ai_flow.ListMetricSummariesRequest
(*ListMetricSummariesResponse)(nil), // 10: ai_flow.ListMetricSummariesResponse
(*MetricMetaProto)(nil), // 11: ai_flow.MetricMetaProto
(*MetricSummaryProto)(nil), // 12: ai_flow.MetricSummaryProto
(*wrapperspb.StringValue)(nil), // 13: google.protobuf.StringValue
(*wrapperspb.Int32Value)(nil), // 14: google.protobuf.Int32Value
(*wrapperspb.Int64Value)(nil), // 15: google.protobuf.Int64Value
(*Response)(nil), // 16: ai_flow.Response
}
var file_metric_service_proto_depIdxs = []int32{
11, // 0: ai_flow.MetricMetaRequest.metric_meta:type_name -> ai_flow.MetricMetaProto
11, // 1: ai_flow.MetricMetaResponse.metric_meta:type_name -> ai_flow.MetricMetaProto
12, // 2: ai_flow.MetricSummaryRequest.metric_summary:type_name -> ai_flow.MetricSummaryProto
12, // 3: ai_flow.MetricSummaryResponse.metric_summary:type_name -> ai_flow.MetricSummaryProto
13, // 4: ai_flow.ListDatasetMetricMetasRequest.project_name:type_name -> google.protobuf.StringValue
13, // 5: ai_flow.ListModelMetricMetasRequest.project_name:type_name -> google.protobuf.StringValue
11, // 6: ai_flow.ListMetricMetasResponse.metric_metas:type_name -> ai_flow.MetricMetaProto
13, // 7: ai_flow.ListMetricSummariesRequest.metric_name:type_name -> google.protobuf.StringValue
13, // 8: ai_flow.ListMetricSummariesRequest.metric_key:type_name -> google.protobuf.StringValue
14, // 9: ai_flow.ListMetricSummariesRequest.model_version:type_name -> google.protobuf.Int32Value
15, // 10: ai_flow.ListMetricSummariesRequest.start_time:type_name -> google.protobuf.Int64Value
15, // 11: ai_flow.ListMetricSummariesRequest.end_time:type_name -> google.protobuf.Int64Value
12, // 12: ai_flow.ListMetricSummariesResponse.metric_summaries:type_name -> ai_flow.MetricSummaryProto
2, // 13: ai_flow.MetricService.registerMetricMeta:input_type -> ai_flow.MetricMetaRequest
2, // 14: ai_flow.MetricService.updateMetricMeta:input_type -> ai_flow.MetricMetaRequest
0, // 15: ai_flow.MetricService.deleteMetricMeta:input_type -> ai_flow.MetricNameRequest
0, // 16: ai_flow.MetricService.getMetricMeta:input_type -> ai_flow.MetricNameRequest
6, // 17: ai_flow.MetricService.listDatasetMetricMetas:input_type -> ai_flow.ListDatasetMetricMetasRequest
7, // 18: ai_flow.MetricService.listModelMetricMetas:input_type -> ai_flow.ListModelMetricMetasRequest
4, // 19: ai_flow.MetricService.registerMetricSummary:input_type -> ai_flow.MetricSummaryRequest
4, // 20: ai_flow.MetricService.updateMetricSummary:input_type -> ai_flow.MetricSummaryRequest
1, // 21: ai_flow.MetricService.deleteMetricSummary:input_type -> ai_flow.UuidRequest
1, // 22: ai_flow.MetricService.getMetricSummary:input_type -> ai_flow.UuidRequest
9, // 23: ai_flow.MetricService.listMetricSummaries:input_type -> ai_flow.ListMetricSummariesRequest
3, // 24: ai_flow.MetricService.registerMetricMeta:output_type -> ai_flow.MetricMetaResponse
3, // 25: ai_flow.MetricService.updateMetricMeta:output_type -> ai_flow.MetricMetaResponse
16, // 26: ai_flow.MetricService.deleteMetricMeta:output_type -> ai_flow.Response
3, // 27: ai_flow.MetricService.getMetricMeta:output_type -> ai_flow.MetricMetaResponse
8, // 28: ai_flow.MetricService.listDatasetMetricMetas:output_type -> ai_flow.ListMetricMetasResponse
8, // 29: ai_flow.MetricService.listModelMetricMetas:output_type -> ai_flow.ListMetricMetasResponse
5, // 30: ai_flow.MetricService.registerMetricSummary:output_type -> ai_flow.MetricSummaryResponse
5, // 31: ai_flow.MetricService.updateMetricSummary:output_type -> ai_flow.MetricSummaryResponse
16, // 32: ai_flow.MetricService.deleteMetricSummary:output_type -> ai_flow.Response
5, // 33: ai_flow.MetricService.getMetricSummary:output_type -> ai_flow.MetricSummaryResponse
10, // 34: ai_flow.MetricService.listMetricSummaries:output_type -> ai_flow.ListMetricSummariesResponse
24, // [24:35] is the sub-list for method output_type
13, // [13:24] is the sub-list for method input_type
13, // [13:13] is the sub-list for extension type_name
13, // [13:13] is the sub-list for extension extendee
0, // [0:13] is the sub-list for field type_name
}
func init() { file_metric_service_proto_init() }
func file_metric_service_proto_init() {
if File_metric_service_proto != nil {
return
}
file_message_proto_init()
if !protoimpl.UnsafeEnabled {
file_metric_service_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MetricNameRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*UuidRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MetricMetaRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MetricMetaResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MetricSummaryRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*MetricSummaryResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListDatasetMetricMetasRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListModelMetricMetasRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListMetricMetasResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListMetricSummariesRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_metric_service_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ListMetricSummariesResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_metric_service_proto_rawDesc,
NumEnums: 0,
NumMessages: 11,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_metric_service_proto_goTypes,
DependencyIndexes: file_metric_service_proto_depIdxs,
MessageInfos: file_metric_service_proto_msgTypes,
}.Build()
File_metric_service_proto = out.File
file_metric_service_proto_rawDesc = nil
file_metric_service_proto_goTypes = nil
file_metric_service_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// MetricServiceClient is the client API for MetricService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type MetricServiceClient interface {
RegisterMetricMeta(ctx context.Context, in *MetricMetaRequest, opts ...grpc.CallOption) (*MetricMetaResponse, error)
UpdateMetricMeta(ctx context.Context, in *MetricMetaRequest, opts ...grpc.CallOption) (*MetricMetaResponse, error)
DeleteMetricMeta(ctx context.Context, in *MetricNameRequest, opts ...grpc.CallOption) (*Response, error)
GetMetricMeta(ctx context.Context, in *MetricNameRequest, opts ...grpc.CallOption) (*MetricMetaResponse, error)
ListDatasetMetricMetas(ctx context.Context, in *ListDatasetMetricMetasRequest, opts ...grpc.CallOption) (*ListMetricMetasResponse, error)
ListModelMetricMetas(ctx context.Context, in *ListModelMetricMetasRequest, opts ...grpc.CallOption) (*ListMetricMetasResponse, error)
RegisterMetricSummary(ctx context.Context, in *MetricSummaryRequest, opts ...grpc.CallOption) (*MetricSummaryResponse, error)
UpdateMetricSummary(ctx context.Context, in *MetricSummaryRequest, opts ...grpc.CallOption) (*MetricSummaryResponse, error)
DeleteMetricSummary(ctx context.Context, in *UuidRequest, opts ...grpc.CallOption) (*Response, error)
GetMetricSummary(ctx context.Context, in *UuidRequest, opts ...grpc.CallOption) (*MetricSummaryResponse, error)
ListMetricSummaries(ctx context.Context, in *ListMetricSummariesRequest, opts ...grpc.CallOption) (*ListMetricSummariesResponse, error)
}
type metricServiceClient struct {
cc grpc.ClientConnInterface
}
func NewMetricServiceClient(cc grpc.ClientConnInterface) MetricServiceClient {
return &metricServiceClient{cc}
}
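// Example usage (a sketch, not generated code; the target address, the
// insecure credential choice, and the log import are assumptions):
//
//	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
//	if err != nil {
//		log.Fatalf("dial: %v", err)
//	}
//	defer conn.Close()
//	client := NewMetricServiceClient(conn)
//	resp, err := client.ListMetricSummaries(context.Background(), &ListMetricSummariesRequest{})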
func (c *metricServiceClient) RegisterMetricMeta(ctx context.Context, in *MetricMetaRequest, opts ...grpc.CallOption) (*MetricMetaResponse, error) {
out := new(MetricMetaResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/registerMetricMeta", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) UpdateMetricMeta(ctx context.Context, in *MetricMetaRequest, opts ...grpc.CallOption) (*MetricMetaResponse, error) {
out := new(MetricMetaResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/updateMetricMeta", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) DeleteMetricMeta(ctx context.Context, in *MetricNameRequest, opts ...grpc.CallOption) (*Response, error) {
out := new(Response)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/deleteMetricMeta", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) GetMetricMeta(ctx context.Context, in *MetricNameRequest, opts ...grpc.CallOption) (*MetricMetaResponse, error) {
out := new(MetricMetaResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/getMetricMeta", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) ListDatasetMetricMetas(ctx context.Context, in *ListDatasetMetricMetasRequest, opts ...grpc.CallOption) (*ListMetricMetasResponse, error) {
out := new(ListMetricMetasResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/listDatasetMetricMetas", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) ListModelMetricMetas(ctx context.Context, in *ListModelMetricMetasRequest, opts ...grpc.CallOption) (*ListMetricMetasResponse, error) {
out := new(ListMetricMetasResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/listModelMetricMetas", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) RegisterMetricSummary(ctx context.Context, in *MetricSummaryRequest, opts ...grpc.CallOption) (*MetricSummaryResponse, error) {
out := new(MetricSummaryResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/registerMetricSummary", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) UpdateMetricSummary(ctx context.Context, in *MetricSummaryRequest, opts ...grpc.CallOption) (*MetricSummaryResponse, error) {
out := new(MetricSummaryResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/updateMetricSummary", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) DeleteMetricSummary(ctx context.Context, in *UuidRequest, opts ...grpc.CallOption) (*Response, error) {
out := new(Response)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/deleteMetricSummary", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) GetMetricSummary(ctx context.Context, in *UuidRequest, opts ...grpc.CallOption) (*MetricSummaryResponse, error) {
out := new(MetricSummaryResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/getMetricSummary", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *metricServiceClient) ListMetricSummaries(ctx context.Context, in *ListMetricSummariesRequest, opts ...grpc.CallOption) (*ListMetricSummariesResponse, error) {
out := new(ListMetricSummariesResponse)
err := c.cc.Invoke(ctx, "/ai_flow.MetricService/listMetricSummaries", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// MetricServiceServer is the server API for MetricService service.
type MetricServiceServer interface {
RegisterMetricMeta(context.Context, *MetricMetaRequest) (*MetricMetaResponse, error)
UpdateMetricMeta(context.Context, *MetricMetaRequest) (*MetricMetaResponse, error)
DeleteMetricMeta(context.Context, *MetricNameRequest) (*Response, error)
GetMetricMeta(context.Context, *MetricNameRequest) (*MetricMetaResponse, error)
ListDatasetMetricMetas(context.Context, *ListDatasetMetricMetasRequest) (*ListMetricMetasResponse, error)
ListModelMetricMetas(context.Context, *ListModelMetricMetasRequest) (*ListMetricMetasResponse, error)
RegisterMetricSummary(context.Context, *MetricSummaryRequest) (*MetricSummaryResponse, error)
UpdateMetricSummary(context.Context, *MetricSummaryRequest) (*MetricSummaryResponse, error)
DeleteMetricSummary(context.Context, *UuidRequest) (*Response, error)
GetMetricSummary(context.Context, *UuidRequest) (*MetricSummaryResponse, error)
ListMetricSummaries(context.Context, *ListMetricSummariesRequest) (*ListMetricSummariesResponse, error)
}
// UnimplementedMetricServiceServer can be embedded to have forward compatible implementations.
type UnimplementedMetricServiceServer struct {
}
func (*UnimplementedMetricServiceServer) RegisterMetricMeta(context.Context, *MetricMetaRequest) (*MetricMetaResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method RegisterMetricMeta not implemented")
}
func (*UnimplementedMetricServiceServer) UpdateMetricMeta(context.Context, *MetricMetaRequest) (*MetricMetaResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method UpdateMetricMeta not implemented")
}
func (*UnimplementedMetricServiceServer) DeleteMetricMeta(context.Context, *MetricNameRequest) (*Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method DeleteMetricMeta not implemented")
}
func (*UnimplementedMetricServiceServer) GetMetricMeta(context.Context, *MetricNameRequest) (*MetricMetaResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetMetricMeta not implemented")
}
func (*UnimplementedMetricServiceServer) ListDatasetMetricMetas(context.Context, *ListDatasetMetricMetasRequest) (*ListMetricMetasResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListDatasetMetricMetas not implemented")
}
func (*UnimplementedMetricServiceServer) ListModelMetricMetas(context.Context, *ListModelMetricMetasRequest) (*ListMetricMetasResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListModelMetricMetas not implemented")
}
func (*UnimplementedMetricServiceServer) RegisterMetricSummary(context.Context, *MetricSummaryRequest) (*MetricSummaryResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method RegisterMetricSummary not implemented")
}
func (*UnimplementedMetricServiceServer) UpdateMetricSummary(context.Context, *MetricSummaryRequest) (*MetricSummaryResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method UpdateMetricSummary not implemented")
}
func (*UnimplementedMetricServiceServer) DeleteMetricSummary(context.Context, *UuidRequest) (*Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method DeleteMetricSummary not implemented")
}
func (*UnimplementedMetricServiceServer) GetMetricSummary(context.Context, *UuidRequest) (*MetricSummaryResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetMetricSummary not implemented")
}
func (*UnimplementedMetricServiceServer) ListMetricSummaries(context.Context, *ListMetricSummariesRequest) (*ListMetricSummariesResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListMetricSummaries not implemented")
}
func RegisterMetricServiceServer(s *grpc.Server, srv MetricServiceServer) {
s.RegisterService(&_MetricService_serviceDesc, srv)
}
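// Example server wiring (a sketch; the listener address and struct name are
// assumptions). Embedding UnimplementedMetricServiceServer keeps an
// implementation forward compatible, returning codes.Unimplemented for any
// RPC it does not override:
//
//	type metricServer struct {
//		UnimplementedMetricServiceServer
//	}
//
//	lis, err := net.Listen("tcp", ":50051")
//	if err != nil {
//		log.Fatalf("listen: %v", err)
//	}
//	s := grpc.NewServer()
//	RegisterMetricServiceServer(s, &metricServer{})
//	_ = s.Serve(lis)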
func _MetricService_RegisterMetricMeta_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(MetricMetaRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).RegisterMetricMeta(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/RegisterMetricMeta",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).RegisterMetricMeta(ctx, req.(*MetricMetaRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_UpdateMetricMeta_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(MetricMetaRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).UpdateMetricMeta(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/UpdateMetricMeta",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).UpdateMetricMeta(ctx, req.(*MetricMetaRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_DeleteMetricMeta_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(MetricNameRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).DeleteMetricMeta(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/DeleteMetricMeta",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).DeleteMetricMeta(ctx, req.(*MetricNameRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_GetMetricMeta_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(MetricNameRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).GetMetricMeta(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/GetMetricMeta",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).GetMetricMeta(ctx, req.(*MetricNameRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_ListDatasetMetricMetas_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListDatasetMetricMetasRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).ListDatasetMetricMetas(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/ListDatasetMetricMetas",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).ListDatasetMetricMetas(ctx, req.(*ListDatasetMetricMetasRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_ListModelMetricMetas_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ListModelMetricMetasRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).ListModelMetricMetas(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/ListModelMetricMetas",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).ListModelMetricMetas(ctx, req.(*ListModelMetricMetasRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_RegisterMetricSummary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(MetricSummaryRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).RegisterMetricSummary(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/RegisterMetricSummary",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).RegisterMetricSummary(ctx, req.(*MetricSummaryRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_UpdateMetricSummary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(MetricSummaryRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).UpdateMetricSummary(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/UpdateMetricSummary",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).UpdateMetricSummary(ctx, req.(*MetricSummaryRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_DeleteMetricSummary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(UuidRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).DeleteMetricSummary(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/DeleteMetricSummary",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).DeleteMetricSummary(ctx, req.(*UuidRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_GetMetricSummary_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(UuidRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).GetMetricSummary(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/GetMetricSummary",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).GetMetricSummary(ctx, req.(*UuidRequest))
}
return interceptor(ctx, in, info, handler)
}
func _MetricService_ListMetricSummaries_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) |
var _MetricService_serviceDesc = grpc.ServiceDesc{
ServiceName: "ai_flow.MetricService",
HandlerType: (*MetricServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "registerMetricMeta",
Handler: _MetricService_RegisterMetricMeta_Handler,
},
{
MethodName: "updateMetricMeta",
Handler: _MetricService_UpdateMetricMeta_Handler,
},
{
MethodName: "deleteMetricMeta",
Handler: _MetricService_DeleteMetricMeta_Handler,
},
{
MethodName: "getMetricMeta",
Handler: _MetricService_GetMetricMeta_Handler,
},
{
MethodName: "listDatasetMetricMetas",
Handler: _MetricService_ListDatasetMetricMetas_Handler,
},
{
MethodName: "listModelMetricMetas",
Handler: _MetricService_ListModelMetricMetas_Handler,
},
{
MethodName: "registerMetricSummary",
Handler: _MetricService_RegisterMetricSummary_Handler,
},
{
MethodName: "updateMetricSummary",
Handler: _MetricService_UpdateMetricSummary_Handler,
},
{
MethodName: "deleteMetricSummary",
Handler: _MetricService_DeleteMetricSummary_Handler,
},
{
MethodName: "getMetricSummary",
Handler: _MetricService_GetMetricSummary_Handler,
},
{
MethodName: "listMetricSummaries",
Handler: _MetricService_ListMetricSummaries_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "metric_service.proto",
}
| {
in := new(ListMetricSummariesRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(MetricServiceServer).ListMetricSummaries(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/ai_flow.MetricService/ListMetricSummaries",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(MetricServiceServer).ListMetricSummaries(ctx, req.(*ListMetricSummariesRequest))
}
return interceptor(ctx, in, info, handler)
} |
main.py | import sys
import time
import cv2
import numpy as np
def scenedetect(cap, threshold=30, min_scene_len=15): | curr = 0
while True:
ret, im = cap.read()
if not ret:
break
curr_hsv = im[::downscale_factor, ::downscale_factor]
curr_hsv = cv2.cvtColor(curr_hsv, cv2.COLOR_BGR2HSV)
curr_hsv = curr_hsv.astype('int32')
if last_hsv is not None:
delta_hsv = np.mean(np.abs(curr_hsv - last_hsv))
if delta_hsv >= threshold and curr - first >= min_scene_len:
yield first, curr, delta_hsv
first = curr
last_hsv = curr_hsv
curr += 1
yield first, curr, 0
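# scenedetect yields (first_frame, end_frame, delta_hsv) per scene, where
# delta_hsv is the mean absolute HSV difference between downscaled
# consecutive frames; a cut requires delta_hsv >= threshold and at least
# min_scene_len frames since the previous cut.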
fn = 'video.rmvb'
cap = cv2.VideoCapture(fn)
start = time.time()
for first, last, delta_hsv in scenedetect(cap):
print(first, last, delta_hsv)
print(time.time() - start)
cap.release() | w = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
    downscale_factor = max(int(w / 200), 1)  # never 0: a zero slice step would raise ValueError for widths under 200px
last_hsv = None
first = 0 |
index.d_20211014153554.ts |
export declare class NativescriptUuid extends NativescriptUuidCommon {} | import { NativescriptUuidCommon } from './common';
|
app.py | from game_screen import *
class App:
def __init__(self, width, height):
self.width = width
self.height = height
self.window = pygame.display.set_mode((self.width, self.height))
pygame.display.set_caption('VersuSpace')
self.running = True
self.quit_listener = AppQuitListener(self)
self.menu_screen = MenuScreen(self)
self.game_screen = GameScreen(self)
self.coming_soon_screen = ComingSoonScreen(self)
self.instruction_screen = InstructionScreen(self)
self.current_screen = self.menu_screen
self.clock = pygame.time.Clock()
self.fps = 60
def run(self):
while self.running:
self.current_screen.handler.handle_events()
self.current_screen.step(1 / self.fps)
self.current_screen.draw(self.window)
pygame.display.update()
self.clock.tick(self.fps)
def | (self):
self.running = False
def display_coming_soon(self):
self.current_screen = self.coming_soon_screen
def display_menu(self):
self.current_screen = self.menu_screen
def display_instructions(self):
self.current_screen = self.instruction_screen
def start_game(self):
self.current_screen = self.game_screen
| stop |
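# Usage sketch (the window size is an assumption; pygame.init() is presumed
# to happen in game_screen's imports):
#   app = App(800, 600)
#   app.run()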
always.py | # ---------------------------------------------------------------------
# Gufo Err: AlwaysFailFast
# ---------------------------------------------------------------------
# Copyright (C) 2022, Gufo Labs
# ---------------------------------------------------------------------
# Python modules
from typing import Type
from types import TracebackType
# Gufo Labs modules
from ..abc.failfast import BaseFailFast
class AlwaysFailFast(BaseFailFast):
"""
Always fail-fast. Trigger fail-fast unconditionally.
"""
def must_die(
self,
t: Type[BaseException],
v: BaseException,
tb: TracebackType,
) -> bool:
| return True |
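# Usage sketch: the err.setup() entry point and fail_fast keyword below are
# assumptions about the Gufo Err API, not taken from this file.
#   from gufo.err import err
#   err.setup(fail_fast=[AlwaysFailFast()])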
|
jisgou3.js | var Meter3=(function(){var e={styles:{sAngle:0.93,eAngle:2.07,area:{radius:30,colors:{"0":"#1266BC","0.15":"#67C6F2","0.27":"#45F5E6","0.75":"#FFDE00","0.93":"#F5694B","1":"#FF0202"},lineWidth:1,scaleLength:9,scaleWidth:0.2,lineColor:"#fff"},range:{color:"#F4674B",width:2,arrow:{height:15,radius:4}},value:{margin:-50,color:"#F4674B",font:"bold 52px Microsoft YaHei"},title:{margin:-5,color:"#F4674B",font:"bold 20px Microsoft YaHei"},subTitle:{margin:25,color:"#999",font:"14px Microsoft YaHei"},label:{radius:28,color:"#aaa",background:"#f5f5f5",font:"12px Microsoft YaHei"},inner:{radius:97,color:"#999",dashedWidth:3}}};var b,d,j,g,k,m,l,r,s,o,a,u;var v=function(C,B){for(var A in B){if(C.hasOwnProperty(A)&&typeof C[A]=="object"){v(C[A],B[A])}else{C[A]=B[A]}}};var c=function(B,A){return{x:e.centerPoint.x+B*Math.cos(Math.PI*A),y:e.centerPoint.y+B*Math.sin(Math.PI*A)}};var x=function(F){var E=e.data.area,A=E.length-1;for(var B=A;B>=0;B--){if(F>=E[B].min&&F<E[B].max){A=B}}var D=(k-g)/E.length,C=D*A+g,G=D*(A+1)+g,H=E[A];return{range:(F-H.min)/(H.max-H.min)*(G-C)+C,index:A}};var f=function(D,B){var A=D.x||e.centerPoint.x,F=D.y||e.centerPoint.y,C=D.start||0,E=D.end||2;d.beginPath();d.moveTo(A,F);switch(B){case 1:d.setLineDash&&d.setLineDash([u.dashedWidth]);case 2:d.arc(A,F,D.r,Math.PI*C,Math.PI*E);d.closePath();d.strokeStyle=D.style;d.stroke();break;default:d.arc(A,F,D.r,Math.PI*C,Math.PI*E);d.closePath();d.fillStyle=D.style;d.fill();break}};var w=function(){var B=d.createLinearGradient(0,0,e.radius*2,0);for(var A in m.colors){B.addColorStop(A,m.colors[A])}f({r:e.radius,start:g,end:k,style:B});f({r:e.radius-m.radius,style:"#fff"})};var q=function(B){var A=e.radius-m.radius;f({r:A,start:g,end:B.range,style:a.background});f({r:A-a.radius,start:g,end:B.range,style:l.color});f({r:A-a.radius-l.width,style:"#fff"})};var t=function(A){d.font=A.font;d.fillStyle=A.color;d.textAlign=A.align||"center";d.textBaseline=A.vertical||"middle";d.moveTo(A.x,A.y);d.fillText(A.text,A.x,A.y)};var p=function(C,B){f({r:u.radius,start:g,end:k,style:u.color},1);f({r:u.radius-1,style:"#fff"});var A=e.data;t({font:r.font,color:r.color,text:B,x:e.radius,y:e.radius+r.margin});t({font:s.font,color:s.color,text:A.title.replace("{t}",A.area[C.index].text).replace("{v}",B),x:e.radius,y:e.radius+s.margin});t({font:o.font,color:o.color,text:A.subTitle,x:e.radius,y:e.radius+o.margin})};var i=function(E){var D=e.radius-m.radius-a.radius,F=c(D,E.range),B=F.x-1,H=F.y+0.5;f({x:B,y:H,r:l.arrow.radius,style:l.color});var C=c(D-l.arrow.height,E.range),A=c(D,E.range-0.01),G=c(D,E.range+0.01);d.beginPath();d.moveTo(C.x-1,C.y+0.5);d.lineTo(A.x-1,A.y+0.5);d.lineTo(G.x-1,G.y+0.5);d.closePath();d.fillStyle=l.color;d.fill();f({x:B,y:H,r:l.arrow.radius-l.width,style:"#fff"})};var n=function(A){d.beginPath();d.moveTo(A.start.x,A.start.y);d.lineTo(A.end.x,A.end.y);d.closePath();d.strokeStyle=A.style;d.lineWidth=A.width||1;d.stroke()};var h=function(){var D=m.scaleLength,G=e.data.area,K=D*G.length,I=(k-g)/K;for(var F=1;F<K;F++){n({start:c(e.radius,g+I*F),end:c(e.radius-m.radius,g+I*F),style:m.lineColor,width:F%D==0?m.lineWidth:m.scaleWidth})}var M=[];for(var H=0;H<G.length;H++){var C=G[H];if(M.join("").indexOf(C.min)==-1){M.push(C.min)}M.push(C.text);M.push(C.max)}var A=M.length-1,B=(k-g)/A,L=a,N=e.radius-m.radius-L.radius/2;for(var E=0;E<=A;E++){var J=c(N,g+B*E);L.x=J.x;L.y=J.y;L.text=M[E];t(L)}};var z=function(A,B){var D=e.data.value,C=e.data.area[0].min;var 
E=setInterval(function(){d.clearRect(0,0,A,B);d.fillStyle="#fff";d.fillRect(0,0,A,B);C=C+10>D?D:C+10;var F=x(C);w();q(F);p(F,C);i(F);h();if(C===D){clearInterval(E)}},10)};var y={};y.setOptions=function(A){v(e,A);j=e.styles;g=j.sAngle;k=j.eAngle;m=j.area;l=j.range;r=j.value;s=j.title;o=j.subTitle;a=j.label;u=j.inner;b=typeof e.element=="string"?document.getElementById(e.element):e.element;d=b.getContext("2d");return y};y.init=function(){z(b.offsetWidth,b.offsetHeight);return y};return y})(); |
||
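// Usage sketch inferred from the option reads above (the element id and all
// values are assumptions):
//   Meter3.setOptions({
//     element: 'meter', radius: 150, centerPoint: { x: 150, y: 150 },
//     data: { value: 72, title: '{t} ({v})', subTitle: 'score',
//             area: [{ min: 0, max: 60, text: 'low' }, { min: 60, max: 100, text: 'high' }] }
//   }).init();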
spec.rs | /*
* Copyright 2020 Nuclei Studio OÜ
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use super::mock::*;
use crate::{Conviction, Error, Locks, Proposals};
use frame_support::{assert_noop, assert_ok, StorageMap};
use governance_os_support::{
testing::{primitives::AccountId, ALICE, BOB, TEST_TOKEN_ID},
traits::{LockableCurrencies, ProposalResult, StandardizedVoting},
};
use sp_core::H256;
#[test]
fn initialize_registers_proposal_hash() {
ExtBuilder::default().build().execute_with(|| {
let mock_hash = H256::default();
        // Make sure it does not exist by default
assert!(!Proposals::<Test>::contains_key(mock_hash));
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
Default::default()
));
assert!(Proposals::<Test>::contains_key(mock_hash));
})
}
#[test]
fn initiate_logs_block_number() {
ExtBuilder::default().build().execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
Default::default()
));
assert_eq!(
ConvictionVoting::proposals(mock_hash).created_on,
ConvictionVoting::now()
);
})
}
#[test]
fn vote_lock_tokens() { |
macro_rules! test_vote_edit {
($function:tt, $support:ident, $var:tt) => {
#[test]
fn $function() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: $support,
power: 10
}
));
assert_eq!(ConvictionVoting::proposals(mock_hash).$var, 10);
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: $support,
power: 15
}
));
assert_eq!(
ConvictionVoting::proposals(mock_hash).convictions,
vec![(
ALICE,
1,
Conviction {
in_support: $support,
power: 15
}
)]
);
assert_eq!(ConvictionVoting::proposals(mock_hash).$var, 15);
assert_eq!(
ConvictionVoting::locks((TEST_TOKEN_ID, &ALICE)),
vec![(mock_hash, $support, 15)]
);
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(
TEST_TOKEN_ID,
&ALICE
),
15
);
})
}
};
}
test_vote_edit!(vote_edit_favorable, true, conviction_for);
test_vote_edit!(vote_edit_against, false, conviction_against);
#[test]
fn vote_edit_change_our_mind() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: true,
power: 10
}
));
assert_eq!(ConvictionVoting::proposals(mock_hash).conviction_for, 10);
assert_eq!(ConvictionVoting::proposals(mock_hash).conviction_against, 0);
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: false,
power: 15
}
));
assert_eq!(ConvictionVoting::proposals(mock_hash).conviction_for, 0);
assert_eq!(
ConvictionVoting::proposals(mock_hash).conviction_against,
15
);
assert_eq!(
ConvictionVoting::proposals(mock_hash).convictions,
vec![(
ALICE,
1,
Conviction {
in_support: false,
power: 15
}
)]
);
assert_eq!(
ConvictionVoting::locks((TEST_TOKEN_ID, &ALICE)),
vec![(mock_hash, false, 15)]
);
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(TEST_TOKEN_ID, &ALICE),
15
);
})
}
#[test]
fn votes_saved_correctly() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: true,
power: 10
}
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&BOB,
Conviction {
in_support: false,
power: 15
}
));
assert_eq!(
ConvictionVoting::locks((TEST_TOKEN_ID, &ALICE)),
vec![(mock_hash, true, 10)]
);
assert_eq!(
ConvictionVoting::locks((TEST_TOKEN_ID, &BOB)),
vec![(mock_hash, false, 15)]
);
assert_eq!(
ConvictionVoting::proposals(mock_hash).convictions,
vec![
(
ALICE,
1,
Conviction {
in_support: true,
power: 10
}
),
(
BOB,
1,
Conviction {
in_support: false,
power: 15
}
)
]
);
assert_eq!(ConvictionVoting::proposals(mock_hash).conviction_for, 10);
assert_eq!(
ConvictionVoting::proposals(mock_hash).conviction_against,
15
);
})
}
#[test]
fn vote_other_proposals_extend_locks() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mut mock_hash_1 = H256::default();
let mut mock_hash_2 = H256::default();
mock_hash_1.randomize();
mock_hash_2.randomize();
assert!(mock_hash_1 != mock_hash_2);
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash_1,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash_1,
&ALICE,
Conviction {
in_support: true,
power: 10
}
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash_2,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash_2,
&ALICE,
Conviction {
in_support: true,
power: 11
}
));
assert_eq!(
ConvictionVoting::locks((TEST_TOKEN_ID, &ALICE)),
vec![(mock_hash_1, true, 10), (mock_hash_2, true, 11)]
);
// Locked the max of both
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(TEST_TOKEN_ID, &ALICE),
11
);
})
}
#[test]
fn vote_fail_if_not_enough_tokens() {
ExtBuilder::default().build().execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_noop!(
<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: true,
power: 10
}
),
Error::<Test>::NotEnoughBalance
);
})
}
macro_rules! test_close_or_veto {
($function:tt, $t1:ident, $t2:ident, $t3:ident) => {
#[test]
fn $t1() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: true,
power: 10
}
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&BOB,
Conviction {
in_support: false,
power: 15
}
));
System::set_block_number(ConvictionVoting::now() + 10_000);
assert_ok!(ConvictionVoting::$function(mock_hash));
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(
TEST_TOKEN_ID,
&ALICE
),
0
);
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(
TEST_TOKEN_ID,
&BOB
),
0
);
})
}
#[test]
fn $t2() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: true,
power: 10
}
));
System::set_block_number(ConvictionVoting::now() + 10_000);
assert_ok!(ConvictionVoting::$function(mock_hash));
assert!(!Locks::<Test>::contains_key((TEST_TOKEN_ID, &ALICE)));
assert!(!Proposals::<Test>::contains_key(mock_hash));
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(
TEST_TOKEN_ID,
&ALICE
),
0
);
})
}
#[test]
fn $t3() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mut mock_hash_1 = H256::default();
let mut mock_hash_2 = H256::default();
mock_hash_1.randomize();
mock_hash_2.randomize();
assert!(mock_hash_1 != mock_hash_2);
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash_1,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash_2,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash_1,
&ALICE,
Conviction {
in_support: true,
power: 15
}
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash_2,
&ALICE,
Conviction {
in_support: true,
power: 10
}
));
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(
TEST_TOKEN_ID,
&ALICE
),
15
);
System::set_block_number(ConvictionVoting::now() + 10_000);
assert_ok!(ConvictionVoting::$function(mock_hash_1));
assert_eq!(
ConvictionVoting::locks((TEST_TOKEN_ID, &ALICE)),
vec![(mock_hash_2, true, 10)]
);
assert!(!Proposals::<Test>::contains_key(mock_hash_1));
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(
TEST_TOKEN_ID,
&ALICE
),
10
);
})
}
};
}
test_close_or_veto!(
veto,
veto_unlocks_coins,
veto_free_storage_if_last_proposal,
veto_cleans_storage
);
test_close_or_veto!(
close,
close_unlocks_coins,
close_free_storage_if_last_proposal,
close_cleans_storage
);
#[test]
fn close_error_if_early() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_eq!(
ConvictionVoting::close(mock_hash).expect_err("too early and not passing"),
Error::<Test>::CannotClose.into()
);
            // We did not advance blocks, and the participation and quorum criteria are not met
assert_noop!(
ConvictionVoting::close(mock_hash),
Error::<Test>::CannotClose
);
})
}
#[test]
fn close_failing() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
System::set_block_number(ConvictionVoting::now() + 10_000);
assert_eq!(
ConvictionVoting::close(mock_hash).expect("proposal shall fail with no error"),
ProposalResult::Failing
);
})
}
#[test]
fn close_passing_early() {
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: true,
power: 99
}
));
// TTL is 1_000 so we are still early. We need a few blocks for
// the conviction to accumulate.
System::set_block_number(ConvictionVoting::now() + 500);
assert_eq!(
ConvictionVoting::close(mock_hash).expect("proposal shall pass"),
ProposalResult::Passing
);
})
}
|
ExtBuilder::default()
.one_hundred_for_alice_n_bob()
.build()
.execute_with(|| {
let mock_hash = H256::default();
assert_ok!(<ConvictionVoting as StandardizedVoting>::initiate(
mock_hash,
mock_voting_parameters()
));
assert_ok!(<ConvictionVoting as StandardizedVoting>::vote(
mock_hash,
&ALICE,
Conviction {
in_support: true,
power: 10
}
));
assert_eq!(
<Tokens as LockableCurrencies<AccountId>>::locked_balance(TEST_TOKEN_ID, &ALICE),
10
);
})
}
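// Flow exercised by these tests: initiate registers the proposal and records
// its creation block; vote locks each voter's `power` (the lock is the max
// across that voter's open proposals); close and veto release the locks and
// clean up storage, and close reports Passing or Failing from the
// accumulated convictions.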
|
test_wasb_sensor.py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
import datetime
from airflow import DAG, configuration
from airflow.contrib.sensors.wasb_sensor import WasbBlobSensor
from airflow.contrib.sensors.wasb_sensor import WasbPrefixSensor
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
mock = None
class TestWasbBlobSensor(unittest.TestCase):
_config = {
'container_name': 'container',
'blob_name': 'blob',
'wasb_conn_id': 'conn_id',
'timeout': 100,
}
def setUp(self):
|
def test_init(self):
sensor = WasbBlobSensor(
task_id='wasb_sensor',
dag=self.dag,
**self._config
)
self.assertEqual(sensor.container_name, self._config['container_name'])
self.assertEqual(sensor.blob_name, self._config['blob_name'])
self.assertEqual(sensor.wasb_conn_id, self._config['wasb_conn_id'])
self.assertEqual(sensor.check_options, {})
self.assertEqual(sensor.timeout, self._config['timeout'])
sensor = WasbBlobSensor(
task_id='wasb_sensor',
dag=self.dag,
check_options={'timeout': 2},
**self._config
)
self.assertEqual(sensor.check_options, {'timeout': 2})
@mock.patch('airflow.contrib.sensors.wasb_sensor.WasbHook',
autospec=True)
def test_poke(self, mock_hook):
mock_instance = mock_hook.return_value
sensor = WasbBlobSensor(
task_id='wasb_sensor',
dag=self.dag,
check_options={'timeout': 2},
**self._config
)
sensor.poke(None)
mock_instance.check_for_blob.assert_called_once_with(
'container', 'blob', timeout=2
)
class TestWasbPrefixSensor(unittest.TestCase):
_config = {
'container_name': 'container',
'prefix': 'prefix',
'wasb_conn_id': 'conn_id',
'timeout': 100,
}
def setUp(self):
configuration.load_test_config()
args = {
'owner': 'airflow',
'start_date': datetime.datetime(2017, 1, 1)
}
self.dag = DAG('test_dag_id', default_args=args)
def test_init(self):
sensor = WasbPrefixSensor(
task_id='wasb_sensor',
dag=self.dag,
**self._config
)
self.assertEqual(sensor.container_name, self._config['container_name'])
self.assertEqual(sensor.prefix, self._config['prefix'])
self.assertEqual(sensor.wasb_conn_id, self._config['wasb_conn_id'])
self.assertEqual(sensor.check_options, {})
self.assertEqual(sensor.timeout, self._config['timeout'])
sensor = WasbPrefixSensor(
task_id='wasb_sensor',
dag=self.dag,
check_options={'timeout': 2},
**self._config
)
self.assertEqual(sensor.check_options, {'timeout': 2})
@mock.patch('airflow.contrib.sensors.wasb_sensor.WasbHook',
autospec=True)
def test_poke(self, mock_hook):
mock_instance = mock_hook.return_value
sensor = WasbPrefixSensor(
task_id='wasb_sensor',
dag=self.dag,
check_options={'timeout': 2},
**self._config
)
sensor.poke(None)
mock_instance.check_for_prefix.assert_called_once_with(
'container', 'prefix', timeout=2
)
if __name__ == '__main__':
unittest.main()
| configuration.load_test_config()
args = {
'owner': 'airflow',
'start_date': datetime.datetime(2017, 1, 1)
}
self.dag = DAG('test_dag_id', default_args=args) |
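# Note on the contract exercised above: poke() returns the boolean result of
# the hook call (check_for_blob / check_for_prefix), and Airflow keeps
# re-poking until it returns True or the sensor's timeout elapses.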
__init__.py | from . import ac
from . import q_learning
from . import rnnq_learning
AC = ac.ActorCritic
MFAC = ac.MFAC
IL = q_learning.DQN
MFQ = q_learning.MFQ
POMFQ = q_learning.POMFQ
rnnIL = rnnq_learning.DQN
rnnMFQ = rnnq_learning.MFQ
def spawn_ai(algo_name, sess, env, handle, human_name, max_steps):
| if algo_name == 'mfq':
model = MFQ(sess, human_name, handle, env, max_steps, memory_size=80000)
elif algo_name == 'mfac':
model = MFAC(sess, human_name, handle, env)
elif algo_name == 'ac':
model = AC(sess, human_name, handle, env)
elif algo_name == 'il':
model = IL(sess, human_name, handle, env, max_steps, memory_size=80000)
elif algo_name == 'rnnIL':
model = rnnIL(sess, human_name, handle, env, max_steps, memory_size=80000)
elif algo_name == 'rnnMFQ':
model = rnnMFQ(sess, human_name, handle, env, max_steps, memory_size=80000)
elif algo_name == 'pomfq':
model = POMFQ(sess, human_name, handle, env, max_steps, memory_size=80000)
    else:
        raise ValueError('unknown algorithm: {}'.format(algo_name))  # avoid returning an unbound name
    return model
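# Usage sketch (the session, environment, and handle objects are assumptions):
#   model = spawn_ai('mfq', sess, env, handle, 'mfq-agent', max_steps=400)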
|
consumer.py | from confluent_kafka import Consumer, KafkaException, KafkaError
import sys
import logging
from pprint import pformat
def | (consumer, partitions):
print('Assignment:', partitions)
if __name__ == '__main__':
conf = {
'bootstrap.servers': 'localhost:9092',
'group.id': 'devnation-python',
'session.timeout.ms': 6000,
'auto.offset.reset': 'earliest'
}
c = Consumer(conf)
c.subscribe(['devnation'], on_assign=print_assignment)
# Read messages from Kafka, print to stdout
try:
while True:
msg = c.poll(timeout=1.0)
if msg is None:
continue
if msg.error():
if msg.error().code() == KafkaError._PARTITION_EOF:
# Continue -> we reached the end of the partition
continue
else:
sys.stderr.write('-E- Something went wrong: %s' % msg.error())
break
else:
# Proper message
sys.stderr.write('-I- %s [%d] at offset %d with key %s: ' %
(msg.topic(), msg.partition(), msg.offset(),
str(msg.key())))
print(msg.value())
except KeyboardInterrupt:
sys.stderr.write('%% Aborted by user\n')
finally:
c.close()
| print_assignment |
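# A matching producer sketch (not part of this file; broker address assumed):
#   from confluent_kafka import Producer
#   p = Producer({'bootstrap.servers': 'localhost:9092'})
#   p.produce('devnation', b'hello')
#   p.flush()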
test.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// source: private/test/catalogtest/test.proto
package catalogtest
import (
context "context"
fmt "fmt"
proto "github.com/golang/protobuf/proto"
plugin "github.com/spiffe/spire/proto/spire/common/plugin"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
math "math"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
type Request struct {
In string `protobuf:"bytes,1,opt,name=in,proto3" json:"in,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Request) Reset() { *m = Request{} }
func (m *Request) String() string { return proto.CompactTextString(m) }
func (*Request) ProtoMessage() {}
func (*Request) Descriptor() ([]byte, []int) {
return fileDescriptor_57a70e6f6968aac9, []int{0}
}
func (m *Request) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Request.Unmarshal(m, b)
}
func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Request.Marshal(b, m, deterministic)
}
func (m *Request) XXX_Merge(src proto.Message) {
xxx_messageInfo_Request.Merge(m, src)
}
func (m *Request) XXX_Size() int {
return xxx_messageInfo_Request.Size(m)
}
func (m *Request) XXX_DiscardUnknown() {
xxx_messageInfo_Request.DiscardUnknown(m)
}
var xxx_messageInfo_Request proto.InternalMessageInfo
func (m *Request) GetIn() string {
if m != nil {
return m.In
}
return ""
}
type Response struct {
Out string `protobuf:"bytes,1,opt,name=out,proto3" json:"out,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Response) Reset() { *m = Response{} }
func (m *Response) String() string { return proto.CompactTextString(m) }
func (*Response) ProtoMessage() {}
func (*Response) Descriptor() ([]byte, []int) {
return fileDescriptor_57a70e6f6968aac9, []int{1}
}
func (m *Response) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Response.Unmarshal(m, b)
}
func (m *Response) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Response.Marshal(b, m, deterministic)
}
func (m *Response) XXX_Merge(src proto.Message) {
xxx_messageInfo_Response.Merge(m, src)
}
func (m *Response) XXX_Size() int {
return xxx_messageInfo_Response.Size(m)
}
func (m *Response) XXX_DiscardUnknown() {
xxx_messageInfo_Response.DiscardUnknown(m)
}
var xxx_messageInfo_Response proto.InternalMessageInfo
func (m *Response) GetOut() string {
if m != nil {
return m.Out
}
return ""
}
func init() {
proto.RegisterType((*Request)(nil), "Request")
proto.RegisterType((*Response)(nil), "Response")
}
func init() |
var fileDescriptor_57a70e6f6968aac9 = []byte{
// 261 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x91, 0xb1, 0x4e, 0xfb, 0x30,
0x10, 0xc6, 0x95, 0xfe, 0xa5, 0xb6, 0xb9, 0x4a, 0x7f, 0x90, 0x27, 0xa8, 0x18, 0x4a, 0x10, 0x15,
0x0b, 0xb6, 0x48, 0xc5, 0x0b, 0xd0, 0x85, 0x11, 0x85, 0xad, 0x5b, 0x1a, 0x5d, 0x82, 0xa5, 0xd4,
0x36, 0xf6, 0xb9, 0xaf, 0xc0, 0x6b, 0x23, 0x3b, 0x71, 0xd5, 0xa1, 0x12, 0x4b, 0x72, 0xe7, 0xfb,
0xbe, 0x4f, 0x3f, 0xdd, 0xc1, 0x83, 0xb1, 0xf2, 0x58, 0x13, 0x0a, 0x42, 0x47, 0xa2, 0xa9, 0xa9,
0xee, 0x75, 0x17, 0xeb, 0xf0, 0xe1, 0xc6, 0x6a, 0xd2, 0xcb, 0x95, 0x33, 0xd2, 0xa2, 0x68, 0xf4,
0xe1, 0xa0, 0x95, 0x30, 0xbd, 0xef, 0x64, 0xfa, 0x0d, 0x8a, 0xe2, 0x16, 0x66, 0x15, 0x7e, 0x7b,
0x74, 0xc4, 0xfe, 0xc3, 0x44, 0xaa, 0x9b, 0x6c, 0x95, 0x3d, 0xe5, 0xd5, 0x44, 0xaa, 0xe2, 0x0e,
0xe6, 0x15, 0x3a, 0xa3, 0x95, 0x43, 0x76, 0x0d, 0xff, 0xb4, 0xa7, 0x71, 0x18, 0xca, 0xf2, 0x27,
0x83, 0xe9, 0x47, 0x4c, 0x62, 0xf7, 0x00, 0xdb, 0xba, 0xef, 0xc7, 0x6e, 0xce, 0xc7, 0xc0, 0x65,
0xce, 0x4f, 0xfe, 0x1d, 0xe4, 0x5b, 0xad, 0x5a, 0xd9, 0x79, 0x8b, 0xec, 0x91, 0x47, 0x2c, 0x3e,
0x60, 0xf1, 0x91, 0xe7, 0x34, 0x4f, 0xf6, 0xf5, 0x5f, 0xb2, 0x21, 0xbb, 0x7c, 0x86, 0xd9, 0x27,
0xda, 0xa3, 0x6c, 0x90, 0x15, 0xb0, 0x08, 0x24, 0xa9, 0xbd, 0x84, 0x52, 0xbe, 0xc2, 0xe2, 0x5d,
0x3b, 0x4a, 0x9a, 0x35, 0x5c, 0x05, 0xcb, 0xf9, 0xd3, 0x25, 0xdb, 0xdb, 0x66, 0xf7, 0xd2, 0x49,
0xfa, 0xf2, 0xfb, 0xc0, 0x23, 0x9c, 0x91, 0x6d, 0x8b, 0x62, 0x58, 0x6f, 0xdc, 0xa4, 0x48, 0xf7,
0x38, 0x3b, 0xc5, 0x7e, 0x1a, 0x47, 0x9b, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xa4, 0xbe, 0xe8,
0x5c, 0xad, 0x01, 0x00, 0x00,
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// PluginClient is the client API for Plugin service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type PluginClient interface {
CallPlugin(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error)
Configure(ctx context.Context, in *plugin.ConfigureRequest, opts ...grpc.CallOption) (*plugin.ConfigureResponse, error)
}
type pluginClient struct {
cc *grpc.ClientConn
}
func NewPluginClient(cc *grpc.ClientConn) PluginClient {
return &pluginClient{cc}
}
func (c *pluginClient) CallPlugin(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error) {
out := new(Response)
err := c.cc.Invoke(ctx, "/Plugin/CallPlugin", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *pluginClient) Configure(ctx context.Context, in *plugin.ConfigureRequest, opts ...grpc.CallOption) (*plugin.ConfigureResponse, error) {
out := new(plugin.ConfigureResponse)
err := c.cc.Invoke(ctx, "/Plugin/Configure", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// PluginServer is the server API for Plugin service.
type PluginServer interface {
CallPlugin(context.Context, *Request) (*Response, error)
Configure(context.Context, *plugin.ConfigureRequest) (*plugin.ConfigureResponse, error)
}
// UnimplementedPluginServer can be embedded to have forward compatible implementations.
type UnimplementedPluginServer struct {
}
func (*UnimplementedPluginServer) CallPlugin(ctx context.Context, req *Request) (*Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method CallPlugin not implemented")
}
func (*UnimplementedPluginServer) Configure(ctx context.Context, req *plugin.ConfigureRequest) (*plugin.ConfigureResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Configure not implemented")
}
func RegisterPluginServer(s *grpc.Server, srv PluginServer) {
s.RegisterService(&_Plugin_serviceDesc, srv)
}
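// Test-oriented sketch (assumed wiring, not generated code): the Plugin
// service can be served over an in-memory listener in unit tests:
//
//	type fakePlugin struct{ UnimplementedPluginServer }
//
//	lis := bufconn.Listen(1 << 20) // google.golang.org/grpc/test/bufconn
//	s := grpc.NewServer()
//	RegisterPluginServer(s, &fakePlugin{})
//	go s.Serve(lis)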
func _Plugin_CallPlugin_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(Request)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(PluginServer).CallPlugin(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/Plugin/CallPlugin",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(PluginServer).CallPlugin(ctx, req.(*Request))
}
return interceptor(ctx, in, info, handler)
}
func _Plugin_Configure_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(plugin.ConfigureRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(PluginServer).Configure(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/Plugin/Configure",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(PluginServer).Configure(ctx, req.(*plugin.ConfigureRequest))
}
return interceptor(ctx, in, info, handler)
}
var _Plugin_serviceDesc = grpc.ServiceDesc{
ServiceName: "Plugin",
HandlerType: (*PluginServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "CallPlugin",
Handler: _Plugin_CallPlugin_Handler,
},
{
MethodName: "Configure",
Handler: _Plugin_Configure_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "private/test/catalogtest/test.proto",
}
// ServiceClient is the client API for Service service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type ServiceClient interface {
CallService(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error)
}
type serviceClient struct {
cc *grpc.ClientConn
}
func NewServiceClient(cc *grpc.ClientConn) ServiceClient {
return &serviceClient{cc}
}
func (c *serviceClient) CallService(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error) {
out := new(Response)
err := c.cc.Invoke(ctx, "/Service/CallService", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// ServiceServer is the server API for Service service.
type ServiceServer interface {
CallService(context.Context, *Request) (*Response, error)
}
// UnimplementedServiceServer can be embedded to have forward compatible implementations.
type UnimplementedServiceServer struct {
}
func (*UnimplementedServiceServer) CallService(ctx context.Context, req *Request) (*Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method CallService not implemented")
}
func RegisterServiceServer(s *grpc.Server, srv ServiceServer) {
s.RegisterService(&_Service_serviceDesc, srv)
}
func _Service_CallService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(Request)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ServiceServer).CallService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/Service/CallService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ServiceServer).CallService(ctx, req.(*Request))
}
return interceptor(ctx, in, info, handler)
}
var _Service_serviceDesc = grpc.ServiceDesc{
ServiceName: "Service",
HandlerType: (*ServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "CallService",
Handler: _Service_CallService_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "private/test/catalogtest/test.proto",
}
// HostServiceClient is the client API for HostService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type HostServiceClient interface {
CallHostService(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error)
}
type hostServiceClient struct {
cc *grpc.ClientConn
}
func NewHostServiceClient(cc *grpc.ClientConn) HostServiceClient {
return &hostServiceClient{cc}
}
func (c *hostServiceClient) CallHostService(ctx context.Context, in *Request, opts ...grpc.CallOption) (*Response, error) {
out := new(Response)
err := c.cc.Invoke(ctx, "/HostService/CallHostService", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// HostServiceServer is the server API for HostService service.
type HostServiceServer interface {
CallHostService(context.Context, *Request) (*Response, error)
}
// UnimplementedHostServiceServer can be embedded to have forward compatible implementations.
type UnimplementedHostServiceServer struct {
}
func (*UnimplementedHostServiceServer) CallHostService(ctx context.Context, req *Request) (*Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method CallHostService not implemented")
}
func RegisterHostServiceServer(s *grpc.Server, srv HostServiceServer) {
s.RegisterService(&_HostService_serviceDesc, srv)
}
func _HostService_CallHostService_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(Request)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(HostServiceServer).CallHostService(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/HostService/CallHostService",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(HostServiceServer).CallHostService(ctx, req.(*Request))
}
return interceptor(ctx, in, info, handler)
}
var _HostService_serviceDesc = grpc.ServiceDesc{
ServiceName: "HostService",
HandlerType: (*HostServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "CallHostService",
Handler: _HostService_CallHostService_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "private/test/catalogtest/test.proto",
}
| {
proto.RegisterFile("private/test/catalogtest/test.proto", fileDescriptor_57a70e6f6968aac9)
} |
base_socket_connection.py | from __future__ import absolute_import
import abc
import math
import os
import socket
import struct
from future.utils import with_metaclass
from boofuzz.connections import itarget_connection
def _seconds_to_sockopt_format(seconds):
"""Convert floating point seconds value to second/useconds struct used by UNIX socket library.
For Windows, convert to whole milliseconds.
"""
if os.name == "nt":
return int(seconds * 1000)
else:
microseconds_per_second = 1000000
whole_seconds = int(math.floor(seconds))
whole_microseconds = int(math.floor((seconds % 1) * microseconds_per_second))
return struct.pack("ll", whole_seconds, whole_microseconds)
class | (with_metaclass(abc.ABCMeta, itarget_connection.ITargetConnection)):
"""This class serves as a base for a number of Connections over sockets.
.. versionadded:: 0.2.0
Args:
send_timeout (float): Seconds to wait for send before timing out. Default 5.0.
recv_timeout (float): Seconds to wait for recv before timing out. Default 5.0.
"""
def __init__(self, send_timeout, recv_timeout):
self._send_timeout = send_timeout
self._recv_timeout = recv_timeout
self._sock = None
def close(self):
"""
Close connection to the target.
Returns:
None
"""
self._sock.close()
@abc.abstractmethod
def open(self):
"""
Opens connection to the target. Make sure to call close!
Returns:
None
"""
self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDTIMEO, _seconds_to_sockopt_format(self._send_timeout))
self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, _seconds_to_sockopt_format(self._recv_timeout))
| BaseSocketConnection |
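# Illustrative sketch, not from the original file: a concrete subclass is
# expected to create self._sock and then defer to the base open() so the
# send/recv timeouts above are applied, e.g.:
# class TCPSocketConnectionSketch(BaseSocketConnection):
#     def open(self):
#         self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#         super(TCPSocketConnectionSketch, self).open()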
0002_user_conf_link.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-06-02 18:17
from __future__ import unicode_literals
from django.db import migrations, models
class | (migrations.Migration):
dependencies = [
('signup', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='user',
name='conf_link',
field=models.CharField(default=b'', max_length=200),
),
]
| Migration |
0d7b8275.c329fe7e.js | (window.webpackJsonp=window.webpackJsonp||[]).push([[13],{144:function(e,t,r){"use strict";r.r(t),r.d(t,"frontMatter",(function(){return a})),r.d(t,"metadata",(function(){return s})),r.d(t,"rightToc",(function(){return c})),r.d(t,"default",(function(){return p}));var n=r(2),o=r(9),i=(r(0),r(326)),a={id:"index",title:"Overview"},s={id:"version-v0.1/self-service/flows/index",title:"Overview",description:"ORY Kratos allows end-users to sign up, log in, log out, update profile",source:"@site/versioned_docs/version-v0.1/self-service/flows/index.md",permalink:"/kratos/docs/v0.1/self-service/flows/index",editUrl:"https://github.com/ory/kratos/edit/master/docs/versioned_docs/version-v0.1/self-service/flows/index.md",version:"v0.1",lastUpdatedBy:"hackerman",lastUpdatedAt:1588432565,sidebar:"version-v0.1/docs",previous:{title:"Threat Models and Security Profiles",permalink:"/kratos/docs/v0.1/concepts/security"},next:{title:"User Login and User Registration",permalink:"/kratos/docs/v0.1/self-service/flows/user-login-user-registration"}},c=[{value:"Network Flows for Browsers",id:"network-flows-for-browsers",children:[]}],l={rightToc:c};function p(e){var t=e.components,r=Object(o.a)(e,["components"]);return Object(i.b)("wrapper",Object(n.a)({},l,r,{components:t,mdxType:"MDXLayout"}),Object(i.b)("p",null,'ORY Kratos allows end-users to sign up, log in, log out, update profile\ninformation, recover accounts, and perform other important account management\ntasks without third party involvement ("self-service").'),Object(i.b)("p",null,"The opposite to self-service management is someone using administrative\nprivileges to create, update, or delete accounts."),Object(i.b)("h2",{id:"network-flows-for-browsers"},"Network Flows for Browsers"),Object(i.b)("p",null,"All Self-Service Flows such as ",Object(i.b)("a",Object(n.a)({parentName:"p"},{href:"./user-login.md"}),"User Login"),",\n",Object(i.b)("a",Object(n.a)({parentName:"p"},{href:"./user-login-user-registration.mdx"}),"User Registration"),",\n",Object(i.b)("a",Object(n.a)({parentName:"p"},{href:"/kratos/docs/v0.1/self-service/flows/user-profile-management"}),"Profile Management")," use the same template:"),Object(i.b)("ol",null,Object(i.b)("li",{parentName:"ol"},"The Browser makes an HTTP request to the flow's initialization endpoint (e.g.\n",Object(i.b)("inlineCode",{parentName:"li"},"/auth/browser/login"),");"),Object(i.b)("li",{parentName:"ol"},"The initialization endpoint processes data and associates it with a request\nID and redirects the browser to the flow's configured UI URL (e.g.\n",Object(i.b)("inlineCode",{parentName:"li"},"urls.login_ui"),"), appending the request ID as the ",Object(i.b)("inlineCode",{parentName:"li"},"request")," URL Query\nParameter;"),Object(i.b)("li",{parentName:"ol"},"The endpoint responsible for the UI URL uses the ",Object(i.b)("inlineCode",{parentName:"li"},"request")," URL Query\nParameter (e.g. ",Object(i.b)("inlineCode",{parentName:"li"},"http://my-app/auth/login?request=abcde"),") to fetch the data\npreviously associated with the Request ID from either ORY Kratos's Public or\nAdmin API."),Object(i.b)("li",{parentName:"ol"},"The UI endpoint renders the fetched data in any way it sees it fit. The flow\nis typically completed by the browser making another request to one of ORY\nKratos' endpoints, which is usually described in the fetched request data.")))}p.isMDXComponent=!0},326:function(e,t,r){"use strict";r.d(t,"a",(function(){return u})),r.d(t,"b",(function(){return b}));var n=r(0),o=r.n(n);function i(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}function a(e,t){var r=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),r.push.apply(r,n)}return r}function | (e){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{};t%2?a(Object(r),!0).forEach((function(t){i(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):a(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function c(e,t){if(null==e)return{};var r,n,o=function(e,t){if(null==e)return{};var r,n,o={},i=Object.keys(e);for(n=0;n<i.length;n++)r=i[n],t.indexOf(r)>=0||(o[r]=e[r]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(n=0;n<i.length;n++)r=i[n],t.indexOf(r)>=0||Object.prototype.propertyIsEnumerable.call(e,r)&&(o[r]=e[r])}return o}var l=o.a.createContext({}),p=function(e){var t=o.a.useContext(l),r=t;return e&&(r="function"==typeof e?e(t):s(s({},t),e)),r},u=function(e){var t=p(e.components);return o.a.createElement(l.Provider,{value:t},e.children)},d={inlineCode:"code",wrapper:function(e){var t=e.children;return o.a.createElement(o.a.Fragment,{},t)}},f=o.a.forwardRef((function(e,t){var r=e.components,n=e.mdxType,i=e.originalType,a=e.parentName,l=c(e,["components","mdxType","originalType","parentName"]),u=p(r),f=n,b=u["".concat(a,".").concat(f)]||u[f]||d[f]||i;return r?o.a.createElement(b,s(s({ref:t},l),{},{components:r})):o.a.createElement(b,s({ref:t},l))}));function b(e,t){var r=arguments,n=t&&t.mdxType;if("string"==typeof e||n){var i=r.length,a=new Array(i);a[0]=f;var s={};for(var c in t)hasOwnProperty.call(t,c)&&(s[c]=t[c]);s.originalType=e,s.mdxType="string"==typeof e?e:n,a[1]=s;for(var l=2;l<i;l++)a[l]=r[l];return o.a.createElement.apply(null,a)}return o.a.createElement.apply(null,r)}f.displayName="MDXCreateElement"}}]); | s
NormalTxEosDataCreator.js | 'use strict'
let TxDataCreator = require('../common/TxDataCreator');
let ccUtil = require('../../../api/ccUtil');
let utils = require('../../../util/util');
let logger = utils.getLogger('NormalTxEosDataCreator.js');
/**
* @class
* @augments TxDataCreator
*/
class NormalTxEosDataCreator extends TxDataCreator{
/**
* @constructor
* @param {Object} input - {@link CrossChain#input input} of final users.(gas, gasPrice, value and so on)
* @param {Object} config - {@link CrossChain#config config} of cross chain used.
*/
constructor(input,config) {
super(input,config);
}
/**
* @override
* @returns {Promise<{code: boolean, result: null}>}
*/
async createCommonData(){
logger.debug("Entering NormalTxEosDataCreator::createCommonData");
this.retResult.code = true;
let commonData = {};
commonData.from = this.input.from;
if(this.input.chainType === 'WAN'){
commonData.to = this.config.buddySCAddr;
}else{
commonData.to = this.input.to;
}
commonData.value = parseFloat(this.input.amount).toFixed(4) + ' ' + this.config.tokenSymbol;
// commonData.gasPrice = ccUtil.getGWeiToWei(this.input.gasPrice);
// commonData.gasLimit = Number(this.input.gasLimit);
// commonData.gas = Number(this.input.gasLimit);
commonData.nonce = 0; // TODO: fetch the real nonce (see the commented-out getNonceByLocal call below)
this.retResult.result = commonData;
try{
this.retResult.code = true;
// if(this.input.hasOwnProperty('testOrNot')){
// commonData.nonce = ccUtil.getNonceTest();
// }else{
// commonData.nonce = await ccUtil.getNonceByLocal(commonData.from,this.input.chainType);
// logger.info("NormalTxEosDataCreator::createCommonData getNonceByLocal,%s",commonData.nonce);
// }
// logger.debug("nonce:is ",commonData.nonce);
logger.debug(commonData);
// if(this.input.chainType === 'WAN'){
// commonData.Txtype = '0x01';
// }
this.retResult.result = commonData;
}catch(error){
logger.error("error:",error);
this.retResult.code = false;
this.retResult.result = error;
}
return Promise.resolve(this.retResult);
}
/**
* @override
* @returns {{code: boolean, result: null}|transUtil.this.retResult|{code, result}}
*/
async createContractData(){
let actions;
try{
logger.debug("Entering NormalTxEosDataCreator::createContractData");
// input action can be newaccount/buyrambytes/sellram/delegatebw/undelegatebw
if (this.input.action && this.input.action === 'newaccount') {
actions = [{
account: 'eosio',
name: this.input.action,
authorization: [{
actor: this.input.from,
permission: 'active',
}],
data: {
creator: this.input.from,
name: this.input.accountName,
owner: {
threshold: 1,
keys: [{
key: this.input.ownerPublicKey,
weight: 1
}],
accounts: [],
waits: []
},
active: {
threshold: 1,
keys: [{
key: this.input.activePublicKey,
weight: 1
}],
accounts: [],
waits: []
},
}
},
{
account: 'eosio',
name: 'buyrambytes',
authorization: [{
actor: this.input.from,
permission: 'active',
}],
data: {
payer: this.input.from,
receiver: this.input.accountName,
bytes: parseInt(Number(this.input.ramBytes) * 1024)
}
},{
account: 'eosio',
name: 'delegatebw',
authorization: [{
actor: this.input.from,
permission: 'active',
}],
data: {
from: this.input.from,
receiver: this.input.accountName,
stake_net_quantity: parseFloat(this.input.netAmount).toFixed(4) + ' EOS',
stake_cpu_quantity: parseFloat(this.input.cpuAmount).toFixed(4) + ' EOS',
transfer: false
}
}];
} else if (this.input.action && this.input.action === 'buyrambytes') {
actions = [{
account: 'eosio',
name: this.input.action,
authorization: [{
actor: this.input.from,
permission: 'active',
}],
data: {
payer: this.input.from,
receiver: this.input.to,
bytes: parseInt(Number(this.input.ramBytes) * 1024)
}
}];
} else if (this.input.action && this.input.action === 'sellram') {
actions = [{
account: 'eosio',
name: this.input.action,
authorization: [{
actor: this.input.from,
permission: 'active',
}],
data: {
account: this.input.from,
bytes: parseInt(Number(this.input.ramBytes) * 1024)
}
}];
} else if (this.input.action && this.input.action === 'delegatebw') {
actions = [{
account: 'eosio',
name: this.input.action,
authorization: [{
actor: this.input.from,
permission: 'active',
}],
data: {
from: this.input.from,
receiver: this.input.to,
stake_net_quantity: parseFloat(this.input.netAmount).toFixed(4) + ' EOS',
stake_cpu_quantity: parseFloat(this.input.cpuAmount).toFixed(4) + ' EOS',
transfer: false
}
}];
} else if (this.input.action && this.input.action === 'undelegatebw') {
actions = [{
account: 'eosio',
name: this.input.action,
authorization: [{
actor: this.input.from,
permission: 'active',
}],
data: {
from: this.input.from,
receiver: this.input.to,
unstake_net_quantity: parseFloat(this.input.netAmount).toFixed(4) + ' EOS',
unstake_cpu_quantity: parseFloat(this.input.cpuAmount).toFixed(4) + ' EOS'
}
}];
} else {
actions = [{
account: 'eosio.token',
name: 'transfer',
authorization: [{
actor: this.input.from,
permission: 'active',
}],
data: {
from: this.input.from,
to: this.input.to,
quantity: parseFloat(this.input.amount).toFixed(4) + ' ' + this.config.tokenSymbol,
memo: '',
}
}];
}
logger.debug("NormalTxEosDataCreator:: action is ",JSON.stringify(actions, null, 2));
let packedTx = await ccUtil.packTransaction(this.input.chainType, {actions:actions});
this.retResult.result = packedTx; | this.retResult.result = error;
this.retResult.code = false;
}
return this.retResult;
}
}
module.exports = NormalTxEosDataCreator; | this.retResult.code = true;
}catch(error){
logger.error("NormalTxEosDataCreator::createContractData: error: ",error); |
api.go | package compat
import (
"go/types"
)
// API describes the exported surface of a set of packages and everything reachable from it.
type API struct {
// Packages included directly in the API.
Packages []*Package
// Reachable is the set of all objects that are reachable from the API.
Reachable []*Object
}
// NewAPI creates an empty API.
func NewAPI() *API |
func (a *API) LookupSymbol(sym *Symbol) *Object {
for _, obj := range a.Reachable {
if &obj.Symbol == sym {
return obj
}
}
return nil
}
type Package struct {
Path string `json:"path"`
Objects map[string]*Object `json:"objects"`
}
func NewPackage(path string) *Package {
return &Package{
Path: path,
Objects: make(map[string]*Object),
}
}
type DeclarationType string
const (
TypeDeclaration DeclarationType = "type"
AliasDeclaration = "alias"
VarDeclaration = "var"
ConstDeclaration = "const"
FuncDeclaration = "func"
)
type Type string
const (
StructType Type = "struct"
BasicType = "basic"
SliceType = "slice"
ArrayType = "array"
FuncType = "func"
ChanType = "chan"
MapType = "map"
PointerType = "pointer"
InterfaceType = "interface"
)
type Symbol struct {
Package string `json:"package"`
Name string `json:"name"`
}
type Object struct {
Symbol Symbol `json:"symbol"`
Type DeclarationType `json:"type"`
Definition *Definition `json:"definition,omitempty"`
Methods []*Func `json:"methods,omitempty"`
}
type Definition struct {
Type Type `json:"type"`
Symbol *Symbol `json:"symbol,omitempty"`
Elem *Definition `json:"elem,omitempty"`
Key *Definition `json:"key,omitempty"`
Len int64 `json:"len,omitempty"`
ChanDir types.ChanDir `json:"chandir,omitempty"`
Fields []*Field `json:"fields,omitempty"`
Functions []*Func `json:"functions,omitempty"`
Signature *Signature `json:"signature,omitempty"`
}
type Field struct {
Name string
Type *Definition
}
type Func struct {
Signature
Name string
}
type Signature struct {
Params []*Definition
Results []*Definition
Variadic bool
}
| {
return &API{}
} |
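// Illustrative sketch, not from the original file: a hypothetical Object as it
// would serialize with the json tags above (Field has no tags, so its keys
// marshal as "Name"/"Type"):
//
// {
//   "symbol": {"package": "example.com/pkg", "name": "Thing"},
//   "type": "type",
//   "definition": {"type": "struct", "fields": [{"Name": "ID", "Type": {"type": "basic"}}]}
// }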
client.py | # Import packages
import copy
import pickle
import socket
from string import ascii_lowercase
import enchant
from colorama import Fore, init
init(autoreset=True)
d = enchant.Dict("en_US")
# Configuration constants
SOLUTION_FILE = "solution_words.json"
SETTINGS_FILE = "settings.json"
EMPTY = "—"
HEADERSIZE = 10
# Connect to server
while True:
IP = input("IP: ")
PORT = int(input("PORT: "))
USERNAME = input("USRENAME: ")
try:
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
client_socket.connect((IP, PORT))
print("Successfully connected. Waiting for round to end.")
break
except (IOError, ValueError):
print("Could not connect to server.")
# Game class
class Game:
def __init__(self, width, height, solution_word):
self.width = width
self.board = [[EMPTY for _ in range(self.width)] for _ in range(height)]
self.solution_word = solution_word
self.correct_places = set()
self.incorrect_places = set()
self.wrong_letter = set()
self.colored_board = []
def run_game(self): # Logic of the game
wh |
def input_word(self):
while True:
# Allow the user to input a word
user_word = input("Enter a word: ").lower()
# Check if the word is valid
if len(user_word) != self.width:
print(f"{Fore.RED}Incorrect length")
continue
if not d.check(user_word):
print(f"{Fore.RED}Not a real word")
continue
if list(user_word) in self.board:
print(f"{Fore.RED}Word already given")
continue
break
# Put user's guess in board
for i, word in enumerate(self.board):
if word[0] == EMPTY:
self.board[i] = list(user_word)
break
def color_board(self):
self.colored_board = copy.deepcopy(self.board)
for y, word in enumerate(self.colored_board):
solution_word_list = list(self.solution_word)
# Check for letters in correct places
for x, letter in enumerate(word):
if solution_word_list[x] == letter:
self.colored_board[y][x] = Fore.GREEN + word[x]
solution_word_list[x] = ""
self.correct_places.add(letter)
if letter in self.incorrect_places:
self.incorrect_places.remove(letter)
# Check for letters in incorrect places but still in the word
for x, letter in enumerate(word):
if letter in solution_word_list:
self.colored_board[y][x] = Fore.YELLOW + word[x]
solution_word_list[solution_word_list.index(letter)] = ""
self.incorrect_places.add(letter)  # track yellows so print_letters() can color them
# Check for letters in incorrect places and not in the word
for letter in word:
condition1 = letter not in self.correct_places
condition2 = letter not in self.incorrect_places
condition3 = letter not in solution_word_list
condition4 = letter != EMPTY
if condition1 and condition2 and condition3 and condition4:
self.wrong_letter.add(letter)
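# Illustrative example, not from the original file: with solution_word "crane"
# and a guessed row "cargo", color_board() paints 'c' green (right letter,
# right place), 'a' and 'r' yellow (in the word, wrong place), and records
# 'g' and 'o' as letters not in the word.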
def print_board(self): # Print the board
print()
for word in self.colored_board:
for letter in word:
print(letter, end=" ")
print()
def print_letters(self):
# Print correct, incorrect, and wrong letters
for letter in ascii_lowercase:
color = ""
if letter in self.correct_places:
color = Fore.GREEN
elif letter in self.incorrect_places:
color = Fore.YELLOW
elif letter in self.wrong_letter:
color = Fore.RED
print(f"{color}{letter.upper()}", end=" ")
print()
def win_check(self):
for word in self.board:
solution_list = list(self.solution_word)
if word == solution_list:
self.print_board()
print(f"{Fore.GREEN}You win!\n"*3)
return True
return False if self.board[-1][-1] != EMPTY else None  # False = board full (loss), None = keep playing
def share(self):
print("Shareable emojis:")
for word in self.colored_board:
if word[0] == EMPTY:
break
for letter in word:
if letter[:-1] == Fore.GREEN:
print("\N{Large Green Square}", end="")
elif letter[:-1] == Fore.YELLOW:
print("\N{Large Yellow Square}", end="")
else:
print("\N{Black Large Square}", end="")
print()
def print_leaderboard(self):
print("Waiting for others to finish the wordle.")
leaderboard = receive()
print(f"The word was {self.solution_word}")
leaderboard = dict((sorted(leaderboard.items(), key=lambda item: item[1]))) # Sort the dict
for place, username in enumerate(leaderboard):
print(f"#{place + 1}: {username} {round(leaderboard[username], 2)}")
def wait_for_gamestart():
while True:
if receive() == "game start":
break
dimensions = receive()
solution_word = receive()
return dimensions, solution_word
def receive():
message_header = client_socket.recv(HEADERSIZE)
message_length = int(message_header.decode('utf-8').strip())
return pickle.loads(client_socket.recv(message_length))
def send(message):
message = pickle.dumps(message)
message = f"{len(message):<{HEADERSIZE}}".encode("utf-8") + message
client_socket.send(message)
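# Illustrative sketch, not from the original file: send()/receive() implement a
# simple length-prefixed framing around the pickle payload, e.g. for a message m:
# payload = pickle.dumps(m)                                   # N bytes
# frame = f"{len(payload):<{HEADERSIZE}}".encode("utf-8") + payload
# receive() first reads the fixed 10-byte ASCII header, parses N, then
# unpickles the next N bytes.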
# Functions
def main():
try:
while True:
dimensions, solution_word = wait_for_gamestart()
g = Game(*dimensions, solution_word)
g.run_game()
except ConnectionResetError:
print("Connection to server closed.")
if __name__ == "__main__":
main()
| ile True:
self.color_board()
self.print_board()
self.print_letters()
self.input_word()
# Check if won
if (won := self.win_check()) is not None:  # game over: True = win, False = loss
if not won:
send("incomplete")
print(f"{Fore.RED}You lose!\n"*3)
else:
send("found word")
self.color_board()
self.print_board()
send(USERNAME)
self.share()
self.print_leaderboard()
break
|
distributor_test.go | /*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package privdata
import (
"errors"
"fmt"
"testing"
proto "github.com/hyperledger/fabric-protos-go/gossip"
"github.com/hyperledger/fabric-protos-go/peer"
"github.com/hyperledger/fabric-protos-go/transientstore"
"github.com/jxu86/fabric-gm/core/common/privdata"
"github.com/jxu86/fabric-gm/gossip/api"
gcommon "github.com/jxu86/fabric-gm/gossip/common"
"github.com/jxu86/fabric-gm/gossip/discovery"
"github.com/jxu86/fabric-gm/gossip/filter"
gossip2 "github.com/jxu86/fabric-gm/gossip/gossip"
"github.com/jxu86/fabric-gm/gossip/metrics"
"github.com/jxu86/fabric-gm/gossip/metrics/mocks"
mocks2 "github.com/jxu86/fabric-gm/gossip/privdata/mocks"
"github.com/jxu86/fabric-gm/gossip/protoext"
"github.com/jxu86/fabric-gm/protoutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
)
func Setup(mock *mocks2.CollectionAccessPolicy, requiredPeerCount int, maxPeerCount int,
accessFilter privdata.Filter, orgs map[string]struct{}, memberOnlyRead bool) {
mock.On("AccessFilter").Return(accessFilter)
mock.On("RequiredPeerCount").Return(requiredPeerCount)
mock.On("MaximumPeerCount").Return(maxPeerCount)
mock.On("MemberOrgs").Return(orgs)
mock.On("IsMemberOnlyRead").Return(memberOnlyRead)
}
type gossipMock struct {
err error
mock.Mock
api.PeerSignature
}
func (g *gossipMock) IdentityInfo() api.PeerIdentitySet {
return g.Called().Get(0).(api.PeerIdentitySet)
}
func (g *gossipMock) PeersOfChannel(channelID gcommon.ChannelID) []discovery.NetworkMember {
return g.Called(channelID).Get(0).([]discovery.NetworkMember)
}
func (g *gossipMock) SendByCriteria(message *protoext.SignedGossipMessage, criteria gossip2.SendCriteria) error {
args := g.Called(message, criteria)
if args.Get(0) != nil {
return args.Get(0).(error)
}
return nil
}
func (g *gossipMock) PeerFilter(channel gcommon.ChannelID, messagePredicate api.SubChannelSelectionCriteria) (filter.RoutingFilter, error) {
if g.err != nil {
return nil, g.err
}
return func(member discovery.NetworkMember) bool {
return messagePredicate(g.PeerSignature)
}, nil
}
func TestDistributor(t *testing.T) | {
channelID := "test"
g := &gossipMock{
Mock: mock.Mock{},
PeerSignature: api.PeerSignature{
Signature: []byte{3, 4, 5},
Message: []byte{6, 7, 8},
PeerIdentity: []byte{0, 1, 2},
},
}
sendings := make(chan struct {
*proto.PrivatePayload
gossip2.SendCriteria
}, 8)
g.On("PeersOfChannel", gcommon.ChannelID(channelID)).Return([]discovery.NetworkMember{
{PKIid: gcommon.PKIidType{1}},
{PKIid: gcommon.PKIidType{2}},
})
g.On("IdentityInfo").Return(api.PeerIdentitySet{
{
PKIId: gcommon.PKIidType{1},
Organization: api.OrgIdentityType("org1"),
},
{
PKIId: gcommon.PKIidType{2},
Organization: api.OrgIdentityType("org2"),
},
})
g.On("SendByCriteria", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
msg := args.Get(0).(*protoext.SignedGossipMessage)
sendCriteria := args.Get(1).(gossip2.SendCriteria)
sendings <- struct {
*proto.PrivatePayload
gossip2.SendCriteria
}{
PrivatePayload: msg.GetPrivateData().Payload,
SendCriteria: sendCriteria,
}
}).Return(nil)
accessFactoryMock := &mocks2.CollectionAccessFactory{}
c1ColConfig := &peer.CollectionConfig{
Payload: &peer.CollectionConfig_StaticCollectionConfig{
StaticCollectionConfig: &peer.StaticCollectionConfig{
Name: "c1",
RequiredPeerCount: 1,
MaximumPeerCount: 1,
},
},
}
c2ColConfig := &peer.CollectionConfig{
Payload: &peer.CollectionConfig_StaticCollectionConfig{
StaticCollectionConfig: &peer.StaticCollectionConfig{
Name: "c2",
RequiredPeerCount: 1,
MaximumPeerCount: 1,
},
},
}
policyMock := &mocks2.CollectionAccessPolicy{}
Setup(policyMock, 1, 2, func(_ protoutil.SignedData) bool {
return true
}, map[string]struct{}{
"org1": {},
"org2": {},
}, false)
accessFactoryMock.On("AccessPolicy", c1ColConfig, channelID).Return(policyMock, nil)
accessFactoryMock.On("AccessPolicy", c2ColConfig, channelID).Return(policyMock, nil)
testMetricProvider := mocks.TestUtilConstructMetricProvider()
metrics := metrics.NewGossipMetrics(testMetricProvider.FakeProvider).PrivdataMetrics
d := NewDistributor(channelID, g, accessFactoryMock, metrics, 0)
pdFactory := &pvtDataFactory{}
pvtData := pdFactory.addRWSet().addNSRWSet("ns1", "c1", "c2").addRWSet().addNSRWSet("ns2", "c1", "c2").create()
err := d.Distribute("tx1", &transientstore.TxPvtReadWriteSetWithConfigInfo{
PvtRwset: pvtData[0].WriteSet,
CollectionConfigs: map[string]*peer.CollectionConfigPackage{
"ns1": {
Config: []*peer.CollectionConfig{c1ColConfig, c2ColConfig},
},
},
}, 0)
assert.NoError(t, err)
err = d.Distribute("tx2", &transientstore.TxPvtReadWriteSetWithConfigInfo{
PvtRwset: pvtData[1].WriteSet,
CollectionConfigs: map[string]*peer.CollectionConfigPackage{
"ns2": {
Config: []*peer.CollectionConfig{c1ColConfig, c2ColConfig},
},
},
}, 0)
assert.NoError(t, err)
expectedMaxCount := map[string]int{}
expectedMinAck := map[string]int{}
i := 0
assert.Len(t, sendings, 8)
for dis := range sendings {
key := fmt.Sprintf("%s~%s", dis.PrivatePayload.Namespace, dis.PrivatePayload.CollectionName)
expectedMaxCount[key] += dis.SendCriteria.MaxPeers
expectedMinAck[key] += dis.SendCriteria.MinAck
i++
if i == 8 {
break
}
}
// Ensure MaxPeers is maxInternalPeers which is 2
assert.Equal(t, 2, expectedMaxCount["ns1~c1"])
assert.Equal(t, 2, expectedMaxCount["ns2~c2"])
// and MinAck is minInternalPeers which is 1
assert.Equal(t, 1, expectedMinAck["ns1~c1"])
assert.Equal(t, 1, expectedMinAck["ns2~c2"])
// Channel is empty after we read 8 times from it
assert.Len(t, sendings, 0)
// Bad path: dependencies (gossip and others) don't work properly
g.err = errors.New("failed obtaining filter")
err = d.Distribute("tx1", &transientstore.TxPvtReadWriteSetWithConfigInfo{
PvtRwset: pvtData[0].WriteSet,
CollectionConfigs: map[string]*peer.CollectionConfigPackage{
"ns1": {
Config: []*peer.CollectionConfig{c1ColConfig, c2ColConfig},
},
},
}, 0)
assert.Error(t, err)
assert.Contains(t, err.Error(), "failed obtaining filter")
g.Mock = mock.Mock{}
g.On("SendByCriteria", mock.Anything, mock.Anything).Return(errors.New("failed sending"))
g.On("PeersOfChannel", gcommon.ChannelID(channelID)).Return([]discovery.NetworkMember{
{PKIid: gcommon.PKIidType{1}},
})
g.On("IdentityInfo").Return(api.PeerIdentitySet{
{
PKIId: gcommon.PKIidType{1},
Organization: api.OrgIdentityType("org1"),
},
})
g.err = nil
err = d.Distribute("tx1", &transientstore.TxPvtReadWriteSetWithConfigInfo{
PvtRwset: pvtData[0].WriteSet,
CollectionConfigs: map[string]*peer.CollectionConfigPackage{
"ns1": {
Config: []*peer.CollectionConfig{c1ColConfig, c2ColConfig},
},
},
}, 0)
assert.Error(t, err)
assert.Contains(t, err.Error(), "Failed disseminating 2 out of 2 private dissemination plans")
assert.Equal(t,
[]string{"channel", channelID},
testMetricProvider.FakeSendDuration.WithArgsForCall(0),
)
assert.True(t, testMetricProvider.FakeSendDuration.ObserveArgsForCall(0) > 0)
} |
|
database_unmap_dsl.rs | use database_unmap::DatabaseUnmapCommand;
pub fn database_unmap() -> DatabaseUnmapCommand {
DatabaseUnmapCommand::new()
}
#[cfg(test)]
mod test {
use super::*;
use database_unmap::DatabaseUnmapCommand;
#[test]
fn | () {
let syntax = database_unmap();
let actual = DatabaseUnmapCommand::new();
assert_eq!(syntax, actual);
}
}
| test_database_unmap |
parse.rs | use proc_macro2::Span as Span2;
use syn::{
braced, bracketed, parenthesized,
parse::{Parse, ParseBuffer, ParseStream},
parse_macro_input,
punctuated::Punctuated,
spanned::Spanned,
token, Error, Expr, ExprMacro, Ident, Lit, Macro, Result, Token,
};
use crate::util;
use super::{
Properties, Property, PropertyName, PropertyType, ReturnedWidget, Tracker, Widget, WidgetFunc,
};
impl Parse for Tracker {
fn parse(input: ParseStream) -> Result<Self> {
let bool_fn = input.parse()?;
let mut update_fns = Vec::new();
while !input.is_empty() {
let _comma: Token![,] = input.parse()?;
// allow comma at the end of the macro
if !input.is_empty() {
update_fns.push(input.parse()?);
}
}
Ok(Tracker {
bool_fn,
update_fns,
})
}
}
impl Parse for PropertyName {
fn parse(input: ParseStream) -> Result<Self> {
Ok(if input.peek(Token![::]) || input.peek2(Token![::]) {
PropertyName::Path(input.parse()?)
} else {
PropertyName::Ident(input.parse()?)
})
}
}
impl Parse for Property {
fn parse(input: ParseStream) -> Result<Self> {
let name = input.parse()?;
let mut optional_assign = false;
let mut iterative = false;
let mut braced_args = false;
if input.peek(Token![!]) {
if let PropertyName::Ident(ref ident_name) = name {
if ident_name == "factory" {
let _exclm: Token![!] = input.parse()?;
let paren_input;
parenthesized!(paren_input in input);
return Ok(Property {
name,
ty: PropertyType::Factory(paren_input.parse()?),
generics: None,
optional_assign,
iterative,
args: None,
});
}
}
return Err(input.error("Expected factory macro"));
}
// check for property(a, b, c): ...
let args = if input.peek(token::Paren) {
let paren_input;
parenthesized!(paren_input in input);
Some(paren_input.parse()?)
}
// check for property[a, b, c]: ...
else if input.peek(token::Bracket) {
let paren_input;
bracketed!(paren_input in input);
braced_args = true;
Some(paren_input.parse()?)
} else {
None
};
let generics = if input.peek(Token![<]) {
Some(input.parse()?)
} else {
None
};
// look for event handlers: property(a, ...) => move |a, ...| { ... }
let ty = if input.peek(Token![=>]) {
let _arrow: Token![=>] = input.parse()?;
if braced_args {
input.parse().map(PropertyType::ConnectComponent)?
} else {
input.parse().map(PropertyType::Connect)?
}
}
// look for widgets
else if input.peek(Token![=]) || input.peek3(Token![=]) {
if input.peek(Token![=]) {
let _token: Token![=] = input.parse()?;
} else {
let _colon: Token![:] = input.parse()?;
}
input.parse().map(PropertyType::Widget)?
}
// look for properties or optional properties (?)
else if input.peek(Token![:]) || input.peek(Token![?]) {
// look for ? at beginning for optional assign
if input.peek(Token![?]) {
let _question_mark: Token![?] = input.parse()?;
optional_assign = true;
}
let colon: Token![:] = input.parse()?;
let colon_span = colon.span();
if input.peek(Lit) {
input.parse().map(PropertyType::Value)?
} else if input.peek2(Token![!]) {
let mac: Macro = input.parse()?;
let segs = &mac.path.segments;
if segs.len() == 1 {
let ident = &segs.first().expect("Macro has no segments").ident;
if ident == "track" {
let tokens = mac.tokens.into();
PropertyType::Track(parse_macro_input::parse(tokens)?)
} else if ident == "parent" {
let tokens = mac.tokens.into();
PropertyType::Parent(parse_macro_input::parse(tokens)?)
} else if ident == "args" {
let tokens = mac.tokens.into();
PropertyType::Args(parse_macro_input::parse(tokens)?)
} else if ident == "watch" {
PropertyType::Watch(mac.tokens)
} else if ident == "iterate" {
iterative = true;
let tokens = mac.tokens.into();
PropertyType::Expr(parse_macro_input::parse(tokens)?)
} else if ident == "iterate_watch" {
iterative = true;
let tokens = mac.tokens.into();
PropertyType::Watch(parse_macro_input::parse(tokens)?)
} else {
PropertyType::Expr(Expr::Macro(ExprMacro {
attrs: Vec::new(),
mac,
}))
}
} else {
input.parse().map(PropertyType::Expr)?
}
} else {
match input.parse().map(PropertyType::Expr) {
Ok(expr) => expr,
Err(parse_err) => {
let mut err = Error::new(colon_span, "Did you confuse `=` with `:`?");
err.combine(parse_err);
return Err(err);
}
}
}
} else {
return Err(input.error("Unexpected token. Expected =>, =, : or ?:"));
};
if !input.is_empty() && !input.peek(Token![,]) {
Err(input.error("expected `,`. Did you confuse `=` with`:`?"))
} else {
Ok(Property {
name,
ty,
generics,
args,
optional_assign,
iterative,
})
}
}
}
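// Illustrative examples, not from the original file, of property forms the
// parser above accepts (widget method names are hypothetical):
//   set_label: "Hello",                      // plain value
//   set_text: watch! { model.text },         // watch! macro
//   set_visible?: model.visible,             // optional assign via `?:`
//   connect_clicked(sender) => move |_| {},  // event handler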
impl Parse for Properties {
fn parse(input: ParseStream) -> Result<Self> {
let props: Punctuated<Property, Token![,]> = input.parse_terminated(Property::parse)?;
let properties = props.into_pairs().map(|pair| pair.into_value()).collect();
Ok(Properties { properties })
}
}
impl Parse for WidgetFunc {
fn parse(input: ParseStream) -> Result<Self> {
let mut path_segments = Vec::new();
let mut args = None;
let mut ty = None;
path_segments.push(input.parse()?);
loop {
if input.peek(Ident) {
path_segments.push(input.parse()?);
} else if input.peek(Token![::]) {
let _colon: Token![::] = input.parse()?;
} else if input.peek(token::Paren) {
let paren_input;
parenthesized!(paren_input in input);
args = Some(paren_input.call(Punctuated::parse_terminated)?);
if input.peek(Token![->]) {
let _token: Token![->] = input.parse()?;
let mut ty_path = vec![input.parse()?];
loop {
if input.peek(Ident) {
ty_path.push(input.parse()?);
} else if input.peek(Token![::]) {
let _colon: Token![::] = input.parse()?;
} else {
break;
}
}
ty = Some(ty_path);
}
break;
} else {
break;
}
}
Ok(WidgetFunc {
path_segments,
args,
ty,
})
}
}
impl Parse for Widget {
fn parse(input: ParseStream) -> Result<Self> {
let mut name_opt: Option<Ident> = None;
if input.peek2(Token![=]) {
name_opt = Some(input.parse()?);
let _token: Token![=] = input.parse()?;
};
let inner_input: Option<ParseBuffer>;
let upcoming_some = {
let forked_input = input.fork();
if forked_input.peek(Ident) {
let ident: Ident = forked_input.parse()?;
ident == "Some"
} else {
false
}
};
let wrapper = if upcoming_some && input.peek2(token::Paren) {
let ident = input.parse()?;
let paren_input;
parenthesized!(paren_input in input);
inner_input = Some(paren_input);
Some(ident)
} else {
inner_input = None;
None
};
let func_input = if let Some(paren_input) = &inner_input {
&paren_input
} else {
input
};
let assign_as_ref = if func_input.peek(Token![&]) {
let _ref: Token![&] = func_input.parse()?;
true
} else {
false
};
let func: WidgetFunc = func_input.parse()?;
let inner;
let _token = braced!(inner in input);
let properties = inner.parse()?;
let name = if let Some(name) = name_opt {
name
} else {
util::idents_to_snake_case(&func.path_segments)
};
let returned_widget = if input.peek(Token![->]) {
let _arrow: Token![->] = input.parse()?;
Some(input.parse()?)
} else {
None
};
Ok(Widget {
name,
func,
properties,
wrapper,
assign_as_ref,
returned_widget,
})
}
}
impl Parse for ReturnedWidget {
fn parse(input: ParseStream) -> Result<Self> {
let mut is_optional = false;
let (name, ty) = if input.peek(Ident) {
let name = input.parse()?;
let _colon: Token![:] = input.parse()?;
let ty = input.parse()?;
if input.peek(Token![?]) {
let _mark: Token![?] = input.parse()?;
is_optional = true;
}
(Some(name), Some(ty))
} else {
if input.peek(Token![?]) {
let _mark: Token![?] = input.parse()?;
is_optional = true;
}
| crate::util::idents_to_snake_case(&[Ident::new("_returned_widget", Span2::call_site())])
});
let inner;
let _token = braced!(inner in input);
let properties = inner.parse()?;
Ok(ReturnedWidget {
name,
ty,
properties,
is_optional,
})
}
} | (None, None)
};
let name = name.unwrap_or_else(|| { |
prescaling_test.go | package main
import (
"fmt"
"testing"
"time"
"github.com/stretchr/testify/require"
)
func TestPrescalingWithoutHPA(t *testing.T) {
t.Parallel()
stacksetName := "stackset-prescale-no-hpa"
specFactory := NewTestStacksetSpecFactory(stacksetName).Ingress().StackGC(3, 15).Replicas(3)
// create stack with 3 replicas
firstStack := "v1"
spec := specFactory.Create(firstStack)
err := createStackSet(stacksetName, 1, spec)
require.NoError(t, err)
_, err = waitForStack(t, stacksetName, firstStack)
require.NoError(t, err)
// create second stack with 3 replicas
secondStack := "v2"
spec = specFactory.Create(secondStack)
err = updateStackset(stacksetName, spec)
require.NoError(t, err)
_, err = waitForStack(t, stacksetName, secondStack)
require.NoError(t, err)
// switch traffic so that both stacks are receiving equal traffic and verify traffic has actually switched
fullFirstStack := fmt.Sprintf("%s-%s", stacksetName, firstStack)
fullSecondStack := fmt.Sprintf("%s-%s", stacksetName, secondStack)
_, err = waitForIngress(t, stacksetName)
require.NoError(t, err)
desiredTraffic := map[string]float64{
fullFirstStack: 50,
fullSecondStack: 50,
}
err = setDesiredTrafficWeightsIngress(stacksetName, desiredTraffic)
require.NoError(t, err)
err = trafficWeightsUpdatedIngress(t, stacksetName, weightKindActual, desiredTraffic, nil).withTimeout(time.Minute * 4).await()
require.NoError(t, err)
// create third stack with only 1 replica and wait for the deployment to be created
thirdStack := "v3"
fullThirdStack := fmt.Sprintf("%s-%s", stacksetName, thirdStack)
spec = specFactory.Replicas(1).Create(thirdStack)
err = updateStackset(stacksetName, spec)
require.NoError(t, err)
deployment, err := waitForDeployment(t, fullThirdStack)
require.NoError(t, err)
require.EqualValues(t, 1, *deployment.Spec.Replicas)
// switch 50% of the traffic to the new stack
desiredTraffic = map[string]float64{
fullThirdStack: 50,
fullFirstStack: 25,
fullSecondStack: 25,
}
err = setDesiredTrafficWeightsIngress(stacksetName, desiredTraffic)
require.NoError(t, err)
err = trafficWeightsUpdatedIngress(t, stacksetName, weightKindActual, desiredTraffic, nil).withTimeout(time.Minute * 4).await()
require.NoError(t, err)
// recheck the deployment of the last stack and verify that the number of replicas is 3 till the end of the
// prescaling timeout
for i := 1; i <= 6; i++ {
deployment, err = waitForDeployment(t, fullThirdStack)
require.NoError(t, err)
require.EqualValues(t, 3, *(deployment.Spec.Replicas))
time.Sleep(time.Second * 10)
}
time.Sleep(time.Second * 10)
deployment, err = waitForDeployment(t, fullThirdStack)
require.NoError(t, err)
require.EqualValues(t, 1, *(deployment.Spec.Replicas))
}
func | (t *testing.T) {
t.Parallel()
stacksetName := "stackset-prescale-hpa"
specFactory := NewTestStacksetSpecFactory(stacksetName).Ingress().StackGC(3, 15).
HPA(1, 10).Replicas(3)
// create first stack with 3 replicas
firstStack := "v1"
spec := specFactory.Create(firstStack)
err := createStackSet(stacksetName, 1, spec)
require.NoError(t, err)
_, err = waitForStack(t, stacksetName, firstStack)
require.NoError(t, err)
// create second stack with 3 replicas
secondStack := "v2"
spec = specFactory.Create(secondStack)
err = updateStackset(stacksetName, spec)
require.NoError(t, err)
_, err = waitForStack(t, stacksetName, secondStack)
require.NoError(t, err)
// switch traffic so that both stacks are receiving equal traffic
fullFirstStack := fmt.Sprintf("%s-%s", stacksetName, firstStack)
fullSecondStack := fmt.Sprintf("%s-%s", stacksetName, secondStack)
_, err = waitForIngress(t, stacksetName)
require.NoError(t, err)
desiredTraffic := map[string]float64{
fullFirstStack: 50,
fullSecondStack: 50,
}
err = setDesiredTrafficWeightsIngress(stacksetName, desiredTraffic)
require.NoError(t, err)
err = trafficWeightsUpdatedIngress(t, stacksetName, weightKindActual, desiredTraffic, nil).withTimeout(time.Minute * 4).await()
require.NoError(t, err)
// create a third stack with only one replica and verify the deployment has only one pod
thirdStack := "v3"
fullThirdStack := fmt.Sprintf("%s-%s", stacksetName, thirdStack)
spec = specFactory.Replicas(1).Create(thirdStack)
err = updateStackset(stacksetName, spec)
require.NoError(t, err)
deployment, err := waitForDeployment(t, fullThirdStack)
require.NoError(t, err)
require.EqualValues(t, 1, *deployment.Spec.Replicas)
// switch 50% of the traffic to the third stack and wait for the process to be complete
desiredTraffic = map[string]float64{
fullThirdStack: 50,
fullFirstStack: 25,
fullSecondStack: 25,
}
err = setDesiredTrafficWeightsIngress(stacksetName, desiredTraffic)
require.NoError(t, err)
err = trafficWeightsUpdatedIngress(t, stacksetName, weightKindActual, desiredTraffic, nil).withTimeout(time.Minute * 4).await()
require.NoError(t, err)
// verify that the third stack now has 3 replicas till the end of the prescaling period
for i := 1; i <= 6; i++ {
hpa, err := waitForHPA(t, fullThirdStack)
require.NoError(t, err)
require.EqualValues(t, 3, *(hpa.Spec.MinReplicas))
time.Sleep(time.Second * 10)
}
time.Sleep(time.Second * 10)
hpa, err := waitForHPA(t, fullThirdStack)
require.NoError(t, err)
require.EqualValues(t, 1, *(hpa.Spec.MinReplicas))
}
func TestPrescalingPreventDelete(t *testing.T) {
stackPrescalingTimeout := 5
t.Parallel()
stacksetName := "stackset-prevent-delete"
factory := NewTestStacksetSpecFactory(stacksetName).StackGC(1, 15).Ingress().Replicas(3)
// create stackset with first version
firstVersion := "v1"
fullFirstStack := fmt.Sprintf("%s-%s", stacksetName, firstVersion)
firstCreateTimestamp := time.Now()
err := createStackSet(stacksetName, stackPrescalingTimeout, factory.Create(firstVersion))
require.NoError(t, err)
_, err = waitForDeployment(t, fullFirstStack)
require.NoError(t, err)
_, err = waitForIngress(t, fullFirstStack)
require.NoError(t, err)
// update stackset with second version
secondVersion := "v2"
fullSecondStack := fmt.Sprintf("%s-%s", stacksetName, secondVersion)
secondCreateTimestamp := time.Now()
err = updateStackset(stacksetName, factory.Create(secondVersion))
require.NoError(t, err)
_, err = waitForDeployment(t, fullSecondStack)
require.NoError(t, err)
_, err = waitForIngress(t, fullSecondStack)
require.NoError(t, err)
// switch all traffic to the new stack
desiredTrafficMap := map[string]float64{
fullSecondStack: 100,
}
err = setDesiredTrafficWeightsIngress(stacksetName, desiredTrafficMap)
require.NoError(t, err)
err = trafficWeightsUpdatedIngress(t, stacksetName, weightKindActual, desiredTrafficMap, nil).withTimeout(2 * time.Minute).await()
require.NoError(t, err)
// update stackset with third version
thirdVersion := "v3"
fullThirdStack := fmt.Sprintf("%s-%s", stacksetName, thirdVersion)
thirdCreateTimestamp := time.Now()
err = updateStackset(stacksetName, factory.Create(thirdVersion))
require.NoError(t, err)
_, err = waitForDeployment(t, fullThirdStack)
require.NoError(t, err)
_, err = waitForIngress(t, fullThirdStack)
require.NoError(t, err)
desiredTrafficMap = map[string]float64{
fullThirdStack: 100,
}
err = setDesiredTrafficWeightsIngress(stacksetName, desiredTrafficMap)
require.NoError(t, err)
err = trafficWeightsUpdatedIngress(t, stacksetName, weightKindActual, desiredTrafficMap, nil).withTimeout(2 * time.Minute).await()
require.NoError(t, err)
// verify that all stack deployments are still present and their prescaling is active
for time.Now().Before(firstCreateTimestamp.Add(time.Minute * time.Duration(stackPrescalingTimeout))) {
firstDeployment, err := waitForDeployment(t, fullFirstStack)
require.NoError(t, err)
require.EqualValues(t, 3, *firstDeployment.Spec.Replicas)
time.Sleep(15 * time.Second)
}
for time.Now().Before(secondCreateTimestamp.Add(time.Minute * time.Duration(stackPrescalingTimeout))) {
secondDeployment, err := waitForDeployment(t, fullSecondStack)
require.NoError(t, err)
require.EqualValues(t, 3, *secondDeployment.Spec.Replicas)
time.Sleep(15 * time.Second)
}
for time.Now().Before(thirdCreateTimestamp.Add(time.Minute * time.Duration(stackPrescalingTimeout))) {
thirdDeployment, err := waitForDeployment(t, fullThirdStack)
require.NoError(t, err)
require.EqualValues(t, 3, *thirdDeployment.Spec.Replicas)
time.Sleep(15 * time.Second)
}
}
func TestPrescalingWaitsForBackends(t *testing.T) {
// Create 3 stacks with traffic 0.0, 50.0 & 50.0
// Switch traffic to be 0.0, 30, 70.0
// Verify that actual traffic changes correctly
t.Parallel()
stacksetName := "stackset-prescale-backends-wait"
specFactory := NewTestStacksetSpecFactory(stacksetName).Ingress().StackGC(3, 15).Replicas(3)
// create stack with 3 replicas
firstStack := "v1"
spec := specFactory.Create(firstStack)
err := createStackSet(stacksetName, 1, spec)
require.NoError(t, err)
_, err = waitForStack(t, stacksetName, firstStack)
require.NoError(t, err)
// create second stack with 3 replicas
secondStack := "v2"
spec = specFactory.Create(secondStack)
err = updateStackset(stacksetName, spec)
require.NoError(t, err)
_, err = waitForStack(t, stacksetName, secondStack)
require.NoError(t, err)
// create third stack with 3 replicas
thirdStack := "v3"
spec = specFactory.Create(thirdStack)
err = updateStackset(stacksetName, spec)
require.NoError(t, err)
_, err = waitForStack(t, stacksetName, thirdStack)
require.NoError(t, err)
_, err = waitForIngress(t, stacksetName)
require.NoError(t, err)
// switch traffic so that all three stacks are receiving 0%, 50% & 50% traffic and verify traffic has actually switched
fullFirstStack := fmt.Sprintf("%s-%s", stacksetName, firstStack)
fullSecondStack := fmt.Sprintf("%s-%s", stacksetName, secondStack)
fullThirdStack := fmt.Sprintf("%s-%s", stacksetName, thirdStack)
desiredTraffic := map[string]float64{
fullFirstStack: 0,
fullSecondStack: 50,
fullThirdStack: 50,
}
err = setDesiredTrafficWeightsIngress(stacksetName, desiredTraffic)
require.NoError(t, err)
err = trafficWeightsUpdatedIngress(t, stacksetName, weightKindActual, desiredTraffic, nil).withTimeout(time.Minute * 4).await()
require.NoError(t, err)
// switch traffic so that all three stacks are receiving 0%, 30% & 70% traffic respectively
desiredTraffic = map[string]float64{
fullFirstStack: 0,
fullSecondStack: 30,
fullThirdStack: 70,
}
err = setDesiredTrafficWeightsIngress(stacksetName, desiredTraffic)
require.NoError(t, err)
err = trafficWeightsUpdatedIngress(t, stacksetName, weightKindActual, desiredTraffic, func(actualTraffic map[string]float64) error {
// err out if the traffic for any of the stacks is outside of the expected range
if actualTraffic[fullFirstStack] > 0 {
return fmt.Errorf("%v traffic not exactly %v", actualTraffic[fullFirstStack], 0)
}
if actualTraffic[fullSecondStack] > 50 || actualTraffic[fullSecondStack] < 30 {
return fmt.Errorf("%v traffic not between %v and %v", actualTraffic[fullSecondStack], 30, 50)
}
if actualTraffic[fullThirdStack] > 70 || actualTraffic[fullThirdStack] < 50 {
return fmt.Errorf("%v traffic not between %v and %v", actualTraffic[fullThirdStack], 50, 70)
}
return nil
}).withTimeout(time.Minute * 4).await()
require.NoError(t, err)
}
| TestPrescalingWithHPA |
bootstrap.js | window._ = require('lodash');
/**
* We'll load jQuery and the Bootstrap jQuery plugin which provides support
* for JavaScript based Bootstrap features such as modals and tabs. This
* code may be modified to fit the specific needs of your application.
*/
try {
window.$ = window.jQuery = require('jquery');
} catch (e) {}
/**
* We'll load the axios HTTP library which allows us to easily issue requests
* to our Laravel back-end. This library automatically handles sending the
* CSRF token as a header based on the value of the "XSRF" token cookie.
*/
/**
* Include the es6-promise polyfill for IE11 support.
*/
require('es6-promise').polyfill();
window.axios = require('axios');
window.axios.defaults.headers.common['X-Requested-With'] = 'XMLHttpRequest';
/**
* Next we will register the CSRF Token as a common header with Axios so that
* all outgoing HTTP requests automatically have it attached. This is just
* a simple convenience so we don't have to attach every token manually.
*/
let token = document.head.querySelector('meta[name="csrf-token"]');
if (token) {
window.axios.defaults.headers.common['X-CSRF-TOKEN'] = token.content;
} else {
console.error('CSRF token not found: https://laravel.com/docs/csrf#csrf-x-csrf-token');
}
| * Echo exposes an expressive API for subscribing to channels and listening
* for events that are broadcast by Laravel. Echo and event broadcasting
* allows your team to easily build robust real-time web applications.
*/
// import Echo from 'laravel-echo'
// window.Pusher = require('pusher-js');
// window.Echo = new Echo({
// broadcaster: 'pusher',
// key: 'your-pusher-key'
// }); | /** |
priority.go | /*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package meta
import (
"fmt"
"github.com/lavalamp/client-go-flat/apimachinery/pkg/runtime/schema"
)
const (
AnyGroup = "*"
AnyVersion = "*"
AnyResource = "*"
AnyKind = "*"
)
// PriorityRESTMapper is a wrapper for automatically choosing a particular Resource or Kind
// when multiple matches are possible
type PriorityRESTMapper struct {
// Delegate is the RESTMapper to use to locate all the Kind and Resource matches
Delegate RESTMapper
// ResourcePriority is a list of priority patterns to apply to matching resources.
// The list of all matching resources is narrowed based on the patterns until only one remains.
// A pattern with no matches is skipped. A pattern with more than one match uses its
// matches as the list to continue matching against.
ResourcePriority []schema.GroupVersionResource
// KindPriority is a list of priority patterns to apply to matching kinds.
// The list of all matching kinds is narrowed based on the patterns until only one remains.
// A pattern with no matches is skipped. A pattern with more than one match uses its
// matches as the list to continue matching against.
KindPriority []schema.GroupVersionKind
}
func (m PriorityRESTMapper) String() string {
return fmt.Sprintf("PriorityRESTMapper{\n\t%v\n\t%v\n\t%v\n}", m.ResourcePriority, m.KindPriority, m.Delegate)
}
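// Illustrative sketch, not from the original file: a mapper that, when a
// lookup is ambiguous, prefers the "v1" version of any group/resource or
// kind (the delegate is assumed to be some existing RESTMapper):
//
// mapper := PriorityRESTMapper{
//     Delegate: delegate,
//     ResourcePriority: []schema.GroupVersionResource{
//         {Group: AnyGroup, Version: "v1", Resource: AnyResource},
//     },
//     KindPriority: []schema.GroupVersionKind{
//         {Group: AnyGroup, Version: "v1", Kind: AnyKind},
//     },
// }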
// ResourceFor finds all resources, then passes them through the ResourcePriority patterns to find a single matching hit.
func (m PriorityRESTMapper) ResourceFor(partiallySpecifiedResource schema.GroupVersionResource) (schema.GroupVersionResource, error) {
originalGVRs, err := m.Delegate.ResourcesFor(partiallySpecifiedResource)
if err != nil {
return schema.GroupVersionResource{}, err
}
if len(originalGVRs) == 1 {
return originalGVRs[0], nil
}
remainingGVRs := append([]schema.GroupVersionResource{}, originalGVRs...)
for _, pattern := range m.ResourcePriority {
matchedGVRs := []schema.GroupVersionResource{}
for _, gvr := range remainingGVRs {
if resourceMatches(pattern, gvr) {
matchedGVRs = append(matchedGVRs, gvr)
}
}
switch len(matchedGVRs) {
case 0:
// if you have no matches, then nothing matched this pattern; just move to the next
continue
case 1:
// one match, return
return matchedGVRs[0], nil
default:
// more than one match, use the matched hits as the list moving to the next pattern.
// this way you can have a series of selection criteria
remainingGVRs = matchedGVRs
}
}
return schema.GroupVersionResource{}, &AmbiguousResourceError{PartialResource: partiallySpecifiedResource, MatchingResources: originalGVRs}
}
// KindFor finds all kinds, then passes them through the KindPriority patterns to find a single matching hit.
func (m PriorityRESTMapper) KindFor(partiallySpecifiedResource schema.GroupVersionResource) (schema.GroupVersionKind, error) {
originalGVKs, err := m.Delegate.KindsFor(partiallySpecifiedResource)
if err != nil |
if len(originalGVKs) == 1 {
return originalGVKs[0], nil
}
remainingGVKs := append([]schema.GroupVersionKind{}, originalGVKs...)
for _, pattern := range m.KindPriority {
matchedGVKs := []schema.GroupVersionKind{}
for _, gvk := range remainingGVKs {
if kindMatches(pattern, gvk) {
matchedGVKs = append(matchedGVKs, gvk)
}
}
switch len(matchedGVKs) {
case 0:
// if you have no matches, then nothing matched this pattern, so move on to the next one
continue
case 1:
// one match, return
return matchedGVKs[0], nil
default:
// more than one match: use the matched hits as the list for the next pattern.
// this way you can apply a series of selection criteria
remainingGVKs = matchedGVKs
}
}
return schema.GroupVersionKind{}, &AmbiguousResourceError{PartialResource: partiallySpecifiedResource, MatchingKinds: originalGVKs}
}
func resourceMatches(pattern schema.GroupVersionResource, resource schema.GroupVersionResource) bool {
if pattern.Group != AnyGroup && pattern.Group != resource.Group {
return false
}
if pattern.Version != AnyVersion && pattern.Version != resource.Version {
return false
}
if pattern.Resource != AnyResource && pattern.Resource != resource.Resource {
return false
}
return true
}
func kindMatches(pattern schema.GroupVersionKind, kind schema.GroupVersionKind) bool {
if pattern.Group != AnyGroup && pattern.Group != kind.Group {
return false
}
if pattern.Version != AnyVersion && pattern.Version != kind.Version {
return false
}
if pattern.Kind != AnyKind && pattern.Kind != kind.Kind {
return false
}
return true
}
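// For illustration (not in the original source): the Any* wildcards match any
// value in their position, so
//
//	kindMatches(
//	    schema.GroupVersionKind{Group: AnyGroup, Version: "v1", Kind: AnyKind},
//	    schema.GroupVersionKind{Group: "apps", Version: "v1", Kind: "Deployment"},
//	)
//
// returns true, while the same pattern with Version "v2" would return false.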
func (m PriorityRESTMapper) RESTMapping(gk schema.GroupKind, versions ...string) (mapping *RESTMapping, err error) {
mappings, err := m.Delegate.RESTMappings(gk)
if err != nil {
return nil, err
}
// any versions the user provides take priority
priorities := m.KindPriority
if len(versions) > 0 {
priorities = make([]schema.GroupVersionKind, 0, len(m.KindPriority)+len(versions))
for _, version := range versions {
gv := schema.GroupVersion{
Version: version,
Group: gk.Group,
}
priorities = append(priorities, gv.WithKind(AnyKind))
}
priorities = append(priorities, m.KindPriority...)
}
remaining := append([]*RESTMapping{}, mappings...)
for _, pattern := range priorities {
var matching []*RESTMapping
for _, m := range remaining {
if kindMatches(pattern, m.GroupVersionKind) {
matching = append(matching, m)
}
}
switch len(matching) {
case 0:
// if you have no matches, then nothing matched this pattern, so move on to the next one
continue
case 1:
// one match, return
return matching[0], nil
default:
// more than one match: use the matched hits as the list for the next pattern.
// this way you can apply a series of selection criteria
remaining = matching
}
}
if len(remaining) == 1 {
return remaining[0], nil
}
var kinds []schema.GroupVersionKind
for _, m := range mappings {
kinds = append(kinds, m.GroupVersionKind)
}
return nil, &AmbiguousKindError{PartialKind: gk.WithVersion(""), MatchingKinds: kinds}
}
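// For illustration (not in the original source): a call such as
// RESTMapping(gk, "v2", "v1") prepends the wildcard patterns {gk.Group, "v2", *}
// and {gk.Group, "v1", *} to m.KindPriority, so caller-supplied versions are
// tried before the mapper's own priorities.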
func (m PriorityRESTMapper) RESTMappings(gk schema.GroupKind, versions ...string) ([]*RESTMapping, error) {
return m.Delegate.RESTMappings(gk, versions...)
}
func (m PriorityRESTMapper) AliasesForResource(alias string) (aliases []string, ok bool) {
return m.Delegate.AliasesForResource(alias)
}
func (m PriorityRESTMapper) ResourceSingularizer(resource string) (singular string, err error) {
return m.Delegate.ResourceSingularizer(resource)
}
func (m PriorityRESTMapper) ResourcesFor(partiallySpecifiedResource schema.GroupVersionResource) ([]schema.GroupVersionResource, error) {
return m.Delegate.ResourcesFor(partiallySpecifiedResource)
}
func (m PriorityRESTMapper) KindsFor(partiallySpecifiedResource schema.GroupVersionResource) (gvk []schema.GroupVersionKind, err error) {
return m.Delegate.KindsFor(partiallySpecifiedResource)
}
| {
return schema.GroupVersionKind{}, err
} |
build_util.rs | use crate::config::StateroomConfig;
use anyhow::{anyhow, Result};
use cargo_metadata::Message;
use std::{
fs::read_to_string,
path::PathBuf,
process::{Command, Stdio},
};
use wasm_bindgen_cli_support::Bindgen;
pub fn locate_config() -> anyhow::Result<StateroomConfig> {
if let Ok(r) = read_to_string("stateroom.toml") {
tracing::info!("Loading config from file (stateroom.toml)");
toml::from_str(&r).map_err(|e| e.into())
} else {
tracing::info!("Didn't find a stateroom.toml file in current directory, using default");
Ok(StateroomConfig::default())
}
}
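// Illustrative `stateroom.toml` (a sketch; the field names are assumptions
// inferred from how `do_build` reads `StateroomConfig` below):
//
//     [service]
//     package = "my-service"
//
//     [client]
//     package = "my-client"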
pub fn run_cargo_build_command(
package: &Option<String>,
target: &str,
release: bool,
) -> Result<PathBuf> {
let mut build_command = Command::new("cargo");
build_command.stdout(Stdio::piped());
build_command.arg("build");
build_command.args(["--message-format", "json-render-diagnostics"]);
// If package is None, build the package we are in.
if let Some(package) = package {
build_command.args(["--package", package]);
}
build_command.args(["--target", target]);
if release {
build_command.arg("--release");
}
let mut build_command = build_command.spawn()?;
let reader = std::io::BufReader::new(
build_command
.stdout
.take()
.ok_or_else(|| anyhow!("Could not read stdout stream."))?,
);
let mut found_wasm_modules = Vec::new();
for message in cargo_metadata::Message::parse_stream(reader) {
match message {
// TODO: handle error when toolchain is not installed, and retry after
// attempting to install toolchain.
Ok(Message::CompilerArtifact(artifact)) => {
for filename in artifact.filenames {
if filename
.extension()
.map_or(false, |ext| ext.to_ascii_lowercase() == "wasm")
{
found_wasm_modules.push(filename);
}
}
}
Ok(Message::BuildFinished(finished)) => {
if !finished.success {
return Err(anyhow!("Build error."));
}
}
Err(e) => return Err(anyhow!("Unknown error during build: {:?}.", e)),
_ => (),
}
}
build_command
.wait()
.map_err(|e| anyhow!("Encountered OS error running build subprocess: {:?}", e))?;
let result = match found_wasm_modules.as_slice() {
[] => return Err(anyhow!("No .wasm files emitted by build.")),
[a] => a,
_ => return Err(anyhow!("Multiple .wasm files emitted by build.")),
};
Ok(result.into())
}
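// Illustrative usage (a sketch; "my-service" is a hypothetical package name):
//
//     let wasm_path = run_cargo_build_command(
//         &Some("my-service".to_string()),
//         "wasm32-wasi",
//         true,
//     )?;
//     println!("built {}", wasm_path.display());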
pub struct BuildResult {
pub server_wasm: String,
pub client_wasm: Option<String>,
}
pub fn | (config: &StateroomConfig) -> Result<BuildResult> {
tracing::info!("Building service");
let server_wasm = run_cargo_build_command(&config.service.package, "wasm32-wasi", true)?;
let client_wasm = if let Some(client_config) = &config.client {
tracing::info!("Building client");
let client_wasm_path = run_cargo_build_command(
&Some(client_config.package.to_string()),
"wasm32-unknown-unknown",
true,
)?;
Bindgen::new()
.input_path(client_wasm_path)
.web(true)?
.emit_start(false)
.generate("client-pkg")?;
// TODO: run wasm-opt
Some("client-pkg".to_string())
} else {
None
};
Ok(BuildResult {
client_wasm,
server_wasm: server_wasm.to_str().unwrap().to_string(),
})
}
| do_build |
keystone_test.go | package exporters |
||
plugin.min.js | /**
* Copyright (c) Tiny Technologies, Inc. All rights reserved. | *
* Version: 5.6.0 (2020-11-18)
*/
!function(){"use strict";var e,n,t,r=tinymce.util.Tools.resolve("tinymce.PluginManager"),s=function(e){return function(){return e}},i=s(!1),o=s(!0),a=function(){return l},l=(e=function(e){return e.isNone()},{fold:function(e,n){return e()},is:i,isSome:i,isNone:o,getOr:t=function(e){return e},getOrThunk:n=function(e){return e()},getOrDie:function(e){throw new Error(e||"error: getOrDie called on none.")},getOrNull:s(null),getOrUndefined:s(undefined),or:t,orThunk:n,map:a,each:function(){},bind:a,exists:i,forall:o,filter:a,equals:e,equals_:e,toArray:function(){return[]},toString:s("none()")}),u=function(t){var e=s(t),n=function(){return a},r=function(e){return e(t)},a={fold:function(e,n){return n(t)},is:function(e){return t===e},isSome:o,isNone:i,getOr:e,getOrThunk:e,getOrDie:e,getOrNull:e,getOrUndefined:e,or:n,orThunk:n,map:function(e){return u(e(t))},each:function(e){e(t)},bind:r,exists:r,forall:r,filter:function(e){return e(t)?a:l},toArray:function(){return[t]},toString:function(){return"some("+t+")"},equals:function(e){return e.is(t)},equals_:function(e,n){return e.fold(i,function(e){return n(t,e)})}};return a},c={some:u,none:a,from:function(e){return null===e||e===undefined?l:u(e)}},p=function(e){return n=e,(t=0)<=t&&t<n.length?c.some(n[t]):c.none();var n,t},d=tinymce.util.Tools.resolve("tinymce.dom.DOMUtils");function g(e){return e&&"PRE"===e.nodeName&&-1!==e.className.indexOf("language-")}function m(t){return function(e,n){return t(n)}}var f="undefined"!=typeof window?window:Function("return this;")(),h={},b={exports:h},y={};!function(n,t,r,p){var e=window.Prism;window.Prism={manual:!0},function(e){"object"==typeof t&&void 0!==r?r.exports=e():"function"==typeof n&&n.amd?n([],e):("undefined"!=typeof window?window:void 0!==y?y:"undefined"!=typeof self?self:this).EphoxContactWrapper=e()}(function(){return function c(s,i,o){function l(n,e){if(!i[n]){if(!s[n]){var t="function"==typeof p&&p;if(!e&&t)return t(n,!0);if(u)return u(n,!0);var r=new Error("Cannot find module '"+n+"'");throw r.code="MODULE_NOT_FOUND",r}var a=i[n]={exports:{}};s[n][0].call(a.exports,function(e){return l(s[n][1][e]||e)},a,a.exports,c,s,i,o)}return i[n].exports}for(var u="function"==typeof p&&p,e=0;e<o.length;e++)l(o[e]);return 
l}({1:[function(e,n,t){Prism.languages.c=Prism.languages.extend("clike",{comment:{pattern:/\/\/(?:[^\r\n\\]|\\(?:\r\n?|\n|(?![\r\n])))*|\/\*[\s\S]*?(?:\*\/|$)/,greedy:!0},"class-name":{pattern:/(\b(?:enum|struct)\s+(?:__attribute__\s*\(\([\s\S]*?\)\)\s*)?)\w+/,lookbehind:!0},keyword:/\b(?:__attribute__|_Alignas|_Alignof|_Atomic|_Bool|_Complex|_Generic|_Imaginary|_Noreturn|_Static_assert|_Thread_local|asm|typeof|inline|auto|break|case|char|const|continue|default|do|double|else|enum|extern|float|for|goto|if|int|long|register|return|short|signed|sizeof|static|struct|switch|typedef|union|unsigned|void|volatile|while)\b/,"function":/[a-z_]\w*(?=\s*\()/i,operator:/>>=?|<<=?|->|([-+&|:])\1|[?:~]|[-+*/%&|^!=<>]=?/,number:/(?:\b0x(?:[\da-f]+\.?[\da-f]*|\.[\da-f]+)(?:p[+-]?\d+)?|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?)[ful]*/i}),Prism.languages.insertBefore("c","string",{macro:{pattern:/(^\s*)#\s*[a-z]+(?:[^\r\n\\/]|\/(?!\*)|\/\*(?:[^*]|\*(?!\/))*\*\/|\\(?:\r\n|[\s\S]))*/im,lookbehind:!0,greedy:!0,alias:"property",inside:{string:[{pattern:/^(#\s*include\s*)<[^>]+>/,lookbehind:!0},Prism.languages.c.string],comment:Prism.languages.c.comment,directive:{pattern:/^(#\s*)[a-z]+/,lookbehind:!0,alias:"keyword"},"directive-hash":/^#/,punctuation:/##|\\(?=[\r\n])/,expression:{pattern:/\S[\s\S]*/,inside:Prism.languages.c}}},constant:/\b(?:__FILE__|__LINE__|__DATE__|__TIME__|__TIMESTAMP__|__func__|EOF|NULL|SEEK_CUR|SEEK_END|SEEK_SET|stdin|stdout|stderr)\b/}),delete Prism.languages.c["boolean"]},{}],2:[function(e,n,t){Prism.languages.clike={comment:[{pattern:/(^|[^\\])\/\*[\s\S]*?(?:\*\/|$)/,lookbehind:!0},{pattern:/(^|[^\\:])\/\/.*/,lookbehind:!0,greedy:!0}],string:{pattern:/(["'])(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0},"class-name":{pattern:/(\b(?:class|interface|extends|implements|trait|instanceof|new)\s+|\bcatch\s+\()[\w.\\]+/i,lookbehind:!0,inside:{punctuation:/[.\\]/}},keyword:/\b(?:if|else|while|do|for|return|in|instanceof|function|new|try|throw|catch|finally|null|break|continue)\b/,"boolean":/\b(?:true|false)\b/,"function":/\w+(?=\()/,number:/\b0x[\da-f]+\b|(?:\b\d+\.?\d*|\B\.\d+)(?:e[+-]?\d+)?/i,operator:/[<>]=?|[!=]=?=?|--?|\+\+?|&&?|\|\|?|[?*/~^%]/,punctuation:/[{}[\];(),.:]/}},{}],3:[function(e,t,n){(function(n){(function(){var e=function(u){var c=/\blang(?:uage)?-([\w-]+)\b/i,n=0,O={manual:u.Prism&&u.Prism.manual,disableWorkerMessageHandler:u.Prism&&u.Prism.disableWorkerMessageHandler,util:{encode:function a(e){return e instanceof N?new N(e.type,a(e.content),e.alias):Array.isArray(e)?e.map(a):e.replace(/&/g,"&").replace(/</g,"<").replace(/\u00a0/g," ")},type:function(e){return Object.prototype.toString.call(e).slice(8,-1)},objId:function(e){return e.__id||Object.defineProperty(e,"__id",{value:++n}),e.__id},clone:function i(e,t){var r,n;switch(t=t||{},O.util.type(e)){case"Object":if(n=O.util.objId(e),t[n])return t[n];for(var a in r={},t[n]=r,e)e.hasOwnProperty(a)&&(r[a]=i(e[a],t));return r;case"Array":return(n=O.util.objId(e),t[n])?t[n]:(r=[],t[n]=r,e.forEach(function(e,n){r[n]=i(e,t)}),r);default:return e}},getLanguage:function(e){for(;e&&!c.test(e.className);)e=e.parentElement;return e?(e.className.match(c)||[,"none"])[1].toLowerCase():"none"},currentScript:function(){if("undefined"==typeof document)return null;if("currentScript"in document)return document.currentScript;try{throw new Error}catch(r){var e=(/at [^(\r\n]*\((.*):.+:.+\)$/i.exec(r.stack)||[])[1];if(e){var n=document.getElementsByTagName("script");for(var t in n)if(n[t].src==e)return n[t]}return 
null}},isActive:function(e,n,t){for(var r="no-"+n;e;){var a=e.classList;if(a.contains(n))return!0;if(a.contains(r))return!1;e=e.parentElement}return!!t}},languages:{extend:function(e,n){var t=O.util.clone(O.languages[e]);for(var r in n)t[r]=n[r];return t},insertBefore:function(t,e,n,r){var a=(r=r||O.languages)[t],s={};for(var i in a)if(a.hasOwnProperty(i)){if(i==e)for(var o in n)n.hasOwnProperty(o)&&(s[o]=n[o]);n.hasOwnProperty(i)||(s[i]=a[i])}var l=r[t];return r[t]=s,O.languages.DFS(O.languages,function(e,n){n===l&&e!=t&&(this[e]=s)}),s},DFS:function l(e,n,t,r){r=r||{};var a,s,i=O.util.objId;for(var o in e){e.hasOwnProperty(o)&&(n.call(e,o,e[o],t||o),a=e[o],"Object"!==(s=O.util.type(a))||r[i(a)]?"Array"!==s||r[i(a)]||(r[i(a)]=!0,l(a,n,o,r)):(r[i(a)]=!0,l(a,n,null,r)))}}},plugins:{},highlightAll:function(e,n){O.highlightAllUnder(document,e,n)},highlightAllUnder:function(e,n,t){var r={callback:t,container:e,selector:'code[class*="language-"], [class*="language-"] code, code[class*="lang-"], [class*="lang-"] code'};O.hooks.run("before-highlightall",r),r.elements=Array.prototype.slice.apply(r.container.querySelectorAll(r.selector)),O.hooks.run("before-all-elements-highlight",r);for(var a,s=0;a=r.elements[s++];)O.highlightElement(a,!0===n,r.callback)},highlightElement:function(e,n,t){var r=O.util.getLanguage(e),a=O.languages[r];e.className=e.className.replace(c,"").replace(/\s+/g," ")+" language-"+r;var s=e.parentElement;s&&"pre"===s.nodeName.toLowerCase()&&(s.className=s.className.replace(c,"").replace(/\s+/g," ")+" language-"+r);var i,o={element:e,language:r,grammar:a,code:e.textContent};function l(e){o.highlightedCode=e,O.hooks.run("before-insert",o),o.element.innerHTML=o.highlightedCode,O.hooks.run("after-highlight",o),O.hooks.run("complete",o),t&&t.call(o.element)}if(O.hooks.run("before-sanity-check",o),!o.code)return O.hooks.run("complete",o),void(t&&t.call(o.element));O.hooks.run("before-highlight",o),o.grammar?n&&u.Worker?((i=new Worker(O.filename)).onmessage=function(e){l(e.data)},i.postMessage(JSON.stringify({language:o.language,code:o.code,immediateClose:!0}))):l(O.highlight(o.code,o.grammar,o.language)):l(O.util.encode(o.code))},highlight:function(e,n,t){var r={code:e,grammar:n,language:t};return O.hooks.run("before-tokenize",r),r.tokens=O.tokenize(r.code,r.grammar),O.hooks.run("after-tokenize",r),N.stringify(O.util.encode(r.tokens),r.language)},tokenize:function(e,n){var t=n.rest;if(t){for(var r in t)n[r]=t[r];delete n.rest}var a=new s;return B(a,a.head,e),function C(e,n,t,r,a,s){for(var i in t)if(t.hasOwnProperty(i)&&t[i]){var o=t[i];o=Array.isArray(o)?o:[o];for(var l=0;l<o.length;++l){if(s&&s.cause==i+","+l)return;var u,c=o[l],p=c.inside,d=!!c.lookbehind,g=!!c.greedy,m=0,f=c.alias;g&&!c.pattern.global&&(u=c.pattern.toString().match(/[imsuy]*$/)[0],c.pattern=RegExp(c.pattern.source,u+"g"));for(var h=c.pattern||c,b=r.next,y=a;b!==n.tail&&!(s&&y>=s.reach);y+=b.value.length,b=b.next){var v=b.value;if(n.length>e.length)return;if(!(v instanceof N)){var w,k,x,_,P,F,A=1;if(g&&b!=n.tail.prev){h.lastIndex=y;var S=h.exec(e);if(!S)break;var $=S.index+(d&&S[1]?S[1].length:0),j=S.index+S[0].length,E=y;for(E+=b.value.length;E<=$;)b=b.next,E+=b.value.length;if(E-=b.value.length,y=E,b.value instanceof N)continue;for(var z=b;z!==n.tail&&(E<j||"string"==typeof z.value);z=z.next)A++,E+=z.value.length;A--,v=e.slice(y,E),S.index-=y}else{h.lastIndex=0;var 
S=h.exec(v)}S&&(d&&(m=S[1]?S[1].length:0),$=S.index+m,w=S[0].slice(m),j=$+w.length,k=v.slice(0,$),x=v.slice(j),_=y+v.length,s&&_>s.reach&&(s.reach=_),P=b.prev,k&&(P=B(n,P,k),y+=k.length),T(n,P,A),F=new N(i,p?O.tokenize(w,p):w,f,w),b=B(n,P,F),x&&B(n,b,x),1<A&&C(e,n,t,b.prev,y,{cause:i+","+l,reach:_}))}}}}}(e,a,n,a.head,0),function(e){var n=[],t=e.head.next;for(;t!==e.tail;)n.push(t.value),t=t.next;return n}(a)},hooks:{all:{},add:function(e,n){var t=O.hooks.all;t[e]=t[e]||[],t[e].push(n)},run:function(e,n){var t=O.hooks.all[e];if(t&&t.length)for(var r,a=0;r=t[a++];)r(n)}},Token:N};function N(e,n,t,r){this.type=e,this.content=n,this.alias=t,this.length=0|(r||"").length}function s(){var e={value:null,prev:null,next:null},n={value:null,prev:e,next:null};e.next=n,this.head=e,this.tail=n,this.length=0}function B(e,n,t){var r=n.next,a={value:t,prev:n,next:r};return n.next=a,r.prev=a,e.length++,a}function T(e,n,t){for(var r=n.next,a=0;a<t&&r!==e.tail;a++)r=r.next;(n.next=r).prev=n,e.length-=a}if(u.Prism=O,N.stringify=function o(e,n){if("string"==typeof e)return e;if(Array.isArray(e)){var t="";return e.forEach(function(e){t+=o(e,n)}),t}var r={type:e.type,content:o(e.content,n),tag:"span",classes:["token",e.type],attributes:{},language:n},a=e.alias;a&&(Array.isArray(a)?Array.prototype.push.apply(r.classes,a):r.classes.push(a)),O.hooks.run("wrap",r);var s="";for(var i in r.attributes)s+=" "+i+'="'+(r.attributes[i]||"").replace(/"/g,""")+'"';return"<"+r.tag+' class="'+r.classes.join(" ")+'"'+s+">"+r.content+"</"+r.tag+">"},!u.document)return u.addEventListener&&(O.disableWorkerMessageHandler||u.addEventListener("message",function(e){var n=JSON.parse(e.data),t=n.language,r=n.code,a=n.immediateClose;u.postMessage(O.highlight(r,O.languages[t],t)),a&&u.close()},!1)),O;var e,t=O.util.currentScript();function r(){O.manual||O.highlightAll()}return t&&(O.filename=t.src,t.hasAttribute("data-manual")&&(O.manual=!0)),O.manual||("loading"===(e=document.readyState)||"interactive"===e&&t&&t.defer?document.addEventListener("DOMContentLoaded",r):window.requestAnimationFrame?window.requestAnimationFrame(r):window.setTimeout(r,16)),O}("undefined"!=typeof window?window:"undefined"!=typeof WorkerGlobalScope&&self instanceof WorkerGlobalScope?self:{});void 0!==t&&t.exports&&(t.exports=e),void 0!==n&&(n.Prism=e)}).call(this)}).call(this,void 0!==y?y:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}],4:[function(e,n,t){var r,a;r=Prism,a=/\b(?:alignas|alignof|asm|auto|bool|break|case|catch|char|char8_t|char16_t|char32_t|class|compl|concept|const|consteval|constexpr|constinit|const_cast|continue|co_await|co_return|co_yield|decltype|default|delete|do|double|dynamic_cast|else|enum|explicit|export|extern|float|for|friend|goto|if|inline|int|int8_t|int16_t|int32_t|int64_t|uint8_t|uint16_t|uint32_t|uint64_t|long|mutable|namespace|new|noexcept|nullptr|operator|private|protected|public|register|reinterpret_cast|requires|return|short|signed|sizeof|static|static_assert|static_cast|struct|switch|template|this|thread_local|throw|try|typedef|typeid|typename|union|unsigned|using|virtual|void|volatile|wchar_t|while)\b/,r.languages.cpp=r.languages.extend("c",{"class-name":[{pattern:RegExp(/(\b(?:class|concept|enum|struct|typename)\s+)(?!<keyword>)\w+/.source.replace(/<keyword>/g,function(){return 
a.source})),lookbehind:!0},/\b[A-Z]\w*(?=\s*::\s*\w+\s*\()/,/\b[A-Z_]\w*(?=\s*::\s*~\w+\s*\()/i,/\w+(?=\s*<(?:[^<>]|<(?:[^<>]|<[^<>]*>)*>)*>\s*::\s*\w+\s*\()/],keyword:a,number:{pattern:/(?:\b0b[01']+|\b0x(?:[\da-f']+\.?[\da-f']*|\.[\da-f']+)(?:p[+-]?[\d']+)?|(?:\b[\d']+\.?[\d']*|\B\.[\d']+)(?:e[+-]?[\d']+)?)[ful]*/i,greedy:!0},operator:/>>=?|<<=?|->|([-+&|:])\1|[?:~]|<=>|[-+*/%&|^!=<>]=?|\b(?:and|and_eq|bitand|bitor|not|not_eq|or|or_eq|xor|xor_eq)\b/,"boolean":/\b(?:true|false)\b/}),r.languages.insertBefore("cpp","string",{"raw-string":{pattern:/R"([^()\\ ]{0,16})\([\s\S]*?\)\1"/,alias:"string",greedy:!0}}),r.languages.insertBefore("cpp","class-name",{"base-clause":{pattern:/(\b(?:class|struct)\s+\w+\s*:\s*)(?:[^;{}"'])+?(?=\s*[;{])/,lookbehind:!0,greedy:!0,inside:r.languages.extend("cpp",{})}}),r.languages.insertBefore("inside","operator",{"class-name":/\b[a-z_]\w*\b(?!\s*::)/i},r.languages.cpp["base-clause"])},{}],5:[function(e,n,t){!function(t){function r(e,t){return e.replace(/<<(\d+)>>/g,function(e,n){return"(?:"+t[+n]+")"})}function a(e,n,t){return RegExp(r(e,n),t||"")}function e(e,n){for(var t=0;t<n;t++)e=e.replace(/<<self>>/g,function(){return"(?:"+e+")"});return e.replace(/<<self>>/g,"[^\\s\\S]")}var n="bool byte char decimal double dynamic float int long object sbyte short string uint ulong ushort var void",s="class enum interface struct",i="add alias and ascending async await by descending from get global group into join let nameof not notnull on or orderby partial remove select set unmanaged value when where where",o="abstract as base break case catch checked const continue default delegate do else event explicit extern finally fixed for foreach goto if implicit in internal is lock namespace new null operator out override params private protected public readonly ref return sealed sizeof stackalloc static switch this throw try typeof unchecked unsafe using virtual volatile while yield";function l(e){return"\\b(?:"+e.trim().replace(/ /g,"|")+")\\b"}var u=l(s),c=RegExp(l(n+" "+s+" "+i+" "+o)),p=l(s+" "+i+" "+o),d=l(n+" "+s+" 
"+o),g=e(/<(?:[^<>;=+\-*/%&|^]|<<self>>)*>/.source,2),m=e(/\((?:[^()]|<<self>>)*\)/.source,2),f=/@?\b[A-Za-z_]\w*\b/.source,h=r(/<<0>>(?:\s*<<1>>)?/.source,[f,g]),b=r(/(?!<<0>>)<<1>>(?:\s*\.\s*<<1>>)*/.source,[p,h]),y=/\[\s*(?:,\s*)*\]/.source,v=r(/<<0>>(?:\s*(?:\?\s*)?<<1>>)*(?:\s*\?)?/.source,[b,y]),w=r(/[^,()<>[\];=+\-*/%&|^]|<<0>>|<<1>>|<<2>>/.source,[g,m,y]),k=r(/\(<<0>>+(?:,<<0>>+)+\)/.source,[w]),x=r(/(?:<<0>>|<<1>>)(?:\s*(?:\?\s*)?<<2>>)*(?:\s*\?)?/.source,[k,b,y]),_={keyword:c,punctuation:/[<>()?,.:[\]]/},P=/'(?:[^\r\n'\\]|\\.|\\[Uux][\da-fA-F]{1,8})'/.source,F=/"(?:\\.|[^\\"\r\n])*"/.source,A=/@"(?:""|\\[\s\S]|[^\\"])*"(?!")/.source;t.languages.csharp=t.languages.extend("clike",{string:[{pattern:a(/(^|[^$\\])<<0>>/.source,[A]),lookbehind:!0,greedy:!0},{pattern:a(/(^|[^@$\\])<<0>>/.source,[F]),lookbehind:!0,greedy:!0},{pattern:RegExp(P),greedy:!0,alias:"character"}],"class-name":[{pattern:a(/(\busing\s+static\s+)<<0>>(?=\s*;)/.source,[b]),lookbehind:!0,inside:_},{pattern:a(/(\busing\s+<<0>>\s*=\s*)<<1>>(?=\s*;)/.source,[f,x]),lookbehind:!0,inside:_},{pattern:a(/(\busing\s+)<<0>>(?=\s*=)/.source,[f]),lookbehind:!0},{pattern:a(/(\b<<0>>\s+)<<1>>/.source,[u,h]),lookbehind:!0,inside:_},{pattern:a(/(\bcatch\s*\(\s*)<<0>>/.source,[b]),lookbehind:!0,inside:_},{pattern:a(/(\bwhere\s+)<<0>>/.source,[f]),lookbehind:!0},{pattern:a(/(\b(?:is(?:\s+not)?|as)\s+)<<0>>/.source,[v]),lookbehind:!0,inside:_},{pattern:a(/\b<<0>>(?=\s+(?!<<1>>)<<2>>(?:\s*[=,;:{)\]]|\s+(?:in|when)\b))/.source,[x,d,f]),inside:_}],keyword:c,number:/(?:\b0(?:x[\da-f_]*[\da-f]|b[01_]*[01])|(?:\B\.\d+(?:_+\d+)*|\b\d+(?:_+\d+)*(?:\.\d+(?:_+\d+)*)?)(?:e[-+]?\d+(?:_+\d+)*)?)(?:ul|lu|[dflmu])?\b/i,operator:/>>=?|<<=?|[-=]>|([-+&|])\1|~|\?\?=?|[-+*/%&|^!=<>]=?/,punctuation:/\?\.?|::|[{}[\];(),.:]/}),t.languages.insertBefore("csharp","number",{range:{pattern:/\.\./,alias:"operator"}}),t.languages.insertBefore("csharp","punctuation",{"named-parameter":{pattern:a(/([(,]\s*)<<0>>(?=\s*:)/.source,[f]),lookbehind:!0,alias:"punctuation"}}),t.languages.insertBefore("csharp","class-name",{namespace:{pattern:a(/(\b(?:namespace|using)\s+)<<0>>(?:\s*\.\s*<<0>>)*(?=\s*[;{])/.source,[f]),lookbehind:!0,inside:{punctuation:/\./}},"type-expression":{pattern:a(/(\b(?:default|typeof|sizeof)\s*\(\s*)(?:[^()\s]|\s(?!\s*\))|<<0>>)*(?=\s*\))/.source,[m]),lookbehind:!0,alias:"class-name",inside:_},"return-type":{pattern:a(/<<0>>(?=\s+(?:<<1>>\s*(?:=>|[({]|\.\s*this\s*\[)|this\s*\[))/.source,[x,b]),inside:_,alias:"class-name"},"constructor-invocation":{pattern:a(/(\bnew\s+)<<0>>(?=\s*[[({])/.source,[x]),lookbehind:!0,inside:_,alias:"class-name"},"generic-method":{pattern:a(/<<0>>\s*<<1>>(?=\s*\()/.source,[f,g]),inside:{"function":a(/^<<0>>/.source,[f]),generic:{pattern:RegExp(g),alias:"class-name",inside:_}}},"type-list":{pattern:a(/\b((?:<<0>>\s+<<1>>|where\s+<<2>>)\s*:\s*)(?:<<3>>|<<4>>)(?:\s*,\s*(?:<<3>>|<<4>>))*(?=\s*(?:where|[{;]|=>|$))/.source,[u,h,f,x,c.source]),lookbehind:!0,inside:{keyword:c,"class-name":{pattern:RegExp(x),greedy:!0,inside:_},punctuation:/,/}},preprocessor:{pattern:/(^\s*)#.*/m,lookbehind:!0,alias:"property",inside:{directive:{pattern:/(\s*#)\b(?:define|elif|else|endif|endregion|error|if|line|pragma|region|undef|warning)\b/,lookbehind:!0,alias:"keyword"}}}});var 
S=F+"|"+P,$=r(/\/(?![*/])|\/\/[^\r\n]*[\r\n]|\/\*(?:[^*]|\*(?!\/))*\*\/|<<0>>/.source,[S]),j=e(r(/[^"'/()]|<<0>>|\(<<self>>*\)/.source,[$]),2),E=/\b(?:assembly|event|field|method|module|param|property|return|type)\b/.source,z=r(/<<0>>(?:\s*\(<<1>>*\))?/.source,[b,j]);t.languages.insertBefore("csharp","class-name",{attribute:{pattern:a(/((?:^|[^\s\w>)?])\s*\[\s*)(?:<<0>>\s*:\s*)?<<1>>(?:\s*,\s*<<1>>)*(?=\s*\])/.source,[E,z]),lookbehind:!0,greedy:!0,inside:{target:{pattern:a(/^<<0>>(?=\s*:)/.source,[E]),alias:"keyword"},"attribute-arguments":{pattern:a(/\(<<0>>*\)/.source,[j]),inside:t.languages.csharp},"class-name":{pattern:RegExp(b),inside:{punctuation:/\./}},punctuation:/[:,]/}}});var C=/:[^}\r\n]+/.source,O=e(r(/[^"'/()]|<<0>>|\(<<self>>*\)/.source,[$]),2),N=r(/\{(?!\{)(?:(?![}:])<<0>>)*<<1>>?\}/.source,[O,C]),B=e(r(/[^"'/()]|\/(?!\*)|\/\*(?:[^*]|\*(?!\/))*\*\/|<<0>>|\(<<self>>*\)/.source,[S]),2),T=r(/\{(?!\{)(?:(?![}:])<<0>>)*<<1>>?\}/.source,[B,C]);function D(e,n){return{interpolation:{pattern:a(/((?:^|[^{])(?:\{\{)*)<<0>>/.source,[e]),lookbehind:!0,inside:{"format-string":{pattern:a(/(^\{(?:(?![}:])<<0>>)*)<<1>>(?=\}$)/.source,[n,C]),lookbehind:!0,inside:{punctuation:/^:/}},punctuation:/^\{|\}$/,expression:{pattern:/[\s\S]+/,alias:"language-csharp",inside:t.languages.csharp}}},string:/[\s\S]+/}}t.languages.insertBefore("csharp","string",{"interpolation-string":[{pattern:a(/(^|[^\\])(?:\$@|@\$)"(?:""|\\[\s\S]|\{\{|<<0>>|[^\\{"])*"/.source,[N]),lookbehind:!0,greedy:!0,inside:D(N,O)},{pattern:a(/(^|[^@\\])\$"(?:\\.|\{\{|<<0>>|[^\\"{])*"/.source,[T]),lookbehind:!0,greedy:!0,inside:D(T,B)}]})}(Prism),Prism.languages.dotnet=Prism.languages.cs=Prism.languages.csharp},{}],6:[function(e,n,t){!function(e){var n=/("|')(?:\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/;e.languages.css={comment:/\/\*[\s\S]*?\*\//,atrule:{pattern:/@[\w-]+[\s\S]*?(?:;|(?=\s*\{))/,inside:{rule:/^@[\w-]+/,"selector-function-argument":{pattern:/(\bselector\s*\((?!\s*\))\s*)(?:[^()]|\((?:[^()]|\([^()]*\))*\))+?(?=\s*\))/,lookbehind:!0,alias:"selector"},keyword:{pattern:/(^|[^\w-])(?:and|not|only|or)(?![\w-])/,lookbehind:!0}}},url:{pattern:RegExp("\\burl\\((?:"+n.source+"|"+/(?:[^\\\r\n()"']|\\[\s\S])*/.source+")\\)","i"),greedy:!0,inside:{"function":/^url/i,punctuation:/^\(|\)$/,string:{pattern:RegExp("^"+n.source+"$"),alias:"url"}}},selector:RegExp("[^{}\\s](?:[^{};\"']|"+n.source+")*?(?=\\s*\\{)"),string:{pattern:n,greedy:!0},property:/[-_a-z\xA0-\uFFFF][-\w\xA0-\uFFFF]*(?=\s*:)/i,important:/!important\b/i,"function":/[-a-z0-9]+(?=\()/i,punctuation:/[(){};:,]/},e.languages.css.atrule.inside.rest=e.languages.css;var t=e.languages.markup;t&&(t.tag.addInlined("style","css"),e.languages.insertBefore("inside","attr-value",{"style-attr":{pattern:/\s*style=("|')(?:\\[\s\S]|(?!\1)[^\\])*\1/i,inside:{"attr-name":{pattern:/^\s*style/i,inside:t.tag.inside},punctuation:/^\s*=\s*['"]|['"]\s*$/,"attr-value":{pattern:/.+/i,inside:e.languages.css}},alias:"language-css"}},t.tag))}(Prism)},{}],7:[function(e,n,t){var 
r,a,s;r=Prism,a=/\b(?:abstract|assert|boolean|break|byte|case|catch|char|class|const|continue|default|do|double|else|enum|exports|extends|final|finally|float|for|goto|if|implements|import|instanceof|int|interface|long|module|native|new|null|open|opens|package|private|protected|provides|public|record|requires|return|short|static|strictfp|super|switch|synchronized|this|throw|throws|to|transient|transitive|try|uses|var|void|volatile|while|with|yield)\b/,s=/\b[A-Z](?:\w*[a-z]\w*)?\b/,r.languages.java=r.languages.extend("clike",{"class-name":[s,/\b[A-Z]\w*(?=\s+\w+\s*[;,=())])/],keyword:a,"function":[r.languages.clike["function"],{pattern:/(\:\:)[a-z_]\w*/,lookbehind:!0}],number:/\b0b[01][01_]*L?\b|\b0x[\da-f_]*\.?[\da-f_p+-]+\b|(?:\b\d[\d_]*\.?[\d_]*|\B\.\d[\d_]*)(?:e[+-]?\d[\d_]*)?[dfl]?/i,operator:{pattern:/(^|[^.])(?:<<=?|>>>?=?|->|--|\+\+|&&|\|\||::|[?:~]|[-+*/%&|^!=<>]=?)/m,lookbehind:!0}}),r.languages.insertBefore("java","string",{"triple-quoted-string":{pattern:/"""[ \t]*[\r\n](?:(?:"|"")?(?:\\.|[^"\\]))*"""/,greedy:!0,alias:"string"}}),r.languages.insertBefore("java","class-name",{annotation:{alias:"punctuation",pattern:/(^|[^.])@\w+/,lookbehind:!0},namespace:{pattern:RegExp(/(\b(?:exports|import(?:\s+static)?|module|open|opens|package|provides|requires|to|transitive|uses|with)\s+)(?!<keyword>)[a-z]\w*(?:\.[a-z]\w*)*\.?/.source.replace(/<keyword>/g,function(){return a.source})),lookbehind:!0,inside:{punctuation:/\./}},generics:{pattern:/<(?:[\w\s,.&?]|<(?:[\w\s,.&?]|<(?:[\w\s,.&?]|<[\w\s,.&?]*>)*>)*>)*>/,inside:{"class-name":s,keyword:a,punctuation:/[<>(),.:]/,operator:/[?&|]/}}})},{}],8:[function(e,n,t){Prism.languages.javascript=Prism.languages.extend("clike",{"class-name":[Prism.languages.clike["class-name"],{pattern:/(^|[^$\w\xA0-\uFFFF])[_$A-Z\xA0-\uFFFF][$\w\xA0-\uFFFF]*(?=\.(?:prototype|constructor))/,lookbehind:!0}],keyword:[{pattern:/((?:^|})\s*)(?:catch|finally)\b/,lookbehind:!0},{pattern:/(^|[^.]|\.\.\.\s*)\b(?:as|async(?=\s*(?:function\b|\(|[$\w\xA0-\uFFFF]|$))|await|break|case|class|const|continue|debugger|default|delete|do|else|enum|export|extends|for|from|function|(?:get|set)(?=\s*[\[$\w\xA0-\uFFFF])|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)\b/,lookbehind:!0}],number:/\b(?:(?:0[xX](?:[\dA-Fa-f](?:_[\dA-Fa-f])?)+|0[bB](?:[01](?:_[01])?)+|0[oO](?:[0-7](?:_[0-7])?)+)n?|(?:\d(?:_\d)?)+n|NaN|Infinity)\b|(?:\b(?:\d(?:_\d)?)+\.?(?:\d(?:_\d)?)*|\B\.(?:\d(?:_\d)?)+)(?:[Ee][+-]?(?:\d(?:_\d)?)+)?/,"function":/#?[_$a-zA-Z\xA0-\uFFFF][$\w\xA0-\uFFFF]*(?=\s*(?:\.\s*(?:apply|bind|call)\s*)?\()/,operator:/--|\+\+|\*\*=?|=>|&&=?|\|\|=?|[!=]==|<<=?|>>>?=?|[-+*/%&|^!=<>]=?|\.{3}|\?\?=?|\?\.?|[~:]/}),Prism.languages.javascript["class-name"][0].pattern=/(\b(?:class|interface|extends|implements|instanceof|new)\s+)[\w.\\]+/,Prism.languages.insertBefore("javascript","keyword",{regex:{pattern:/((?:^|[^$\w\xA0-\uFFFF."'\])\s]|\b(?:return|yield))\s*)\/(?:\[(?:[^\]\\\r\n]|\\.)*]|\\.|[^/\\\[\r\n])+\/[gimyus]{0,6}(?=(?:\s|\/\*(?:[^*]|\*(?!\/))*\*\/)*(?:$|[\r\n,.;:})\]]|\/\/))/,lookbehind:!0,greedy:!0,inside:{"regex-source":{pattern:/^(\/)[\s\S]+(?=\/[a-z]*$)/,lookbehind:!0,alias:"language-regex",inside:Prism.languages.regex},"regex-flags":/[a-z]+$/,"regex-delimiter":/^\/|\/$/}},"function-variable":{pattern:/#?[_$a-zA-Z\xA0-\uFFFF][$\w\xA0-\uFFFF]*(?=\s*[=:]\s*(?:async\s*)?(?:\bfunction\b|(?:\((?:[^()]|\([^()]*\))*\)|[_$a-zA-Z\xA0-\uFFFF][$\w\xA0-\uFFFF]*)\s*=>))/,al
ias:"function"},parameter:[{pattern:/(function(?:\s+[_$A-Za-z\xA0-\uFFFF][$\w\xA0-\uFFFF]*)?\s*\(\s*)(?!\s)(?:[^()]|\([^()]*\))+?(?=\s*\))/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/[_$a-z\xA0-\uFFFF][$\w\xA0-\uFFFF]*(?=\s*=>)/i,inside:Prism.languages.javascript},{pattern:/(\(\s*)(?!\s)(?:[^()]|\([^()]*\))+?(?=\s*\)\s*=>)/,lookbehind:!0,inside:Prism.languages.javascript},{pattern:/((?:\b|\s|^)(?!(?:as|async|await|break|case|catch|class|const|continue|debugger|default|delete|do|else|enum|export|extends|finally|for|from|function|get|if|implements|import|in|instanceof|interface|let|new|null|of|package|private|protected|public|return|set|static|super|switch|this|throw|try|typeof|undefined|var|void|while|with|yield)(?![$\w\xA0-\uFFFF]))(?:[_$A-Za-z\xA0-\uFFFF][$\w\xA0-\uFFFF]*\s*)\(\s*|\]\s*\(\s*)(?!\s)(?:[^()]|\([^()]*\))+?(?=\s*\)\s*\{)/,lookbehind:!0,inside:Prism.languages.javascript}],constant:/\b[A-Z](?:[A-Z_]|\dx?)*\b/}),Prism.languages.insertBefore("javascript","string",{"template-string":{pattern:/`(?:\\[\s\S]|\${(?:[^{}]|{(?:[^{}]|{[^}]*})*})+}|(?!\${)[^\\`])*`/,greedy:!0,inside:{"template-punctuation":{pattern:/^`|`$/,alias:"string"},interpolation:{pattern:/((?:^|[^\\])(?:\\{2})*)\${(?:[^{}]|{(?:[^{}]|{[^}]*})*})+}/,lookbehind:!0,inside:{"interpolation-punctuation":{pattern:/^\${|}$/,alias:"punctuation"},rest:Prism.languages.javascript}},string:/[\s\S]+/}}}),Prism.languages.markup&&Prism.languages.markup.tag.addInlined("script","javascript"),Prism.languages.js=Prism.languages.javascript},{}],9:[function(e,n,t){function b(e,n){return"___"+e.toUpperCase()+n+"___"}var y;y=Prism,Object.defineProperties(y.languages["markup-templating"]={},{buildPlaceholders:{value:function(r,a,e,s){var i;r.language===a&&(i=r.tokenStack=[],r.code=r.code.replace(e,function(e){if("function"==typeof s&&!s(e))return e;for(var n,t=i.length;-1!==r.code.indexOf(n=b(a,t));)++t;return i[t]=e,n}),r.grammar=y.languages.markup)}},tokenizePlaceholders:{value:function(d,g){var m,f;d.language===g&&d.tokenStack&&(d.grammar=y.languages[g],m=0,f=Object.keys(d.tokenStack),function h(e){for(var n=0;n<e.length&&!(m>=f.length);n++){var t,r,a,s,i,o,l,u,c,p=e[n];"string"==typeof p||p.content&&"string"==typeof p.content?(t=f[m],r=d.tokenStack[t],a="string"==typeof p?p:p.content,s=b(g,t),-1<(i=a.indexOf(s))&&(++m,o=a.substring(0,i),l=new y.Token(g,y.tokenize(r,d.grammar),"language-"+g,r),u=a.substring(i+s.length),c=[],o&&c.push.apply(c,h([o])),c.push(l),u&&c.push.apply(c,h([u])),"string"==typeof p?e.splice.apply(e,[n,1].concat(c)):p.content=c)):p.content&&h(p.content)}return 
e}(d.tokens))}}})},{}],10:[function(e,n,t){Prism.languages.markup={comment:/<!--[\s\S]*?-->/,prolog:/<\?[\s\S]+?\?>/,doctype:{pattern:/<!DOCTYPE(?:[^>"'[\]]|"[^"]*"|'[^']*')+(?:\[(?:[^<"'\]]|"[^"]*"|'[^']*'|<(?!!--)|<!--(?:[^-]|-(?!->))*-->)*\]\s*)?>/i,greedy:!0,inside:{"internal-subset":{pattern:/(\[)[\s\S]+(?=\]>$)/,lookbehind:!0,greedy:!0,inside:null},string:{pattern:/"[^"]*"|'[^']*'/,greedy:!0},punctuation:/^<!|>$|[[\]]/,"doctype-tag":/^DOCTYPE/,name:/[^\s<>'"]+/}},cdata:/<!\[CDATA\[[\s\S]*?]]>/i,tag:{pattern:/<\/?(?!\d)[^\s>\/=$<%]+(?:\s(?:\s*[^\s>\/=]+(?:\s*=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+(?=[\s>]))|(?=[\s/>])))+)?\s*\/?>/,greedy:!0,inside:{tag:{pattern:/^<\/?[^\s>\/]+/,inside:{punctuation:/^<\/?/,namespace:/^[^\s>\/:]+:/}},"attr-value":{pattern:/=\s*(?:"[^"]*"|'[^']*'|[^\s'">=]+)/,inside:{punctuation:[{pattern:/^=/,alias:"attr-equals"},/"|'/]}},punctuation:/\/?>/,"attr-name":{pattern:/[^\s>\/]+/,inside:{namespace:/^[^\s>\/:]+:/}}}},entity:[{pattern:/&[\da-z]{1,8};/i,alias:"named-entity"},/&#x?[\da-f]{1,8};/i]},Prism.languages.markup.tag.inside["attr-value"].inside.entity=Prism.languages.markup.entity,Prism.languages.markup.doctype.inside["internal-subset"].inside=Prism.languages.markup,Prism.hooks.add("wrap",function(e){"entity"===e.type&&(e.attributes.title=e.content.replace(/&/,"&"))}),Object.defineProperty(Prism.languages.markup.tag,"addInlined",{value:function(e,n){var t={};t["language-"+n]={pattern:/(^<!\[CDATA\[)[\s\S]+?(?=\]\]>$)/i,lookbehind:!0,inside:Prism.languages[n]},t.cdata=/^<!\[CDATA\[|\]\]>$/i;var r={"included-cdata":{pattern:/<!\[CDATA\[[\s\S]*?\]\]>/i,inside:t}};r["language-"+n]={pattern:/[\s\S]+/,inside:Prism.languages[n]};var a={};a[e]={pattern:RegExp(/(<__[\s\S]*?>)(?:<!\[CDATA\[(?:[^\]]|\](?!\]>))*\]\]>|(?!<!\[CDATA\[)[\s\S])*?(?=<\/__>)/.source.replace(/__/g,function(){return e}),"i"),lookbehind:!0,greedy:!0,inside:r},Prism.languages.insertBefore("markup","cdata",a)}}),Prism.languages.html=Prism.languages.markup,Prism.languages.mathml=Prism.languages.markup,Prism.languages.svg=Prism.languages.markup,Prism.languages.xml=Prism.languages.extend("markup",{}),Prism.languages.ssml=Prism.languages.xml,Prism.languages.atom=Prism.languages.xml,Prism.languages.rss=Prism.languages.xml},{}],11:[function(e,n,t){!function(n){n.languages.php=n.languages.extend("clike",{keyword:/\b(?:__halt_compiler|abstract|and|array|as|break|callable|case|catch|class|clone|const|continue|declare|default|die|do|echo|else|elseif|empty|enddeclare|endfor|endforeach|endif|endswitch|endwhile|eval|exit|extends|final|finally|for|foreach|function|global|goto|if|implements|include|include_once|instanceof|insteadof|interface|isset|list|match|namespace|new|or|parent|print|private|protected|public|require|require_once|return|static|switch|throw|trait|try|unset|use|var|while|xor|yield)\b/i,"boolean":{pattern:/\b(?:false|true)\b/i,alias:"constant"},constant:[/\b[A-Z_][A-Z0-9_]*\b/,/\b(?:null)\b/i],comment:{pattern:/(^|[^\\])(?:\/\*[\s\S]*?\*\/|\/\/.*)/,lookbehind:!0}}),n.languages.insertBefore("php","string",{"shell-comment":{pattern:/(^|[^\\])#.*/,lookbehind:!0,alias:"comment"}}),n.languages.insertBefore("php","comment",{delimiter:{pattern:/\?>$|^<\?(?:php(?=\s)|=)?/i,alias:"important"}}),n.languages.insertBefore("php","keyword",{variable:/\$+(?:\w+\b|(?={))/i,"package":{pattern:/(\\|namespace\s+|use\s+)[\w\\]+/,lookbehind:!0,inside:{punctuation:/\\/}}}),n.languages.insertBefore("php","operator",{property:{pattern:/(->)[\w]+/,lookbehind:!0}});var 
e={pattern:/{\$(?:{(?:{[^{}]+}|[^{}]+)}|[^{}])+}|(^|[^\\{])\$+(?:\w+(?:\[[^\r\n\[\]]+\]|->\w+)*)/,lookbehind:!0,inside:n.languages.php};n.languages.insertBefore("php","string",{"nowdoc-string":{pattern:/<<<'([^']+)'[\r\n](?:.*[\r\n])*?\1;/,greedy:!0,alias:"string",inside:{delimiter:{pattern:/^<<<'[^']+'|[a-z_]\w*;$/i,alias:"symbol",inside:{punctuation:/^<<<'?|[';]$/}}}},"heredoc-string":{pattern:/<<<(?:"([^"]+)"[\r\n](?:.*[\r\n])*?\1;|([a-z_]\w*)[\r\n](?:.*[\r\n])*?\2;)/i,greedy:!0,alias:"string",inside:{delimiter:{pattern:/^<<<(?:"[^"]+"|[a-z_]\w*)|[a-z_]\w*;$/i,alias:"symbol",inside:{punctuation:/^<<<"?|[";]$/}},interpolation:e}},"single-quoted-string":{pattern:/'(?:\\[\s\S]|[^\\'])*'/,greedy:!0,alias:"string"},"double-quoted-string":{pattern:/"(?:\\[\s\S]|[^\\"])*"/,greedy:!0,alias:"string",inside:{interpolation:e}}}),delete n.languages.php.string,n.hooks.add("before-tokenize",function(e){/<\?/.test(e.code)&&n.languages["markup-templating"].buildPlaceholders(e,"php",/<\?(?:[^"'/#]|\/(?![*/])|("|')(?:\\[\s\S]|(?!\1)[^\\])*\1|(?:\/\/|#)(?:[^?\n\r]|\?(?!>))*(?=$|\?>|[\r\n])|\/\*[\s\S]*?(?:\*\/|$))*?(?:\?>|$)/gi)}),n.hooks.add("after-tokenize",function(e){n.languages["markup-templating"].tokenizePlaceholders(e,"php")})}(Prism)},{}],12:[function(e,n,t){Prism.languages.python={comment:{pattern:/(^|[^\\])#.*/,lookbehind:!0},"string-interpolation":{pattern:/(?:f|rf|fr)(?:("""|''')[\s\S]*?\1|("|')(?:\\.|(?!\2)[^\\\r\n])*\2)/i,greedy:!0,inside:{interpolation:{pattern:/((?:^|[^{])(?:{{)*){(?!{)(?:[^{}]|{(?!{)(?:[^{}]|{(?!{)(?:[^{}])+})+})+}/,lookbehind:!0,inside:{"format-spec":{pattern:/(:)[^:(){}]+(?=}$)/,lookbehind:!0},"conversion-option":{pattern://,alias:"punctuation"},rest:null}},string:/[\s\S]+/}},"triple-quoted-string":{pattern:/(?:[rub]|rb|br)?("""|''')[\s\S]*?\1/i,greedy:!0,alias:"string"},string:{pattern:/(?:[rub]|rb|br)?("|')(?:\\.|(?!\1)[^\\\r\n])*\1/i,greedy:!0},"function":{pattern:/((?:^|\s)def[ 
\t]+)[a-zA-Z_]\w*(?=\s*\()/g,lookbehind:!0},"class-name":{pattern:/(\bclass\s+)\w+/i,lookbehind:!0},decorator:{pattern:/(^\s*)@\w+(?:\.\w+)*/im,lookbehind:!0,alias:["annotation","punctuation"],inside:{punctuation:/\./}},keyword:/\b(?:and|as|assert|async|await|break|class|continue|def|del|elif|else|except|exec|finally|for|from|global|if|import|in|is|lambda|nonlocal|not|or|pass|print|raise|return|try|while|with|yield)\b/,builtin:/\b(?:__import__|abs|all|any|apply|ascii|basestring|bin|bool|buffer|bytearray|bytes|callable|chr|classmethod|cmp|coerce|compile|complex|delattr|dict|dir|divmod|enumerate|eval|execfile|file|filter|float|format|frozenset|getattr|globals|hasattr|hash|help|hex|id|input|int|intern|isinstance|issubclass|iter|len|list|locals|long|map|max|memoryview|min|next|object|oct|open|ord|pow|property|range|raw_input|reduce|reload|repr|reversed|round|set|setattr|slice|sorted|staticmethod|str|sum|super|tuple|type|unichr|unicode|vars|xrange|zip)\b/,"boolean":/\b(?:True|False|None)\b/,number:/(?:\b(?=\d)|\B(?=\.))(?:0[bo])?(?:(?:\d|0x[\da-f])[\da-f]*\.?\d*|\.\d+)(?:e[+-]?\d+)?j?\b/i,operator:/[-+%=]=?|!=|\*\*?=?|\/\/?=?|<[<=>]?|>[=>]?|[&|^~]/,punctuation:/[{}[\];(),.:]/},Prism.languages.python["string-interpolation"].inside.interpolation.inside.rest=Prism.languages.python,Prism.languages.py=Prism.languages.python},{}],13:[function(e,n,t){!function(e){e.languages.ruby=e.languages.extend("clike",{comment:[/#.*/,{pattern:/^=begin\s[\s\S]*?^=end/m,greedy:!0}],"class-name":{pattern:/(\b(?:class)\s+|\bcatch\s+\()[\w.\\]+/i,lookbehind:!0,inside:{punctuation:/[.\\]/}},keyword:/\b(?:alias|and|BEGIN|begin|break|case|class|def|define_method|defined|do|each|else|elsif|END|end|ensure|extend|for|if|in|include|module|new|next|nil|not|or|prepend|protected|private|public|raise|redo|require|rescue|retry|return|self|super|then|throw|undef|unless|until|when|while|yield)\b/});var n={pattern:/#\{[^}]+\}/,inside:{delimiter:{pattern:/^#\{|\}$/,alias:"tag"},rest:e.languages.ruby}};delete 
e.languages.ruby["function"],e.languages.insertBefore("ruby","keyword",{regex:[{pattern:RegExp(/%r/.source+"(?:"+[/([^a-zA-Z0-9\s{(\[<])(?:(?!\1)[^\\]|\\[\s\S])*\1[gim]{0,3}/.source,/\((?:[^()\\]|\\[\s\S])*\)[gim]{0,3}/.source,/\{(?:[^#{}\\]|#(?:\{[^}]+\})?|\\[\s\S])*\}[gim]{0,3}/.source,/\[(?:[^\[\]\\]|\\[\s\S])*\][gim]{0,3}/.source,/<(?:[^<>\\]|\\[\s\S])*>[gim]{0,3}/.source].join("|")+")"),greedy:!0,inside:{interpolation:n}},{pattern:/(^|[^/])\/(?!\/)(?:\[[^\r\n\]]+\]|\\.|[^[/\\\r\n])+\/[gim]{0,3}(?=\s*(?:$|[\r\n,.;})]))/,lookbehind:!0,greedy:!0}],variable:/[@$]+[a-zA-Z_]\w*(?:[?!]|\b)/,symbol:{pattern:/(^|[^:]):[a-zA-Z_]\w*(?:[?!]|\b)/,lookbehind:!0},"method-definition":{pattern:/(\bdef\s+)[\w.]+/,lookbehind:!0,inside:{"function":/\w+$/,rest:e.languages.ruby}}}),e.languages.insertBefore("ruby","number",{builtin:/\b(?:Array|Bignum|Binding|Class|Continuation|Dir|Exception|FalseClass|File|Stat|Fixnum|Float|Hash|Integer|IO|MatchData|Method|Module|NilClass|Numeric|Object|Proc|Range|Regexp|String|Struct|TMS|Symbol|ThreadGroup|Thread|Time|TrueClass)\b/,constant:/\b[A-Z]\w*(?:[?!]|\b)/}),e.languages.ruby.string=[{pattern:RegExp(/%[qQiIwWxs]?/.source+"(?:"+[/([^a-zA-Z0-9\s{(\[<])(?:(?!\1)[^\\]|\\[\s\S])*\1/.source,/\((?:[^()\\]|\\[\s\S])*\)/.source,/\{(?:[^#{}\\]|#(?:\{[^}]+\})?|\\[\s\S])*\}/.source,/\[(?:[^\[\]\\]|\\[\s\S])*\]/.source,/<(?:[^<>\\]|\\[\s\S])*>/.source].join("|")+")"),greedy:!0,inside:{interpolation:n}},{pattern:/("|')(?:#\{[^}]+\}|\\(?:\r\n|[\s\S])|(?!\1)[^\\\r\n])*\1/,greedy:!0,inside:{interpolation:n}}],e.languages.rb=e.languages.ruby}(Prism)},{}],14:[function(e,n,t){var r=e("prismjs/components/prism-core");e("prismjs/components/prism-clike"),e("prismjs/components/prism-markup-templating"),e("prismjs/components/prism-c"),e("prismjs/components/prism-cpp"),e("prismjs/components/prism-csharp"),e("prismjs/components/prism-css"),e("prismjs/components/prism-java"),e("prismjs/components/prism-javascript"),e("prismjs/components/prism-markup"),e("prismjs/components/prism-php"),e("prismjs/components/prism-python"),e("prismjs/components/prism-ruby"),n.exports={boltExport:r}},{"prismjs/components/prism-c":1,"prismjs/components/prism-clike":2,"prismjs/components/prism-core":3,"prismjs/components/prism-cpp":4,"prismjs/components/prism-csharp":5,"prismjs/components/prism-css":6,"prismjs/components/prism-java":7,"prismjs/components/prism-javascript":8,"prismjs/components/prism-markup":10,"prismjs/components/prism-markup-templating":9,"prismjs/components/prism-php":11,"prismjs/components/prism-python":12,"prismjs/components/prism-ruby":13}]},{},[14])(14)});var a=window.Prism;window.Prism=e}(undefined,h,b,undefined);var v=b.exports.boltExport,w=function(e){return f.Prism&&e.getParam("codesample_global_prismjs",!1,"boolean")?f.Prism:v},k=function(e){var n=e.selection?e.selection.getNode():null;return g(n)?c.some(n):c.none()},x=function(s){var t,e=s.getParam("codesample_languages")||[{text:"HTML/XML",value:"markup"},{text:"JavaScript",value:"javascript"},{text:"CSS",value:"css"},{text:"PHP",value:"php"},{text:"Ruby",value:"ruby"},{text:"Python",value:"python"},{text:"Java",value:"java"},{text:"C",value:"c"},{text:"C#",value:"csharp"},{text:"C++",value:"cpp"}],n=p(e).fold(function(){return""},function(e){return e.value}),r=(t=n,k(s).fold(function(){return t},function(e){var n=e.className.match(/language-(\w+)/);return n?n[1]:t})),a=k(s).fold(function(){return""},function(e){return e.textContent});s.windowManager.open({title:"Insert/Edit Code 
Sample",size:"large",body:{type:"panel",items:[{type:"selectbox",name:"language",label:"Language",items:e},{type:"textarea",name:"code",label:"Code view"}]},buttons:[{type:"cancel",name:"cancel",text:"Cancel"},{type:"submit",name:"save",text:"Save",primary:!0}],initialData:{language:r,code:a},onSubmit:function(e){var n,t,r,a=e.getData();n=s,t=a.language,r=a.code,n.undoManager.transact(function(){var e=k(n);return r=d.DOM.encode(r),e.fold(function(){n.insertContent('<pre id="__new" class="language-'+t+'">'+r+"</pre>"),n.selection.select(n.$("#__new").removeAttr("id")[0])},function(e){n.dom.setAttrib(e,"class","language-"+t),e.innerHTML=r,w(n).highlightElement(e),n.selection.select(e)})}),e.close()}})},_=function(r){r.ui.registry.addToggleButton("codesample",{icon:"code-sample",tooltip:"Insert/edit code sample",onAction:function(){return x(r)},onSetup:function(t){var e=function(){var e,n;t.setActive((n=(e=r).selection.getStart(),e.dom.is(n,'pre[class*="language-"]')))};return r.on("NodeChange",e),function(){return r.off("NodeChange",e)}}}),r.ui.registry.addMenuItem("codesample",{text:"Code sample...",icon:"code-sample",onAction:function(){return x(r)}})};r.add("codesample",function(n){var t,a,r;a=(t=n).$,t.on("PreProcess",function(e){a("pre[contenteditable=false]",e.node).filter(m(g)).each(function(e,n){var t=a(n),r=n.textContent;t.attr("class",a.trim(t.attr("class"))),t.removeAttr("contentEditable"),t.empty().append(a("<code></code>").each(function(){this.textContent=r}))})}),t.on("SetContent",function(){var e=a("pre").filter(m(g)).filter(function(e,n){return"false"!==n.contentEditable});e.length&&t.undoManager.transact(function(){e.each(function(e,n){a(n).find("br").each(function(e,n){n.parentNode.replaceChild(t.getDoc().createTextNode("\n"),n)}),n.contentEditable="false",n.innerHTML=t.dom.encode(n.textContent),w(t).highlightElement(n),n.className=a.trim(n.className)})})}),_(n),(r=n).addCommand("codesample",function(){var e=r.selection.getNode();r.selection.isCollapsed()||g(e)?x(r):r.formatter.toggle("code")}),n.on("dblclick",function(e){g(e.target)&&x(n)})})}(); | * Licensed under the LGPL or a commercial license.
* For LGPL see License.txt in the project root for license information.
* For commercial licenses see https://www.tiny.cloud/ |
tap.js | 'use strict';
/**
* @module TAP
*/
/**
* Module dependencies.
*/
var util = require('util');
var Base = require('./base');
var constants = require('../runner').constants;
var EVENT_TEST_PASS = constants.EVENT_TEST_PASS;
var EVENT_TEST_FAIL = constants.EVENT_TEST_FAIL;
var EVENT_RUN_BEGIN = constants.EVENT_RUN_BEGIN;
var EVENT_RUN_END = constants.EVENT_RUN_END;
var EVENT_TEST_PENDING = constants.EVENT_TEST_PENDING;
var EVENT_TEST_END = constants.EVENT_TEST_END;
var inherits = require('../utils').inherits;
var sprintf = util.format;
/**
* Expose `TAP`.
*/
exports = module.exports = TAP;
/**
* Constructs a new TAP reporter with runner instance and reporter options.
*
* @public
* @class
* @extends Mocha.reporters.Base
* @memberof Mocha.reporters
* @param {Runner} runner - Instance triggers reporter actions.
* @param {Object} [options] - runner options
*/
function TAP(runner, options) {
Base.call(this, runner, options);
var self = this;
var n = 1;
var tapVersion = '12';
if (options && options.reporterOptions) {
if (options.reporterOptions.tapVersion) {
tapVersion = options.reporterOptions.tapVersion.toString();
}
}
this._producer = createProducer(tapVersion);
runner.once(EVENT_RUN_BEGIN, function() {
var ntests = runner.grepTotal(runner.suite);
self._producer.writeVersion();
self._producer.writePlan(ntests);
});
runner.on(EVENT_TEST_END, function() {
++n;
});
runner.on(EVENT_TEST_PENDING, function(test) {
self._producer.writePending(n, test);
});
runner.on(EVENT_TEST_PASS, function(test) {
self._producer.writePass(n, test);
});
runner.on(EVENT_TEST_FAIL, function(test, err) {
self._producer.writeFail(n, test, err);
});
runner.once(EVENT_RUN_END, function() {
self._producer.writeEpilogue(runner.stats);
});
}
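// Illustrative usage (a sketch, not part of this module): selecting the
// TAP 13 producer via reporter options.
//
//     new TAP(runner, {reporterOptions: {tapVersion: '13'}});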
/**
* Inherit from `Base.prototype`.
*/
inherits(TAP, Base);
/**
* Returns a TAP-safe title of `test`.
*
* @private
* @param {Test} test - Test instance.
* @return {String} title with any hash character removed
*/
function title(test) {
return test.fullTitle().replace(/#/g, '');
}
/**
* Writes newline-terminated formatted string to reporter output stream.
*
* @private
* @param {string} format - `printf`-like format string
* @param {...*} [varArgs] - Format string arguments
*/
function println(format, varArgs) {
var vargs = Array.from(arguments);
vargs[0] += '\n';
process.stdout.write(sprintf.apply(null, vargs));
}
/**
* Returns a `tapVersion`-appropriate TAP producer instance, if possible.
*
* @private
* @param {string} tapVersion - Version of TAP specification to produce.
* @returns {TAPProducer} specification-appropriate instance
* @throws {Error} if specification version has no associated producer.
*/
function createProducer(tapVersion) {
var producers = {
'12': new TAP12Producer(),
'13': new TAP13Producer()
};
var producer = producers[tapVersion];
if (!producer) {
throw new Error(
'invalid or unsupported TAP version: ' + JSON.stringify(tapVersion)
);
}
return producer;
}
/**
* @summary
* Constructs a new TAPProducer.
*
* @description
* <em>Only</em> to be used as an abstract base class.
*
* @private
* @constructor
*/
function TAPProducer() {}
/**
* Writes the TAP version to reporter output stream.
*
* @abstract
*/
TAPProducer.prototype.writeVersion = function() {};
/**
* Writes the plan to reporter output stream.
*
* @abstract
* @param {number} ntests - Number of tests that are planned to run.
*/
TAPProducer.prototype.writePlan = function(ntests) {
println('%d..%d', 1, ntests);
};
/**
* Writes that test passed to reporter output stream.
*
* @abstract
* @param {number} n - Index of test that passed.
* @param {Test} test - Instance containing test information.
*/
TAPProducer.prototype.writePass = function(n, test) {
println('ok %d %s', n, title(test));
};
/**
* Writes that test was skipped to reporter output stream.
*
* @abstract
* @param {number} n - Index of test that was skipped.
* @param {Test} test - Instance containing test information.
*/
TAPProducer.prototype.writePending = function(n, test) {
println('ok %d %s # SKIP -', n, title(test));
};
/**
* Writes that test failed to reporter output stream.
*
* @abstract
* @param {number} n - Index of test that failed.
* @param {Test} test - Instance containing test information.
* @param {Error} err - Reason the test failed.
*/
TAPProducer.prototype.writeFail = function(n, test, err) {
println('not ok %d %s', n, title(test));
};
/**
* Writes the summary epilogue to reporter output stream.
*
* @abstract
* @param {Object} stats - Object containing run statistics.
*/
TAPProducer.prototype.writeEpilogue = function(stats) {
// :TBD: Why is this not counting pending tests?
println('# tests ' + (stats.passes + stats.failures));
println('# pass ' + stats.passes);
// :TBD: Why are we not showing pending results?
println('# fail ' + stats.failures);
};
/**
* @summary
* Constructs a new TAP12Producer.
*
* @description
* Produces output conforming to the TAP12 specification.
*
* @private
* @constructor
* @extends TAPProducer
* @see {@link https://testanything.org/tap-specification.html|Specification}
*/
function | () {
/**
* Writes that test failed to reporter output stream, with error formatting.
* @override
*/
this.writeFail = function(n, test, err) {
TAPProducer.prototype.writeFail.call(this, n, test, err);
if (err.message) {
println(err.message.replace(/^/gm, ' '));
}
if (err.stack) {
println(err.stack.replace(/^/gm, ' '));
}
};
}
/**
* Inherit from `TAPProducer.prototype`.
*/
inherits(TAP12Producer, TAPProducer);
/**
* @summary
* Constructs a new TAP13Producer.
*
* @description
* Produces output conforming to the TAP13 specification.
*
* @private
* @constructor
* @extends TAPProducer
* @see {@link https://testanything.org/tap-version-13-specification.html|Specification}
*/
function TAP13Producer() {
/**
* Writes the TAP version to reporter output stream.
* @override
*/
this.writeVersion = function() {
println('TAP version 13');
};
/**
* Writes that test failed to reporter output stream, with error formatting.
* @override
*/
this.writeFail = function(n, test, err) {
TAPProducer.prototype.writeFail.call(this, n, test, err);
var emitYamlBlock = err.message != null || err.stack != null;
if (emitYamlBlock) {
println(indent(1) + '---');
if (err.message) {
println(indent(2) + 'message: |-');
println(err.message.replace(/^/gm, indent(3)));
}
if (err.stack) {
println(indent(2) + 'stack: |-');
println(err.stack.replace(/^/gm, indent(3)));
}
println(indent(1) + '...');
}
};
function indent(level) {
return Array(level + 1).join(' ');
}
}
/**
* Inherit from `TAPProducer.prototype`.
*/
inherits(TAP13Producer, TAPProducer);
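// Example failure output from TAP13Producer (illustrative; the message and
// stack text are hypothetical):
//
//     not ok 3 suite failing test
//       ---
//         message: |-
//           expected 1 to equal 2
//         stack: |-
//           AssertionError: expected 1 to equal 2
//       ...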
TAP.description = 'TAP-compatible output';
| TAP12Producer |
main.rs | // Copyright 2021 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
anyhow::{Context as _, Error},
fidl_fuchsia_netemul_test::{CounterRequest, CounterRequestStream},
fuchsia_async as fasync,
fuchsia_component::{
client,
server::{ServiceFs, ServiceFsDir},
},
futures::prelude::*,
log::{error, info},
std::sync::Arc,
std::sync::Mutex,
};
struct CounterData {
value: u32,
}
const SVC_DIR: &str = "/svc";
async fn | (
stream: CounterRequestStream,
data: Arc<Mutex<CounterData>>,
) -> Result<(), fidl::Error> {
stream
.try_for_each(|request| async {
match request {
CounterRequest::Increment { responder } => {
let mut d = data.lock().unwrap();
d.value += 1;
info!("incrementing counter to {}", d.value);
let () = responder
.send(d.value)
.unwrap_or_else(|e| error!("error sending response: {:?}", e));
}
CounterRequest::ConnectToProtocol { protocol_name, request, control_handle: _ } => {
info!("connecting to protocol '{}'", protocol_name);
let () = client::connect_channel_to_protocol_at_path(
request,
&format!("{}/{}", SVC_DIR, protocol_name),
)
.unwrap_or_else(|e| {
error!(
"error connecting request to protocol '{}' in '{}' directory: {:?}",
protocol_name, SVC_DIR, e,
)
});
}
CounterRequest::OpenInNamespace { path, flags, request, control_handle: _ } => {
info!("connecting to node at '{}'", path);
let () = fdio::open(&path, flags, request).unwrap_or_else(|e| {
error!("error connecting request to node at path '{}': {}", path, e)
});
}
CounterRequest::TryOpenDirectory { path, responder } => {
info!("opening directory at '{}'", path);
match std::fs::read_dir(&path) {
Ok(std::fs::ReadDir { .. }) => responder
.send(&mut Ok(()))
.unwrap_or_else(|e| error!("error sending response: {:?}", e)),
Err(e) => {
let status = match e.kind() {
std::io::ErrorKind::NotFound | std::io::ErrorKind::BrokenPipe => {
info!("failed to open directory at '{}': {}", path, e);
fuchsia_zircon::Status::NOT_FOUND
}
_ => {
error!("failed to open directory at '{}': {}", path, e);
fuchsia_zircon::Status::IO
}
};
let () = responder
.send(&mut Err(status.into_raw()))
.unwrap_or_else(|e| error!("error sending response: {:?}", e));
}
}
}
}
Ok(())
})
.await
}
/// Command line arguments for the counter service.
#[derive(argh::FromArgs)]
struct Args {
/// the value at which to start the counter.
#[argh(option, default = "0")]
starting_value: u32,
}
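// For illustration (an assumption about how the component is invoked): argh
// parses `--starting-value 5` into `Args { starting_value: 5 }`; omitting the
// flag yields the default of 0.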
#[fasync::run_singlethreaded]
async fn main() -> Result<(), Error> {
let Args { starting_value } = argh::from_env();
let () = fuchsia_syslog::init().context("cannot init logger")?;
let mut fs = ServiceFs::new();
let inspector = fuchsia_inspect::component::inspector();
let data = {
let data = Arc::new(Mutex::new(CounterData { value: starting_value }));
let data_clone = data.clone();
let () = inspector.root().record_lazy_child("counter", move || {
let srv = fuchsia_inspect::Inspector::new();
let () = srv.root().record_uint(
"count",
data.lock().expect("failed to acquire lock on `CounterData`").value.into(),
);
futures::future::ok(srv).boxed()
});
data_clone
};
let () = inspect_runtime::serve(inspector, &mut fs).context("error serving inspect")?;
let _: &mut ServiceFsDir<'_, _> = fs.dir("svc").add_fidl_service(|s: CounterRequestStream| s);
let _: &mut ServiceFs<_> =
fs.take_and_serve_directory_handle().context("error serving directory handle")?;
let () = fs
.for_each_concurrent(None, |stream| async {
handle_counter(stream, data.clone())
.await
.unwrap_or_else(|e| error!("error handling CounterRequestStream: {:?}", e))
})
.await;
Ok(())
}
| handle_counter |
example_test.go | // Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ipv6_test
import (
"fmt"
"log"
"net"
"os"
"time"
"github.com/socketplane/socketplane/Godeps/_workspace/src/golang.org/x/net/internal/iana"
"github.com/socketplane/socketplane/Godeps/_workspace/src/golang.org/x/net/ipv6"
"golang.org/x/net/icmp"
)
func ExampleConn_markingTCP() {
ln, err := net.Listen("tcp6", "[::]:1024")
if err != nil {
log.Fatal(err)
}
defer ln.Close()
for {
c, err := ln.Accept()
if err != nil {
log.Fatal(err)
}
go func(c net.Conn) {
defer c.Close()
p := ipv6.NewConn(c)
if err := p.SetTrafficClass(iana.DiffServAF11); err != nil {
log.Fatal(err)
}
if err := p.SetHopLimit(128); err != nil {
log.Fatal(err)
}
if _, err := c.Write([]byte("HELLO-R-U-THERE-ACK")); err != nil {
log.Fatal(err)
}
}(c)
}
}
func ExamplePacketConn_servingOneShotMulticastDNS() {
c, err := net.ListenPacket("udp6", "[::]:5353") // mDNS over UDP
if err != nil {
log.Fatal(err)
}
defer c.Close()
p := ipv6.NewPacketConn(c)
en0, err := net.InterfaceByName("en0")
if err != nil {
log.Fatal(err)
}
mDNSLinkLocal := net.UDPAddr{IP: net.ParseIP("ff02::fb")}
if err := p.JoinGroup(en0, &mDNSLinkLocal); err != nil {
log.Fatal(err)
}
defer p.LeaveGroup(en0, &mDNSLinkLocal)
if err := p.SetControlMessage(ipv6.FlagDst|ipv6.FlagInterface, true); err != nil {
log.Fatal(err)
}
var wcm ipv6.ControlMessage
b := make([]byte, 1500)
for {
_, rcm, peer, err := p.ReadFrom(b)
if err != nil {
log.Fatal(err)
}
if !rcm.Dst.IsMulticast() || !rcm.Dst.Equal(mDNSLinkLocal.IP) {
continue
}
wcm.IfIndex = rcm.IfIndex
answers := []byte("FAKE-MDNS-ANSWERS") // fake mDNS answers, you need to implement this
if _, err := p.WriteTo(answers, &wcm, peer); err != nil {
log.Fatal(err)
}
}
}
func ExamplePacketConn_tracingIPPacketRoute() {
// Tracing an IP packet route to www.google.com.
const host = "www.google.com"
ips, err := net.LookupIP(host)
if err != nil {
log.Fatal(err)
}
var dst net.IPAddr
for _, ip := range ips {
if ip.To16() != nil && ip.To4() == nil |
}
if dst.IP == nil {
log.Fatal("no AAAA record found")
}
c, err := net.ListenPacket(fmt.Sprintf("ip6:%d", iana.ProtocolIPv6ICMP), "::") // ICMP for IPv6
if err != nil {
log.Fatal(err)
}
defer c.Close()
p := ipv6.NewPacketConn(c)
if err := p.SetControlMessage(ipv6.FlagHopLimit|ipv6.FlagSrc|ipv6.FlagDst|ipv6.FlagInterface, true); err != nil {
log.Fatal(err)
}
wm := icmp.Message{
Type: ipv6.ICMPTypeEchoRequest, Code: 0,
Body: &icmp.Echo{
ID: os.Getpid() & 0xffff,
Data: []byte("HELLO-R-U-THERE"),
},
}
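	// Start from a filter that blocks every ICMP type, then re-enable the
	// only two messages this loop cares about: hop-limit expirations from
	// intermediate routers and the final echo reply from the destination.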
var f ipv6.ICMPFilter
f.SetAll(true)
f.Accept(ipv6.ICMPTypeTimeExceeded)
f.Accept(ipv6.ICMPTypeEchoReply)
if err := p.SetICMPFilter(&f); err != nil {
log.Fatal(err)
}
var wcm ipv6.ControlMessage
rb := make([]byte, 1500)
for i := 1; i <= 64; i++ { // up to 64 hops
wm.Body.(*icmp.Echo).Seq = i
wb, err := wm.Marshal(nil)
if err != nil {
log.Fatal(err)
}
		// In the real world there are usually several
		// traffic-engineered paths for each hop.
// You may need to probe a few times to each hop.
begin := time.Now()
wcm.HopLimit = i
if _, err := p.WriteTo(wb, &wcm, &dst); err != nil {
log.Fatal(err)
}
if err := p.SetReadDeadline(time.Now().Add(3 * time.Second)); err != nil {
log.Fatal(err)
}
n, rcm, peer, err := p.ReadFrom(rb)
if err != nil {
if err, ok := err.(net.Error); ok && err.Timeout() {
fmt.Printf("%v\t*\n", i)
continue
}
log.Fatal(err)
}
rm, err := icmp.ParseMessage(iana.ProtocolIPv6ICMP, rb[:n])
if err != nil {
log.Fatal(err)
}
rtt := time.Since(begin)
// In the real world you need to determine whether the
// received message is yours using ControlMessage.Src,
		// ControlMessage.Dst, icmp.Echo.ID and icmp.Echo.Seq.
switch rm.Type {
case ipv6.ICMPTypeTimeExceeded:
names, _ := net.LookupAddr(peer.String())
fmt.Printf("%d\t%v %+v %v\n\t%+v\n", i, peer, names, rtt, rcm)
case ipv6.ICMPTypeEchoReply:
names, _ := net.LookupAddr(peer.String())
fmt.Printf("%d\t%v %+v %v\n\t%+v\n", i, peer, names, rtt, rcm)
return
}
}
}
func ExamplePacketConn_advertisingOSPFHello() {
c, err := net.ListenPacket(fmt.Sprintf("ip6:%d", iana.ProtocolOSPFIGP), "::") // OSPF for IPv6
if err != nil {
log.Fatal(err)
}
defer c.Close()
p := ipv6.NewPacketConn(c)
en0, err := net.InterfaceByName("en0")
if err != nil {
log.Fatal(err)
}
allSPFRouters := net.IPAddr{IP: net.ParseIP("ff02::5")}
if err := p.JoinGroup(en0, &allSPFRouters); err != nil {
log.Fatal(err)
}
defer p.LeaveGroup(en0, &allSPFRouters)
hello := make([]byte, 24) // fake hello data, you need to implement this
ospf := make([]byte, 16) // fake ospf header, you need to implement this
ospf[0] = 3 // version 3
ospf[1] = 1 // hello packet
ospf = append(ospf, hello...)
if err := p.SetChecksum(true, 12); err != nil {
log.Fatal(err)
}
cm := ipv6.ControlMessage{
TrafficClass: iana.DiffServCS6,
HopLimit: 1,
IfIndex: en0.Index,
}
if _, err := p.WriteTo(ospf, &cm, &allSPFRouters); err != nil {
log.Fatal(err)
}
}
| {
dst.IP = ip
fmt.Printf("using %v for tracing an IP packet route to %s\n", dst.IP, host)
break
} |
LA_Apartment_Analysis.py | import neighborhoods_api
import apartments_scrape
import queries_from_terminal
import sys
print(f"We're in file {__file__}")
# Require the user to pass this driver script a data source argument
# (remote or local); print usage if it is missing.
if len(sys.argv) < 2:
    print('Too few arguments: please pass a data source argument (remote or local). EX: "LA_Apartment_Analysis.py remote"')
sys.exit(0)
| print("Calling neighborhoods_api.py. This should create the neighborhood table. Please wait...")
neighborhoods_api.main()
print("Calling apartments_scrape.py. This should create the apartment table. Please wait...")
apartments_scrape.main()
print("Calling queries_from_terminal.py. This should return some queries about the database. Please wait...")
queries_from_terminal.main()
elif sys.argv[1] == 'local':
print("Calling queries_from_terminal.py. This should return some queries about the database. Please wait...")
queries_from_terminal.main()
else:
print("Please enter 'remote' or 'local' as your second argument. EX: 'LA_Apartment_Analysis.py remote' ")
sys.exit(0) | if sys.argv[1] == 'remote': |
printElections.go | package engine
import (
"fmt"
"time"
"github.com/DCNT-Hammer/dcnt/common/interfaces"
"github.com/DCNT-Hammer/dcnt/elections"
"github.com/DCNT-Hammer/dcnt/state"
)
func lookup(id interfaces.IHash) *state.State |
func printSimElections(elects *int, value int, listenTo *int, wsapiNode *int) {
out := ""
if *listenTo < 0 || *listenTo >= len(fnodes) {
return
}
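	// Re-render once per second for as long as the election counter is
	// unchanged, printing only when the rendered text differs from the
	// previous output to avoid flooding the console.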
for *elects == value {
prt := "===SimElectionsStart===\n\n"
prt += "-------------------------\n"
if len(fnodes) == 0 {
return
}
//s := fnodes[*listenTo].State
//eo := s.Elections.(*elections.Elections)
prt = prt + "\n"
for _, fn := range fnodes {
s := fn.State
e := s.Elections.(*elections.Elections)
if e.Adapter != nil {
prt += e.Adapter.Status()
prt += "\n"
prt += e.Adapter.VolunteerControlsStatus()
prt += "\n"
//prt += e.Adapter.MessageLists()
//prt += "\n"
} else {
prt += fmt.Sprintf("%s has no simelection\n", fn.State.GetFactomNodeName())
}
}
prt = prt + "===SimElectionsEnd===\n"
if prt != out {
fmt.Println(prt)
out = prt
}
time.Sleep(time.Second)
}
}
func printElections(elects *int, value int, listenTo *int, wsapiNode *int) {
out := ""
if *listenTo < 0 || *listenTo >= len(fnodes) {
return
}
for *elects == value {
prt := "===ElectionsStart===\n\n"
if len(fnodes) == 0 {
return
}
s := fnodes[*listenTo].State
eo := s.Elections.(*elections.Elections)
prt = prt + fmt.Sprintf("%3s %15s %15s\n", "#", "Federated", "Audit")
for i := 0; i < len(eo.Federated)+len(eo.Audit); i++ {
fed := ""
aud := ""
if i < len(eo.Federated) {
id := eo.Federated[i].GetChainID()
f := lookup(id)
if f != nil {
fed = f.FactomNodeName
}
}
if i < len(eo.Audit) {
id := eo.Audit[i].GetChainID()
a := lookup(id)
if a != nil {
aud = a.FactomNodeName
}
}
if fed == "" && aud == "" {
break
}
prt = prt + fmt.Sprintf("%3d %15s %15s\n", i, fed, aud)
}
prt = prt + "\n" + fnodes[0].State.Election0
	for i := range eo.Federated {
		prt = prt + fmt.Sprintf("%4d ", i)
	}
	for i := range eo.Audit {
		prt = prt + fmt.Sprintf("%4d ", i)
	}
prt = prt + "\n"
for _, fn := range fnodes {
s := fn.State
if s.Elections.(*elections.Elections).Adapter != nil {
e := s.Elections.(*elections.Elections).Electing
prt += fmt.Sprintf("%2d ", e)
if s.Elections.(*elections.Elections).Adapter.IsObserver() {
prt += "O " // Observer
} else {
prt += "A " // Active
}
} else {
prt += "__ _ " // Active
}
prt = prt + s.Election1 + s.Election2 + "\n"
}
prt = prt + "===ElectionsEnd===\n"
if prt != out {
fmt.Println(prt)
out = prt
}
time.Sleep(time.Second)
}
}
| {
for _, fn := range fnodes {
if fn.State.IdentityChainID.Fixed() == id.Fixed() {
return fn.State
}
}
return nil
} |
lib.rs | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
//! This documentation was generated from *Postmaster Tools* crate version *2.0.5+20210330*, where *20210330* is the exact revision of the *gmailpostmastertools:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v2.0.5*.
//!
//! Everything else about the *Postmaster Tools* *v1* API can be found at the
//! [official documentation site](https://developers.google.com/gmail/postmaster).
//! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/main/gen/gmailpostmastertools1).
//! # Features
//!
//! Handle the following *Resources* with ease from the central [hub](PostmasterTools) ...
//!
//! * [domains](api::Domain)
//! * [*get*](api::DomainGetCall), [*list*](api::DomainListCall), [*traffic stats get*](api::DomainTrafficStatGetCall) and [*traffic stats list*](api::DomainTrafficStatListCall)
//!
//!
//!
//!
//! Not what you are looking for ? Find all other Google APIs in their Rust [documentation index](http://byron.github.io/google-apis-rs).
//!
//! # Structure of this Library
//!
//! The API is structured into the following primary items:
//!
//! * **[Hub](PostmasterTools)**
//! * a central object to maintain state and allow accessing all *Activities*
//! * creates [*Method Builders*](client::MethodsBuilder) which in turn
//! allow access to individual [*Call Builders*](client::CallBuilder)
//! * **[Resources](client::Resource)**
//! * primary types that you can apply *Activities* to
//! * a collection of properties and *Parts*
//! * **[Parts](client::Part)**
//! * a collection of properties
//! * never directly used in *Activities*
//! * **[Activities](client::CallBuilder)**
//! * operations to apply to *Resources*
//!
//! All *structures* are marked with applicable traits to further categorize them and ease browsing.
//! | //! ```
//!
//! Or specifically ...
//!
//! ```ignore
//! let r = hub.domains().traffic_stats_get(...).doit().await
//! let r = hub.domains().traffic_stats_list(...).doit().await
//! let r = hub.domains().get(...).doit().await
//! let r = hub.domains().list(...).doit().await
//! ```
//!
//! The `resource()` and `activity(...)` calls create [builders][builder-pattern]. The second one dealing with `Activities`
//! supports various methods to configure the impending operation (not shown here). It is made such that all required arguments have to be
//! specified right away (i.e. `(...)`), whereas all optional ones can be [built up][builder-pattern] as desired.
//! The `doit()` method performs the actual communication with the server and returns the respective result.
//!
//! # Usage
//!
//! ## Setting up your Project
//!
//! To use this library, you would put the following lines into your `Cargo.toml` file:
//!
//! ```toml
//! [dependencies]
//! google-gmailpostmastertools1 = "*"
//! hyper = "^0.14"
//! hyper-rustls = "^0.22"
//! serde = "^1.0"
//! serde_json = "^1.0"
//! yup-oauth2 = "^5.0"
//! ```
//!
//! ## A complete example
//!
//! ```test_harness,no_run
//! extern crate hyper;
//! extern crate hyper_rustls;
//! extern crate yup_oauth2 as oauth2;
//! extern crate google_gmailpostmastertools1 as gmailpostmastertools1;
//! use gmailpostmastertools1::{Result, Error};
//! # async fn dox() {
//! use std::default::Default;
//! use oauth2;
//! use gmailpostmastertools1::PostmasterTools;
//!
//! // Get an ApplicationSecret instance by some means. It contains the `client_id` and
//! // `client_secret`, among other things.
//! let secret: oauth2::ApplicationSecret = Default::default();
//! // Instantiate the authenticator. It will choose a suitable authentication flow for you,
//! // unless you replace `None` with the desired Flow.
//! // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about
//! // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and
//! // retrieve them from storage.
//! let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
//! secret,
//! yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
//! ).build().await.unwrap();
//! let mut hub = PostmasterTools::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
//! // You can configure optional parameters by calling the respective setters at will, and
//! // execute the final call using `doit()`.
//! // Values shown here are possibly random and not representative !
//! let result = hub.domains().traffic_stats_list("parent")
//! .start_date_year(-52)
//! .start_date_month(-20)
//! .start_date_day(-55)
//! .page_token("gubergren")
//! .page_size(-51)
//! .end_date_year(-12)
//! .end_date_month(-75)
//! .end_date_day(-4)
//! .doit().await;
//!
//! match result {
//! Err(e) => match e {
//! // The Error enum provides details about what exactly happened.
//! // You can also just use its `Debug`, `Display` or `Error` traits
//! Error::HttpError(_)
//! |Error::Io(_)
//! |Error::MissingAPIKey
//! |Error::MissingToken(_)
//! |Error::Cancelled
//! |Error::UploadSizeLimitExceeded(_, _)
//! |Error::Failure(_)
//! |Error::BadRequest(_)
//! |Error::FieldClash(_)
//! |Error::JsonDecodeError(_, _) => println!("{}", e),
//! },
//! Ok(res) => println!("Success: {:?}", res),
//! }
//! # }
//! ```
//! ## Handling Errors
//!
//! All errors produced by the system are provided either as the [Result](client::Result) enumeration returned by
//! the doit() methods, or handed as possibly intermediate results to either the
//! [Hub Delegate](client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html).
//!
//! When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This
//! makes the system potentially resilient to all kinds of errors.
//!
//! ## Uploads and Downloads
//! If a method supports downloads, the response body, which is part of the [Result](client::Result), should be
//! read by you to obtain the media.
//! If such a method also supports a [Response Result](client::ResponseResult), it will return that by default.
//! You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making
//! this call: `.param("alt", "media")`.
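//!
//! For example, a minimal sketch of a media download (assuming the method
//! you call actually supports downloads):
//!
//! ```ignore
//! let resp = hub.domains().get("name").param("alt", "media").doit().await;
//! ```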
//!
//! Methods supporting uploads can do so using up to 2 different protocols:
//! *simple* and *resumable*. The distinctiveness of each is represented by customized
//! `doit(...)` methods, which are then named `upload(...)` and `upload_resumable(...)` respectively.
//!
//! ## Customization and Callbacks
//!
//! You may alter the way an `doit()` method is called by providing a [delegate](client::Delegate) to the
//! [Method Builder](client::CallBuilder) before making the final `doit()` call.
//! Respective methods will be called to provide progress information, as well as determine whether the system should
//! retry on failure.
//!
//! The [delegate trait](client::Delegate) is default-implemented, allowing you to customize it with minimal effort.
//!
//! ## Optional Parts in Server-Requests
//!
//! All structures provided by this library are made to be [encodable](client::RequestValue) and
//! [decodable](client::ResponseResult) via *json*. Optionals are used to indicate that partial requests and responses
//! are valid.
//! Most optionals are considered [Parts](client::Part), which are identifiable by name and will be sent to
//! the server to indicate either the set parts of the request or the desired parts in the response.
//!
//! ## Builder Arguments
//!
//! Using [method builders](client::CallBuilder), you are able to prepare an action call by repeatedly calling its methods.
//! These will always take a single argument, for which the following statements are true.
//!
//! * [PODs][wiki-pod] are handed by copy
//! * strings are passed as `&str`
//! * [request values](client::RequestValue) are moved
//!
//! Arguments will always be copied or cloned into the builder, to make them independent of their original lifetimes.
//!
//! [wiki-pod]: http://en.wikipedia.org/wiki/Plain_old_data_structure
//! [builder-pattern]: http://en.wikipedia.org/wiki/Builder_pattern
//! [google-go-api]: https://github.com/google/google-api-go-client
//!
//!
// Unused attributes happen thanks to defined, but unused structures
// We don't warn about this, as depending on the API, some data structures or facilities are never used.
// Instead of pre-determining this, we just disable the lint. It's manually tuned to not have any
// unused imports in fully featured APIs. Same with unused_mut ... .
#![allow(unused_imports, unused_mut, dead_code)]
// DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
#[macro_use]
extern crate serde_derive;
extern crate hyper;
extern crate serde;
extern crate serde_json;
extern crate yup_oauth2 as oauth2;
extern crate mime;
extern crate url;
pub mod api;
pub mod client;
// Re-export the hub type and some basic client structs
pub use api::PostmasterTools;
pub use client::{Result, Error, Delegate}; | //! Generally speaking, you can invoke *Activities* like this:
//!
//! ```Rust,ignore
//! let r = hub.resource().activity(...).doit().await |
canister.rs | // This canister is a placeholder
fn main() | {} |
|
startQiskit_noisy62.py | # qubit number=3
# total number=10
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
prog = QuantumCircuit(input_qubit)
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.y(input_qubit[3]) # number=5
for edge in E:
k = edge[0]
l = edge[1]
prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
prog.p(gamma, k)
prog.p(gamma, l)
prog.rx(2 * beta, range(len(V)))
prog.swap(input_qubit[1],input_qubit[0]) # number=6
prog.swap(input_qubit[1],input_qubit[0]) # number=7
prog.y(input_qubit[3]) # number=8
prog.y(input_qubit[3]) # number=9
# circuit end
return prog
if __name__ == '__main__':
| n = 4
V = np.arange(0, n, 1)
E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
G = nx.Graph()
G.add_nodes_from(V)
G.add_weighted_edges_from(E)
step_size = 0.1
a_gamma = np.arange(0, np.pi, step_size)
a_beta = np.arange(0, np.pi, step_size)
a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
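    # Grid-search the (gamma, beta) plane: F1 below is an analytic estimate
    # of the level-1 QAOA objective, and the argmax picks the best angle pair.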
F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
1 + np.cos(4 * a_gamma) ** 2)
result = np.where(F1 == np.amax(F1))
a = list(zip(result[0], result[1]))[0]
gamma = a[0] * step_size
beta = a[1] * step_size
prog = make_circuit(4)
sample_shot =5600
writefile = open("../data/startQiskit_noisy62.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = FakeYorktown()
circuit1 = transpile(prog, FakeYorktown())
circuit1.measure_all()
prog = circuit1
info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close() |
|
commandInfo.py | from typing import Dict, Any
from telegram import Update, ParseMode
from telegram.ext import CallbackContext
from config.constants import (
USER_DATA_V1_SETTINGS_CAMPUS,
USER_DATA_V1_SETTINGS_ONLINE,
USER_DATA_V1_INTRA_LOGIN,
USER_DATA_V1_INTRA_CAMPUS,
USER_DATA_V1_SETTINGS_ACTIVE,
USER_DATA_V1_AUTHORIZED,
USER_DATA_V1_TELEGRAM_USERNAME,
USER_DATA_V1_MATCH_WITH,
)
from config.env import ADMIN_IDS
from utils.lang import COMMAND_DENIED_NOT_AUTHORIZED
def info(data: Dict[str, Any], is_admin_request: bool = False) -> str:
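    """Render the stored profile fields of one user as 'key: value' lines."""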
fields = [
USER_DATA_V1_INTRA_LOGIN,
USER_DATA_V1_INTRA_CAMPUS,
USER_DATA_V1_SETTINGS_CAMPUS,
USER_DATA_V1_SETTINGS_ONLINE,
USER_DATA_V1_SETTINGS_ACTIVE,
USER_DATA_V1_TELEGRAM_USERNAME,
]
if is_admin_request:
fields.append(USER_DATA_V1_MATCH_WITH)
return '\n'.join(['{}: {}'.format(x, data.get(x, '???')) for x in fields])
def info_other(upd: Update, ctx: CallbackContext) -> None:
param = ctx.args[0]
user = None
for uid, udata in ctx.dispatcher.user_data.items():
if USER_DATA_V1_INTRA_LOGIN not in udata:
continue
if udata[USER_DATA_V1_INTRA_LOGIN] == param:
user = udata
break
if str(uid) == param:
user = udata
break
if not user:
ctx.bot.send_message(upd.effective_user.id, text='{} not found'.format(param))
return
message = info(user, is_admin_request=True)
ctx.bot.send_message(
upd.effective_user.id,
text='```\ntelegram.id: {}\n{}\n```'.format(
uid,
message
),
parse_mode=ParseMode.MARKDOWN
)
def info_self(upd: Update, ctx: CallbackContext) -> None:
|
def handler_command_info(upd: Update, ctx: CallbackContext) -> None:
if not ctx.user_data.get(USER_DATA_V1_AUTHORIZED, False):
ctx.bot.send_message(upd.effective_user.id, text=COMMAND_DENIED_NOT_AUTHORIZED)
return
if ctx.args and upd.effective_user.id in ADMIN_IDS:
return info_other(upd, ctx)
return info_self(upd, ctx)
| message = info(ctx.user_data)
ctx.bot.send_message(upd.effective_user.id, text='```\n{}\n```'.format(message), parse_mode=ParseMode.MARKDOWN) |
classes.py | """List of classes in lexical and NPZ order."""
# ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! !
# ! PLEASE DO NOT MODIFY THIS LIST OF CLASSES !
# ! MODIFYING IT BREAKS THE SAVED NPZ ORDER !
# ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! ! !
CLASSES = [
"A", # 0
"Ar", # 1
"B", # 2
"Ba", # 3
"C", # 4
"DT pog", # 5
"EN pn", # 6
"Gn", # 7
"Go", # 8
"LL", # 9
"Me", # 10
"N", # 11
"Or", # 12
"Po", # 13 | "S", # 16
"SNA", # 17
"SNP pm", # 18
"Se", # 19
"Sn", # 20
"UL", # 21
"aii", # 22
"ais", # 23
"ii", # 24
"is", # 25
"n_", # 26
] | "Pog", # 14
"Pt", # 15 |
admin_log.js |
'use strict'
const { Model } = require('../../class')
const { query } = require('../mysql')
class | extends Model {
constructor() {
super('admin_log')
}
create({ userId = 0, url = '', params = '' }) {
const post = [userId, url, params]
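        // Values are bound through placeholders so the driver escapes them;
        // nothing user-supplied is concatenated into the SQL string.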
const sql = `
insert into \`${this.tableName}\`
(user_id, url, params)
values
(?, ?, ?)
`
return query(sql, post)
}
getList({ startId = 0, pageSize = 10 }) {
const post = [startId, pageSize]
const sql = `
select
id,
user_id,
url,
params,
create_at
from ${this.tableName}
where
id >= ?
and is_valid = 1
order by id desc
limit ? offset 0
`
return query(sql, post)
}
}
module.exports = new AdminLogModel()
| AdminLogModel |
invoke.go | // Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package gocommand is a helper for calling the go command.
package gocommand
import (
"bytes"
"context"
"fmt"
"io"
"os"
"regexp"
"strconv"
"strings"
"sync"
"time"
exec "golang.org/x/sys/execabs"
"golang.org/x/tools/internal/event"
)
// A Runner runs go command invocations and serializes
// them if it sees a concurrency error.
type Runner struct {
// once guards the runner initialization.
once sync.Once
// inFlight tracks available workers.
inFlight chan struct{}
// serialized guards the ability to run a go command serially,
// to avoid deadlocks when claiming workers.
serialized chan struct{}
}
const maxInFlight = 10
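// A typical call site looks roughly like this (sketch; the field values
// are illustrative only):
//
//	runner := &Runner{}
//	stdout, err := runner.Run(ctx, Invocation{
//		Verb:       "list",
//		Args:       []string{"-m", "all"},
//		WorkingDir: moduleDir,
//	})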
func (runner *Runner) initialize() {
runner.once.Do(func() {
runner.inFlight = make(chan struct{}, maxInFlight)
runner.serialized = make(chan struct{}, 1)
})
}
// 1.13: go: updates to go.mod needed, but contents have changed
// 1.14: go: updating go.mod: existing contents have changed since last read
var modConcurrencyError = regexp.MustCompile(`go:.*go.mod.*contents have changed`)
// Run is a convenience wrapper around RunRaw.
// It returns only stdout and a "friendly" error.
func (runner *Runner) Run(ctx context.Context, inv Invocation) (*bytes.Buffer, error) {
stdout, _, friendly, _ := runner.RunRaw(ctx, inv)
return stdout, friendly
}
// RunPiped runs the invocation serially, always waiting for any concurrent
// invocations to complete first.
func (runner *Runner) RunPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) error {
_, err := runner.runPiped(ctx, inv, stdout, stderr)
return err
}
// RunRaw runs the invocation, serializing requests only if they fight over
// go.mod changes.
func (runner *Runner) RunRaw(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) {
// Make sure the runner is always initialized.
runner.initialize()
// First, try to run the go command concurrently.
stdout, stderr, friendlyErr, err := runner.runConcurrent(ctx, inv)
// If we encounter a load concurrency error, we need to retry serially.
if friendlyErr == nil || !modConcurrencyError.MatchString(friendlyErr.Error()) {
return stdout, stderr, friendlyErr, err
}
event.Error(ctx, "Load concurrency error, will retry serially", err)
// Run serially by calling runPiped.
stdout.Reset()
stderr.Reset()
friendlyErr, err = runner.runPiped(ctx, inv, stdout, stderr)
return stdout, stderr, friendlyErr, err
}
func (runner *Runner) runConcurrent(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) {
// Wait for 1 worker to become available.
select {
case <-ctx.Done():
return nil, nil, nil, ctx.Err()
case runner.inFlight <- struct{}{}:
defer func() { <-runner.inFlight }()
}
stdout, stderr := &bytes.Buffer{}, &bytes.Buffer{}
friendlyErr, err := inv.runWithFriendlyError(ctx, stdout, stderr)
return stdout, stderr, friendlyErr, err
}
func (runner *Runner) runPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) (error, error) {
// Make sure the runner is always initialized.
runner.initialize()
// Acquire the serialization lock. This avoids deadlocks between two
// runPiped commands.
select {
case <-ctx.Done():
return nil, ctx.Err()
case runner.serialized <- struct{}{}: | defer func() { <-runner.serialized }()
}
// Wait for all in-progress go commands to return before proceeding,
// to avoid load concurrency errors.
for i := 0; i < maxInFlight; i++ {
select {
case <-ctx.Done():
return nil, ctx.Err()
case runner.inFlight <- struct{}{}:
// Make sure we always "return" any workers we took.
defer func() { <-runner.inFlight }()
}
}
return inv.runWithFriendlyError(ctx, stdout, stderr)
}
// An Invocation represents a call to the go command.
type Invocation struct {
Verb string
Args []string
BuildFlags []string
// If ModFlag is set, the go command is invoked with -mod=ModFlag.
ModFlag string
// If ModFile is set, the go command is invoked with -modfile=ModFile.
ModFile string
// If Overlay is set, the go command is invoked with -overlay=Overlay.
Overlay string
// If CleanEnv is set, the invocation will run only with the environment
// in Env, not starting with os.Environ.
CleanEnv bool
Env []string
WorkingDir string
Logf func(format string, args ...interface{})
}
func (i *Invocation) runWithFriendlyError(ctx context.Context, stdout, stderr io.Writer) (friendlyError error, rawError error) {
rawError = i.run(ctx, stdout, stderr)
if rawError != nil {
friendlyError = rawError
// Check for 'go' executable not being found.
if ee, ok := rawError.(*exec.Error); ok && ee.Err == exec.ErrNotFound {
friendlyError = fmt.Errorf("go command required, not found: %v", ee)
}
if ctx.Err() != nil {
friendlyError = ctx.Err()
}
friendlyError = fmt.Errorf("err: %v: stderr: %s", friendlyError, stderr)
}
return
}
func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error {
log := i.Logf
if log == nil {
log = func(string, ...interface{}) {}
}
goArgs := []string{i.Verb}
appendModFile := func() {
if i.ModFile != "" {
goArgs = append(goArgs, "-modfile="+i.ModFile)
}
}
appendModFlag := func() {
if i.ModFlag != "" {
goArgs = append(goArgs, "-mod="+i.ModFlag)
}
}
appendOverlayFlag := func() {
if i.Overlay != "" {
goArgs = append(goArgs, "-overlay="+i.Overlay)
}
}
switch i.Verb {
case "env", "version":
goArgs = append(goArgs, i.Args...)
case "mod":
// mod needs the sub-verb before flags.
goArgs = append(goArgs, i.Args[0])
appendModFile()
goArgs = append(goArgs, i.Args[1:]...)
case "get":
goArgs = append(goArgs, i.BuildFlags...)
appendModFile()
goArgs = append(goArgs, i.Args...)
default: // notably list and build.
goArgs = append(goArgs, i.BuildFlags...)
appendModFile()
appendModFlag()
appendOverlayFlag()
goArgs = append(goArgs, i.Args...)
}
cmd := exec.Command("go", goArgs...)
cmd.Stdout = stdout
cmd.Stderr = stderr
// On darwin the cwd gets resolved to the real path, which breaks anything that
// expects the working directory to keep the original path, including the
// go command when dealing with modules.
// The Go stdlib has a special feature where if the cwd and the PWD are the
// same node then it trusts the PWD, so by setting it in the env for the child
// process we fix up all the paths returned by the go command.
if !i.CleanEnv {
cmd.Env = os.Environ()
}
cmd.Env = append(cmd.Env, i.Env...)
if i.WorkingDir != "" {
cmd.Env = append(cmd.Env, "PWD="+i.WorkingDir)
cmd.Dir = i.WorkingDir
}
defer func(start time.Time) { log("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now())
return runCmdContext(ctx, cmd)
}
// runCmdContext is like exec.CommandContext except it sends os.Interrupt
// before os.Kill.
func runCmdContext(ctx context.Context, cmd *exec.Cmd) error {
if err := cmd.Start(); err != nil {
return err
}
resChan := make(chan error, 1)
go func() {
resChan <- cmd.Wait()
}()
select {
case err := <-resChan:
return err
case <-ctx.Done():
}
// Cancelled. Interrupt and see if it ends voluntarily.
cmd.Process.Signal(os.Interrupt)
select {
case err := <-resChan:
return err
case <-time.After(time.Second):
}
// Didn't shut down in response to interrupt. Kill it hard.
cmd.Process.Kill()
return <-resChan
}
func cmdDebugStr(cmd *exec.Cmd) string {
env := make(map[string]string)
for _, kv := range cmd.Env {
split := strings.SplitN(kv, "=", 2)
k, v := split[0], split[1]
env[k] = v
}
var args []string
for _, arg := range cmd.Args {
quoted := strconv.Quote(arg)
if quoted[1:len(quoted)-1] != arg || strings.Contains(arg, " ") {
args = append(args, quoted)
} else {
args = append(args, arg)
}
}
return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], strings.Join(args, " "))
} | |
datapipeline.rs | //! Types for the `DataPipeline` service.
/// The [`AWS::DataPipeline::Pipeline`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html) resource type.
#[derive(Debug, Default)]
pub struct Pipeline {
properties: PipelineProperties
}
/// Properties for the `Pipeline` resource.
#[derive(Debug, Default)]
pub struct PipelineProperties {
/// Property [`Activate`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html#cfn-datapipeline-pipeline-activate).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub activate: Option<::Value<bool>>,
/// Property [`Description`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html#cfn-datapipeline-pipeline-description).
///
/// Update type: _Immutable_.
/// AWS CloudFormation replaces the resource when you change this property.
pub description: Option<::Value<String>>,
/// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html#cfn-datapipeline-pipeline-name).
///
/// Update type: _Immutable_.
/// AWS CloudFormation replaces the resource when you change this property.
pub name: ::Value<String>,
/// Property [`ParameterObjects`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html#cfn-datapipeline-pipeline-parameterobjects).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub parameter_objects: ::ValueList<self::pipeline::ParameterObject>,
/// Property [`ParameterValues`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html#cfn-datapipeline-pipeline-parametervalues).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub parameter_values: Option<::ValueList<self::pipeline::ParameterValue>>,
/// Property [`PipelineObjects`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html#cfn-datapipeline-pipeline-pipelineobjects).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub pipeline_objects: Option<::ValueList<self::pipeline::PipelineObject>>,
/// Property [`PipelineTags`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-datapipeline-pipeline.html#cfn-datapipeline-pipeline-pipelinetags).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub pipeline_tags: Option<::ValueList<self::pipeline::PipelineTag>>,
}
impl ::serde::Serialize for PipelineProperties {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
if let Some(ref activate) = self.activate {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Activate", activate)?;
}
if let Some(ref description) = self.description {
::serde::ser::SerializeMap::serialize_entry(&mut map, "Description", description)?;
}
::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "ParameterObjects", &self.parameter_objects)?;
if let Some(ref parameter_values) = self.parameter_values {
::serde::ser::SerializeMap::serialize_entry(&mut map, "ParameterValues", parameter_values)?;
}
if let Some(ref pipeline_objects) = self.pipeline_objects {
::serde::ser::SerializeMap::serialize_entry(&mut map, "PipelineObjects", pipeline_objects)?;
}
if let Some(ref pipeline_tags) = self.pipeline_tags {
::serde::ser::SerializeMap::serialize_entry(&mut map, "PipelineTags", pipeline_tags)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl<'de> ::serde::Deserialize<'de> for PipelineProperties {
fn deserialize<D: ::serde::Deserializer<'de>>(d: D) -> Result<PipelineProperties, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = PipelineProperties;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type PipelineProperties")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut activate: Option<::Value<bool>> = None;
let mut description: Option<::Value<String>> = None;
let mut name: Option<::Value<String>> = None;
let mut parameter_objects: Option<::ValueList<self::pipeline::ParameterObject>> = None;
let mut parameter_values: Option<::ValueList<self::pipeline::ParameterValue>> = None;
let mut pipeline_objects: Option<::ValueList<self::pipeline::PipelineObject>> = None;
let mut pipeline_tags: Option<::ValueList<self::pipeline::PipelineTag>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Activate" => {
activate = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Description" => {
description = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Name" => {
name = ::serde::de::MapAccess::next_value(&mut map)?;
}
"ParameterObjects" => {
parameter_objects = ::serde::de::MapAccess::next_value(&mut map)?;
}
"ParameterValues" => {
parameter_values = ::serde::de::MapAccess::next_value(&mut map)?;
}
"PipelineObjects" => {
pipeline_objects = ::serde::de::MapAccess::next_value(&mut map)?;
}
"PipelineTags" => {
pipeline_tags = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(PipelineProperties {
activate: activate,
description: description,
name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
parameter_objects: parameter_objects.ok_or(::serde::de::Error::missing_field("ParameterObjects"))?,
parameter_values: parameter_values,
pipeline_objects: pipeline_objects,
pipeline_tags: pipeline_tags,
})
}
}
d.deserialize_map(Visitor)
}
}
impl ::Resource for Pipeline {
type Properties = PipelineProperties;
const TYPE: &'static str = "AWS::DataPipeline::Pipeline";
fn properties(&self) -> &PipelineProperties {
&self.properties
}
fn properties_mut(&mut self) -> &mut PipelineProperties {
&mut self.properties
}
}
impl ::private::Sealed for Pipeline {}
impl From<PipelineProperties> for Pipeline {
fn from(properties: PipelineProperties) -> Pipeline {
Pipeline { properties }
}
}
pub mod pipeline {
//! Property types for the `Pipeline` resource.
/// The [`AWS::DataPipeline::Pipeline.Field`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects-fields.html) property type.
#[derive(Debug, Default)]
pub struct Field {
/// Property [`Key`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects-fields.html#cfn-datapipeline-pipeline-pipelineobjects-fields-key).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub key: ::Value<String>,
/// Property [`RefValue`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects-fields.html#cfn-datapipeline-pipeline-pipelineobjects-fields-refvalue).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub ref_value: Option<::Value<String>>,
/// Property [`StringValue`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects-fields.html#cfn-datapipeline-pipeline-pipelineobjects-fields-stringvalue).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub string_value: Option<::Value<String>>,
}
impl ::codec::SerializeValue for Field {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Key", &self.key)?;
if let Some(ref ref_value) = self.ref_value {
::serde::ser::SerializeMap::serialize_entry(&mut map, "RefValue", ref_value)?;
}
if let Some(ref string_value) = self.string_value {
::serde::ser::SerializeMap::serialize_entry(&mut map, "StringValue", string_value)?;
}
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for Field {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<Field, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type Field")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut key: Option<::Value<String>> = None;
let mut ref_value: Option<::Value<String>> = None;
let mut string_value: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Key" => {
key = ::serde::de::MapAccess::next_value(&mut map)?;
}
"RefValue" => {
ref_value = ::serde::de::MapAccess::next_value(&mut map)?;
}
"StringValue" => {
string_value = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(Field {
key: key.ok_or(::serde::de::Error::missing_field("Key"))?,
ref_value: ref_value,
string_value: string_value,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::DataPipeline::Pipeline.ParameterAttribute`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parameterobjects-attributes.html) property type.
#[derive(Debug, Default)]
pub struct ParameterAttribute {
/// Property [`Key`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parameterobjects-attributes.html#cfn-datapipeline-pipeline-parameterobjects-attribtues-key).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub key: ::Value<String>,
/// Property [`StringValue`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parameterobjects-attributes.html#cfn-datapipeline-pipeline-parameterobjects-attribtues-stringvalue).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub string_value: ::Value<String>,
}
impl ::codec::SerializeValue for ParameterAttribute {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Key", &self.key)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "StringValue", &self.string_value)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for ParameterAttribute {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ParameterAttribute, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = ParameterAttribute;
fn | (&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type ParameterAttribute")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut key: Option<::Value<String>> = None;
let mut string_value: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Key" => {
key = ::serde::de::MapAccess::next_value(&mut map)?;
}
"StringValue" => {
string_value = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(ParameterAttribute {
key: key.ok_or(::serde::de::Error::missing_field("Key"))?,
string_value: string_value.ok_or(::serde::de::Error::missing_field("StringValue"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::DataPipeline::Pipeline.ParameterObject`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parameterobjects.html) property type.
#[derive(Debug, Default)]
pub struct ParameterObject {
/// Property [`Attributes`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parameterobjects.html#cfn-datapipeline-pipeline-parameterobjects-attributes).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub attributes: ::ValueList<ParameterAttribute>,
/// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parameterobjects.html#cfn-datapipeline-pipeline-parameterobjects-id).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub id: ::Value<String>,
}
impl ::codec::SerializeValue for ParameterObject {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Attributes", &self.attributes)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for ParameterObject {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ParameterObject, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = ParameterObject;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type ParameterObject")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut attributes: Option<::ValueList<ParameterAttribute>> = None;
let mut id: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Attributes" => {
attributes = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Id" => {
id = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(ParameterObject {
attributes: attributes.ok_or(::serde::de::Error::missing_field("Attributes"))?,
id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::DataPipeline::Pipeline.ParameterValue`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parametervalues.html) property type.
#[derive(Debug, Default)]
pub struct ParameterValue {
/// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parametervalues.html#cfn-datapipeline-pipeline-parametervalues-id).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub id: ::Value<String>,
/// Property [`StringValue`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-parametervalues.html#cfn-datapipeline-pipeline-parametervalues-stringvalue).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub string_value: ::Value<String>,
}
impl ::codec::SerializeValue for ParameterValue {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "StringValue", &self.string_value)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for ParameterValue {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<ParameterValue, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = ParameterValue;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type ParameterValue")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut id: Option<::Value<String>> = None;
let mut string_value: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Id" => {
id = ::serde::de::MapAccess::next_value(&mut map)?;
}
"StringValue" => {
string_value = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(ParameterValue {
id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
string_value: string_value.ok_or(::serde::de::Error::missing_field("StringValue"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::DataPipeline::Pipeline.PipelineObject`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects.html) property type.
#[derive(Debug, Default)]
pub struct PipelineObject {
/// Property [`Fields`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects.html#cfn-datapipeline-pipeline-pipelineobjects-fields).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub fields: ::ValueList<Field>,
/// Property [`Id`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects.html#cfn-datapipeline-pipeline-pipelineobjects-id).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub id: ::Value<String>,
/// Property [`Name`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelineobjects.html#cfn-datapipeline-pipeline-pipelineobjects-name).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub name: ::Value<String>,
}
impl ::codec::SerializeValue for PipelineObject {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Fields", &self.fields)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Id", &self.id)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Name", &self.name)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for PipelineObject {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<PipelineObject, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = PipelineObject;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type PipelineObject")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut fields: Option<::ValueList<Field>> = None;
let mut id: Option<::Value<String>> = None;
let mut name: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Fields" => {
fields = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Id" => {
id = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Name" => {
name = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(PipelineObject {
fields: fields.ok_or(::serde::de::Error::missing_field("Fields"))?,
id: id.ok_or(::serde::de::Error::missing_field("Id"))?,
name: name.ok_or(::serde::de::Error::missing_field("Name"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
/// The [`AWS::DataPipeline::Pipeline.PipelineTag`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelinetags.html) property type.
#[derive(Debug, Default)]
pub struct PipelineTag {
/// Property [`Key`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelinetags.html#cfn-datapipeline-pipeline-pipelinetags-key).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub key: ::Value<String>,
/// Property [`Value`](http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-datapipeline-pipeline-pipelinetags.html#cfn-datapipeline-pipeline-pipelinetags-value).
///
/// Update type: _Mutable_.
/// AWS CloudFormation doesn't replace the resource when you change this property.
pub value: ::Value<String>,
}
impl ::codec::SerializeValue for PipelineTag {
fn serialize<S: ::serde::Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
let mut map = ::serde::Serializer::serialize_map(s, None)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Key", &self.key)?;
::serde::ser::SerializeMap::serialize_entry(&mut map, "Value", &self.value)?;
::serde::ser::SerializeMap::end(map)
}
}
impl ::codec::DeserializeValue for PipelineTag {
fn deserialize<'de, D: ::serde::Deserializer<'de>>(d: D) -> Result<PipelineTag, D::Error> {
struct Visitor;
impl<'de> ::serde::de::Visitor<'de> for Visitor {
type Value = PipelineTag;
fn expecting(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "a struct of type PipelineTag")
}
fn visit_map<A: ::serde::de::MapAccess<'de>>(self, mut map: A) -> Result<Self::Value, A::Error> {
let mut key: Option<::Value<String>> = None;
let mut value: Option<::Value<String>> = None;
while let Some(__cfn_key) = ::serde::de::MapAccess::next_key::<String>(&mut map)? {
match __cfn_key.as_ref() {
"Key" => {
key = ::serde::de::MapAccess::next_value(&mut map)?;
}
"Value" => {
value = ::serde::de::MapAccess::next_value(&mut map)?;
}
_ => {}
}
}
Ok(PipelineTag {
key: key.ok_or(::serde::de::Error::missing_field("Key"))?,
value: value.ok_or(::serde::de::Error::missing_field("Value"))?,
})
}
}
d.deserialize_map(Visitor)
}
}
}
| expecting |
index.js | const express = require('express');
const server = express()
server.get('/', (req, res)=> {
res.send("<h1>Hello, world</h1>");
});
const PORT = 8000; | server.listen(PORT, ()=> console.log(`server running on port ${PORT}`)) |
|
data.go | package codegen
import (
"fmt"
"sort"
"github.com/99designs/gqlgen/codegen/config"
"github.com/pkg/errors"
"github.com/vektah/gqlparser/ast"
)
// Data is a unified model of the code to be generated. Plugins may modify this structure to do things like implement
// resolvers or directives automatically (eg grpc, validation)
type Data struct {
Config *config.Config
Schema *ast.Schema
SchemaStr map[string]string
Directives DirectiveList
Objects Objects
Inputs Objects
Interfaces map[string]*Interface
ReferencedTypes map[string]*config.TypeReference
ComplexityRoots map[string]*Object
QueryRoot *Object
MutationRoot *Object
SubscriptionRoot *Object
}
type builder struct {
Config *config.Config
Schema *ast.Schema
SchemaStr map[string]string
Binder *config.Binder
Directives map[string]*Directive
}
func BuildData(cfg *config.Config) (*Data, error) |
func (b *builder) injectIntrospectionRoots(s *Data) error {
obj := s.Objects.ByName(b.Schema.Query.Name)
if obj == nil {
return fmt.Errorf("root query type must be defined")
}
__type, err := b.buildField(obj, &ast.FieldDefinition{
Name: "__type",
Type: ast.NamedType("__Type", nil),
Arguments: []*ast.ArgumentDefinition{
{
Name: "name",
Type: ast.NonNullNamedType("String", nil),
},
},
})
if err != nil {
return err
}
__schema, err := b.buildField(obj, &ast.FieldDefinition{
Name: "__schema",
Type: ast.NamedType("__Schema", nil),
})
if err != nil {
return err
}
obj.Fields = append(obj.Fields, __type, __schema)
return nil
}
| {
b := builder{
Config: cfg,
}
var err error
b.Schema, b.SchemaStr, err = cfg.LoadSchema()
if err != nil {
return nil, err
}
err = cfg.Check()
if err != nil {
return nil, err
}
err = cfg.Autobind(b.Schema)
if err != nil {
return nil, err
}
cfg.InjectBuiltins(b.Schema)
b.Binder, err = b.Config.NewBinder(b.Schema)
if err != nil {
return nil, err
}
b.Directives, err = b.buildDirectives()
if err != nil {
return nil, err
}
dataDirectives := make(map[string]*Directive)
for name, d := range b.Directives {
if !d.Builtin {
dataDirectives[name] = d
}
}
s := Data{
Config: cfg,
Directives: dataDirectives,
Schema: b.Schema,
SchemaStr: b.SchemaStr,
Interfaces: map[string]*Interface{},
}
for _, schemaType := range b.Schema.Types {
switch schemaType.Kind {
case ast.Object:
obj, err := b.buildObject(schemaType)
if err != nil {
return nil, errors.Wrap(err, "unable to build object definition")
}
s.Objects = append(s.Objects, obj)
case ast.InputObject:
input, err := b.buildObject(schemaType)
if err != nil {
return nil, errors.Wrap(err, "unable to build input definition")
}
s.Inputs = append(s.Inputs, input)
case ast.Union, ast.Interface:
s.Interfaces[schemaType.Name] = b.buildInterface(schemaType)
}
}
if s.Schema.Query != nil {
s.QueryRoot = s.Objects.ByName(s.Schema.Query.Name)
} else {
return nil, fmt.Errorf("query entry point missing")
}
if s.Schema.Mutation != nil {
s.MutationRoot = s.Objects.ByName(s.Schema.Mutation.Name)
}
if s.Schema.Subscription != nil {
s.SubscriptionRoot = s.Objects.ByName(s.Schema.Subscription.Name)
}
if err := b.injectIntrospectionRoots(&s); err != nil {
return nil, err
}
s.ReferencedTypes = b.buildTypes()
sort.Slice(s.Objects, func(i, j int) bool {
return s.Objects[i].Definition.Name < s.Objects[j].Definition.Name
})
sort.Slice(s.Inputs, func(i, j int) bool {
return s.Inputs[i].Definition.Name < s.Inputs[j].Definition.Name
})
return &s, nil
} |
context.mock.go | // +build debug
// Code generated by MockGen. DO NOT EDIT.
// Source: context.go
package internal
import (
gomock "github.com/golang/mock/gomock"
reflect "reflect"
)
// MockContext is a mock of Context interface
type MockContext struct {
ctrl *gomock.Controller
recorder *MockContextMockRecorder
}
// MockContextMockRecorder is the mock recorder for MockContext
type MockContextMockRecorder struct {
mock *MockContext
}
// NewMockContext creates a new mock instance
func | (ctrl *gomock.Controller) *MockContext {
mock := &MockContext{ctrl: ctrl}
mock.recorder = &MockContextMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use
func (m *MockContext) EXPECT() *MockContextMockRecorder {
return m.recorder
}
// Set mocks base method
func (m *MockContext) Set(key, value interface{}) {
m.ctrl.T.Helper()
m.ctrl.Call(m, "Set", key, value)
}
// Set indicates an expected call of Set
func (mr *MockContextMockRecorder) Set(key, value interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Set", reflect.TypeOf((*MockContext)(nil).Set), key, value)
}
// Get mocks base method
func (m *MockContext) Get(key interface{}) (interface{}, bool) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Get", key)
ret0, _ := ret[0].(interface{})
ret1, _ := ret[1].(bool)
return ret0, ret1
}
// Get indicates an expected call of Get
func (mr *MockContextMockRecorder) Get(key interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Get", reflect.TypeOf((*MockContext)(nil).Get), key)
}
// Persist mocks base method
func (m *MockContext) Persist(key interface{}, fn func() (interface{}, error)) interface{} {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "Persist", key, fn)
ret0, _ := ret[0].(interface{})
return ret0
}
// Persist indicates an expected call of Persist
func (mr *MockContextMockRecorder) Persist(key, fn interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Persist", reflect.TypeOf((*MockContext)(nil).Persist), key, fn)
}
| NewMockContext |
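The generated gomock boilerplate above hand-implements the record-and-assert pattern; for comparison, Python's unittest.mock provides the same idea out of the box (a minimal sketch, not tied to this codebase):

from unittest import mock

# Stand-in for the Context interface; every attribute is auto-mocked.
ctx = mock.Mock()
ctx.Set("key", 42)
ctx.Get("key")

# These assertions play the role of the MockRecorder above.
ctx.Set.assert_called_once_with("key", 42)
ctx.Get.assert_called_once_with("key")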
setup.py | #!/usr/bin/env python3
import os
from setuptools import setup, find_packages
def get_version():
from pyxrd.__version import __version__
if __version__.startswith("v"):
__version__ = __version__.replace("v", "")
return "%s" % __version__
def get_install_requires():
return [
'setuptools',
'numpy>=1.11',
'scipy>=1.1.0',
'matplotlib>=2.2.2',
'Pyro4>=4.41',
'deap>=1.0.1',
'cairocffi', | def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name="PyXRD",
version=get_version(),
description="PyXRD is a python implementation of the matrix algorithm developed for the X-ray diffraction analysis of disordered lamellar structures",
long_description=read('README.md'),
keywords="XRD disorder mixed-layers",
author="Mathijs Dumon",
author_email="[email protected]",
url="http://github.org/mathijs-dumon/PyXRD",
license="BSD",
setup_requires=[ "setuptools_git >= 1.2", ],
packages=find_packages(exclude=["test.*", "test", "tests_mvc", "tests_mvc.*"]),
include_package_data=True,
install_requires=get_install_requires(),
zip_safe=False,
classifiers=[
"Development Status :: 4 - Beta",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.4",
"Environment :: Win32 (MS Windows)",
"Environment :: X11 Applications :: Gnome",
"Environment :: X11 Applications :: GTK",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Science/Research",
"Topic :: Utilities",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Visualization",
"Natural Language :: English",
"License :: OSI Approved :: BSD License",
],
) | 'pygobject>=3.20'
]
|
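get_version above normalizes tags such as v0.9.8 to a plain version string; a standalone sketch of that normalization (using startswith instead of replace, so only a leading 'v' is stripped — replace('v', '') would also remove interior v's):

def normalize_version(tag: str) -> str:
    # Strip a single leading 'v' from tags like 'v0.9.8'.
    return tag[1:] if tag.startswith("v") else tag

assert normalize_version("v0.9.8") == "0.9.8"
assert normalize_version("0.9.8") == "0.9.8"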
imports_spec.js | import {describe, it, expect} from 'test_lib/test_lib';
import {Foo, Bar} from './foo';
// TODO: Does not work, as dart does not support renaming imports
// import {Foo as F} from './fixtures/foo';
import * as fooModule from './foo';
import * as exportModule from './export';
import {Type} from 'facade/lang'; | export function main() {
describe('imports', function() {
it('should work', function() {
expect(Foo).toBe('FOO');
expect(Bar).toBe('BAR');
// TODO: Does not work
// assert(F == 'FOO');
expect(fooModule.Foo).toBe('FOO');
expect(fooModule.Bar).toBe('BAR');
expect(exportModule.Foo).toBe('FOO');
expect(exportModule.Bar).toBe('BAR');
expect(Type).toBeTruthy();
});
});
} | |
resolvers.js | const { User, Book} = require('../models');
const { signToken } = require('../utils/auth');
const { AuthenticationError } = require('apollo-server-express');
const resolvers = {
Query: {
me: async (parent, args, context) => {
if(context.user) {
const userData = await User.findOne({ _id: context.user._id })
.select('-__v -password')
return userData;
}
throw new AuthenticationError('You\'re not logged in');
}
},
Mutation: {
login: async(parent, {email, password }) => {
const user = await User.findOne({ email });
if (!user) {
throw new AuthenticationError('Incorrect Info');
}
const correctPassword = await user.isCorrectPassword(password);
if (!correctPassword) {
throw new AuthenticationError('Incorrect Info');
}
const token = signToken(user);
return { token, user };
},
addUser: async (parent, args) => {
const user = await User.create(args);
const token = signToken(user);
return { token, user };
},
saveBook: async (parent, { input }, context) => {
if (context.user) { | );
return updatedUser;
}
throw new AuthenticationError("Please log in")
},
removeBook: async (parent, args, context) => {
if (context.user) {
const updatedUser = await User.findOneAndUpdate(
{ _id: context.user._id },
{ $pull: { savedBooks: { bookId: args.bookId } } },
{ new: true }
);
return updatedUser;
}
throw new AuthenticationError("Please log in")
}
}
};
module.exports = resolvers; | const updatedUser = await User.findByIdAndUpdate(
{ _id: context.user._id },
{ $addToSet: { savedBooks: input } },
{ new: true } |
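The login resolver above verifies the password and then issues a signed token via signToken, whose internals live in ../utils/auth and are not shown. A minimal Python sketch of the same issue-and-verify flow using PyJWT (the secret and payload fields here are hypothetical):

import jwt  # PyJWT; assumption: pip install PyJWT

SECRET = "change-me"  # hypothetical signing secret

def sign_token(user_id: str) -> str:
    # Issue a token carrying the user id, analogous to signToken(user).
    return jwt.encode({"sub": user_id}, SECRET, algorithm="HS256")

def verify_token(token: str) -> str:
    # Raises jwt.InvalidTokenError on a bad signature or malformed token.
    return jwt.decode(token, SECRET, algorithms=["HS256"])["sub"]

token = sign_token("user-123")
assert verify_token(token) == "user-123"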
utils.py | from __future__ import division
import numpy as np
from scipy.stats import norm
import random
import matplotlib.pyplot as plt
import seaborn as sns
from scipy.optimize import curve_fit
from scipy import stats
import networkx as nx
import pandas as pd
from mpl_toolkits.axes_grid1.inset_locator import InsetPosition
class Initializer(object):
"""
Helper class to initialize the matrices for the SORN
"""
def __init__(self):
pass
@staticmethod
def generate_strong_inp(length: int, reservoir_size: int):
"""Generate strong one-hot vector of input. Random neurons in the reservoir acts as inputs
Args:
length (int): Number of input neurons
Returns:
inp (array): Input vector of length equals the number of neurons in the reservoir
with randomly chosen neuron set active
idx (list): List of chosen input neurons """
inp = [0] * reservoir_size
x = [0] * length
idx = np.random.choice(length, np.random.randint(reservoir_size))
for i in idx:
x[i] = 1.0e4
inp[: len(x)] = x
return inp, idx
# Generate multi-node one-hot strong inputs
@staticmethod
def multi_one_hot_inp(ne: int, inputs: list, n_nodes_per_inp: int):
"""Generate multi(n_nodes_per_inp) one hot vector for each input.
For each input, set n_nodes_per_inp equals one and the rest of
neurons in the pool recieves no external stimuli
Args:
ne (int): Number of excitatory units in sorn
inputs (list): input labels
n_nodes_per_inp(int): Number of target units in pool that receives single input
Returns:
one_hot_vector for each label with length equals ne
"""
one_hot = np.zeros((ne, len(inputs)))
idxs = []
for _ in range(n_nodes_per_inp):
idxs.append(random.sample(range(0, ne), len(inputs)))
idxs = list(zip(*idxs))
j = 0 # Max(j) = len(inputs)
for idx_list in idxs:
for i in idx_list:
one_hot[i][j] = 1
j += 1
return one_hot, idxs
@staticmethod
def generate_gaussian_inputs(length: int, reservoir_size: int):
"""Generate external stimuli sampled from Gaussian distribution.
Randomly neurons in the reservoir receives this input at each timestep
Args:
length (int): Number of input neurons
Returns:
out (array): Input vector of length equals the number of neurons in the reservoir
with randomly chosen neuron set active
idx (int): List of chosen input neurons
"""
out = [0] * reservoir_size
x = [0] * length
idx = np.random.choice(length, np.random.randint(reservoir_size))
inp = np.random.normal(size=length)  # one sample per input neuron; np.random.normal(length) would return a single scalar
for i in idx:
x[i] = inp[i]
out[: len(x)] = x
return out, idx
@staticmethod
def normalize_weight_matrix(weight_matrix: np.array):
# Applied only while initializing the weight. During simulation, Synaptic scaling applied on weight matrices
""" Normalize the weights in the matrix such that incoming connections to a neuron sum up to 1
Args:
weight_matrix (array): Incoming Weights from W_ee or W_ei or W_ie
Returns:
weight_matrix (array): Normalized weight matrix"""
normalized_weight_matrix = weight_matrix / np.sum(weight_matrix, axis=0)
return normalized_weight_matrix
@staticmethod
def generate_lambd_connections(
synaptic_connection: str, ne: int, ni: int, lambd_w: int, lambd_std: int
):
"""Generate lambda incoming connections for Excitatory neurons and outgoing connections per Inhibitory neuron
Args:
synaptic_connection (str): Type of synaptic connection (EE, EI or IE)
ne (int): Number of excitatory units
ni (int): Number of inhibitory units
lambd_w (int): Average number of incoming connections
lambd_std (int): Standard deviation of average number of connections per neuron
Returns:
connection_weights (array) - Weight matrix
"""
if synaptic_connection == "EE":
"""Choose random lamda connections per neuron"""
# Draw normally distributed ne integers with mean lambd_w
lambdas_incoming = norm.ppf(
np.random.random(ne), loc=lambd_w, scale=lambd_std
).astype(int)
# lambdas_outgoing = norm.ppf(np.random.random(ne), loc=lambd_w, scale=lambd_std).astype(int)
# List of neurons
list_neurons = list(range(ne))
# Connection weights
connection_weights = np.zeros((ne, ne))
# For each lambd value in the above list,
# generate weights for incoming and outgoing connections
# -------------Gaussian Distribution of weights --------------
# weight_matrix = np.random.randn(Sorn.ne, Sorn.ni) + 2 # Small random values from gaussian distribution
# Centered around 2 to make all values positive
# ------------Uniform Distribution --------------------------
global_incoming_weights = np.random.uniform(0.0, 0.1, sum(lambdas_incoming))
# Index Counter
global_incoming_weights_idx = 0
# Choose the neurons in order [0 to 199]
for neuron in list_neurons:
# Choose random unique (lambdas[neuron]) neurons from list_neurons
possible_connections = list_neurons.copy()
possible_connections.remove(
neuron
) # Remove the selected neuron from possible connections i!=j
# Choose random presynaptic neurons
possible_incoming_connections = random.sample(
possible_connections, lambdas_incoming[neuron]
)
incoming_weights_neuron = global_incoming_weights[
global_incoming_weights_idx : global_incoming_weights_idx
+ lambdas_incoming[neuron]
]
# ---------- Update the connection weight matrix ------------
# Update incoming connection weights for selected 'neuron'
for incoming_idx, incoming_weight in enumerate(incoming_weights_neuron):
connection_weights[possible_incoming_connections[incoming_idx]][
neuron
] = incoming_weight
global_incoming_weights_idx += lambdas_incoming[neuron]
return connection_weights
if synaptic_connection == "EI":
"""Choose random lamda connections per neuron"""
# Draw normally distributed ni integers with mean lambd_w
lambdas = norm.ppf(
np.random.random(ni), loc=lambd_w, scale=lambd_std
).astype(int)
# List of neurons
list_neurons = list(range(ni)) # Each i can connect with random ne neurons
# Initializing connection weights variable
connection_weights = np.zeros((ni, ne))
# ------------Uniform Distribution -----------------------------
global_outgoing_weights = np.random.uniform(0.0, 0.1, sum(lambdas))
# Index Counter
global_outgoing_weights_idx = 0
# Choose the neurons in order [0 to 40]
for neuron in list_neurons:
# Choose random unique (lambdas[neuron]) neurons from list_neurons
possible_connections = list(range(ne))
possible_outgoing_connections = random.sample(
possible_connections, lambdas[neuron]
) # possible_outgoing connections to the neuron
# Update weights
outgoing_weights = global_outgoing_weights[
global_outgoing_weights_idx : global_outgoing_weights_idx
+ lambdas[neuron]
]
# ---------- Update the connection weight matrix ------------
# Update outgoing connections for the neuron
for outgoing_idx, outgoing_weight in enumerate(
outgoing_weights
): # Update the columns in the connection matrix
connection_weights[neuron][
possible_outgoing_connections[outgoing_idx]
] = outgoing_weight
# Update the global weight values index
global_outgoing_weights_idx += lambdas[neuron]
return connection_weights
@staticmethod
def get_incoming_connection_dict(weights: np.array):
""" Get the non-zero entries in columns is the incoming connections for the neurons
Args:
weights (np.array): Connection/Synaptic weights
Returns:
dict : Dictionary of incoming connections to each neuron
"""
# Indices of nonzero entries in the columns
connection_dict = dict.fromkeys(range(1, len(weights) + 1), 0)
for i in range(len(weights[0])): # For each neuron
connection_dict[i] = list(np.nonzero(weights[:, i])[0])
return connection_dict
@staticmethod
def get_outgoing_connection_dict(weights: np.array):
"""Get the non-zero entries in rows is the outgoing connections for the neurons
Args:
weights (np.array): Connection/Synaptic weights
Returns:
dict : Dictionary of outgoing connections from each neuron
"""
# Indices of nonzero entries in the rows
connection_dict = dict.fromkeys(range(1, len(weights) + 1), 1)
for i in range(len(weights[0])): # For each neuron
connection_dict[i] = list(np.nonzero(weights[i, :])[0])
return connection_dict
@staticmethod
def prune_small_weights(weights: np.array, cutoff_weight: float):
"""Prune the connections with negative connection strength. The weights less than cutoff_weight set to 0
Args:
weights (np.array): Synaptic strengths
cutoff_weight (float): Lower weight threshold
Returns:
array: Connections weights with values less than cutoff_weight set to 0
"""
weights[weights <= cutoff_weight] = cutoff_weight
return weights
@staticmethod
def set_max_cutoff_weight(weights: np.array, cutoff_weight: float):
""" Set cutoff limit for the values in given array
Args:
weights (np.array): Synaptic strengths
cutoff_weight (float): Higher weight threshold
Returns:
array: Connection weights with values greater than cutoff_weight set to cutoff_weight
"""
weights[weights > cutoff_weight] = cutoff_weight
return weights
@staticmethod
def get_unconnected_indexes(wee: np.array):
""" Helper function for Structural plasticity to randomly select the unconnected units
Args:
wee (array): Weight matrix
Returns:
list (indices): (row_idx,col_idx)"""
i, j = np.where(wee <= 0.0)
indices = list(zip(i, j))
self_conn_removed = []
for i, idxs in enumerate(indices):
if idxs[0] != idxs[1]:
self_conn_removed.append(indices[i])
return self_conn_removed
@staticmethod
def white_gaussian_noise(mu: float, sigma: float, t: int):
"""Generates white gaussian noise with mean mu, standard deviation sigma and
the noise length equals t
Args:
mu (float): Mean value of Gaussian noise
sigma (float): Standard deviation of Gaussian noise
t (int): Length of noise vector
Returns:
array: White gaussian noise of length t
"""
noise = np.random.normal(mu, sigma, t)
return np.expand_dims(noise, 1)
@staticmethod
def zero_sum_incoming_check(weights: np.array):
"""Make sure, each neuron in the pool has atleast 1 incoming connection
Args:
weights (array): Synaptic strengths
Returns:
array: Synaptic weights of neurons with at least one positive (non-zero) incoming connection strength
"""
zero_sum_incomings = np.where(np.sum(weights, axis=0) == 0.0)
if len(zero_sum_incomings[-1]) == 0:
return weights
else:
for zero_sum_incoming in zero_sum_incomings[-1]:
rand_indices = np.random.randint(
int(weights.shape[0] * 0.2), size=2
)
rand_values = np.random.uniform(0.0, 0.1, 2)
for i, idx in enumerate(rand_indices):
weights[:, zero_sum_incoming][idx] = rand_values[i]
return weights
class Plotter(object):
"""Wrapper class to call plotting methods
"""
def __init__(self):
pass
@staticmethod
def hist_incoming_conn(
weights: np.array, bin_size: int, histtype: str, savefig: bool
):
"""Plot the histogram of number of presynaptic connections per neuron
Args:
weights (array): Connection weights
bin_size (int): Histogram bin size
histtype (str): Same as histtype matplotlib
savefig (bool): If True plot will be saved as png file in the cwd
Returns:
plot (matplotlib.pyplot): plot object
"""
num_incoming_weights = np.sum(np.array(weights) > 0, axis=0)
plt.figure(figsize=(12, 5))
plt.xlabel("Number of connections")
plt.ylabel("Probability")
# Fit a normal distribution to the data
mu, std = norm.fit(num_incoming_weights)
plt.hist(num_incoming_weights, bins=bin_size, density=True, alpha=0.6, color='b')
# PDF
xmin, xmax = plt.xlim()
x = np.linspace(xmin, xmax, max(num_incoming_weights))
p = norm.pdf(x, mu, std)
plt.plot(x, p, 'k', linewidth=2)
title = "Distribution of presynaptic connections: mu = %.2f, std = %.2f" % (mu, std)
plt.title(title)
if savefig:
plt.savefig("hist_incoming_conn")
return plt.show()
@staticmethod
def hist_outgoing_conn(
weights: np.array, bin_size: int, histtype: str, savefig: bool
):
"""Plot the histogram of number of incoming connections per neuron
Args:
weights (array): Connection weights
bin_size (int): Histogram bin size
histtype (str): Same as histtype matplotlib
savefig (bool): If True plot will be saved as png file in the cwd
Returns:
plot object """
# Plot the histogram of distribution of number of outgoing connections in the network
num_outgoing_weights = np.sum(np.array(weights) > 0, axis=1)
plt.figure(figsize=(12, 5))
plt.xlabel("Number of connections")
plt.ylabel("Probability")
# Fit a normal distribution to the data
mu, std = norm.fit(num_outgoing_weights)
plt.hist(num_outgoing_weights, bins=bin_size, density=True, alpha=0.6, color='b')
# PDF
xmin, xmax = plt.xlim()
x = np.linspace(xmin, xmax, max(num_outgoing_weights))
p = norm.pdf(x, mu, std)
plt.plot(x, p, 'k', linewidth=2)
title = "Distribution of post synaptic connections: mu = %.2f, std = %.2f" % (mu, std)
plt.title(title)
if savefig:
plt.savefig("hist_outgoing_conn")
return plt.show()
@staticmethod
def network_connection_dynamics(
connection_counts: np.array, savefig: bool
):
"""Plot number of positive connection in the excitatory pool
Args:
connection_counts (array) - 1D Array of number of connections in the network per time step
savefig (bool) - If True plot will be saved as png file in the cwd
Returns:
plot object
"""
# Plot graph for entire simulation time period
_, ax1 = plt.subplots(figsize=(12, 5))
ax1.plot(connection_counts, label="Connection dynamics")
plt.margins(x=0)
ax1.set_xticks(ax1.get_xticks()[::2])
ax1.set_title("Network connection dynamics")
plt.ylabel("Number of active connections")
plt.xlabel("Time step")
plt.legend(loc="upper right")
plt.tight_layout()
if savefig:
plt.savefig("connection_dynamics")
return plt.show()
@staticmethod
def hist_firing_rate_network(spike_train: np.array, bin_size: int, savefig: bool):
""" Plot the histogram of firing rate (total number of neurons spike at each time step)
Args:
spike_train (array): Array of spike trains
bin_size (int): Histogram bin size
savefig (bool): If True, plot will be saved in the cwd
Returns:
plot object """
fr = np.count_nonzero(spike_train.tolist(), 1)
# Filter zero entries in firing rate list above
fr = list(filter(lambda a: a != 0, fr))
plt.title("Distribution of population activity without inactive time steps")
plt.xlabel("Spikes/time step")
plt.ylabel("Count")
plt.hist(fr, bin_size)
if savefig:
plt.savefig("hist_firing_rate_network.png")
return plt.show()
@staticmethod
def scatter_plot(spike_train: np.array, savefig: bool):
"""Scatter plot of spike trains
Args:
spike_train (list): Array of spike trains
savefig (bool): If True, plot will be saved in the cwd
Returns:
plot object"""
# Convert the list of spike trains into an array
spike_train = np.asarray(spike_train)
# Get the indices where spike_train is 1
x, y = np.argwhere(spike_train.T == 1).T
plt.figure(figsize=(8, 5))
firing_rates = Statistics.firing_rate_network(spike_train).tolist()
plt.plot(firing_rates, label="Firing rate")
plt.legend(loc="upper left")
plt.scatter(y, x, s=0.1, color="black")
plt.title('Spike Trains')
plt.xlabel("Time step")
plt.ylabel("Neuron")
plt.legend(loc="upper left")
if savefig:
plt.savefig("ScatterSpikeTrain.png")
return plt.show()
@staticmethod
def raster_plot(spike_train: np.array, savefig: bool):
"""Raster plot of spike trains
Args:
spike_train (array): Array of spike trains
savefig (bool): If True, plot will be saved in the cwd
Returns:
plot object"""
# Convert the list of spike trains into an array
spike_train = np.asarray(spike_train)
plt.figure(figsize=(11, 6))
firing_rates = Statistics.firing_rate_network(spike_train).tolist()
plt.plot(firing_rates, label="Firing rate")
plt.legend(loc="upper left")
plt.title('Spike Trains')
# Get the indices where spike_train is 1
x, y = np.argwhere(spike_train.T == 1).T
plt.plot(y, x, "|r")
plt.xlabel("Time step")
plt.ylabel("Neuron")
if savefig:
plt.savefig("RasterSpikeTrain.png")
return plt.show()
@staticmethod
def correlation(corr: np.array, savefig: bool):
"""Plot correlation between neurons
Args:
corr (array): Correlation matrix
savefig (bool): If true will save the plot at the current working directory
Returns:
matplotlib.pyplot: Neuron Correlation plot
"""
# Generate a mask for the upper triangle
mask = np.zeros_like(corr, dtype=bool)  # np.bool is removed in recent NumPy versions
mask[np.triu_indices_from(mask)] = True
f, ax = plt.subplots(figsize=(11, 9))
# Custom diverging colormap
cmap = sns.diverging_palette(220, 10, as_cmap=True)
sns.heatmap(
corr,
mask=mask,
cmap=cmap,
xticklabels=5,
yticklabels=5,
vmax=0.1,
center=0,
square=False,
linewidths=0.0,
cbar_kws={"shrink": 0.9},
)
if savefig:
plt.savefig("Correlation between neurons")
return None
@staticmethod
def isi_exponential_fit(
spike_train: np.array, neuron: int, bin_size: int, savefig: bool
):
"""Plot Exponential fit on the inter-spike intervals during training or simulation phase
Args:
spike_train (array): Array of spike trains
neuron (int): Target neuron
bin_size (int): Spike train will be split into bins of size bin_size
savefig (bool): If True, plot will be saved in the cwd
Returns:
plot object"""
isi = Statistics.spike_time_intervals(spike_train[:,neuron])
y, x = np.histogram(sorted(isi), bins=bin_size)
x = [int(i) for i in x]
y = [float(i) for i in y]
def exponential_func(y, a, b, c):
return a * np.exp(-b * np.array(y)) - c
# Curve fit
popt, _ = curve_fit(exponential_func, x[1:bin_size], y[1:bin_size])
plt.plot(
x[1:bin_size],
exponential_func(x[1:bin_size], *popt),
label="Exponential fit",
)
plt.title('Distribution of Inter Spike Intervals and Exponential Curve Fit')
plt.scatter(x[1:bin_size], y[1:bin_size], s=2.0, color="black", label="ISI")
plt.xlabel("ISI")
plt.ylabel("Frequency")
plt.legend()
if savefig:
plt.savefig("isi_exponential_fit")
return plt.show()
@staticmethod
def weight_distribution(weights: np.array, bin_size: int, savefig: bool):
"""Plot the distribution of synaptic weights
Args:
weights (array): Connection weights
bin_size (int): Spike train will be split into bins of size bin_size
savefig (bool): If True, plot will be saved in the cwd
Returns:
plot object"""
weights = weights[
weights >= 0.01
] # Remove the weight values less than 0.01 # As reported in article SORN 2013
y, x = np.histogram(weights, bins=bin_size) # Create histogram with bin_size
plt.title('Synaptic weight distribution')
plt.scatter(x[:-1], y, s=2.0, c="black")
plt.xlabel("Connection strength")
plt.ylabel("Frequency")
if savefig:
plt.savefig("weight_distribution")
return plt.show()
@staticmethod
def linear_lognormal_fit(weights: np.array, num_points: int, savefig: bool):
"""Lognormal curve fit on connection weight distribution
Args:
weights (array): Connection weights
num_points(int): Number of points to be plotted in the x axis
savefig(bool): If True, plot will be saved in the cwd
Returns:
plot object"""
weights = np.array(weights.tolist())
weights = weights[weights >= 0.01]
M = float(np.mean(weights)) # Geometric mean
s = float(np.std(weights)) # Geometric standard deviation
# Lognormal distribution parameters
mu = float(np.mean(np.log(weights))) # Mean of log(X)
sigma = float(np.std(np.log(weights))) # Standard deviation of log(X)
shape = sigma # Scipy's shape parameter
scale = np.exp(mu) # Scipy's scale parameter
median = np.exp(mu)
mode = np.exp(mu - sigma ** 2) # Note that mode depends on both M and s
mean = np.exp(mu + (sigma ** 2 / 2)) # Note that mean depends on both M and s
x = np.linspace(
np.min(weights), np.max(weights), num=num_points
)
pdf = stats.lognorm.pdf(
x, shape, loc=0, scale=scale
)
plt.figure(figsize=(12, 4.5))
plt.title('Curve fit on connection weight distribution')
# Figure on linear scale
plt.subplot(121)
plt.plot(x, pdf)
plt.vlines(mode, 0, pdf.max(), linestyle=":", label="Mode")
plt.vlines(
mean,
0,
stats.lognorm.pdf(mean, shape, loc=0, scale=scale),
linestyle="--",
color="green",
label="Mean",
)
plt.vlines(
median,
0,
stats.lognorm.pdf(median, shape, loc=0, scale=scale),
color="blue",
label="Median",
)
plt.ylim(ymin=0)
plt.xlabel("Weight")
plt.title("Linear scale")
plt.legend()
# Figure on logarithmic scale
plt.subplot(122)
plt.semilogx(x, pdf)
plt.vlines(mode, 0, pdf.max(), linestyle=":", label="Mode")
plt.vlines(
mean,
0,
stats.lognorm.pdf(mean, shape, loc=0, scale=scale),
linestyle="--",
color="green",
label="Mean",
)
plt.vlines(
median,
0,
stats.lognorm.pdf(median, shape, loc=0, scale=scale),
color="blue",
label="Median",
)
plt.ylim(ymin=0)
plt.xlabel("Weight")
plt.title("Logarithmic scale")
plt.legend()
if savefig:
plt.savefig("LinearLognormalFit")
return plt.show()
@staticmethod
def plot_network(corr: np.array, corr_thres: float, fig_name: str = None):
"""Network x graphical visualization of the network using the correlation matrix
Args:
corr (array): Correlation between neurons
corr_thres (float): Threshold to prune the connections. The smaller the threshold,
the higher the density of connections
fig_name (str, optional): Name of the figure. Defaults to None.
Returns:
matplotlib.pyplot: Plot instance
"""
df = pd.DataFrame(corr)
links = df.stack().reset_index()
links.columns = ["var1", "var2", "value"]
links_filtered = links.loc[
(links["value"] > corr_thres) & (links["var1"] != links["var2"])
]
G = nx.from_pandas_edgelist(links_filtered, "var1", "var2")
plt.figure(figsize=(50, 50))
nx.draw(
G,
with_labels=True,
node_color="orange",
node_size=50,
linewidths=5,
font_size=10,
)
plt.text(0.1, 0.9, "%s" % corr_thres)
plt.savefig("%s" % fig_name)
plt.show()
@staticmethod
def hamming_distance(hamming_dist: list, savefig: bool):
"""Hamming distance between true netorks states and perturbed network states
Args:
hamming_dist (list): Hamming distance values
savefig (bool): If True, save the fig at current working directory
Returns:
matplotlib.pyplot: Hamming distance between true and perturbed network states
"""
plt.figure(figsize=(15, 6))
plt.title("Hamming distance between actual and perturbed states")
plt.xlabel("Time steps")
plt.ylabel("Hamming distance")
plt.plot(hamming_dist)
if savefig:
plt.savefig("HammingDistance")
return plt.show()
class Statistics(object):
""" Wrapper class for statistical analysis methods """
def __init__(self):
pass
@staticmethod
def firing_rate_neuron(spike_train: np.array, neuron: int, bin_size: int):
"""Measure spike rate of given neuron during given time window
Args:
spike_train (array): Array of spike trains
neuron (int): Target neuron in the reservoir
bin_size (int): Divide the spike trains into bins of size bin_size
Returns:
tuple: (time_period, bin_size, spike_rate) """
time_period = len(spike_train[:, 0])
neuron_spike_train = spike_train[:, neuron]
# Split the list(neuron_spike_train) into sub lists of length time_step
samples_spike_train = [
neuron_spike_train[i : i + bin_size]
for i in range(0, len(neuron_spike_train), bin_size)
]
spike_rate = 0.0
for _, spike_train in enumerate(samples_spike_train):
spike_rate += list(spike_train).count(1.0)
spike_rate = spike_rate * bin_size / time_period
return time_period, bin_size, spike_rate
@staticmethod
def firing_rate_network(spike_train: np.array):
"""Calculate number of neurons spikes at each time step.Firing rate of the network
Args:
spike_train (array): Array of spike trains
Returns:
array: firing rate of the network at each time step """
firing_rate = np.count_nonzero(spike_train.tolist(), 1)
return firing_rate
@staticmethod
def | (firing_rates: list):
"""Smoothem the firing rate depend on its scale. Smaller values corresponds to smoother series
Args:
firing_rates (list): List of number of active neurons per time step
Returns:
sd_diff (float): Value signifying the smoothness of the changes in firing rates
"""
diff = np.diff(firing_rates)
sd_diff = np.std(diff)
return sd_diff
@staticmethod
def scale_independent_smoothness_measure(firing_rates: list):
"""Smoothem the firing rate independent of its scale. Smaller values corresponds to smoother series
Args:
firing_rates (list): List of number of active neurons per time step
Returns:
coeff_var (float): Value signifying the smoothness of the changes in firing rates """
diff = np.diff(firing_rates)
mean_diff = np.mean(diff)
sd_diff = np.std(diff)
coeff_var = sd_diff / abs(mean_diff)
return coeff_var
@staticmethod
def autocorr(firing_rates: list, t: int = 2):
"""
Score interpretation
- scores near 1 imply a smoothly varying series
- scores near 0 imply that there's no overall linear relationship between a data point and the following one (that is, plot(x[-length(x)],x[-1]) won't give a scatter plot with any apparent linearity)
- scores near -1 suggest that the series is jagged in a particular way: if one point is above the mean, the next is likely to be below the mean by about the same amount, and vice versa.
Args:
firing_rates (list): Firing rates of the network
t (int, optional): Window size. Defaults to 2.
Returns:
array: Autocorrelation between neurons given their firing rates
"""
return np.corrcoef(
np.array(
[
firing_rates[0 : len(firing_rates) - t],
firing_rates[t : len(firing_rates)],
]
)
)
@staticmethod
def avg_corr_coeff(spike_train: np.array):
"""Measure Average Pearson correlation coeffecient between neurons
Args:
spike_train (array): Neural activity
Returns:
array: Average correlation coefficient"""
corr_mat = np.corrcoef(np.asarray(spike_train).T)
avg_corr = np.sum(corr_mat, axis=1) / 200  # assumes a pool of 200 neurons
corr_coeff = (
avg_corr.sum() / 200
) # 2D to 1D and either upper or lower half of correlation matrix.
return corr_mat, corr_coeff
@staticmethod
def spike_times(spike_train: np.array):
"""Get the time instants at which neuron spikes
Args:
spike_train (array): Spike trains of neurons
Returns:
(array): Spike time of each neurons in the pool"""
times = np.where(spike_train == 1.0)
return times
@staticmethod
def spike_time_intervals(spike_train):
"""Generate spike time intervals spike_trains
Args:
spike_train (array): Network activity
Returns:
list: Inter spike intervals for each neuron in the reservoir
"""
spike_times = Statistics.spike_times(spike_train)
isi = np.diff(spike_times[-1])
return isi
@staticmethod
def hamming_distance(actual_spike_train: np.array, perturbed_spike_train: np.array):
"""Hamming distance between true netorks states and perturbed network states
Args:
actual_spike_train (np.array): True network's states
perturbed_spike_train (np.array): Perturbated network's states
Returns:
float: Hamming distance between true and perturbed network states
"""
hd = [
np.count_nonzero(actual_spike_train[i] != perturbed_spike_train[i])
for i in range(len(actual_spike_train))
]
return hd
@staticmethod
def fanofactor(spike_train: np.array, neuron: int, window_size: int):
"""Investigate whether neuronal spike generation is a poisson process
Args:
spike_train (np.array): Spike train of neurons in the reservoir
neuron (int): Target neuron in the pool
window_size (int): Sliding window size for time step ranges to be considered for measuring the fanofactor
Returns:
float : Fano factor of the neuron spike train
"""
# Choose activity of random neuron
neuron_act = spike_train[:, neuron]
# Divide the total observations into time windows of size window_size for the neuron
tws = np.split(neuron_act, window_size)
fr = []
for i in range(len(tws)):
fr.append(np.count_nonzero(tws[i]))
# print('Firing rate of the neuron during each time window of size %s is %s' %(ws,fr))
mean_firing_rate = np.mean(fr)
variance_firing_rate = np.var(fr)
fano_factor = variance_firing_rate / mean_firing_rate
return mean_firing_rate, variance_firing_rate, fano_factor
@staticmethod
def spike_source_entropy(spike_train: np.array, num_neurons: int):
"""Measure the uncertainty about the origin of spike from the network using entropy
Args:
spike_train (np.array): Spike train of neurons
num_neurons (int): Number of neurons in the reservoir
Returns:
int : Spike source entropy of the network
"""
# Number of spikes from each neuron during the interval
n_spikes = np.count_nonzero(spike_train, axis=0)
p = n_spikes / np.count_nonzero(
spike_train
) # Probability of each neuron that can generate spike in next step
# print(p) # Note: pi shouldn't be zero
sse = np.sum([pi * np.log(pi) for pi in p]) / np.log(
1 / num_neurons
) # Spike source entropy
return sse
| scale_dependent_smoothness_measure |
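A short sketch of how the Statistics helpers above compose on a toy spike train (random binary activity; shapes and parameters are illustrative only, and the utils module above is assumed to be importable):

import numpy as np

# Toy spike train: 1000 time steps x 50 neurons of sparse random activity.
rng = np.random.default_rng(0)
spike_train = (rng.random((1000, 50)) < 0.05).astype(float)

fr = Statistics.firing_rate_network(spike_train)  # spikes per time step
smoothness = Statistics.scale_dependent_smoothness_measure(fr)
# 1000 steps split into 10 windows of 100 steps each for neuron 3.
mean_fr, var_fr, fano = Statistics.fanofactor(spike_train, neuron=3, window_size=10)
print(smoothness, fano)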
fuzz.py | import atheris
with atheris.instrument_imports():
import sys
import warnings
import mdformat
from mdformat._util import is_md_equal
# Suppress all warnings.
warnings.simplefilter("ignore")
def test_one_input(input_bytes: bytes) -> None:
# We need a Unicode string, not bytes
fdp = atheris.FuzzedDataProvider(input_bytes)
data = fdp.ConsumeUnicode(sys.maxsize)
try:
formatted_data = mdformat.text(data)
except BaseException:
print_err(data)
raise
if not is_md_equal(data, formatted_data):
print_err(data)
raise Exception("Formatted Markdown not equal!")
def print_err(data):
codepoints = [hex(ord(x)) for x in data]
sys.stderr.write(f"Input was {type(data)}:\n{data}\nCodepoints:\n{codepoints}\n")
sys.stderr.flush()
def main(): | atheris.Fuzz()
if __name__ == "__main__":
main() | # For possible options, see https://llvm.org/docs/LibFuzzer.html#options
fuzzer_options = sys.argv
atheris.Setup(fuzzer_options, test_one_input) |
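Outside the fuzzer, the property the harness asserts can be spot-checked directly: format a sample and confirm the round-trip equivalence, plus idempotence of a second pass (a minimal sketch using the same mdformat API imported above):

import mdformat
from mdformat._util import is_md_equal

sample = "# Title\n\n*  item one\n*  item two\n"
formatted = mdformat.text(sample)
assert is_md_equal(sample, formatted)
# Formatting should also be idempotent: a second pass changes nothing.
assert mdformat.text(formatted) == formatted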
rest.py | from pythonwarrior.abilities.base import AbilityBase
class Rest(AbilityBase):
def description(self):
return "Gain 10% of max health back, but do nothing more."
def perform(self):
if self._unit.health < self._unit.max_health:
amount = int(self._unit.max_health * 0.1)
if (self._unit.health + amount) > self._unit.max_health:
amount = self._unit.max_health - self._unit.health
self._unit.health += amount | {'heal_amount': amount,
'current': self._unit.health})
else:
self._unit.say("is already fit as a fiddle") | self._unit.say("receives %(heal_amount)d health from resting, "
"up to %(current)d health" % |
c4_1026_mymaze.py | from turtle import *
PART_OF_PATH = 'O'
TRIED = '.'
OBSTACLE = '+'
DEAD_END = '-'
# In an `or` expression with several conditions, evaluation short-circuits: as soon as one condition holds, the next step is taken directly.
# The search therefore follows the first branch all the way down; if it fails, it backtracks to the second branch of the previous level... if all branches at that level fail, it backtracks one level further... and so on, until the first condition is determined to be T or F.
class Maze(object):
def __init__(self, filename):
# Convert the txt file into a list and locate the starting position 'S'
rowsInMaze = 0
colsInMaze = 0
self.mazelist = [] # initially empty list
file = open(filename, 'r')
for line in file:
rowlist = []
col = 0
for ch in line[:-1]:
rowlist.append(ch)
if ch == 'S':
self.startRow = rowsInMaze
self.startCol = col
col = col + 1 # symbols only start appearing from the 5th column (index 4); commas are included as well
rowsInMaze += 1 # (symbols start from 0)
self.mazelist.append(rowlist)
colsInMaze = len(rowlist) # total number of columns
# print(self.startRow, self.startCol)
self.rowsInMaze = rowsInMaze
self.colsInMaze = colsInMaze
self.xTranslate = -colsInMaze / 2 # x value of the top-left corner
self.yTranslate = rowsInMaze / 2 # y value of the top-left corner
self.t = Turtle(shape='turtle')
# setup(width=600, height=600)
# lower-left x, lower-left y, upper-right x, upper-right y
setworldcoordinates(-(colsInMaze - 1) / 2 - 1, -(rowsInMaze - 1) / 2 - 1,
(colsInMaze - 1) / 2 + 1, (rowsInMaze - 1) / 2 + 1)
def drawmaze(self):
# draw the obstacle walls
self.t.speed(1000)
for y in range(self.rowsInMaze): # row / vertical coordinate
for x in range(self.colsInMaze): # column / horizontal coordinate
if self.mazelist[y][x] == OBSTACLE:
self.drawbox(x + self.xTranslate, -y + self.yTranslate, 'tan') # coordinate transform
self.t.color('black', 'yellow') # pen color black, fill color yellow
def drawbox(self, x, y, color):
# draw a filled box; called when drawing the obstacles
tracer(0)
self.t.up()
self.t.goto(x - .5, y - .5) # bottom-left corner
self.t.color('black', color)
self.t.setheading(90)
self.t.down()
self.t.begin_fill()
for i in range(4):
self.t.forward(1)
self.t.right(90)
self.t.end_fill()
update()
tracer(1)
def moveturtle(self, x, y):
self.t.up()
self.t.setheading(self.t.towards(self.xTranslate + x, self.yTranslate - y))
self.t.goto(self.xTranslate + x, self.yTranslate - y)
def dropbread(self, color):
self.t.dot(15, color) # draw a dot of diameter size; default is max(pensize+4, 2*pensize); any int >= 1 may be given
def updatePosition(self, row, col, val=None):
if val:
# record the state of the current position
self.mazelist[row][col] = val
self.moveturtle(col, row) # to (x,y)
if val == PART_OF_PATH:
color = 'green'
elif val == OBSTACLE:
color = 'black'
elif val == TRIED:
color = 'gray'
elif val == DEAD_END:
color = 'red'
else:
color = None
# drop a colored breadcrumb according to the state
if | sExit(start_row, start_col):
maze.updatePosition(start_row, start_col, PART_OF_PATH)
return True
maze.updatePosition(start_row, start_col, TRIED)
found = searchmaze(maze, start_row, start_col - 1) or searchmaze(maze, start_row, start_col + 1) or \
searchmaze(maze, start_row - 1, start_col) or searchmaze(maze, start_row + 1, start_col)
if found:
maze.updatePosition(start_row, start_col, PART_OF_PATH)
else:
maze.updatePosition(start_row, start_col, DEAD_END)
return found
myMaze = Maze('maze1.txt')
myMaze.drawmaze()
print(myMaze.startRow, myMaze.startCol)
myMaze.updatePosition(myMaze.startRow, myMaze.startCol)
searchmaze(myMaze, myMaze.startRow, myMaze.startCol)
win = myMaze.t.getscreen()
win.exitonclick()
| color:
self.dropbread(color)
def __getitem__(self, idx):
return self.mazelist[idx]
def isExit(self, row, col):
return row == 0 or col == 0 or row == self.rowsInMaze - 1 or col == self.colsInMaze - 1
def searchmaze(maze, start_row, start_col):
maze.updatePosition(start_row, start_col)
if maze[start_row][start_col] == OBSTACLE:
return False
if maze[start_row][start_col] == TRIED or maze[start_row][start_col] == DEAD_END:
return False
if maze.i |
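searchmaze above is a depth-first backtracking search driven by the short-circuiting `or` chain described in the translated comments. A condensed sketch of the same idea on a toy grid (no turtle drawing; the exit test is simplified here to stepping off the grid):

def solve(grid, row, col):
    # Stepping off the edge counts as reaching an exit in this sketch.
    if not (0 <= row < len(grid) and 0 <= col < len(grid[0])):
        return True
    if grid[row][col] in ('+', '.'):  # wall or already tried
        return False
    grid[row][col] = '.'  # mark as tried before recursing
    # Short-circuit: stop at the first direction that reaches an exit.
    return (solve(grid, row, col - 1) or solve(grid, row, col + 1) or
            solve(grid, row - 1, col) or solve(grid, row + 1, col))

maze_grid = [list(r) for r in ["+++", "+ +", "+ +"]]
print(solve(maze_grid, 1, 1))  # True: the open cell below reaches the border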
pettingzoosislwaterworldDMFGACexecution.py | from pettingzoo.sisl.waterworld import waterworld
from RL_dmfgac import Actor
import csv
import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
def | (parallel_env):
n_actions = 25
num_episode = 0
while num_episode < 100:
observation = parallel_env.reset()
accumulated_reward = 0
max_cycles = 500
actions = {}
number_caught = 0
for step in range(max_cycles):
for agent in parallel_env.agents:
agent_observation = observation[agent]
action = actor[agent].choose_action(agent_observation)
actions[agent] = action
new_observation, rewards, dones, infos = parallel_env.step(actions)
number_caught = number_caught + parallel_env.return_number_caught()
if not parallel_env.agents:
break
for agent in parallel_env.agents:
accumulated_reward = accumulated_reward + rewards[agent]
observation = new_observation
print("The step we are at is", step)
print("the total food captured is", number_caught)
num_episode = num_episode + 1
print("The episode is", num_episode)
print('game over')
if __name__ == "__main__":
parallel_env = waterworld.parallel_env(n_pursuers=25, n_evaders=25, encounter_reward=1)
parallel_env.seed(1)
parallel_env.reset()
actor = {}
sess = tf.Session()
size = len(parallel_env.agents)
action_bound = [-1, 1]
for agent in parallel_env.agents:
name = agent
actor[agent] = Actor(sess, n_features=212, action_bound = action_bound, lr=0.00001, name = name)
actor[agent].restore_model("./"+agent+"/dmfgacactormodel.ckpt")
sess.run(tf.global_variables_initializer())
run_waterworld(parallel_env)
| run_waterworld |
builder.rs | use dprint_core::configuration::{resolve_global_config, ConfigKeyMap, ConfigKeyValue, GlobalConfiguration, NewLineKind};
use std::collections::HashMap;
use super::*;
/// Formatting configuration builder.
///
/// # Example
///
/// ```
/// use dprint_plugin_json::configuration::*;
///
/// let config = ConfigurationBuilder::new()
/// .line_width(80)
/// .build();
/// ```
pub struct ConfigurationBuilder {
pub(super) config: ConfigKeyMap,
global_config: Option<GlobalConfiguration>,
}
impl ConfigurationBuilder {
/// Constructs a new configuration builder.
pub fn new() -> ConfigurationBuilder {
ConfigurationBuilder {
config: HashMap::new(),
global_config: None,
}
}
/// Gets the final configuration that can be used to format a file.
pub fn build(&self) -> Configuration {
if let Some(global_config) = &self.global_config {
resolve_config(self.config.clone(), global_config).config
} else {
let global_config = resolve_global_config(HashMap::new(), &Default::default()).config;
resolve_config(self.config.clone(), &global_config).config
}
}
/// Set the global configuration.
pub fn global_config(&mut self, global_config: GlobalConfiguration) -> &mut Self {
self.global_config = Some(global_config);
self
}
/// The width of a line the printer will try to stay under. Note that the printer may exceed this width in certain cases.
/// Default: 120
pub fn line_width(&mut self, value: u32) -> &mut Self {
self.insert("lineWidth", (value as i32).into())
}
/// Whether to use tabs (true) or spaces (false).
///
/// Default: `false`
pub fn use_tabs(&mut self, value: bool) -> &mut Self {
self.insert("useTabs", value.into())
}
/// The number of columns for an indent.
///
/// Default: `2`
pub fn indent_width(&mut self, value: u8) -> &mut Self {
self.insert("indentWidth", (value as i32).into())
}
/// The kind of newline to use.
/// Default: `NewLineKind::LineFeed`
pub fn new_line_kind(&mut self, value: NewLineKind) -> &mut Self {
self.insert("newLineKind", value.to_string().into())
}
/// Whether to force a space after the slashes in a comment line (ex. `//comment` -> `// comment`).
/// Default: true
pub fn comment_line_force_space_after_slashes(&mut self, value: bool) -> &mut Self {
self.insert("commentLine.forceSpaceAfterSlashes", value.into())
}
/// The text to use for an ignore comment (ex. `// dprint-ignore`).
///
/// Default: `"dprint-ignore"`
pub fn ignore_node_comment_text(&mut self, value: &str) -> &mut Self {
self.insert("ignoreNodeCommentText", value.into())
}
/// Whether to make objects and arrays collapse to a single line when below the line width.
/// Default: false
pub fn prefer_single_line(&mut self, value: bool) -> &mut Self {
self.insert("preferSingleLine", value.into())
}
/// Whether to make arrays collapse to a single line when below the line width.
/// Default: false
pub fn array_prefer_single_line(&mut self, value: bool) -> &mut Self {
self.insert("array.preferSingleLine", value.into())
}
/// Whether to make objects collapse to a single line when below the line width.
/// Default: false
pub fn object_prefer_single_line(&mut self, value: bool) -> &mut Self {
self.insert("object.preferSingleLine", value.into())
}
/// Sets the configuration to what is used in Deno.
pub fn deno(&mut self) -> &mut Self {
self
.line_width(80)
.ignore_node_comment_text("deno-fmt-ignore")
.comment_line_force_space_after_slashes(false)
}
#[cfg(test)]
pub(super) fn get_inner_config(&self) -> ConfigKeyMap {
self.config.clone()
}
fn insert(&mut self, name: &str, value: ConfigKeyValue) -> &mut Self {
self.config.insert(String::from(name), value);
self
}
}
#[cfg(test)]
mod tests {
use dprint_core::configuration::{resolve_global_config, NewLineKind};
use std::collections::HashMap;
use super::*;
#[test]
fn check_all_values_set() |
#[test]
fn handle_global_config() {
let mut global_config = HashMap::new();
global_config.insert(String::from("lineWidth"), 90.into());
global_config.insert(String::from("newLineKind"), "crlf".into());
global_config.insert(String::from("useTabs"), true.into());
let global_config = resolve_global_config(global_config, &Default::default()).config;
let mut config_builder = ConfigurationBuilder::new();
let config = config_builder.global_config(global_config).build();
assert_eq!(config.line_width, 90);
assert_eq!(config.new_line_kind == NewLineKind::CarriageReturnLineFeed, true);
}
#[test]
fn use_json_defaults_when_global_not_set() {
let global_config = resolve_global_config(HashMap::new(), &Default::default()).config;
let mut config_builder = ConfigurationBuilder::new();
let config = config_builder.global_config(global_config).build();
assert_eq!(config.indent_width, 2); // this is different
assert_eq!(config.new_line_kind == NewLineKind::LineFeed, true);
}
#[test]
fn support_deno_config() {
let mut config_builder = ConfigurationBuilder::new();
let config = config_builder.deno().build();
assert_eq!(config.indent_width, 2);
assert_eq!(config.line_width, 80);
assert_eq!(config.new_line_kind == NewLineKind::LineFeed, true);
assert_eq!(config.use_tabs, false);
assert_eq!(config.comment_line_force_space_after_slashes, false);
assert_eq!(config.ignore_node_comment_text, "deno-fmt-ignore");
assert_eq!(config.array_prefer_single_line, false);
assert_eq!(config.object_prefer_single_line, false);
}
#[test]
fn support_prefer_single_line_config() {
let mut config_builder = ConfigurationBuilder::new();
let config = config_builder.prefer_single_line(true).build();
assert_eq!(config.array_prefer_single_line, true);
assert_eq!(config.object_prefer_single_line, true);
}
}
| {
let mut config = ConfigurationBuilder::new();
config
.new_line_kind(NewLineKind::CarriageReturnLineFeed)
.line_width(90)
.use_tabs(true)
.indent_width(4)
.new_line_kind(NewLineKind::CarriageReturnLineFeed)
.comment_line_force_space_after_slashes(false)
.prefer_single_line(true)
.array_prefer_single_line(true)
.object_prefer_single_line(false)
.ignore_node_comment_text("deno-fmt-ignore");
let inner_config = config.get_inner_config();
assert_eq!(inner_config.len(), 9);
let diagnostics = resolve_config(inner_config, &resolve_global_config(HashMap::new(), &Default::default()).config).diagnostics;
assert_eq!(diagnostics.len(), 0);
} |
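ConfigurationBuilder above is a classic fluent builder: every setter stores a key in a map and returns &mut Self so calls chain, and build() resolves the collected keys against global defaults. The same shape in a short Python sketch (the keys and the resolve step are hypothetical stand-ins):

class ConfigBuilder:
    def __init__(self):
        self._config = {}

    def _insert(self, name, value):
        self._config[name] = value
        return self  # returning self is what makes calls chain

    def line_width(self, value: int):
        return self._insert("lineWidth", value)

    def use_tabs(self, value: bool):
        return self._insert("useTabs", value)

    def build(self) -> dict:
        # Stand-in for resolve_config: defaults first, overrides on top.
        return {"lineWidth": 120, "useTabs": False, **self._config}

config = ConfigBuilder().line_width(80).use_tabs(True).build()
assert config == {"lineWidth": 80, "useTabs": True}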
features.py | import pytest
from datagears.core.network import Network
@pytest.fixture
def | () -> Network:
"""Testing fixture for a feature."""
from datagears.core.network import Network
from datagears.features.dummy import my_out
network = Network("my-network", outputs=[my_out])
return network
@pytest.fixture
def store_feature() -> Network:
"""Testing fixture for a feature."""
from datagears.core.network import Network
from datagears.core.stores import FeatureStore
from datagears.features.dummy import my_out
network = Network("my-network", outputs=[my_out], feature_store=FeatureStore())
return network
| myfeature |
galah.py | import astropy.io.fits as fits
from specutils import SpectrumList
from specutils.io.registers import data_loader
from .loaders import FITS_FILE_EXTS, SINGLE_SPLIT_LABEL
GALAH_CONFIG = {
"hdus": {
"0": {"purpose": "science"},
"1": {"purpose": "error_stdev"},
"2": {"purpose": "unreduced_science"},
"3": {"purpose": "unreduced_error_stdev"},
"4": {"purpose": "skip"},
},
"wcs": {
"pixel_reference_point_keyword": "CRPIX1",
"pixel_reference_point_value_keyword": "CRVAL1",
"pixel_width_keyword": "CDELT1",
"wavelength_unit": "Angstrom",
},
"units": {"flux_unit": "count"},
"all_standard_units": False,
"all_keywords": False,
"valid_wcs": False,
}
def identify_galah(origin, *args, **kwargs):
|
@data_loader(
label="GALAH", extensions=FITS_FILE_EXTS, dtype=SpectrumList,
identifier=identify_galah,
)
def galah_loader(fname):
spectra = SpectrumList.read(
fname, format=SINGLE_SPLIT_LABEL, **GALAH_CONFIG
)
return spectra
| """
Identify if the current file is a GALAH file
"""
file_obj = args[0]
if isinstance(file_obj, fits.hdu.hdulist.HDUList):
hdulist = file_obj
else:
hdulist = fits.open(file_obj, **kwargs)
if "galah" in hdulist[0].header.get("REFERENC"):
if not isinstance(file_obj, fits.hdu.hdulist.HDUList):
hdulist.close()
return True
if not isinstance(file_obj, fits.hdu.hdulist.HDUList):
hdulist.close()
return False |
generate_zarr.py | import argparse
import pegasusio as pio
import pandas as pd
parser = argparse.ArgumentParser(description='Merge demuxlet result with gene-count matrix.')
parser.add_argument('demux_res', metavar = 'demux_result.best', help = 'Demuxlet demultiplexing results.')
parser.add_argument('raw_mat', metavar = 'raw_feature_bc_matrix.h5', help = 'Raw gene count matrix in 10x format.')
parser.add_argument('out_file', metavar = 'output_result.zarr', help = 'Output zarr file.')
args = parser.parse_args()
demux_type_dict = {'SNG': 'singlet', 'DBL': 'doublet', 'AMB': 'unknown'}
def | (assignment_file: str, input_mat_file: str, output_zarr_file: str) -> None:
df = pd.read_csv(assignment_file, sep = '\t', header = 0, index_col = 'BARCODE')
df.index = pd.Index([x[:-2] for x in df.index])
df['demux_type'] = df['DROPLET.TYPE'].apply(lambda s: demux_type_dict[s])
df['assignment'] = ''
df.loc[df['demux_type'] == 'singlet', 'assignment'] = df.loc[df['demux_type'] == 'singlet', 'SNG.BEST.GUESS']
df.loc[df['demux_type'] == 'doublet', 'assignment'] = df.loc[df['demux_type'] == 'doublet', 'DBL.BEST.GUESS'].apply(lambda s: ','.join(s.split(',')[:-1]))
data = pio.read_input(input_mat_file)
data.obs['demux_type'] = ''
data.obs['assignment'] = ''
idx = data.obs_names.isin(df.index)
barcodes = data.obs_names[idx]
df_valid = df.loc[barcodes, ['demux_type', 'assignment']]
data.obs.loc[idx, 'demux_type'] = df_valid['demux_type'].values
data.obs.loc[idx, 'assignment'] = df_valid['assignment'].values
pio.write_output(data, output_zarr_file, zarr_zipstore = True)
if __name__ == '__main__':
write_output(args.demux_res, args.raw_mat, args.out_file) | write_output |
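The merge above trims the trailing '-1' from droplet barcodes and maps demuxlet's DROPLET.TYPE codes to readable labels; a toy illustration of that mapping on a fabricated two-row table:

import pandas as pd

demux_types = {'SNG': 'singlet', 'DBL': 'doublet', 'AMB': 'unknown'}
toy = pd.DataFrame(
    {'DROPLET.TYPE': ['SNG', 'DBL']},
    index=['AAACCTG-1', 'TTTGTCA-1'],
)
toy.index = pd.Index([x[:-2] for x in toy.index])  # strip the '-1' suffix
toy['demux_type'] = toy['DROPLET.TYPE'].map(demux_types)
print(toy)
# AAACCTG -> singlet, TTTGTCA -> doublet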
binance.py | import requests
import json
from datetime import datetime
import time
import pandas as pd
from pandas import DataFrame as df
import hmac
import hashlib
from interval_enum import Interval
from order_enum import Order
class BinanceClient:
def __init__(self, api_key, api_secret):
self.key = api_key
self.secret = api_secret
self.base = 'https://api.binance.com'
self.endpoint = {
'klines': '/api/v1/klines',
'price_ticker': '/api/v3/ticker/price',
'24hr_ticker': '/api/v3/ticker/24hr',
'historical_trade': '/api/v3/historicalTrades', # recent trades on the market
'order': '/api/v3/order',
'test_order': '/api/v3/order/test',
'open_order': '/api/v3/openOrders', # all open orders
'all_order': '/api/v3/allOrders', # all orders: active, cancelled, filler
'my_trade': '/api/v3/myTrades' # all trades for a specific symbol on the account
}
'''
***********************************************************
GET METHODS
***********************************************************
'''
'''
return klines for a specified symbol
@param
required - symbol: str, interval: Interval
'''
def get_klines(self, symbol, interval):
# specifying parameters for request body
params = {
'symbol': symbol,
'interval': interval.value
}
# specifying url enpoint
url = self.base + self.endpoint['klines']
# get api response
response = requests.get(url, params=params)
# convert json to dict
data = json.loads(response.text)
# convert dict to data frame
klines_df = df(data)
# get open time and close time from klines_df
o_timestamp_df = klines_df[0] # open timestamp
c_timestamp_df = klines_df[6] # close timestamp
# create empty arrays for formatted datetime
o_time = [] # open time
c_time = [] # close time
# convert timestamps to datetime format
for (o_timestamp, c_timestamp) in zip(o_timestamp_df, c_timestamp_df):
o_time.append(datetime.fromtimestamp(int(o_timestamp/1000)))
c_time.append(datetime.fromtimestamp(int(c_timestamp/1000)))
# convert datetime to string datetime format for df
o_timestamp_df = df(o_time)
c_timestamp_df = df(c_time)
# replacing the original timestamp with formatted datetime string
klines_df[0] = o_timestamp_df
klines_df[6] = c_timestamp_df
# modifying dataframe
klines_df.pop(11)
klines_df.columns = ['openTime', 'open', 'high', 'low', 'close',
'volume', 'closeTime', 'quoteAssetVol',
'no. of trades', 'taker_buy_baseAssetVol',
'taker_buy_quoteAssetVol']
return klines_df
'''
return current price
1. for a symbol if symbol is specified
2. for all symbols
@param
optional - symbol: str
'''
def get_price(self, symbol=None):
# specifying parameters for request body
params = {
'symbol': symbol
}
# specifying url endpoint
url = self.base + self.endpoint['price_ticker']
# get api response
response = requests.get(url, params=params)
# convert json to dict
data = json.loads(response.text)
# convert dict to dataframe
if isinstance(data, list):
price_df = df(data)
else:
price_df = df([data])
return price_df
'''
return 24 hour ticker
1. for a symbol if symbol is specified
2. for all symbols
@param
optional - symbol: str
'''
def get_24hr_ticker(self, symbol=None):
# specify parameters for request body
params = {
'symbol': symbol
}
# specifying url endpoint
url = self.base + self.endpoint['24hr_ticker']
# request api response
response = requests.get(url, params=params)
# convert json to dict
data = json.loads(response.text)
# convert dict to dataframe
if isinstance(data, list):
ticker_df = df(data)
else:
ticker_df = df([data])
# get openTime and closeTime from ticker_df
open_time_df = ticker_df['openTime']
close_time_df = ticker_df['closeTime']
# create new empty arrays for openTime and closeTime
open_time = []
close_time = []
# convert timestamps to datetime format
for (o, c) in zip(open_time_df, close_time_df):
open_time.append(datetime.fromtimestamp(int(o/1000)))
close_time.append(datetime.fromtimestamp(int(c/1000)))
# convert timestamps to string format
open_time_df = df(open_time)
close_time_df = df(close_time)
# replace timestamps in ticker_df with formatted timestamps
ticker_df['openTime'] = open_time_df
ticker_df['closeTime'] = close_time_df
return ticker_df
'''
return list of historical trades
1. start from a specific trade if tradeId is specified upto
the specified amount of trade records
2. most recent trades if tradeId is not specified
a. most recent 500 trades if limit is not specified
b. the amount of trades specified by limit
@param
required - symbol: str
optional - limit: int, tradeId: long
'''
def get_historical_trade(self, symbol, limit=None, tradeId=None):
# specifying parameter for request body
params = {
'symbol': symbol,
'limit': limit,
'fromId': tradeId
}
# specifying url endpoint
url = self.base + self.endpoint['historical_trade']
# request api response |
# when exception occurs
if not isinstance(data, list):
return data
# convert dict to dataframe
trade_df = df(data)
if not trade_df.empty:
# get time from trade_df
time_df = trade_df['time']
# make new empty array for time
_time = []
# convert timestamp to datetime format
for t in time_df:
_time.append(datetime.fromtimestamp(int(t/1000)))
# convert timestamp to string format
time_df = df(_time)
# replace timestamp in trade_df with formatted timestamp
trade_df['time'] = time_df
return trade_df
'''
get the status of an order
@param
required - symbol: str, orderId: long
'''
def get_query_order(self, symbol, orderId):
# specify parameters for request body
params = {
'symbol': symbol,
'orderId': orderId,
'timestamp': int(round(time.time()*1000))
}
# specify url endpoint
url = self.base + self.endpoint['order']
# sign request
self.sign_request(params)
# request api response
response = requests.get(url, params=params, headers={'X-MBX-APIKEY': self.key})
data = json.loads(response.text)
return data
'''
return list of open orders
1. of a symbol if symbol is specified
2. of all symbols if symbol is not specified
@param
optional - symbol: str
'''
def get_open_order(self, symbol=None):
# specify general parameters for request body
params = {
'timestamp': int(round(time.time()*1000))
}
# specify optional parameters for request body
if symbol != None:
params['symbol'] = symbol
# specify url endpoint
url = self.base + self.endpoint['open_order']
# sign request
self.sign_request(params)
# request api response
response = requests.get(url, params=params, headers={'X-MBX-APIKEY': self.key})
# convert json to dict
data = json.loads(response.text)
# when exception occurs
if not isinstance(data, list):
return data
# convert dict to dataframe
open_order_df = df(data)
# if dataframe is not empty
if not open_order_df.empty:
# get time and updateTime from open_order_df
time_df = open_order_df['time'] # time
updateTime_df = open_order_df['updateTime'] # updateTime
# create new empty arrays for time and updateTime
_time = []
_updateTime = []
# convert time and updateTime to datetime format
for (t, u) in zip(time_df, updateTime_df):
_time.append(datetime.fromtimestamp(int(t/1000)))
_updateTime.append(datetime.fromtimestamp(int(u/1000)))
# convert time and updateTime to df
time_df = df(_time)
updateTime_df = df(_updateTime)
# replace original timestamps with formatted timestamps in open_order_df
open_order_df['time'] = time_df
open_order_df['updateTime'] = updateTime_df
return open_order_df
'''
return all orders of the specified symbol: active, canceled, filled
1. if orderId is specified, return orders with id >= orderId
2. else, return most recent orders for this symbol
@param
required - symbol: str
optional - orderId: long, limit: int
'''
def get_all_order(self, symbol, orderId=None, limit=None):
# specify the general parameters for request body
params = {
'symbol': symbol,
'timestamp': int(round(time.time()*1000))
}
# specify optional parameters for request body
if orderId is not None:
params['orderId'] = orderId
if limit is not None:
params['limit'] = limit
# specify url endpoint
url = self.base + self.endpoint['all_order']
# sign request
self.sign_request(params)
# request api response
response = requests.get(url, params=params, headers={'X-MBX-APIKEY': self.key})
# convert json to dict
data = json.loads(response.text)
# if the API returned an error payload (dict) instead of a list, pass it through
if not isinstance(data, list):
return data
# convert data to dataframe
all_order_df = df(data)
# get time and updateTime from all_order_df
time_df = all_order_df['time'] # time
updateTime_df = all_order_df['updateTime'] # updateTime
# create new empty arrays for time and updateTime
_time = []
_updateTime = []
# convert time and updateTime to datetime format
for (t, u) in zip(time_df, updateTime_df):
_time.append(datetime.fromtimestamp(int(t/1000)))
_updateTime.append(datetime.fromtimestamp(int(u/1000)))
# convert time and updateTime to df
time_df = df(_time)
updateTime_df = df(_updateTime)
# replace original timestamps with formatted timestamps in all_order_df
all_order_df['time'] = time_df
all_order_df['updateTime'] = updateTime_df
return all_order_df
'''
***********************************************************
POST METHODS
***********************************************************
'''
'''
make a new order
1. set test=True to validate the order without placing it
2. set test=False to place the order so it is reflected on the account
@private
@params
required - symbol: str, side: enum, orderType: enum
'''
def __new_order(self, symbol, side, orderType, test=True, timeInForce=None, quantity=None,
quoteOrderQty=None, price=None, stopPrice=None, icebergQty=None):
# specify the general parameters for request body
params = {
'symbol': symbol,
'side': side.value,
'type': orderType.value,
'newOrderRespType': 'RESULT',
'timestamp': int(round(time.time()*1000))
}
# specify optional parameters for request body
if orderType == Order.LIMIT:
params['timeInForce'] = timeInForce
params['quantity'] = quantity
params['price'] = price
if icebergQty is not None:
params['icebergQty'] = icebergQty
elif orderType == Order.MARKET:
params['quantity'] = quantity
elif orderType == Order.STOP_LOSS:
params['quantity'] = quantity
params['stopPrice'] = stopPrice
elif orderType == Order.STOP_LOSS_LIMIT:
params['timeInForce'] = timeInForce
params['quantity'] = quantity
params['price'] = price
params['stopPrice'] = stopPrice
if icebergQty is not None:
params['icebergQty'] = icebergQty
elif orderType == Order.TAKE_PROFIT:
params['quantity'] = quantity
params['stopPrice'] = stopPrice
elif orderType == Order.TAKE_PROFIT_LIMIT:
params['timeInForce'] = timeInForce
params['quantity'] = quantity
params['price'] = price
params['stopPrice'] = stopPrice
if icebergQty is not None:
params['icebergQty'] = icebergQty
elif orderType == Order.LIMIT_MAKER:
params['quantity'] = quantity
params['price'] = price
else:
raise Exception('Invalid order type.')
# specify url endpoint
if test:
url = self.base + self.endpoint['test_order']
else:
url = self.base + self.endpoint['order']
# sign request
self.sign_request(params)
# initialize new order, request api response
response = requests.post(url, params=params, headers={'X-MBX-APIKEY': self.key})
data = json.loads(response.text)
return data
'''
make a new buy order
1. set test=True to validate the buy order without placing it
2. set test=False to place the buy order so it is reflected on the account
@params
required - symbol: str, orderType: enum
'''
def buy(self, symbol, orderType, test=True, timeInForce=None, quantity=None,
quoteOrderQty=None, price=None, stopPrice=None, icebergQty=None):
return self.__new_order(symbol, Order.BUY, orderType, test=test, timeInForce=timeInForce, quantity=quantity,
quoteOrderQty=quoteOrderQty, price=price, stopPrice=stopPrice, icebergQty=icebergQty)
'''
make a new sell order
1. set test=True to validate the sell order without placing it
2. set test=False to place the sell order so it is reflected on the account
@params
required - symbol: str, orderType: enum
'''
def sell(self, symbol, orderType, test=True, timeInForce=None, quantity=None,
quoteOrderQty=None, price=None, stopPrice=None, icebergQty=None):
return self.__new_order(symbol, Order.SELL, orderType, test=test, timeInForce=timeInForce, quantity=quantity,
quoteOrderQty=quoteOrderQty, price=price, stopPrice=stopPrice, icebergQty=icebergQty)
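# Illustrative call (hypothetical symbol and prices; assumes the Order enum
# used above exposes LIMIT, BUY, SELL, and friends):
#
#   result = client.buy('BTCUSDT', Order.LIMIT, test=True,
#                       timeInForce='GTC', quantity=0.001, price=20000)
#   # test=True hits the test endpoint, so nothing is placed on the account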
'''
***********************************************************
DELETE METHODS
***********************************************************
'''
'''
cancel an open order
@param
required - symbol: str, orderId: long
'''
def cancel_order(self, symbol, orderId):
# specify parameters for request body
params = {
'symbol': symbol,
'orderId': orderId,
'timestamp': int(round(time.time()*1000))
}
# specify url endpoint
url = self.base + self.endpoint['order']
# sign request
self.sign_request(params)
# initialize cancel order, request api response
response = requests.delete(url, params=params, headers={'X-MBX-APIKEY': self.key})
data = json.loads(response.text)
return data
'''
sign the request with an HMAC-SHA256 signature of the query string, as Binance requires for authenticated endpoints
'''
def sign_request(self, params: dict):
# build the query string from the request parameters
query_string = '&'.join(["{}={}".format(d, params[d]) for d in params])
# sign the query string with the API secret using HMAC-SHA256
signature = hmac.new(self.secret.encode('utf-8'),
query_string.encode('utf-8'),
hashlib.sha256)
# add your signature to the request body
params['signature'] = signature.hexdigest() | response = requests.get(url, params=params, headers={'X-MBX-APIKEY': self.key})
data = json.loads(response.text) |
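# A minimal sketch of the signing scheme implemented by sign_request above
# (hypothetical values; mirrors Binance's HMAC-SHA256 recipe):
#
#   params = {'symbol': 'BTCUSDT', 'timestamp': 1600000000000}
#   query = 'symbol=BTCUSDT&timestamp=1600000000000'
#   sig = hmac.new(secret.encode('utf-8'), query.encode('utf-8'),
#                  hashlib.sha256).hexdigest()
#   params['signature'] = sig  # the signature rides along as one more parameter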
list_storage_account_sas_tokens.py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'ListStorageAccountSasTokensResult',
'AwaitableListStorageAccountSasTokensResult',
'list_storage_account_sas_tokens',
]
warnings.warn("""The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-nextgen:datalakeanalytics:listStorageAccountSasTokens'.""", DeprecationWarning)
@pulumi.output_type
class ListStorageAccountSasTokensResult:
"""
The SAS response that contains the storage account, container and associated SAS token for connection use.
"""
def __init__(__self__, next_link=None, value=None):
if next_link and not isinstance(next_link, str):
raise TypeError("Expected argument 'next_link' to be a str")
pulumi.set(__self__, "next_link", next_link)
if value and not isinstance(value, list):
raise TypeError("Expected argument 'value' to be a list")
pulumi.set(__self__, "value", value)
@property
@pulumi.getter(name="nextLink")
def next_link(self) -> str:
"""
The link (url) to the next page of results.
"""
return pulumi.get(self, "next_link")
@property
@pulumi.getter
def | (self) -> Sequence['outputs.SasTokenInformationResponseResult']:
"""
The results of the list operation.
"""
return pulumi.get(self, "value")
class AwaitableListStorageAccountSasTokensResult(ListStorageAccountSasTokensResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListStorageAccountSasTokensResult(
next_link=self.next_link,
value=self.value)
def list_storage_account_sas_tokens(account_name: Optional[str] = None,
container_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
storage_account_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListStorageAccountSasTokensResult:
"""
The SAS response that contains the storage account, container and associated SAS token for connection use.
Latest API Version: 2016-11-01.
:param str account_name: The name of the Data Lake Analytics account.
:param str container_name: The name of the Azure storage container for which the SAS token is being requested.
:param str resource_group_name: The name of the Azure resource group.
:param str storage_account_name: The name of the Azure storage account for which the SAS token is being requested.
"""
pulumi.log.warn("list_storage_account_sas_tokens is deprecated: The 'latest' version is deprecated. Please migrate to the function in the top-level module: 'azure-nextgen:datalakeanalytics:listStorageAccountSasTokens'.")
__args__ = dict()
__args__['accountName'] = account_name
__args__['containerName'] = container_name
__args__['resourceGroupName'] = resource_group_name
__args__['storageAccountName'] = storage_account_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:datalakeanalytics/latest:listStorageAccountSasTokens', __args__, opts=opts, typ=ListStorageAccountSasTokensResult).value
return AwaitableListStorageAccountSasTokensResult(
next_link=__ret__.next_link,
value=__ret__.value)
| value |
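# Usage sketch (hypothetical resource names; the function returns an awaitable
# result whose `value` field holds the SAS token entries):
#
#   tokens = list_storage_account_sas_tokens(
#       account_name='my-adla-account',
#       container_name='my-container',
#       resource_group_name='my-rg',
#       storage_account_name='mystorageaccount')
#   pulumi.export('firstSasToken', tokens.value[0])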
conf.py | # -*- coding: utf-8 -*-
#
# conda-forge documentation build configuration file, created by
# sphinx-quickstart on Wed Jun 1 01:44:13 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import csv
import os
import sys
import datetime
import cloud_sptheme as csp
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'myst_parser',
'sphinx.ext.todo',
'sphinxcontrib.fulltoc',
'sphinxcontrib.newsfeed',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
source_suffix = ['.rst', '.md']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'conda-forge'
copyright = u'2016-%s, conda-forge' % datetime.datetime.now().strftime("%Y")
author = u'conda-forge'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = datetime.datetime.now().strftime("%Y.%m")
# The full version, including alpha/beta/rc tags.
release = datetime.datetime.now().strftime("%Y.%m.%d")
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'README.md']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# ---- Options for link validation --------
anchor_check_fps = [
r'https://conda-forge.org/status/#armosxaddition$',
r'https://github.com/conda-forge/conda-smithy/blob/main/CHANGELOG.rst#v3130$',
r'https://github.com/.*#L\d+-L\d+$',
r'https://github.com/conda-forge/miniforge/#download$',
r'https://github.com/conda-incubator/grayskull#introduction$',
]
linkcheck_exclude_documents = [r'.*/minutes/.*']
linkcheck_ignore = [
r'https://anaconda.org/?$', # 403 forbidden
r'https://cloudflare.com/learning/cdn/what-is-a-cdn/?$', # 403 forbidden
r'https://gitter.im/conda-forge/core$', # private team
r'https://polys.me/?$', # 403 forbidden
] + anchor_check_fps
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'cloud'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'sidebar_localtoc_title': 'Overview',
'externalrefs': False,
'relbarbgcolor': '#000000',
'footerbgcolor': '#FFFFFF',
'sectionbgcolor': '#cd5c5c',
'linkcolor': 'rgb(31, 158, 111)',
'sidebarlinkcolor': 'rgb(31, 158, 111)',
'codebgcolor': '#F2F2F2',
'sidebarbgcolor': '#F2F2F2',
'logotarget': '../../../index',
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
html_theme_path = [csp.get_theme_dir()]
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = u'conda-forge v0.0.1a1'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/logo_black_on_trans.png'
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' users can customize the `jieba` dictionary path.
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'conda-forgedoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'conda-forge.tex', u'conda-forge Documentation',
u'conda-forge', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'conda-forge', u'conda-forge Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'conda-forge', u'conda-forge Documentation',
author, 'conda-forge', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
def rstjinja(app, docname, source):
|
def setup(app):
app.connect("source-read", rstjinja)
| def get_formated_names(path_file):
with open(path_file, "r") as csv_file:
dict_csv = csv.DictReader(csv_file)
sorted_csv = sorted(dict_csv, key=lambda d: d["name"])
return "\n".join(
f"* `{m['name']},"
f" @{m['github_username']}"
f" <https://github.com/{m['github_username']}>`__" for m in sorted_csv
)
if (
app.builder.format != "html"
or os.path.basename(docname) != "governance"
):
return
src = source[0]
current_file = os.path.dirname(__file__)
context = app.config.html_context
context["core_members"] = get_formated_names(
os.path.join(current_file, "core.csv")
)
context["emeritus_members"] = get_formated_names(
os.path.join(current_file, "emeritus.csv")
)
rendered = app.builder.templates.render_string(src, context)
source[0] = rendered |
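# For illustration (hypothetical snippet): with the context filled in above,
# the governance page can interpolate the member lists via Jinja, e.g.
#
#   Core members
#   ------------
#   {{ core_members }}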
XOR.py | # coding: utf-8
import numpy as np
x1 = np.asarray([0, 0, 1, 1])
x2 = np.asarray([0, 1, 0, 1])
X = np.row_stack((np.ones(shape=(1, 4)), x1, x2))
print("X:\n%s" % X)
y = np.asarray([0, 1, 1, 0])
W1 = np.asarray([[-1, 2, -2],
[-1, -2, 2]])
W2 = np.asarray([-1, 2, 2])
def sigmoid(input): |
np.set_printoptions(precision=6, suppress=True)
z1 = np.matmul(W1, X)
print("W1*X = z1:\n%s" % z1)
a1 = np.row_stack((np.ones(shape=(1, 4)), sigmoid(z1)))
print("sigmoid(z1) = a1:\n%s" % a1)
z2 = np.matmul(W2, a1)
print("W2*a1 = z2:\n%s" % z2)
a2 = sigmoid(z2)
print("------------------------")
print("prediction: %s" % a2)
print("target: %s" % y)
print("------------------------")
# output:
# X:
# [[1. 1. 1. 1.]
# [0. 0. 1. 1.]
# [0. 1. 0. 1.]]
# W1*X = z1:
# [[-1. -3. 1. -1.]
# [-1. 1. -3. -1.]]
# sigmoid(z1) = a1:
# [[1. 1. 1. 1. ]
# [0.000045 0. 0.999955 0.000045]
# [0.000045 0.999955 0. 0.000045]]
# W2*a1 = z2:
# [-0.999818 0.999909 0.999909 -0.999818]
# ------------------------
# prediction: [0.000045 0.999955 0.999955 0.000045]
# target: [0 1 1 0]
# ------------------------ | return 1 / (1 + np.power(np.e, -10 * (input)))
|
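# Why this works (a sketch): with inputs (1, x1, x2), the first hidden unit
# computes -1 + 2*x1 - 2*x2 > 0 only for (x1, x2) = (1, 0), i.e. x1 AND NOT x2;
# the second computes x2 AND NOT x1. The output unit, -1 + 2*a + 2*b > 0, ORs
# them together, so the network realizes XOR = (x1 AND NOT x2) OR (x2 AND NOT x1).
# The factor -10 in the sigmoid just sharpens the outputs towards 0/1.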
report_portal.py | """ ReportPortal.io integration
1. Download the ReportPortal `docker-compose.yml` file as "docker-compose.report-portal.yml"
2. Setup permissions for ElasticSearch
3. Configure the `YAML` file based on OS
4. `docker-compose up`
5. Open ReportPortal and login (change password afterwards)
"""
import platform
from pylenium.scripts import cli_utils
def | ():
""" Stop all ReportPortal containers.
Returns:
`CompletedProcess`
"""
command = 'docker stop $(docker ps -a -f "name=reportportal" --format "{{.Names}}")'
if platform.system() == 'Windows':
command = "FOR /f \"tokens=*\" %i IN " \
"('docker ps -a -f \"name=reportportal\" --format \"{{.Names}}\"') " \
"DO docker stop %i"
stop_containers_response = cli_utils.run_process(command, shell=True)
if stop_containers_response.returncode != 0:
raise EnvironmentError(f'[FAILED] {command}'
'\n\nUnable to stop ReportPortal containers:'
'\n * Make sure Docker is installed and running'
'\n * Make sure this command is run in the same dir as docker-compose.report-portal.yml'
f'\nResponse: {stop_containers_response}')
return stop_containers_response
def __remove_containers():
""" Remove all ReportPortal containers that are stopped.
Returns:
`CompletedProcess`
"""
command = 'docker rm $(docker ps -a -f "name=reportportal" --format "{{.Names}}")'
if platform.system() == 'Windows':
command = "FOR /f \"tokens=*\" %i IN " \
"('docker ps -a -f \"name=reportportal\" --format \"{{.Names}}\"') " \
"DO docker rm %i"
remove_containers_response = cli_utils.run_process(command, shell=True)
if remove_containers_response.returncode != 0:
raise EnvironmentError(f'[FAILED] {command}'
'\n\nUnable to remove ReportPortal containers after stopping them.'
f'\nResponse: {remove_containers_response}')
return remove_containers_response
def download_compose_yaml_file():
""" Download the ReportPortal docker-compose.yml file.
* It is recommended to run this from the Project Root because
this places the file as "docker-compose.report-portal.yml" in the context where this command was run.
Returns:
`CompletedProcess` if successful.
Raises:
`ConnectionError` if process returns non-zero status code.
"""
response = cli_utils.run_process([
'curl', 'https://raw.githubusercontent.com/reportportal/reportportal/master/docker-compose.yml',
'-o', './docker-compose.report-portal.yml'
])
if response.returncode != 0:
raise ConnectionError(f'\n\nUnable to download docker-compose file from ReportPortal repo. '
f'\nResponse: {response}')
return response
def compose_up():
""" Spin up a ReportPortal instance using docker-compose.report-portal.yml.
Returns:
`CompletedProcess`
Raises:
`EnvironmentError` if process returns non-zero status code.
"""
response = cli_utils.run_process([
'docker-compose', '-p', 'reportportal', # prefix containers with 'reportportal'
'-f', 'docker-compose.report-portal.yml', # use our auto-generated compose.yml
'up', '-d', '--force-recreate' # spin up in detached, "daemon mode"
])
if response.returncode != 0:
raise EnvironmentError('\n\nUnable to run "docker-compose" command to create ReportPortal instance.'
'\n * Make sure Docker is installed and running'
'\n * Make sure this command is run in the same dir as docker-compose.report-portal.yml'
f'\nResponse: {response}')
return response
def down():
""" Tear down the ReportPortal instance.
This does not use the docker-compose.report-portal.yml file because, depending on Docker version, you may
or may not have a network created that is not handled by docker-compose down.
1. Stop all reportportal containers
2. Kill (remove) all reportportal containers
3. Remove the reportportal_default network (depends on docker version)
Returns:
`CompletedProcess` from the final cleanup step (network removal)
Raises:
`EnvironmentError` if process returns non-zero status code.
"""
__stop_containers()
__remove_containers()
remove_network_response = cli_utils.run_process([
'docker', 'network', 'rm', 'reportportal_default'
])
return remove_network_response
| __stop_containers |
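# Typical lifecycle from a project root (sketch; ReportPortal's UI usually
# listens on http://localhost:8080 by default, but check your compose file):
#
#   download_compose_yaml_file()  # fetch docker-compose.report-portal.yml
#   compose_up()                  # start the ReportPortal containers
#   ...                           # run tests, view reports in the browser
#   down()                        # stop and remove containers and the network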
question2.rs | fn | () {
for i in 0..5 {
for j in 0..5 {
if j <= (4 - i) {
print!(" ");
}
else {
print!(" *");
}
}
println!("")
}
}
| main |
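// Note: because the guard is `j <= (4 - i)`, the first row (i == 0) prints no
// star at all and the triangle grows from 0 to 4 stars. If rows of 1..=5 stars
// were intended, a tighter sketch (hypothetical intent) would be:
//
//     for i in 0..5 {
//         println!("{}{}", "  ".repeat(4 - i), " *".repeat(i + 1));
//     }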
functions.js | const fetch = require('node-fetch')
const imgbb = require('imgbb-uploader')
const axios = require('axios')
const cfonts = require('cfonts')
const spin = require('spinnies')
const Crypto = require('crypto')
const wait = async (media) => new Promise(async (resolve, reject) => {
const attachmentData = `data:image/jpeg;base64,${media.toString('base64')}`
const response = await fetch("https://trace.moe/api/search",{method: "POST",body: JSON.stringify({ image: attachmentData }),headers: { "Content-Type": "application/json" }});
if (!response.ok) return reject('Image not found!'); // return so a failed response is not parsed below
const result = await response.json()
try {
const { is_adult, title, title_chinese, title_romaji, title_english, episode, season, similarity, filename, at, tokenthumb, anilist_id } = result.docs[0]
let belief = () => similarity < 0.89 ? "I have low confidence in this one: " : ""
let ecch = () => is_adult ? "Yes" : "No"
resolve({video: await getBuffer(`https://media.trace.moe/video/${anilist_id}/${encodeURIComponent(filename)}?t=${at}&token=${tokenthumb}`), teks: `${belief()}
~> Ecchi : *${ecch()}*
~> Japanese title: *${title}*
~> Romaji title: *${title_romaji}*
~> English title : *${title_english}*
~> Episode : *${episode}*
~> Season : *${season}*`});
} catch (e) {
console.log(e)
reject(`I don't know which anime this is`)
}
})
const simih = async (text) => {
try {
const sami = await fetch(`https://luc4rio.herokuapp.com/api/adicionais/simsimi?texto=${text}`, {method: 'GET'})
const res = await sami.json()
return res.success
} catch {
return "Simi doesn't want to answer, you idiot!"
}
}
const h2k = (number) => {
var SI_POSTFIXES = ["", " K", " M", " G", " T", " P", " E"]
var tier = Math.log10(Math.abs(number)) / 3 | 0
if(tier == 0) return number
var postfix = SI_POSTFIXES[tier]
var scale = Math.pow(10, tier * 3)
var scaled = number / scale
var formatted = scaled.toFixed(1) + ''
if (/\.0$/.test(formatted))
formatted = formatted.substr(0, formatted.length - 2)
return formatted + postfix
}
const getBuffer = async (url, options) => {
try {
options = options || {}
const res = await axios({
method: "get",
url,
headers: {
'DNT': 1,
'Upgrade-Insecure-Request': 1
},
...options,
responseType: 'arraybuffer'
})
return res.data
} catch (e) {
console.log(`Error : ${e}`)
}
}
const randomBytes = (length) => {
return Crypto.randomBytes(length)
}
const generateMessageID = () => {
return randomBytes(10).toString('hex').toUpperCase()
}
| }
return admins
}
const getRandom = (ext) => {
return `${Math.floor(Math.random() * 10000)}${ext}`
}
const spinner = {
"interval": 120,
"frames": [
"🕐",
"🕑",
"🕒",
"🕓",
"🕔",
"🕕",
"🕖",
"🕗",
"🕘",
"🕙",
"🕚",
"🕛"
]}
let globalSpinner;
const getGlobalSpinner = (disableSpins = false) => {
if(!globalSpinner) globalSpinner = new spin({ color: 'blue', succeedColor: 'green', spinner, disableSpins});
return globalSpinner;
}
const spins = getGlobalSpinner(false)
const start = (id, text) => {
spins.add(id, {text: text})
/*setTimeout(() => {
spins.succeed('load-spin', {text: 'Suksess'})
}, Number(wait) * 1000)*/
}
const info = (id, text) => {
spins.update(id, {text: text})
}
const success = (id, text) => {
spins.succeed(id, {text: text})
}
const close = (id, text) => {
spins.fail(id, {text: text})
}
const banner = cfonts.render(('Albion|Desenvolvedor'), {
font: 'chrome',
color: 'candy',
align: 'center',
gradient: ["red","yellow"],
lineHeight: 3
});
module.exports = { wait, simih, getBuffer, h2k, generateMessageID, getGroupAdmins, getRandom, start, info, success, banner, close } | const getGroupAdmins = (participants) => {
let admins = []
for (let i of participants) {
i.isAdmin ? admins.push(i.jid) : '' |
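// h2k in action (illustrative values):
//
//   h2k(950)      // -> 950 (below 1000, tier 0, returned unchanged)
//   h2k(1200)     // -> "1.2 K"
//   h2k(2500000)  // -> "2.5 M"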
app.module.ts | import { Injector, NgModule } from '@angular/core';
import { createCustomElement } from '@angular/elements';
import { BrowserModule } from '@angular/platform-browser';
import { LoadingComponent } from './loading.component'; | @NgModule({
declarations: [LoadingComponent],
imports: [BrowserModule],
entryComponents: [LoadingComponent]
})
export class AppModule {
constructor(private injector: Injector) {
const customLoading = createCustomElement(LoadingComponent, { injector });
customElements.define('gioboa-loading-widget', customLoading);
}
ngDoBootstrap() {}
} | |
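// Once this bundle is loaded on a page, the widget behaves as a plain custom
// element, framework-free (illustrative host page; the script name is
// hypothetical and depends on your build output):
//
//   <script src="gioboa-loading-widget.js"></script>
//   <gioboa-loading-widget></gioboa-loading-widget>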
node.go | package node
import (
"fmt"
"io/ioutil"
"net"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
"time"
kapi "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/labels"
"k8s.io/apimachinery/pkg/selection"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/client-go/informers"
"k8s.io/client-go/kubernetes"
clientset "k8s.io/client-go/kubernetes"
"k8s.io/client-go/tools/cache"
"k8s.io/client-go/tools/record"
"k8s.io/client-go/util/retry"
"k8s.io/klog/v2"
utilnet "k8s.io/utils/net"
honode "github.com/ovn-org/ovn-kubernetes/go-controller/hybrid-overlay/pkg/controller"
"github.com/ovn-org/ovn-kubernetes/go-controller/pkg/cni"
"github.com/ovn-org/ovn-kubernetes/go-controller/pkg/config"
"github.com/ovn-org/ovn-kubernetes/go-controller/pkg/factory"
"github.com/ovn-org/ovn-kubernetes/go-controller/pkg/informer"
"github.com/ovn-org/ovn-kubernetes/go-controller/pkg/kube"
"github.com/ovn-org/ovn-kubernetes/go-controller/pkg/node/controllers/upgrade"
"github.com/ovn-org/ovn-kubernetes/go-controller/pkg/types"
"github.com/ovn-org/ovn-kubernetes/go-controller/pkg/util"
)
// OvnNode is the object holder for utilities meant for node management
type OvnNode struct {
name string
client clientset.Interface
Kube kube.Interface
watchFactory factory.NodeWatchFactory
stopChan chan struct{}
recorder record.EventRecorder
gateway Gateway
}
// NewNode creates a new controller for node management
func NewNode(kubeClient kubernetes.Interface, wf factory.NodeWatchFactory, name string, stopChan chan struct{}, eventRecorder record.EventRecorder) *OvnNode {
return &OvnNode{
name: name,
client: kubeClient,
Kube: &kube.Kube{KClient: kubeClient},
watchFactory: wf,
stopChan: stopChan,
recorder: eventRecorder,
}
}
func setupOVNNode(node *kapi.Node) error {
var err error
encapIP := config.Default.EncapIP
if encapIP == "" {
encapIP, err = util.GetNodePrimaryIP(node)
if err != nil {
return fmt.Errorf("failed to obtain local IP from node %q: %v", node.Name, err)
}
} else {
if ip := net.ParseIP(encapIP); ip == nil {
return fmt.Errorf("invalid encapsulation IP provided %q", encapIP)
}
}
setExternalIdsCmd := []string{
"set",
"Open_vSwitch",
".",
fmt.Sprintf("external_ids:ovn-encap-type=%s", config.Default.EncapType),
fmt.Sprintf("external_ids:ovn-encap-ip=%s", encapIP),
fmt.Sprintf("external_ids:ovn-remote-probe-interval=%d",
config.Default.InactivityProbe),
fmt.Sprintf("external_ids:ovn-openflow-probe-interval=%d",
config.Default.OpenFlowProbe),
fmt.Sprintf("external_ids:hostname=\"%s\"", node.Name),
"external_ids:ovn-monitor-all=true",
fmt.Sprintf("external_ids:ovn-enable-lflow-cache=%t", config.Default.LFlowCacheEnable),
}
if config.Default.LFlowCacheLimit > 0 {
setExternalIdsCmd = append(setExternalIdsCmd,
fmt.Sprintf("external_ids:ovn-limit-lflow-cache=%d", config.Default.LFlowCacheLimit),
)
}
if config.Default.LFlowCacheLimitKb > 0 {
setExternalIdsCmd = append(setExternalIdsCmd,
fmt.Sprintf("external_ids:ovn-limit-lflow-cache-kb=%d", config.Default.LFlowCacheLimitKb),
)
}
_, stderr, err := util.RunOVSVsctl(setExternalIdsCmd...)
if err != nil {
return fmt.Errorf("error setting OVS external IDs: %v\n %q", err, stderr)
}
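// For orientation (hypothetical values): the external-IDs update assembled
// above boils down to something like
//
//	ovs-vsctl set Open_vSwitch . \
//	    external_ids:ovn-encap-type=geneve \
//	    external_ids:ovn-encap-ip=192.0.2.10 \
//	    external_ids:ovn-remote-probe-interval=100000 \
//	    external_ids:hostname="node-1" \
//	    external_ids:ovn-monitor-all=true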
// If EncapPort is not the default tell sbdb to use specified port.
if config.Default.EncapPort != config.DefaultEncapPort {
systemID, err := util.GetNodeChassisID()
if err != nil {
return err
}
uuid, _, err := util.RunOVNSbctl("--data=bare", "--no-heading", "--columns=_uuid", "find", "Encap",
fmt.Sprintf("chassis_name=%s", systemID))
if err != nil {
return err
}
if len(uuid) == 0 {
return fmt.Errorf("unable to find encap uuid to set geneve port for chassis %s", systemID)
}
_, stderr, errSet := util.RunOVNSbctl("set", "encap", uuid,
fmt.Sprintf("options:dst_port=%d", config.Default.EncapPort),
)
if errSet != nil {
return fmt.Errorf("error setting OVS encap-port: %v\n %q", errSet, stderr)
}
}
if config.Monitoring.NetFlowTargets != nil {
collectors := ""
for _, v := range config.Monitoring.NetFlowTargets {
collectors += "\"" + util.JoinHostPortInt32(v.Host.String(), v.Port) + "\"" + ","
}
collectors = strings.TrimSuffix(collectors, ",")
_, stderr, err := util.RunOVSVsctl(
"--",
"--id=@netflow",
"create",
"netflow",
fmt.Sprintf("targets=[%s]", collectors),
"active_timeout=60",
"--",
"set", "bridge", "br-int", "netflow=@netflow",
)
if err != nil {
return fmt.Errorf("error setting NetFlow: %v\n %q", err, stderr)
}
}
if config.Monitoring.SFlowTargets != nil {
collectors := ""
for _, v := range config.Monitoring.SFlowTargets {
collectors += "\"" + util.JoinHostPortInt32(v.Host.String(), v.Port) + "\"" + ","
}
collectors = strings.TrimSuffix(collectors, ",")
_, stderr, err := util.RunOVSVsctl(
"--",
"--id=@sflow",
"create",
"sflow",
"agent="+types.SFlowAgent,
fmt.Sprintf("targets=[%s]", collectors),
"--",
"set", "bridge", "br-int", "sflow=@sflow",
)
if err != nil {
return fmt.Errorf("error setting SFlow: %v\n %q", err, stderr)
}
}
if config.Monitoring.IPFIXTargets != nil {
collectors := ""
for _, v := range config.Monitoring.IPFIXTargets {
collectors += "\"" + util.JoinHostPortInt32(v.Host.String(), v.Port) + "\"" + ","
}
collectors = strings.TrimSuffix(collectors, ",")
_, stderr, err := util.RunOVSVsctl(
"--",
"--id=@ipfix",
"create",
"ipfix",
fmt.Sprintf("targets=[%s]", collectors),
"cache_active_timeout=60",
"--",
"set", "bridge", "br-int", "ipfix=@ipfix",
)
if err != nil {
return fmt.Errorf("error setting IPFIX: %v\n %q", err, stderr)
}
}
return nil
}
func isOVNControllerReady(name string) (bool, error) {
runDir := util.GetOvnRunDir()
pid, err := ioutil.ReadFile(runDir + "ovn-controller.pid")
if err != nil {
return false, fmt.Errorf("unknown pid for ovn-controller process: %v", err)
}
err = wait.PollImmediate(500*time.Millisecond, 60*time.Second, func() (bool, error) {
ctlFile := runDir + fmt.Sprintf("ovn-controller.%s.ctl", strings.TrimSuffix(string(pid), "\n"))
ret, _, err := util.RunOVSAppctl("-t", ctlFile, "connection-status")
if err == nil {
klog.Infof("Node %s connection status = %s", name, ret)
return ret == "connected", nil
}
return false, err
})
if err != nil {
return false, fmt.Errorf("timed out waiting sbdb for node %s: %v", name, err)
}
err = wait.PollImmediate(500*time.Millisecond, 60*time.Second, func() (bool, error) {
_, _, err := util.RunOVSVsctl("--", "br-exists", "br-int")
if err != nil {
return false, nil
}
return true, nil
})
if err != nil {
return false, fmt.Errorf("timed out checking whether br-int exists or not on node %s: %v", name, err)
}
err = wait.PollImmediate(500*time.Millisecond, 60*time.Second, func() (bool, error) {
stdout, _, err := util.RunOVSOfctl("dump-aggregate", "br-int")
if err != nil {
klog.V(5).Infof("Error dumping aggregate flows: %v "+
"for node: %s", err, name)
return false, nil
}
ret := strings.Contains(stdout, "flow_count=0")
if ret {
klog.V(5).Infof("Got a flow count of 0 when "+
"dumping flows for node: %s", name)
}
return !ret, nil
})
if err != nil {
return false, fmt.Errorf("timed out dumping br-int flow entries for node %s: %v", name, err)
}
return true, nil
}
// Starting with v21.03.0 OVN sets OVS.Interface.external-id:ovn-installed
// and OVNSB.Port_Binding.up when all OVS flows associated to a
// logical port have been successfully programmed.
// OVS.Interface.external-id:ovn-installed can only be used correctly
// in a combination with OVS.Interface.external-id:iface-id-ver
func getOVNIfUpCheckMode() (bool, error) {
if config.OvnKubeNode.DisableOVNIfaceIdVer {
klog.Infof("'iface-id-ver' is manually disabled, ovn-installed feature can't be used")
return false, nil
}
if _, stderr, err := util.RunOVNSbctl("--columns=up", "list", "Port_Binding"); err != nil {
if strings.Contains(stderr, "does not contain a column") {
klog.Infof("Falling back to using legacy CNI OVS flow readiness checks")
return false, nil
}
return false, fmt.Errorf("failed to check if port_binding is supported in OVN, stderr: %q, error: %v",
stderr, err)
}
klog.Infof("Detected support for port binding with external IDs")
return true, nil
}
// Start learns the subnets assigned to it by the master controller
// and calls the SetupNode script which establishes the logical switch
func (n *OvnNode) Start(wg *sync.WaitGroup) error {
var err error
var node *kapi.Node
var subnets []*net.IPNet
var mgmtPort ManagementPort
var mgmtPortConfig *managementPortConfig
var cniServer *cni.Server
var isOvnUpEnabled bool
klog.Infof("OVN Kube Node initialization, Mode: %s", config.OvnKubeNode.Mode)
// Setting debug log level during node bring up to expose bring up process.
// Log level is returned to configured value when bring up is complete.
var level klog.Level
if err := level.Set("5"); err != nil {
klog.Errorf("Setting klog \"loglevel\" to 5 failed, err: %v", err)
}
// Start and sync the watch factory to begin listening for events
if err := n.watchFactory.Start(); err != nil {
return err
}
if node, err = n.Kube.GetNode(n.name); err != nil {
return fmt.Errorf("error retrieving node %s: %v", n.name, err)
}
nodeAddrStr, err := util.GetNodePrimaryIP(node)
if err != nil {
return err
}
nodeAddr := net.ParseIP(nodeAddrStr)
if nodeAddr == nil {
return fmt.Errorf("failed to parse kubernetes node IP address. %v", err)
}
if config.OvnKubeNode.Mode != types.NodeModeSmartNICHost {
for _, auth := range []config.OvnAuthConfig{config.OvnNorth, config.OvnSouth} {
if err := auth.SetDBAuth(); err != nil {
return err
}
}
err = setupOVNNode(node)
if err != nil {
return err
}
}
// First wait for the node logical switch to be created by the Master, timeout is 300s.
err = wait.PollImmediate(500*time.Millisecond, 300*time.Second, func() (bool, error) {
if node, err = n.Kube.GetNode(n.name); err != nil {
klog.Infof("Waiting to retrieve node %s: %v", n.name, err)
return false, nil
}
subnets, err = util.ParseNodeHostSubnetAnnotation(node)
if err != nil {
klog.Infof("Waiting for node %s to start, no annotation found on node for subnet: %v", n.name, err)
return false, nil
}
return true, nil
})
if err != nil {
return fmt.Errorf("timed out waiting for node's: %q logical switch: %v", n.name, err)
}
klog.Infof("Node %s ready for ovn initialization with subnet %s", n.name, util.JoinIPNets(subnets, ","))
// Create CNI Server
if config.OvnKubeNode.Mode != types.NodeModeSmartNIC {
isOvnUpEnabled, err = getOVNIfUpCheckMode()
if err != nil {
return err
}
kclient, ok := n.Kube.(*kube.Kube)
if !ok {
return fmt.Errorf("cannot get kubeclient for starting CNI server")
}
cniServer, err = cni.NewCNIServer("", isOvnUpEnabled, n.watchFactory, kclient.KClient)
if err != nil {
return err
}
}
// Setup Management port and gateway
if config.OvnKubeNode.Mode != types.NodeModeSmartNICHost {
if _, err = isOVNControllerReady(n.name); err != nil {
return err
}
}
mgmtPort = NewManagementPort(n.name, subnets)
nodeAnnotator := kube.NewNodeAnnotator(n.Kube, node)
waiter := newStartupWaiter()
mgmtPortConfig, err = mgmtPort.Create(nodeAnnotator, waiter)
if err != nil {
return err
}
// Initialize gateway
if config.OvnKubeNode.Mode == types.NodeModeSmartNICHost {
err = n.initGatewaySmartNicHost(nodeAddr)
if err != nil {
return err
}
} else {
if err := n.initGateway(subnets, nodeAnnotator, waiter, mgmtPortConfig, nodeAddr); err != nil {
return err
}
}
if err := nodeAnnotator.Run(); err != nil {
return fmt.Errorf("failed to set node %s annotations: %v", n.name, err)
}
// Wait for management port and gateway resources to be created by the master
klog.Infof("Waiting for gateway and management port readiness...")
start := time.Now()
if err := waiter.Wait(); err != nil {
return err
}
go n.gateway.Run(n.stopChan, wg)
klog.Infof("Gateway and management port readiness took %v", time.Since(start))
// Note(adrianc): Smart-NIC deployments are expected to support the new shared gateway changes, upgrade flow
// is not needed. Future upgrade flows will need to take Smart-NICs into account.
if config.OvnKubeNode.Mode == types.NodeModeFull {
// Upgrade for Node. If we upgrade workers before masters, then we need to keep service routing via
// mgmt port until masters have been updated and modified OVN config. Run a goroutine to handle this case
// note this will change in the future to control-plane:
// https://github.com/kubernetes/kubernetes/pull/95382
masterNode, err := labels.NewRequirement("node-role.kubernetes.io/master", selection.Exists, nil)
if err != nil {
return err
}
labelSelector := labels.NewSelector()
labelSelector = labelSelector.Add(*masterNode)
informerFactory := informers.NewSharedInformerFactoryWithOptions(n.client, 0,
informers.WithTweakListOptions(func(options *metav1.ListOptions) {
options.LabelSelector = labelSelector.String()
}))
upgradeController := upgrade.NewController(n.Kube, informerFactory.Core().V1().Nodes())
initialTopoVersion := upgradeController.GetInitialTopoVersion()
bridgeName := n.gateway.GetGatewayBridgeIface()
needLegacySvcRoute := true
if initialTopoVersion >= types.OvnHostToSvcOFTopoVersion && config.GatewayModeShared == config.Gateway.Mode {
// Configure route for svc towards shared gw bridge
// Have to have the route to bridge for multi-NIC mode, where the default gateway may go to a non-OVS interface
if err := configureSvcRouteViaBridge(bridgeName); err != nil {
return err
}
needLegacySvcRoute = false
}
// Determine if we need to run upgrade checks
if initialTopoVersion != types.OvnCurrentTopologyVersion {
if needLegacySvcRoute && config.GatewayModeShared == config.Gateway.Mode {
klog.Info("System may be upgrading, falling back to to legacy K8S Service via mp0")
// add back legacy route for service via mp0
link, err := util.LinkSetUp(types.K8sMgmtIntfName)
if err != nil {
return fmt.Errorf("unable to get link for %s, error: %v", types.K8sMgmtIntfName, err)
}
var gwIP net.IP
for _, subnet := range config.Kubernetes.ServiceCIDRs {
if utilnet.IsIPv4CIDR(subnet) {
gwIP = mgmtPortConfig.ipv4.gwIP
} else {
gwIP = mgmtPortConfig.ipv6.gwIP
}
err := util.LinkRoutesAdd(link, gwIP, []*net.IPNet{subnet}, 0)
if err != nil && !os.IsExist(err) {
return fmt.Errorf("unable to add legacy route for services via mp0, error: %v", err)
}
}
}
// need to run upgrade controller
informerStop := make(chan struct{})
informerFactory.Start(informerStop)
go func() {
if err := upgradeController.Run(n.stopChan, informerStop); err != nil {
klog.Fatalf("Error while running upgrade controller: %v", err)
}
// upgrade complete now see what needs upgrading
// migrate service route from ovn-k8s-mp0 to shared gw bridge
if initialTopoVersion < types.OvnHostToSvcOFTopoVersion && config.GatewayModeShared == config.Gateway.Mode {
if err := upgradeServiceRoute(bridgeName); err != nil {
klog.Fatalf("Failed to upgrade service route for node, error: %v", err)
}
}
// ensure CNI support for port binding built into OVN, as masters have been upgraded
if initialTopoVersion < types.OvnPortBindingTopoVersion && cniServer != nil && !isOvnUpEnabled {
isOvnUpEnabled, err = getOVNIfUpCheckMode()
if err != nil {
klog.Errorf("%v", err)
}
if isOvnUpEnabled {
cniServer.EnableOVNPortUpSupport()
}
}
}()
}
}
if config.HybridOverlay.Enabled {
// Not supported with Smart-NIC, enforced in config
// TODO(adrianc): Revisit above comment
nodeController, err := honode.NewNode(
n.Kube,
n.name,
n.watchFactory.NodeInformer(),
n.watchFactory.LocalPodInformer(),
informer.NewDefaultEventHandler,
)
if err != nil {
return err
}
wg.Add(1)
go func() {
defer wg.Done()
nodeController.Run(n.stopChan)
}()
}
if err := level.Set(strconv.Itoa(config.Logging.Level)); err != nil {
klog.Errorf("Reset of initial klog \"loglevel\" failed, err: %v", err)
}
// start management port health check
mgmtPort.CheckManagementPortHealth(mgmtPortConfig, n.stopChan)
if config.OvnKubeNode.Mode != types.NodeModeSmartNICHost {
// start health check to ensure there are no stale OVS internal ports
go wait.Until(func() {
checkForStaleOVSInterfaces(n.name, n.watchFactory.(*factory.WatchFactory))
}, time.Minute, n.stopChan)
n.WatchEndpoints()
}
if config.OvnKubeNode.Mode != types.NodeModeSmartNIC {
// conditionally write cni config file
confFile := filepath.Join(config.CNI.ConfDir, config.CNIConfFileName)
_, err = os.Stat(confFile)
if os.IsNotExist(err) {
err = config.WriteCNIConfig()
if err != nil {
return err
}
}
}
if config.OvnKubeNode.Mode == types.NodeModeSmartNIC {
n.watchSmartNicPods(isOvnUpEnabled)
} else {
// start the cni server
err = cniServer.Start(cni.HandleCNIRequest)
}
return err
}
func (n *OvnNode) WatchEndpoints() {
n.watchFactory.AddEndpointsHandler(cache.ResourceEventHandlerFuncs{
UpdateFunc: func(old, new interface{}) {
epNew := new.(*kapi.Endpoints)
epOld := old.(*kapi.Endpoints)
newEpAddressMap := buildEndpointAddressMap(epNew.Subsets)
for item := range buildEndpointAddressMap(epOld.Subsets) {
if _, ok := newEpAddressMap[item]; !ok {
err := deleteConntrack(item.ip, item.port, item.protocol)
if err != nil {
klog.Errorf("Failed to delete conntrack entry for %s: %v", item.ip, err)
}
}
}
},
DeleteFunc: func(obj interface{}) {
ep := obj.(*kapi.Endpoints)
for item := range buildEndpointAddressMap(ep.Subsets) {
err := deleteConntrack(item.ip, item.port, item.protocol)
if err != nil {
klog.Errorf("Failed to delete conntrack entry for %s: %v", item.ip, err)
}
}
},
}, nil)
}
// validateGatewayMTU checks if the MTU of the given network interface is big
// enough to carry the `config.Default.MTU` and the Geneve header. If the MTU
// is not big enough, it will taint the node with the value of
// `types.OvnK8sSmallMTUTaintKey`
func (n *OvnNode) validateGatewayMTU(gatewayInterfaceName string) error {
tooSmallMTUTaint := &kapi.Taint{Key: types.OvnK8sSmallMTUTaintKey, Effect: kapi.TaintEffectNoSchedule}
mtu, err := util.GetNetworkInterfaceMTU(gatewayInterfaceName)
if err != nil {
return fmt.Errorf("could not get MTU from gateway network interface %s: %w", gatewayInterfaceName, err)
}
// calc required MTU
var requiredMTU int
if config.IPv4Mode && !config.IPv6Mode {
// we run in single-stack IPv4 only
requiredMTU = config.Default.MTU + types.GeneveHeaderLengthIPv4
} else {
// we run in single-stack IPv6 or dual-stack mode
requiredMTU = config.Default.MTU + types.GeneveHeaderLengthIPv6
}
// check if node needs to be tainted
if mtu < requiredMTU {
klog.V(2).Infof("MTU (%d) of gateway network interface %s is not big enough to deal with Geneve header overhead (sum %d). Tainting node with %v...", mtu, gatewayInterfaceName, requiredMTU, tooSmallMTUTaint)
return retry.RetryOnConflict(retry.DefaultRetry, func() error {
return n.Kube.SetTaintOnNode(n.name, tooSmallMTUTaint)
})
} else {
klog.V(2).Infof("MTU (%d) of gateway network interface %s is big enough to deal with Geneve header overhead (sum %d). Making sure node is not tainted with %v...", mtu, gatewayInterfaceName, requiredMTU, tooSmallMTUTaint)
return retry.RetryOnConflict(retry.DefaultRetry, func() error {
return n.Kube.RemoveTaintFromNode(n.name, tooSmallMTUTaint)
})
}
}
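// Worked example (assuming the usual ovn-kubernetes header constants of
// 58 bytes for IPv4 and 78 bytes for IPv6): with config.Default.MTU = 1400 on
// a dual-stack cluster, the gateway interface needs an MTU of at least
// 1400 + 78 = 1478, otherwise the node is tainted as unschedulable.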
type epAddressItem struct {
ip string
port int32
protocol kapi.Protocol
}
// buildEndpointAddressMap builds a map of all UDP and SCTP ports in the endpoint subset along with that port's IP address
func buildEndpointAddressMap(epSubsets []kapi.EndpointSubset) map[epAddressItem]struct{} {
epMap := make(map[epAddressItem]struct{})
for _, subset := range epSubsets {
for _, address := range subset.Addresses {
for _, port := range subset.Ports {
if port.Protocol == kapi.ProtocolUDP || port.Protocol == kapi.ProtocolSCTP {
epMap[epAddressItem{
ip: address.IP,
port: port.Port,
protocol: port.Protocol,
}] = struct{}{}
}
}
}
}
return epMap
}
func configureSvcRouteViaBridge(bridge string) error {
gwIPs, _, err := getGatewayNextHops()
if err != nil {
return fmt.Errorf("unable to get the gateway next hops, error: %v", err)
}
return configureSvcRouteViaInterface(bridge, gwIPs)
}
func upgradeServiceRoute(bridgeName string) error | {
klog.Info("Updating K8S Service route")
// Flush old routes
link, err := util.LinkSetUp(types.K8sMgmtIntfName)
if err != nil {
return fmt.Errorf("unable to get link: %s, error: %v", types.K8sMgmtIntfName, err)
}
if err := util.LinkRoutesDel(link, config.Kubernetes.ServiceCIDRs); err != nil {
return fmt.Errorf("unable to delete routes on upgrade, error: %v", err)
}
// add route via OVS bridge
if err := configureSvcRouteViaBridge(bridgeName); err != nil {
return fmt.Errorf("unable to add svc route via OVS bridge interface, error: %v", err)
}
klog.Info("Successfully updated Kubernetes service route towards OVS")
// Clean up gw0 and local ovs bridge as best effort
if err := deleteLocalNodeAccessBridge(); err != nil {
klog.Warningf("Error while removing Local Node Access Bridge, error: %v", err)
}
return nil
} |
|
testing.py | import gocept.httpserverlayer.wsgi
import gocept.selenium
import logging
import pkg_resources
import plone.testing
import urlparse
import zeit.cms.testing
import zeit.content.text.jinja
import zeit.push.interfaces
import zeit.workflow.testing
import zope.app.appsetup.product
import zope.component
import zope.interface
log = logging.getLogger(__name__)
class PushNotifier(object):
zope.interface.implements(zeit.push.interfaces.IPushNotifier)
def __init__(self):
|
def reset(self):
self.calls = []
def send(self, text, link, **kw):
self.calls.append((text, link, kw))
log.info('PushNotifier.send(%s)', dict(
text=text, link=link, kw=kw))
product_config = """\
<product-config zeit.push>
twitter-accounts file://{fixtures}/twitter-accounts.xml
twitter-main-account twitter-test
twitter-print-account twitter-print
facebook-accounts file://{fixtures}/facebook-accounts.xml
facebook-main-account fb-test
facebook-magazin-account fb-magazin
facebook-campus-account fb-campus
facebook-breaking-news-expiration 1800
push-target-url http://www.zeit.de/
mobile-image-url http://img.zeit.de/
urbanairship-audience-group subscriptions
urbanairship-author-push-template-name authors.json
mobile-buttons file://{fixtures}/mobile-buttons.xml
push-payload-templates http://xml.zeit.de/data/urbanairship-templates/
homepage-banner-uniqueid http://xml.zeit.de/banner
</product-config>
""".format(fixtures=pkg_resources.resource_filename(
__name__, 'tests/fixtures'))
class ZCMLLayer(zeit.cms.testing.ZCMLLayer):
def setUp(self):
# Break circular dependency
import zeit.content.article.testing
self.product_config = (
product_config +
zeit.cms.testing.cms_product_config +
zeit.workflow.testing.product_config +
zeit.content.article.testing.product_config)
super(ZCMLLayer, self).setUp()
ZCML_LAYER = ZCMLLayer('testing.zcml')
class PushMockLayer(plone.testing.Layer):
"""Helper layer to reset mock notifiers."""
def testSetUp(self):
for service in ['urbanairship', 'twitter', 'facebook', 'homepage']:
notifier = zope.component.getUtility(
zeit.push.interfaces.IPushNotifier, name=service)
notifier.reset()
PUSH_MOCK_LAYER = PushMockLayer()
class UrbanairshipTemplateLayer(plone.testing.Layer):
defaultBases = (ZCML_LAYER,)
def create_template(self, text=None, name='template.json'):
if not text:
text = pkg_resources.resource_string(
__name__, 'tests/fixtures/payloadtemplate.json')
with zeit.cms.testing.site(self['functional_setup'].getRootFolder()):
with zeit.cms.testing.interaction():
cfg = zope.app.appsetup.product.getProductConfiguration(
'zeit.push')
folder = zeit.cms.content.add.find_or_create_folder(
*urlparse.urlparse(
cfg['push-payload-templates']).path[1:].split('/'))
template = zeit.content.text.jinja.JinjaTemplate()
template.text = text
template.title = name.split('.')[0].capitalize()
folder[name] = template
def setUp(self):
self['create_template'] = self.create_template
def testSetUp(self):
self.create_template('', 'foo.json')
self.create_template('', 'eilmeldung.json')
self.create_template(pkg_resources.resource_string(
__name__, 'tests/fixtures/authors.json'),
'authors.json')
URBANAIRSHIP_TEMPLATE_LAYER = UrbanairshipTemplateLayer()
LAYER = plone.testing.Layer(
bases=(URBANAIRSHIP_TEMPLATE_LAYER, PUSH_MOCK_LAYER),
name='ZCMLPushMockLayer',
module=__name__)
class TestCase(zeit.cms.testing.FunctionalTestCase):
layer = LAYER
def create_payload_template(self, text=None, name='template.json'):
self.layer['create_template'](text, name)
WSGI_LAYER = zeit.cms.testing.WSGILayer(
name='WSGILayer', bases=(LAYER,))
HTTP_LAYER = gocept.httpserverlayer.wsgi.Layer(
name='HTTPLayer', bases=(WSGI_LAYER,))
WD_LAYER = gocept.selenium.WebdriverLayer(
name='WebdriverLayer', bases=(HTTP_LAYER,))
WEBDRIVER_LAYER = gocept.selenium.WebdriverSeleneseLayer(
name='WebdriverSeleneseLayer', bases=(WD_LAYER,))
| self.reset() |
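# Sketch of how the mock is typically asserted against in a test (hypothetical
# text/link values; PUSH_MOCK_LAYER resets `calls` before each test):
#
#   notifier = zope.component.getUtility(
#       zeit.push.interfaces.IPushNotifier, name='twitter')
#   ...  # trigger a push
#   self.assertEqual(('tweet', 'http://www.zeit.de/foo'), notifier.calls[0][:2])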
reset.rs | use malvolio::prelude::*;
| todo!()
}
#[post("/reset")]
#[allow(unused)]
pub fn reset_page() -> Html {
todo!()
} | #[get("/reset")]
#[allow(unused)]
pub fn reset() -> Html { |
acpi.rs | use core::ptr::NonNull;
use acpi::AcpiTables;
use x86_64::PhysAddr;
use crate::once::Once;
use super::memory::phys_to_virt;
#[derive(Clone, Copy, Debug)]
pub struct AcpiHandlerImpl {}
impl acpi::AcpiHandler for AcpiHandlerImpl {
unsafe fn map_physical_region<T>(
&self,
physical_address: usize,
size: usize,
) -> acpi::PhysicalMapping<Self, T> {
let virtual_address = phys_to_virt(PhysAddr::new(physical_address as u64));
acpi::PhysicalMapping::new(
physical_address,
NonNull::new(virtual_address.as_mut_ptr()).unwrap(),
size,
size,
*self,
)
}
fn unmap_physical_region<T>(_region: &acpi::PhysicalMapping<Self, T>) {}
}
static ACPI: Once<acpi::AcpiTables<AcpiHandlerImpl>> = Once::new();
static HANDLER: AcpiHandlerImpl = AcpiHandlerImpl {};
pub fn | (rsdp: u64) {
ACPI.call_once(|| unsafe { acpi::AcpiTables::from_rsdp(HANDLER, rsdp as usize).unwrap() });
}
pub fn get_acpi_root() -> &'static AcpiTables<AcpiHandlerImpl> {
ACPI.poll()
.expect("need to call acpi::init before get_acpi_root")
}
| init |
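// Usage sketch (hypothetical boot flow; `boot_info.rsdp_addr` stands in for
// wherever the bootloader reports the RSDP physical address):
//
//   acpi::init(boot_info.rsdp_addr);
//   let tables = get_acpi_root();  // panics if called before init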
q2r.py | # -*- coding: utf-8 -*-
"""Command line scripts to launch a `Q2rCalculation` for testing and demonstration purposes."""
from aiida.cmdline.params import options as options_core
from aiida.cmdline.params import types
from aiida.cmdline.utils import decorators
import click
from . import cmd_launch
from ..utils import launch, options
@cmd_launch.command('q2r')
@options_core.CODE(required=True, type=types.CodeParamType(entry_point='quantumespresso.q2r'))
@options_core.CALCULATION(required=True)
@options.MAX_NUM_MACHINES()
@options.MAX_WALLCLOCK_SECONDS()
@options.WITH_MPI()
@options.DAEMON()
@decorators.with_dbenv()
def launch_calculation(code, calculation, max_num_machines, max_wallclock_seconds, with_mpi, daemon):
| """Run a Q2rCalculation."""
from aiida.plugins import CalculationFactory
from aiida_quantumespresso.utils.resources import get_default_options
# Check that the parent calculation node comes from quantumespresso.ph.
# I cannot move this check into the option declaration, because CalcJobNode is not subclassed by the specific
# calculation plugins (only Process is), and there is no feature yet to filter by the associated process_type.
expected_process_type = 'aiida.calculations:quantumespresso.ph'
if calculation.process_type != expected_process_type:
raise click.BadParameter(
f'input calculation node has process_type: {calculation.process_type}; should be {expected_process_type}'
)
inputs = {
'code': code,
'parent_folder': calculation.outputs.remote_folder,
'metadata': {
'options': get_default_options(max_num_machines, max_wallclock_seconds, with_mpi),
}
}
launch.launch_process(CalculationFactory('quantumespresso.q2r'), daemon, **inputs) |
|
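A hedged example invocation of the command defined above; the code label and calculation PK are placeholders:

# aiida-quantumespresso calculation launch q2r --code q2r@localhost --calculation 1234 --daemon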
message_source.rs | use anyhow::Result;
use async_io::Timer;
use std::time::Duration;
use std::time::Instant;
use crate::runtime::AsyncKernel;
use crate::runtime::Block;
use crate::runtime::BlockMeta;
use crate::runtime::BlockMetaBuilder;
use crate::runtime::MessageIo;
use crate::runtime::MessageIoBuilder;
use crate::runtime::Pmt;
use crate::runtime::StreamIo; |
pub struct MessageSource {
message: Pmt,
interval: Duration,
t_last: Instant,
n_messages: Option<usize>,
}
impl MessageSource {
pub fn new(message: Pmt, interval: Duration, n_messages: Option<usize>) -> Block {
Block::new_async(
BlockMetaBuilder::new("MessageSource").build(),
StreamIoBuilder::new().build(),
MessageIoBuilder::new().register_output("out").build(),
MessageSource {
message,
interval,
t_last: Instant::now(),
n_messages,
},
)
}
async fn sleep(dur: Duration) {
Timer::after(dur).await;
}
}
#[async_trait]
impl AsyncKernel for MessageSource {
async fn work(
&mut self,
io: &mut WorkIo,
_sio: &mut StreamIo,
mio: &mut MessageIo<Self>,
_b: &mut BlockMeta,
) -> Result<()> {
let now = Instant::now();
if now >= self.t_last + self.interval {
mio.post(0, self.message.clone()).await;
self.t_last = now;
if let Some(ref mut n) = self.n_messages {
*n -= 1;
if *n == 0 {
io.finished = true;
}
}
}
io.block_on(MessageSource::sleep(
self.t_last + self.interval - Instant::now(),
));
Ok(())
}
async fn init(
&mut self,
_sio: &mut StreamIo,
_mio: &mut MessageIo<Self>,
_b: &mut BlockMeta,
) -> Result<()> {
self.t_last = Instant::now();
Ok(())
}
}
pub struct MessageSourceBuilder {
message: Pmt,
duration: Duration,
n_messages: Option<usize>,
}
impl MessageSourceBuilder {
pub fn new(message: Pmt, duration: Duration) -> MessageSourceBuilder {
MessageSourceBuilder {
message,
duration,
n_messages: None,
}
}
pub fn n_messages(mut self, n: usize) -> MessageSourceBuilder {
self.n_messages = Some(n);
self
}
pub fn build(self) -> Block {
MessageSource::new(self.message, self.duration, self.n_messages)
}
} | use crate::runtime::StreamIoBuilder;
use crate::runtime::WorkIo; |
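A hedged sketch of wiring the builder above into a flowgraph; the Pmt variant and the flowgraph API are illustrative:

// Sketch only: emits Pmt::Null every 100 ms, ten times, then flags completion.
// let src: Block = MessageSourceBuilder::new(Pmt::Null, Duration::from_millis(100))
//     .n_messages(10)
//     .build();
// fg.add_block(src); // `fg` is assumed to be this runtime's Flowgraph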
analog_sensor_driver_test.go | package aio
import (
"errors"
"strings"
"testing"
"time"
"github.com/stevebargelt/mygobot"
"github.com/stevebargelt/mygobot/gobottest"
)
var _ gobot.Driver = (*AnalogSensorDriver)(nil)
func TestAnalogSensorDriver(t *testing.T) {
a := newAioTestAdaptor()
d := NewAnalogSensorDriver(a, "1")
gobottest.Refute(t, d.Connection(), nil)
// default interval
gobottest.Assert(t, d.interval, 10*time.Millisecond)
a = newAioTestAdaptor()
d = NewAnalogSensorDriver(a, "42", 30*time.Second)
gobottest.Assert(t, d.Pin(), "42")
gobottest.Assert(t, d.interval, 30*time.Second)
a.TestAdaptorAnalogRead(func() (val int, err error) {
val = 100
return
})
ret := d.Command("Read")(nil).(map[string]interface{})
gobottest.Assert(t, ret["val"].(int), 100)
gobottest.Assert(t, ret["err"], nil)
}
func TestAnalogSensorDriverStart(t *testing.T) {
sem := make(chan bool, 1)
a := newAioTestAdaptor()
d := NewAnalogSensorDriver(a, "1")
// expect data to be received
d.Once(d.Event(Data), func(data interface{}) {
gobottest.Assert(t, data.(int), 100)
sem <- true
})
// send data
a.TestAdaptorAnalogRead(func() (val int, err error) {
val = 100
return
})
gobottest.Assert(t, d.Start(), nil)
select {
case <-sem:
case <-time.After(1 * time.Second):
t.Errorf("AnalogSensor Event \"Data\" was not published")
}
// expect error to be received
d.Once(d.Event(Error), func(data interface{}) {
gobottest.Assert(t, data.(error).Error(), "read error")
sem <- true
})
// send error
a.TestAdaptorAnalogRead(func() (val int, err error) {
err = errors.New("read error")
return
})
select {
case <-sem:
case <-time.After(1 * time.Second):
t.Errorf("AnalogSensor Event \"Error\" was not published")
}
// send a halt message
d.Once(d.Event(Data), func(data interface{}) {
sem <- true
})
a.TestAdaptorAnalogRead(func() (val int, err error) {
val = 200
return
})
d.halt <- true
select {
case <-sem:
t.Errorf("AnalogSensor Event should not published")
case <-time.After(1 * time.Second):
}
}
func TestAnalogSensorDriverHalt(t *testing.T) {
d := NewAnalogSensorDriver(newAioTestAdaptor(), "1")
done := make(chan struct{})
go func() {
<-d.halt
close(done)
}()
gobottest.Assert(t, d.Halt(), nil)
select {
case <-done:
case <-time.After(100 * time.Millisecond):
t.Errorf("AnalogSensor was not halted")
}
}
func TestAnalogSensorDriverDefaultName(t *testing.T) {
d := NewAnalogSensorDriver(newAioTestAdaptor(), "1")
gobottest.Assert(t, strings.HasPrefix(d.Name(), "AnalogSensor"), true)
}
func | (t *testing.T) {
d := NewAnalogSensorDriver(newAioTestAdaptor(), "1")
d.SetName("mybot")
gobottest.Assert(t, d.Name(), "mybot")
}
| TestAnalogSensorDriverSetName |
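A hedged sketch of driving the sensor outside the test harness; the adaptor, pin, and event wiring are illustrative:

// Sketch only: a real robot would pass a hardware adaptor instead of the test one.
// a := newAioTestAdaptor()
// d := NewAnalogSensorDriver(a, "0", 500*time.Millisecond)
// d.On(d.Event(Data), func(v interface{}) {
// 	fmt.Println("analog value:", v)
// })
// _ = d.Start()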
alert_request.rs | /*
* Client Portal Web API
*
* Client Poral Web API
*
* OpenAPI spec version: 1.0.0
*
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
#[allow(unused_imports)]
use serde_json::Value;
#[derive(Debug, Serialize, Deserialize)]
pub struct AlertRequest {
/// The message you want to receive via email or text message
#[serde(rename = "alertMessage")]
alert_message: Option<String>,
/// name of alert
#[serde(rename = "alertName")]
alert_name: Option<String>,
  /// whether the alert is repeatable; value can only be 0 or 1, and must be 1 for an MTA alert
#[serde(rename = "alertRepeatable")]
alert_repeatable: Option<i32>,
#[serde(rename = "conditions")]
conditions: Option<Vec<::models::AlertrequestConditions>>,
/// email address to receive alert
#[serde(rename = "email")]
email: Option<String>,
  /// format YYYYMMDD-HH:mm:ss; note this only takes effect when tif is GTD
#[serde(rename = "expireTime")]
expire_time: Option<String>,
/// value can only be 0 or 1, set to 1 to enable the alert only in IBKR mobile
#[serde(rename = "iTWSOrdersOnly")]
i_tws_orders_only: Option<i32>,
/// orderId is required when modifying alert. You can get it from /iserver/account/:accountId/alerts
#[serde(rename = "orderId")]
order_id: Option<i32>,
/// value can only be 0 or 1, set to 1 if the alert can be triggered outside regular trading hours.
#[serde(rename = "outsideRth")]
outside_rth: Option<i32>,
/// audio message to play when alert is triggered
#[serde(rename = "playAudio")]
play_audio: Option<String>,
  /// whether to send an email when the alert triggers; value can only be 0 or 1
#[serde(rename = "sendMessage")]
send_message: Option<i32>,
/// value can only be 0 or 1, set to 1 to allow to show alert in pop-ups
#[serde(rename = "showPopup")]
show_popup: Option<i32>,
/// time in force, can only be GTC or GTD
#[serde(rename = "tif")]
tif: Option<String>,
/// for MTA alert only, each user has a unique toolId and it will stay the same, do not send for normal alert
#[serde(rename = "toolId")]
tool_id: Option<i32>
}
impl AlertRequest {
pub fn new() -> AlertRequest {
AlertRequest {
alert_message: None,
alert_name: None,
alert_repeatable: None,
conditions: None,
email: None,
expire_time: None,
i_tws_orders_only: None,
order_id: None,
outside_rth: None,
play_audio: None,
send_message: None,
show_popup: None,
tif: None,
tool_id: None
}
}
pub fn set_alert_message(&mut self, alert_message: String) {
self.alert_message = Some(alert_message);
}
pub fn with_alert_message(mut self, alert_message: String) -> AlertRequest {
self.alert_message = Some(alert_message);
self
}
pub fn alert_message(&self) -> Option<&String> {
self.alert_message.as_ref()
}
pub fn reset_alert_message(&mut self) {
self.alert_message = None;
}
pub fn set_alert_name(&mut self, alert_name: String) {
self.alert_name = Some(alert_name);
}
pub fn with_alert_name(mut self, alert_name: String) -> AlertRequest {
self.alert_name = Some(alert_name);
self
}
pub fn alert_name(&self) -> Option<&String> {
self.alert_name.as_ref()
}
pub fn reset_alert_name(&mut self) {
self.alert_name = None;
}
pub fn set_alert_repeatable(&mut self, alert_repeatable: i32) {
self.alert_repeatable = Some(alert_repeatable);
}
pub fn with_alert_repeatable(mut self, alert_repeatable: i32) -> AlertRequest {
self.alert_repeatable = Some(alert_repeatable);
self
}
pub fn alert_repeatable(&self) -> Option<&i32> {
self.alert_repeatable.as_ref()
}
pub fn reset_alert_repeatable(&mut self) {
self.alert_repeatable = None;
}
pub fn set_conditions(&mut self, conditions: Vec<::models::AlertrequestConditions>) {
self.conditions = Some(conditions);
}
pub fn with_conditions(mut self, conditions: Vec<::models::AlertrequestConditions>) -> AlertRequest {
self.conditions = Some(conditions);
self
}
pub fn conditions(&self) -> Option<&Vec<::models::AlertrequestConditions>> {
self.conditions.as_ref()
}
pub fn reset_conditions(&mut self) {
self.conditions = None;
}
pub fn set_email(&mut self, email: String) {
self.email = Some(email);
}
pub fn with_email(mut self, email: String) -> AlertRequest {
self.email = Some(email);
self
}
pub fn email(&self) -> Option<&String> {
self.email.as_ref()
}
pub fn reset_email(&mut self) {
self.email = None;
}
pub fn set_expire_time(&mut self, expire_time: String) |
pub fn with_expire_time(mut self, expire_time: String) -> AlertRequest {
self.expire_time = Some(expire_time);
self
}
pub fn expire_time(&self) -> Option<&String> {
self.expire_time.as_ref()
}
pub fn reset_expire_time(&mut self) {
self.expire_time = None;
}
pub fn set_i_tws_orders_only(&mut self, i_tws_orders_only: i32) {
self.i_tws_orders_only = Some(i_tws_orders_only);
}
pub fn with_i_tws_orders_only(mut self, i_tws_orders_only: i32) -> AlertRequest {
self.i_tws_orders_only = Some(i_tws_orders_only);
self
}
pub fn i_tws_orders_only(&self) -> Option<&i32> {
self.i_tws_orders_only.as_ref()
}
pub fn reset_i_tws_orders_only(&mut self) {
self.i_tws_orders_only = None;
}
pub fn set_order_id(&mut self, order_id: i32) {
self.order_id = Some(order_id);
}
pub fn with_order_id(mut self, order_id: i32) -> AlertRequest {
self.order_id = Some(order_id);
self
}
pub fn order_id(&self) -> Option<&i32> {
self.order_id.as_ref()
}
pub fn reset_order_id(&mut self) {
self.order_id = None;
}
pub fn set_outside_rth(&mut self, outside_rth: i32) {
self.outside_rth = Some(outside_rth);
}
pub fn with_outside_rth(mut self, outside_rth: i32) -> AlertRequest {
self.outside_rth = Some(outside_rth);
self
}
pub fn outside_rth(&self) -> Option<&i32> {
self.outside_rth.as_ref()
}
pub fn reset_outside_rth(&mut self) {
self.outside_rth = None;
}
pub fn set_play_audio(&mut self, play_audio: String) {
self.play_audio = Some(play_audio);
}
pub fn with_play_audio(mut self, play_audio: String) -> AlertRequest {
self.play_audio = Some(play_audio);
self
}
pub fn play_audio(&self) -> Option<&String> {
self.play_audio.as_ref()
}
pub fn reset_play_audio(&mut self) {
self.play_audio = None;
}
pub fn set_send_message(&mut self, send_message: i32) {
self.send_message = Some(send_message);
}
pub fn with_send_message(mut self, send_message: i32) -> AlertRequest {
self.send_message = Some(send_message);
self
}
pub fn send_message(&self) -> Option<&i32> {
self.send_message.as_ref()
}
pub fn reset_send_message(&mut self) {
self.send_message = None;
}
pub fn set_show_popup(&mut self, show_popup: i32) {
self.show_popup = Some(show_popup);
}
pub fn with_show_popup(mut self, show_popup: i32) -> AlertRequest {
self.show_popup = Some(show_popup);
self
}
pub fn show_popup(&self) -> Option<&i32> {
self.show_popup.as_ref()
}
pub fn reset_show_popup(&mut self) {
self.show_popup = None;
}
pub fn set_tif(&mut self, tif: String) {
self.tif = Some(tif);
}
pub fn with_tif(mut self, tif: String) -> AlertRequest {
self.tif = Some(tif);
self
}
pub fn tif(&self) -> Option<&String> {
self.tif.as_ref()
}
pub fn reset_tif(&mut self) {
self.tif = None;
}
pub fn set_tool_id(&mut self, tool_id: i32) {
self.tool_id = Some(tool_id);
}
pub fn with_tool_id(mut self, tool_id: i32) -> AlertRequest {
self.tool_id = Some(tool_id);
self
}
pub fn tool_id(&self) -> Option<&i32> {
self.tool_id.as_ref()
}
pub fn reset_tool_id(&mut self) {
self.tool_id = None;
}
}
| {
self.expire_time = Some(expire_time);
} |
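A hedged sketch exercising the generated builder-style setters above; the field values are illustrative:

#[allow(dead_code)]
fn example_alert() -> AlertRequest {
    // Chained `with_*` calls each consume and return the request.
    AlertRequest::new()
        .with_alert_name("price alert".to_string())
        .with_alert_message("threshold crossed".to_string())
        .with_alert_repeatable(0)
        .with_tif("GTC".to_string())
        .with_send_message(1)
}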
config.go | // Copyright Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package aggregate implements a read-only aggregator for config stores.
package aggregate
import (
"errors"
"github.com/hashicorp/go-multierror"
"istio.io/istio/pilot/pkg/model"
"istio.io/istio/pkg/config"
"istio.io/istio/pkg/config/schema/collection"
)
var errorUnsupported = errors.New("unsupported operation: the config aggregator is read-only")
// makeStore creates an aggregate config store from several config stores and
// unifies their descriptors
func makeStore(stores []model.ConfigStore, writer model.ConfigStore) (model.ConfigStore, error) {
union := collection.NewSchemasBuilder()
storeTypes := make(map[config.GroupVersionKind][]model.ConfigStore)
for _, store := range stores {
for _, s := range store.Schemas().All() {
if len(storeTypes[s.Resource().GroupVersionKind()]) == 0 {
if err := union.Add(s); err != nil {
return nil, err
}
}
storeTypes[s.Resource().GroupVersionKind()] = append(storeTypes[s.Resource().GroupVersionKind()], store)
}
}
schemas := union.Build()
if err := schemas.Validate(); err != nil {
return nil, err
}
result := &store{
schemas: schemas,
stores: storeTypes,
writer: writer,
}
return result, nil
}
// MakeWriteableCache creates an aggregate config store cache from several config store caches. An additional
// `writer` config store is passed, which may or may not be part of `caches`.
func MakeWriteableCache(caches []model.ConfigStoreCache, writer model.ConfigStore) (model.ConfigStoreCache, error) {
stores := make([]model.ConfigStore, 0, len(caches))
for _, cache := range caches {
stores = append(stores, cache)
}
store, err := makeStore(stores, writer)
if err != nil {
return nil, err
}
return &storeCache{
ConfigStore: store,
caches: caches,
}, nil
}
| }
type store struct {
	// schemas is the unified set of schemas across all backing stores
schemas collection.Schemas
// stores is a mapping from config type to a store
stores map[config.GroupVersionKind][]model.ConfigStore
writer model.ConfigStore
}
func (cr *store) Schemas() collection.Schemas {
return cr.schemas
}
// Get the first config found in the stores.
func (cr *store) Get(typ config.GroupVersionKind, name, namespace string) *config.Config {
for _, store := range cr.stores[typ] {
config := store.Get(typ, name, namespace)
if config != nil {
return config
}
}
return nil
}
// List all configs in the stores.
func (cr *store) List(typ config.GroupVersionKind, namespace string) ([]config.Config, error) {
if len(cr.stores[typ]) == 0 {
return nil, nil
}
var errs *multierror.Error
var configs []config.Config
// Used to remove duplicated config
configMap := make(map[string]struct{})
for _, store := range cr.stores[typ] {
storeConfigs, err := store.List(typ, namespace)
if err != nil {
errs = multierror.Append(errs, err)
}
for _, config := range storeConfigs {
key := config.GroupVersionKind.Kind + config.Namespace + config.Name
if _, exist := configMap[key]; exist {
continue
}
configs = append(configs, config)
configMap[key] = struct{}{}
}
}
return configs, errs.ErrorOrNil()
}
func (cr *store) Delete(typ config.GroupVersionKind, name, namespace string) error {
if cr.writer == nil {
return errorUnsupported
}
return cr.writer.Delete(typ, name, namespace)
}
func (cr *store) Create(c config.Config) (string, error) {
if cr.writer == nil {
return "", errorUnsupported
}
return cr.writer.Create(c)
}
func (cr *store) Update(c config.Config) (string, error) {
if cr.writer == nil {
return "", errorUnsupported
}
return cr.writer.Update(c)
}
func (cr *store) UpdateStatus(c config.Config) (string, error) {
if cr.writer == nil {
return "", errorUnsupported
}
return cr.writer.UpdateStatus(c)
}
func (cr *store) Patch(typ config.GroupVersionKind, name, namespace string, patchFn config.PatchFunc) (string, error) {
if cr.writer == nil {
return "", errorUnsupported
}
return cr.writer.Patch(typ, name, namespace, patchFn)
}
type storeCache struct {
model.ConfigStore
caches []model.ConfigStoreCache
}
func (cr *storeCache) HasSynced() bool {
for _, cache := range cr.caches {
if !cache.HasSynced() {
return false
}
}
return true
}
func (cr *storeCache) RegisterEventHandler(kind config.GroupVersionKind, handler func(config.Config, config.Config, model.Event)) {
for _, cache := range cr.caches {
if _, exists := cache.Schemas().FindByGroupVersionKind(kind); exists {
cache.RegisterEventHandler(kind, handler)
}
}
}
func (cr *storeCache) Run(stop <-chan struct{}) {
for _, cache := range cr.caches {
go cache.Run(stop)
}
<-stop
} | // MakeCache creates an aggregate config store cache from several config store
// caches.
func MakeCache(caches []model.ConfigStoreCache) (model.ConfigStoreCache, error) {
return MakeWriteableCache(caches, nil) |
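A hedged usage sketch of the aggregator; the concrete caches and GroupVersionKind are illustrative:

// Sketch only: `crdCache` and `fileCache` stand in for real ConfigStoreCache values.
// aggregated, err := MakeCache([]model.ConfigStoreCache{crdCache, fileCache})
// if err != nil { /* handle */ }
// go aggregated.Run(stop)
// cfg := aggregated.Get(someGVK, "name", "namespace") // first store with a match wins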
test-parens.rs | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name = "foo"]
// @has foo/fn.foo.html
// @has - '//*[@class="rust fn"]' "_: &(ToString + 'static)"
pub fn foo(_: &(ToString + 'static)) | {} |
|
books.module.ts |
import { Module } from '@nestjs/common';
import { BooksService } from './books.service';
import BooksController from './books.controller';
import { APP_GUARD } from '@nestjs/core';
import { JwtAuthGuard } from 'src/auth/jwt-auth.guard';
@Module({
imports: [],
controllers: [BooksController],
providers: [BooksService,
{
provide: APP_GUARD,
useClass: JwtAuthGuard
}],
})
export default class | { }
| BooksModule |
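With JwtAuthGuard bound through APP_GUARD, every route in this module is protected by default; a hedged sketch of the usual opt-out pattern follows (the decorator and metadata key are assumptions, not part of this module):

// import { SetMetadata } from '@nestjs/common';
// export const IS_PUBLIC_KEY = 'isPublic';
// export const Public = () => SetMetadata(IS_PUBLIC_KEY, true);
// // JwtAuthGuard would then consult this metadata via Reflector before enforcing auth.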
elf.rs | //! Parsing of the 64-bit `ELF` file format.
use std::ffi::CStr;
use std::fmt::{self, Display, Formatter};
use std::fs::File;
use std::io::{self, Cursor, Read, Seek, SeekFrom};
use std::path::Path;
use byteorder::{ReadBytesExt, LE};
/// Handle for an `ELF` file.
#[derive(Debug)]
pub struct ElfFile<R> where R: Read + Seek {
target: R,
pub header: Header,
pub section_headers: Vec<SectionHeader>,
}
/// Header of a file.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Header {
pub identification: [u8; 16],
pub file_type: u16,
pub machine: u16,
pub version: u32,
pub entry: u64,
pub program_headers_offset: u64,
pub section_headers_offset: u64,
pub flags: u32,
pub header_size: u16,
pub program_header_size: u16,
pub program_header_entries: u16,
pub section_header_size: u16,
pub section_header_entries: u16,
pub section_name_string_table_index: u16,
}
/// Section in the file.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Section {
pub header: SectionHeader,
pub data: Vec<u8>,
}
/// Header of a single section.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct SectionHeader {
pub name: String,
pub name_offset: u32,
pub section_type: u32,
pub flags: u64,
pub addr: u64,
pub offset: u64,
pub size: u64,
pub link: u32,
pub info: u32,
pub addr_align: u64,
pub entry_size: u64,
}
/// An entry in the symbol table.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct SymbolTableEntry {
pub name: String,
pub info: u8,
pub other: u8,
pub section_table_index: u16,
pub value: u64,
pub size: u64,
}
impl ElfFile<File> {
/// Load an `ELF` file from the file system.
pub fn new<P: AsRef<Path>>(filename: P) -> ElfResult<ElfFile<File>> |
}
impl<'a> ElfFile<Cursor<&'a [u8]>> {
/// Create a new `ELF` file over a byte slice.
pub fn from_slice(target: &'a [u8]) -> ElfResult<ElfFile<Cursor<&'a [u8]>>> {
ElfFile::from_readable(Cursor::new(target))
}
}
impl<R> ElfFile<R> where R: Read + Seek {
/// Create a new `ELF` file operating on a reader.
pub fn from_readable(mut target: R) -> ElfResult<ElfFile<R>> {
let header = parse_header(&mut target)?;
let section_headers = parse_section_headers(header, &mut target)?;
Ok(ElfFile { target, header, section_headers })
}
/// Retrieve all sections.
pub fn sections(&mut self) -> ElfResult<Vec<Section>> {
// Build up the sections.
let mut sections = Vec::with_capacity(self.section_headers.len());
for header in &self.section_headers {
let mut data = vec![0; header.size as usize];
self.target.seek(SeekFrom::Start(header.offset))?;
self.target.read_exact(&mut data)?;
let section = Section { header: header.clone(), data };
sections.push(section);
}
Ok(sections)
}
/// Retrieve the section with a specific name if it is present.
pub fn get_section(&mut self, name: &str) -> ElfResult<Section> {
let header = self.get_section_header(name)?.clone();
let mut data = vec![0; header.size as usize];
self.target.seek(SeekFrom::Start(header.offset))?;
self.target.read_exact(&mut data)?;
Ok(Section { header, data })
}
/// Retrieve the symbols from the `.symtab` section if it is present.
pub fn get_symbols(&mut self) -> ElfResult<Vec<SymbolTableEntry>> {
let (size, offset) = {
let header = self.get_section_header(".symtab")?;
(header.size, header.offset)
};
let count = (size / 24) as usize;
let mut symbols = Vec::with_capacity(count);
let symbol_strings = self.get_section(".strtab")?.data;
self.target.seek(SeekFrom::Start(offset))?;
for _ in 0 .. count {
let name_offset = self.target.read_u32::<LE>()?;
symbols.push(SymbolTableEntry {
name: parse_string(&symbol_strings, name_offset),
info: self.target.read_u8()?,
other: self.target.read_u8()?,
section_table_index: self.target.read_u16::<LE>()?,
value: self.target.read_u64::<LE>()?,
size: self.target.read_u64::<LE>()?,
})
}
Ok(symbols)
}
fn get_section_header(&mut self, name: &str) -> ElfResult<&SectionHeader> {
self.section_headers.iter()
.find(|header| header.name == name)
.ok_or_else(|| ElfError::MissingSection(name.to_owned()))
}
}
/// Parse the header of the file.
fn parse_header<R>(target: &mut R) -> ElfResult<Header> where R: Read + Seek {
let header = Header {
identification: {
let mut buf = [0; 16];
target.read_exact(&mut buf)?;
buf
},
file_type: target.read_u16::<LE>()?,
machine: target.read_u16::<LE>()?,
version: target.read_u32::<LE>()?,
entry: target.read_u64::<LE>()?,
program_headers_offset: target.read_u64::<LE>()?,
section_headers_offset: target.read_u64::<LE>()?,
flags: target.read_u32::<LE>()?,
header_size: target.read_u16::<LE>()?,
program_header_size: target.read_u16::<LE>()?,
program_header_entries: target.read_u16::<LE>()?,
section_header_size: target.read_u16::<LE>()?,
section_header_entries: target.read_u16::<LE>()?,
section_name_string_table_index: target.read_u16::<LE>()?,
};
    // Ensure that this is `ELF`, 64-bit and little endian.
    // If not, we don't know how to handle it and would return complete garbage.
if (&header.identification[0..4] != b"\x7fELF")
|| (header.identification[4] != 2)
|| (header.identification[5] != 1) {
return Err(ElfError::Invalid);
}
Ok(header)
}
/// Parse the section headers of the file and return the string table with it.
fn parse_section_headers<R>(header: Header, target: &mut R)
-> ElfResult<Vec<SectionHeader>> where R: Read + Seek {
// Read the section headers.
target.seek(SeekFrom::Start(header.section_headers_offset))?;
let mut headers = Vec::with_capacity(header.section_header_entries as usize);
for _ in 0 .. header.section_header_entries {
let header = SectionHeader {
name: String::new(),
name_offset: target.read_u32::<LE>()?,
section_type: target.read_u32::<LE>()?,
flags: target.read_u64::<LE>()?,
addr: target.read_u64::<LE>()?,
offset: target.read_u64::<LE>()?,
size: target.read_u64::<LE>()?,
link: target.read_u32::<LE>()?,
info: target.read_u32::<LE>()?,
addr_align: target.read_u64::<LE>()?,
entry_size: target.read_u64::<LE>()?,
};
headers.push(header);
}
// Read the raw string table data.
let string_index = header.section_name_string_table_index as usize;
let string_table = &headers[string_index];
let mut strings = vec![0; string_table.size as usize];
target.seek(SeekFrom::Start(string_table.offset))?;
target.read_exact(&mut strings)?;
// Fill in the missing names for all sections.
for table in headers.iter_mut() {
table.name = parse_string(&strings, table.name_offset);
}
Ok(headers)
}
/// Parse a string from the string table.
fn parse_string(strings: &[u8], offset: u32) -> String {
let mut zero = offset as usize;
while strings[zero] != 0 {
zero += 1;
}
CStr::from_bytes_with_nul(&strings[offset as usize .. zero + 1])
.expect("invalid C string in elf string table")
.to_string_lossy()
.into_owned()
}
/// The error type for `ELF` loading.
pub enum ElfError {
Invalid,
MissingSection(String),
Io(io::Error),
}
pub(in super) type ElfResult<T> = Result<T, ElfError>;
impl Display for ElfError {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match self {
ElfError::Invalid => write!(f, "Invalid ELF file"),
ElfError::MissingSection(name) => write!(f, "Missing section: {}", name),
ElfError::Io(err) => write!(f, "I/O error: {}", err),
}
}
}
impl std::error::Error for ElfError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
ElfError::Io(err) => Some(err),
_ => None,
}
}
}
debug_display!(ElfError);
impl From<io::Error> for ElfError {
fn from(err: io::Error) -> ElfError {
ElfError::Io(err)
}
}
| {
let file = File::open(filename)?;
ElfFile::from_readable(file)
} |
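A hedged usage sketch of the reader API above; the path is illustrative:

#[allow(dead_code)]
fn dump_symbols() -> ElfResult<()> {
    // Any 64-bit little-endian ELF works; the path is an example.
    let mut elf = ElfFile::new("target/debug/app")?;
    for sym in elf.get_symbols()? {
        println!("{:#010x} {}", sym.value, sym.name);
    }
    Ok(())
}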
functions.ts | import { Social, SpotifyLink } from './socials';
export interface BuildUrl {
(username: string, socialType: Exclude<Social, 'spotify'>): string;
(username: string, socialType: 'spotify', linkType: SpotifyLink): string;
}
export interface ExtractUser { | (url: string, singleOperation?: boolean): string;
}
export interface IsValidDomain {
(input: string): boolean;
} | |
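A hedged sketch of a function satisfying the overloaded BuildUrl signature; the domain mapping is an assumption for illustration:

// const buildUrl: BuildUrl = (
//   username: string,
//   socialType: Social,
//   linkType?: SpotifyLink
// ): string =>
//   socialType === 'spotify'
//     ? `https://open.spotify.com/${linkType}/${username}`
//     : `https://${socialType}.com/${username}`;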
mathjax.directive.spec.ts | // Copyright 2021 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { Component } from '@angular/core';
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import { MathJaxDirective } from './mathjax.directive';
/**
* @fileoverview Unit tests for mathjax directive
*/
@Component({
selector: 'mock-comp-a',
template: ' <span [oppiaMathJax]="expr"></span>'
})
class MockCompA {
expr: string = '/frac{x}{y}';
}
const mockMathJaxHub = {
Queue: (...args) => {
return;
} | };
describe('MathJax directive', () => {
let component: MockCompA;
let fixture: ComponentFixture<MockCompA>;
const originalMathJax = window.MathJax;
beforeEach(waitForAsync(() => {
TestBed.configureTestingModule({
declarations: [MockCompA, MathJaxDirective]
}).compileComponents();
}));
beforeEach(waitForAsync(() => {
fixture = TestBed.createComponent(MockCompA);
component = fixture.componentInstance;
window.MathJax = mockMathJs as unknown as typeof MathJax;
}));
afterEach(() => {
window.MathJax = originalMathJax;
});
it('should re render math expr when expr changes', waitForAsync(() => {
const spy = spyOn(mockMathJaxHub, 'Queue');
component.expr = '/frac{z}{y}';
fixture.detectChanges();
expect(spy).toHaveBeenCalled();
}));
}); | };
const mockMathJs = {
Hub: mockMathJaxHub |
rtc.rs | //! Real-time clock.
use crate::{pac, rcc::lsi_hz};
use chrono::{Datelike, NaiveDate, NaiveDateTime, NaiveTime, Timelike};
use pac::rcc::{
bdcr::RTCSEL_A,
csr::LSIPRE_A::{DIV1, DIV128},
};
/// RTC clock selection
#[derive(Debug)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[repr(u8)]
pub enum Clk {
/// LSE oscillator clock selected.
Lse = RTCSEL_A::LSE as u8,
/// LSI oscillator clock selected.
Lsi = RTCSEL_A::LSI as u8,
/// HSE32 oscillator clock divided by 32 selected.
Hse = RTCSEL_A::HSE32 as u8,
}
/// Real-time clock driver.
#[derive(Debug)]
pub struct Rtc {
rtc: pac::RTC,
}
impl Rtc {
/// Create a new real-time clock driver.
///
/// This will **not** setup the source clock.
///
/// # Safety
///
/// This function _could_ be considered unsafe because it is not a
/// pure function.
/// The RTC is in the backup domain; system resets will not reset the RTC.
/// You are responsible for resetting the backup domain if required.
///
/// # Panics
///
/// * (debug) clock source is not ready.
///
/// # Example
///
/// LSE clock source (this depends on HW, example valid for NUCLEO board):
///
/// ```no_run
/// use stm32wl_hal::{
/// pac,
/// rcc::pulse_reset_backup_domain,
/// rtc::{Clk, Rtc},
/// };
///
/// let mut dp: pac::Peripherals = pac::Peripherals::take().unwrap();
///
/// unsafe { pulse_reset_backup_domain(&mut dp.RCC, &mut dp.PWR) };
/// dp.PWR.cr1.modify(|_, w| w.dbp().enabled());
/// dp.RCC.bdcr.modify(|_, w| w.lseon().on());
/// while dp.RCC.bdcr.read().lserdy().is_not_ready() {}
///
/// let rtc: Rtc = Rtc::new(dp.RTC, Clk::Lse, &mut dp.PWR, &mut dp.RCC);
/// ```
///
/// LSI clock source:
///
/// ```no_run
/// use stm32wl_hal::{
/// pac,
/// rcc::{enable_lsi, pulse_reset_backup_domain},
/// rtc::{Clk, Rtc},
/// };
///
/// let mut dp: pac::Peripherals = pac::Peripherals::take().unwrap();
///
/// unsafe { pulse_reset_backup_domain(&mut dp.RCC, &mut dp.PWR) };
/// enable_lsi(&mut dp.RCC);
///
/// let rtc: Rtc = Rtc::new(dp.RTC, Clk::Lsi, &mut dp.PWR, &mut dp.RCC);
/// ```
///
/// HSE clock source (this depends on HW, example valid for NUCLEO board):
///
/// ```no_run
/// use stm32wl_hal::{
/// pac,
/// rcc::pulse_reset_backup_domain,
/// rtc::{Clk, Rtc},
/// };
///
/// let mut dp: pac::Peripherals = pac::Peripherals::take().unwrap();
///
/// unsafe { pulse_reset_backup_domain(&mut dp.RCC, &mut dp.PWR) };
/// dp.RCC
/// .cr
/// .modify(|_, w| w.hseon().enabled().hsebyppwr().vddtcxo());
/// while dp.RCC.cr.read().hserdy().is_not_ready() {}
///
/// let rtc: Rtc = Rtc::new(dp.RTC, Clk::Hse, &mut dp.PWR, &mut dp.RCC);
/// ```
pub fn new(rtc: pac::RTC, clk: Clk, pwr: &mut pac::PWR, rcc: &mut pac::RCC) -> Rtc {
pwr.cr1.modify(|_, w| w.dbp().enabled());
match clk {
Clk::Lse => {
debug_assert!(rcc.bdcr.read().lserdy().is_ready());
rcc.bdcr.modify(|_, w| w.rtcsel().lse().rtcen().enabled());
}
Clk::Lsi => {
debug_assert!(rcc.csr.read().lsirdy().is_ready());
rcc.bdcr.modify(|_, w| w.rtcsel().lsi().rtcen().enabled());
}
Clk::Hse => {
debug_assert!(rcc.cr.read().hserdy().is_ready());
rcc.bdcr.modify(|_, w| w.rtcsel().hse32().rtcen().enabled());
}
}
#[cfg(not(feature = "stm32wl5x_cm0p"))]
rcc.apb1enr1.modify(|_, w| w.rtcapben().set_bit());
#[cfg(feature = "stm32wl5x_cm0p")]
rcc.c2apb1enr1.modify(|_, w| w.rtcapben().set_bit());
let mut rtc: Rtc = Rtc { rtc };
rtc.disable_write_protect();
rtc.configure_prescaler(rcc);
rtc
}
/// Source clock frequency in hertz.
#[inline]
pub fn hz(rcc: &pac::RCC) -> u32 {
match rcc.bdcr.read().rtcsel().variant() {
RTCSEL_A::NOCLOCK => 0,
RTCSEL_A::LSE => 32_768,
RTCSEL_A::LSI => lsi_hz(rcc).into(),
RTCSEL_A::HSE32 => 1_000_000,
}
}
// configure prescaler for a 1Hz clock
//
// RM0453 Rev 2 page 996:
// When both prescalers are used, it is recommended to configure the
// asynchronous prescaler to a high value to minimize consumption.
//
// async is 7 bit (128 max)
// sync is 15-bit (32_768 max)
fn configure_prescaler(&mut self, rcc: &mut pac::RCC) {
let (a_pre, s_pre): (u8, u16) = match rcc.bdcr.read().rtcsel().variant() {
RTCSEL_A::NOCLOCK => unreachable!(),
// (127 + 1) × (255 + 1) = 32_768 Hz
RTCSEL_A::LSE => (127, 255),
RTCSEL_A::LSI => match rcc.csr.read().lsipre().variant() {
// (99 + 1) × (319 + 1) = 32_000 Hz
DIV1 => (99, 319),
// (124 + 1) × (1 + 1) = 250 Hz
DIV128 => (124, 1),
},
// (99 + 1) × (9_999 + 1) = 1_000_000 Hz
RTCSEL_A::HSE32 => (99, 9_999),
};
// enter initialization mode
self.rtc.icsr.modify(|_, w| w.init().init_mode());
while self.rtc.icsr.read().initf().is_not_allowed() {}
// enable shadow register bypass
self.rtc.cr.modify(|_, w| w.bypshad().set_bit());
self.rtc
.prer
.write(|w| w.prediv_s().bits(s_pre).prediv_a().bits(a_pre));
// exit initialization mode
self.rtc.icsr.modify(|_, w| w.init().free_running_mode())
}
/// Set the date and time.
///
/// The value will take some duration to apply after this function returns:
///
/// * LPCAL=0: the counting restarts after 4 RTCCLK clock cycles
/// * LPCAL=1: the counting restarts after up to 2 RTCCLK + 1 ck_apre
///
/// # Panics
///
/// * Year is greater than or equal to 2100.
/// * Year is less than 2000.
/// * Backup domain write protection is enabled.
pub fn set_date_time(&mut self, date_time: chrono::NaiveDateTime) {
// safety: atomic read with no side effects
assert!(unsafe { (*pac::PWR::ptr()).cr1.read().dbp().bit_is_set() });
// enter initialization mode
self.rtc.icsr.modify(|_, w| w.init().init_mode());
while self.rtc.icsr.read().initf().is_not_allowed() {}
let hour: u8 = date_time.hour() as u8;
let ht: u8 = hour / 10;
let hu: u8 = hour % 10;
let minute: u8 = date_time.minute() as u8;
let mnt: u8 = minute / 10;
let mnu: u8 = minute % 10;
let second: u8 = date_time.second() as u8;
let st: u8 = second / 10;
let su: u8 = second % 10;
#[rustfmt::skip]
self.rtc.tr.write(|w| {
w
.pm().clear_bit() // 24h format
.ht().bits(ht)
.hu().bits(hu)
.mnt().bits(mnt)
.mnu().bits(mnu)
.st().bits(st)
.su().bits(su)
});
let year: i32 = date_time.year();
assert!((2000..2100).contains(&year));
let yt: u8 = ((year - 2000) / 10) as u8;
let yu: u8 = ((year - 2000) % 10) as u8;
let wdu: u8 = date_time.weekday().number_from_monday() as u8;
let month: u8 = date_time.month() as u8;
let mt: bool = month > 9;
let mu: u8 = month % 10;
let day: u8 = date_time.day() as u8;
let dt: u8 = day / 10;
let du: u8 = day % 10;
#[rustfmt::skip]
self.rtc.dr.write(|w| unsafe {
w
.yt().bits(yt)
.yu().bits(yu)
.wdu().bits(wdu)
.mt().bit(mt)
.mu().bits(mu)
.dt().bits(dt)
.du().bits(du)
});
// exit initialization mode
self.rtc.icsr.modify(|_, w| w.init().free_running_mode());
}
/// Returns `None` if the calendar is uninitialized
fn cale | lf) -> Option<()> {
use pac::rtc::icsr::INITS_A;
match self.rtc.icsr.read().inits().variant() {
INITS_A::NOTINITALIZED => None,
INITS_A::INITALIZED => Some(()),
}
}
/// Calendar Date
///
/// Returns `None` if the calendar has not been initialized.
pub fn date(&self) -> Option<NaiveDate> {
self.calendar_initialized()?;
let data = self.rtc.dr.read();
let year: i32 = 2000 + (data.yt().bits() as i32) * 10 + (data.yu().bits() as i32);
let month: u8 = data.mt().bits() as u8 * 10 + data.mu().bits();
let day: u8 = data.dt().bits() * 10 + data.du().bits();
NaiveDate::from_ymd_opt(year, month.into(), day.into())
}
fn ss_to_us(&self, ss: u32) -> u32 {
// running in BCD mode, only 15:0 are used
let ss: u32 = ss & 0xFFFF;
let pre_s: u32 = self.rtc.prer.read().prediv_s().bits().into();
// RM0453 Rev 2 page 1012
// SS can be larger than PREDIV_S only after a shift operation.
// In that case, the correct time/date is one second less than as
// indicated by RTC_TR/RTC_DR.
debug_assert!(ss <= pre_s);
// RM0453 Rev 2 page 1012
// SS[15:0] is the value in the synchronous prescaler counter.
// The fraction of a second is given by the formula below:
// Second fraction = (PREDIV_S - SS) / (PREDIV_S + 1)
(((pre_s - ss) * 100_000) / (pre_s + 1)) * 10
}
/// Current Time
///
/// Returns `None` if the calendar has not been initialized.
pub fn time(&self) -> Option<NaiveTime> {
loop {
self.calendar_initialized()?;
let ss: u32 = self.rtc.ssr.read().ss().bits();
let data = self.rtc.tr.read();
// If an RTCCLK edge occurs during read we may see inconsistent values
// so read ssr again and see if it has changed
// see RM0453 Rev 2 32.3.10 page 1002 "Reading the calendar"
let ss_after: u32 = self.rtc.ssr.read().ss().bits();
if ss == ss_after {
let mut hour: u8 = data.ht().bits() * 10 + data.hu().bits();
if data.pm().is_pm() {
hour += 12;
}
let minute: u8 = data.mnt().bits() * 10 + data.mnu().bits();
let second: u8 = data.st().bits() * 10 + data.su().bits();
let micro: u32 = self.ss_to_us(ss);
return NaiveTime::from_hms_micro_opt(
hour as u32,
minute as u32,
second as u32,
micro,
);
}
}
}
/// Calendar Date and Time
///
/// Returns `None` if the calendar has not been initialized.
pub fn date_time(&self) -> Option<NaiveDateTime> {
loop {
self.calendar_initialized()?;
let ss: u32 = self.rtc.ssr.read().ss().bits();
let dr = self.rtc.dr.read();
let tr = self.rtc.tr.read();
// If an RTCCLK edge occurs during a read we may see inconsistent values
// so read ssr again and see if it has changed
// see RM0453 Rev 2 32.3.10 page 1002 "Reading the calendar"
let ss_after: u32 = self.rtc.ssr.read().ss().bits();
if ss == ss_after {
let year: i32 = 2000 + (dr.yt().bits() as i32) * 10 + (dr.yu().bits() as i32);
let month: u8 = dr.mt().bits() as u8 * 10 + dr.mu().bits();
let day: u8 = dr.dt().bits() * 10 + dr.du().bits();
let date: NaiveDate = NaiveDate::from_ymd_opt(year, month as u32, day as u32)?;
let mut hour: u8 = tr.ht().bits() * 10 + tr.hu().bits();
if tr.pm().is_pm() {
hour += 12;
}
let minute: u8 = tr.mnt().bits() * 10 + tr.mnu().bits();
let second: u8 = tr.st().bits() * 10 + tr.su().bits();
let micro: u32 = self.ss_to_us(ss);
let time = NaiveTime::from_hms_micro_opt(
hour as u32,
minute as u32,
second as u32,
micro,
)?;
return Some(date.and_time(time));
}
}
}
/// Disable the RTC write protection.
#[inline]
pub fn disable_write_protect(&mut self) {
self.rtc.wpr.write(|w| w.key().deactivate1());
self.rtc.wpr.write(|w| w.key().deactivate2());
}
/// Enable the RTC write protection.
///
/// # Safety
///
/// * You must call [`disable_write_protect`] before using any other
/// `&mut self` RTC method.
///
/// [`disable_write_protect`]: Self::disable_write_protect
#[inline]
pub unsafe fn enable_write_protect(&mut self) {
self.rtc.wpr.write(|w| w.key().activate());
}
}
| ndar_initialized(&se |
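A hedged end-to-end sketch continuing the doc examples above; the chrono constructors and the date value are illustrative:

// let mut rtc = Rtc::new(dp.RTC, Clk::Lse, &mut dp.PWR, &mut dp.RCC);
// rtc.set_date_time(NaiveDate::from_ymd(2023, 1, 1).and_hms(12, 0, 0));
// if let Some(now) = rtc.date_time() {
//     // `now` carries sub-second resolution recovered from the SSR register
// }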