tutorial.js | /**
* @module jsdoc/tutorial
*/
const markdown = require('parsers/markdown');
const hasOwnProp = Object.prototype.hasOwnProperty;
/**
* Removes child tutorial from the parent. Does *not* unset child.parent though.
*
* @param {Tutorial} parent - parent tutorial.
* @param {Tutorial} child - Old child.
* @private
*/
function removeChild({children}, child) {
const index = children.indexOf(child);
if (index !== -1) {
children.splice(index, 1);
}
}
/**
* Adds a child to the parent tutorial. Does *not* set child.parent though.
*
* @param {Tutorial} parent - parent tutorial.
* @param {Tutorial} child - New child.
* @private
*/
function addChild({children}, child) {
children.push(child);
}
/**
* Represents a single JSDoc tutorial.
*/
class Tutorial {
/**
* @param {string} name - Tutorial name.
* @param {string} content - Text content.
* @param {number} type - Source formatting.
*/
constructor(name, content, type) {
this.title = this.name = this.longname = name;
this.content = content;
this.type = type;
// default values
this.parent = null;
this.children = [];
}
/**
* Moves this tutorial from its current parent to a different one.
*
* @param {?Tutorial} parent - New parent. If null, the tutorial has no parent.
*/
setParent(parent) {
// removes node from old parent
if (this.parent) {
removeChild(this.parent, this);
}
this.parent = parent;
if (parent) {
addChild(parent, this);
}
}
/* eslint-disable class-methods-use-this */
/**
* Removes a child from the current node.
*
* @param {Tutorial} child - Old child.
*/
removeChild(child) {
child.setParent(null);
}
/* eslint-enable class-methods-use-this */
/**
* Adds a new child to the current node.
*
* @param {Tutorial} child - New child.
*/
addChild(child) {
child.setParent(this);
}
/**
* Prepares source.
*
* @return {string} HTML source.
*/
parse() {
switch (this.type) {
// nothing to do
case exports.TYPES.HTML:
return this.content;
// markdown
case exports.TYPES.MARKDOWN:
return markdown.getParser()(this.content);
// uhm... should we react somehow?
// if not then this case can be merged with TYPES.HTML
default:
return this.content;
}
}
}
exports.Tutorial = Tutorial;
/**
* Represents the root tutorial.
* @extends {module:jsdoc/tutorial.Tutorial}
*/
class RootTutorial extends Tutorial {
constructor() {
super('', '', null);
this._tutorials = {};
}
/**
* Retrieve a tutorial by name.
* @param {string} name - Tutorial name.
* @return {module:jsdoc/tutorial.Tutorial} Tutorial instance.
*/
getByName(name) {
return hasOwnProp.call(this._tutorials, name) && this._tutorials[name];
}
/**
* Add a child tutorial to the root.
* @param {module:jsdoc/tutorial.Tutorial} child - Child tutorial.
*/
_addTutorial(child) {
this._tutorials[child.name] = child;
}
}
exports.RootTutorial = RootTutorial;
/**
* Tutorial source types.
*
* @enum {number}
*/
exports.TYPES = {
HTML: 1,
MARKDOWN: 2
};
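// Illustrative usage (a hypothetical caller, not part of this module):
//   const { Tutorial, RootTutorial, TYPES } = require('jsdoc/tutorial');
//   const root = new RootTutorial();
//   const child = new Tutorial('getting-started', '# Hello', TYPES.MARKDOWN);
//   root.addChild(child);              // sets child.parent via setParent
//   root._addTutorial(child);          // registers it for lookup by name
//   root.getByName('getting-started'); // -> child
//   child.parse();                     // renders the Markdown source to HTML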
database.py | import os
import logging
import asyncio
import sqlite3
import platform
from binascii import hexlify
from collections import defaultdict
from dataclasses import dataclass
from contextvars import ContextVar
from typing import Tuple, List, Union, Callable, Any, Awaitable, Iterable, Dict, Optional
from datetime import date
from prometheus_client import Gauge, Counter, Histogram
from lbry.utils import LockWithMetrics
from .bip32 import PubKey
from .transaction import Transaction, Output, OutputScript, TXRefImmutable, Input
from .constants import TXO_TYPES, CLAIM_TYPES
from .util import date_to_julian_day
from concurrent.futures.thread import ThreadPoolExecutor # pylint: disable=wrong-import-order
if platform.system() == 'Windows' or 'ANDROID_ARGUMENT' in os.environ or 'KIVY_BUILD' in os.environ:
from concurrent.futures.thread import ThreadPoolExecutor as ReaderExecutorClass # pylint: disable=reimported
else:
from concurrent.futures.process import ProcessPoolExecutor as ReaderExecutorClass
log = logging.getLogger(__name__)
sqlite3.enable_callback_tracebacks(True)
HISTOGRAM_BUCKETS = (
.005, .01, .025, .05, .075, .1, .25, .5, .75, 1.0, 2.5, 5.0, 7.5, 10.0, 15.0, 20.0, 30.0, 60.0, float('inf')
)
@dataclass
class ReaderProcessState:
cursor: sqlite3.Cursor
reader_context: Optional[ContextVar[ReaderProcessState]] = ContextVar('reader_context')
def initializer(path):
db = sqlite3.connect(path)
db.row_factory = dict_row_factory
db.executescript("pragma journal_mode=WAL;")
reader = ReaderProcessState(db.cursor())
reader_context.set(reader)
def run_read_only_fetchall(sql, params):
cursor = reader_context.get().cursor
try:
return cursor.execute(sql, params).fetchall()
except (Exception, OSError) as e:
log.exception('Error running read-only query:', exc_info=e)
raise
def run_read_only_fetchone(sql, params):
cursor = reader_context.get().cursor
try:
return cursor.execute(sql, params).fetchone()
except (Exception, OSError) as e:
log.exception('Error running read-only query:', exc_info=e)
raise
class AIOSQLite:
reader_executor: ReaderExecutorClass
waiting_writes_metric = Gauge(
"waiting_writes_count", "Number of waiting db writes", namespace="daemon_database"
)
waiting_reads_metric = Gauge(
"waiting_reads_count", "Number of waiting db writes", namespace="daemon_database"
)
write_count_metric = Counter(
"write_count", "Number of database writes", namespace="daemon_database"
)
read_count_metric = Counter(
"read_count", "Number of database reads", namespace="daemon_database"
)
acquire_write_lock_metric = Histogram(
'write_lock_acquired', 'Time to acquire the write lock', namespace="daemon_database", buckets=HISTOGRAM_BUCKETS
)
held_write_lock_metric = Histogram(
'write_lock_held', 'Length of time the write lock is held for', namespace="daemon_database",
buckets=HISTOGRAM_BUCKETS
)
def __init__(self):
# has to be single threaded as there is no mapping of thread:connection
self.writer_executor = ThreadPoolExecutor(max_workers=1)
self.writer_connection: Optional[sqlite3.Connection] = None
self._closing = False
self.query_count = 0
self.write_lock = LockWithMetrics(self.acquire_write_lock_metric, self.held_write_lock_metric)
self.writers = 0
self.read_ready = asyncio.Event()
self.urgent_read_done = asyncio.Event()
@classmethod
async def connect(cls, path: Union[bytes, str], *args, **kwargs):
sqlite3.enable_callback_tracebacks(True)
db = cls()
def _connect_writer():
db.writer_connection = sqlite3.connect(path, *args, **kwargs)
readers = max(os.cpu_count() - 2, 2)
db.reader_executor = ReaderExecutorClass(
max_workers=readers, initializer=initializer, initargs=(path, )
)
await asyncio.get_event_loop().run_in_executor(db.writer_executor, _connect_writer)
db.read_ready.set()
db.urgent_read_done.set()
return db
async def close(self):
if self._closing:
return
self._closing = True
def __checkpoint_and_close(conn: sqlite3.Connection):
conn.execute("PRAGMA WAL_CHECKPOINT(FULL);")
log.info("DB checkpoint finished.")
conn.close()
await asyncio.get_event_loop().run_in_executor(
self.writer_executor, __checkpoint_and_close, self.writer_connection)
self.writer_executor.shutdown(wait=True)
self.reader_executor.shutdown(wait=True)
self.read_ready.clear()
self.writer_connection = None
def executemany(self, sql: str, params: Iterable):
params = params if params is not None else []
# this fetchall is needed to prevent SQLITE_MISUSE
return self.run(lambda conn: conn.executemany(sql, params).fetchall())
def executescript(self, script: str) -> Awaitable:
return self.run(lambda conn: conn.executescript(script))
async def _execute_fetch(self, sql: str, parameters: Iterable = None,
read_only=False, fetch_all: bool = False) -> List[dict]:
read_only_fn = run_read_only_fetchall if fetch_all else run_read_only_fetchone
parameters = parameters if parameters is not None else []
still_waiting = False
urgent_read = False
if read_only:
self.waiting_reads_metric.inc()
self.read_count_metric.inc()
try:
while self.writers and not self._closing: # more writes can come in while we are waiting for the first
if not urgent_read and still_waiting and self.urgent_read_done.is_set():
# throttle the writes if they pile up
self.urgent_read_done.clear()
urgent_read = True
# wait until the running writes have finished
await self.read_ready.wait()
still_waiting = True
if self._closing:
raise asyncio.CancelledError()
return await asyncio.get_event_loop().run_in_executor(
self.reader_executor, read_only_fn, sql, parameters
)
finally:
if urgent_read:
# unthrottle the writers if they had to be throttled
self.urgent_read_done.set()
self.waiting_reads_metric.dec()
if fetch_all:
return await self.run(lambda conn: conn.execute(sql, parameters).fetchall())
return await self.run(lambda conn: conn.execute(sql, parameters).fetchone())
async def execute_fetchall(self, sql: str, parameters: Iterable = None,
read_only=False) -> List[dict]:
return await self._execute_fetch(sql, parameters, read_only, fetch_all=True)
async def execute_fetchone(self, sql: str, parameters: Iterable = None,
read_only=False) -> Optional[dict]:
return await self._execute_fetch(sql, parameters, read_only, fetch_all=False)
def execute(self, sql: str, parameters: Iterable = None) -> Awaitable[sqlite3.Cursor]:
parameters = parameters if parameters is not None else []
return self.run(lambda conn: conn.execute(sql, parameters))
async def run(self, fun, *args, **kwargs):
self.write_count_metric.inc()
self.waiting_writes_metric.inc()
# it's possible many writes are coming in one after the other, these can
# block reader calls for a long time
# if the reader waits for the writers to finish and then has to wait for
# yet more, it will clear the urgent_read_done event to block more writers
# piling on
try:
await self.urgent_read_done.wait()
except Exception as e:
self.waiting_writes_metric.dec()
raise e
self.writers += 1
# block readers
self.read_ready.clear()
try:
async with self.write_lock:
if self._closing:
raise asyncio.CancelledError()
return await asyncio.get_event_loop().run_in_executor(
self.writer_executor, lambda: self.__run_transaction(fun, *args, **kwargs)
)
finally:
self.writers -= 1
self.waiting_writes_metric.dec()
if not self.writers:
# unblock the readers once the last enqueued writer finishes
self.read_ready.set()
def __run_transaction(self, fun: Callable[[sqlite3.Connection, Any, Any], Any], *args, **kwargs):
self.writer_connection.execute('begin')
try:
self.query_count += 1
result = fun(self.writer_connection, *args, **kwargs) # type: ignore
self.writer_connection.commit()
return result
except (Exception, OSError) as e:
log.exception('Error running transaction:', exc_info=e)
self.writer_connection.rollback()
log.warning("rolled back")
raise
async def run_with_foreign_keys_disabled(self, fun, *args, **kwargs):
self.write_count_metric.inc()
self.waiting_writes_metric.inc()
try:
await self.urgent_read_done.wait()
except Exception as e:
self.waiting_writes_metric.dec()
raise e
self.writers += 1
self.read_ready.clear()
try:
async with self.write_lock:
if self._closing:
raise asyncio.CancelledError()
return await asyncio.get_event_loop().run_in_executor(
self.writer_executor, self.__run_transaction_with_foreign_keys_disabled, fun, args, kwargs
)
finally:
self.writers -= 1
self.waiting_writes_metric.dec()
if not self.writers:
self.read_ready.set()
def __run_transaction_with_foreign_keys_disabled(self,
fun: Callable[[sqlite3.Connection, Any, Any], Any],
args, kwargs):
foreign_keys_enabled, = self.writer_connection.execute("pragma foreign_keys").fetchone()
if not foreign_keys_enabled:
raise sqlite3.IntegrityError("foreign keys are disabled, use `AIOSQLite.run` instead")
try:
self.writer_connection.execute('pragma foreign_keys=off').fetchone()
return self.__run_transaction(fun, *args, **kwargs)
finally:
self.writer_connection.execute('pragma foreign_keys=on').fetchone()
def constraints_to_sql(constraints, joiner=' AND ', prepend_key=''):
sql, values = [], {}
for key, constraint in constraints.items():
tag = '0'
if '#' in key:
key, tag = key[:key.index('#')], key[key.index('#')+1:]
col, op, key = key, '=', key.replace('.', '_')
if not key:
sql.append(constraint)
continue
if key.startswith('$$'):
col, key = col[2:], key[1:]
elif key.startswith('$'):
values[key] = constraint
continue
if key.endswith('__not'):
col, op = col[:-len('__not')], '!='
elif key.endswith('__is_null'):
col = col[:-len('__is_null')]
sql.append(f'{col} IS NULL')
continue
if key.endswith('__is_not_null'):
col = col[:-len('__is_not_null')]
sql.append(f'{col} IS NOT NULL')
continue
if key.endswith('__lt'):
col, op = col[:-len('__lt')], '<'
elif key.endswith('__lte'):
col, op = col[:-len('__lte')], '<='
elif key.endswith('__gt'):
col, op = col[:-len('__gt')], '>'
elif key.endswith('__gte'):
col, op = col[:-len('__gte')], '>='
elif key.endswith('__like'):
col, op = col[:-len('__like')], 'LIKE'
elif key.endswith('__not_like'):
col, op = col[:-len('__not_like')], 'NOT LIKE'
elif key.endswith('__in') or key.endswith('__not_in'):
if key.endswith('__in'):
col, op, one_val_op = col[:-len('__in')], 'IN', '='
else:
col, op, one_val_op = col[:-len('__not_in')], 'NOT IN', '!='
if constraint:
if isinstance(constraint, (list, set, tuple)):
if len(constraint) == 1:
values[f'{key}{tag}'] = next(iter(constraint))
sql.append(f'{col} {one_val_op} :{key}{tag}')
else:
keys = []
for i, val in enumerate(constraint):
keys.append(f':{key}{tag}_{i}')
values[f'{key}{tag}_{i}'] = val
sql.append(f'{col} {op} ({", ".join(keys)})')
elif isinstance(constraint, str):
sql.append(f'{col} {op} ({constraint})')
else:
raise ValueError(f"{col} requires a list, set or string as constraint value.")
continue
elif key.endswith('__any') or key.endswith('__or'):
where, subvalues = constraints_to_sql(constraint, ' OR ', key+tag+'_')
sql.append(f'({where})')
values.update(subvalues)
continue
if key.endswith('__and'):
where, subvalues = constraints_to_sql(constraint, ' AND ', key+tag+'_')
sql.append(f'({where})')
values.update(subvalues)
continue
sql.append(f'{col} {op} :{prepend_key}{key}{tag}')
values[prepend_key+key+tag] = constraint
return joiner.join(sql) if sql else '', values
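# Hand-traced example (illustrative; note the placeholder names keep the full
# constraint suffix):
#   constraints_to_sql({'height__gte': 100, 'txid__in': ['a', 'b']})
#   -> ('height >= :height__gte0 AND txid IN (:txid__in0_0, :txid__in0_1)',
#       {'height__gte0': 100, 'txid__in0_0': 'a', 'txid__in0_1': 'b'})
# The '#tag' suffix and prepend_key exist to keep placeholder names unique
# when the function recurses for __or/__and groups.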
def query(select, **constraints) -> Tuple[str, Dict[str, Any]]:
sql = [select]
limit = constraints.pop('limit', None)
offset = constraints.pop('offset', None)
order_by = constraints.pop('order_by', None)
group_by = constraints.pop('group_by', None)
accounts = constraints.pop('accounts', [])
if accounts:
constraints['account__in'] = [a.public_key.address for a in accounts]
where, values = constraints_to_sql(constraints)
if where:
sql.append('WHERE')
sql.append(where)
if group_by is not None:
sql.append(f'GROUP BY {group_by}')
if order_by:
sql.append('ORDER BY')
if isinstance(order_by, str):
sql.append(order_by)
elif isinstance(order_by, list):
sql.append(', '.join(order_by))
else:
raise ValueError("order_by must be string or list")
if limit is not None:
sql.append(f'LIMIT {limit}')
if offset is not None:
sql.append(f'OFFSET {offset}')
return ' '.join(sql), values
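# Illustrative example:
#   query("SELECT * FROM tx", height__gte=100, limit=10)
#   -> ('SELECT * FROM tx WHERE height >= :height__gte0 LIMIT 10',
#       {'height__gte0': 100})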
def interpolate(sql, values):
for k in sorted(values.keys(), reverse=True):
value = values[k]
if isinstance(value, bytes):
value = f"X'{hexlify(value).decode()}'"
elif isinstance(value, str):
value = f"'{value}'"
else:
value = str(value)
sql = sql.replace(f":{k}", value)
return sql
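# Keys are substituted in reverse-sorted order so that ':txid0_10' is replaced
# before ':txid0_1' and longer names are never clobbered by shorter prefixes.
# Illustrative example:
#   interpolate("SELECT * FROM tx WHERE txid = :txid0", {'txid0': 'beef'})
#   -> "SELECT * FROM tx WHERE txid = 'beef'"
# This is plain string substitution, useful for logging and debugging rather
# than for executing untrusted input.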
def constrain_single_or_list(constraints, column, value, convert=lambda x: x, negate=False):
if value is not None:
if isinstance(value, list):
value = [convert(v) for v in value]
if len(value) == 1:
if negate:
constraints[f"{column}__or"] = {
f"{column}__is_null": True,
f"{column}__not": value[0]
}
else:
constraints[column] = value[0]
elif len(value) > 1:
if negate:
constraints[f"{column}__or"] = {
f"{column}__is_null": True,
f"{column}__not_in": value
}
else:
constraints[f"{column}__in"] = value
elif negate:
constraints[f"{column}__or"] = {
f"{column}__is_null": True,
f"{column}__not": convert(value)
}
else:
constraints[column] = convert(value)
return constraints
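# Illustrative examples:
#   constrain_single_or_list({}, 'claim_id', ['abc'])    -> {'claim_id': 'abc'}
#   constrain_single_or_list({}, 'claim_id', ['a', 'b']) -> {'claim_id__in': ['a', 'b']}
# With negate=True the constraint is inverted and NULL values are also matched.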
class SQLiteMixin:
SCHEMA_VERSION: Optional[str] = None
CREATE_TABLES_QUERY: str
MAX_QUERY_VARIABLES = 900
CREATE_VERSION_TABLE = """
create table if not exists version (
version text
);
"""
def __init__(self, path):
self._db_path = path
self.db: AIOSQLite = None
self.ledger = None
async def open(self):
log.info("connecting to database: %s", self._db_path)
self.db = await AIOSQLite.connect(self._db_path, isolation_level=None)
if self.SCHEMA_VERSION:
tables = [t[0] for t in await self.db.execute_fetchall(
"SELECT name FROM sqlite_master WHERE type='table';"
)]
if tables:
if 'version' in tables:
version = await self.db.execute_fetchone("SELECT version FROM version LIMIT 1;")
if version == (self.SCHEMA_VERSION,):
return
await self.db.executescript('\n'.join(
f"DROP TABLE {table};" for table in tables
) + '\n' + 'PRAGMA WAL_CHECKPOINT(FULL);' + '\n' + 'VACUUM;')
await self.db.execute(self.CREATE_VERSION_TABLE)
await self.db.execute("INSERT INTO version VALUES (?)", (self.SCHEMA_VERSION,))
await self.db.executescript(self.CREATE_TABLES_QUERY)
async def close(self):
await self.db.close()
@staticmethod
def _insert_sql(table: str, data: dict, ignore_duplicate: bool = False,
replace: bool = False) -> Tuple[str, List]:
columns, values = [], []
for column, value in data.items():
columns.append(column)
values.append(value)
policy = ""
if ignore_duplicate:
policy = " OR IGNORE"
if replace:
policy = " OR REPLACE"
sql = "INSERT{} INTO {} ({}) VALUES ({})".format(
policy, table, ', '.join(columns), ', '.join(['?'] * len(values))
)
return sql, values
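# Illustrative example:
#   _insert_sql('tx', {'txid': 'beef', 'height': 5}, ignore_duplicate=True)
#   -> ("INSERT OR IGNORE INTO tx (txid, height) VALUES (?, ?)", ['beef', 5])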
@staticmethod
def _update_sql(table: str, data: dict, where: str,
constraints: Union[list, tuple]) -> Tuple[str, list]:
columns, values = [], []
for column, value in data.items():
columns.append(f"{column} = ?")
values.append(value)
values.extend(constraints)
sql = "UPDATE {} SET {} WHERE {}".format(
table, ', '.join(columns), where
)
return sql, values
def dict_row_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
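# Maps each result row to a dict keyed by column name, e.g. a row from
# "SELECT 1 AS a, 2 AS b" becomes {'a': 1, 'b': 2}.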
SQLITE_MAX_INTEGER = 9223372036854775807
def _get_spendable_utxos(transaction: sqlite3.Connection, accounts: List, decoded_transactions: Dict[str, Transaction],
result: Dict[Tuple[bytes, int, bool], List[int]], reserved: List[Transaction],
amount_to_reserve: int, reserved_amount: int, floor: int, ceiling: int,
fee_per_byte: int) -> int:
accounts_fmt = ",".join(["?"] * len(accounts))
txo_query = f"""
SELECT tx.txid, txo.txoid, tx.raw, tx.height, txo.position as nout, tx.is_verified, txo.amount FROM txo
INNER JOIN account_address USING (address)
LEFT JOIN txi USING (txoid)
INNER JOIN tx USING (txid)
WHERE txo.txo_type=0 AND txi.txoid IS NULL AND tx.txid IS NOT NULL AND NOT txo.is_reserved
AND txo.amount >= ? AND txo.amount < ?
"""
if accounts:
txo_query += f"""
AND account_address.account {'= ?' if len(accounts_fmt) == 1 else 'IN (' + accounts_fmt + ')'}
"""
txo_query += """
ORDER BY txo.amount ASC, tx.height DESC
"""
# prefer confirmed, but save unconfirmed utxos from this selection in case they are needed
unconfirmed = []
for row in transaction.execute(txo_query, (floor, ceiling, *accounts)):
(txid, txoid, raw, height, nout, verified, amount) = row.values()
# verified or unverified transactions were found - reset the gap count
# multiple txos can come from the same tx, only decode it once and cache
if txid not in decoded_transactions:
# cache the decoded transaction
decoded_transactions[txid] = Transaction(raw)
decoded_tx = decoded_transactions[txid]
# save the unconfirmed txo for possible use later, if still needed
if verified:
# add the txo to the reservation, minus the fee for including it
reserved_amount += amount
reserved_amount -= Input.spend(decoded_tx.outputs[nout]).size * fee_per_byte
# mark it as reserved
result[(raw, height, verified)].append(nout)
reserved.append(txoid)
# if we've reserved enough, return
if reserved_amount >= amount_to_reserve:
return reserved_amount
else:
unconfirmed.append((txid, txoid, raw, height, nout, verified, amount))
# we're popping the items, so to get them in the order they were seen they are reversed
unconfirmed.reverse()
# add available unconfirmed txos if any were previously found
while unconfirmed and reserved_amount < amount_to_reserve:
(txid, txoid, raw, height, nout, verified, amount) = unconfirmed.pop()
# it's already decoded
decoded_tx = decoded_transactions[txid]
# add to the reserved amount
reserved_amount += amount
reserved_amount -= Input.spend(decoded_tx.outputs[nout]).size * fee_per_byte
result[(raw, height, verified)].append(nout)
reserved.append(txoid)
return reserved_amount
def get_and_reserve_spendable_utxos(transaction: sqlite3.Connection, accounts: List, amount_to_reserve: int, floor: int,
fee_per_byte: int, set_reserved: bool, return_insufficient_funds: bool,
base_multiplier: int = 100):
txs = defaultdict(list)
decoded_transactions = {}
reserved = []
reserved_dewies = 0
multiplier = base_multiplier
gap_count = 0
while reserved_dewies < amount_to_reserve and gap_count < 5 and floor * multiplier < SQLITE_MAX_INTEGER:
previous_reserved_dewies = reserved_dewies
reserved_dewies = _get_spendable_utxos(
transaction, accounts, decoded_transactions, txs, reserved, amount_to_reserve, reserved_dewies,
floor, floor * multiplier, fee_per_byte
)
floor *= multiplier
if previous_reserved_dewies == reserved_dewies:
gap_count += 1
multiplier **= 2
else:
gap_count = 0
multiplier = base_multiplier
# reserve the accumulated txos if enough were found
if reserved_dewies >= amount_to_reserve:
if set_reserved:
transaction.executemany("UPDATE txo SET is_reserved = ? WHERE txoid = ?",
[(True, txoid) for txoid in reserved]).fetchall()
return txs
# return_insufficient_funds and set_reserved are used for testing
return txs if return_insufficient_funds else {}
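# Illustrative trace of the widening search, with floor=1 and base_multiplier=100:
# the first pass scans txo amounts in [1, 100); if it reserves nothing new,
# gap_count becomes 1 and the multiplier squares to 10000, so the next pass
# scans [100, 1000000). The loop gives up after five consecutive empty passes
# or once floor * multiplier would exceed SQLITE_MAX_INTEGER, returning
# whatever it managed to reserve.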
class Database(SQLiteMixin):
SCHEMA_VERSION = "1.5"
PRAGMAS = """
pragma journal_mode=WAL;
"""
CREATE_ACCOUNT_TABLE = """
create table if not exists account_address (
account text not null,
address text not null,
chain integer not null,
pubkey blob not null,
chain_code blob not null,
n integer not null,
depth integer not null,
primary key (account, address)
);
create index if not exists address_account_idx on account_address (address, account);
"""
CREATE_PUBKEY_ADDRESS_TABLE = """
create table if not exists pubkey_address (
address text primary key,
history text,
used_times integer not null default 0
);
"""
CREATE_TX_TABLE = """
create table if not exists tx (
txid text primary key,
raw blob not null,
height integer not null,
position integer not null,
is_verified boolean not null default 0,
purchased_claim_id text,
day integer
);
create index if not exists tx_purchased_claim_id_idx on tx (purchased_claim_id);
"""
CREATE_TXO_TABLE = """
create table if not exists txo (
txid text references tx,
txoid text primary key,
address text references pubkey_address,
position integer not null,
amount integer not null,
script blob not null,
is_reserved boolean not null default 0,
txo_type integer not null default 0,
claim_id text,
claim_name text,
channel_id text,
reposted_claim_id text
);
create index if not exists txo_txid_idx on txo (txid);
create index if not exists txo_address_idx on txo (address);
create index if not exists txo_claim_id_idx on txo (claim_id, txo_type);
create index if not exists txo_claim_name_idx on txo (claim_name);
create index if not exists txo_txo_type_idx on txo (txo_type);
create index if not exists txo_channel_id_idx on txo (channel_id);
create index if not exists txo_reposted_claim_idx on txo (reposted_claim_id);
"""
CREATE_TXI_TABLE = """
create table if not exists txi (
txid text references tx,
txoid text references txo primary key,
address text references pubkey_address,
position integer not null
);
create index if not exists txi_address_idx on txi (address);
create index if not exists first_input_idx on txi (txid, address) where position=0;
"""
CREATE_TABLES_QUERY = (
PRAGMAS +
CREATE_ACCOUNT_TABLE +
CREATE_PUBKEY_ADDRESS_TABLE +
CREATE_TX_TABLE +
CREATE_TXO_TABLE +
CREATE_TXI_TABLE
)
async def open(self):
await super().open()
self.db.writer_connection.row_factory = dict_row_factory
def txo_to_row(self, tx, txo):
row = {
'txid': tx.id,
'txoid': txo.id,
'address': txo.get_address(self.ledger),
'position': txo.position,
'amount': txo.amount,
'script': sqlite3.Binary(txo.script.source)
}
if txo.is_claim:
if txo.can_decode_claim:
claim = txo.claim
row['txo_type'] = TXO_TYPES.get(claim.claim_type, TXO_TYPES['stream'])
if claim.is_repost:
row['reposted_claim_id'] = claim.repost.reference.claim_id
if claim.is_signed:
row['channel_id'] = claim.signing_channel_id
else:
row['txo_type'] = TXO_TYPES['stream']
elif txo.is_support:
row['txo_type'] = TXO_TYPES['support']
elif txo.purchase is not None:
row['txo_type'] = TXO_TYPES['purchase']
row['claim_id'] = txo.purchased_claim_id
if txo.script.is_claim_involved:
row['claim_id'] = txo.claim_id
row['claim_name'] = txo.claim_name
return row
def tx_to_row(self, tx):
row = {
'txid': tx.id,
'raw': sqlite3.Binary(tx.raw),
'height': tx.height,
'position': tx.position,
'is_verified': tx.is_verified,
'day': tx.get_julian_day(self.ledger),
}
txos = tx.outputs
if len(txos) >= 2 and txos[1].can_decode_purchase_data:
txos[0].purchase = txos[1]
row['purchased_claim_id'] = txos[1].purchase_data.claim_id
return row
async def insert_transaction(self, tx):
await self.db.execute_fetchall(*self._insert_sql('tx', self.tx_to_row(tx)))
async def update_transaction(self, tx):
await self.db.execute_fetchall(*self._update_sql("tx", {
'height': tx.height, 'position': tx.position, 'is_verified': tx.is_verified
}, 'txid = ?', (tx.id,)))
def _transaction_io(self, conn: sqlite3.Connection, tx: Transaction, address, txhash):
conn.execute(*self._insert_sql('tx', self.tx_to_row(tx), replace=True)).fetchall()
is_my_input = False
for txi in tx.inputs:
if txi.txo_ref.txo is not None:
txo = txi.txo_ref.txo
if txo.has_address and txo.get_address(self.ledger) == address:
is_my_input = True
conn.execute(*self._insert_sql("txi", {
'txid': tx.id,
'txoid': txo.id,
'address': address,
'position': txi.position
}, ignore_duplicate=True)).fetchall()
for txo in tx.outputs:
if txo.script.is_pay_pubkey_hash and (txo.pubkey_hash == txhash or is_my_input):
conn.execute(*self._insert_sql(
"txo", self.txo_to_row(tx, txo), ignore_duplicate=True
)).fetchall()
elif txo.script.is_pay_script_hash:
# TODO: implement script hash payments
log.warning('Database.save_transaction_io: pay script hash is not implemented!')
def save_transaction_io(self, tx: Transaction, address, txhash, history):
return self.save_transaction_io_batch([tx], address, txhash, history)
def save_transaction_io_batch(self, txs: Iterable[Transaction], address, txhash, history):
history_count = history.count(':') // 2
def __many(conn):
for tx in txs:
self._transaction_io(conn, tx, address, txhash)
conn.execute(
"UPDATE pubkey_address SET history = ?, used_times = ? WHERE address = ?",
(history, history_count, address)
).fetchall()
return self.db.run(__many)
async def reserve_outputs(self, txos, is_reserved=True):
txoids = [(is_reserved, txo.id) for txo in txos]
await self.db.executemany("UPDATE txo SET is_reserved = ? WHERE txoid = ?", txoids)
async def release_outputs(self, txos):
await self.reserve_outputs(txos, is_reserved=False)
async def rewind_blockchain(self, above_height): # pylint: disable=no-self-use
# TODO:
# 1. delete transactions above_height
# 2. update address histories removing deleted TXs
return True
async def get_spendable_utxos(self, ledger, reserve_amount, accounts: Optional[Iterable], min_amount: int = 1,
fee_per_byte: int = 50, set_reserved: bool = True,
return_insufficient_funds: bool = False) -> List:
to_spend = await self.db.run(
get_and_reserve_spendable_utxos, tuple(account.id for account in accounts), reserve_amount, min_amount,
fee_per_byte, set_reserved, return_insufficient_funds
)
txos = []
for (raw, height, verified), positions in to_spend.items():
tx = Transaction(raw, height=height, is_verified=verified)
for nout in positions:
txos.append(tx.outputs[nout].get_estimator(ledger))
return txos
async def select_transactions(self, cols, accounts=None, read_only=False, **constraints):
if not {'txid', 'txid__in'}.intersection(constraints):
assert accounts, "'accounts' argument required when no 'txid' constraint is present"
where, values = constraints_to_sql({
'$$account_address.account__in': [a.public_key.address for a in accounts]
})
constraints['txid__in'] = f"""
SELECT txo.txid FROM txo JOIN account_address USING (address) WHERE {where}
UNION
SELECT txi.txid FROM txi JOIN account_address USING (address) WHERE {where}
"""
constraints.update(values)
return await self.db.execute_fetchall(
*query(f"SELECT {cols} FROM tx", **constraints), read_only=read_only
)
TXO_NOT_MINE = Output(None, None, is_my_output=False)
async def get_transactions(self, wallet=None, **constraints):
include_is_spent = constraints.pop('include_is_spent', False)
include_is_my_input = constraints.pop('include_is_my_input', False)
include_is_my_output = constraints.pop('include_is_my_output', False)
tx_rows = await self.select_transactions(
'txid, raw, height, position, is_verified',
order_by=constraints.pop('order_by', ["height=0 DESC", "height DESC", "position DESC"]),
**constraints
)
if not tx_rows:
return []
txids, txs, txi_txoids = [], [], []
for row in tx_rows:
txids.append(row['txid'])
txs.append(Transaction(
raw=row['raw'], height=row['height'], position=row['position'],
is_verified=bool(row['is_verified'])
))
for txi in txs[-1].inputs:
txi_txoids.append(txi.txo_ref.id)
step = self.MAX_QUERY_VARIABLES
annotated_txos = {}
for offset in range(0, len(txids), step):
annotated_txos.update({
txo.id: txo for txo in
(await self.get_txos(
wallet=wallet,
txid__in=txids[offset:offset+step], order_by='txo.txid',
include_is_spent=include_is_spent,
include_is_my_input=include_is_my_input,
include_is_my_output=include_is_my_output,
))
})
referenced_txos = {}
for offset in range(0, len(txi_txoids), step):
referenced_txos.update({
txo.id: txo for txo in
(await self.get_txos(
wallet=wallet,
txoid__in=txi_txoids[offset:offset+step], order_by='txo.txoid',
include_is_my_output=include_is_my_output,
))
})
for tx in txs:
for txi in tx.inputs:
txo = referenced_txos.get(txi.txo_ref.id)
if txo:
txi.txo_ref = txo.ref
for txo in tx.outputs:
_txo = annotated_txos.get(txo.id)
if _txo:
txo.update_annotations(_txo)
else:
txo.update_annotations(self.TXO_NOT_MINE)
for tx in txs:
txos = tx.outputs
if len(txos) >= 2 and txos[1].can_decode_purchase_data:
txos[0].purchase = txos[1]
return txs
async def get_transaction_count(self, **constraints):
constraints.pop('wallet', None)
constraints.pop('offset', None)
constraints.pop('limit', None)
constraints.pop('order_by', None)
count = await self.select_transactions('COUNT(*) as total', **constraints)
return count[0]['total'] or 0
async def get_transaction(self, **constraints):
txs = await self.get_transactions(limit=1, **constraints)
if txs:
return txs[0]
async def select_txos(
self, cols, accounts=None, is_my_input=None, is_my_output=True,
is_my_input_or_output=None, exclude_internal_transfers=False,
include_is_spent=False, include_is_my_input=False,
is_spent=None, read_only=False, **constraints):
for rename_col in ('txid', 'txoid'):
for rename_constraint in (rename_col, rename_col+'__in', rename_col+'__not_in'):
if rename_constraint in constraints:
constraints['txo.'+rename_constraint] = constraints.pop(rename_constraint)
if accounts:
account_in_sql, values = constraints_to_sql({
'$$account__in': [a.public_key.address for a in accounts]
})
my_addresses = f"SELECT address FROM account_address WHERE {account_in_sql}"
constraints.update(values)
if is_my_input_or_output:
include_is_my_input = True
constraints['received_or_sent__or'] = {
'txo.address__in': my_addresses,
'sent__and': {
'txi.address__is_not_null': True,
'txi.address__in': my_addresses
}
}
else:
if is_my_output:
constraints['txo.address__in'] = my_addresses
elif is_my_output is False:
constraints['txo.address__not_in'] = my_addresses
if is_my_input:
include_is_my_input = True
constraints['txi.address__is_not_null'] = True
constraints['txi.address__in'] = my_addresses
elif is_my_input is False:
include_is_my_input = True
constraints['is_my_input_false__or'] = {
'txi.address__is_null': True,
'txi.address__not_in': my_addresses
}
if exclude_internal_transfers:
include_is_my_input = True
constraints['exclude_internal_payments__or'] = {
'txo.txo_type__not': TXO_TYPES['other'],
'txo.address__not_in': my_addresses,
'txi.address__is_null': True,
'txi.address__not_in': my_addresses,
}
sql = [f"SELECT {cols} FROM txo JOIN tx ON (tx.txid=txo.txid)"]
if is_spent:
constraints['spent.txoid__is_not_null'] = True
elif is_spent is False:
constraints['is_reserved'] = False
constraints['spent.txoid__is_null'] = True
if include_is_spent or is_spent is not None:
sql.append("LEFT JOIN txi AS spent ON (spent.txoid=txo.txoid)")
if include_is_my_input:
sql.append("LEFT JOIN txi ON (txi.position=0 AND txi.txid=txo.txid)")
return await self.db.execute_fetchall(*query(' '.join(sql), **constraints), read_only=read_only)
async def get_txos(self, wallet=None, no_tx=False, read_only=False, **constraints):
include_is_spent = constraints.get('include_is_spent', False)
include_is_my_input = constraints.get('include_is_my_input', False)
include_is_my_output = constraints.pop('include_is_my_output', False)
include_received_tips = constraints.pop('include_received_tips', False)
select_columns = [
"tx.txid, raw, tx.height, tx.position as tx_position, tx.is_verified, "
"txo_type, txo.position as txo_position, amount, script"
]
my_accounts = {a.public_key.address for a in wallet.accounts} if wallet else set()
my_accounts_sql = ""
if include_is_my_output or include_is_my_input:
my_accounts_sql, values = constraints_to_sql({'$$account__in#_wallet': my_accounts})
constraints.update(values)
if include_is_my_output and my_accounts:
if constraints.get('is_my_output', None) in (True, False):
select_columns.append(f"{1 if constraints['is_my_output'] else 0} AS is_my_output")
else:
select_columns.append(f"""(
txo.address IN (SELECT address FROM account_address WHERE {my_accounts_sql})
) AS is_my_output""")
if include_is_my_input and my_accounts:
if constraints.get('is_my_input', None) in (True, False):
select_columns.append(f"{1 if constraints['is_my_input'] else 0} AS is_my_input")
else:
select_columns.append(f"""(
txi.address IS NOT NULL AND
txi.address IN (SELECT address FROM account_address WHERE {my_accounts_sql})
) AS is_my_input""")
if include_is_spent:
select_columns.append("spent.txoid IS NOT NULL AS is_spent")
if include_received_tips:
select_columns.append(f"""(
SELECT COALESCE(SUM(support.amount), 0) FROM txo AS support WHERE
support.claim_id = txo.claim_id AND
support.txo_type = {TXO_TYPES['support']} AND
support.address IN (SELECT address FROM account_address WHERE {my_accounts_sql}) AND
support.txoid NOT IN (SELECT txoid FROM txi)
) AS received_tips""")
if 'order_by' not in constraints or constraints['order_by'] == 'height':
constraints['order_by'] = [
"tx.height=0 DESC", "tx.height DESC", "tx.position DESC", "txo.position"
]
elif constraints.get('order_by', None) == 'none':
del constraints['order_by']
rows = await self.select_txos(', '.join(select_columns), read_only=read_only, **constraints)
txos = []
txs = {}
for row in rows:
if no_tx:
txo = Output(
amount=row['amount'],
script=OutputScript(row['script']),
tx_ref=TXRefImmutable.from_id(row['txid'], row['height']),
position=row['txo_position']
)
else:
if row['txid'] not in txs:
txs[row['txid']] = Transaction(
row['raw'], height=row['height'], position=row['tx_position'],
is_verified=bool(row['is_verified'])
)
txo = txs[row['txid']].outputs[row['txo_position']]
if include_is_spent:
txo.is_spent = bool(row['is_spent'])
if include_is_my_input:
txo.is_my_input = bool(row['is_my_input'])
if include_is_my_output:
txo.is_my_output = bool(row['is_my_output'])
if include_is_my_input and include_is_my_output:
if txo.is_my_input and txo.is_my_output and row['txo_type'] == TXO_TYPES['other']:
txo.is_internal_transfer = True
else:
txo.is_internal_transfer = False
if include_received_tips:
txo.received_tips = row['received_tips']
txos.append(txo)
channel_ids = set()
for txo in txos:
if txo.is_claim and txo.can_decode_claim:
if txo.claim.is_signed:
channel_ids.add(txo.claim.signing_channel_id)
if txo.claim.is_channel and wallet:
for account in wallet.accounts:
private_key = await account.get_channel_private_key(
txo.claim.channel.public_key_bytes
)
if private_key:
txo.private_key = private_key
break
if channel_ids:
channels = {
txo.claim_id: txo for txo in
(await self.get_channels(
wallet=wallet,
claim_id__in=channel_ids,
read_only=read_only
))
}
for txo in txos:
if txo.is_claim and txo.can_decode_claim:
txo.channel = channels.get(txo.claim.signing_channel_id, None)
return txos
@staticmethod
def _clean_txo_constraints_for_aggregation(constraints):
constraints.pop('include_is_spent', None)
constraints.pop('include_is_my_input', None)
constraints.pop('include_is_my_output', None)
constraints.pop('include_received_tips', None)
constraints.pop('wallet', None)
constraints.pop('resolve', None)
constraints.pop('offset', None)
constraints.pop('limit', None)
constraints.pop('order_by', None)
async def get_txo_count(self, **constraints):
self._clean_txo_constraints_for_aggregation(constraints)
count = await self.select_txos('COUNT(*) AS total', **constraints)
return count[0]['total'] or 0
async def get_txo_sum(self, **constraints):
self._clean_txo_constraints_for_aggregation(constraints)
result = await self.select_txos('SUM(amount) AS total', **constraints)
return result[0]['total'] or 0
async def get_txo_plot(self, start_day=None, days_back=0, end_day=None, days_after=None, **constraints):
self._clean_txo_constraints_for_aggregation(constraints)
if start_day is None:
constraints['day__gte'] = self.ledger.headers.estimated_julian_day(
self.ledger.headers.height
) - days_back
else:
constraints['day__gte'] = date_to_julian_day(
date.fromisoformat(start_day)
)
if end_day is not None:
constraints['day__lte'] = date_to_julian_day(
date.fromisoformat(end_day)
)
elif days_after is not None:
constraints['day__lte'] = constraints['day__gte'] + days_after
return await self.select_txos(
"DATE(day) AS day, SUM(amount) AS total",
group_by='day', order_by='day', **constraints
)
def get_utxos(self, read_only=False, **constraints):
return self.get_txos(is_spent=False, read_only=read_only, **constraints)
def get_utxo_count(self, **constraints):
return self.get_txo_count(is_spent=False, **constraints)
async def get_balance(self, wallet=None, accounts=None, read_only=False, **constraints):
assert wallet or accounts, \
"'wallet' or 'accounts' constraints required to calculate balance"
constraints['accounts'] = accounts or wallet.accounts
balance = await self.select_txos(
'SUM(amount) as total', is_spent=False, read_only=read_only, **constraints
)
return balance[0]['total'] or 0
async def select_addresses(self, cols, read_only=False, **constraints):
return await self.db.execute_fetchall(*query(
f"SELECT {cols} FROM pubkey_address JOIN account_address USING (address)",
**constraints
), read_only=read_only)
async def get_addresses(self, cols=None, read_only=False, **constraints):
cols = cols or (
'address', 'account', 'chain', 'history', 'used_times',
'pubkey', 'chain_code', 'n', 'depth'
)
addresses = await self.select_addresses(', '.join(cols), read_only=read_only, **constraints)
if 'pubkey' in cols:
for address in addresses:
address['pubkey'] = PubKey(
self.ledger, address.pop('pubkey'), address.pop('chain_code'),
address.pop('n'), address.pop('depth')
)
return addresses
async def get_address_count(self, cols=None, read_only=False, **constraints):
count = await self.select_addresses('COUNT(*) as total', read_only=read_only, **constraints)
return count[0]['total'] or 0
async def get_address(self, read_only=False, **constraints):
addresses = await self.get_addresses(read_only=read_only, limit=1, **constraints)
if addresses:
return addresses[0]
async def add_keys(self, account, chain, pubkeys):
await self.db.executemany(
"insert or ignore into account_address "
"(account, address, chain, pubkey, chain_code, n, depth) values "
"(?, ?, ?, ?, ?, ?, ?)", ((
account.id, k.address, chain,
sqlite3.Binary(k.pubkey_bytes),
sqlite3.Binary(k.chain_code),
k.n, k.depth
) for k in pubkeys)
)
await self.db.executemany(
"insert or ignore into pubkey_address (address) values (?)",
((pubkey.address,) for pubkey in pubkeys)
)
async def _set_address_history(self, address, history):
await self.db.execute_fetchall(
"UPDATE pubkey_address SET history = ?, used_times = ? WHERE address = ?",
(history, history.count(':')//2, address)
)
async def set_address_history(self, address, history):
await self._set_address_history(address, history)
@staticmethod
def constrain_purchases(constraints):
accounts = constraints.pop('accounts', None)
assert accounts, "'accounts' argument required to find purchases"
if not {'purchased_claim_id', 'purchased_claim_id__in'}.intersection(constraints):
constraints['purchased_claim_id__is_not_null'] = True
constraints.update({
f'$account{i}': a.public_key.address for i, a in enumerate(accounts)
})
account_values = ', '.join([f':$account{i}' for i in range(len(accounts))])
constraints['txid__in'] = f"""
SELECT txid FROM txi JOIN account_address USING (address)
WHERE account_address.account IN ({account_values})
"""
async def get_purchases(self, **constraints):
self.constrain_purchases(constraints)
return [tx.outputs[0] for tx in await self.get_transactions(**constraints)]
def get_purchase_count(self, **constraints):
self.constrain_purchases(constraints)
return self.get_transaction_count(**constraints)
@staticmethod
def constrain_claims(constraints):
if {'txo_type', 'txo_type__in'}.intersection(constraints):
    return
claim_types = constraints.pop('claim_type', None)
if claim_types:
    constrain_single_or_list(
        constraints, 'txo_type', claim_types, lambda x: TXO_TYPES[x]
    )
else:
    constraints['txo_type__in'] = CLAIM_TYPES
async def get_claims(self, read_only=False, **constraints) -> List[Output]:
self.constrain_claims(constraints)
return await self.get_utxos(read_only=read_only, **constraints)
def get_claim_count(self, **constraints):
self.constrain_claims(constraints)
return self.get_utxo_count(**constraints)
@staticmethod
def constrain_streams(constraints):
constraints['txo_type'] = TXO_TYPES['stream']
def get_streams(self, read_only=False, **constraints):
self.constrain_streams(constraints)
return self.get_claims(read_only=read_only, **constraints)
def get_stream_count(self, **constraints):
self.constrain_streams(constraints)
return self.get_claim_count(**constraints)
@staticmethod
def constrain_channels(constraints):
constraints['txo_type'] = TXO_TYPES['channel']
def get_channels(self, **constraints):
self.constrain_channels(constraints)
return self.get_claims(**constraints)
def get_channel_count(self, **constraints):
self.constrain_channels(constraints)
return self.get_claim_count(**constraints)
@staticmethod
def constrain_supports(constraints):
constraints['txo_type'] = TXO_TYPES['support']
def get_supports(self, **constraints):
self.constrain_supports(constraints)
return self.get_utxos(**constraints)
def get_support_count(self, **constraints):
self.constrain_supports(constraints)
return self.get_utxo_count(**constraints)
@staticmethod
def constrain_collections(constraints):
constraints['txo_type'] = TXO_TYPES['collection']
def get_collections(self, **constraints):
self.constrain_collections(constraints)
return self.get_utxos(**constraints)
def get_collection_count(self, **constraints):
self.constrain_collections(constraints)
return self.get_utxo_count(**constraints)
async def release_all_outputs(self, account=None):
if account is None:
await self.db.execute_fetchall("UPDATE txo SET is_reserved = 0 WHERE is_reserved = 1")
else:
await self.db.execute_fetchall(
"UPDATE txo SET is_reserved = 0 WHERE"
" is_reserved = 1 AND txo.address IN ("
" SELECT address from account_address WHERE account = ?"
" )", (account.public_key.address, )
)
def get_supports_summary(self, read_only=False, **constraints):
return self.get_txos(
txo_type=TXO_TYPES['support'],
is_spent=False, is_my_output=True,
include_is_my_input=True,
no_tx=True, read_only=read_only,
**constraints
)
upload_test.go | package upload
import (
"github.com/jamespfennell/hoard/config"
"github.com/jamespfennell/hoard/internal/actions"
"github.com/jamespfennell/hoard/internal/archive"
"github.com/jamespfennell/hoard/internal/storage"
"github.com/jamespfennell/hoard/internal/storage/dstore"
"github.com/jamespfennell/hoard/internal/storage/hour"
"github.com/jamespfennell/hoard/internal/util/testutil"
"testing"
)
var h = hour.Date(2000, 1, 2, 3)
var feed = &config.Feed{}
func TestOnce(t *testing.T) {
	session := actions.NewInMemorySession(feed)
	localAStore := session.LocalAStore()
remoteAStore := session.RemoteAStore()
testutil.CreateArchiveFromData(t, feed, localAStore, testutil.Data[0], testutil.Data[1])
testutil.CreateArchiveFromData(t, feed, remoteAStore, testutil.Data[1], testutil.Data[3])
// createArchive(t, localAStore, d1, b1, d2, b2)
// createArchive(t, remoteAStore, d2, b2, d3, b3)
err := RunOnce(session, false)
testutil.ErrorOrFail(t, err)
localAFiles, err := storage.ListAFilesInHour(localAStore, h)
if err != nil {
t.Errorf("Unexpected error in ListInHour: %s\n", err)
}
if len(localAFiles) != 0 {
t.Errorf("Unexpected number of AFiles: 0 != %d\n", len(localAFiles))
}
remoteAFiles, err := storage.ListAFilesInHour(remoteAStore, h)
if err != nil {
t.Errorf("Unexpected error in ListInHour: %s\n", err)
}
if len(remoteAFiles) != 1 {
t.Errorf("Unexpected number of AFiles: 1 != %d\n", len(remoteAFiles))
}
aFile := remoteAFiles[0]
dStore := dstore.NewInMemoryDStore()
err = archive.Unpack(aFile, remoteAStore, dStore)
if err != nil {
t.Errorf("Unexpected error deserializing archive: %s\n", err)
}
testutil.ExpectDStoreHasExactlyDFiles(t, dStore, testutil.Data[0], testutil.Data[1], testutil.Data[3])
}
util.ts | import { RPC, normalizers } from "ckb-js-toolkit";
import {
CellDep,
HexString,
HexNumber,
Hash,
Indexer,
Script,
} from "@ckb-lumos/base";
import {
generateDepositLock,
DepositLockArgs,
getDepositLockArgs,
serializeArgs,
} from "../js/transactions/deposit";
import { deploymentConfig } from "../js/utils/deployment_config";
import base from "@ckb-lumos/base";
import { key } from "@ckb-lumos/hd";
import crypto from "crypto";
import keccak256 from "keccak256";
import { getConfig } from "@ckb-lumos/config-manager";
import { generateAddress } from "@ckb-lumos/helpers";
import TOML from '@iarna/toml';
import fs from 'fs';
import path from 'path';
import { getRollupTypeHash } from "../js/transactions/deposit";
export function asyncSleep(ms = 0) {
return new Promise((r) => setTimeout(r, ms));
}
export async function waitForBlockSync(
indexer: Indexer,
rpc: RPC,
blockHash?: Hash,
blockNumber?: bigint
) {
if (!blockNumber) {
const header = await rpc.get_header(blockHash);
blockNumber = BigInt(header.number);
}
while (true) {
await indexer.waitForSync();
const tip = await indexer.tip();
if (tip) {
const indexedNumber = BigInt(tip.block_number);
if (indexedNumber >= blockNumber) {
// TODO: do we need to handle forks?
break;
}
}
await asyncSleep(2000);
}
}
export function caculateChainId(creator_id: number, compatible_chain_id: number){
console.log(creator_id);
const chain_id_num = ( compatible_chain_id * Math.pow(2, 32) ) + creator_id;
return '0x' + BigInt(chain_id_num).toString(16);
}
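// Illustrative example: with compatible_chain_id = 1 and creator_id = 3,
// chain_id_num = 1 * 2**32 + 3 = 4294967299, so the function returns
// '0x100000003'.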
export function caculateLayer2LockScriptHash(layer2LockArgs: string) {
const rollup_type_hash = getRollupTypeHash();
const script = {
code_hash: deploymentConfig.eth_account_lock.code_hash,
hash_type: deploymentConfig.eth_account_lock.hash_type,
args: rollup_type_hash + layer2LockArgs.slice(2),
};
return base.utils
.ckbHash(base.core.SerializeScript(normalizers.NormalizeScript(script)))
.serializeJson();
}
export function serializeScript(script: Script) {
return base.utils
.ckbHash(base.core.SerializeScript(normalizers.NormalizeScript(script)))
.serializeJson();
}
export function privateKeyToCkbAddress(privateKey: HexString): string {
const publicKey = key.privateToPublic(privateKey);
const publicKeyHash = key.publicKeyToBlake160(publicKey);
const scriptConfig = getConfig().SCRIPTS.SECP256K1_BLAKE160!;
const script = {
code_hash: scriptConfig.CODE_HASH,
hash_type: scriptConfig.HASH_TYPE,
args: publicKeyHash,
};
const address = generateAddress(script);
return address;
}
export function privateKeyToEthAddress(privateKey: HexString) {
const ecdh = crypto.createECDH(`secp256k1`);
ecdh.generateKeys();
ecdh.setPrivateKey(Buffer.from(privateKey.slice(2), "hex"));
const publicKey: string = "0x" + ecdh.getPublicKey("hex", "uncompressed");
const ethAddress =
"0x" +
keccak256(Buffer.from(publicKey.slice(4), "hex"))
.slice(12)
.toString("hex");
return ethAddress;
}
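// The uncompressed public key is 0x04 || X || Y; slice(4) drops the "0x04"
// prefix, and the address is the last 20 bytes of keccak256(X || Y).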
export async function generateGodwokenConfig(_input_file: string, _output_file: string) {
const toml_path: string = path.resolve(__dirname, _input_file);
const toml_file_str = fs.readFileSync(toml_path).toString();
const toml_file_obj = TOML.parse(toml_file_str);
const json_path = path.resolve(__dirname, _output_file);
fs.writeFileSync(json_path, JSON.stringify(toml_file_obj, null, 2));
console.log(`create godwoken_config.json file in ${json_path}. done.`);
}
export function UInt32ToLeBytes(num: number): HexString {
const buf = Buffer.allocUnsafe(4);
buf.writeUInt32LE(+num, 0);
return '0x' + buf.toString('hex');
}
export function toBigUInt64LE(num: number | bigint) {
const bnum = BigInt(num);
const buf = Buffer.alloc(8);
buf.writeBigUInt64LE(bnum);
return `0x${buf.toString("hex")}`;
}
export function toBigUInt64BE(num:number | bigint) {
const bnum = BigInt(num);
const buf = Buffer.alloc(8);
buf.writeBigUInt64BE(bnum);
return `0x${buf.toString("hex")}`;
}
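// Illustrative examples: toBigUInt64LE(258) -> '0x0201000000000000' and
// toBigUInt64BE(258) -> '0x0000000000000102' (258 = 0x0102).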
// todo: refactor this function to suit ts strict mode
export function deepCompare(o: object, p: object)
{
var i: number,
keysO = Object.keys(o).sort(),
keysP = Object.keys(p).sort();
if (keysO.length !== keysP.length)
return false;//not the same nr of keys
if (keysO.join('') !== keysP.join(''))
return false;//different keys
for (i=0;i<keysO.length;++i)
{
if (o[keysO[i]] instanceof Array)
{
if (!(p[keysO[i]] instanceof Array))
return false;
//if (compareObjects(o[keysO[i]], p[keysO[i]] === false) return false
//would work, too, and perhaps is a better fit, still, this is easy, too
if (p[keysO[i]].sort().join('') !== o[keysO[i]].sort().join(''))
return false;
}
else if (o[keysO[i]] instanceof Date)
{
if (!(p[keysO[i]] instanceof Date))
return false;
if ((''+o[keysO[i]]) !== (''+p[keysO[i]]))
return false;
}
else if (o[keysO[i]] instanceof Function)
{
if (!(p[keysO[i]] instanceof Function))
return false;
//ignore functions, or check them regardless?
}
else if (o[keysO[i]] instanceof Object)
{
if (!(p[keysO[i]] instanceof Object))
return false;
if (o[keysO[i]] === o)
{//self reference?
if (p[keysO[i]] !== p)
return false;
}
else if (deepCompare(o[keysO[i]], p[keysO[i]]) === false)
return false;//WARNING: does not deal with circular refs other than ^^
}
else if (o[keysO[i]] !== p[keysO[i]])//change !== to != for loose comparison
return false;//not the same value
}
return true;
}
// todo: refactor this class to fit ts strict mode
export class DeepDiffMapper {
private VALUE_CREATED = 'created';
private VALUE_UPDATED = 'updated';
private VALUE_DELETED = 'deleted';
private VALUE_UNCHANGED = 'unchanged';
constructor(){
}
map (obj1: object, obj2: object) {
if (this.isFunction(obj1) || this.isFunction(obj2)) {
throw 'Invalid argument. Function given, object expected.';
}
if (this.isValue(obj1) || this.isValue(obj2)) {
return {
type: this.compareValues(obj1, obj2),
data: obj1 === undefined ? obj2 : obj1
};
}
var diff = {};
for (var key in obj1) {
if (this.isFunction(obj1[key])) {
continue;
}
var value2 = undefined;
if (obj2[key] !== undefined) {
value2 = obj2[key];
}
diff[key] = this.map(obj1[key], value2);
}
for (var key in obj2) {
if (this.isFunction(obj2[key]) || diff[key] !== undefined) {
continue;
}
diff[key] = this.map(undefined, obj2[key]);
}
return diff;
}
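// Illustrative example: map({a: 1, b: 2}, {a: 1, b: 3}) returns
// { a: { type: 'unchanged', data: 1 }, b: { type: 'updated', data: 2 } }.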
// extract the updated parts in diff result from map method.
async filterDiff (diff_obj: any, target=[], paths=[]) {
for await (const [key, value] of Object.entries(diff_obj)) {
if(key === "type" && value === "unchanged"){
// reached the end of one unchanged part, let's clear the last parent paths.
delete paths[paths.length-1];
}
if(key === "type" && value !== "unchanged"){
var path_str = paths.join('.');
// remove the dead end branch path
path_str = path_str.replace(/([A-Za-z0-9_]+\.(\.+))/g, '');
// push the result into target
const item: any = {};
item[path_str] = diff_obj;
target.push( item );
break;
}
if( this.isObject(value) ){
paths.push(key);
await this.filterDiff(value, target, paths);
}
}
return target;
}
compareValues (value1: any, value2: any) {
if (value1 === value2) {
return this.VALUE_UNCHANGED;
}
if (this.isDate(value1) && this.isDate(value2) && value1.getTime() === value2.getTime()) {
return this.VALUE_UNCHANGED;
}
if (value1 === undefined) {
return this.VALUE_CREATED;
}
if (value2 === undefined) {
return this.VALUE_DELETED;
}
return this.VALUE_UPDATED;
}
isFunction (x: any) {
return Object.prototype.toString.call(x) === '[object Function]';
}
isArray (x: any) {
return Object.prototype.toString.call(x) === '[object Array]';
}
isDate (x: any) {
return Object.prototype.toString.call(x) === '[object Date]';
}
isObject (x: any) {
return Object.prototype.toString.call(x) === '[object Object]';
}
isValue(x: any) {
return !this.isObject(x) && !this.isArray(x);
}
}
brain.py | r"""
This program opens, manages and closes trading positions.
It saves each position to the database and to an Excel file,
displays it in the terminal, then sends a notification to the Telegram group.
"""
import time,datetime,os,sys,requests
from brokerconnection import realcommands
from prediction import Prediction
from settings import Settings
from database import Database
from position import Position
from csv_generator import csv_generator
#Definition of variables
Position=Position()
Settings=Settings()
Database=Database()
def current_second():
return datetime.datetime.now().second
def cls():
"""
This function clears the terminal to give a clear view of the printed output.
"""
os.system('cls' if os.name=='nt' else 'clear')
def open_position(symbol):
"""
This function sends an open order to the broker at the opening price,
then saves the data inside the Position class.
"""
if Position.backtesting == False:
order = realcommands().limit_open(symbol=symbol,backtesting=Position.backtesting)
if order['error']==True:
return False
Position.open_price=float(order['order']['price'])
current_price = Settings.broker.price(symbol)['ask']
else:
time.sleep(2)
current_price = Settings.broker.price(symbol)['ask']
Position.open_price = current_price
Position.symbol=symbol
Position.current_price =current_price
#Setting highest price and lowest price to the opening price
Position.highest_price=Position.open_price
Position.lowest_price=Position.open_price
Position.status='open'
Position.number+=1
Position.time=time.time()
return True
def close_position():
"""
This function sends a close order to the broker at market price,
then saves the data to an Excel spreadsheet.
"""
Position.status='close'
Position.stoploss = False
Position.effective_yield = effective_yield_calculation(Position.close_price,Position.open_price,Settings.fee)
Position.total_yield = round(Position.total_yield*Position.effective_yield,5)
if Position.total_yield > Position.highest_yield:
Position.highest_yield = Position.total_yield
save_position()
return
def save_position():
"""
This function sends a notification and adds the position information to the database.
"""
try:
date = time.time()
text=''
text+="\nRendement : "+str(round((Position.total_yield-1)*100,2))+' %'
program_notification(message=text)
#Saving position into database
Database.database_request(
sql=("""REPLACE INTO positions (paire,opening_date,closing_date,duration,opening_price,closing_price,exit_way,highest_price,lowest_price,position_yield,total_yield) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""),
params=(
Position.symbol,
datetime.datetime.fromtimestamp(Position.time),
datetime.datetime.fromtimestamp(date),
str(datetime.timedelta(seconds=round(date,0)-round(Position.time,0))),
Position.open_price,
Position.close_price,
Position.close_mode,
Position.highest_price,
Position.lowest_price,
Position.effective_yield,
Position.total_yield,
),
commit=True
)
return
except Exception as error:
program_notification(message=error)
def program_notification(message):
try:
token = "1813447727:AAHDPI54DetjXDDNFCMqtN-7phGvwNy9rqY"
chat_id = "-431364858"
url = f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}&text={message}"
requests.post(url)
except Exception as error:
print(error)
def effective_yield_calculation(current_price,opening_price,fee):
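    """
    Net yield of a round trip after fees. Hedged reading of the formula
    below: R is the gross close/open price ratio and F the per-side fee
    rate; the model appears to charge fee F on entry plus fee F on the
    (1-F)*R exit notional, i.e. roughly R*(1-F)**2 to first order.
    Example: R=1.01, F=0.001 -> 1.01 - 0.001 - 0.999*1.01*0.001 ≈ 1.00799
    """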
R = float(current_price)/float(opening_price)
F = float(fee)
return (R-(F+(1-F)*R*F))
def check_position():
"""
    Updates current_price, highest_price and lowest_price,
    then checks whether the position has to be closed.
"""
Position.current_price = Settings.broker.price(Position.symbol)['bid']
#Updating highest_price
if Position.current_price >Position.highest_price:
Pos | #Updating lowest_price
if Position.current_price <Position.lowest_price:
Position.lowest_price=Position.current_price
#Calculating current effective_yield
current_effective_yield = effective_yield_calculation(
current_price=Position.current_price,
opening_price=Position.open_price,
fee=Settings.fee
)
#Stop loss
#Close position :
if current_effective_yield < Settings.risk:
if Position.backtesting:
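            #Backtesting approximation: assume the stop-loss fills exactly at
            #the risk threshold instead of the last observed market price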
Position.close_price = Position.open_price*(Settings.risk)
else:
order = realcommands().limit_close(Position.symbol,backtesting=Position.backtesting)
Position.close_price = float(order['price'])
Position.close_mode = 'stoploss'
Position.number_lost+=1
close_position()
return
#Take profit on expected yield
#Closing on takeprofit : Check if the yield is stronger than the minimal yield considering fees and slippage
if current_effective_yield > Settings.expected_yield:
if Position.backtesting:
Position.close_price = Position.current_price
else:
order = realcommands().limit_close(symbol=Position.symbol,backtesting=Position.backtesting)
Position.close_price = float(order['price'])
Position.close_mode = 'takeprofit'
close_position()
return
def find_entry_point():
#Analyse market every second
#We use the watchlist defined in settings.py
for symbol in Settings.watchlist:
time.sleep(1)
try:
#We analyze the market with the signals defined inside prediction.py
predict = Prediction().buy_signal(symbol+'/'+Settings.base_asset)
#We clear the console
cls()
for values in predict:
print(values,':',predict[values],'\n')
#Give information about the program
statistics = {
'':'------------------------------ :',
'running_time':str(datetime.timedelta(seconds=round(time.time(),0)-round(Position.start_time,0))),
'current_status':Position.status,
'total_yield':str(round((Position.total_yield-1)*100,2))+' %',
'position_number':Position.number,
'position_lost':Position.number_lost,
}
for data, value in statistics.items():
print(data, ':', value, '\n')
#If we get a buy signal then :
if predict['signal'] == 'buy' and open_position(
symbol+'/'+Settings.base_asset
):
Settings.expected_yield = predict['recovery']
return predict
except Exception as error:
print('error while predicting : %s' % error)
def manage_position(predict):
#We clear the console
cls()
for values in predict:
print(values,':',predict[values],'\n')
current_effective_yield=effective_yield_calculation(Position.current_price,Position.open_price,Settings.fee)
#Give information about the program
statistics = {
'':'------------------------------ :',
'running_time':str(datetime.timedelta(seconds=round(time.time(),0)-round(Position.start_time,0))),
'current_status':Position.status,
'current_price':Position.current_price,
'open_price':Position.open_price,
'highest_price':Position.highest_price,
'lowest_price':Position.lowest_price,
'position_number':Position.number,
'position_yield':str(round((current_effective_yield-1)*100,2))+' %',
'total_yield':str(round((Position.total_yield*current_effective_yield-1)*100,2))+' %',
'number_lost':Position.number_lost,
'stoploss':Position.stoploss,
'current_position_time':str(datetime.timedelta(seconds=round(time.time(),0)-round(Position.time,0))),
}
for data, value__ in statistics.items():
print(data, ':', value__, '\n')
try:
#We check if we have to do something with the current position, update current price highest price and lowest price
check_position()
except Exception as e:
print(e)
#We slow down the requests
time.sleep(0.2)
def main():
'''Brain'''
#Check the correct version of python
if sys.version_info[0] < 3:
raise Exception("Python 3 or a more recent version is required.")
    #Test commands and the broker connection
if realcommands().test_connection():
print("Connected to market")
elif input("Unable to connect to market, run in backtesting mode? Y/N : ").upper()=='N':
return
else:
Position.backtesting = True
#Generates file with known supports
csv_generator()
    #Update the Telegram description with the settings
parameters = vars(Settings)
about = ''.join(
'\n' + str(param) + ' : ' + str(value_)
for param, value_ in parameters.items()
)
program_notification(about)
    #Initialize the program running time
Position.start_time = time.time()
print('---Starting Trading---')
    if Position.backtesting:
        program_notification("Starting program: "+Settings.program_name + " in demo mode \n"+Settings.commentaire)
    else:
        program_notification("Starting program: "+Settings.program_name + " in live mode \n"+Settings.commentaire)
while True:
try:
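            #Kill-switch: stop trading once the drawdown from the best
            #cumulative yield exceeds Settings.program_risk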
if Position.highest_yield-Position.total_yield > Settings.program_risk:
print("Program stopped : check strategy")
break
if Position.status == 'close':
predict = find_entry_point()
elif Position.status == 'open':
manage_position(predict)
except KeyboardInterrupt:
cls()
if Position.status == 'open':
if Position.backtesting:
Position.close_price = Position.current_price
else:
order = realcommands().limit_close(symbol=Position.symbol,backtesting=Position.backtesting)
Position.close_price = float(order['price'])
Position.close_mode = 'stopping program'
close_position()
print("---Ending Trading--")
break
program_notification("Arrêt du programme : "+Settings.program_name)
if __name__=='__main__':
main()
| ition.highest_price=Position.current_price
|
coinbasepro.py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
    basestring # Python 2
except NameError:
    basestring = str # Python 3
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import OnMaintenance
from ccxt.base.decimal_to_precision import TICK_SIZE
class coinbasepro(Exchange):
def describe(self):
return self.deep_extend(super(coinbasepro, self).describe(), {
'id': 'coinbasepro',
'name': 'Coinbase Pro',
'countries': ['US'],
'rateLimit': 1000,
'userAgent': self.userAgents['chrome'],
'pro': True,
'has': {
'cancelAllOrders': True,
'cancelOrder': True,
'CORS': True,
'createDepositAddress': True,
'createOrder': True,
'deposit': True,
'fetchAccounts': True,
'fetchBalance': True,
'fetchCurrencies': True,
'fetchClosedOrders': True,
'fetchDepositAddress': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchOrderTrades': True,
'fetchTime': True,
'fetchTicker': True,
'fetchTrades': True,
'fetchTransactions': True,
'withdraw': True,
},
'timeframes': {
'1m': 60,
'5m': 300,
'15m': 900,
'1h': 3600,
'6h': 21600,
'1d': 86400,
},
'urls': {
'test': {
'public': 'https://api-public.sandbox.pro.coinbase.com',
'private': 'https://api-public.sandbox.pro.coinbase.com',
},
'logo': 'https://user-images.githubusercontent.com/1294454/41764625-63b7ffde-760a-11e8-996d-a6328fa9347a.jpg',
'api': {
'public': 'https://api.pro.coinbase.com',
'private': 'https://api.pro.coinbase.com',
},
'www': 'https://pro.coinbase.com/',
'doc': 'https://docs.pro.coinbase.com',
'fees': [
'https://docs.pro.coinbase.com/#fees',
'https://support.pro.coinbase.com/customer/en/portal/articles/2945310-fees',
],
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'password': True,
},
'api': {
'public': {
'get': [
'currencies',
'products',
'products/{id}',
'products/{id}/book',
'products/{id}/candles',
'products/{id}/stats',
'products/{id}/ticker',
'products/{id}/trades',
'time',
],
},
'private': {
'get': [
'accounts',
'accounts/{id}',
'accounts/{id}/holds',
'accounts/{id}/ledger',
'accounts/{id}/transfers',
'coinbase-accounts',
'coinbase-accounts/{id}/addresses',
'fills',
'funding',
'fees',
'margin/profile_information',
'margin/buying_power',
'margin/withdrawal_power',
'margin/withdrawal_power_all',
'margin/exit_plan',
'margin/liquidation_history',
'margin/position_refresh_amounts',
'margin/status',
'oracle',
'orders',
'orders/{id}',
'orders/client:{client_oid}',
'otc/orders',
'payment-methods',
'position',
'profiles',
'profiles/{id}',
'reports/{report_id}',
'transfers',
'transfers/{transfer_id}',
'users/self/trailing-volume',
'users/self/exchange-limits',
'withdrawals/fee-estimate',
],
'post': [
'conversions',
'deposits/coinbase-account',
'deposits/payment-method',
'coinbase-accounts/{id}/addresses',
'funding/repay',
'orders',
'position/close',
'profiles/margin-transfer',
'profiles/transfer',
'reports',
'withdrawals/coinbase',
'withdrawals/coinbase-account',
'withdrawals/crypto',
'withdrawals/payment-method',
],
'delete': [
'orders',
'orders/client:{client_oid}',
'orders/{id}',
],
},
},
'precisionMode': TICK_SIZE,
'fees': {
'trading': {
'tierBased': True, # complicated tier system per coin
'percentage': True,
'maker': 0.5 / 100, # highest fee of all tiers
'taker': 0.5 / 100, # highest fee of all tiers
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {
'BCH': 0,
'BTC': 0,
'LTC': 0,
'ETH': 0,
'EUR': 0.15,
'USD': 25,
},
'deposit': {
'BCH': 0,
'BTC': 0,
'LTC': 0,
'ETH': 0,
'EUR': 0.15,
'USD': 10,
},
},
},
'exceptions': {
'exact': {
'Insufficient funds': InsufficientFunds,
'NotFound': OrderNotFound,
'Invalid API Key': AuthenticationError,
'invalid signature': AuthenticationError,
'Invalid Passphrase': AuthenticationError,
'Invalid order id': InvalidOrder,
'Private rate limit exceeded': RateLimitExceeded,
'Trading pair not available': PermissionDenied,
'Product not found': InvalidOrder,
},
'broad': {
'Order already done': OrderNotFound,
'order not found': OrderNotFound,
'price too small': InvalidOrder,
'price too precise': InvalidOrder,
'under maintenance': OnMaintenance,
'size is too small': InvalidOrder,
'Cancel only mode': OnMaintenance, # https://github.com/ccxt/ccxt/issues/7690
},
},
})
async def fetch_currencies(self, params={}):
response = await self.publicGetCurrencies(params)
#
# [
# {
# id: 'XTZ',
# name: 'Tezos',
# min_size: '0.000001',
# status: 'online',
# message: '',
# max_precision: '0.000001',
# convertible_to: [],
# details: {
# type: 'crypto',
# symbol: 'Τ',
# network_confirmations: 60,
# sort_order: 53,
# crypto_address_link: 'https://tzstats.com/{{address}}',
# crypto_transaction_link: 'https://tzstats.com/{{txId}}',
# push_payment_methods: ['crypto'],
# group_types: [],
# display_name: '',
# processing_time_seconds: 0,
# min_withdrawal_amount: 1
# }
# }
# ]
#
result = {}
for i in range(0, len(response)):
currency = response[i]
id = self.safe_string(currency, 'id')
name = self.safe_string(currency, 'name')
code = self.safe_currency_code(id)
details = self.safe_value(currency, 'details', {})
precision = self.safe_float(currency, 'max_precision')
status = self.safe_string(currency, 'status')
active = (status == 'online')
result[code] = {
'id': id,
'code': code,
'info': currency,
'type': self.safe_string(details, 'type'),
'name': name,
'active': active,
'fee': None,
'precision': precision,
'limits': {
'amount': {
'min': self.safe_float(details, 'min_size'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
'withdraw': {
'min': self.safe_float(details, 'min_withdrawal_amount'),
'max': None,
},
},
}
return result
async def fetch_markets(self, params={}):
response = await self.publicGetProducts(params)
#
# [
# {
# "id":"ZEC-BTC",
# "base_currency":"ZEC",
# "quote_currency":"BTC",
# "base_min_size":"0.01000000",
# "base_max_size":"1500.00000000",
# "quote_increment":"0.00000100",
# "base_increment":"0.00010000",
# "display_name":"ZEC/BTC",
# "min_market_funds":"0.001",
# "max_market_funds":"30",
# "margin_enabled":false,
# "post_only":false,
# "limit_only":false,
# "cancel_only":false,
# "trading_disabled":false,
# "status":"online",
# "status_message":""
# }
# ]
#
result = []
for i in range(0, len(response)):
market = response[i]
id = self.safe_string(market, 'id')
baseId = self.safe_string(market, 'base_currency')
quoteId = self.safe_string(market, 'quote_currency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
priceLimits = {
'min': self.safe_float(market, 'quote_increment'),
'max': None,
}
precision = {
'amount': self.safe_float(market, 'base_increment'),
'price': self.safe_float(market, 'quote_increment'),
}
status = self.safe_string(market, 'status')
active = (status == 'online')
result.append(self.extend(self.fees['trading'], {
'id': id,
'symbol': symbol,
'baseId': baseId,
'quoteId': quoteId,
'base': base,
'quote': quote,
'precision': precision,
'limits': {
'amount': {
'min': self.safe_float(market, 'base_min_size'),
'max': self.safe_float(market, 'base_max_size'),
},
'price': priceLimits,
'cost': {
'min': self.safe_float(market, 'min_market_funds'),
'max': self.safe_float(market, 'max_market_funds'),
},
},
'active': active,
'info': market,
}))
return result
async def fetch_accounts(self, params={}):
response = await self.privateGetAccounts(params)
#
# [
# {
# id: '4aac9c60-cbda-4396-9da4-4aa71e95fba0',
# currency: 'BTC',
# balance: '0.0000000000000000',
# available: '0',
# hold: '0.0000000000000000',
# profile_id: 'b709263e-f42a-4c7d-949a-a95c83d065da'
# },
# {
# id: 'f75fa69a-1ad1-4a80-bd61-ee7faa6135a3',
# currency: 'USDC',
# balance: '0.0000000000000000',
# available: '0',
# hold: '0.0000000000000000',
# profile_id: 'b709263e-f42a-4c7d-949a-a95c83d065da'
# },
# ]
#
result = []
for i in range(0, len(response)):
account = response[i]
accountId = self.safe_string(account, 'id')
currencyId = self.safe_string(account, 'currency')
code = self.safe_currency_code(currencyId)
result.append({
'id': accountId,
'type': None,
'currency': code,
'info': account,
})
return result
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privateGetAccounts(params)
result = {'info': response}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = {
'free': self.safe_float(balance, 'available'),
'used': self.safe_float(balance, 'hold'),
'total': self.safe_float(balance, 'balance'),
}
result[code] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
# level 1 - only the best bid and ask
# level 2 - top 50 bids and asks(aggregated)
# level 3 - full order book(non aggregated)
request = {
'id': self.market_id(symbol),
'level': 2, # 1 best bidask, 2 aggregated, 3 full
}
response = await self.publicGetProductsIdBook(self.extend(request, params))
#
# {
# "sequence":1924393896,
# "bids":[
# ["0.01825","24.34811287",2],
# ["0.01824","72.5463",3],
# ["0.01823","424.54298049",6],
# ],
# "asks":[
# ["0.01826","171.10414904",4],
# ["0.01827","22.60427028",1],
# ["0.01828","397.46018784",7],
# ]
# }
#
orderbook = self.parse_order_book(response)
orderbook['nonce'] = self.safe_integer(response, 'sequence')
return orderbook
def parse_ticker(self, ticker, market=None):
#
# publicGetProductsIdTicker
#
# {
# "trade_id":843439,
# "price":"0.997999",
# "size":"80.29769",
# "time":"2020-01-28T02:13:33.012523Z",
# "bid":"0.997094",
# "ask":"0.998",
# "volume":"1903188.03750000"
# }
#
# publicGetProductsIdStats
#
# {
# "open": "34.19000000",
# "high": "95.70000000",
# "low": "7.06000000",
# "volume": "2.41000000"
# }
#
timestamp = self.parse8601(self.safe_value(ticker, 'time'))
bid = self.safe_float(ticker, 'bid')
ask = self.safe_float(ticker, 'ask')
last = self.safe_float(ticker, 'price')
symbol = None if (market is None) else market['symbol']
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': bid,
'bidVolume': None,
'ask': ask,
'askVolume': None,
'vwap': None,
'open': self.safe_float(ticker, 'open'),
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, 'volume'),
'quoteVolume': None,
'info': ticker,
}
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'id': market['id'],
}
# publicGetProductsIdTicker or publicGetProductsIdStats
method = self.safe_string(self.options, 'fetchTickerMethod', 'publicGetProductsIdTicker')
response = await getattr(self, method)(self.extend(request, params))
#
# publicGetProductsIdTicker
#
# {
# "trade_id":843439,
# "price":"0.997999",
# "size":"80.29769",
# "time":"2020-01-28T02:13:33.012523Z",
# "bid":"0.997094",
# "ask":"0.998",
# "volume":"1903188.03750000"
# }
#
# publicGetProductsIdStats
#
# {
# "open": "34.19000000",
# "high": "95.70000000",
# "low": "7.06000000",
# "volume": "2.41000000"
# }
#
return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
#
# {
# type: 'match',
# trade_id: 82047307,
# maker_order_id: '0f358725-2134-435e-be11-753912a326e0',
# taker_order_id: '252b7002-87a3-425c-ac73-f5b9e23f3caf',
# side: 'sell',
# size: '0.00513192',
# price: '9314.78',
# product_id: 'BTC-USD',
# sequence: 12038915443,
# time: '2020-01-31T20:03:41.158814Z'
# }
#
timestamp = self.parse8601(self.safe_string_2(trade, 'time', 'created_at'))
marketId = self.safe_string(trade, 'product_id')
symbol = self.safe_symbol(marketId, market, '-')
feeRate = None
feeCurrency = None
takerOrMaker = None
if market is not None:
feeCurrency = market['quote']
if 'liquidity' in trade:
takerOrMaker = 'taker' if (trade['liquidity'] == 'T') else 'maker'
feeRate = market[takerOrMaker]
feeCost = self.safe_float_2(trade, 'fill_fees', 'fee')
fee = {
'cost': feeCost,
'currency': feeCurrency,
'rate': feeRate,
}
type = None
id = self.safe_string(trade, 'trade_id')
side = 'sell' if (trade['side'] == 'buy') else 'buy'
orderId = self.safe_string(trade, 'order_id')
# Coinbase Pro returns inverted side to fetchMyTrades vs fetchTrades
if orderId is not None:
side = 'buy' if (trade['side'] == 'buy') else 'sell'
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'size')
return {
'id': id,
'order': orderId,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': type,
'takerOrMaker': takerOrMaker,
'side': side,
'price': price,
'amount': amount,
'fee': fee,
'cost': price * amount,
}
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
        # a symbol argument is required as of 2018-08-23
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'product_id': market['id'],
}
if limit is not None:
request['limit'] = limit
response = await self.privateGetFills(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'id': market['id'], # fixes issue #2
}
response = await self.publicGetProductsIdTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
#
# [
# 1591514160,
# 0.02507,
# 0.02507,
# 0.02507,
# 0.02507,
# 0.02816506
# ]
#
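        # the exchange returns each candle as [timestamp, low, high, open, close, volume]
        # the indices below reorder it into the ccxt layout [timestamp, open, high, low, close, volume]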
return [
self.safe_timestamp(ohlcv, 0),
self.safe_float(ohlcv, 3),
self.safe_float(ohlcv, 2),
self.safe_float(ohlcv, 1),
self.safe_float(ohlcv, 4),
self.safe_float(ohlcv, 5),
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
granularity = self.timeframes[timeframe]
request = {
'id': market['id'],
'granularity': granularity,
}
if since is not None:
request['start'] = self.iso8601(since)
if limit is None:
# https://docs.pro.coinbase.com/#get-historic-rates
limit = 300 # max = 300
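            # granularity is in seconds; scale by 1000 to milliseconds so that
            # `end` spans exactly `limit` candles starting from `since`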
request['end'] = self.iso8601(self.sum((limit - 1) * granularity * 1000, since))
response = await self.publicGetProductsIdCandles(self.extend(request, params))
#
# [
# [1591514160,0.02507,0.02507,0.02507,0.02507,0.02816506],
# [1591514100,0.02507,0.02507,0.02507,0.02507,1.63830323],
# [1591514040,0.02505,0.02507,0.02505,0.02507,0.19918178]
# ]
#
return self.parse_ohlcvs(response, market, timeframe, since, limit)
async def fetch_time(self, params={}):
response = await self.publicGetTime(params)
#
# {
# "iso":"2020-05-12T08:00:51.504Z",
# "epoch":1589270451.504
# }
#
return self.safe_timestamp(response, 'epoch')
def parse_order_status(self, status):
statuses = {
'pending': 'open',
'active': 'open',
'open': 'open',
'done': 'closed',
'canceled': 'canceled',
'canceling': 'open',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
timestamp = self.parse8601(self.safe_string(order, 'created_at'))
marketId = self.safe_string(order, 'product_id')
market = self.safe_market(marketId, market, '-')
status = self.parse_order_status(self.safe_string(order, 'status'))
price = self.safe_float(order, 'price')
filled = self.safe_float(order, 'filled_size')
amount = self.safe_float(order, 'size', filled)
remaining = None
if amount is not None:
if filled is not None:
remaining = amount - filled
cost = self.safe_float(order, 'executed_value')
feeCost = self.safe_float(order, 'fill_fees')
fee = None
if feeCost is not None:
feeCurrencyCode = None
if market is not None:
feeCurrencyCode = market['quote']
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
'rate': None,
}
id = self.safe_string(order, 'id')
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
return {
'id': id,
'clientOrderId': None,
'info': order,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': status,
'symbol': market['symbol'],
'type': type,
'side': side,
'price': price,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'fee': fee,
'average': None,
'trades': None,
}
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'id': id,
}
response = await self.privateGetOrdersId(self.extend(request, params))
return self.parse_order(response)
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
market = None
if symbol is not None:
market = self.market(symbol)
request = {
'order_id': id,
}
response = await self.privateGetFills(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
async def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
'status': 'all',
}
market = None
if symbol is not None:
market = self.market(symbol)
request['product_id'] = market['id']
response = await self.privateGetOrders(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['product_id'] = market['id']
response = await self.privateGetOrders(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
await self.load_markets()
request = {
'status': 'done',
}
market = None
if symbol is not None:
market = self.market(symbol)
request['product_id'] = market['id']
response = await self.privateGetOrders(self.extend(request, params))
return self.parse_orders(response, market, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
# oid = str(self.nonce())
request = {
'product_id': self.market_id(symbol),
'side': side,
'size': self.amount_to_precision(symbol, amount),
'type': type,
}
if type == 'limit':
request['price'] = self.price_to_precision(symbol, price)
response = await self.privatePostOrders(self.extend(request, params))
return self.parse_order(response)
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
return await self.privateDeleteOrdersId({'id': id})
async def cancel_all_orders(self, symbol=None, params={}):
return await self.privateDeleteOrders(params)
def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}):
market = self.markets[symbol]
rate = market[takerOrMaker]
cost = amount * price
currency = market['quote']
return {
'type': takerOrMaker,
'currency': currency,
'rate': rate,
'cost': float(self.currency_to_precision(currency, rate * cost)),
}
async def fetch_payment_methods(self, params={}):
return await self.privateGetPaymentMethods(params)
async def deposit(self, code, amount, address, params={}):
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
'amount': amount,
}
method = 'privatePostDeposits'
if 'payment_method_id' in params:
# deposit from a payment_method, like a bank account
method += 'PaymentMethod'
elif 'coinbase_account_id' in params:
# deposit into Coinbase Pro account from a Coinbase account
method += 'CoinbaseAccount'
else:
            # otherwise we did not receive a supported deposit location
# relevant docs link for the Googlers
# https://docs.pro.coinbase.com/#deposits
raise NotSupported(self.id + ' deposit() requires one of `coinbase_account_id` or `payment_method_id` extra params')
response = await getattr(self, method)(self.extend(request, params))
if not response:
raise ExchangeError(self.id + ' deposit() error: ' + self.json(response))
return {
'info': response,
'id': response['id'],
}
async def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
'amount': amount,
}
method = 'privatePostWithdrawals'
if 'payment_method_id' in params:
method += 'PaymentMethod'
elif 'coinbase_account_id' in params:
method += 'CoinbaseAccount'
else:
method += 'Crypto'
request['crypto_address'] = address
response = await getattr(self, method)(self.extend(request, params))
if not response:
raise ExchangeError(self.id + ' withdraw() error: ' + self.json(response))
return {
'info': response,
'id': response['id'],
}
async def fetch_transactions(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
await self.load_accounts()
currency = None
id = self.safe_string(params, 'id') # account id
if id is None:
if code is None:
raise ArgumentsRequired(self.id + ' fetchTransactions() requires a currency code argument if no account id specified in params')
currency = self.currency(code)
accountsByCurrencyCode = self.index_by(self.accounts, 'currency')
account = self.safe_value(accountsByCurrencyCode, code)
if account is None:
raise ExchangeError(self.id + ' fetchTransactions() could not find account id for ' + code)
id = account['id']
request = {
'id': id,
}
if limit is not None:
request['limit'] = limit
response = await self.privateGetAccountsIdTransfers(self.extend(request, params))
for i in range(0, len(response)):
response[i]['currency'] = code
return self.parse_transactions(response, currency, since, limit)
def parse_transaction_status(self, transaction):
canceled = self.safe_value(transaction, 'canceled_at')
if canceled:
return 'canceled'
processed = self.safe_value(transaction, 'processed_at')
completed = self.safe_value(transaction, 'completed_at')
if completed:
return 'ok'
elif processed and not completed:
return 'failed'
else:
return 'pending'
def parse_transaction(self, transaction, currency=None):
details = self.safe_value(transaction, 'details', {})
id = self.safe_string(transaction, 'id')
txid = self.safe_string(details, 'crypto_transaction_hash')
timestamp = self.parse8601(self.safe_string(transaction, 'created_at'))
updated = self.parse8601(self.safe_string(transaction, 'processed_at'))
currencyId = self.safe_string(transaction, 'currency')
code = self.safe_currency_code(currencyId, currency)
fee = None
status = self.parse_transaction_status(transaction)
amount = self.safe_float(transaction, 'amount')
type = self.safe_string(transaction, 'type')
address = self.safe_string(details, 'crypto_address')
tag = self.safe_string(details, 'destination_tag')
address = self.safe_string(transaction, 'crypto_address', address)
if type == 'withdraw':
type = 'withdrawal'
address = self.safe_string(details, 'sent_to_address', address)
return {
'info': transaction,
'id': id,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'address': address,
'tag': tag,
'type': type,
'amount': amount,
'currency': code,
'status': status,
'updated': updated,
'fee': fee,
}
def s | self, path, api='public', method='GET', params={}, headers=None, body=None):
request = '/' + self.implode_params(path, params)
query = self.omit(params, self.extract_params(path))
if method == 'GET':
if query:
request += '?' + self.urlencode(query)
url = self.urls['api'][api] + request
if api == 'private':
self.check_required_credentials()
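            # Coinbase Pro signing scheme: the prehash string is
            # timestamp + method + request path(+query) + body, signed with
            # HMAC-SHA256 keyed by the base64-decoded API secret, then
            # base64-encoded into the CB-ACCESS-SIGN header below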
nonce = str(self.nonce())
payload = ''
if method != 'GET':
if query:
body = self.json(query)
payload = body
what = nonce + method + request + payload
secret = self.base64_to_binary(self.secret)
signature = self.hmac(self.encode(what), secret, hashlib.sha256, 'base64')
headers = {
'CB-ACCESS-KEY': self.apiKey,
'CB-ACCESS-SIGN': signature,
'CB-ACCESS-TIMESTAMP': nonce,
'CB-ACCESS-PASSPHRASE': self.password,
'Content-Type': 'application/json',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
accounts = self.safe_value(self.options, 'coinbaseAccounts')
if accounts is None:
accounts = await self.privateGetCoinbaseAccounts()
self.options['coinbaseAccounts'] = accounts # cache it
self.options['coinbaseAccountsByCurrencyId'] = self.index_by(accounts, 'currency')
currencyId = currency['id']
account = self.safe_value(self.options['coinbaseAccountsByCurrencyId'], currencyId)
if account is None:
# eslint-disable-next-line quotes
raise InvalidAddress(self.id + " fetchDepositAddress() could not find currency code " + code + " with id = " + currencyId + " in self.options['coinbaseAccountsByCurrencyId']")
request = {
'id': account['id'],
}
response = await self.privateGetCoinbaseAccountsIdAddresses(self.extend(request, params))
address = self.safe_string(response, 'address')
tag = self.safe_string(response, 'destination_tag')
return {
'currency': code,
'address': self.check_address(address),
'tag': tag,
'info': response,
}
async def create_deposit_address(self, code, params={}):
await self.load_markets()
currency = self.currency(code)
accounts = self.safe_value(self.options, 'coinbaseAccounts')
if accounts is None:
accounts = await self.privateGetCoinbaseAccounts()
self.options['coinbaseAccounts'] = accounts # cache it
self.options['coinbaseAccountsByCurrencyId'] = self.index_by(accounts, 'currency')
currencyId = currency['id']
account = self.safe_value(self.options['coinbaseAccountsByCurrencyId'], currencyId)
if account is None:
# eslint-disable-next-line quotes
raise InvalidAddress(self.id + " fetchDepositAddress() could not find currency code " + code + " with id = " + currencyId + " in self.options['coinbaseAccountsByCurrencyId']")
request = {
'id': account['id'],
}
response = await self.privatePostCoinbaseAccountsIdAddresses(self.extend(request, params))
address = self.safe_string(response, 'address')
tag = self.safe_string(response, 'destination_tag')
return {
'currency': code,
'address': self.check_address(address),
'tag': tag,
'info': response,
}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if (code == 400) or (code == 404):
if body[0] == '{':
message = self.safe_string(response, 'message')
feedback = self.id + ' ' + message
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
raise ExchangeError(feedback) # unknown message
raise ExchangeError(self.id + ' ' + body)
async def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
response = await self.fetch2(path, api, method, params, headers, body)
if not isinstance(response, basestring):
if 'message' in response:
raise ExchangeError(self.id + ' ' + self.json(response))
return response
| ign( |
routes.tsx | import React from 'react';
import { BrowserRouter, Route } from 'react-router-dom';
import Landing from './pages/Landing';
import TeacherList from './pages/TeacherList';
import TeacherForm from './pages/TeacherForm';
| function Routes() {
return (
<BrowserRouter>
<Route path="/" exact component={Landing} />
<Route path="/study" component={TeacherList} />
<Route path="/give-classes" component={TeacherForm} />
</BrowserRouter>
)
}
export default Routes; | |
app.test.js | const { Oso } = require('oso');
const { Organization, Repository, Role, User } = require('./app');
const oso = new Oso();
oso.registerClass(Organization);
oso.registerClass(Repository);
oso.registerClass(User);
beforeEach(() => oso.loadFiles(["main.polar"]));
afterEach(() => oso.clearRules());
const alphaAssociation = new Organization("Alpha Association");
const betaBusiness = new Organization("Beta Business");
const affineTypes = new Repository("Affine Types", alphaAssociation);
const allocator = new Repository("Allocator", alphaAssociation);
const bubbleSort = new Repository("Bubble Sort", betaBusiness);
const benchmarks = new Repository("Benchmarks", betaBusiness);
const ariana = new User("Ariana");
const bhavik = new User("Bhavik");
ariana.assignRoleForResource("owner", alphaAssociation);
bhavik.assignRoleForResource("contributor", bubbleSort);
bhavik.assignRoleForResource("maintainer", benchmarks);
test('policy', async () => {
await expect(oso.authorize(ariana, "read", affineTypes)).resolves.toBeUndefined();
await expect(oso.authorize(ariana, "push", affineTypes)).resolves.toBeUndefined();
await expect(oso.authorize(ariana, "read", allocator)).resolves.toBeUndefined();
await expect(oso.authorize(ariana, "push", allocator)).resolves.toBeUndefined();
await expect(oso.authorize(ariana, "read", bubbleSort)).rejects.toThrow('404');
await expect(oso.authorize(ariana, "push", bubbleSort)).rejects.toThrow('404');
await expect(oso.authorize(ariana, "read", benchmarks)).rejects.toThrow('404');
await expect(oso.authorize(ariana, "push", benchmarks)).rejects.toThrow('404');
await expect(oso.authorize(bhavik, "read", affineTypes)).rejects.toThrow('404');
await expect(oso.authorize(bhavik, "push", affineTypes)).rejects.toThrow('404');
await expect(oso.authorize(bhavik, "read", allocator)).rejects.toThrow('404');
await expect(oso.authorize(bhavik, "push", allocator)).rejects.toThrow('404');
await expect(oso.authorize(bhavik, "read", bubbleSort)).resolves.toBeUndefined();
await expect(oso.authorize(bhavik, "push", bubbleSort)).rejects.toThrow('403');
await expect(oso.authorize(bhavik, "read", benchmarks)).resolves.toBeUndefined(); | await expect(oso.authorize(bhavik, "push", benchmarks)).resolves.toBeUndefined();
}); |
|
auth.rs | use crate::dispatch::WasmCloudEntity;
use crate::Result;
use crate::{Invocation, SYSTEM_ACTOR};
use std::collections::HashMap;
use wascap::jwt::{Actor, Claims};
/// An authorizer is responsible for determining whether an actor can be loaded as well as
/// whether an actor can invoke another entity. For invocation checks, the authorizer is only ever invoked _after_
/// an initial capability attestation check has been performed and _passed_. This has the net effect of making it
/// impossible to override the base behavior of checking that an actor's embedded JWT contains the right
/// capability attestations.
pub trait Authorizer: CloneAuthorizer + Sync + Send {
/// This check is performed during the [`start_actor`](`crate::Host::start_actor()`) call, allowing the custom authorizer to do things
/// like verify a provenance chain, make external calls, etc.
fn can_load(&self, claims: &Claims<Actor>) -> bool;
/// This check will be performed for _every_ invocation that has passed the base capability check,
/// including the operation that occurs during [`set_link`](`crate::Host::set_link()`).
fn can_invoke(&self, claims: &Claims<Actor>, target: &WasmCloudEntity, operation: &str)
-> bool;
}
#[doc(hidden)]
pub trait CloneAuthorizer {
fn clone_authorizer(&self) -> Box<dyn Authorizer>;
}
impl<T> CloneAuthorizer for T
where
T: Authorizer + Clone + 'static,
{
fn clone_authorizer(&self) -> Box<dyn Authorizer> {
Box::new(self.clone())
}
}
impl Clone for Box<dyn Authorizer> {
fn clone(&self) -> Self {
self.clone_authorizer()
}
}
#[derive(Clone)]
pub(crate) struct DefaultAuthorizer {}
impl DefaultAuthorizer {
pub fn new() -> DefaultAuthorizer {
DefaultAuthorizer {}
}
}
impl Authorizer for DefaultAuthorizer {
fn can_load(&self, _claims: &Claims<Actor>) -> bool {
true
}
// This doesn't actually mean everyone can invoke everything. Remember that the host itself
// will _always_ enforce the claims check on an actor having the required capability
// attestation
fn can_invoke(
&self,
_claims: &Claims<Actor>,
target: &WasmCloudEntity,
_operation: &str,
) -> bool {
match target {
WasmCloudEntity::Actor(_a) => true,
WasmCloudEntity::Capability { .. } => true,
}
}
}
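/// Authorization ladder, as implemented below: validate the invocation's
/// anti-forgery signature first; for actor-originated calls, require cached
/// claims whose capability list covers the target contract, then defer to the
/// pluggable [`Authorizer`]; invocations originating from capability providers
/// back into actors are allowed without further checks.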
pub(crate) fn authorize_invocation(
inv: &Invocation,
authorizer: Box<dyn Authorizer>,
claims_cache: &HashMap<String, Claims<wascap::jwt::Actor>>,
) -> Result<()> {
let _ = inv.validate_antiforgery()?; // Fail authorization if the invocation isn't properly signed
if let WasmCloudEntity::Actor(ref actor_key) = &inv.origin {
if let Some(c) = claims_cache.get(actor_key) {
if let Some(ref caps) = c.metadata.as_ref().unwrap().caps {
let allowed = if let WasmCloudEntity::Capability { contract_id, .. } = &inv.target {
caps.contains(contract_id)
} else {
true
};
if allowed {
if authorizer.can_invoke(&c, &inv.target, &inv.operation) {
Ok(())
} else {
Err("Authorization denied - authorizer rejected invocation".into())
}
} else {
Err("Authorization denied - Actor does not have required claims".into())
}
} else {
Err("This actor has no embedded claims. Authorization denied".into())
}
} else if actor_key == SYSTEM_ACTOR {
// system actor can call other actors
Ok(())
} else {
Err(format!(
"No claims found for actor '{}'. Has it been started?",
actor_key
)
.into())
}
} else {
Ok(()) // Allow cap->actor calls without checking
}
}
#[cfg(test)]
mod test {
use crate::auth::{authorize_invocation, Authorizer, DefaultAuthorizer};
use crate::{Invocation, WasmCloudEntity};
use std::collections::HashMap;
use wascap::jwt::{Actor, Claims, ClaimsBuilder};
use wascap::prelude::KeyPair;
#[test]
fn actor_to_actor_allowed() {
let inv = gen_invocation(
WasmCloudEntity::Actor("A".to_string()),
WasmCloudEntity::Actor("B".to_string()),
"test",
);
let mut cache = HashMap::new();
cache.insert(
"A".to_string(),
ClaimsBuilder::new()
.with_metadata(wascap::jwt::Actor::new(
"A".to_string(),
Some(vec!["wasmcloud:messaging".to_string()]),
None,
false,
None,
None,
None,
))
.build(),
);
let auth = Box::new(DefaultAuthorizer::new());
assert!(authorize_invocation(&inv, auth, &cache).is_ok());
}
#[test]
fn block_actor_with_no_claims() {
let inv = gen_invocation(
WasmCloudEntity::Actor("A".to_string()),
WasmCloudEntity::Actor("B".to_string()),
"test",
);
let cache = HashMap::new();
let auth = Box::new(DefaultAuthorizer::new());
let res = authorize_invocation(&inv, auth, &cache);
assert!(res.is_err());
assert_eq!(
res.err().unwrap().to_string(),
"No claims found for actor 'A'. Has it been started?"
);
}
#[test]
fn block_actor_with_insufficient_claims() {
let target = WasmCloudEntity::Capability {
contract_id: "wasmcloud:keyvalue".to_string(),
id: "Vxxx".to_string(),
link_name: "default".to_string(),
};
let inv = gen_invocation(WasmCloudEntity::Actor("A".to_string()), target, "test");
let mut cache = HashMap::new();
cache.insert(
"A".to_string(),
ClaimsBuilder::new()
.with_metadata(wascap::jwt::Actor::new(
"A".to_string(),
Some(vec!["wasmcloud:messaging".to_string()]),
None,
false,
None,
None,
None,
))
.build(),
);
let auth = Box::new(DefaultAuthorizer::new());
let res = authorize_invocation(&inv, auth, &cache);
assert_eq!(
res.err().unwrap().to_string(),
"Authorization denied - Actor does not have required claims"
);
}
#[test]
fn invoke_authorizer_when_initial_check_passes() {
let target = WasmCloudEntity::Capability {
contract_id: "wasmcloud:keyvalue".to_string(),
id: "Vxxx".to_string(),
link_name: "default".to_string(),
};
let inv = gen_invocation(WasmCloudEntity::Actor("A".to_string()), target, "test");
let mut cache = HashMap::new();
cache.insert(
"A".to_string(),
ClaimsBuilder::new()
.with_metadata(wascap::jwt::Actor::new(
"A".to_string(),
Some(vec!["wasmcloud:keyvalue".to_string()]),
None,
false,
None,
None,
None,
))
.build(),
);
let auth = Box::new(CrankyAuthorizer::new());
let res = authorize_invocation(&inv, auth, &cache);
assert_eq!(
res.err().unwrap().to_string(),
"Authorization denied - authorizer rejected invocation"
);
}
fn gen_invocation(source: WasmCloudEntity, target: WasmCloudEntity, op: &str) -> Invocation {
let hk = KeyPair::new_server();
Invocation::new(&hk, source, target, op, vec![])
}
#[derive(Clone)]
struct CrankyAuthorizer;
impl CrankyAuthorizer {
pub fn new() -> CrankyAuthorizer |
}
impl Authorizer for CrankyAuthorizer {
fn can_load(&self, _claims: &Claims<Actor>) -> bool {
false
}
fn can_invoke(
&self,
_claims: &Claims<Actor>,
_target: &WasmCloudEntity,
_operation: &str,
) -> bool {
false
}
}
}
| {
CrankyAuthorizer
} |
external_server.go | package httpsteps
import (
"context"
"fmt"
"strings"
"github.com/bool64/httpmock"
"github.com/bool64/shared"
"github.com/cucumber/godog"
"github.com/godogx/resource"
)
type exp struct {
httpmock.Expectation
async bool
}
// NewExternalServer creates an ExternalServer.
func NewExternalServer() *ExternalServer {
es := &ExternalServer{}
es.mocks = make(map[string]*mock, 1)
es.lock = resource.NewLock(func(service string) error {
m := es.mocks[service]
if m == nil |
if m.exp != nil {
return fmt.Errorf("%w in %s for %s %s",
errUndefinedResponse, service, m.exp.Method, m.exp.RequestURI)
}
if err := m.srv.ExpectationsWereMet(); err != nil {
return fmt.Errorf("expectations were not met for %s: %w", service, err)
}
return nil
})
es.Vars = &shared.Vars{}
return es
}
// ExternalServer is a collection of step-driven HTTP servers to serve requests of application with mocked data.
//
// Please use NewExternalServer() to create an instance.
type ExternalServer struct {
mocks map[string]*mock
lock *resource.Lock
Vars *shared.Vars
}
type mock struct {
exp *exp
srv *httpmock.Server
}
// RegisterSteps adds steps to godog scenario context to serve outgoing requests with mocked data.
//
// In simple case you can define expected URL and response.
//
// Given "some-service" receives "GET" request "/get-something?foo=bar"
//
// And "some-service" responds with status "OK" and body
// """
// {"key":"value"}
// """
//
// Or request with body.
//
// And "another-service" receives "POST" request "/post-something" with body
// """
// // Could be a JSON5 too.
// {"foo":"bar"}
// """
//
// Request with body from a file.
//
// And "another-service" receives "POST" request "/post-something" with body from file
// """
// _testdata/sample.json
// """
//
// Request can expect to have a header.
//
// And "some-service" request includes header "X-Foo: bar"
//
// By default, each configured request is expected to be received 1 time. This can be changed to a different number.
//
// And "some-service" request is received 1234 times
//
// Or to be unlimited.
//
// And "some-service" request is received several times
//
// By default, requests are expected in same sequential order as they are defined.
// If there is no stable order you can have an async expectation.
// Async requests are expected in any order.
//
// And "some-service" request is async
//
// Response may have a header.
//
// And "some-service" response includes header "X-Bar: foo"
//
// Response must have a status.
//
// And "some-service" responds with status "OK"
//
// Response may also have a body.
//
// And "some-service" responds with status "OK" and body
// """
// {"key":"value"}
// """
//
// Response body can also be defined in file.
//
// And "another-service" responds with status "200" and body from file
// """
// _testdata/sample.json5
// """
func (e *ExternalServer) RegisterSteps(s *godog.ScenarioContext) {
e.lock.Register(s)
e.steps(s)
}
func (e *ExternalServer) steps(s *godog.ScenarioContext) {
// Init request expectation.
s.Step(`^"([^"]*)" receives "([^"]*)" request "([^"]*)"$`,
e.serviceReceivesRequest)
s.Step(`^"([^"]*)" receives "([^"]*)" request "([^"]*)" with body$`,
e.serviceReceivesRequestWithBody)
s.Step(`^"([^"]*)" receives "([^"]*)" request "([^"]*)" with body from file$`,
e.serviceReceivesRequestWithBodyFromFile)
// Configure request expectation.
s.Step(`^"([^"]*)" request includes header "([^"]*): ([^"]*)"$`,
e.serviceRequestIncludesHeader)
s.Step(`^"([^"]*)" request is async$`,
e.serviceRequestIsAsync)
s.Step(`^"([^"]*)" request is received several times$`,
e.serviceReceivesRequestMultipleTimes)
s.Step(`^"([^"]*)" request is received (\d+) times$`,
e.serviceReceivesRequestNTimes)
// Configure response.
s.Step(`^"([^"]*)" response includes header "([^"]*): ([^"]*)"$`,
e.serviceResponseIncludesHeader)
// Finalize request expectation.
s.Step(`^"([^"]*)" responds with status "([^"]*)"$`,
func(ctx context.Context, service, statusOrCode string) (context.Context, error) {
return e.serviceRespondsWithStatusAndPreparedBody(ctx, service, statusOrCode, nil)
})
s.Step(`^"([^"]*)" responds with status "([^"]*)" and body$`,
e.serviceRespondsWithStatusAndBody)
s.Step(`^"([^"]*)" responds with status "([^"]*)" and body from file$`,
e.serviceRespondsWithStatusAndBodyFromFile)
}
// GetMock exposes mock of external service for configuration.
func (e *ExternalServer) GetMock(service string) *httpmock.Server {
return e.mocks[service].srv
}
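// pending returns the mock for a service together with its in-flight request
// expectation, failing if no request step has been declared for that service yet.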
func (e *ExternalServer) pending(ctx context.Context, service string) (context.Context, *mock, error) {
ctx, m, err := e.mock(ctx, service)
if err != nil {
return ctx, nil, err
}
if m.exp == nil {
return ctx, nil, fmt.Errorf("%w: %q", errUndefinedRequest, service)
}
return ctx, m, nil
}
// mock returns mock for a service or fails if service is not defined.
func (e *ExternalServer) mock(ctx context.Context, service string) (context.Context, *mock, error) {
service = strings.Trim(service, `" `)
if service == "" {
service = Default
}
c, found := e.mocks[service]
if !found {
return ctx, nil, fmt.Errorf("%w: %s", errUnknownService, service)
}
acquired, err := e.lock.Acquire(ctx, service)
if err != nil {
return ctx, nil, err
}
// Reset client after acquiring lock.
if acquired {
c.exp = nil
c.srv.ResetExpectations()
if e.Vars != nil {
ctx, c.srv.JSONComparer.Vars = e.Vars.Fork(ctx)
}
}
return ctx, c, nil
}
// Add starts a mocked server for a named service and returns url.
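//
// A minimal usage sketch (the service name is hypothetical):
//
//	es := NewExternalServer()
//	baseURL := es.Add("some-service")
//	// point the application under test at baseURL instead of the real service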
func (e *ExternalServer) Add(service string, options ...func(mock *httpmock.Server)) string {
m, url := httpmock.NewServer()
for _, option := range options {
option(m)
}
e.mocks[service] = &mock{srv: m}
return url
}
func (e *ExternalServer) serviceReceivesRequestWithPreparedBody(ctx context.Context, service, method, requestURI string, body []byte) (context.Context, error) {
ctx, err := e.serviceReceivesRequest(ctx, service, method, requestURI)
if err != nil {
return ctx, err
}
ctx, m, err := e.pending(ctx, service)
if err != nil {
return ctx, err
}
m.exp.RequestBody = body
return ctx, nil
}
func (e *ExternalServer) serviceRequestIncludesHeader(ctx context.Context, service, header, value string) (context.Context, error) {
ctx, m, err := e.pending(ctx, service)
if err != nil {
return ctx, err
}
if m.exp.RequestHeader == nil {
m.exp.RequestHeader = make(map[string]string, 1)
}
m.exp.RequestHeader[header] = value
return ctx, nil
}
func (e *ExternalServer) serviceReceivesRequestWithBody(ctx context.Context, service, method, requestURI string, bodyDoc string) (context.Context, error) {
ctx, m, err := e.mock(ctx, service)
if err != nil {
return ctx, err
}
body, err := loadBody([]byte(bodyDoc), m.srv.JSONComparer.Vars)
if err != nil {
return ctx, err
}
return e.serviceReceivesRequestWithPreparedBody(ctx, service, method, requestURI, body)
}
func (e *ExternalServer) serviceReceivesRequestWithBodyFromFile(ctx context.Context, service, method, requestURI string, filePath string) (context.Context, error) {
ctx, m, err := e.mock(ctx, service)
if err != nil {
return ctx, err
}
body, err := loadBodyFromFile(filePath, m.srv.JSONComparer.Vars)
if err != nil {
return ctx, err
}
return e.serviceReceivesRequestWithPreparedBody(ctx, service, method, requestURI, body)
}
func (e *ExternalServer) serviceReceivesRequest(ctx context.Context, service, method, requestURI string) (context.Context, error) {
ctx, m, err := e.mock(ctx, service)
if err != nil {
return ctx, err
}
if m.exp != nil {
return ctx, fmt.Errorf("%w for %q: %+v", errUnexpectedExpectations, service, *m.exp)
}
m.exp = &exp{}
m.exp.Method = method
m.exp.RequestURI = requestURI
return ctx, nil
}
func (e *ExternalServer) serviceReceivesRequestNTimes(ctx context.Context, service string, n int) (context.Context, error) {
ctx, m, err := e.pending(ctx, service)
if err != nil {
return ctx, err
}
m.exp.Repeated = n
return ctx, nil
}
func (e *ExternalServer) serviceRequestIsAsync(ctx context.Context, service string) (context.Context, error) {
ctx, m, err := e.pending(ctx, service)
if err != nil {
return ctx, err
}
m.exp.async = true
return ctx, nil
}
func (e *ExternalServer) serviceReceivesRequestMultipleTimes(ctx context.Context, service string) (context.Context, error) {
ctx, m, err := e.pending(ctx, service)
if err != nil {
return ctx, err
}
m.exp.Unlimited = true
return ctx, nil
}
func (e *ExternalServer) serviceRespondsWithStatusAndPreparedBody(ctx context.Context, service, statusOrCode string, body []byte) (context.Context, error) {
code, err := statusCode(statusOrCode)
if err != nil {
return ctx, err
}
ctx, m, err := e.pending(ctx, service)
if err != nil {
return ctx, err
}
pending := *m.exp
m.exp = nil
pending.Status = code
pending.ResponseBody = body
if pending.ResponseHeader == nil {
pending.ResponseHeader = map[string]string{}
}
if pending.async {
m.srv.ExpectAsync(pending.Expectation)
} else {
m.srv.Expect(pending.Expectation)
}
return ctx, nil
}
func (e *ExternalServer) serviceResponseIncludesHeader(ctx context.Context, service, header, value string) (context.Context, error) {
ctx, m, err := e.pending(ctx, service)
if err != nil {
return ctx, err
}
if m.exp.ResponseHeader == nil {
m.exp.ResponseHeader = make(map[string]string, 1)
}
m.exp.ResponseHeader[header] = value
return ctx, nil
}
func (e *ExternalServer) serviceRespondsWithStatusAndBody(ctx context.Context, service, statusOrCode string, bodyDoc string) (context.Context, error) {
ctx, m, err := e.mock(ctx, service)
if err != nil {
return ctx, err
}
body, err := loadBody([]byte(bodyDoc), m.srv.JSONComparer.Vars)
if err != nil {
return ctx, err
}
return e.serviceRespondsWithStatusAndPreparedBody(ctx, service, statusOrCode, body)
}
func (e *ExternalServer) serviceRespondsWithStatusAndBodyFromFile(ctx context.Context, service, statusOrCode string, filePath string) (context.Context, error) {
ctx, m, err := e.mock(ctx, service)
if err != nil {
return ctx, err
}
body, err := loadBodyFromFile(filePath, m.srv.JSONComparer.Vars)
if err != nil {
return ctx, err
}
return e.serviceRespondsWithStatusAndPreparedBody(ctx, service, statusOrCode, body)
}
| {
return fmt.Errorf("%w: %s", errNoMockForService, service)
} |
Bank.tsx | import React, { useEffect } from 'react';
import styled from 'styled-components';
import { useParams } from 'react-router-dom';
import { useWallet } from 'use-wallet';
import { makeStyles } from '@material-ui/core/styles';
import { Box, Button, Card, CardContent, Typography, Grid } from '@material-ui/core';
import PageHeader from '../../components/PageHeader';
import Spacer from '../../components/Spacer';
import UnlockWallet from '../../components/UnlockWallet';
import Harvest from './components/Harvest';
import Stake from './components/Stake';
import useBank from '../../hooks/useBank';
import useStatsForPool from '../../hooks/useStatsForPool';
import useRedeem from '../../hooks/useRedeem';
import { Bank as BankEntity } from '../../tomb-finance';
import useTombFinance from '../../hooks/useTombFinance';
const useStyles = makeStyles((theme) => ({
gridItem: {
height: '100%',
[theme.breakpoints.up('md')]: {
height: '90px',
},
},
}));
const Bank: React.FC = () => {
useEffect(() => window.scrollTo(0, 0));
const classes = useStyles();
const { bankId } = useParams();
const bank = useBank(bankId);
const { account } = useWallet();
const { onRedeem } = useRedeem(bank);
const statsOnPool = useStatsForPool(bank);
return account && bank ? (
<>
<PageHeader
icon="🏦"
subtitle={`Deposit ${bank?.depositTokenName} and earn ${bank?.earnTokenName}`}
title={bank?.name}
/>
<Box>
<Grid container justifyContent="center" spacing={3} style={{ marginBottom: '50px' }}>
<Grid item xs={12} md={2} lg={2} className={classes.gridItem}>
<Card className={classes.gridItem}>
<CardContent style={{ textAlign: 'center', fontSize: '18px !important' }}>
<Typography style={{ fontSize:'18px' }}>APR</Typography>
<Typography style={{ fontSize:'18px' }}>{bank.closedForStaking ? '0.00' : statsOnPool?.yearlyAPR}%</Typography>
</CardContent>
</Card>
</Grid>
<Grid item xs={12} md={2} lg={2} className={classes.gridItem}>
<Card className={classes.gridItem}>
<CardContent style={{ textAlign: 'center' }}>
<Typography style={{ fontSize:'18px' }}>Daily APR</Typography>
<Typography style={{ fontSize:'18px' }}>{bank.closedForStaking ? '0.00' : statsOnPool?.dailyAPR}%</Typography>
</CardContent>
</Card>
</Grid>
<Grid item xs={12} md={2} lg={2} className={classes.gridItem}>
<Card className={classes.gridItem}>
<CardContent style={{ textAlign: 'center' }}>
<Typography style={{ fontSize:'18px' }}>TVL</Typography>
<Typography style={{ fontSize:'18px' }}>${statsOnPool?.TVL}</Typography>
</CardContent>
</Card>
</Grid>
</Grid>
</Box>
<Box mt={5}>
<StyledBank>
<StyledCardsWrapper>
<StyledCardWrapper>
<Harvest bank={bank} />
</StyledCardWrapper>
<Spacer />
<StyledCardWrapper>{<Stake bank={bank} />}</StyledCardWrapper>
</StyledCardsWrapper>
<Spacer size="lg" />
{bank.depositTokenName.includes('LP') && <LPTokenHelpText bank={bank} />}
<Spacer size="lg" />
<div>
<Button onClick={onRedeem} color="primary" variant="contained">
Claim & Withdraw
</Button>
</div>
<Spacer size="lg" />
</StyledBank>
</Box>
</>
) : !bank ? (
<BankNotFound />
) : (
<UnlockWallet />
);
};
const LPTokenHelpText: React.FC<{ bank: BankEntity }> = ({ bank }) => {
const tombFinance = useTombFinance();
const tombAddr = tombFinance.LROAD.address;
const tshareAddr = tombFinance.LCREAM.address;
let pairName: string;
let uniswapUrl: string;
if (bank.depositTokenName.includes('LROAD-FTM')) {
pairName = 'LROAD-FTM pair';
uniswapUrl = 'https://spookyswap.finance/add/FTM/' + tombAddr;
} else if (bank.depositTokenName.includes('LCREAM-FTM')){
pairName = 'LCREAM-FTM pair';
uniswapUrl = 'https://spookyswap.finance/add/FTM/' + tshareAddr;
} else {
pairName = 'LROAD-LCREAM pair';
uniswapUrl = 'https://spookyswap.finance/add/' + tombAddr + '/' + tshareAddr;
}
return (
<Card>
<CardContent>
<StyledLink href={uniswapUrl} target="_blank">
{`👻 Provide liquidity for ${pairName} now on SpookySwap 👻`}
</StyledLink>
</CardContent>
</Card>
);
};
const BankNotFound = () => {
return (
<Center>
<PageHeader icon="🏚" title="Not Found" subtitle="You've hit a bank just robbed by unicorns." />
</Center>
);
};
const StyledBank = styled.div`
align-items: center; | @media (max-width: 768px) {
width: 100%;
}
`;
const StyledLink = styled.a`
font-weight: 700;
text-decoration: none;
color: ${(props) => props.theme.color.primary.main};
`;
const StyledCardsWrapper = styled.div`
display: flex;
width: 600px;
@media (max-width: 768px) {
width: 100%;
flex-flow: column nowrap;
align-items: center;
}
`;
const StyledCardWrapper = styled.div`
display: flex;
flex: 1;
flex-direction: column;
@media (max-width: 768px) {
width: 80%;
}
`;
const Center = styled.div`
display: flex;
flex: 1;
align-items: center;
justify-content: center;
`;
export default Bank;
setup.py

from setuptools import setup, find_packages
from truewho import __version__
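# Returns the file's contents as a single string, or as a list of stripped,
# non-empty lines when lines=True; returns None (after printing a notice) if
# the file cannot be read.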
def read_file(filename, lines=False):
try:
with open(filename, "r") as f:
if lines:
return [i.strip() for i in f.readlines() if (i.strip())]
return f.read()
    except OSError:
        print("Cannot read file:", filename)
        return None
long_description = read_file("README.md")
setup(
name="truewho",
version=__version__,
author="Ibrahim Rafi",
author_email="[email protected]",
license="MIT",
url="https://github.com/rafiibrahim8/truewho",
download_url="https://github.com/rafiibrahim8/truewho/archive/v{}.tar.gz".format(
__version__
),
install_requires=["phone-iso3166", "requests", "click"],
description="Check a phone number for name with Truecaller in command line.",
long_description=long_description,
long_description_content_type="text/markdown",
keywords=["truewho", "Truecaller", "Spam", "Call"],
packages=find_packages(),
entry_points=dict(console_scripts=["truewho=truewho.truewho:main"]),
platforms=["any"],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: End Users/Desktop",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
)
create_world_request.py

"""
VRChat API Documentation
The version of the OpenAPI document: 1.6.8
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from vrchatapi.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from vrchatapi.exceptions import ApiAttributeError
def lazy_import():
from vrchatapi.model.release_status import ReleaseStatus
from vrchatapi.model.tag import Tag
from vrchatapi.model.world_id import WorldID
globals()['ReleaseStatus'] = ReleaseStatus
globals()['Tag'] = Tag
globals()['WorldID'] = WorldID
class CreateWorldRequest(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
('asset_url',): {
'min_length': 1,
},
('image_url',): {
'min_length': 1,
},
('name',): {
'min_length': 1,
},
('asset_version',): {
'inclusive_minimum': 0,
},
('author_name',): {
'min_length': 1,
},
('capacity',): {
'inclusive_maximum': 40,
'inclusive_minimum': 0,
},
('unity_package_url',): {
'min_length': 1,
},
('unity_version',): {
'min_length': 1,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'asset_url': (str,), # noqa: E501
'image_url': (str,), # noqa: E501
'name': (str,), # noqa: E501
'asset_version': (int,), # noqa: E501
'author_id': (str,), # noqa: E501
'author_name': (str,), # noqa: E501
'capacity': (int,), # noqa: E501
'description': (str,), # noqa: E501
'id': (WorldID,), # noqa: E501
'platform': (str,), # noqa: E501
'release_status': (ReleaseStatus,), # noqa: E501
'tags': ([Tag],), # noqa: E501
'unity_package_url': (str,), # noqa: E501
'unity_version': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'asset_url': 'assetUrl', # noqa: E501
'image_url': 'imageUrl', # noqa: E501
'name': 'name', # noqa: E501
'asset_version': 'assetVersion', # noqa: E501
'author_id': 'authorId', # noqa: E501
'author_name': 'authorName', # noqa: E501
'capacity': 'capacity', # noqa: E501
'description': 'description', # noqa: E501
'id': 'id', # noqa: E501
'platform': 'platform', # noqa: E501
'release_status': 'releaseStatus', # noqa: E501
'tags': 'tags', # noqa: E501
'unity_package_url': 'unityPackageUrl', # noqa: E501
'unity_version': 'unityVersion', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
    def _from_openapi_data(cls, asset_url, image_url, name, *args, **kwargs):  # noqa: E501
"""CreateWorldRequest - a model defined in OpenAPI
Args:
asset_url (str):
image_url (str):
name (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
asset_version (int): [optional] # noqa: E501
            author_id (str): A user's unique ID, usually in the form of `usr_c1644b5b-3ca4-45b4-97c6-a2a0de70d469`. Legacy players can have old IDs in the form of `8JoV9XEdpo`. The ID can never be changed. [optional]  # noqa: E501
author_name (str): [optional] # noqa: E501
capacity (int): [optional] # noqa: E501
description (str): [optional] # noqa: E501
id (WorldID): [optional] # noqa: E501
            platform (str): This can be `standalonewindows` or `android`, but can also pretty much be any random Unity version such as `2019.2.4-801-Release` or `2019.2.2-772-Release` or even `unknownplatform`. [optional]  # noqa: E501
release_status (ReleaseStatus): [optional] # noqa: E501
tags ([Tag]): [optional] # noqa: E501
unity_package_url (str): [optional] # noqa: E501
unity_version (str): [optional] if omitted the server will use the default value of "5.3.4p1" # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.asset_url = asset_url
self.image_url = image_url
self.name = name
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, asset_url, image_url, name, *args, **kwargs): # noqa: E501
"""CreateWorldRequest - a model defined in OpenAPI
Args:
asset_url (str):
image_url (str):
name (str):
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
asset_version (int): [optional] # noqa: E501
            author_id (str): A user's unique ID, usually in the form of `usr_c1644b5b-3ca4-45b4-97c6-a2a0de70d469`. Legacy players can have old IDs in the form of `8JoV9XEdpo`. The ID can never be changed. [optional]  # noqa: E501
author_name (str): [optional] # noqa: E501
capacity (int): [optional] # noqa: E501
description (str): [optional] # noqa: E501
id (WorldID): [optional] # noqa: E501
            platform (str): This can be `standalonewindows` or `android`, but can also pretty much be any random Unity version such as `2019.2.4-801-Release` or `2019.2.2-772-Release` or even `unknownplatform`. [optional]  # noqa: E501
release_status (ReleaseStatus): [optional] # noqa: E501
tags ([Tag]): [optional] # noqa: E501
unity_package_url (str): [optional] # noqa: E501
unity_version (str): [optional] if omitted the server will use the default value of "5.3.4p1" # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
self.asset_url = asset_url
self.image_url = image_url
self.name = name
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
error.rs

use core::num::ParseIntError;
use core::mem::transmute;
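// These helpers construct std's ParseIntError by transmuting the raw
// discriminant byte of its private IntErrorKind field (0 = Empty,
// 1 = InvalidDigit, 2 = Overflow, 3 = Underflow, per the mapping used below).
// This relies on an unstable internal layout of libcore, so it can break on
// other compiler versions.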
pub fn invalid_digit() -> ParseIntError {
unsafe { transmute(1u8) }
}
pub fn underflow() -> ParseIntError {
unsafe { transmute(3u8) }
}
pub fn overflow() -> ParseIntError {
unsafe { transmute(2u8) }
}
pub fn empty() -> ParseIntError {
unsafe { transmute(0u8) }
}
pub fn is_overflow(e: &ParseIntError) -> bool {
*e == overflow()
}
#[cfg(test)]
mod tests {
    use error;
#[test]
fn test_local_parse_int_error_to_std() {
assert_fmt_eq!("invalid digit found in string", 29, "{}", error::invalid_digit());
assert_fmt_eq!("cannot parse integer from empty string", 38, "{}", error::empty());
assert_fmt_eq!("number too large to fit in target type", 38, "{}", error::overflow());
assert_fmt_eq!("number too small to fit in target type", 38, "{}", error::underflow());
}
}
router.go

package network
import (
"bytes"
"encoding/xml"
"io/ioutil"
"net"
"net/http"
"os"
)
type cbIPAddr func() (*net.IPAddr, error)
var (
routerExternalName string
routerExternalIP *net.IPAddr
routerInternalName string
routerInternalIP *net.IPAddr
cbRouterInternalIPs []cbIPAddr
)
func SetRouterExternalName(name string) {
routerExternalName = name
}
func SetRouterInternalIP(ip *net.IPAddr) {
routerInternalIP = ip
}
func SetRouterInternalName(name string) {
routerInternalName = name
routerInternalIP, _ = net.ResolveIPAddr("", name)
}
func GetRouterInternalIP() (routerIP *net.IPAddr, err error) {
if routerInternalIP != nil {
return routerInternalIP, nil
}
	if cbRouterInternalIPs == nil {
return nil, os.ErrNotExist
}
for _, cbRouterInternalIP := range cbRouterInternalIPs {
routerIP, err = cbRouterInternalIP()
if err == nil {
routerInternalIP = routerIP
return
}
}
return nil, os.ErrNotExist
}
func registerCbRouterInternalIP(cb cbIPAddr) {
cbRouterInternalIPs = append(cbRouterInternalIPs, cb)
}
////////////////////////////////////////////////////////////////////////////////
func GetRouterExternalIP() (routerIP *net.IPAddr, err error) {
if routerExternalIP != nil {
return routerExternalIP, nil
}
routerExternalIP, err = getRouterExternalIP()
return routerExternalIP, err
}
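// getRouterExternalIP tries a UPnP query against the gateway first and falls
// back to the public ipify web service.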
func getRouterExternalIP() (routerIP *net.IPAddr, err error) {
routerIP, err = GetRouterExternalIPUPNP()
if err == nil {
return routerIP, err
}
return GetRouterExternalIPIpify()
}
////////////////////////////////////////////////////////////////////////////////
func GetRouterExternalIPIpify() (publicIP *net.IPAddr, err error) {
res, err := http.Get("https://api.ipify.org")
if err != nil {
return nil, err
}
	defer res.Body.Close()
	ip, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
return net.ResolveIPAddr("", string(ip))
}
type getExternalIPAddressEnvelope struct {
Body getExternalIPAddressBody `xml:"Body"`
}
type getExternalIPAddressBody struct {
Response getExternalIPAddressResponse `xml:"GetExternalIPAddressResponse"`
}
type getExternalIPAddressResponse struct {
NewExternalIPAddress string `xml:"NewExternalIPAddress"`
}
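// GetRouterExternalIPUPNP asks the gateway's WANIPConnection:1 UPnP service
// for its external address via SOAP. The :49000 control URL is the one used
// by AVM FRITZ!Box routers; other gateways may expose it on a different port.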
func GetRouterExternalIPUPNP() (publicIP *net.IPAddr, err error) {
	internalIP, err := GetRouterInternalIP()
	if err != nil {
		return nil, err
	}
req, err := http.NewRequest("POST", "http://"+internalIP.String()+":49000/igdupnp/control/WANIPConn1", bytes.NewBufferString(`<?xml version='1.0' encoding='utf-8'?>
<s:Envelope s:encodingStyle='http://schemas.xmlsoap.org/soap/encoding/'
xmlns:s='http://schemas.xmlsoap.org/soap/envelope/'>
<s:Body>
<u:GetExternalIPAddress xmlns:u='urn:schemas-upnp-org:service:WANIPConnection:1' />
</s:Body>
</s:Envelope>`))
	if err != nil {
		return nil, err
	}
	req.Header.Add("Content-Type",
"text/xml; charset=\"utf-8\"")
req.Header.Add("SoapAction",
"urn:schemas-upnp-org:service:WANIPConnection:1#GetExternalIPAddress")
res, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
	defer res.Body.Close()
	data, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	env := getExternalIPAddressEnvelope{}
	err = xml.Unmarshal(data, &env)
	if err != nil {
		return nil, err
	}
return net.ResolveIPAddr("", env.Body.Response.NewExternalIPAddress)
}
signal.rs

#![no_std]
#![no_main]
#![feature(type_alias_impl_trait)]
use defmt::{info, unwrap};
use embassy::channel::signal::Signal;
use embassy::executor::Spawner;
use embassy::time::{Duration, Timer};
use embassy_stm32::Peripherals;
use {defmt_rtt as _, panic_probe as _};
static SIGNAL: Signal<u32> = Signal::new();
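// A Signal holds at most one value: each signal() overwrites any value that
// has not been consumed yet, and wait() completes as soon as a value is set.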
#[embassy::task]
async fn my_sending_task() {
let mut counter: u32 = 0;
loop {
Timer::after(Duration::from_secs(1)).await;
SIGNAL.signal(counter);
counter = counter.wrapping_add(1);
}
}
#[embassy::main]
async fn main(spawner: Spawner, _p: Peripherals) {
unwrap!(spawner.spawn(my_sending_task()));
loop {
let received_counter = SIGNAL.wait().await;
info!("signalled, counter: {}", received_counter);
}
}
globbound.rs

#[doc = "Reader of register GLOBBOUND"]
pub type R = crate::R<u32, super::GLOBBOUND>;
#[doc = "Writer for register GLOBBOUND"]
pub type W = crate::W<u32, super::GLOBBOUND>;
#[doc = "Register GLOBBOUND `reset()`'s with value 0"]
impl crate::ResetValue for super::GLOBBOUND {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `BOUNDARY0`"]
pub type BOUNDARY0_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `BOUNDARY0`"]
pub struct BOUNDARY0_W<'a> {
w: &'a mut W,
}
impl<'a> BOUNDARY0_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0fff) | ((value as u32) & 0x0fff);
self.w
}
}
#[doc = "Reader of field `BOUNDARY1`"]
pub type BOUNDARY1_R = crate::R<u16, u16>;
#[doc = "Write proxy for field `BOUNDARY1`"]
pub struct BOUNDARY1_W<'a> {
w: &'a mut W,
}
impl<'a> BOUNDARY1_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0fff << 16)) | (((value as u32) & 0x0fff) << 16);
self.w
}
}
impl R {
#[doc = "Bits 0:11 - Boundary Value 0 for Limit Checking"]
#[inline(always)]
pub fn boundary0(&self) -> BOUNDARY0_R {
BOUNDARY0_R::new((self.bits & 0x0fff) as u16)
}
#[doc = "Bits 16:27 - Boundary Value 1 for Limit Checking"]
#[inline(always)]
pub fn boundary1(&self) -> BOUNDARY1_R {
BOUNDARY1_R::new(((self.bits >> 16) & 0x0fff) as u16)
}
}
impl W {
#[doc = "Bits 0:11 - Boundary Value 0 for Limit Checking"]
#[inline(always)]
pub fn boundary0(&mut self) -> BOUNDARY0_W {
BOUNDARY0_W { w: self }
}
#[doc = "Bits 16:27 - Boundary Value 1 for Limit Checking"]
#[inline(always)]
    pub fn boundary1(&mut self) -> BOUNDARY1_W {
        BOUNDARY1_W { w: self }
    }
}
get_by_ids_request_builder.go

package getbyids
import (
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
)
// GetByIdsRequestBuilder provides operations to call the getByIds method.
type GetByIdsRequestBuilder struct {
// Path parameters for the request
pathParameters map[string]string
// The request adapter to use to execute the requests.
requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter
// Url template to use to build the URL for the current request builder
urlTemplate string
}
// GetByIdsRequestBuilderPostRequestConfiguration configuration for the request such as headers, query parameters, and middleware options.
type GetByIdsRequestBuilderPostRequestConfiguration struct {
// Request headers
Headers map[string]string
// Request options
Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption
}
// NewGetByIdsRequestBuilderInternal instantiates a new GetByIdsRequestBuilder and sets the default values.
func NewGetByIdsRequestBuilderInternal(pathParameters map[string]string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*GetByIdsRequestBuilder) {
m := &GetByIdsRequestBuilder{
}
m.urlTemplate = "{+baseurl}/organization/microsoft.graph.getByIds";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
m.pathParameters = urlTplParams;
m.requestAdapter = requestAdapter;
return m
}
// NewGetByIdsRequestBuilder instantiates a new GetByIdsRequestBuilder and sets the default values.
func NewGetByIdsRequestBuilder(rawUrl string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*GetByIdsRequestBuilder) {
urlParams := make(map[string]string)
urlParams["request-raw-url"] = rawUrl
return NewGetByIdsRequestBuilderInternal(urlParams, requestAdapter)
}
// CreatePostRequestInformation invoke action getByIds
func (m *GetByIdsRequestBuilder) CreatePostRequestInformation(body GetByIdsRequestBodyable)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
return m.CreatePostRequestInformationWithRequestConfiguration(body, nil);
}
// CreatePostRequestInformationWithRequestConfiguration invoke action getByIds
func (m *GetByIdsRequestBuilder) CreatePostRequestInformationWithRequestConfiguration(body GetByIdsRequestBodyable, requestConfiguration *GetByIdsRequestBuilderPostRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.POST
requestInfo.SetContentFromParsable(m.requestAdapter, "application/json", body)
if requestConfiguration != nil {
requestInfo.AddRequestHeaders(requestConfiguration.Headers)
requestInfo.AddRequestOptions(requestConfiguration.Options)
}
return requestInfo, nil
}
// Post invoke action getByIds
func (m *GetByIdsRequestBuilder) Post(body GetByIdsRequestBodyable)(GetByIdsResponseable, error) {
return m.PostWithRequestConfigurationAndResponseHandler(body, nil, nil);
}
// PostWithRequestConfigurationAndResponseHandler invoke action getByIds
func (m *GetByIdsRequestBuilder) PostWithRequestConfigurationAndResponseHandler(body GetByIdsRequestBodyable, requestConfiguration *GetByIdsRequestBuilderPostRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(GetByIdsResponseable, error) {
requestInfo, err := m.CreatePostRequestInformationWithRequestConfiguration(body, requestConfiguration);
if err != nil {
return nil, err
}
res, err := m.requestAdapter.SendAsync(requestInfo, CreateGetByIdsResponseFromDiscriminatorValue, responseHandler, nil)
if err != nil {
return nil, err
}
return res.(GetByIdsResponseable), nil
}
database_constants.py

#!/usr/bin/env python3
# -*-encoding: utf-8-*-
# created: 25.11.2019
# by David Zashkolny
# 3 course, comp math
# Taras Shevchenko National University of Kyiv
# email: [email protected]
TEXT = 0
IMAGE = 1
AUDIO = 2
VIDEO = 3
DOCUMENT = 4
MESSAGE_TYPES = {
TEXT,
IMAGE,
AUDIO,
VIDEO,
DOCUMENT,
}
CHANNELS = "Channels"
CHATS = "Chats"
USERS_CHATS = 'UsersChats'
USERS_CHANNELS = 'UsersChannels'
PRIVATE = 1
PUBLIC = 0
attribute.py

__author__ = 'Alex Gusev <[email protected]>'
import prxgt.const as const
from prxgt.domain.meta.attribute import Attribute as AttributeBase
class Attribute(AttributeBase):
"""
Attribute model contains data.
"""
def __init__(self, name=None, type_=None, value=None):
super(Attribute, self).__init__(name, type_)
self._value = value
return
@property
def value(self):
return self._value
@value.setter
    def value(self, val):
self._value = val
@property
def meta(self):
"""
META Attribute (name and type only)
:return:
"""
return AttributeBase(self._name, self._type)
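    # The repr appends the value inline; long text values are truncated so
    # that dumps of many attributes stay readable.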
def __repr__(self):
result = super(Attribute, self).__repr__()
        if (self.value is not None) and (self.type == const.ATTR_TYPE_TXT):
            # text values are shown truncated: [name@text='valu...']
            result += "=" + repr(self.value[:4] + "...")
        else:
            # other values are shown in full: [name@type='value']
            result += "=" + repr(self.value)
        return result
extHost.protocol.ts

/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { SerializedError } from 'vs/base/common/errors';
import { IDisposable } from 'vs/base/common/lifecycle';
import Severity from 'vs/base/common/severity';
import URI, { UriComponents } from 'vs/base/common/uri';
import { TPromise } from 'vs/base/common/winjs.base';
import { TextEditorCursorStyle } from 'vs/editor/common/config/editorOptions';
import { IPosition } from 'vs/editor/common/core/position';
import { IRange } from 'vs/editor/common/core/range';
import { ISelection, Selection } from 'vs/editor/common/core/selection';
import * as editorCommon from 'vs/editor/common/editorCommon';
import { ISingleEditOperation } from 'vs/editor/common/model';
import { IModelChangedEvent } from 'vs/editor/common/model/mirrorTextModel';
import * as modes from 'vs/editor/common/modes';
import { CharacterPair, CommentRule, EnterAction } from 'vs/editor/common/modes/languageConfiguration';
import { ICommandHandlerDescription } from 'vs/platform/commands/common/commands';
import { ConfigurationTarget, IConfigurationData, IConfigurationModel } from 'vs/platform/configuration/common/configuration';
import { ConfigurationScope } from 'vs/platform/configuration/common/configurationRegistry';
import { FileChangeType, FileDeleteOptions, FileOverwriteOptions, FileSystemProviderCapabilities, FileType, FileWriteOptions, IStat, IWatchOptions } from 'vs/platform/files/common/files';
import { LogLevel } from 'vs/platform/log/common/log';
import { IMarkerData } from 'vs/platform/markers/common/markers';
import { IPickOptions, IQuickInputButton, IQuickPickItem } from 'vs/platform/quickinput/common/quickInput';
import { IPatternInfo, IQueryOptions, IRawFileMatch2, IRawSearchQuery, ISearchCompleteStats } from 'vs/platform/search/common/search';
import { StatusbarAlignment as MainThreadStatusBarAlignment } from 'vs/platform/statusbar/common/statusbar';
import { ITelemetryInfo } from 'vs/platform/telemetry/common/telemetry';
import { ThemeColor } from 'vs/platform/theme/common/themeService';
import { EndOfLine, IFileOperationOptions, TextEditorLineNumbersStyle } from 'vs/workbench/api/node/extHostTypes';
import { EditorViewColumn } from 'vs/workbench/api/shared/editor';
import { TaskDTO, TaskExecutionDTO, TaskFilterDTO, TaskHandleDTO, TaskProcessEndedDTO, TaskProcessStartedDTO, TaskSystemInfoDTO } from 'vs/workbench/api/shared/tasks';
import { ITreeItem } from 'vs/workbench/common/views';
import { IAdapterExecutable, IConfig, ITerminalSettings } from 'vs/workbench/parts/debug/common/debug';
import { TaskSet } from 'vs/workbench/parts/tasks/common/tasks';
import { ITerminalDimensions } from 'vs/workbench/parts/terminal/common/terminal';
import { IExtensionDescription } from 'vs/workbench/services/extensions/common/extensions';
import { createExtHostContextProxyIdentifier as createExtId, createMainContextProxyIdentifier as createMainId, IRPCProtocol, ProxyIdentifier } from 'vs/workbench/services/extensions/node/proxyIdentifier';
import { IProgressOptions, IProgressStep } from 'vs/workbench/services/progress/common/progress';
import { SaveReason } from 'vs/workbench/services/textfile/common/textfiles';
import * as vscode from 'vscode';
export interface IEnvironment {
isExtensionDevelopmentDebug: boolean;
appRoot: string;
appSettingsHome: string;
extensionDevelopmentPath: string;
extensionTestsPath: string;
}
export interface IWorkspaceData {
id: string;
name: string;
folders: { uri: UriComponents, name: string, index: number }[];
configuration?: UriComponents;
}
export interface IInitData {
parentPid: number;
environment: IEnvironment;
workspace: IWorkspaceData;
extensions: IExtensionDescription[];
configuration: IConfigurationInitData;
telemetryInfo: ITelemetryInfo;
windowId: number;
logLevel: LogLevel;
logsPath: string;
}
export interface IConfigurationInitData extends IConfigurationData {
configurationScopes: { [key: string]: ConfigurationScope };
}
export interface IWorkspaceConfigurationChangeEventData {
changedConfiguration: IConfigurationModel;
changedConfigurationByResource: { [folder: string]: IConfigurationModel };
}
export interface IExtHostContext extends IRPCProtocol {
}
export interface IMainContext extends IRPCProtocol {
}
// --- main thread
export interface MainThreadCommandsShape extends IDisposable {
$registerCommand(id: string): void;
$unregisterCommand(id: string): void;
$executeCommand<T>(id: string, args: any[]): Thenable<T>;
$getCommands(): Thenable<string[]>;
}
export interface MainThreadCommentsShape extends IDisposable {
$registerDocumentCommentProvider(handle: number): void;
$unregisterDocumentCommentProvider(handle: number): void;
$registerWorkspaceCommentProvider(handle: number): void;
$unregisterWorkspaceCommentProvider(handle: number): void;
$onDidCommentThreadsChange(handle: number, event: modes.CommentThreadChangedEvent): void;
}
export interface MainThreadConfigurationShape extends IDisposable {
$updateConfigurationOption(target: ConfigurationTarget, key: string, value: any, resource: UriComponents): TPromise<void>;
$removeConfigurationOption(target: ConfigurationTarget, key: string, resource: UriComponents): TPromise<void>;
}
export interface MainThreadDiagnosticsShape extends IDisposable {
$changeMany(owner: string, entries: [UriComponents, IMarkerData[]][]): void;
$clear(owner: string): void;
}
export interface MainThreadDialogOpenOptions {
defaultUri?: UriComponents;
openLabel?: string;
canSelectFiles?: boolean;
canSelectFolders?: boolean;
canSelectMany?: boolean;
filters?: { [name: string]: string[] };
}
export interface MainThreadDialogSaveOptions {
defaultUri?: UriComponents;
saveLabel?: string;
filters?: { [name: string]: string[] };
}
export interface MainThreadDiaglogsShape extends IDisposable {
$showOpenDialog(options: MainThreadDialogOpenOptions): Thenable<string[]>;
$showSaveDialog(options: MainThreadDialogSaveOptions): Thenable<string>;
}
export interface MainThreadDecorationsShape extends IDisposable {
$registerDecorationProvider(handle: number, label: string): void;
$unregisterDecorationProvider(handle: number): void;
$onDidChange(handle: number, resources: UriComponents[]): void;
}
export interface MainThreadDocumentContentProvidersShape extends IDisposable {
$registerTextContentProvider(handle: number, scheme: string): void;
$unregisterTextContentProvider(handle: number): void;
$onVirtualDocumentChange(uri: UriComponents, value: string): void;
}
export interface MainThreadDocumentsShape extends IDisposable {
$tryCreateDocument(options?: { language?: string; content?: string; }): TPromise<UriComponents>;
$tryOpenDocument(uri: UriComponents): TPromise<void>;
$trySaveDocument(uri: UriComponents): TPromise<boolean>;
}
export interface ITextEditorConfigurationUpdate {
tabSize?: number | 'auto';
insertSpaces?: boolean | 'auto';
cursorStyle?: TextEditorCursorStyle;
lineNumbers?: TextEditorLineNumbersStyle;
}
export interface IResolvedTextEditorConfiguration {
tabSize: number;
insertSpaces: boolean;
cursorStyle: TextEditorCursorStyle;
lineNumbers: TextEditorLineNumbersStyle;
}
export enum TextEditorRevealType {
Default = 0,
InCenter = 1,
InCenterIfOutsideViewport = 2,
AtTop = 3
}
export interface IUndoStopOptions {
undoStopBefore: boolean;
undoStopAfter: boolean;
}
export interface IApplyEditsOptions extends IUndoStopOptions {
setEndOfLine: EndOfLine;
}
export interface ITextDocumentShowOptions {
position?: EditorViewColumn;
preserveFocus?: boolean;
pinned?: boolean;
selection?: IRange;
}
export interface MainThreadTextEditorsShape extends IDisposable {
$tryShowTextDocument(resource: UriComponents, options: ITextDocumentShowOptions): TPromise<string>;
$registerTextEditorDecorationType(key: string, options: editorCommon.IDecorationRenderOptions): void;
$removeTextEditorDecorationType(key: string): void;
$tryShowEditor(id: string, position: EditorViewColumn): TPromise<void>;
$tryHideEditor(id: string): TPromise<void>;
$trySetOptions(id: string, options: ITextEditorConfigurationUpdate): TPromise<void>;
$trySetDecorations(id: string, key: string, ranges: editorCommon.IDecorationOptions[]): TPromise<void>;
$trySetDecorationsFast(id: string, key: string, ranges: number[]): TPromise<void>;
$tryRevealRange(id: string, range: IRange, revealType: TextEditorRevealType): TPromise<void>;
$trySetSelections(id: string, selections: ISelection[]): TPromise<void>;
$tryApplyEdits(id: string, modelVersionId: number, edits: ISingleEditOperation[], opts: IApplyEditsOptions): TPromise<boolean>;
$tryApplyWorkspaceEdit(workspaceEditDto: WorkspaceEditDto): TPromise<boolean>;
$tryInsertSnippet(id: string, template: string, selections: IRange[], opts: IUndoStopOptions): TPromise<boolean>;
$getDiffInformation(id: string): TPromise<editorCommon.ILineChange[]>;
}
export interface MainThreadTreeViewsShape extends IDisposable {
$registerTreeViewDataProvider(treeViewId: string): void;
$refresh(treeViewId: string, itemsToRefresh?: { [treeItemHandle: string]: ITreeItem }): TPromise<void>;
$reveal(treeViewId: string, treeItem: ITreeItem, parentChain: ITreeItem[], options: { select: boolean, focus: boolean }): TPromise<void>;
}
export interface MainThreadErrorsShape extends IDisposable {
$onUnexpectedError(err: any | SerializedError): void;
}
export interface ISerializedRegExp {
pattern: string;
flags?: string;
}
export interface ISerializedIndentationRule {
decreaseIndentPattern: ISerializedRegExp;
increaseIndentPattern: ISerializedRegExp;
indentNextLinePattern?: ISerializedRegExp;
unIndentedLinePattern?: ISerializedRegExp;
}
export interface ISerializedOnEnterRule {
beforeText: ISerializedRegExp;
afterText?: ISerializedRegExp;
action: EnterAction;
}
export interface ISerializedLanguageConfiguration {
comments?: CommentRule;
brackets?: CharacterPair[];
wordPattern?: ISerializedRegExp;
indentationRules?: ISerializedIndentationRule;
onEnterRules?: ISerializedOnEnterRule[];
__electricCharacterSupport?: {
brackets?: any;
docComment?: {
scope: string;
open: string;
lineStart: string;
close?: string;
};
};
__characterPairSupport?: {
autoClosingPairs: {
open: string;
close: string;
notIn?: string[];
}[];
};
}
export interface ISerializedDocumentFilter {
$serialized: true;
language?: string;
scheme?: string;
pattern?: vscode.GlobPattern;
exclusive?: boolean;
}
export interface MainThreadLanguageFeaturesShape extends IDisposable {
$unregister(handle: number): void;
$registerOutlineSupport(handle: number, selector: ISerializedDocumentFilter[], extensionId: string): void;
$registerCodeLensSupport(handle: number, selector: ISerializedDocumentFilter[], eventHandle: number): void;
$emitCodeLensEvent(eventHandle: number, event?: any): void;
$registerDeclaractionSupport(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerImplementationSupport(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerTypeDefinitionSupport(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerHoverProvider(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerDocumentHighlightProvider(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerReferenceSupport(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerQuickFixSupport(handle: number, selector: ISerializedDocumentFilter[], supportedKinds?: string[]): void;
$registerDocumentFormattingSupport(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerRangeFormattingSupport(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerOnTypeFormattingSupport(handle: number, selector: ISerializedDocumentFilter[], autoFormatTriggerCharacters: string[]): void;
$registerNavigateTypeSupport(handle: number): void;
$registerRenameSupport(handle: number, selector: ISerializedDocumentFilter[], supportsResolveInitialValues: boolean): void;
$registerSuggestSupport(handle: number, selector: ISerializedDocumentFilter[], triggerCharacters: string[], supportsResolveDetails: boolean): void;
$registerSignatureHelpProvider(handle: number, selector: ISerializedDocumentFilter[], triggerCharacter: string[]): void;
$registerDocumentLinkProvider(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerDocumentColorProvider(handle: number, selector: ISerializedDocumentFilter[]): void;
$registerFoldingRangeProvider(handle: number, selector: ISerializedDocumentFilter[]): void;
$setLanguageConfiguration(handle: number, languageId: string, configuration: ISerializedLanguageConfiguration): void;
}
export interface MainThreadLanguagesShape extends IDisposable {
$getLanguages(): TPromise<string[]>;
}
export interface MainThreadMessageOptions {
extension?: IExtensionDescription;
modal?: boolean;
}
export interface MainThreadMessageServiceShape extends IDisposable {
$showMessage(severity: Severity, message: string, options: MainThreadMessageOptions, commands: { title: string; isCloseAffordance: boolean; handle: number; }[]): Thenable<number>;
}
export interface MainThreadOutputServiceShape extends IDisposable {
$append(channelId: string, label: string, value: string): TPromise<void>;
$clear(channelId: string, label: string): TPromise<void>;
$dispose(channelId: string, label: string): TPromise<void>;
$reveal(channelId: string, label: string, preserveFocus: boolean): TPromise<void>;
$close(channelId: string): TPromise<void>;
}
export interface MainThreadProgressShape extends IDisposable {
$startProgress(handle: number, options: IProgressOptions): void;
$progressReport(handle: number, message: IProgressStep): void;
$progressEnd(handle: number): void;
}
export interface MainThreadTerminalServiceShape extends IDisposable {
$createTerminal(name?: string, shellPath?: string, shellArgs?: string[], cwd?: string, env?: { [key: string]: string }, waitOnExit?: boolean): TPromise<number>;
$createTerminalRenderer(name: string): TPromise<number>;
$dispose(terminalId: number): void;
$hide(terminalId: number): void;
$sendText(terminalId: number, text: string, addNewLine: boolean): void;
$show(terminalId: number, preserveFocus: boolean): void;
$registerOnDataListener(terminalId: number): void;
// Process
$sendProcessTitle(terminalId: number, title: string): void;
$sendProcessData(terminalId: number, data: string): void;
$sendProcessPid(terminalId: number, pid: number): void;
$sendProcessExit(terminalId: number, exitCode: number): void;
// Renderer
$terminalRendererSetName(terminalId: number, name: string): void;
$terminalRendererSetDimensions(terminalId: number, dimensions: ITerminalDimensions): void;
$terminalRendererWrite(terminalId: number, text: string): void;
$terminalRendererRegisterOnInputListener(terminalId: number): void;
}
export interface TransferQuickPickItems extends IQuickPickItem {
handle: number;
}
export interface TransferQuickInputButton extends IQuickInputButton {
handle: number;
}
export type TransferQuickInput = TransferQuickPick | TransferInputBox;
export interface BaseTransferQuickInput {
id: number;
type?: 'quickPick' | 'inputBox';
enabled?: boolean;
busy?: boolean;
visible?: boolean;
}
export interface TransferQuickPick extends BaseTransferQuickInput {
type?: 'quickPick';
value?: string;
placeholder?: string;
buttons?: TransferQuickInputButton[];
items?: TransferQuickPickItems[];
activeItems?: number[];
selectedItems?: number[];
canSelectMany?: boolean;
ignoreFocusOut?: boolean;
matchOnDescription?: boolean;
matchOnDetail?: boolean;
}
export interface TransferInputBox extends BaseTransferQuickInput {
type?: 'inputBox';
value?: string;
placeholder?: string;
password?: boolean;
buttons?: TransferQuickInputButton[];
prompt?: string;
validationMessage?: string;
}
export interface MainThreadQuickOpenShape extends IDisposable {
$show(options: IPickOptions<TransferQuickPickItems>): TPromise<number | number[]>;
$setItems(items: TransferQuickPickItems[]): TPromise<any>;
$setError(error: Error): TPromise<any>;
$input(options: vscode.InputBoxOptions, validateInput: boolean): TPromise<string>;
$createOrUpdate(params: TransferQuickInput): TPromise<void>;
$dispose(id: number): TPromise<void>;
}
export interface MainThreadStatusBarShape extends IDisposable {
$setEntry(id: number, extensionId: string, text: string, tooltip: string, command: string, color: string | ThemeColor, alignment: MainThreadStatusBarAlignment, priority: number): void;
$dispose(id: number): void;
}
export interface MainThreadStorageShape extends IDisposable {
$getValue<T>(shared: boolean, key: string): TPromise<T>;
$setValue(shared: boolean, key: string, value: any): TPromise<void>;
}
export interface MainThreadTelemetryShape extends IDisposable {
$publicLog(eventName: string, data?: any): void;
}
export type WebviewPanelHandle = string;
export interface WebviewPanelShowOptions {
	readonly viewColumn?: EditorViewColumn;
readonly preserveFocus?: boolean;
}
export interface MainThreadWebviewsShape extends IDisposable {
$createWebviewPanel(handle: WebviewPanelHandle, viewType: string, title: string, showOptions: WebviewPanelShowOptions, options: vscode.WebviewPanelOptions & vscode.WebviewOptions, extensionLocation: UriComponents): void;
$disposeWebview(handle: WebviewPanelHandle): void;
$reveal(handle: WebviewPanelHandle, showOptions: WebviewPanelShowOptions): void;
$setTitle(handle: WebviewPanelHandle, value: string): void;
$setIconPath(handle: WebviewPanelHandle, value: { light: UriComponents, dark: UriComponents } | undefined): void;
$setHtml(handle: WebviewPanelHandle, value: string): void;
$setOptions(handle: WebviewPanelHandle, options: vscode.WebviewOptions): void;
$postMessage(handle: WebviewPanelHandle, value: any): Thenable<boolean>;
$registerSerializer(viewType: string): void;
$unregisterSerializer(viewType: string): void;
}
export interface WebviewPanelViewState {
readonly active: boolean;
readonly visible: boolean;
readonly position: EditorViewColumn;
}
export interface ExtHostWebviewsShape {
$onMessage(handle: WebviewPanelHandle, message: any): void;
$onDidChangeWebviewPanelViewState(handle: WebviewPanelHandle, newState: WebviewPanelViewState): void;
$onDidDisposeWebviewPanel(handle: WebviewPanelHandle): Thenable<void>;
$deserializeWebviewPanel(newWebviewHandle: WebviewPanelHandle, viewType: string, title: string, state: any, position: EditorViewColumn, options: vscode.WebviewOptions): Thenable<void>;
}
export interface MainThreadUrlsShape extends IDisposable {
$registerUriHandler(handle: number, extensionId: string): TPromise<void>;
$unregisterUriHandler(handle: number): TPromise<void>;
}
export interface ExtHostUrlsShape {
$handleExternalUri(handle: number, uri: UriComponents): TPromise<void>;
}
export interface MainThreadWorkspaceShape extends IDisposable {
$startFileSearch(includePattern: string, includeFolder: string, excludePatternOrDisregardExcludes: string | false, maxResults: number, requestId: number): Thenable<UriComponents[]>;
$startTextSearch(query: IPatternInfo, options: IQueryOptions, requestId: number): TPromise<void>;
$cancelSearch(requestId: number): Thenable<boolean>;
$saveAll(includeUntitled?: boolean): Thenable<boolean>;
$updateWorkspaceFolders(extensionName: string, index: number, deleteCount: number, workspaceFoldersToAdd: { uri: UriComponents, name?: string }[]): Thenable<void>;
}
export interface IFileChangeDto {
resource: UriComponents;
type: FileChangeType;
}
export interface MainThreadFileSystemShape extends IDisposable {
$registerFileSystemProvider(handle: number, scheme: string, capabilities: FileSystemProviderCapabilities): void;
$unregisterProvider(handle: number): void;
$onFileSystemChange(handle: number, resource: IFileChangeDto[]): void;
}
export interface MainThreadSearchShape extends IDisposable {
$registerFileSearchProvider(handle: number, scheme: string): void;
$registerTextSearchProvider(handle: number, scheme: string): void;
$registerFileIndexProvider(handle: number, scheme: string): void;
$unregisterProvider(handle: number): void;
$handleFileMatch(handle: number, session: number, data: UriComponents[]): void;
$handleTextMatch(handle: number, session: number, data: IRawFileMatch2[]): void;
$handleTelemetry(eventName: string, data: any): void;
}
export interface MainThreadTaskShape extends IDisposable {
$registerTaskProvider(handle: number): TPromise<void>;
$unregisterTaskProvider(handle: number): TPromise<void>;
$fetchTasks(filter?: TaskFilterDTO): TPromise<TaskDTO[]>;
$executeTask(task: TaskHandleDTO | TaskDTO): TPromise<TaskExecutionDTO>;
$terminateTask(id: string): TPromise<void>;
$registerTaskSystem(scheme: string, info: TaskSystemInfoDTO): void;
}
export interface MainThreadExtensionServiceShape extends IDisposable {
$localShowMessage(severity: Severity, msg: string): void;
$onExtensionActivated(extensionId: string, startup: boolean, codeLoadingTime: number, activateCallTime: number, activateResolvedTime: number, activationEvent: string): void;
$onExtensionActivationFailed(extensionId: string): void;
$onExtensionRuntimeError(extensionId: string, error: SerializedError): void;
$addMessage(extensionId: string, severity: Severity, message: string): void;
}
export interface SCMProviderFeatures {
hasQuickDiffProvider?: boolean;
count?: number;
commitTemplate?: string;
acceptInputCommand?: modes.Command;
statusBarCommands?: modes.Command[];
}
export interface SCMGroupFeatures {
hideWhenEmpty?: boolean;
}
export type SCMRawResource = [
number /*handle*/,
UriComponents /*resourceUri*/,
string[] /*icons: light, dark*/,
string /*tooltip*/,
boolean /*strike through*/,
boolean /*faded*/,
string | undefined /*source*/,
string | undefined /*letter*/,
ThemeColor | null /*color*/
];
export type SCMRawResourceSplice = [
number /* start */,
number /* delete count */,
SCMRawResource[]
];
export type SCMRawResourceSplices = [
number, /*handle*/
SCMRawResourceSplice[]
];
export interface MainThreadSCMShape extends IDisposable {
$registerSourceControl(handle: number, id: string, label: string, rootUri: UriComponents | undefined): void;
$updateSourceControl(handle: number, features: SCMProviderFeatures): void;
$unregisterSourceControl(handle: number): void;
$registerGroup(sourceControlHandle: number, handle: number, id: string, label: string): void;
$updateGroup(sourceControlHandle: number, handle: number, features: SCMGroupFeatures): void;
$updateGroupLabel(sourceControlHandle: number, handle: number, label: string): void;
$unregisterGroup(sourceControlHandle: number, handle: number): void;
$spliceResourceStates(sourceControlHandle: number, splices: SCMRawResourceSplices[]): void;
$setInputBoxValue(sourceControlHandle: number, value: string): void;
$setInputBoxPlaceholder(sourceControlHandle: number, placeholder: string): void;
$setValidationProviderIsEnabled(sourceControlHandle: number, enabled: boolean): void;
}
// {{SQL CARBON EDIT}}
/*
export type DebugSessionUUID = string;
export interface MainThreadDebugServiceShape extends IDisposable {
$registerDebugTypes(debugTypes: string[]): void;
$acceptDAMessage(handle: number, message: DebugProtocol.ProtocolMessage): void;
$acceptDAError(handle: number, name: string, message: string, stack: string): void;
$acceptDAExit(handle: number, code: number, signal: string): void;
$registerDebugConfigurationProvider(type: string, hasProvideMethod: boolean, hasResolveMethod: boolean, hasDebugAdapterExecutable: boolean, handle: number): TPromise<any>;
$unregisterDebugConfigurationProvider(handle: number): TPromise<any>;
$startDebugging(folder: UriComponents | undefined, nameOrConfig: string | vscode.DebugConfiguration): TPromise<boolean>;
$customDebugAdapterRequest(id: DebugSessionUUID, command: string, args: any): TPromise<any>;
$appendDebugConsole(value: string): TPromise<any>;
$startBreakpointEvents(): TPromise<any>;
$registerBreakpoints(breakpoints: (ISourceMultiBreakpointDto | IFunctionBreakpointDto)[]): TPromise<void>;
$unregisterBreakpoints(breakpointIds: string[], functionBreakpointIds: string[]): TPromise<void>;
}
// {{SQL CARBON EDIT}}
*/
export interface MainThreadWindowShape extends IDisposable {
$getWindowVisibility(): TPromise<boolean>;
}
// -- extension host
export interface ExtHostCommandsShape {
$executeContributedCommand<T>(id: string, ...args: any[]): Thenable<T>;
$getContributedCommandHandlerDescriptions(): Thenable<{ [id: string]: string | ICommandHandlerDescription }>;
}
export interface ExtHostConfigurationShape {
$acceptConfigurationChanged(data: IConfigurationData, eventData: IWorkspaceConfigurationChangeEventData): void;
}
export interface ExtHostDiagnosticsShape {
}
export interface ExtHostDocumentContentProvidersShape {
$provideTextDocumentContent(handle: number, uri: UriComponents): TPromise<string>;
}
export interface IModelAddedData {
uri: UriComponents;
versionId: number;
lines: string[];
EOL: string;
modeId: string;
isDirty: boolean;
}
export interface ExtHostDocumentsShape {
$acceptModelModeChanged(strURL: UriComponents, oldModeId: string, newModeId: string): void;
$acceptModelSaved(strURL: UriComponents): void;
$acceptDirtyStateChanged(strURL: UriComponents, isDirty: boolean): void;
$acceptModelChanged(strURL: UriComponents, e: IModelChangedEvent, isDirty: boolean): void;
}
export interface ExtHostDocumentSaveParticipantShape {
$participateInSave(resource: UriComponents, reason: SaveReason): Thenable<boolean[]>;
}
export interface ITextEditorAddData {
id: string;
documentUri: UriComponents;
options: IResolvedTextEditorConfiguration;
selections: ISelection[];
visibleRanges: IRange[];
editorPosition: EditorViewColumn;
}
export interface ITextEditorPositionData {
[id: string]: EditorViewColumn;
}
export interface IEditorPropertiesChangeData {
options: IResolvedTextEditorConfiguration | null;
selections: ISelectionChangeEvent | null;
visibleRanges: IRange[] | null;
}
export interface ISelectionChangeEvent {
selections: Selection[];
source?: string;
}
export interface ExtHostEditorsShape {
$acceptEditorPropertiesChanged(id: string, props: IEditorPropertiesChangeData): void;
$acceptEditorPositionData(data: ITextEditorPositionData): void;
}
export interface IDocumentsAndEditorsDelta {
removedDocuments?: UriComponents[];
addedDocuments?: IModelAddedData[];
removedEditors?: string[];
addedEditors?: ITextEditorAddData[];
newActiveEditor?: string;
}
export interface ExtHostDocumentsAndEditorsShape {
$acceptDocumentsAndEditorsDelta(delta: IDocumentsAndEditorsDelta): void;
}
export interface ExtHostTreeViewsShape {
$getChildren(treeViewId: string, treeItemHandle?: string): TPromise<ITreeItem[]>;
$setExpanded(treeViewId: string, treeItemHandle: string, expanded: boolean): void;
$setSelection(treeViewId: string, treeItemHandles: string[]): void;
$setVisible(treeViewId: string, visible: boolean): void;
}
export interface ExtHostWorkspaceShape {
$acceptWorkspaceData(workspace: IWorkspaceData): void;
$handleTextSearchResult(result: IRawFileMatch2, requestId: number): void;
}
export interface ExtHostFileSystemShape {
$stat(handle: number, resource: UriComponents): TPromise<IStat>;
$readdir(handle: number, resource: UriComponents): TPromise<[string, FileType][]>;
$readFile(handle: number, resource: UriComponents): TPromise<string>;
$writeFile(handle: number, resource: UriComponents, base64Encoded: string, opts: FileWriteOptions): TPromise<void>;
$rename(handle: number, resource: UriComponents, target: UriComponents, opts: FileOverwriteOptions): TPromise<void>;
$copy(handle: number, resource: UriComponents, target: UriComponents, opts: FileOverwriteOptions): TPromise<void>;
$mkdir(handle: number, resource: UriComponents): TPromise<void>;
$delete(handle: number, resource: UriComponents, opts: FileDeleteOptions): TPromise<void>;
$watch(handle: number, session: number, resource: UriComponents, opts: IWatchOptions): void;
$unwatch(handle: number, session: number): void;
}
export interface ExtHostSearchShape {
$provideFileSearchResults(handle: number, session: number, query: IRawSearchQuery): TPromise<ISearchCompleteStats>;
$clearCache(cacheKey: string): TPromise<void>;
$provideTextSearchResults(handle: number, session: number, pattern: IPatternInfo, query: IRawSearchQuery): TPromise<ISearchCompleteStats>;
}
export interface ExtHostExtensionServiceShape {
$activateByEvent(activationEvent: string): TPromise<void>;
}
export interface FileSystemEvents {
created: UriComponents[];
changed: UriComponents[];
deleted: UriComponents[];
}
export interface ExtHostFileSystemEventServiceShape {
$onFileEvent(events: FileSystemEvents): void;
$onFileRename(oldUri: UriComponents, newUri: UriComponents): void;
$onWillRename(oldUri: UriComponents, newUri: UriComponents): TPromise<any>;
}
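// ObjectIdentifier tags an object with a hidden numeric id so both sides of
// the RPC boundary can refer to the same instance.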
export interface ObjectIdentifier {
$ident: number;
}
export namespace ObjectIdentifier {
export const name = '$ident';
export function mixin<T>(obj: T, id: number): T & ObjectIdentifier {
Object.defineProperty(obj, name, { value: id, enumerable: true });
return <T & ObjectIdentifier>obj;
}
export function of(obj: any): number {
return obj[name];
}
}
export interface ExtHostHeapServiceShape {
$onGarbageCollection(ids: number[]): void;
}
export interface IRawColorInfo {
color: [number, number, number, number];
range: IRange;
}
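// IdObject stamps result objects with a monotonically increasing id, which the
// main thread later passes back to the matching $resolve*/$release* calls.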
export class IdObject {
_id?: number;
private static _n = 0;
static mixin<T extends object>(object: T): T & IdObject {
(<any>object)._id = IdObject._n++;
return <any>object;
}
}
export interface SuggestionDto extends modes.ISuggestion {
_id: number;
_parentId: number;
}
export interface SuggestResultDto extends IdObject {
suggestions: SuggestionDto[];
incomplete?: boolean;
}
export interface LocationDto {
uri: UriComponents;
range: IRange;
}
export interface DefinitionLinkDto {
origin?: IRange;
uri: UriComponents;
range: IRange;
selectionRange?: IRange;
}
export interface WorkspaceSymbolDto extends IdObject {
name: string;
containerName?: string;
kind: modes.SymbolKind;
location: LocationDto;
}
export interface WorkspaceSymbolsDto extends IdObject {
symbols: WorkspaceSymbolDto[];
}
export interface ResourceFileEditDto {
oldUri: UriComponents;
newUri: UriComponents;
options: IFileOperationOptions;
}
export interface ResourceTextEditDto {
resource: UriComponents;
modelVersionId?: number;
edits: modes.TextEdit[];
}
export interface WorkspaceEditDto {
edits: (ResourceFileEditDto | ResourceTextEditDto)[];
// todo@joh reject should go into rename
rejectReason?: string;
}
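// Revives a WorkspaceEditDto received over RPC: the plain UriComponents in each
// edit are converted back into URI instances before the edit is applied.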
export function reviveWorkspaceEditDto(data: WorkspaceEditDto): modes.WorkspaceEdit {
if (data && data.edits) {
for (const edit of data.edits) {
if (typeof (<ResourceTextEditDto>edit).resource === 'object') {
(<ResourceTextEditDto>edit).resource = URI.revive((<ResourceTextEditDto>edit).resource);
} else {
(<ResourceFileEditDto>edit).newUri = URI.revive((<ResourceFileEditDto>edit).newUri);
(<ResourceFileEditDto>edit).oldUri = URI.revive((<ResourceFileEditDto>edit).oldUri);
}
}
}
return <modes.WorkspaceEdit>data;
}
export interface CodeActionDto {
title: string;
edit?: WorkspaceEditDto;
diagnostics?: IMarkerData[];
command?: modes.Command;
kind?: string;
}
export interface ExtHostLanguageFeaturesShape {
$provideDocumentSymbols(handle: number, resource: UriComponents): TPromise<modes.DocumentSymbol[]>;
$provideCodeLenses(handle: number, resource: UriComponents): TPromise<modes.ICodeLensSymbol[]>;
$resolveCodeLens(handle: number, resource: UriComponents, symbol: modes.ICodeLensSymbol): TPromise<modes.ICodeLensSymbol>;
$provideDefinition(handle: number, resource: UriComponents, position: IPosition): TPromise<DefinitionLinkDto[]>;
$provideImplementation(handle: number, resource: UriComponents, position: IPosition): TPromise<DefinitionLinkDto[]>;
$provideTypeDefinition(handle: number, resource: UriComponents, position: IPosition): TPromise<DefinitionLinkDto[]>;
$provideHover(handle: number, resource: UriComponents, position: IPosition): TPromise<modes.Hover>;
$provideDocumentHighlights(handle: number, resource: UriComponents, position: IPosition): TPromise<modes.DocumentHighlight[]>;
$provideReferences(handle: number, resource: UriComponents, position: IPosition, context: modes.ReferenceContext): TPromise<LocationDto[]>;
$provideCodeActions(handle: number, resource: UriComponents, rangeOrSelection: IRange | ISelection, context: modes.CodeActionContext): TPromise<CodeActionDto[]>;
$provideDocumentFormattingEdits(handle: number, resource: UriComponents, options: modes.FormattingOptions): TPromise<ISingleEditOperation[]>;
$provideDocumentRangeFormattingEdits(handle: number, resource: UriComponents, range: IRange, options: modes.FormattingOptions): TPromise<ISingleEditOperation[]>;
$provideOnTypeFormattingEdits(handle: number, resource: UriComponents, position: IPosition, ch: string, options: modes.FormattingOptions): TPromise<ISingleEditOperation[]>;
$provideWorkspaceSymbols(handle: number, search: string): TPromise<WorkspaceSymbolsDto>;
$resolveWorkspaceSymbol(handle: number, symbol: WorkspaceSymbolDto): TPromise<WorkspaceSymbolDto>;
$releaseWorkspaceSymbols(handle: number, id: number): void;
$provideRenameEdits(handle: number, resource: UriComponents, position: IPosition, newName: string): TPromise<WorkspaceEditDto>;
$resolveRenameLocation(handle: number, resource: UriComponents, position: IPosition): TPromise<modes.RenameLocation>;
$provideCompletionItems(handle: number, resource: UriComponents, position: IPosition, context: modes.SuggestContext): TPromise<SuggestResultDto>;
$resolveCompletionItem(handle: number, resource: UriComponents, position: IPosition, suggestion: modes.ISuggestion): TPromise<modes.ISuggestion>;
$releaseCompletionItems(handle: number, id: number): void;
$provideSignatureHelp(handle: number, resource: UriComponents, position: IPosition): TPromise<modes.SignatureHelp>;
$provideDocumentLinks(handle: number, resource: UriComponents): TPromise<modes.ILink[]>;
$resolveDocumentLink(handle: number, link: modes.ILink): TPromise<modes.ILink>;
$provideDocumentColors(handle: number, resource: UriComponents): TPromise<IRawColorInfo[]>;
$provideColorPresentations(handle: number, resource: UriComponents, colorInfo: IRawColorInfo): TPromise<modes.IColorPresentation[]>;
$provideFoldingRanges(handle: number, resource: UriComponents, context: modes.FoldingContext): TPromise<modes.FoldingRange[]>;
}
export interface ExtHostQuickOpenShape {
$onItemSelected(handle: number): void;
$validateInput(input: string): TPromise<string>;
$onDidChangeActive(sessionId: number, handles: number[]): void;
$onDidChangeSelection(sessionId: number, handles: number[]): void;
$onDidAccept(sessionId: number): void;
$onDidChangeValue(sessionId: number, value: string): void;
$onDidTriggerButton(sessionId: number, handle: number): void;
$onDidHide(sessionId: number): void;
}
export interface ShellLaunchConfigDto {
name?: string;
executable?: string;
args?: string[] | string;
cwd?: string;
env?: { [key: string]: string };
}
export interface ExtHostTerminalServiceShape {
$acceptTerminalClosed(id: number): void;
$acceptTerminalOpened(id: number, name: string): void;
$acceptActiveTerminalChanged(id: number | null): void;
$acceptTerminalProcessId(id: number, processId: number): void;
$acceptTerminalProcessData(id: number, data: string): void;
$acceptTerminalRendererInput(id: number, data: string): void;
$acceptTerminalRendererDimensions(id: number, cols: number, rows: number): void;
$createProcess(id: number, shellLaunchConfig: ShellLaunchConfigDto, cols: number, rows: number): void;
$acceptProcessInput(id: number, data: string): void;
$acceptProcessResize(id: number, cols: number, rows: number): void;
$acceptProcessShutdown(id: number): void;
}
export interface ExtHostSCMShape {
$provideOriginalResource(sourceControlHandle: number, uri: UriComponents): TPromise<UriComponents>;
$onInputBoxValueChange(sourceControlHandle: number, value: string): TPromise<void>;
$executeResourceCommand(sourceControlHandle: number, groupHandle: number, handle: number): TPromise<void>;
$validateInput(sourceControlHandle: number, value: string, cursorPosition: number): TPromise<[string, number] | undefined>;
}
export interface ExtHostTaskShape {
$provideTasks(handle: number, validTypes: { [key: string]: boolean; }): TPromise<TaskSet>;
$onDidStartTask(execution: TaskExecutionDTO): void;
$onDidStartTaskProcess(value: TaskProcessStartedDTO): void;
$onDidEndTaskProcess(value: TaskProcessEndedDTO): void;
$OnDidEndTask(execution: TaskExecutionDTO): void;
$resolveVariables(workspaceFolder: UriComponents, variables: string[]): TPromise<any>;
}
export interface IBreakpointDto {
type: string;
id?: string;
enabled: boolean;
condition?: string;
hitCondition?: string;
logMessage?: string;
}
export interface IFunctionBreakpointDto extends IBreakpointDto {
type: 'function';
functionName: string;
}
export interface ISourceBreakpointDto extends IBreakpointDto {
type: 'source';
uri: UriComponents;
line: number;
character: number;
}
export interface IBreakpointsDeltaDto {
added?: (ISourceBreakpointDto | IFunctionBreakpointDto)[];
removed?: string[];
changed?: (ISourceBreakpointDto | IFunctionBreakpointDto)[];
}
export interface ISourceMultiBreakpointDto {
type: 'sourceMulti';
uri: UriComponents;
lines: {
id: string;
enabled: boolean;
condition?: string;
hitCondition?: string;
logMessage?: string;
line: number;
character: number;
}[];
}
// {{SQL CARBON EDIT}}
/*
export interface ExtHostDebugServiceShape {
$substituteVariables(folder: UriComponents | undefined, config: IConfig): TPromise<IConfig>;
$runInTerminal(args: DebugProtocol.RunInTerminalRequestArguments, config: ITerminalSettings): TPromise<void>;
$startDASession(handle: number, debugType: string, adapterExecutableInfo: IAdapterExecutable | null, debugPort: number): TPromise<void>;
$stopDASession(handle: number): TPromise<void>;
$sendDAMessage(handle: number, message: DebugProtocol.ProtocolMessage): TPromise<void>;
$resolveDebugConfiguration(handle: number, folder: UriComponents | undefined, debugConfiguration: IConfig): TPromise<IConfig>;
$provideDebugConfigurations(handle: number, folder: UriComponents | undefined): TPromise<IConfig[]>;
$debugAdapterExecutable(handle: number, folder: UriComponents | undefined): TPromise<IAdapterExecutable>;
$acceptDebugSessionStarted(id: DebugSessionUUID, type: string, name: string): void;
$acceptDebugSessionTerminated(id: DebugSessionUUID, type: string, name: string): void;
$acceptDebugSessionActiveChanged(id: DebugSessionUUID | undefined, type?: string, name?: string): void;
$acceptDebugSessionCustomEvent(id: DebugSessionUUID, type: string, name: string, event: any): void;
	$acceptBreakpointsDelta(delta: IBreakpointsDeltaDto): void;
}
// {{SQL CARBON EDIT}}
*/
export interface DecorationRequest {
readonly id: number;
readonly handle: number;
readonly uri: UriComponents;
}
export type DecorationData = [number, boolean, string, string, ThemeColor, string];
export type DecorationReply = { [id: number]: DecorationData };
export interface ExtHostDecorationsShape {
$provideDecorations(requests: DecorationRequest[]): TPromise<DecorationReply>;
}
export interface ExtHostWindowShape {
$onDidChangeWindowFocus(value: boolean): void;
}
export interface ExtHostLogServiceShape {
$setLevel(level: LogLevel): void;
}
export interface ExtHostProgressShape {
$acceptProgressCanceled(handle: number): void;
}
export interface ExtHostCommentsShape {
$provideDocumentComments(handle: number, document: UriComponents): TPromise<modes.CommentInfo>;
$createNewCommentThread?(handle: number, document: UriComponents, range: IRange, text: string): TPromise<modes.CommentThread>;
$replyToCommentThread?(handle: number, document: UriComponents, range: IRange, commentThread: modes.CommentThread, text: string): TPromise<modes.CommentThread>;
$provideWorkspaceComments(handle: number): TPromise<modes.CommentThread[]>;
}
// --- proxy identifiers
export const MainContext = {
MainThreadCommands: <ProxyIdentifier<MainThreadCommandsShape>>createMainId<MainThreadCommandsShape>('MainThreadCommands'),
MainThreadComments: createMainId<MainThreadCommentsShape>('MainThreadComments'),
MainThreadConfiguration: createMainId<MainThreadConfigurationShape>('MainThreadConfiguration'),
// {{SQL CARBON EDIT}}
// MainThreadDebugService: createMainId<MainThreadDebugServiceShape>('MainThreadDebugService'),
MainThreadDecorations: createMainId<MainThreadDecorationsShape>('MainThreadDecorations'),
MainThreadDiagnostics: createMainId<MainThreadDiagnosticsShape>('MainThreadDiagnostics'),
MainThreadDialogs: createMainId<MainThreadDiaglogsShape>('MainThreadDiaglogs'),
MainThreadDocuments: createMainId<MainThreadDocumentsShape>('MainThreadDocuments'),
MainThreadDocumentContentProviders: createMainId<MainThreadDocumentContentProvidersShape>('MainThreadDocumentContentProviders'),
MainThreadTextEditors: createMainId<MainThreadTextEditorsShape>('MainThreadTextEditors'),
MainThreadErrors: createMainId<MainThreadErrorsShape>('MainThreadErrors'),
MainThreadTreeViews: createMainId<MainThreadTreeViewsShape>('MainThreadTreeViews'),
MainThreadLanguageFeatures: createMainId<MainThreadLanguageFeaturesShape>('MainThreadLanguageFeatures'),
MainThreadLanguages: createMainId<MainThreadLanguagesShape>('MainThreadLanguages'),
MainThreadMessageService: createMainId<MainThreadMessageServiceShape>('MainThreadMessageService'),
MainThreadOutputService: createMainId<MainThreadOutputServiceShape>('MainThreadOutputService'),
MainThreadProgress: createMainId<MainThreadProgressShape>('MainThreadProgress'),
MainThreadQuickOpen: createMainId<MainThreadQuickOpenShape>('MainThreadQuickOpen'),
MainThreadStatusBar: createMainId<MainThreadStatusBarShape>('MainThreadStatusBar'),
MainThreadStorage: createMainId<MainThreadStorageShape>('MainThreadStorage'),
MainThreadTelemetry: createMainId<MainThreadTelemetryShape>('MainThreadTelemetry'),
MainThreadTerminalService: createMainId<MainThreadTerminalServiceShape>('MainThreadTerminalService'),
MainThreadWebviews: createMainId<MainThreadWebviewsShape>('MainThreadWebviews'),
MainThreadUrls: createMainId<MainThreadUrlsShape>('MainThreadUrls'),
MainThreadWorkspace: createMainId<MainThreadWorkspaceShape>('MainThreadWorkspace'),
MainThreadFileSystem: createMainId<MainThreadFileSystemShape>('MainThreadFileSystem'),
MainThreadExtensionService: createMainId<MainThreadExtensionServiceShape>('MainThreadExtensionService'),
MainThreadSCM: createMainId<MainThreadSCMShape>('MainThreadSCM'),
MainThreadSearch: createMainId<MainThreadSearchShape>('MainThreadSearch'),
MainThreadTask: createMainId<MainThreadTaskShape>('MainThreadTask'),
MainThreadWindow: createMainId<MainThreadWindowShape>('MainThreadWindow'),
};
export const ExtHostContext = {
ExtHostCommands: createExtId<ExtHostCommandsShape>('ExtHostCommands'),
ExtHostConfiguration: createExtId<ExtHostConfigurationShape>('ExtHostConfiguration'),
ExtHostDiagnostics: createExtId<ExtHostDiagnosticsShape>('ExtHostDiagnostics'),
// {{SQL CARBON EDIT}}
// ExtHostDebugService: createExtId<ExtHostDebugServiceShape>('ExtHostDebugService'),
ExtHostDecorations: createExtId<ExtHostDecorationsShape>('ExtHostDecorations'),
ExtHostDocumentsAndEditors: createExtId<ExtHostDocumentsAndEditorsShape>('ExtHostDocumentsAndEditors'),
ExtHostDocuments: createExtId<ExtHostDocumentsShape>('ExtHostDocuments'),
ExtHostDocumentContentProviders: createExtId<ExtHostDocumentContentProvidersShape>('ExtHostDocumentContentProviders'),
ExtHostDocumentSaveParticipant: createExtId<ExtHostDocumentSaveParticipantShape>('ExtHostDocumentSaveParticipant'),
ExtHostEditors: createExtId<ExtHostEditorsShape>('ExtHostEditors'),
ExtHostTreeViews: createExtId<ExtHostTreeViewsShape>('ExtHostTreeViews'),
ExtHostFileSystem: createExtId<ExtHostFileSystemShape>('ExtHostFileSystem'),
ExtHostFileSystemEventService: createExtId<ExtHostFileSystemEventServiceShape>('ExtHostFileSystemEventService'),
ExtHostHeapService: createExtId<ExtHostHeapServiceShape>('ExtHostHeapMonitor'),
ExtHostLanguageFeatures: createExtId<ExtHostLanguageFeaturesShape>('ExtHostLanguageFeatures'),
ExtHostQuickOpen: createExtId<ExtHostQuickOpenShape>('ExtHostQuickOpen'),
ExtHostExtensionService: createExtId<ExtHostExtensionServiceShape>('ExtHostExtensionService'),
ExtHostLogService: createExtId<ExtHostLogServiceShape>('ExtHostLogService'),
ExtHostTerminalService: createExtId<ExtHostTerminalServiceShape>('ExtHostTerminalService'),
ExtHostSCM: createExtId<ExtHostSCMShape>('ExtHostSCM'),
ExtHostSearch: createExtId<ExtHostSearchShape>('ExtHostSearch'),
ExtHostTask: createExtId<ExtHostTaskShape>('ExtHostTask'),
ExtHostWorkspace: createExtId<ExtHostWorkspaceShape>('ExtHostWorkspace'),
ExtHostWindow: createExtId<ExtHostWindowShape>('ExtHostWindow'),
ExtHostWebviews: createExtId<ExtHostWebviewsShape>('ExtHostWebviews'),
	ExtHostProgress: createExtId<ExtHostProgressShape>('ExtHostProgress'),
	ExtHostComments: createExtId<ExtHostCommentsShape>('ExtHostComments'),
ExtHostUrls: createExtId<ExtHostUrlsShape>('ExtHostUrls')
};
main.go
package main
import (
"fmt"
)
func licenseKeyFormatting(S string, K int) string {
var ret []byte
var count int
for i := len(S) - 1; i >= 0; i-- {
if S[i] == '-' {
continue
}
if count == K {
count = 0
ret = append(ret, byte('-'))
}
count++
c := S[i]
if c >= 'a' && c <= 'z' {
c -= 32
}
ret = append(ret, byte(c))
}
if len(ret) > 1 {
for i, j := 0, len(ret)-1; i < j; i, j = i+1, j-1 {
ret[i], ret[j] = ret[j], ret[i]
}
}
return string(ret)
}
func main() {
fmt.Println("hello world!")
fmt.Println(licenseKeyFormatting("5F3Z-2e-9-w", 4))
fmt.Println(licenseKeyFormatting("2-5g-3-J", 2))
fmt.Println(licenseKeyFormatting("2-5g-3-J", 2))
}
user_agent.rs
use serde::{Serialize, Deserialize};
use chrono::serde::ts_seconds;
use crate::time::ServerTime;
#[derive(Serialize, Deserialize, PartialEq, Clone, Debug)]
#[serde(rename_all = "camelCase")]
pub struct UserAgent {
pub client: String,
#[serde(with = "ts_seconds")]
pub last_pulse: ServerTime,
}
google.py
# -*- coding: utf-8 -*-
from flask import url_for, request, session
from oleander import app
from gdata.client import Unauthorized as UnauthorizedError
from gdata.gauth import OAuth2Token, Error as ConnectionError, token_to_blob, token_from_blob
from gdata.contacts.client import ContactsClient
from gdata.calendar.client import CalendarClient
# https://developers.google.com/gdata/faq#AuthScopes
# http://googleappsdeveloper.blogspot.com/2011/09/python-oauth-20-google-data-apis.html
# http://stackoverflow.com/questions/10188768/google-contacts-import-using-oauth2-0
# http://stackoverflow.com/questions/4263888/how-to-detect-if-an-email-is-a-google-account
def create_oauth_handler(scope=''):
oauth2_handler = OAuth2Token(
client_id=app.config['GOOGLE_APP_ID'],
client_secret=app.config['GOOGLE_APP_SECRET'],
scope=scope,
user_agent=''
)
web_hook_url = url_for(
'google_connected',
_external=True
)
oauth2_handler.generate_authorize_url(
redirect_uri=web_hook_url
)
return oauth2_handler
def create_authorize_url(action_url, error_url, scope=''):
oauth2_handler = create_oauth_handler(scope)
session['action_url'] = action_url
session['error_url'] = error_url
web_hook_url = url_for(
'google_connected',
_external=True
)
return oauth2_handler.generate_authorize_url(
redirect_uri=web_hook_url
)
def create_api(cls):
    credentials = session.get('google_credentials', None)
    if not credentials:
        raise ConnectionError('No credentials.')
    credentials = token_from_blob(credentials)
    client = cls(source='')  # source - user agent
    credentials.authorize(client)
    return client
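# Illustrative call site (a sketch, not part of this module): once the OAuth
# callback has stored credentials in the session, an API client can be built
# from any of the imported gdata client classes, e.g.
#   contacts = create_api(ContactsClient)
#   feed = contacts.GetContacts()
# GetContacts() is assumed from the gdata contacts client; error handling omitted.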
|
shadow.js
const postcss = require('postcss')
const { parse } = require('css-box-shadow')
module.exports = postcss.plugin('postcss-css-shadow', opts => root => {
root.walkRules(rule => {
let shadow = null
rule.walkDecls('shadow', decl => {
const { offsetX, offsetY, blurRadius, color } = parse(decl.value)[0]
shadow = {
color,
blur: parseFloat(blurRadius),
offset: [parseFloat(offsetX), parseFloat(offsetY)]
}
decl.remove()
})
    if (shadow) {
      rule.append({
        prop: '__shadow',
        value: JSON.stringify(shadow)
      })
    }
  })
})
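// Illustrative transform (values hypothetical): a declaration such as
//   .card { shadow: 1px 2px 3px #000; }
// becomes
//   .card { __shadow: {"color":"#000","blur":3,"offset":[1,2]} }
// so a later consumer can read the shadow back as structured JSON.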
09-dynamic-uniform-interface.rs
//! > This program is a sequel to 08-shader-uniforms-adapt. Be sure to have read it first.
//!
//! This example shows you how to lookup dynamically uniforms into shaders to implement various kind
//! of situations. This feature is very likely to be interesting for anyone who would like to
//! implement a GUI, where the interface of the shader programs are not known statically, for
//! instance.
//!
//! This example looks up the time and the triangle position on the fly, without using the uniform
//! interface.
//!
//! Press the <a>, <s>, <d>, <z> or the arrow keys to move the triangle on the screen.
//! Press <escape> to quit or close the window.
//!
//! https://docs.rs/luminance
mod common;
use crate::common::{Semantics, Vertex, VertexPosition, VertexColor};
use luminance::context::GraphicsContext as _;
use luminance::render_state::RenderState;
use luminance::shader::program::Program;
use luminance::tess::{Mode, TessBuilder};
use luminance_glfw::{Action, GlfwSurface, Key, Surface, WindowEvent, WindowDim, WindowOpt};
use std::time::Instant;
const VS: &'static str = include_str!("displacement-vs.glsl");
const FS: &'static str = include_str!("displacement-fs.glsl");
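// For the dynamic lookups below to succeed, the GLSL sources above are assumed
// to declare matching uniforms, along the lines of (sketch):
//   uniform float t;
//   uniform vec2 triangle_pos;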
// Only one triangle this time.
const TRI_VERTICES: [Vertex; 3] = [
Vertex { pos: VertexPosition::new([0.5, -0.5]), rgb: VertexColor::new([1., 0., 0.]) },
Vertex { pos: VertexPosition::new([0.0, 0.5]), rgb: VertexColor::new([0., 1., 0.]) },
Vertex { pos: VertexPosition::new([-0.5, -0.5]), rgb: VertexColor::new([0., 0., 1.]) },
];
fn main() {
let mut surface = GlfwSurface::new(
WindowDim::Windowed(960, 540),
"Hello, world!",
WindowOpt::default(),
)
.expect("GLFW surface creation");
// notice that we don’t set a uniform interface here: we’re going to look it up on the fly
let program = Program::<Semantics, (), ()>::from_strings(None, VS, None, FS)
.expect("program creation")
.ignore_warnings();
let triangle = TessBuilder::new(&mut surface)
.add_vertices(TRI_VERTICES)
.set_mode(Mode::Triangle)
.build()
.unwrap();
let mut back_buffer = surface.back_buffer().unwrap();
let mut triangle_pos = [0., 0.];
let start_t = Instant::now();
let mut resize = false;
'app: loop {
for event in surface.poll_events() {
match event {
WindowEvent::Close | WindowEvent::Key(Key::Escape, _, Action::Release, _) => break 'app,
WindowEvent::Key(Key::A, _, action, _) | WindowEvent::Key(Key::Left, _, action, _)
if action == Action::Press || action == Action::Repeat =>
{
triangle_pos[0] -= 0.1;
}
WindowEvent::Key(Key::D, _, action, _) | WindowEvent::Key(Key::Right, _, action, _)
if action == Action::Press || action == Action::Repeat =>
{
triangle_pos[0] += 0.1;
}
WindowEvent::Key(Key::Z, _, action, _) | WindowEvent::Key(Key::Up, _, action, _)
if action == Action::Press || action == Action::Repeat =>
{
triangle_pos[1] += 0.1;
}
WindowEvent::Key(Key::S, _, action, _) | WindowEvent::Key(Key::Down, _, action, _)
if action == Action::Press || action == Action::Repeat =>
{
triangle_pos[1] -= 0.1;
}
WindowEvent::FramebufferSize(..) => {
resize = true;
}
_ => (),
}
}
if resize {
back_buffer = surface.back_buffer().unwrap();
resize = false;
}
let elapsed = start_t.elapsed();
let t64 = elapsed.as_secs() as f64 + (elapsed.subsec_millis() as f64 * 1e-3);
let t = t64 as f32;
surface
.pipeline_builder()
.pipeline(&back_buffer, [0., 0., 0., 0.], |_, mut shd_gate| {
shd_gate.shade(&program, |iface, mut rdr_gate| {
let query = iface.query();
if let Ok(time_u) = query.ask("t") {
time_u.update(t);
}
if let Ok(triangle_pos_u) = query.ask("triangle_pos") {
triangle_pos_u.update(triangle_pos);
}
// the `ask` function is type-safe: if you try to get a uniform which type is not
// correctly reified from the source, you get a TypeMismatch runtime error
//if let Err(e) = query.ask::<i32>("triangle_pos") {
// eprintln!("{:?}", e);
//}
rdr_gate.render(RenderState::default(), |mut tess_gate| {
tess_gate.render(&triangle);
});
});
});
    surface.swap_buffers();
  }
}
|
film_actor.go
//
// Code generated by go-jet DO NOT EDIT.
//
// WARNING: Changes to this file may cause incorrect behavior
// and will be lost if the code is regenerated
//
package table
import (
"github.com/goranbjelanovic/jet/v2/postgres"
)
var FilmActor = newFilmActorTable()
type filmActorTable struct {
postgres.Table
	//Columns
	ActorID    postgres.ColumnInteger
	FilmID     postgres.ColumnInteger
LastUpdate postgres.ColumnTimestamp
AllColumns postgres.ColumnList
MutableColumns postgres.ColumnList
}
type FilmActorTable struct {
filmActorTable
EXCLUDED filmActorTable
}
// AS creates new FilmActorTable with assigned alias
func (a *FilmActorTable) AS(alias string) *FilmActorTable {
aliasTable := newFilmActorTable()
aliasTable.Table.AS(alias)
return aliasTable
}
func newFilmActorTable() *FilmActorTable {
return &FilmActorTable{
filmActorTable: newFilmActorTableImpl("dvds", "film_actor"),
EXCLUDED: newFilmActorTableImpl("", "excluded"),
}
}
func newFilmActorTableImpl(schemaName, tableName string) filmActorTable {
var (
ActorIDColumn = postgres.IntegerColumn("actor_id")
FilmIDColumn = postgres.IntegerColumn("film_id")
LastUpdateColumn = postgres.TimestampColumn("last_update")
allColumns = postgres.ColumnList{ActorIDColumn, FilmIDColumn, LastUpdateColumn}
mutableColumns = postgres.ColumnList{LastUpdateColumn}
)
return filmActorTable{
Table: postgres.NewTable(schemaName, tableName, allColumns...),
//Columns
ActorID: ActorIDColumn,
FilmID: FilmIDColumn,
LastUpdate: LastUpdateColumn,
AllColumns: allColumns,
MutableColumns: mutableColumns,
}
}
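// Illustrative query built from this table (a sketch assuming go-jet's usual
// builder API; not part of the generated code):
//
//   stmt := postgres.SELECT(FilmActor.AllColumns).FROM(FilmActor)
//   sql, args := stmt.Sql()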
load_staging_genre.py
import sys
import os
from datetime import datetime
from pyspark import SparkConf, SparkContext
from pyspark.sql import SparkSession
from pyspark.sql.types import (StructType, StructField as Fld, DoubleType as Dbl,
IntegerType as Int, DateType as Date,
BooleanType as Boolean, FloatType as Float,
LongType as Long, StringType as String,
ArrayType as Array)
from pyspark.sql.functions import (col, year, month, dayofmonth, weekofyear, quarter,
explode, from_json)
def create_spark_session(aws_key, aws_secret_key):
"""
Description: Creates spark session.
Returns:
spark session object
"""
spark = SparkSession \
.builder \
.config("spark.executor.heartbeatInterval", "40s") \
.getOrCreate()
spark.sparkContext._jsc.hadoopConfiguration().set("fs.s3a.impl",
"org.apache.hadoop.fs.s3a.S3AFileSystem")
spark.sparkContext._jsc.hadoopConfiguration().set("fs.s3a.access.key", aws_key)
spark.sparkContext._jsc.hadoopConfiguration().set("fs.s3a.secret.key", aws_secret_key)
spark.sparkContext._jsc.hadoopConfiguration().set("fs.s3a.endpoint", "s3.amazonaws.com")
spark.sparkContext._jsc.hadoopConfiguration().set("fs.s3a.connection.timeout", "100")
spark.sparkContext._jsc.hadoopConfiguration().set("fs.s3a.connection.maximum", "5000")
spark.conf.set("spark.sql.shuffle.partitions", 4)
return spark
def format_datetime(ts):
    return datetime.fromtimestamp(ts/1000.0)

if __name__ == "__main__":
s3_bucket = sys.argv[1]
s3_key = sys.argv[2]
aws_key = sys.argv[3]
aws_secret_key = sys.argv[4]
redshift_conn_string = sys.argv[5]
db_user = sys.argv[6]
db_pass = sys.argv[7]
spark = create_spark_session(aws_key, aws_secret_key)
movies_schema = StructType([
Fld("adult", String()),
Fld("belongs_to_collection", Long()),
Fld("budget", Long()),
Fld("genres", String()),
Fld("homepage", String()),
Fld("id", Int()),
Fld("imdb_id", String()),
Fld("original_language", String()),
Fld("original_title", String()),
Fld("overview", String()),
Fld("popularity", Dbl()),
Fld("poster_path", String()),
Fld("production_company", String()),
Fld("production_country", String()),
Fld("release_date", Date()),
Fld("revenue", Long()),
Fld("runtime", Float()),
Fld("spoken_languages", String()),
Fld("status", String()),
Fld("tagline", String()),
Fld("title", String()),
Fld("video", Boolean()),
Fld("vote_average", Float()),
Fld("vote_count", Int())
])
movies_df = spark.read.option("header", "true") \
.csv("s3a://{}/{}/movies_metadata.csv".format(s3_bucket, s3_key),
schema=movies_schema)
genre_schema = Array(StructType([Fld("id", Int()), Fld("name", String())]))
movies_df = movies_df.withColumn("genres", explode(from_json("genres", genre_schema))) \
.withColumn("genre_id", col("genres.id")) \
        .withColumn("genre_name", col("genres.name"))
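    # Shape assumed from the schema above: the raw `genres` column holds a JSON
    # array string such as [{"id": 35, "name": "Comedy"}, {"id": 18, "name": "Drama"}];
    # from_json parses it and explode yields one row per (movie, genre) pair.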
movie_genre = movies_df.select("id", "genre_id").distinct()
movie_genre = movie_genre.select(col("id").alias("movie_id"), col("genre_id"))
genre = movies_df.select("genre_id", "genre_name").distinct()
genre = genre.na.drop()
# Load data into staging:
genre.write \
.format("jdbc") \
.option("url", redshift_conn_string) \
.option("dbtable", "movies.stage_genre") \
        .option("user", db_user) \
        .option("password", db_pass) \
.option("driver", "com.amazon.redshift.jdbc42.Driver") \
.mode("append") \
.save()
movie_genre.write \
.format("jdbc") \
.option("url", redshift_conn_string) \
.option("dbtable", "movies.stage_movie_genre") \
        .option("user", db_user) \
        .option("password", db_pass) \
.option("driver", "com.amazon.redshift.jdbc42.Driver") \
.mode("append") \
        .save()
vsock.rs
// Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use std::thread;
use data_model::{DataInit, Le64};
use base::{error, warn, AsRawDescriptor, Event, RawDescriptor};
use vhost::Vhost;
use vhost::Vsock as VhostVsockHandle;
use vm_memory::GuestMemory;
use super::worker::Worker;
use super::{Error, Result};
use crate::virtio::{copy_config, Interrupt, Queue, VirtioDevice, TYPE_VSOCK};
const QUEUE_SIZE: u16 = 256;
const NUM_QUEUES: usize = 3;
const QUEUE_SIZES: &[u16] = &[QUEUE_SIZE; NUM_QUEUES];
pub struct Vsock {
worker_kill_evt: Option<Event>,
kill_evt: Option<Event>,
vhost_handle: Option<VhostVsockHandle>,
cid: u64,
interrupts: Option<Vec<Event>>,
avail_features: u64,
acked_features: u64,
}
impl Vsock {
/// Create a new virtio-vsock device with the given VM cid.
pub fn new(base_features: u64, cid: u64, mem: &GuestMemory) -> Result<Vsock> {
let kill_evt = Event::new().map_err(Error::CreateKillEvent)?;
let handle = VhostVsockHandle::new(mem).map_err(Error::VhostOpen)?;
let avail_features = base_features
| 1 << virtio_sys::vhost::VIRTIO_F_NOTIFY_ON_EMPTY
| 1 << virtio_sys::vhost::VIRTIO_RING_F_INDIRECT_DESC
| 1 << virtio_sys::vhost::VIRTIO_RING_F_EVENT_IDX
| 1 << virtio_sys::vhost::VHOST_F_LOG_ALL
| 1 << virtio_sys::vhost::VIRTIO_F_ANY_LAYOUT;
let mut interrupts = Vec::new();
for _ in 0..NUM_QUEUES {
interrupts.push(Event::new().map_err(Error::VhostIrqCreate)?);
}
Ok(Vsock {
worker_kill_evt: Some(kill_evt.try_clone().map_err(Error::CloneKillEvent)?),
kill_evt: Some(kill_evt),
vhost_handle: Some(handle),
cid,
interrupts: Some(interrupts),
avail_features,
acked_features: 0,
})
}
pub fn new_for_testing(cid: u64, features: u64) -> Vsock {
Vsock {
worker_kill_evt: None,
kill_evt: None,
vhost_handle: None,
cid,
interrupts: None,
avail_features: features,
acked_features: 0,
}
}
pub fn acked_features(&self) -> u64 {
self.acked_features
}
}
impl Drop for Vsock {
fn drop(&mut self) {
// Only kill the child if it claimed its event.
if self.worker_kill_evt.is_none() {
if let Some(kill_evt) = &self.kill_evt {
// Ignore the result because there is nothing we can do about it.
let _ = kill_evt.write(1);
}
}
}
}
impl VirtioDevice for Vsock {
fn keep_fds(&self) -> Vec<RawDescriptor> {
let mut keep_fds = Vec::new();
if let Some(handle) = &self.vhost_handle {
keep_fds.push(handle.as_raw_descriptor());
}
if let Some(interrupt) = &self.interrupts {
for vhost_int in interrupt.iter() {
keep_fds.push(vhost_int.as_raw_descriptor());
}
}
if let Some(worker_kill_evt) = &self.worker_kill_evt {
keep_fds.push(worker_kill_evt.as_raw_descriptor());
}
keep_fds
}
fn device_type(&self) -> u32 {
TYPE_VSOCK
}
fn queue_max_sizes(&self) -> &[u16] {
QUEUE_SIZES
}
fn features(&self) -> u64 {
self.avail_features
}
fn read_config(&self, offset: u64, data: &mut [u8]) {
let cid = Le64::from(self.cid);
copy_config(data, 0, DataInit::as_slice(&cid), offset);
}
fn ack_features(&mut self, value: u64) {
let mut v = value;
// Check if the guest is ACK'ing a feature that we didn't claim to have.
let unrequested_features = v & !self.avail_features;
if unrequested_features != 0 {
warn!("vsock: virtio-vsock got unknown feature ack: {:x}", v);
// Don't count these features as acked.
v &= !unrequested_features;
}
self.acked_features |= v;
}
fn activate(
&mut self,
_: GuestMemory,
interrupt: Interrupt,
queues: Vec<Queue>,
queue_evts: Vec<Event>,
) {
if queues.len() != NUM_QUEUES || queue_evts.len() != NUM_QUEUES {
            error!("vsock: expected {} queues, got {}", NUM_QUEUES, queues.len());
return;
}
if let Some(vhost_handle) = self.vhost_handle.take() {
if let Some(interrupts) = self.interrupts.take() {
if let Some(kill_evt) = self.worker_kill_evt.take() {
let acked_features = self.acked_features;
let cid = self.cid;
let worker_result = thread::Builder::new()
.name("vhost_vsock".to_string())
.spawn(move || {
// The third vq is an event-only vq that is not handled by the vhost
// subsystem (but still needs to exist). Split it off here.
let vhost_queues = queues[..2].to_vec();
let mut worker = Worker::new(
                            vhost_queues,
                            vhost_handle,
                            interrupts,
interrupt,
acked_features,
kill_evt,
None,
);
let activate_vqs = |handle: &VhostVsockHandle| -> Result<()> {
handle.set_cid(cid).map_err(Error::VhostVsockSetCid)?;
handle.start().map_err(Error::VhostVsockStart)?;
Ok(())
};
let cleanup_vqs = |_handle: &VhostVsockHandle| -> Result<()> { Ok(()) };
let result =
worker.run(queue_evts, QUEUE_SIZES, activate_vqs, cleanup_vqs);
if let Err(e) = result {
error!("vsock worker thread exited with error: {:?}", e);
}
});
if let Err(e) = worker_result {
error!("failed to spawn vhost_vsock worker: {}", e);
return;
}
}
}
}
}
fn on_device_sandboxed(&mut self) {
        // Ignore the error here, but log it. Nothing more is needed: if
        // set_owner() fails, the rest of the vhost setup in activate() will
        // also fail and stop the worker thread.
if let Some(vhost_handle) = &self.vhost_handle {
match vhost_handle.set_owner() {
Ok(_) => {}
Err(e) => error!("{}: failed to set owner: {:?}", self.debug_label(), e),
}
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::convert::TryInto;
#[test]
fn ack_features() {
let cid = 5;
let features: u64 = (1 << 20) | (1 << 49) | (1 << 2) | (1 << 19);
let mut acked_features: u64 = 0;
let mut unavailable_features: u64 = 0;
let mut vsock = Vsock::new_for_testing(cid, features);
assert_eq!(acked_features, vsock.acked_features());
acked_features |= 1 << 2;
vsock.ack_features(acked_features);
assert_eq!(acked_features, vsock.acked_features());
acked_features |= 1 << 49;
vsock.ack_features(acked_features);
assert_eq!(acked_features, vsock.acked_features());
acked_features |= 1 << 60;
unavailable_features |= 1 << 60;
vsock.ack_features(acked_features);
assert_eq!(
acked_features & !unavailable_features,
vsock.acked_features()
);
acked_features |= 1 << 1;
unavailable_features |= 1 << 1;
vsock.ack_features(acked_features);
assert_eq!(
acked_features & !unavailable_features,
vsock.acked_features()
);
}
#[test]
fn read_config() {
let cid = 0xfca9a559fdcb9756;
let vsock = Vsock::new_for_testing(cid, 0);
let mut buf = [0 as u8; 8];
vsock.read_config(0, &mut buf);
assert_eq!(cid, u64::from_le_bytes(buf));
vsock.read_config(0, &mut buf[..4]);
assert_eq!(
(cid & 0xffffffff) as u32,
u32::from_le_bytes(buf[..4].try_into().unwrap())
);
vsock.read_config(4, &mut buf[..4]);
assert_eq!(
(cid >> 32) as u32,
u32::from_le_bytes(buf[..4].try_into().unwrap())
);
let data: [u8; 8] = [8, 226, 5, 46, 159, 59, 89, 77];
buf.copy_from_slice(&data);
vsock.read_config(12, &mut buf);
assert_eq!(&buf, &data);
}
#[test]
fn features() {
let cid = 5;
let features: u64 = 0xfc195ae8db88cff9;
let vsock = Vsock::new_for_testing(cid, features);
assert_eq!(features, vsock.features());
}
}
option.rs
/*!
* Operations on the ubiquitous `option` type.
*
* Type `option` represents an optional value.
*
* Every `option<T>` value can either be `some(T)` or `none`. Where in other
* languages you might use a nullable type, in Rust you would use an option
* type.
*/
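// A minimal usage sketch in this module's pre-1.0 dialect (illustrative only):
//   let x = some(5);
//   assert x.is_some();
//   assert x.get() == 5;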
/// The option type
enum option<T> {
none,
some(T),
}
pure fn get<T: copy>(opt: option<T>) -> T {
/*!
* Gets the value out of an option
*
* # Failure
*
* Fails if the value equals `none`
*/
match opt {
some(x) => return x,
none => fail ~"option::get none"
}
}
pure fn get_ref<T>(opt: &r/option<T>) -> &r/T {
    /*!
     * Gets an immutable reference to the value inside an option.
     *
     * # Failure
     *
     * Fails if the value equals `none`
     */
    match *opt {
        some(ref x) => x,
        none => fail ~"option::get_ref none"
    }
}
pure fn expect<T: copy>(opt: option<T>, reason: ~str) -> T {
/*!
* Gets the value out of an option, printing a specified message on
* failure
*
* # Failure
*
* Fails if the value equals `none`
*/
match opt { some(x) => x, none => fail reason }
}
pure fn map<T, U>(opt: option<T>, f: fn(T) -> U) -> option<U> {
//! Maps a `some` value from one type to another
match opt { some(x) => some(f(x)), none => none }
}
pure fn map_ref<T, U>(opt: &option<T>, f: fn(x: &T) -> U) -> option<U> {
//! Maps a `some` value by reference from one type to another
match *opt { some(ref x) => some(f(x)), none => none }
}
pure fn map_consume<T, U>(+opt: option<T>, f: fn(+T) -> U) -> option<U> {
/*!
* As `map`, but consumes the option and gives `f` ownership to avoid
* copying.
*/
if opt.is_some() { some(f(option::unwrap(opt))) } else { none }
}
pure fn chain<T, U>(opt: option<T>, f: fn(T) -> option<U>) -> option<U> {
/*!
* Update an optional value by optionally running its content through a
* function that returns an option.
*/
match opt { some(x) => f(x), none => none }
}
pure fn chain_ref<T, U>(opt: &option<T>,
f: fn(x: &T) -> option<U>) -> option<U> {
/*!
* Update an optional value by optionally running its content by reference
* through a function that returns an option.
*/
match *opt { some(ref x) => f(x), none => none }
}
pure fn or<T>(+opta: option<T>, +optb: option<T>) -> option<T> {
/*!
* Returns the leftmost some() value, or none if both are none.
*/
match opta {
some(_) => opta,
_ => optb
}
}
#[inline(always)]
pure fn while_some<T>(+x: option<T>, blk: fn(+T) -> option<T>) {
//! Applies a function zero or more times until the result is none.
let mut opt <- x;
while opt.is_some() {
opt = blk(unwrap(opt));
}
}
pure fn is_none<T>(opt: option<T>) -> bool {
//! Returns true if the option equals `none`
match opt { none => true, some(_) => false }
}
pure fn is_some<T>(opt: option<T>) -> bool {
//! Returns true if the option contains some value
!is_none(opt)
}
pure fn get_default<T: copy>(opt: option<T>, def: T) -> T {
//! Returns the contained value or a default
match opt { some(x) => x, none => def }
}
pure fn map_default<T, U>(opt: option<T>, +def: U, f: fn(T) -> U) -> U {
//! Applies a function to the contained value or returns a default
match opt { none => def, some(t) => f(t) }
}
// This should replace map_default.
pure fn map_default_ref<T, U>(opt: &option<T>, +def: U,
f: fn(x: &T) -> U) -> U {
//! Applies a function to the contained value or returns a default
match *opt { none => def, some(ref t) => f(t) }
}
// This should change to by-copy mode; use iter_ref below for by reference
pure fn iter<T>(opt: option<T>, f: fn(T)) {
//! Performs an operation on the contained value or does nothing
match opt { none => (), some(t) => f(t) }
}
pure fn iter_ref<T>(opt: &option<T>, f: fn(x: &T)) {
//! Performs an operation on the contained value by reference
match *opt { none => (), some(ref t) => f(t) }
}
#[inline(always)]
pure fn unwrap<T>(+opt: option<T>) -> T {
/*!
* Moves a value out of an option type and returns it.
*
* Useful primarily for getting strings, vectors and unique pointers out
* of option types without copying them.
*/
match move opt {
some(move x) => x,
none => fail ~"option::unwrap none"
}
}
/// The ubiquitous option dance.
#[inline(always)]
fn swap_unwrap<T>(opt: &mut option<T>) -> T {
if opt.is_none() { fail ~"option::swap_unwrap none" }
unwrap(util::replace(opt, none))
}
pure fn unwrap_expect<T>(+opt: option<T>, reason: &str) -> T {
//! As unwrap, but with a specified failure message.
if opt.is_none() { fail reason.to_unique(); }
unwrap(opt)
}
// Some of these should change to be &option<T>, some should not. See below.
impl<T> option<T> {
/**
* Update an optional value by optionally running its content through a
* function that returns an option.
*/
pure fn chain<U>(f: fn(T) -> option<U>) -> option<U> { chain(self, f) }
/// Applies a function to the contained value or returns a default
pure fn map_default<U>(+def: U, f: fn(T) -> U) -> U
{ map_default(self, def, f) }
/// Performs an operation on the contained value or does nothing
pure fn iter(f: fn(T)) { iter(self, f) }
/// Returns true if the option equals `none`
pure fn is_none() -> bool { is_none(self) }
/// Returns true if the option contains some value
pure fn is_some() -> bool { is_some(self) }
/// Maps a `some` value from one type to another
pure fn map<U>(f: fn(T) -> U) -> option<U> { map(self, f) }
}
impl<T> &option<T> {
/**
* Update an optional value by optionally running its content by reference
* through a function that returns an option.
*/
pure fn chain_ref<U>(f: fn(x: &T) -> option<U>) -> option<U> {
chain_ref(self, f)
}
/// Applies a function to the contained value or returns a default
pure fn map_default_ref<U>(+def: U, f: fn(x: &T) -> U) -> U
{ map_default_ref(self, def, f) }
/// Performs an operation on the contained value by reference
pure fn iter_ref(f: fn(x: &T)) { iter_ref(self, f) }
/// Maps a `some` value from one type to another by reference
pure fn map_ref<U>(f: fn(x: &T) -> U) -> option<U> { map_ref(self, f) }
/// Gets an immutable reference to the value inside a `some`.
pure fn get_ref() -> &self/T { get_ref(self) }
}
impl<T: copy> option<T> {
/**
* Gets the value out of an option
*
* # Failure
*
* Fails if the value equals `none`
*/
pure fn get() -> T { get(self) }
pure fn get_default(def: T) -> T { get_default(self, def) }
/**
* Gets the value out of an option, printing a specified message on
* failure
*
* # Failure
*
* Fails if the value equals `none`
*/
pure fn expect(reason: ~str) -> T { expect(self, reason) }
/// Applies a function zero or more times until the result is none.
pure fn while_some(blk: fn(+T) -> option<T>) { while_some(self, blk) }
}
#[test]
fn test_unwrap_ptr() {
let x = ~0;
let addr_x = ptr::addr_of(*x);
let opt = some(x);
let y = unwrap(opt);
let addr_y = ptr::addr_of(*y);
assert addr_x == addr_y;
}
#[test]
fn test_unwrap_str() {
let x = ~"test";
let addr_x = str::as_buf(x, |buf, _len| ptr::addr_of(buf));
let opt = some(x);
let y = unwrap(opt);
let addr_y = str::as_buf(y, |buf, _len| ptr::addr_of(buf));
assert addr_x == addr_y;
}
#[test]
fn test_unwrap_resource() {
struct r {
let i: @mut int;
new(i: @mut int) { self.i = i; }
drop { *(self.i) += 1; }
}
let i = @mut 0;
{
let x = r(i);
let opt = some(x);
let _y = unwrap(opt);
}
assert *i == 1;
}
#[test]
fn test_option_dance() {
let x = some(());
let mut y = some(5);
let mut y2 = 0;
do x.iter |_x| {
y2 = swap_unwrap(&mut y);
}
assert y2 == 5;
assert y.is_none();
}
#[test] #[should_fail] #[ignore(cfg(windows))]
fn test_option_too_much_dance() {
let mut y = some(util::NonCopyable());
let _y2 = swap_unwrap(&mut y);
let _y3 = swap_unwrap(&mut y);
}
#[test]
fn test_option_while_some() {
let mut i = 0;
do some(10).while_some |j| {
i += 1;
if (j > 0) {
some(j-1)
} else {
none
}
}
assert i == 11;
}
// Local Variables:
// mode: rust;
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
doc.go
// Package buildrun contains the cobra.Command, flags, and the actual interactions to be made on the
// API server for BuildRun resources. It implements SubCommand interface.
package buildrun
|
lib.rs
#![deny(unused_must_use)]
// #![cfg_attr(test, feature(test))]
use std::task::{Context, Poll};
use std::pin::Pin;
use std::fmt::Formatter;
use std::fmt::Debug;
use std::future::Future;
use std::io::{Error, Result, SeekFrom, Seek};
use tokio::fs::{File, OpenOptions};
use tokio::io::{AsyncRead, AsyncSeek, AsyncWrite};
use tokio::task::{spawn_blocking, JoinHandle};
use futures_lite::{ready, FutureExt};
use fs3::FileExt;
use std::path::Path;
use std::mem::MaybeUninit;
/// Locks a file asynchronously.
/// Auto locks a file if any read or write methods are called. If [Self::lock_exclusive]
/// or [Self::lock_shared] has been called then the file will stay locked.
/// Can auto seek to specified location before doing any read/write operation.
///
/// Note 1: Do not attempt to hold multiple handles to the same file, because locking is done
/// on a per-process basis.
/// Note 2: Remember to open the file with the appropriate read and/or write mode; write calls
/// are simply ignored if the file is opened read-only.
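///
/// A minimal usage sketch (names as defined below; error handling omitted):
///
/// ```no_run
/// # async fn demo() -> std::io::Result<()> {
/// let mut file = FileLock::open("data.bin").await?;
/// file.lock_exclusive().await?;
/// // read/write through the AsyncRead/AsyncWrite impls here
/// file.unlock().await;
/// # Ok(())
/// # }
/// ```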
pub struct FileLock {
mode: SeekFrom,
state: State,
is_manually_locked: bool,
unlocked_file: Option<std::fs::File>,
locked_file: Option<File>,
result: Option<Result<u64>>,
locking_fut: Option<JoinHandle<std::result::Result<File, (std::fs::File, Error)>>>,
unlocking_fut: Option<Pin<Box<dyn Future<Output = std::fs::File> + Send>>>,
seek_fut: Option<JoinHandle<(Result<u64>, std::fs::File)>>,
}
impl FileLock {
/// Opens a file in read and write mode that is unlocked.
    /// This function will create a file if it does not exist, and will truncate it if it does.
pub async fn create(path: impl AsRef<Path>) -> Result<FileLock> {
let file = OpenOptions::new().write(true).read(true).create(true).truncate(true).open(path).await?;
Ok(FileLock::new_tokio(file).await)
}
/// Attempts to open a file in read and write mode that is unlocked.
pub async fn open(path: impl AsRef<Path>) -> Result<FileLock> {
let file = OpenOptions::new().write(true).read(true).open(path).await?;
Ok(FileLock::new_tokio(file).await)
}
/// Creates a new 'FileLock' from [`tokio::fs::File`].
pub async fn new_tokio(tokio_file: File) -> FileLock {
FileLock {
mode: SeekFrom::Current(0),
state: State::Unlocked,
is_manually_locked: false,
unlocked_file: Some(tokio_file.into_std().await),
locked_file: None,
result: None,
locking_fut: None,
unlocking_fut: None,
seek_fut: None
}
}
/// Creates a new 'FileLock' from [`std::fs::File`].
pub fn new_std(std_file: std::fs::File) -> FileLock {
FileLock {
mode: SeekFrom::Current(0),
state: State::Unlocked,
is_manually_locked: false,
unlocked_file: Some(std_file),
locked_file: None,
result: None,
locking_fut: None,
unlocking_fut: None,
seek_fut: None
}
}
/// Locks the file for reading and writing until [`Self::unlock`] is called.
pub fn lock_exclusive(&mut self) -> LockFuture {
if self.locked_file.is_some() {
panic!("File already locked.");
}
self.is_manually_locked = true;
LockFuture::new_exclusive(self)
}
/// Locks the file for reading and writing until [`Self::unlock`] is called. Returns an error if
/// the file is currently locked.
pub fn try_lock_exclusive(&mut self) -> Result<()> {
if self.locked_file.is_some() {
panic!("File already locked.");
}
self.is_manually_locked = true;
self.unlocked_file.as_mut().unwrap().try_lock_exclusive().map(|_| {
self.locked_file = Some(File::from_std(self.unlocked_file.take().unwrap()));
self.state = State::Locked;
})
}
/// Locks the file for reading until [`Self::unlock`] is called.
pub fn lock_shared(&mut self) -> LockFuture {
if self.locked_file.is_some() {
panic!("File already locked.");
}
self.is_manually_locked = true;
LockFuture::new_shared(self)
}
/// Locks the file for reading until [`Self::unlock`] is called. Returns an error if the file
/// is currently locked.
pub fn try_lock_shared(&mut self) -> Result<()> {
if self.locked_file.is_some() {
panic!("File already locked.");
}
self.is_manually_locked = true;
self.unlocked_file.as_mut().unwrap().try_lock_shared().map(|_| {
self.locked_file = Some(File::from_std(self.unlocked_file.take().unwrap()));
self.state = State::Locked;
})
}
/// Unlocks the file.
pub fn unlock(&mut self) -> UnlockFuture {
if self.unlocked_file.is_some() {
panic!("File already unlocked.");
}
self.is_manually_locked = false;
UnlockFuture::new(self)
}
/// Sets auto seeking mode. File will always seek to specified location before doing any
/// read/write operation.
pub fn set_seeking_mode(&mut self, mode: SeekFrom) {
self.mode = mode;
}
pub fn seeking_mode(&self) -> SeekFrom {
self.mode
}
/// Attempts to sync all OS-internal metadata to disk.
///
/// This function will attempt to ensure that all in-core data reaches the
/// filesystem before returning.
///
/// # Examples
///
/// ```no_run
/// use tokio::fs::File;
/// use tokio::prelude::*;
///
/// # async fn dox() -> std::io::Result<()> {
/// let mut file = File::create("foo.txt").await?;
/// file.write_all(b"hello, world!").await?;
/// file.sync_all().await?;
/// # Ok(())
/// # }
/// ```
///
/// The [`write_all`] method is defined on the [`AsyncWriteExt`] trait.
///
/// [`write_all`]: fn@crate::io::AsyncWriteExt::write_all
/// [`AsyncWriteExt`]: trait@crate::io::AsyncWriteExt
pub async fn sync_all(&mut self) -> Result<()> {
if let Some(file) = &mut self.locked_file {
return file.sync_all().await;
}
let file = self.unlocked_file.take().unwrap();
let (result, file) = spawn_blocking(|| {
(file.sync_all(), file)
}).await.unwrap();
self.unlocked_file = Some(file);
result
}
/// This function is similar to `sync_all`, except that it may not
/// synchronize file metadata to the filesystem.
///
/// This is intended for use cases that must synchronize content, but don't
/// need the metadata on disk. The goal of this method is to reduce disk
/// operations.
///
/// Note that some platforms may simply implement this in terms of `sync_all`.
///
/// # Examples
///
/// ```no_run
/// use tokio::fs::File;
/// use tokio::prelude::*;
///
/// # async fn dox() -> std::io::Result<()> {
/// let mut file = File::create("foo.txt").await?;
/// file.write_all(b"hello, world!").await?;
/// file.sync_data().await?;
/// # Ok(())
/// # }
/// ```
///
/// The [`write_all`] method is defined on the [`AsyncWriteExt`] trait.
///
/// [`write_all`]: fn@crate::io::AsyncWriteExt::write_all
/// [`AsyncWriteExt`]: trait@crate::io::AsyncWriteExt
pub async fn sync_data(&mut self) -> Result<()> {
if let Some(file) = &mut self.locked_file {
return file.sync_data().await;
}
let file = self.unlocked_file.take().unwrap();
let (result, file) = spawn_blocking(|| {
(file.sync_data(), file)
}).await.unwrap();
self.unlocked_file = Some(file);
result
}
/// Gets a reference to the file.
///
/// If the file is locked it will be in the second element of a tuple as [`tokio::fs::File`]
/// otherwise it will be in the first element as [`std::fs::File`].
/// It is inadvisable to directly read/write from/to the file.
pub fn get_ref(&self) -> (Option<&std::fs::File>, Option<&File>) {
(self.unlocked_file.as_ref(), self.locked_file.as_ref())
}
/// Gets a mutable reference to the file.
///
/// If the file is locked it will be in the second element of a tuple as [`tokio::fs::File`]
/// otherwise it will be in the first element as [`std::fs::File`].
/// It is inadvisable to directly read/write from/to the file.
pub fn get_mut(&mut self) -> (Option<&mut std::fs::File>, Option<&mut File>) {
(self.unlocked_file.as_mut(), self.locked_file.as_mut())
}
fn poll_exclusive_lock(&mut self, cx: &mut Context<'_>) -> Poll<Result<()>> {
loop {
match &mut self.locking_fut {
None => {
LockFuture::new_exclusive(self);
}
Some(_) => return self.poll_locking_fut(cx),
}
}
}
fn poll_shared_lock(&mut self, cx: &mut Context<'_>) -> Poll<Result<()>> {
loop {
match &mut self.locking_fut {
None => {
LockFuture::new_shared(self);
}
Some(_) => return self.poll_locking_fut(cx),
}
}
}
fn poll_unlock(&mut self, cx: &mut Context<'_>) -> Poll<()> {
loop {
match &mut self.unlocking_fut {
None => {
UnlockFuture::new(self);
}
Some(fut) => {
// println!("unlocking");
let file = ready!(fut.poll(cx));
let result = file.unlock();
self.unlocked_file = Some(file);
if let Err(e) = result {
self.result = Some(Err(e));
}
self.state = State::Unlocked;
self.unlocking_fut.take();
// println!("unlocked");
return Poll::Ready(());
}
}
}
}
fn poll_locking_fut(&mut self, cx: &mut Context<'_>) -> Poll<Result<()>> {
let result = ready!(self.locking_fut.as_mut().unwrap().poll(cx)).unwrap();
self.locking_fut.take();
return match result {
Ok(file) => {
self.locked_file = Some(file);
self.state = State::Locked;
Poll::Ready(Ok(()))
}
Err((file, e)) => {
self.unlocked_file = Some(file);
self.state = State::Unlocked;
Poll::Ready(Err(e))
}
};
}
}
macro_rules! poll_loop {
($self:ident, $cx:ident, $unlocked_map:expr, $lock:ident, State::Working => $working:block) => {
loop {
match $self.state {
State::Unlocked => {
if let Some(result) = $self.result.take() {
return Poll::Ready(result.map($unlocked_map));
}
$self.state = State::Locking;
}
State::Unlocking => ready!($self.poll_unlock($cx)),
#[allow(unused_must_use)]
State::Locked => match $self.mode {
SeekFrom::Current(0) => $self.state = State::Working,
_ => {
let mode = $self.mode;
$self.as_mut().start_seek($cx, mode);
$self.state = State::Seeking;
}
},
State::Working => {
// println!("working");
$working
// println!("worked");
},
State::Locking => {
if let Err(e) = ready!($self.$lock($cx)) {
return Poll::Ready(Err(e));
}
}
State::Seeking => match ready!($self.as_mut().poll_complete($cx)) {
Ok(_) => $self.state = State::Working,
Err(e) => return Poll::Ready(Err(e)),
},
}
}
};
}
impl AsyncWrite for FileLock {
fn poll_write(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &[u8],
) -> Poll<Result<usize>> {
poll_loop! {self, cx, |x| x as usize, poll_exclusive_lock,
State::Working => {
let result = ready!(Pin::new(self.locked_file.as_mut().unwrap())
.as_mut()
.poll_write(cx, buf));
// println!("written {:?}", &buf[..*result.as_ref().unwrap()]);
if self.is_manually_locked {
self.state = State::Locked;
return Poll::Ready(result);
} else {
self.state = State::Unlocking;
self.result = Some(result.map(|x| x as u64));
}
}
// State::Flushing => {
// if let Err(e) = ready!(self.as_mut().poll_flush(cx)) {
// self.result = Some(Err(e));
// }
// self.state = State::Unlocking;
// }
};
}
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<()>> {
// println!("flushing");
poll_loop! {self, cx, |_| (), poll_exclusive_lock,
State::Working => {
let result = ready!(Pin::new(self.locked_file.as_mut().unwrap())
.as_mut()
.poll_flush(cx));
// println!("flushed");
if self.is_manually_locked {
self.state = State::Locked;
return Poll::Ready(result);
} else {
self.state = State::Unlocking;
self.result = Some(result.map(|_| 0));
}
}
// State::Flushing => {
// let result = ready!(Pin::new(self.locked_file.as_mut().unwrap())
// .as_mut()
// .poll_flush(cx));
// // println!("flushed");
// return Poll::Ready(result);
// }
};
}
fn poll_shutdown(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<()>> {
// println!("shutting down");
// We don't have to do anything as files are unlocked when underlying tokio file reports
// some progress. Looking at implementation of shutdown for `tokio::fs::File` says that it
// does nothing.
Poll::Ready(Ok(()))
}
}
impl AsyncRead for FileLock {
unsafe fn prepare_uninitialized_buffer(&self, _: &mut [MaybeUninit<u8>]) -> bool {
false
}
fn poll_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut [u8],
    ) -> Poll<Result<usize>> {
        poll_loop! {self, cx, |x| x as usize, poll_shared_lock,
            State::Working => {
                let result = ready!(Pin::new(self.locked_file.as_mut().unwrap())
                    .as_mut()
                    .poll_read(cx, buf));
                if self.is_manually_locked {
                    self.state = State::Locked;
                    return Poll::Ready(result);
                } else {
                    self.state = State::Unlocking;
                    self.result = Some(result.map(|x| x as u64));
                }
            }
        };
    }
}
impl AsyncSeek for FileLock {
fn start_seek(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
position: SeekFrom,
) -> Poll<Result<()>> {
if let Some(ref mut locked_file) = self.locked_file {
return Pin::new(locked_file)
.as_mut()
.start_seek(cx, position);
}
let mut file = self.unlocked_file.take().expect("Cannot seek while in the process of locking/unlocking/seeking");
self.seek_fut = Some(spawn_blocking(move || {
(file.seek(position), file)
}));
return Poll::Ready(Ok(()));
}
fn poll_complete(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<u64>> {
if let Some(ref mut locked_file) = self.locked_file {
return Pin::new(locked_file)
.as_mut()
.poll_complete(cx)
}
let (result, file) = ready!(Pin::new(self.seek_fut.as_mut().unwrap()).poll(cx)).unwrap();
self.seek_fut = None;
self.unlocked_file = Some(file);
return Poll::Ready(result);
}
}
impl Debug for FileLock {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
let mut debug = f.debug_struct("FileLock");
match self.state {
State::Unlocked => {
debug.field("file", self.unlocked_file.as_ref().unwrap());
}
State::Locked => {
debug.field("file", self.locked_file.as_ref().unwrap());
}
_ => panic!("Invalid state"),
}
debug.field("mode", &self.mode).finish()
}
}
enum State {
Unlocked,
Unlocking,
Locked,
Locking,
Seeking,
Working,
}
pub struct LockFuture<'a> {
file_lock: &'a mut FileLock,
}
impl<'a> LockFuture<'a> {
fn new_exclusive(file_lock: &'a mut FileLock) -> LockFuture<'a> {
// println!("locking exclusive");
let unlocked_file = file_lock.unlocked_file.take().unwrap();
file_lock.locking_fut = Some(spawn_blocking(move || {
let result = match unlocked_file.lock_exclusive() {
Ok(_) => Ok(File::from_std(unlocked_file)),
Err(e) => Err((unlocked_file, e)),
};
// println!("locked exclusive");
result
}));
LockFuture { file_lock }
}
fn new_shared(file_lock: &'a mut FileLock) -> LockFuture<'a> {
// println!("locking shared");
let unlocked_file = file_lock.unlocked_file.take().unwrap();
file_lock.locking_fut = Some(spawn_blocking(move || {
let result = match unlocked_file.lock_shared() {
Ok(_) => Ok(File::from_std(unlocked_file)),
Err(e) => Err((unlocked_file, e)),
};
// println!("locked shared");
result
}));
LockFuture { file_lock }
}
}
impl<'a> Future for LockFuture<'a> {
type Output = Result<()>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
self.file_lock.poll_locking_fut(cx)
}
}
pub struct UnlockFuture<'a> {
file_lock: &'a mut FileLock,
}
impl<'a> UnlockFuture<'a> {
fn new(file_lock: &'a mut FileLock) -> UnlockFuture<'a> {
file_lock.unlocking_fut = Some(file_lock.locked_file.take().unwrap().into_std().boxed());
file_lock.state = State::Unlocking;
UnlockFuture { file_lock }
}
}
impl<'a> Future for UnlockFuture<'a> {
type Output = ();
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
self.file_lock.poll_unlock(cx)
}
}
xmlsession.py
#
# CORE
# Copyright (c)2011-2014 the Boeing Company.
# See the LICENSE file included in this distribution.
#
# author: Jeff Ahrenholz <[email protected]>
#
'''
Helpers for loading and saving XML files. savesessionxml(session, filename) is
the main public interface here.
'''
import os.path
from core.netns import nodes
from xmlparser import core_document_parser
from xmlwriter import core_document_writer
def opensessionxml(session, filename, start=False, nodecls=nodes.CoreNode):
''' Import a session from the EmulationScript XML format.
'''
options = {'start': start, 'nodecls': nodecls}
doc = core_document_parser(session, filename, options)
if start:
session.name = os.path.basename(filename)
session.filename = filename
session.node_count = str(session.getnodecount())
session.instantiate()
def savesessionxml(session, filename, version = 0.0):
    ''' Export a session to the EmulationScript XML format.
    '''
    doc = core_document_writer(session, version)
    doc.writexml(filename)
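# Illustrative round trip (assumes a configured session object; paths hypothetical):
#   savesessionxml(session, '/tmp/session.xml')
#   opensessionxml(session, '/tmp/session.xml', start=True)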
|
client.go
package fakemachine
import (
"context"
"errors"
"github.com/code-ready/crc/pkg/crc/machine/state"
"github.com/code-ready/crc/pkg/crc/machine/types"
"github.com/code-ready/crc/pkg/crc/network"
)
func NewClient() *Client {
return &Client{}
}
func NewFailingClient() *Client {
return &Client{
Failing: true,
}
}
type Client struct {
Failing bool
}
var DummyClusterConfig = types.ClusterConfig{
ClusterCACert: "MIIDODCCAiCgAwIBAgIIRVfCKNUa1wIwDQYJ",
KubeConfig: "/tmp/kubeconfig",
KubeAdminPass: "foobar",
ClusterAPI: "https://foo.testing:6443",
WebConsoleURL: "https://console.foo.testing:6443",
ProxyConfig: nil,
}
func (c *Client) GetName() string {
return "crc"
}
func (c *Client) Delete() error {
if c.Failing {
return errors.New("delete failed")
}
return nil
}
func (c *Client) GetConsoleURL() (*types.ConsoleResult, error) {
if c.Failing {
return nil, errors.New("console failed")
}
return &types.ConsoleResult{
ClusterConfig: DummyClusterConfig,
State: state.Running,
}, nil
}
func (c *Client) GetProxyConfig(machineName string) (*network.ProxyConfig, error) {
return nil, errors.New("not implemented")
}
func (c *Client) ConnectionDetails() (*types.ConnectionDetails, error) {
return nil, errors.New("not implemented")
}
func (c *Client) PowerOff() error {
if c.Failing {
return errors.New("poweroff failed")
}
return nil
}
func (c *Client) GenerateBundle(forceStop bool) error {
return nil
}
func (c *Client) Start(ctx context.Context, startConfig types.StartConfig) (*types.StartResult, error) {
if c.Failing {
return nil, errors.New("Failed to start")
}
return &types.StartResult{
ClusterConfig: DummyClusterConfig,
KubeletStarted: true,
}, nil
}
func (c *Client) Stop() (state.State, error) {
if c.Failing {
return state.Running, errors.New("stop failed")
}
return state.Stopped, nil
}
func (c *Client) Status() (*types.ClusterStatusResult, error) {
if c.Failing {
return nil, errors.New("broken")
}
return &types.ClusterStatusResult{
CrcStatus: state.Running,
OpenshiftStatus: types.OpenshiftRunning,
OpenshiftVersion: "4.5.1",
DiskUse: 10_000_000_000,
DiskSize: 20_000_000_000,
}, nil
}
func (c *Client) Exists() (bool, error) {
return true, nil
}
func (c *Client) IsRunning() (bool, error) {
return true, nil
}
| NewClient |
example_test.go | package ringbuffer
import "fmt"
func ExampleRingBuffer() | {
rb := New(1024)
_, _ = rb.Write([]byte("abcd"))
fmt.Println(rb.Length())
fmt.Println(rb.free())
buf := make([]byte, 4)
_, _ = rb.Read(buf)
fmt.Println(string(buf))
rb.Write([]byte("1234567890"))
rb.VirtualRead(buf)
fmt.Println(string(buf))
fmt.Println(rb.Length())
fmt.Println(rb.VirtualLength())
rb.VirtualFlush()
fmt.Println(rb.Length())
fmt.Println(rb.VirtualLength())
rb.VirtualRead(buf)
fmt.Println(string(buf))
fmt.Println(rb.Length())
fmt.Println(rb.VirtualLength())
rb.VirtualRevert()
fmt.Println(rb.Length())
fmt.Println(rb.VirtualLength())
// Output: 4
// 1020
// abcd
// 1234
// 10
// 6
// 6
// 6
// 5678
// 6
// 2
// 6
// 6
} |
|
priorityQueue.js | var async = require('../lib');
var expect = require('chai').expect;
describe('priorityQueue', function() {
it('priorityQueue', function (done) {
var call_order = [];
// order of completion: 2,1,4,3
var q = async.priorityQueue(function (task, callback) {
call_order.push('process ' + task);
callback('error', 'arg');
}, 1);
q.push(1, 1.4, function (err, arg) {
expect(err).to.equal('error');
expect(arg).to.equal('arg');
expect(q.length()).to.equal(2);
call_order.push('callback ' + 1);
});
q.push(2, 0.2, function (err, arg) {
expect(err).to.equal('error');
expect(arg).to.equal('arg');
expect(q.length()).to.equal(3);
call_order.push('callback ' + 2);
});
q.push(3, 3.8, function (err, arg) {
expect(err).to.equal('error');
expect(arg).to.equal('arg');
expect(q.length()).to.equal(0);
call_order.push('callback ' + 3);
});
q.push(4, 2.9, function (err, arg) {
expect(err).to.equal('error');
expect(arg).to.equal('arg');
expect(q.length()).to.equal(1);
call_order.push('callback ' + 4);
});
expect(q.length()).to.equal(4);
expect(q.concurrency).to.equal(1);
q.drain = function () {
expect(call_order).to.eql([
'process 2', 'callback 2',
'process 1', 'callback 1',
'process 4', 'callback 4',
'process 3', 'callback 3'
]);
expect(q.concurrency).to.equal(1);
expect(q.length()).to.equal(0);
done();
};
});
it('concurrency', function (done) {
var call_order = [],
delays = [160,80,240,80];
// worker1: --2-3
// worker2: -1---4
// order of completion: 1,2,3,4
var q = async.priorityQueue(function (task, callback) {
setTimeout(function () {
call_order.push('process ' + task);
callback('error', 'arg');
}, delays.splice(0,1)[0]);
}, 2);
q.push(1, 1.4, function (err, arg) {
expect(err).to.equal('error');
expect(arg).to.equal('arg');
expect(q.length()).to.equal(2);
call_order.push('callback ' + 1);
});
q.push(2, 0.2, function (err, arg) {
expect(err).to.equal('error');
expect(arg).to.equal('arg');
expect(q.length()).to.equal(1);
call_order.push('callback ' + 2);
});
q.push(3, 3.8, function (err, arg) {
expect(err).to.equal('error');
expect(arg).to.equal('arg');
expect(q.length()).to.equal(0);
call_order.push('callback ' + 3);
});
q.push(4, 2.9, function (err, arg) {
expect(err).to.equal('error');
expect(arg).to.equal('arg');
expect(q.length()).to.equal(0);
call_order.push('callback ' + 4);
});
expect(q.length()).to.equal(4);
expect(q.concurrency).to.equal(2);
q.drain = function () {
expect(call_order).to.eql([
'process 1', 'callback 1',
'process 2', 'callback 2',
'process 3', 'callback 3',
'process 4', 'callback 4'
]);
expect(q.concurrency).to.equal(2);
expect(q.length()).to.equal(0);
done();
};
});
it('pause in worker with concurrency', function(done) {
var call_order = [];
var q = async.priorityQueue(function (task, callback) {
if (task.isLongRunning) {
q.pause();
setTimeout(function () {
call_order.push(task.id);
q.resume();
callback();
}, 50);
}
else {
call_order.push(task.id);
setTimeout(callback, 10);
}
}, 10);
q.push({ id: 1, isLongRunning: true});
q.push({ id: 2 });
q.push({ id: 3 });
q.push({ id: 4 });
q.push({ id: 5 });
q.drain = function () {
expect(call_order).to.eql([1, 2, 3, 4, 5]);
done();
};
});
context('q.saturated(): ', function() {
        it('should call the saturated callback if tasks length equals concurrency', function(done) {
var calls = [];
var q = async.priorityQueue(function(task, cb) {
calls.push('process ' + task);
async.setImmediate(cb);
}, 4);
q.saturated = function() {
calls.push('saturated');
};
q.empty = function() {
expect(calls.indexOf('saturated')).to.be.above(-1);
setTimeout(function() {
expect(calls).eql([
'process foo4',
'process foo3',
'process foo2',
"saturated",
'process foo1',
'foo4 cb',
"saturated",
'process foo0',
'foo3 cb',
'foo2 cb',
'foo1 cb',
'foo0 cb'
]);
done();
}, 50);
};
q.push('foo0', 5, function () {calls.push('foo0 cb');});
q.push('foo1', 4, function () {calls.push('foo1 cb');});
q.push('foo2', 3, function () {calls.push('foo2 cb');});
q.push('foo3', 2, function () {calls.push('foo3 cb');});
q.push('foo4', 1, function () {calls.push('foo4 cb');});
        }); | it('should have a default buffer property that equals 25% of the concurrency rate', function(done) {
var calls = [];
var q = async.priorityQueue(function(task, cb) {
// nop
calls.push('process ' + task);
async.setImmediate(cb);
}, 10);
expect(q.buffer).to.equal(2.5);
done();
});
it('should allow a user to change the buffer property', function(done) {
var calls = [];
var q = async.priorityQueue(function(task, cb) {
// nop
calls.push('process ' + task);
async.setImmediate(cb);
}, 10);
q.buffer = 4;
expect(q.buffer).to.not.equal(2.5);
expect(q.buffer).to.equal(4);
done();
});
it('should call the unsaturated callback if tasks length is less than concurrency minus buffer', function(done) {
var calls = [];
var q = async.priorityQueue(function(task, cb) {
calls.push('process ' + task);
setTimeout(cb, 10);
}, 4);
q.unsaturated = function() {
calls.push('unsaturated');
};
q.empty = function() {
expect(calls.indexOf('unsaturated')).to.be.above(-1);
setTimeout(function() {
expect(calls).eql([
'process foo4',
'process foo3',
'process foo2',
'process foo1',
'foo4 cb',
'unsaturated',
'process foo0',
'foo3 cb',
'unsaturated',
'foo2 cb',
'unsaturated',
'foo1 cb',
'unsaturated',
'foo0 cb',
'unsaturated'
]);
done();
}, 50);
};
q.push('foo0', 5, function () {calls.push('foo0 cb');});
q.push('foo1', 4, function () {calls.push('foo1 cb');});
q.push('foo2', 3, function () {calls.push('foo2 cb');});
q.push('foo3', 2, function () {calls.push('foo3 cb');});
q.push('foo4', 1, function () {calls.push('foo4 cb');});
});
});
}); | });
context('q.unsaturated(): ',function() { |
movie.js | import React from "react"
import { Box } from "@material-ui/core"
import styled from "styled-components"
import WrapTypo from "../components/WrapTypo"
const Movie = () => {
return (
<Box bgcolor="#211D1A" maxWidth="450px" margin="auto">
<Box width="90%" margin="auto">
<Box mt={4} mb={4}>
<WrapTypo fs="30px" color="#F4A460" align="center">
ムービー部門
</WrapTypo>
</Box>
<Box>
<WrapTypo color="#F4A460">
ハッシュタグを使って、
<br />
自分なりのオーバードライブを投稿しよう!
</WrapTypo>
</Box>
</Box>
</Box>
)
}
| export default Movie |
|
rust.py | # -*- coding: utf-8 -*-
"""
pygments.lexers.rust
~~~~~~~~~~~~~~~~~~~~
Lexers for the Rust language.
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, include, bygroups, words, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Whitespace
__all__ = ['RustLexer']
class RustLexer(RegexLexer):
| """
Lexer for the Rust programming language (version 1.10).
.. versionadded:: 1.6
"""
name = 'Rust'
filenames = ['*.rs', '*.rs.in']
aliases = ['rust', 'rs']
mimetypes = ['text/rust']
keyword_types = (
words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64',
'i128', 'u128', 'usize', 'isize', 'f32', 'f64', 'str', 'bool'),
suffix=r'\b'),
Keyword.Type)
builtin_types = (words((
# Reexported core operators
'Copy', 'Send', 'Sized', 'Sync',
'Drop', 'Fn', 'FnMut', 'FnOnce',
# Reexported types and traits
'Box',
'ToOwned',
'Clone',
'PartialEq', 'PartialOrd', 'Eq', 'Ord',
'AsRef', 'AsMut', 'Into', 'From',
'Default',
'Iterator', 'Extend', 'IntoIterator',
'DoubleEndedIterator', 'ExactSizeIterator',
'Option',
'Some', 'None',
'Result',
'Ok', 'Err',
'SliceConcatExt',
'String', 'ToString',
'Vec'), suffix=r'\b'),
Name.Builtin)
tokens = {
'root': [
# rust allows a file to start with a shebang, but if the first line
# starts with #![ then it's not a shebang but a crate attribute.
(r'#![^[\r\n].*$', Comment.Preproc),
default('base'),
],
'base': [
# Whitespace and Comments
(r'\n', Whitespace),
(r'\s+', Whitespace),
(r'//!.*?\n', String.Doc),
(r'///(\n|[^/].*?\n)', String.Doc),
(r'//(.*?)\n', Comment.Single),
(r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
(r'/\*!', String.Doc, 'doccomment'),
(r'/\*', Comment.Multiline, 'comment'),
# Macro parameters
(r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
# Keywords
(words((
'as', 'async', 'await', 'box', 'const', 'crate', 'else',
'extern', 'for', 'if', 'impl', 'in', 'loop', 'match', 'move',
'mut', 'pub', 'ref', 'return', 'static', 'super', 'trait',
'try', 'unsafe', 'use', 'where', 'while'), suffix=r'\b'),
Keyword),
(words(('abstract', 'alignof', 'become', 'do', 'final', 'macro',
'offsetof', 'override', 'priv', 'proc', 'pure', 'sizeof',
'typeof', 'unsized', 'virtual', 'yield'), suffix=r'\b'),
Keyword.Reserved),
(r'(true|false)\b', Keyword.Constant),
(r'mod\b', Keyword, 'modname'),
(r'let\b', Keyword.Declaration),
(r'fn\b', Keyword, 'funcname'),
(r'(struct|enum|type|union)\b', Keyword, 'typename'),
(r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
keyword_types,
(r'self\b', Name.Builtin.Pseudo),
# Prelude (taken from Rust's src/libstd/prelude.rs)
builtin_types,
            # Path separators, so types don't catch them.
(r'::\b', Text),
# Types in positions.
(r'(?::|->)', Text, 'typename'),
# Labels
(r'(break|continue)(\s*)(\'[A-Za-z_]\w*)?',
bygroups(Keyword, Text.Whitespace, Name.Label)),
# Character Literal
(r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
String.Char),
(r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
String.Char),
# Binary Literal
(r'0b[01_]+', Number.Bin, 'number_lit'),
# Octal Literal
(r'0o[0-7_]+', Number.Oct, 'number_lit'),
# Hexadecimal Literal
(r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
# Decimal Literal
(r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
'number_lit'),
(r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
# String Literal
(r'b"', String, 'bytestring'),
(r'"', String, 'string'),
(r'b?r(#*)".*?"\1', String),
# Lifetime
(r"""'static""", Name.Builtin),
(r"""'[a-zA-Z_]\w*""", Name.Attribute),
# Operators and Punctuation
(r'[{}()\[\],.;]', Punctuation),
(r'[+\-*/%&|<>^!~@=:?]', Operator),
# Identifier
(r'[a-zA-Z_]\w*', Name),
# Attributes
(r'#!?\[', Comment.Preproc, 'attribute['),
# Macros
(r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\s*)(\{)',
bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
Whitespace, Punctuation), 'macro{'),
(r'([A-Za-z_]\w*)(!)(\s*)([A-Za-z_]\w*)?(\()',
bygroups(Comment.Preproc, Punctuation, Whitespace, Name,
Punctuation), 'macro('),
],
'comment': [
(r'[^*/]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'doccomment': [
(r'[^*/]+', String.Doc),
(r'/\*', String.Doc, '#push'),
(r'\*/', String.Doc, '#pop'),
(r'[*/]', String.Doc),
],
'modname': [
(r'\s+', Text),
(r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
default('#pop'),
],
'funcname': [
(r'\s+', Text),
(r'[a-zA-Z_]\w*', Name.Function, '#pop'),
default('#pop'),
],
'typename': [
(r'\s+', Text),
(r'&', Keyword.Pseudo),
builtin_types,
keyword_types,
(r'[a-zA-Z_]\w*', Name.Class, '#pop'),
default('#pop'),
],
'number_lit': [
(r'[ui](8|16|32|64|size)', Keyword, '#pop'),
(r'f(32|64)', Keyword, '#pop'),
default('#pop'),
],
'string': [
(r'"', String, '#pop'),
(r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
(r'[^\\"]+', String),
(r'\\', String),
],
'bytestring': [
(r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
include('string'),
],
'macro{': [
(r'\{', Operator, '#push'),
(r'\}', Operator, '#pop'),
],
'macro(': [
(r'\(', Operator, '#push'),
(r'\)', Operator, '#pop'),
],
'attribute_common': [
(r'"', String, 'string'),
(r'\[', Comment.Preproc, 'attribute['),
(r'\(', Comment.Preproc, 'attribute('),
],
'attribute[': [
include('attribute_common'),
(r'\];?', Comment.Preproc, '#pop'),
(r'[^"\]]+', Comment.Preproc),
],
'attribute(': [
include('attribute_common'),
(r'\);?', Comment.Preproc, '#pop'),
(r'[^")]+', Comment.Preproc),
],
} |
|
helpers.ts | /**
* Tracking is used to detect container width changes, etc.
*/
export function isJqueryEl(el) {
return !!el.jquery;
}
export function getDomElement(el) {
if (!el) {
return null;
}
if (typeof el === 'string') {
return document.querySelector(el);
}
    // instanceof also matches concrete subclasses like HTMLDivElement,
    // whereas a direct constructor comparison would reject them
    if (el instanceof HTMLElement) {
        return el;
    }
if (isJqueryEl(el)) {
return el[0];
}
return null;
}
export function delay(fn, threshold) {
    var timer;
    return function () {
        // capture the caller's arguments so the delayed call receives them
        var args = arguments;
        if (timer !== undefined) {
            clearTimeout(timer);
        }
        timer = setTimeout(function () {
            fn.apply(null, args);
        }, threshold);
    };
}
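// A hedged usage sketch (the input handler and element are hypothetical):
//
// const onType = delay(() => {
//     // runs once typing has paused for 200ms
// }, 200);
// inputEl.addEventListener('input', onType);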
export function throttle(func, wait, options) {
var context, args, result;
var timeout = null;
var previous = 0;
if (!options) options = {};
var later = function () {
previous = options.leading === false ? 0 : Date.now();
timeout = null;
result = func.apply(context, args);
if (!timeout) context = args = null;
};
return function () {
var now = Date.now();
if (!previous && options.leading === false) previous = now;
var remaining = wait - (now - previous);
context = this;
args = arguments;
if (remaining <= 0 || remaining > wait) {
if (timeout) {
clearTimeout(timeout);
timeout = null;
}
previous = now; | result = func.apply(context, args);
if (!timeout) context = args = null;
} else if (!timeout && options.trailing !== false) {
timeout = setTimeout(later, remaining);
}
return result;
};
} | |
checksum.go | package main
import (
stdmd5 "crypto/md5"
"fmt"
"io"
"os"
)
// ChecksumFunc defines a digest-verification function for a target file.
// dstFilename is the target file whose digest will be computed.
// hexHash is the expected digest of the target file.
// It returns true if the computed digest matches the expected value, false otherwise.
// A non-nil error is returned if digest computation fails.
type ChecksumFunc func(dstFilename, hexHash string) (ok bool, err error)
// MD5Checksum reports whether the MD5 digest of the target file matches the expected digest.
func MD5Checksum(dstFilename, hexHash string) (ok bool, err error) {
md5, err := FileMD5(dstFilename)
if err != nil {
return false, err
}
return md5 == hexHash, nil
}
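// A hedged usage sketch (the file path and digest below are illustrative):
//
//	var verify ChecksumFunc = MD5Checksum
//	ok, err := verify("/tmp/pkg.tar.gz", "9e107d9d372bb6826bd81d3542a419d6")
//	if err != nil {
//		// digest computation failed (e.g. the file could not be opened)
//	} else if !ok {
//		// digest mismatch: reject the file
//	}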
// FileMD5 computes the MD5 digest of a file.
func FileMD5(filename string) (md5 string, err error) {
f, err := os.Open(filename)
if err != nil {
return "", err
} | defer f.Close()
h := stdmd5.New()
if _, err := io.Copy(h, f); err != nil {
return "", err
}
return fmt.Sprintf("%x", h.Sum(nil)), nil
} | |
DataTile.d.ts | export default DataTile;
/**
* Data that can be used with a DataTile. For increased browser compatibility, use
* Uint8Array instead of Uint8ClampedArray where possible.
*/
export type Data = Uint8Array | Uint8ClampedArray | Float32Array | DataView;
export type Options = {
/**
* Tile coordinate.
*/ | tileCoord: import("./tilecoord.js").TileCoord;
/**
* Data loader.
*/
loader: () => Promise<Data>;
/**
* A duration for tile opacity
* transitions in milliseconds. A duration of 0 disables the opacity transition.
*/
transition?: number | undefined;
/**
* Use interpolated values when resampling. By default,
* the nearest neighbor is used when resampling.
*/
interpolate?: boolean | undefined;
};
/**
* Data that can be used with a DataTile. For increased browser compatibility, use
* Uint8Array instead of Uint8ClampedArray where possible.
* @typedef {Uint8Array|Uint8ClampedArray|Float32Array|DataView} Data
*/
/**
* @typedef {Object} Options
* @property {import("./tilecoord.js").TileCoord} tileCoord Tile coordinate.
* @property {function(): Promise<Data>} loader Data loader.
* @property {number} [transition=250] A duration for tile opacity
* transitions in milliseconds. A duration of 0 disables the opacity transition.
* @property {boolean} [interpolate=false] Use interpolated values when resampling. By default,
* the nearest neighbor is used when resampling.
* @api
*/
declare class DataTile extends Tile {
/**
* @param {Options} options Tile options.
*/
constructor(options: Options);
/**
* @type {function(): Promise<Data>}
* @private
*/
private loader_;
/**
* @type {Data}
* @private
*/
private data_;
/**
* @type {Error}
* @private
*/
private error_;
/**
* Get the data for the tile.
* @return {Data} Tile data.
* @api
*/
getData(): Data;
/**
* Get any loading error.
* @return {Error} Loading error.
* @api
*/
getError(): Error;
}
import Tile from "./Tile.js";
//# sourceMappingURL=DataTile.d.ts.map | |
data_source_self.go | package clevercloud
import (
"context"
"github.com/clevercloud/clevercloud-go/clevercloud"
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)
func dataSourceSelf() *schema.Resource {
return &schema.Resource{
ReadContext: dataSourceSelfRead,
Schema: map[string]*schema.Schema{
"name": {
Type: schema.TypeString,
Computed: true,
},
"email": {
Type: schema.TypeString,
Computed: true,
},
"phone": {
Type: schema.TypeString,
Computed: true,
},
"address": {
Type: schema.TypeString,
Computed: true,
},
"city": {
Type: schema.TypeString,
Computed: true,
},
"zip_code": {
Type: schema.TypeString,
Computed: true,
},
"country": {
Type: schema.TypeString,
Computed: true,
},
"avatar": {
Type: schema.TypeString,
Computed: true,
},
"creation_date": {
Type: schema.TypeInt,
Computed: true,
},
"language": {
Type: schema.TypeString,
Computed: true,
},
"email_validated": {
Type: schema.TypeBool,
Computed: true,
},
"oauth_apps": {
Type: schema.TypeList,
Computed: true,
Elem: &schema.Schema{
Type: schema.TypeString,
},
},
"admin": {
Type: schema.TypeBool,
Computed: true,
},
"can_pay": {
Type: schema.TypeBool,
Computed: true,
},
"preferred_mfa": {
Type: schema.TypeString,
Computed: true,
},
"has_password": {
Type: schema.TypeBool,
Computed: true,
},
},
}
}
func | (ctx context.Context, d *schema.ResourceData, m interface{}) diag.Diagnostics {
cc := m.(*clevercloud.APIClient)
var diags diag.Diagnostics
self, _, err := cc.SelfApi.GetUser(context.Background())
if err != nil {
return diag.FromErr(err)
}
d.SetId(self.Id)
d.Set("name", self.Name)
d.Set("email", self.Email)
d.Set("phone", self.Phone)
d.Set("address", self.Address)
d.Set("city", self.City)
d.Set("zip_code", self.Zipcode)
d.Set("country", self.Country)
d.Set("avatar", self.Avatar)
d.Set("creation_date", self.CreationDate)
d.Set("language", self.Lang)
d.Set("email_validated", self.EmailValidated)
d.Set("oauth_apps", self.OauthApps)
d.Set("admin", self.Admin)
d.Set("can_pay", self.CanPay)
d.Set("preferred_mfa", self.PreferredMFA)
d.Set("has_password", self.HasPassword)
return diags
}
| dataSourceSelfRead |
routers.py | from django.conf.urls import url
from rest_framework.routers import SimpleRouter, Route
class DiscoveryAPIRouter(SimpleRouter):
routes = [
# List route.
Route(
url=r'^{prefix}{trailing_slash}$',
mapping={
'get': 'list'
},
name='{basename}-list',
initkwargs={'suffix': 'List'}
),
# Detail route.
Route(
url=r'^{prefix}/{lookup}{trailing_slash}$',
mapping={
'get': 'retrieve'
},
name='{basename}-detail',
initkwargs={'suffix': 'Instance'}
),
# Values route.
Route(
url=r'^{prefix}/values/{field_lookup}{trailing_slash}$',
mapping={
'get': 'values'
},
name='{basename}-values',
initkwargs={'suffix': 'Values'}
),
# Count route.
Route(
url=r'^{prefix}/count/{field_lookup}{trailing_slash}$',
mapping={
'get': 'count'
},
name='{basename}-count',
initkwargs={'suffix': 'Count'}
)
]
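    # A hedged illustration (viewset and prefix are hypothetical): registering
    # router.register(r'awards', AwardViewSet) against the routes above yields
    # the URL names awards-list, awards-detail, awards-values and awards-count.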
def __init__(self):
self.trailing_slash = '/?'
super(SimpleRouter, self).__init__()
def get_field_lookup_regex(self, viewset, lookup_prefix=''):
|
def get_urls(self):
"""
Use the registered viewsets to generate a list of URL patterns.
"""
ret = []
for prefix, viewset, basename in self.registry:
lookup = self.get_lookup_regex(viewset)
field_lookup = self.get_field_lookup_regex(viewset)
routes = self.get_routes(viewset)
for route in routes:
mapping = self.get_method_map(viewset, route.mapping)
if not mapping:
continue
regex = route.url.format(
prefix=prefix,
lookup=lookup,
field_lookup=field_lookup,
trailing_slash=self.trailing_slash
)
if not prefix and regex[:2] == '^/':
regex = '^' + regex[2:]
initkwargs = route.initkwargs.copy()
initkwargs.update({
'basename': basename,
})
view = viewset.as_view(mapping, **initkwargs)
name = route.name.format(basename=basename)
ret.append(url(regex, view, name=name))
return ret
| base_regex = '(?P<{lookup_prefix}field_lookup>{lookup_value})'
lookup_value = getattr(viewset, 'lookup_value_regex', '[^/.]+')
return base_regex.format(
lookup_prefix=lookup_prefix,
lookup_value=lookup_value
) |
public_key.go | //
// Copyright 2021 The Sigstore Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cli
import (
"context"
"flag"
"fmt"
"io"
"os"
"github.com/peterbourgon/ff/v3/ffcli"
"github.com/pkg/errors"
"github.com/sigstore/cosign/pkg/cosign"
"github.com/sigstore/cosign/pkg/cosign/pivkey"
"github.com/sigstore/sigstore/pkg/signature"
"github.com/sigstore/sigstore/pkg/signature/options"
)
type NamedWriter struct {
Name string
io.Writer
}
func PublicKey() *ffcli.Command {
var (
flagset = flag.NewFlagSet("cosign public-key", flag.ExitOnError)
key = flagset.String("key", "", "path to the private key file, public key URL, or KMS URI")
sk = flagset.Bool("sk", false, "whether to use a hardware security key")
slot = flagset.String("slot", "", "security key slot to use for generated key (default: signature) (authentication|signature|card-authentication|key-management)")
outFile = flagset.String("outfile", "", "file to write public key")
)
return &ffcli.Command{
Name: "public-key",
ShortUsage: "cosign public-key gets a public key from the key-pair",
ShortHelp: "public-key gets a public key from the key-pair",
LongHelp: `public-key gets a public key from the key-pair and
writes to a specified file. By default, it will write to standard out.
EXAMPLES
# extract public key from private key to a specified out file.
cosign public-key -key <PRIVATE KEY FILE> -outfile <OUTPUT>
# extract public key from URL.
cosign public-key -key https://host.for/<FILE> -outfile <OUTPUT>
# extract public key from Azure Key Vault
cosign public-key -key azurekms://[VAULT_NAME][VAULT_URI]/[KEY]
# extract public key from AWS KMS
cosign public-key -key awskms://[ENDPOINT]/[ID/ALIAS/ARN]
# extract public key from Google Cloud KMS
cosign public-key -key gcpkms://projects/[PROJECT]/locations/global/keyRings/[KEYRING]/cryptoKeys/[KEY]
# extract public key from Hashicorp Vault KMS
cosign public-key -key hashivault://[KEY]`,
FlagSet: flagset,
Exec: func(ctx context.Context, args []string) error {
if !oneOf(*key, *sk) {
return &KeyParseError{}
}
writer := NamedWriter{Name: "", Writer: nil}
var f *os.File
// Open output file for public key if specified.
if *outFile != "" {
writer.Name = *outFile
var err error
f, err = os.OpenFile(*outFile, os.O_WRONLY|os.O_CREATE, 0600)
if err != nil |
writer.Writer = f
defer f.Close()
} else {
writer.Writer = os.Stdout
}
pk := Pkopts{
KeyRef: *key,
Sk: *sk,
Slot: *slot,
}
return GetPublicKey(ctx, pk, writer, GetPass)
},
}
}
type Pkopts struct {
KeyRef string
Sk bool
Slot string
}
func GetPublicKey(ctx context.Context, opts Pkopts, writer NamedWriter, pf cosign.PassFunc) error {
var k signature.PublicKeyProvider
switch {
case opts.KeyRef != "":
s, err := signerFromKeyRef(ctx, opts.KeyRef, pf)
if err != nil {
return err
}
k = s
case opts.Sk:
sk, err := pivkey.GetKeyWithSlot(opts.Slot)
if err != nil {
return errors.Wrap(err, "opening piv token")
}
defer sk.Close()
pk, err := sk.Verifier()
if err != nil {
return errors.Wrap(err, "initializing piv token verifier")
}
k = pk
}
pemBytes, err := cosign.PublicKeyPem(k, options.WithContext(ctx))
if err != nil {
return err
}
if _, err := writer.Write(pemBytes); err != nil {
return err
}
if writer.Name != "" {
fmt.Fprintln(os.Stderr, "Public key written to ", writer.Name)
}
return nil
}
| {
return err
} |
demo.js | class PaymentPackage {
constructor(name, value) {
this.name = name;
this.value = value;
this.VAT = 20; // Default value
this.active = true; // Default value
}
get name() {
return this._name;
}
set name(newValue) {
if (typeof newValue !== 'string') {
throw new Error('Name must be a non-empty string');
}
if (newValue.length === 0) {
throw new Error('Name must be a non-empty string');
}
this._name = newValue;
}
get value() {
return this._value;
}
set value(newValue) {
if (typeof newValue !== 'number') {
throw new Error('Value must be a non-negative number');
}
if (newValue < 0) {
throw new Error('Value must be a non-negative number');
}
this._value = newValue;
}
get VAT() {
return this._VAT;
}
set VAT(newValue) {
if (typeof newValue !== 'number') {
throw new Error('VAT must be a non-negative number');
}
if (newValue < 0) {
throw new Error('VAT must be a non-negative number');
}
this._VAT = newValue;
}
get active() {
return this._active;
}
set active(newValue) {
if (typeof newValue !== 'boolean') {
throw new Error('Active status must be a boolean');
}
this._active = newValue;
}
toString() {
const output = [
`Package: ${this.name}` + (this.active === false ? ' (inactive)' : ''),
`- Value (excl. VAT): ${this.value}`,
`- Value (VAT ${this.VAT}%): ${this.value * (1 + this.VAT / 100)}`
];
return output.join('\n');
}
}
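// A hedged usage sketch (values are illustrative):
//
// const pkg = new PaymentPackage('Consulting', 100);
// console.log(pkg.toString());
// // Package: Consulting
// // - Value (excl. VAT): 100
// // - Value (VAT 20%): 120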
let expect = require('chai').expect;
describe('', () => {
describe('General Unit Tests', () => {
it('should return true for toString exist', () => {
expect(PaymentPackage.prototype.hasOwnProperty('toString')).to.be.true;
});
it('should return true for name exist', () => {
expect(PaymentPackage.prototype.hasOwnProperty('name')).to.be.true;
});
it('should return true for value exist', () => {
expect(PaymentPackage.prototype.hasOwnProperty('value')).to.be.true;
});
it('should return true for VAT exist', () => {
expect(PaymentPackage.prototype.hasOwnProperty('VAT')).to.be.true;
});
it('should return true for active exist', () => {
expect(PaymentPackage.prototype.hasOwnProperty('active')).to.be.true;
});
});
describe('', () => {
let list;
beforeEach(() => {
list = new PaymentPackage('Hr', 2);
});
        it('should expose toString as a function', () => {
expect(typeof list.toString).to.be.equal('function')
});
        it('should expose name as a string', () => {
expect(typeof list.name).to.be.equal('string')
});
        it('should expose value as a number', () => {
expect(typeof list.value).to.be.equal('number')
});
        it('should expose VAT as a number', () => {
expect(typeof list.VAT).to.be.equal('number')
});
        it('should expose active as a boolean', () => {
expect(typeof list.active).to.be.equal('boolean')
});
it("should be an object", () => {
expect(Object.prototype.toString.call(list)).to.equal('[object Object]');
});
        it('should render the package via toString', () => {
expect(list.toString(0)).to.be.equal('Package: Hr\n- Value (excl. VAT): 2\n- Value (VAT 20%): 2.4')
});
        it('should default active to true', () => {
expect(list.active).to.be.equal(true)
});
});
| let list;
let wrongPack;
beforeEach(() => {
list = [
new PaymentPackage('HR Services', 1500),
new PaymentPackage('Consultation', 800),
new PaymentPackage('Partnership Fee', 7000)
]
wrongPack = new PaymentPackage('Transfer Fee', 100);
});
        it('should stringify a list of packages', () => {
expect(list.toString()).to.equal('Package: HR Services\n- Value (excl. VAT): 1500\n- Value (VAT 20%): 1800,Package: Consultation\n- Value (excl. VAT): 800\n- Value (VAT 20%): 960,Package: Partnership Fee\n- Value (excl. VAT): 7000\n- Value (VAT 20%): 8400')
});
        it('should throw when active is set to a non-boolean', () => {
expect(() => wrongPack.active = null).to.throw(Error)
});
});
describe("Throw error tests", () => {
it("should throw an Error when instantiated with empty string as name", () => {
expect(() => new PaymentPackage('', 232)).to.throw(Error);
});
it("should throw an Error when instantiated with non-string as name", () => {
expect(() => new PaymentPackage([], 232)).to.throw(Error);
});
it("should throw an Error when try to set new name with empty string", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.name = '').to.throw(Error);
});
it("should throw an Error when try to set new name with non-string", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.name = {}).to.throw(Error);
});
it("should throw an Error when instantiated with negative number for value", () => {
expect(() => new PaymentPackage('Test', -232)).to.throw(Error);
});
it("should not throw an Error when instantiated with 0", () => {
expect(() => new PaymentPackage('Test', 0)).to.not.throw(Error);
});
it("should throw an Error when instantiated with NaN as value", () => {
expect(() => new PaymentPackage('Test', [2323])).to.throw(Error);
});
it("should throw an Error when try to set new value with a negative number", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.value = -30).to.throw(Error);
});
it("should not throw an Error when try to set new value to 0", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.value = 0).to.not.throw(Error);
});
it("should throw an Error when try to set new name with NaN", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.value = {}).to.throw(Error);
});
it("should throw an Error when try to set new VAT with a negative number", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.VAT = -300).to.throw(Error);
});
it("should not throw an Error when set new VAT to 0", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.VAT = 0).to.not.throw(Error);
});
it("should throw an Error when try to set new VAT with NaN", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.VAT = []).to.throw(Error);
});
it("should throw an Error when try to set new status with non boolean", () => {
let pp = new PaymentPackage('Test', 300);
expect(() => pp.active = 'true').to.throw(Error);
});
});
}); | describe('', () => { |
print_schedule_graph.rs | use bevy::{log::LogPlugin, prelude::*};
fn main() {
let mut app = App::new();
app.add_plugins_with(DefaultPlugins, |plugins| plugins.disable::<LogPlugin>());
bevy_mod_debugdump::print_schedule(&mut app);
} | ||
mod.rs | //! Type agnostic columnar data structure.
pub use crate::prelude::ChunkCompare;
use crate::prelude::*;
use arrow::array::ArrayRef;
pub(crate) mod arithmetic;
mod comparison;
mod from;
pub mod implementations;
mod into;
pub(crate) mod iterator;
pub mod ops;
mod series_trait;
use crate::chunked_array::ops::rolling_window::RollingOptions;
#[cfg(feature = "rank")]
use crate::prelude::unique::rank::{rank, RankMethod};
#[cfg(feature = "groupby_list")]
use crate::utils::Wrap;
use crate::utils::{split_ca, split_series};
use crate::{series::arithmetic::coerce_lhs_rhs, POOL};
#[cfg(feature = "groupby_list")]
use ahash::RandomState;
pub use from::*;
use num::NumCast;
use rayon::prelude::*;
pub use series_trait::*;
use std::borrow::Cow;
#[cfg(feature = "groupby_list")]
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::sync::Arc;
/// # Series
/// The columnar data type for a DataFrame.
///
/// Most of the available functions are defined in the [SeriesTrait trait](crate::series::SeriesTrait).
///
/// The `Series` struct consists
/// of typed [ChunkedArray](../chunked_array/struct.ChunkedArray.html)'s. To quickly cast
/// a `Series` to a `ChunkedArray` you can call the method with the name of the type:
///
/// ```
/// # use polars_core::prelude::*;
/// let s: Series = [1, 2, 3].iter().collect();
/// // Quickly obtain the ChunkedArray wrapped by the Series.
/// let chunked_array = s.i32().unwrap();
/// ```
///
/// ## Arithmetic
///
/// You can do standard arithmetic on series.
/// ```
/// # use polars_core::prelude::*;
/// let s: Series = [1, 2, 3].iter().collect();
/// let out_add = &s + &s;
/// let out_sub = &s - &s;
/// let out_div = &s / &s;
/// let out_mul = &s * &s;
/// ```
///
/// Or with series and numbers.
///
/// ```
/// # use polars_core::prelude::*;
/// let s: Series = (1..3).collect();
/// let out_add_one = &s + 1;
/// let out_multiply = &s * 10;
///
/// // Cannot overload the left-hand-side operator.
/// let out_divide = 1.div(&s);
/// let out_add = 1.add(&s);
/// let out_subtract = 1.sub(&s);
/// let out_multiply = 1.mul(&s);
/// ```
///
/// ## Comparison
/// You can obtain a boolean mask by comparing series.
///
/// ```
/// # use polars_core::prelude::*;
/// use itertools::Itertools;
/// let s = Series::new("dollars", &[1, 2, 3]);
/// let mask = s.equal(1);
/// let valid = [true, false, false].iter();
/// assert!(mask
/// .into_iter()
/// .map(|opt_bool| opt_bool.unwrap()) // option, because series can be null
/// .zip(valid)
/// .all(|(a, b)| a == *b))
/// ```
///
/// See all the comparison operators in the [CmpOps trait](../chunked_array/comparison/trait.CmpOps.html)
///
/// ## Iterators
/// The Series variants contain differently typed [ChunkedArray's](../chunked_array/struct.ChunkedArray.html).
/// These structs can be turned into iterators, making it possible to use any function/ closure you want
/// on a Series.
///
/// These iterators return an `Option<T>` because the values of a series may be null.
///
/// ```
/// use polars_core::prelude::*;
/// let pi = 3.14;
/// let s = Series::new("angle", [2f32 * pi, pi, 1.5 * pi].as_ref());
/// let s_cos: Series = s.f32()
/// .expect("series was not an f32 dtype")
/// .into_iter()
/// .map(|opt_angle| opt_angle.map(|angle| angle.cos()))
/// .collect();
/// ```
///
/// ## Creation
/// Series can be created from different data structures. Below we'll show a few ways we can create
/// a Series object.
///
/// ```
/// # use polars_core::prelude::*;
/// // Series can be created from Vec's, slices and arrays
/// Series::new("boolean series", &vec![true, false, true]);
/// Series::new("int series", &[1, 2, 3]);
/// // And can be nullable
/// Series::new("got nulls", &[Some(1), None, Some(2)]);
///
/// // Series can also be collected from iterators
/// let from_iter: Series = (0..10)
/// .into_iter()
/// .collect();
///
/// ```
#[derive(Clone)]
pub struct Series(pub Arc<dyn SeriesTrait>);
#[cfg(feature = "groupby_list")]
impl PartialEq for Wrap<Series> {
fn eq(&self, other: &Self) -> bool {
self.0.series_equal_missing(other)
}
}
#[cfg(feature = "groupby_list")]
impl Eq for Wrap<Series> {}
#[cfg(feature = "groupby_list")]
impl Hash for Wrap<Series> {
fn hash<H: Hasher>(&self, state: &mut H) {
let rs = RandomState::with_seeds(0, 0, 0, 0);
let h = UInt64Chunked::new_from_aligned_vec("", self.0.vec_hash(rs)).sum();
h.hash(state)
}
}
impl Series {
pub(crate) fn get_inner_mut(&mut self) -> &mut dyn SeriesTrait {
if Arc::weak_count(&self.0) + Arc::strong_count(&self.0) != 1 {
self.0 = self.0.clone_inner();
}
Arc::get_mut(&mut self.0).expect("implementation error")
}
/// Rename series.
pub fn rename(&mut self, name: &str) -> &mut Series {
self.get_inner_mut().rename(name);
self
}
    /// Shrink the capacity of this array to fit its length.
pub fn shrink_to_fit(&mut self) {
self.get_inner_mut().shrink_to_fit()
}
/// Append arrow array of same datatype.
pub fn append_array(&mut self, other: ArrayRef) -> Result<&mut Self> {
self.get_inner_mut().append_array(other)?;
Ok(self)
}
/// Append a Series of the same type in place.
pub fn append(&mut self, other: &Series) -> Result<&mut Self> {
self.get_inner_mut().append(other)?;
Ok(self)
}
/// Sort in place.
pub fn sort_in_place(&mut self, reverse: bool) -> &mut Self {
self.get_inner_mut().sort_in_place(reverse);
self
}
/// Rechunk and return a pointer to the start of the Series.
/// Only implemented for numeric types
pub fn as_single_ptr(&mut self) -> Result<usize> {
self.get_inner_mut().as_single_ptr()
}
/// Cast `[Series]` to another `[DataType]`
pub fn cast(&self, dtype: &DataType) -> Result<Self> {
self.0.cast(dtype)
}
/// Returns `None` if the array is empty or only contains null values.
/// ```
/// # use polars_core::prelude::*;
/// let s = Series::new("days", [1, 2, 3].as_ref());
/// assert_eq!(s.sum(), Some(6));
/// ```
pub fn sum<T>(&self) -> Option<T>
where
T: NumCast,
{
self.sum_as_series()
.cast(&DataType::Float64)
.ok()
.and_then(|s| s.f64().unwrap().get(0).and_then(T::from))
}
/// Returns the minimum value in the array, according to the natural order.
/// Returns an option because the array is nullable.
/// ```
/// # use polars_core::prelude::*;
/// let s = Series::new("days", [1, 2, 3].as_ref());
/// assert_eq!(s.min(), Some(1));
/// ```
pub fn min<T>(&self) -> Option<T>
where
T: NumCast,
{
self.min_as_series()
.cast(&DataType::Float64)
.ok()
.and_then(|s| s.f64().unwrap().get(0).and_then(T::from))
}
/// Returns the maximum value in the array, according to the natural order.
/// Returns an option because the array is nullable.
/// ```
/// # use polars_core::prelude::*;
/// let s = Series::new("days", [1, 2, 3].as_ref());
/// assert_eq!(s.max(), Some(3));
/// ```
pub fn max<T>(&self) -> Option<T>
where
T: NumCast,
{
self.max_as_series()
.cast(&DataType::Float64)
.ok()
.and_then(|s| s.f64().unwrap().get(0).and_then(T::from))
}
    /// Explode a list or utf8 Series. This expands every item to a new row.
pub fn explode(&self) -> Result<Series> {
match self.dtype() {
DataType::List(_) => self.list().unwrap().explode(),
DataType::Utf8 => self.utf8().unwrap().explode(),
_ => Err(PolarsError::InvalidOperation(
format!(
"explode not supported for Series with dtype {:?}",
self.dtype()
)
.into(),
)),
}
}
    /// Check if float value is NaN (note this is different from missing/null)
pub fn is_nan(&self) -> Result<BooleanChunked> {
match self.dtype() {
DataType::Float32 => Ok(self.f32().unwrap().is_nan()),
DataType::Float64 => Ok(self.f64().unwrap().is_nan()),
_ => Ok(BooleanChunked::full(self.name(), false, self.len())),
}
}
    /// Check if float value is NaN (note this is different from missing/null)
pub fn is_not_nan(&self) -> Result<BooleanChunked> {
match self.dtype() {
DataType::Float32 => Ok(self.f32().unwrap().is_not_nan()),
DataType::Float64 => Ok(self.f64().unwrap().is_not_nan()),
_ => Ok(BooleanChunked::full(self.name(), true, self.len())),
}
}
/// Check if float value is finite
pub fn is_finite(&self) -> Result<BooleanChunked> {
match self.dtype() {
DataType::Float32 => Ok(self.f32().unwrap().is_finite()),
DataType::Float64 => Ok(self.f64().unwrap().is_finite()),
_ => Err(PolarsError::InvalidOperation(
format!(
"is_nan not supported for series with dtype {:?}",
self.dtype()
)
.into(),
)),
}
}
    /// Check if float value is infinite
pub fn is_infinite(&self) -> Result<BooleanChunked> {
match self.dtype() {
DataType::Float32 => Ok(self.f32().unwrap().is_infinite()),
DataType::Float64 => Ok(self.f64().unwrap().is_infinite()),
_ => Err(PolarsError::InvalidOperation(
format!(
"is_nan not supported for series with dtype {:?}",
self.dtype()
)
.into(),
)),
}
}
/// Create a new ChunkedArray with values from self where the mask evaluates `true` and values
/// from `other` where the mask evaluates `false`
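    ///
    /// A hedged sketch (requires the `zip_with` feature; values are illustrative):
    ///
    /// ```ignore
    /// # use polars_core::prelude::*;
    /// let mask = BooleanChunked::new_from_slice("m", &[true, false, true]);
    /// let a = Series::new("a", &[1, 2, 3]);
    /// let b = Series::new("b", &[10, 20, 30]);
    /// let out = a.zip_with(&mask, &b)?; // [1, 20, 3]
    /// # Ok::<(), PolarsError>(())
    /// ```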
#[cfg(feature = "zip_with")]
#[cfg_attr(docsrs, doc(cfg(feature = "zip_with")))]
pub fn zip_with(&self, mask: &BooleanChunked, other: &Series) -> Result<Series> {
let (lhs, rhs) = coerce_lhs_rhs(self, other)?;
lhs.zip_with_same_type(mask, rhs.as_ref())
}
    /// Cast a datelike Series to its physical representation.
    /// Primitives remain unchanged.
    ///
    /// * Date -> Int32
    /// * Datetime -> Int64
    /// * Time -> Int64
///
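    /// A hedged sketch of the Date case (assumes the `dtype-date` feature):
    ///
    /// ```ignore
    /// # use polars_core::prelude::*;
    /// let s = Series::new("d", &[1i32, 2]).cast(&DataType::Date)?;
    /// assert_eq!(s.to_physical_repr().dtype(), &DataType::Int32);
    /// # Ok::<(), PolarsError>(())
    /// ```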
pub fn to_physical_repr(&self) -> Cow<Series> {
use DataType::*;
match self.dtype() {
Date => Cow::Owned(self.cast(&DataType::Int32).unwrap()),
Datetime | Time => Cow::Owned(self.cast(&DataType::Int64).unwrap()),
_ => Cow::Borrowed(self),
}
}
/// Take by index if ChunkedArray contains a single chunk.
///
/// # Safety
/// This doesn't check any bounds. Null validity is checked.
pub unsafe fn take_unchecked_threaded(
&self,
idx: &UInt32Chunked,
rechunk: bool,
) -> Result<Series> {
let n_threads = POOL.current_num_threads();
let idx = split_ca(idx, n_threads)?;
let series: Result<Vec<_>> =
POOL.install(|| idx.par_iter().map(|idx| self.take_unchecked(idx)).collect());
let s = series?
.into_iter()
.reduce(|mut s, s1| {
s.append(&s1).unwrap();
s
})
.unwrap();
if rechunk {
Ok(s.rechunk())
} else {
Ok(s)
}
}
    /// Take by index. This operation clones data.
///
/// # Safety
///
/// Out of bounds access doesn't Error but will return a Null value
pub fn take_threaded(&self, idx: &UInt32Chunked, rechunk: bool) -> Result<Series> {
let n_threads = POOL.current_num_threads();
let idx = split_ca(idx, n_threads).unwrap();
let series = POOL.install(|| {
idx.par_iter()
.map(|idx| self.take(idx))
.collect::<Result<Vec<_>>>()
})?;
let s = series
.into_iter()
.reduce(|mut s, s1| {
s.append(&s1).unwrap();
s
})
.unwrap();
if rechunk {
Ok(s.rechunk())
} else {
Ok(s)
}
}
/// Filter by boolean mask. This operation clones data.
pub fn filter_threaded(&self, filter: &BooleanChunked, rechunk: bool) -> Result<Series> {
// this would fail if there is a broadcasting filter.
// because we cannot split that filter over threads
// besides they are a no-op, so we do the standard filter.
if filter.len() == 1 {
return self.filter(filter);
}
let n_threads = POOL.current_num_threads();
let filters = split_ca(filter, n_threads).unwrap();
let series = split_series(self, n_threads).unwrap();
let series: Result<Vec<_>> = POOL.install(|| {
filters
.par_iter()
.zip(series)
.map(|(filter, s)| s.filter(filter))
.collect()
});
let s = series?
.into_iter()
.reduce(|mut s, s1| {
s.append(&s1).unwrap();
s
})
.unwrap();
if rechunk {
Ok(s.rechunk())
} else {
Ok(s)
}
}
    /// Round underlying floating point array to the given number of decimals.
#[cfg(feature = "round_series")]
#[cfg_attr(docsrs, doc(cfg(feature = "round_series")))]
pub fn round(&self, decimals: u32) -> Result<Self> {
use num::traits::Pow;
if let Ok(ca) = self.f32() {
let multiplier = 10.0.pow(decimals as f32) as f32;
let s = ca
.apply(|val| (val * multiplier).round() / multiplier)
.into_series();
return Ok(s);
}
if let Ok(ca) = self.f64() {
let multiplier = 10.0.pow(decimals as f32) as f64;
let s = ca
.apply(|val| (val * multiplier).round() / multiplier)
.into_series();
return Ok(s);
}
Err(PolarsError::SchemaMisMatch(
format!("{:?} is not a floating point datatype", self.dtype()).into(),
))
}
#[cfg(feature = "round_series")]
#[cfg_attr(docsrs, doc(cfg(feature = "round_series")))]
    /// Floor underlying floating point array to the largest integers smaller than or equal to the float value.
pub fn floor(&self) -> Result<Self> {
if let Ok(ca) = self.f32() {
let s = ca.apply(|val| val.floor()).into_series();
return Ok(s);
}
if let Ok(ca) = self.f64() {
let s = ca.apply(|val| val.floor()).into_series();
return Ok(s);
}
Err(PolarsError::SchemaMisMatch(
format!("{:?} is not a floating point datatype", self.dtype()).into(),
))
}
#[cfg(feature = "dot_product")]
#[cfg_attr(docsrs, doc(cfg(feature = "dot_product")))]
pub fn dot(&self, other: &Series) -> Option<f64> {
(self * other).sum::<f64>()
}
#[cfg(feature = "row_hash")]
#[cfg_attr(docsrs, doc(cfg(feature = "row_hash")))]
/// Get a hash of this Series
pub fn hash(&self, build_hasher: ahash::RandomState) -> UInt64Chunked {
UInt64Chunked::new_from_aligned_vec(self.name(), self.0.vec_hash(build_hasher))
}
/// Get an array with the cumulative max computed at every element
#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
pub fn cummax(&self, _reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
{
self._cummax(_reverse)
}
#[cfg(not(feature = "cum_agg"))]
{
panic!("activate 'cum_agg' feature")
}
}
/// Get an array with the cumulative min computed at every element
#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
pub fn cummin(&self, _reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
{
self._cummin(_reverse)
}
#[cfg(not(feature = "cum_agg"))]
{
panic!("activate 'cum_agg' feature")
}
}
/// Get an array with the cumulative sum computed at every element
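    /// A hedged sketch (requires the `cum_agg` feature; the output dtype may vary):
    ///
    /// ```ignore
    /// # use polars_core::prelude::*;
    /// let s = Series::new("a", &[1, 2, 3]);
    /// let out = s.cumsum(false); // [1, 3, 6]
    /// ```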
#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
pub fn cumsum(&self, _reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
{
match self.dtype() {
DataType::Boolean => self.cast(&DataType::UInt32).unwrap()._cumsum(_reverse),
_ => self._cumsum(_reverse),
}
}
#[cfg(not(feature = "cum_agg"))]
{
panic!("activate 'cum_agg' feature")
}
}
/// Get an array with the cumulative product computed at every element
#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
pub fn cumprod(&self, _reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
{
match self.dtype() {
DataType::Boolean => self.cast(&DataType::UInt32).unwrap()._cumprod(_reverse),
_ => self._cumprod(_reverse),
}
}
#[cfg(not(feature = "cum_agg"))]
{
panic!("activate 'cum_agg' feature")
}
}
/// Apply a rolling variance to a Series. See:
#[cfg_attr(docsrs, doc(cfg(feature = "rolling_window")))]
pub fn rolling_var(&self, _options: RollingOptions) -> Result<Series> {
#[cfg(feature = "rolling_window")]
{
self._rolling_var(_options)
}
#[cfg(not(feature = "rolling_window"))]
{
panic!("activate 'rolling_window' feature")
}
}
/// Apply a rolling std to a Series. See:
#[cfg_attr(docsrs, doc(cfg(feature = "rolling_window")))]
pub fn rolling_std(&self, _options: RollingOptions) -> Result<Series> {
#[cfg(feature = "rolling_window")]
{
self._rolling_std(_options)
}
#[cfg(not(feature = "rolling_window"))]
{
panic!("activate 'rolling_window' feature")
}
}
/// Apply a rolling mean to a Series. See:
/// [ChunkedArray::rolling_mean](crate::prelude::ChunkWindow::rolling_mean).
#[cfg_attr(docsrs, doc(cfg(feature = "rolling_window")))]
pub fn rolling_mean(&self, _options: RollingOptions) -> Result<Series> {
#[cfg(feature = "rolling_window")]
{
self._rolling_mean(_options)
}
#[cfg(not(feature = "rolling_window"))]
{
panic!("activate 'rolling_window' feature")
}
}
/// Apply a rolling sum to a Series. See:
/// [ChunkedArray::rolling_sum](crate::prelude::ChunkWindow::rolling_sum).
#[cfg_attr(docsrs, doc(cfg(feature = "rolling_window")))]
pub fn rolling_sum(&self, _options: RollingOptions) -> Result<Series> {
#[cfg(feature = "rolling_window")]
{
self._rolling_sum(_options)
}
#[cfg(not(feature = "rolling_window"))]
{
panic!("activate 'rolling_window' feature")
}
}
/// Apply a rolling min to a Series. See:
/// [ChunkedArray::rolling_min](crate::prelude::ChunkWindow::rolling_min).
#[cfg_attr(docsrs, doc(cfg(feature = "rolling_window")))]
pub fn rolling_min(&self, _options: RollingOptions) -> Result<Series> {
#[cfg(feature = "rolling_window")]
{
self._rolling_min(_options)
}
#[cfg(not(feature = "rolling_window"))]
{
panic!("activate 'rolling_window' feature")
}
}
/// Apply a rolling max to a Series. See:
/// [ChunkedArray::rolling_max](crate::prelude::ChunkWindow::rolling_max).
#[cfg_attr(docsrs, doc(cfg(feature = "rolling_window")))]
pub fn rolling_max(&self, _options: RollingOptions) -> Result<Series> {
#[cfg(feature = "rolling_window")]
{
self._rolling_max(_options)
}
#[cfg(not(feature = "rolling_window"))]
{
panic!("activate 'rolling_window' feature")
}
}
#[cfg(feature = "rank")]
#[cfg_attr(docsrs, doc(cfg(feature = "rank")))]
pub fn rank(&self, method: RankMethod) -> Series {
rank(self, method)
}
    /// Cast; returns an error if the conversion overflowed (i.e. produced new nulls).
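    ///
    /// A hedged sketch (assumes the `dtype-i8` feature; the overflowing value is illustrative):
    ///
    /// ```ignore
    /// # use polars_core::prelude::*;
    /// let s = Series::new("a", &[300i32]);
    /// assert!(s.strict_cast(&DataType::Int8).is_err()); // 300 overflows i8
    /// ```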
pub fn strict_cast(&self, data_type: &DataType) -> Result<Series> {
let s = self.cast(data_type)?;
if self.null_count() != s.null_count() {
Err(PolarsError::ComputeError(
format!(
"strict conversion of cast from {:?} to {:?} failed. consider non-strict cast.\n
If you were trying to cast Utf8 to Date,Time,Datetime, consider using `strptime`",
self.dtype(),
data_type
)
.into(),
))
} else {
Ok(s)
}
}
#[cfg(feature = "dtype-time")]
pub(crate) fn into_time(self) -> Series {
self.i64()
.expect("impl error")
.clone()
.into_time()
.into_series()
}
pub(crate) fn into_date(self) -> Series {
match self.dtype() {
#[cfg(feature = "dtype-date")]
DataType::Int32 => self.i32().unwrap().clone().into_date().into_series(),
#[cfg(feature = "dtype-datetime")]
DataType::Int64 => self.i64().unwrap().clone().into_date().into_series(),
_ => unreachable!(),
}
}
#[cfg(feature = "abs")]
#[cfg_attr(docsrs, doc(cfg(feature = "abs")))]
    /// Convert numerical values to their absolute value.
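    ///
    /// A hedged sketch (requires the `abs` feature):
    ///
    /// ```ignore
    /// # use polars_core::prelude::*;
    /// let s = Series::new("a", &[-1i32, 2, -3]);
    /// let out = s.abs()?; // [1, 2, 3]
    /// # Ok::<(), PolarsError>(())
    /// ```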
pub fn abs(&self) -> Result<Series> {
let a = self.to_physical_repr();
use DataType::*;
let out = match a.dtype() {
#[cfg(feature = "dtype-i8")]
Int8 => a.i8().unwrap().abs().into_series(),
#[cfg(feature = "dtype-i16")]
Int16 => a.i16().unwrap().abs().into_series(),
Int32 => a.i32().unwrap().abs().into_series(),
Int64 => a.i64().unwrap().abs().into_series(),
UInt8 | UInt16 | UInt32 | UInt64 => self.clone(),
Float32 => a.f32().unwrap().abs().into_series(),
Float64 => a.f64().unwrap().abs().into_series(),
dt => {
return Err(PolarsError::InvalidOperation(
format!("abs not supportedd for series of type {:?}", dt).into(),
))
}
};
Ok(out)
}
}
impl Deref for Series {
type Target = dyn SeriesTrait;
fn deref(&self) -> &Self::Target {
&*self.0
}
}
impl<'a> AsRef<(dyn SeriesTrait + 'a)> for Series {
fn as_ref(&self) -> &(dyn SeriesTrait + 'a) {
&*self.0
}
}
impl Default for Series {
fn default() -> Self {
Int64Chunked::default().into_series()
}
}
impl<'a, T> AsRef<ChunkedArray<T>> for dyn SeriesTrait + 'a
where
T: 'static + PolarsDataType,
{
fn as_ref(&self) -> &ChunkedArray<T> {
if &T::get_dtype() == self.dtype() ||
// needed because we want to get ref of List no matter what the inner type is.
(matches!(T::get_dtype(), DataType::List(_)) && matches!(self.dtype(), DataType::List(_)) )
{
unsafe { &*(self as *const dyn SeriesTrait as *const ChunkedArray<T>) }
} else {
panic!(
"implementation error, cannot get ref {:?} from {:?}",
T::get_dtype(),
self.dtype()
)
}
}
}
impl<'a, T> AsMut<ChunkedArray<T>> for dyn SeriesTrait + 'a
where
T: 'static + PolarsDataType,
{
fn as_mut(&mut self) -> &mut ChunkedArray<T> {
if &T::get_dtype() == self.dtype() ||
// needed because we want to get ref of List no matter what the inner type is.
(matches!(T::get_dtype(), DataType::List(_)) && matches!(self.dtype(), DataType::List(_)) )
{
unsafe { &mut *(self as *mut dyn SeriesTrait as *mut ChunkedArray<T>) }
} else {
panic!(
"implementation error, cannot get ref {:?} from {:?}",
T::get_dtype(),
self.dtype()
)
}
}
}
#[cfg(test)]
mod test {
use crate::prelude::*;
use crate::series::*;
use std::convert::TryFrom;
#[test]
fn cast() {
let ar = UInt32Chunked::new_from_slice("a", &[1, 2]);
let s = ar.into_series();
let s2 = s.cast(&DataType::Int64).unwrap();
assert!(s2.i64().is_ok());
let s2 = s.cast(&DataType::Float32).unwrap();
assert!(s2.f32().is_ok());
}
#[test]
fn | () {
Series::new("boolean series", &vec![true, false, true]);
Series::new("int series", &[1, 2, 3]);
let ca = Int32Chunked::new_from_slice("a", &[1, 2, 3]);
ca.into_series();
}
#[test]
fn new_series_from_arrow_primitive_array() {
let array = UInt32Array::from_slice(&[1, 2, 3, 4, 5]);
let array_ref: ArrayRef = Arc::new(array);
Series::try_from(("foo", array_ref)).unwrap();
}
#[test]
fn series_append() {
let mut s1 = Series::new("a", &[1, 2]);
let s2 = Series::new("b", &[3]);
s1.append(&s2).unwrap();
assert_eq!(s1.len(), 3);
// add wrong type
let s2 = Series::new("b", &[3.0]);
assert!(s1.append(&s2).is_err())
}
#[test]
fn series_slice_works() {
let series = Series::new("a", &[1i64, 2, 3, 4, 5]);
let slice_1 = series.slice(-3, 3);
let slice_2 = series.slice(-5, 5);
let slice_3 = series.slice(0, 5);
assert_eq!(slice_1.get(0), AnyValue::Int64(3));
assert_eq!(slice_2.get(0), AnyValue::Int64(1));
assert_eq!(slice_3.get(0), AnyValue::Int64(1));
}
#[test]
fn out_of_range_slice_does_not_panic() {
let series = Series::new("a", &[1i64, 2, 3, 4, 5]);
series.slice(-3, 4);
series.slice(-6, 2);
series.slice(4, 2);
}
#[test]
#[cfg(feature = "round_series")]
fn test_round_series() {
let series = Series::new("a", &[1.003, 2.23222, 3.4352]);
let out = series.round(2).unwrap();
let ca = out.f64().unwrap();
assert_eq!(ca.get(0), Some(1.0));
}
}
| new_series |
map.rs | //! A hash map implementation with consistent ordering.
//!
//! The types in this module are commonly used as the underlying data structure
//! of arbitrary objects found in JSON API data.
use std::fmt::{self, Debug, Formatter};
use std::hash::Hash;
use std::iter::FromIterator;
use std::ops::RangeFull;
use ordermap::{self, OrderMap};
use serde::de::{Deserialize, Deserializer};
use serde::ser::{Serialize, Serializer};
use value::collections::Equivalent;
use value::{Key, Value};
/// A hash map implementation with consistent ordering.
#[derive(Clone, Eq, PartialEq)]
pub struct Map<K = Key, V = Value>
where
K: Eq + Hash,
{
inner: OrderMap<K, V>,
}
impl<K, V> Map<K, V>
where
K: Eq + Hash,
{
/// Creates an empty `Map`.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # fn main() {
/// use json_api::value::{Key, Map, Value};
/// let mut map = Map::<Key, Value>::new();
/// # }
/// ```
pub fn new() -> Self {
Default::default()
}
/// Creates a new empty `Map`, with specified capacity.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::Error;
/// # use json_api::value::Map;
/// #
/// # fn example() -> Result<(), Error> {
/// let mut map = Map::with_capacity(2);
///
/// map.insert("x", 1);
/// map.insert("y", 2);
///
/// // The next insert will likely require reallocation...
/// map.insert("z", 3);
/// #
/// # Ok(())
/// # }
/// #
/// # fn main() {
/// # example().unwrap();
/// # }
/// ```
pub fn with_capacity(capacity: usize) -> Self {
let inner = OrderMap::with_capacity(capacity);
Map { inner }
}
/// Returns the number of key-value pairs the map can hold without
/// reallocating.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::{Key, Map, Value};
/// #
/// # fn main() {
/// let map = Map::<Key, Value>::with_capacity(2);
/// assert!(map.capacity() >= 2);
/// # }
/// ```
pub fn capacity(&self) -> usize {
self.inner.capacity()
}
/// Clears the map, removing all key-value pairs. Keeps the allocated memory
/// for reuse.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("x", 1);
/// map.clear();
/// assert!(map.is_empty());
/// # }
/// ```
pub fn clear(&mut self) {
self.inner.clear();
}
/// Returns true if the map contains a value for the specified key.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert(1, "a");
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
/// # }
/// ```
pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
where
Q: Equivalent<K> + Hash,
{
self.inner.contains_key(key)
}
/// Clears the map, returning all key-value pairs as an iterator. Keeps the
/// allocated memory for reuse.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("x", 1);
/// map.insert("y", 2);
///
/// for (key, value) in map.drain(..) {
/// assert!(key == "x" || key == "y");
/// assert!(value == 1 || value == 2);
/// }
///
/// assert!(map.is_empty());
/// # }
/// ```
pub fn drain(&mut self, range: RangeFull) -> Drain<K, V> {
let iter = self.inner.drain(range);
Drain { iter }
}
/// Returns a reference to the value corresponding to the key.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("x", 1);
///
/// assert_eq!(map.get("x"), Some(&1));
/// assert_eq!(map.get("y"), None);
/// # }
/// ```
pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
where
Q: Equivalent<K> + Hash,
{
self.inner.get(key)
}
/// Inserts a key-value pair into the map.
///
/// If a value already existed for key, that old value is returned in
/// `Some`; otherwise, `None` is returned.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// assert_eq!(map.insert("x", 1), None);
/// assert_eq!(map.insert("x", 2), Some(1));
/// # }
/// ```
pub fn insert(&mut self, key: K, value: V) -> Option<V> {
self.inner.insert(key, value)
}
/// Return an iterator visiting all the key-value pairs of the map in the
/// order in which they were inserted.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for (key, value) in map.iter() {
/// println!("key: {} value: {}", key, value);
/// }
/// # }
/// ```
pub fn iter(&self) -> Iter<K, V> {
let iter = self.inner.iter();
Iter { iter }
}
/// Return an iterator visiting all the key-value pairs of the map in the
/// order in which they were inserted, with mutable references to the
/// values.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
/// | ///
/// for (_, value) in map.iter_mut() {
/// *value += 1;
/// }
///
/// for (key, value) in &map {
/// println!("key: {} value: {}", key, value);
/// }
/// # }
/// ```
pub fn iter_mut(&mut self) -> IterMut<K, V> {
let iter = self.inner.iter_mut();
IterMut { iter }
}
/// Return an iterator visiting all keys in the order in which they were
/// inserted.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for key in map.keys() {
/// println!("{}", key);
/// }
/// # }
/// ```
pub fn keys(&self) -> Keys<K, V> {
let iter = self.inner.keys();
Keys { iter }
}
/// Return the number of key-value pairs in the map.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
/// assert_eq!(map.len(), 0);
///
/// map.insert("x", 1);
/// assert_eq!(map.len(), 1);
/// # }
/// ```
pub fn len(&self) -> usize {
self.inner.len()
}
/// Returns true if the map contains no elements.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
/// assert!(map.is_empty());
///
/// map.insert("x", 1);
/// assert!(!map.is_empty());
/// # }
/// ```
pub fn is_empty(&self) -> bool {
self.len() == 0
}
/// Removes a key from the map, returning the value at the key if the key
/// was previously in the map.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("x", 1);
///
/// assert_eq!(map.remove("x"), Some(1));
/// assert_eq!(map.remove("x"), None);
/// # }
/// ```
pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
where
Q: Equivalent<K> + Hash,
{
self.inner.remove(key)
}
/// Reserves capacity for at least additional more elements to be inserted
/// in the `Map`. The collection may reserve more space to avoid frequent
/// reallocations.
///
/// # Note
///
/// This method has yet to be fully implemented in the [`ordermap`] crate.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::{Key, Map, Value};
/// #
/// # fn main() {
/// let mut map = Map::<Key, Value>::new();
/// map.reserve(10);
/// # }
/// ```
///
/// [`ordermap`]: https://docs.rs/ordermap
pub fn reserve(&mut self, additional: usize) {
self.inner.reserve(additional);
}
/// Return an iterator visiting all values in the order in which they were
/// inserted.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for value in map.values() {
/// println!("{}", value);
/// }
/// # }
/// ```
pub fn values(&self) -> Values<K, V> {
let iter = self.inner.values();
Values { iter }
}
/// Return an iterator visiting all values mutably in the order in which
/// they were inserted.
///
/// # Example
///
/// ```
/// # extern crate json_api;
/// #
/// # use json_api::value::Map;
/// #
/// # fn main() {
/// let mut map = Map::new();
///
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// for value in map.values_mut() {
/// *value += 1;
/// }
///
/// for value in map.values() {
/// println!("{}", value);
/// }
/// # }
/// ```
pub fn values_mut(&mut self) -> ValuesMut<K, V> {
let iter = self.inner.values_mut();
ValuesMut { iter }
}
}
impl<K, V> Debug for Map<K, V>
where
K: Debug + Eq + Hash,
V: Debug,
{
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
f.debug_map().entries(self).finish()
}
}
impl<K, V> Default for Map<K, V>
where
K: Eq + Hash,
{
fn default() -> Self {
let inner = Default::default();
Map { inner }
}
}
impl<K, V> Extend<(K, V)> for Map<K, V>
where
K: Eq + Hash,
{
fn extend<I>(&mut self, iter: I)
where
I: IntoIterator<Item = (K, V)>,
{
self.inner.extend(iter);
}
}
impl<K, V> FromIterator<(K, V)> for Map<K, V>
where
K: Eq + Hash,
{
fn from_iter<I>(iter: I) -> Self
where
I: IntoIterator<Item = (K, V)>,
{
let inner = OrderMap::from_iter(iter);
Map { inner }
}
}
impl<K, V> IntoIterator for Map<K, V>
where
K: Eq + Hash,
{
type Item = (K, V);
type IntoIter = IntoIter<K, V>;
fn into_iter(self) -> Self::IntoIter {
let iter = self.inner.into_iter();
IntoIter { iter }
}
}
impl<'a, K, V> IntoIterator for &'a Map<K, V>
where
K: Eq + Hash,
{
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, K, V> IntoIterator for &'a mut Map<K, V>
where
K: Eq + Hash,
{
type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
impl<'de, K, V> Deserialize<'de> for Map<K, V>
where
K: Deserialize<'de> + Eq + Hash,
V: Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
OrderMap::deserialize(deserializer).map(|inner| Map { inner })
}
}
impl<K, V> Serialize for Map<K, V>
where
K: Eq + Hash + Serialize,
V: Serialize,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.inner.serialize(serializer)
}
}
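// Note: the iterator types below are thin newtype wrappers around the
// corresponding `ordermap` iterators. Keeping the `ordermap` types out of the
// public signatures means the backing map implementation can change without
// breaking this crate's API.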
/// A draining iterator over the entries of a `Map`.
pub struct Drain<'a, K: 'a, V: 'a> {
iter: ordermap::Drain<'a, K, V>,
}
impl<'a, K, V> Iterator for Drain<'a, K, V> {
type Item = (K, V);
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
/// An iterator over the entries of a `Map`.
pub struct Iter<'a, K: 'a, V: 'a> {
iter: ordermap::Iter<'a, K, V>,
}
impl<'a, K, V> Iterator for Iter<'a, K, V> {
type Item = (&'a K, &'a V);
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// A mutable iterator over the entries of a `Map`.
pub struct IterMut<'a, K: 'a, V: 'a> {
iter: ordermap::IterMut<'a, K, V>,
}
impl<'a, K, V> Iterator for IterMut<'a, K, V> {
type Item = (&'a K, &'a mut V);
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// An owning iterator over the entries of a `Map`.
pub struct IntoIter<K, V> {
iter: ordermap::IntoIter<K, V>,
}
impl<K, V> Iterator for IntoIter<K, V> {
type Item = (K, V);
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<K, V> ExactSizeIterator for IntoIter<K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// An iterator over the keys of a `Map`.
pub struct Keys<'a, K: 'a, V: 'a> {
iter: ordermap::Keys<'a, K, V>,
}
impl<'a, K, V> Iterator for Keys<'a, K, V> {
type Item = &'a K;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// An iterator over the values of a `Map`.
pub struct Values<'a, K: 'a, V: 'a> {
iter: ordermap::Values<'a, K, V>,
}
impl<'a, K, V> Iterator for Values<'a, K, V> {
type Item = &'a V;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
}
/// A mutable iterator over the values of a `Map`.
pub struct ValuesMut<'a, K: 'a, V: 'a> {
iter: ordermap::ValuesMut<'a, K, V>,
}
impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
type Item = &'a mut V;
fn next(&mut self) -> Option<Self::Item> {
self.iter.next()
}
fn count(self) -> usize {
self.iter.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.iter.nth(n)
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
fn next_back(&mut self) -> Option<Self::Item> {
self.iter.next_back()
}
}
impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> {
fn len(&self) -> usize {
self.iter.len()
}
} | /// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3); |
bool.rs | //! impl bool {}
use crate::marker::Destruct;
impl bool {
/// Returns `Some(t)` if the `bool` is [`true`](../std/keyword.true.html),
/// or `None` otherwise.
///
/// # Examples
///
/// ```
/// assert_eq!(false.then_some(0), None);
/// assert_eq!(true.then_some(0), Some(0));
/// ```
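///
/// Arguments passed to `then_some` are eagerly evaluated; if you are passing
/// the result of a function call, prefer the lazily evaluated [`then`].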
#[stable(feature = "bool_to_option", since = "1.62.0")]
#[rustc_const_unstable(feature = "const_bool_to_option", issue = "91917")]
#[inline]
pub const fn then_some<T>(self, t: T) -> Option<T>
where
T: ~const Destruct,
|
/// Returns `Some(f())` if the `bool` is [`true`](../std/keyword.true.html),
/// or `None` otherwise.
///
/// # Examples
///
/// ```
/// assert_eq!(false.then(|| 0), None);
/// assert_eq!(true.then(|| 0), Some(0));
/// ```
#[stable(feature = "lazy_bool_to_option", since = "1.50.0")]
#[rustc_const_unstable(feature = "const_bool_to_option", issue = "91917")]
#[inline]
pub const fn then<T, F>(self, f: F) -> Option<T>
where
F: ~const FnOnce() -> T,
F: ~const Destruct,
{
if self { Some(f()) } else { None }
}
}
| {
if self { Some(t) } else { None }
} |
test_get_scratchpad_status.py | # flake8: noqa
import wirepas_messaging
from default_value import *
def test_generate_parse_request():
# Get the status of a scratchpad
request = wirepas_messaging.gateway.api.GetScratchpadStatusRequest(
SINK_ID, REQUEST_ID
)
request2 = wirepas_messaging.gateway.api.GetScratchpadStatusRequest.from_payload(
request.payload
)
for k, v in request.__dict__.items():
assert v == request2.__dict__[k]
def test_generate_parse_response():
request = wirepas_messaging.gateway.api.GetScratchpadStatusResponse(
REQUEST_ID,
GATEWAY_ID,
RES_OK,
SINK_ID,
SCRATCHPAD_INFO,
SCRATCHPAD_STATUS,
SCRATCHPAD_TYPE,
SCRATCHPAD_INFO,
FIRMWARE_AREA_ID,
)
request2 = wirepas_messaging.gateway.api.GetScratchpadStatusResponse.from_payload(
request.payload
) | else:
assert v == request2.__dict__[k] |
for k, v in request.__dict__.items():
if isinstance(v, enum.Enum):
assert v.value == request2.__dict__[k].value |
BabyBrainSegmentation.py | import os
import multiprocessing
import platform
import sys
import unittest
import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
import logging
import SimpleITK as sitk
import sitkUtils
from os.path import expanduser
#
# BabyBrainSegmentation
#
class BabyBrainSegmentation(ScriptedLoadableModule):
"""Uses ScriptedLoadableModule base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def __init__(self, parent):
ScriptedLoadableModule.__init__(self, parent)
self.parent.title = "Baby Brain Segmentation"
self.parent.categories = ["Segmentation.Baby Brain"]
self.parent.dependencies = []
self.parent.contributors = ["Antonio Carlos da S. Senra Filho (University of Sao Paulo) and Sara"] # replace with "Firstname Lastname (Organization)"
self.parent.helpText = """
This module offers a brain tissue segmentation pipeline for fetal, neonatal and pediatric MRI images. At the moment, structural MRI images are supported,
namely T1, T2 and PD. The general segmentation sequence is based on a naive Bayes classifier coupled to a local signal smoothing and label
propagation step from a chosen brain atlas. More details are found at the wiki page: https://www.slicer.org/wiki/Documentation/Nightly/Modules/BabyBrainSegmentation
"""
self.parent.helpText += self.getDefaultModuleDocumentationLink()
self.parent.acknowledgementText = """
This work was partially funded by CNPq grant 405574/2017-7
""" # replace with organization, grant and thanks.
#
# BabyBrainSegmentationWidget
#
class BabyBrainSegmentationWidget(ScriptedLoadableModuleWidget):
"""Uses ScriptedLoadableModuleWidget base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def setup(self):
ScriptedLoadableModuleWidget.setup(self)
# Instantiate and connect widgets ...
#
# Parameters Area
#
parametersCollapsibleButton = ctk.ctkCollapsibleButton()
parametersCollapsibleButton.text = "Input/Output Parameters"
self.layout.addWidget(parametersCollapsibleButton)
# Layout within the dummy collapsible button
parametersFormLayout = qt.QFormLayout(parametersCollapsibleButton)
#
# input volume selector
#
self.inputSelector = slicer.qMRMLNodeComboBox()
self.inputSelector.nodeTypes = ["vtkMRMLScalarVolumeNode"]
self.inputSelector.selectNodeUponCreation = True
self.inputSelector.addEnabled = False
self.inputSelector.removeEnabled = True
self.inputSelector.renameEnabled = True
self.inputSelector.noneEnabled = False
self.inputSelector.showHidden = False
self.inputSelector.showChildNodeTypes = False
self.inputSelector.setMRMLScene( slicer.mrmlScene )
self.inputSelector.setToolTip( "Pick the input to the algorithm. It is recommended using a preprocessed image here (e.g. Baby Brain Preparation module output)" )
parametersFormLayout.addRow("Input Volume: ", self.inputSelector)
#
# Image Modality
#
self.setImageModalityBooleanWidget = ctk.ctkComboBox()
self.setImageModalityBooleanWidget.addItem("T2")
self.setImageModalityBooleanWidget.addItem("T1")
self.setImageModalityBooleanWidget.setToolTip(
"MRI strutural image inserted as a input volume.")
parametersFormLayout.addRow("Image Modality ", self.setImageModalityBooleanWidget)
#
# Brain tissues selector
#
self.brainTissuesSelector = slicer.qMRMLNodeComboBox()
self.brainTissuesSelector.nodeTypes = ["vtkMRMLLabelMapVolumeNode"]
self.brainTissuesSelector.selectNodeUponCreation = True
self.brainTissuesSelector.addEnabled = True
self.brainTissuesSelector.renameEnabled = True
self.brainTissuesSelector.removeEnabled = True
self.brainTissuesSelector.noneEnabled = False
self.brainTissuesSelector.showHidden = False
self.brainTissuesSelector.showChildNodeTypes = False
self.brainTissuesSelector.setMRMLScene( slicer.mrmlScene )
self.brainTissuesSelector.setToolTip( "Pick the output brain tissues label." )
parametersFormLayout.addRow("Brain Tissues: ", self.brainTissuesSelector)
#
# Estimate Basal Ganglia Structures?
#
self.setUseBasalGangliaEstimatorBooleanWidget = ctk.ctkCheckBox()
self.setUseBasalGangliaEstimatorBooleanWidget.setChecked(False)
self.setUseBasalGangliaEstimatorBooleanWidget.setToolTip(
"Check this if you want to output the global basal ganglia structures in the final segmentation. This step is in "
"experimental stage which is only based on a label propagation from the chosen brain atlas.")
parametersFormLayout.addRow("Estimate Basal Ganglia Structures",
self.setUseBasalGangliaEstimatorBooleanWidget)
#
# Split brain hemispheres?
#
self.setUseSplitBrainHemispheresBooleanWidget = ctk.ctkCheckBox()
self.setUseSplitBrainHemispheresBooleanWidget.setChecked(True)
self.setUseSplitBrainHemispheresBooleanWidget.setToolTip(
"Check this if you want to output the final brain segmentation with labels splitted in both hemispheres. This "
"step return reasonble segmentation when normal (or almost normal) brains are used. For patients with strong "
" brain malformations, this step can be neglected.")
parametersFormLayout.addRow("Split Brain Hemispheres Labels",
self.setUseSplitBrainHemispheresBooleanWidget)
#
# Image Space Resampling Parameters Area
#
parametersImageResamplingCollapsibleButton = ctk.ctkCollapsibleButton()
parametersImageResamplingCollapsibleButton.text = "Image Space Resampling Parameters"
parametersImageResamplingCollapsibleButton.collapsed = False
self.layout.addWidget(parametersImageResamplingCollapsibleButton)
# Layout within the dummy collapsible button
parametersImageResamplingLayout = qt.QFormLayout(parametersImageResamplingCollapsibleButton)
#
# Voxel Resampling Size
#
self.setVoxelResolutionLineEditWidget = qt.QLineEdit()
self.setVoxelResolutionLineEditWidget.setText("0.2,0.2,1")
self.setVoxelResolutionLineEditWidget.setToolTip(
"Voxel resolution to the image resample function. This is useful to final brain label upsampling which results"
"in a fine delineation among brain tissues. TIP: Use the typical resolution used in the MRI image acquisition.")
parametersImageResamplingLayout.addRow("Voxel Resampling Resolution", self.setVoxelResolutionLineEditWidget)
#
# Interpolation Functions
#
self.setInterpolationFunctionResamplingComboBoxWidget = ctk.ctkComboBox()
self.setInterpolationFunctionResamplingComboBoxWidget.addItem("bspline")
self.setInterpolationFunctionResamplingComboBoxWidget.addItem("linear")
self.setInterpolationFunctionResamplingComboBoxWidget.addItem("nearestNeighbor")
self.setInterpolationFunctionResamplingComboBoxWidget.addItem("hamming")
self.setInterpolationFunctionResamplingComboBoxWidget.addItem("cosine")
self.setInterpolationFunctionResamplingComboBoxWidget.addItem("welch")
self.setInterpolationFunctionResamplingComboBoxWidget.addItem("lanczos")
self.setInterpolationFunctionResamplingComboBoxWidget.addItem("blackman")
self.setInterpolationFunctionResamplingComboBoxWidget.setToolTip(
"Interpolation functions for resampling step.")
parametersImageResamplingLayout.addRow("Interpolation ", self.setInterpolationFunctionResamplingComboBoxWidget)
#
# Median Filter Parameters Area
#
parametersMedianFilterCollapsibleButton = ctk.ctkCollapsibleButton()
parametersMedianFilterCollapsibleButton.text = "Median Filter Parameters"
parametersMedianFilterCollapsibleButton.collapsed = False
self.layout.addWidget(parametersMedianFilterCollapsibleButton)
# Layout within the dummy collapsible button
parametersMedianFilterLayout = qt.QFormLayout(parametersMedianFilterCollapsibleButton)
#
# Apply Median Filtering?
#
self.setApplyMedianFilteringBooleanWidget = ctk.ctkCheckBox()
self.setApplyMedianFilteringBooleanWidget.setChecked(True)
self.setApplyMedianFilteringBooleanWidget.setToolTip(
"Check this if you want to perform a median filtering in the final step of brain tissues segmentation."
"This operation can be useful to decrease the amount of punctual errors among tissues, however the"
"filtering parameters (i.e. both number of iterations and neighborhood size) may strongly affects the final tissue"
"segmentation.")
parametersMedianFilterLayout.addRow("Apply Median Filter",
self.setApplyMedianFilteringBooleanWidget)
#
# Neighborhood Size
#
self.setNeighborhoodSizeLineEditWidget = qt.QLineEdit()
self.setNeighborhoodSizeLineEditWidget.setText("2,2,1")
self.setNeighborhoodSizeLineEditWidget.setToolTip(
"Choose the neighborhood applied on the median filter. A large neighborhood will provide a smoother version of the"
"brain label, however minor details may vanish. TIP: It is commonly used a size of 2x to 5x of the smallest voxel"
"size, e.g. if the input image has 0.2x0.2x1.0 mm3 (1x1x1 voxels), than the neighborhood size would be [2,2,1].")
parametersMedianFilterLayout.addRow("Neighborhood Size ", self.setNeighborhoodSizeLineEditWidget)
#
# Iterations
#
self.setMedianIterationsWidget = qt.QSpinBox()
self.setMedianIterationsWidget.setMinimum(1)
self.setMedianIterationsWidget.setMaximum(20)
self.setMedianIterationsWidget.setValue(1)
self.setMedianIterationsWidget.setToolTip(
"Set how many median filtering will be applied. The higher it is, the stronger will be the label smoothing.")
parametersMedianFilterLayout.addRow("Interations ", self.setMedianIterationsWidget)
#
# Atlas Propagation Parameters Area
#
parametersAtlasPropagationCollapsibleButton = ctk.ctkCollapsibleButton()
parametersAtlasPropagationCollapsibleButton.text = "Atlas Propagation Parameters"
parametersAtlasPropagationCollapsibleButton.collapsed = False
self.layout.addWidget(parametersAtlasPropagationCollapsibleButton)
# Layout within the dummy collapsible button
parametersAtlasPropagationLayout = qt.QFormLayout(parametersAtlasPropagationCollapsibleButton)
#
# Brain Atlas
#
self.setBrainAtlasComboBoxWidget = ctk.ctkComboBox()
self.setBrainAtlasComboBoxWidget.addItem("NEO2012") # TODO Ver se usa tambem outro template (2015, http://brain-development.org/brain-atlases/multi-structural-neonatal-brain-atlas/)
# self.setBrainAtlasComboBoxWidget.addItem("NEO2015") # TODO Novo brain atlas com o mesmo padrao do NEO2012... tem mais detalhes de segmentacao
self.setBrainAtlasComboBoxWidget.addItem("FET2012")
# self.setBrainAtlasComboBoxWidget.addItem("PED2008") # TODO PED2008 will be availble in further upgrade
self.setBrainAtlasComboBoxWidget.setToolTip(
"Choose the most suitable brain atlas for the input image. A list of available atlas are given, however only the "
"binary labels are considered. These brain atlases will mainly help to segment the cerebellum, brainstem and deep"
"gray matter. Available atlases: NEO2012 (Neonatal), FET2012 (Fetal) and PED2008 (Pediatric).")
parametersAtlasPropagationLayout.addRow("Brain Atlas ", self.setBrainAtlasComboBoxWidget)
#
# Subject Age
#
self.setSubjectAgeIntegerWidget = ctk.ctkSliderWidget()
self.setSubjectAgeIntegerWidget.maximum = 44
self.setSubjectAgeIntegerWidget.minimum = 23
self.setSubjectAgeIntegerWidget.value = 26
self.setSubjectAgeIntegerWidget.singleStep = 1
self.setSubjectAgeIntegerWidget.setToolTip("Select the subject's age in weeks. This is only used for neonatal and fetal brain atlases. "
"NOTE: Each atlas has its own age range, with NEO2012=27-44 and FET2012=23-37 weeks, respectivelly."
"If you choose an age below (above), the lower (higher) age will be chosen.")
parametersAtlasPropagationLayout.addRow("Age ", self.setSubjectAgeIntegerWidget)
#
# Registration Algorithm
#
self.groupBoxRadioButtons = qt.QGroupBox("Registration Algorithm")
RadioButtonLayout = qt.QFormLayout()
self.groupBoxRadioButtons.setLayout(RadioButtonLayout)
self.setRadioBRAINS = qt.QRadioButton('BRAINSFit')
self.setRadioBRAINS.setToolTip("Use the Slicer built-in BRAINSFit algorithm (General Registration).")
self.setRadioANTs = qt.QRadioButton('ANTs')
self.setRadioANTs.setToolTip("Use the ANTs SyN diffeomorphic algorithm (recommended). If the ANTs tools are not installed in the user's machine, than this option will be not available.")
if os.environ.get('ANTSPATH'):
self.setRadioANTs.setChecked(True)
else:
self.setRadioBRAINS.setChecked(True)
self.setRadioANTs.setDisabled(True)
RadioButtonLayout.addRow(self.setRadioBRAINS)
RadioButtonLayout.addRow(self.setRadioANTs)
parametersAtlasPropagationLayout.addRow(self.groupBoxRadioButtons)
#
# ANTs Parameters
#
self.groupANTsParametersBoxButtons = qt.QGroupBox("ANTs Parameters")
ANTsParametersLayout = qt.QFormLayout()
self.groupANTsParametersBoxButtons.setLayout(ANTsParametersLayout)
#
# Use Quick Registration
#
self.setUseANTSQuickBooleanWidget = ctk.ctkCheckBox()
self.setUseANTSQuickBooleanWidget.setChecked(False)
self.setUseANTSQuickBooleanWidget.setToolTip(
"Check this if you want to use the antsRegistrationSyNQuick.sh script. This will considerably reduce the "
"total time required in the registration step.")
#
# Number of Cores
#
self.setNumberOfCoresWidget = ctk.ctkSliderWidget()
self.setNumberOfCoresWidget.singleStep = 1
self.setNumberOfCoresWidget.minimum = 1
self.setNumberOfCoresWidget.maximum = multiprocessing.cpu_count()
self.setNumberOfCoresWidget.value = self.setNumberOfCoresWidget.maximum - 1
self.setNumberOfCoresWidget.setToolTip(
"Set the number of CPU's used in the registration process. In order to prevent the SO crash, it is advisable to use N - 1 (N = Total number of cores available).")
#
# Radius for correlation calculation
#
self.setCorrelationRadiusWidget = qt.QSpinBox()
self.setCorrelationRadiusWidget.setMinimum(1)
self.setCorrelationRadiusWidget.setMaximum(30)
self.setCorrelationRadiusWidget.setValue(4)
self.setCorrelationRadiusWidget.setToolTip(
"Set the radius for cross correlation metric used in the SyN registration. Units given in number of voxels.")
ANTsParametersLayout.addRow("Use Quick Registration ", self.setUseANTSQuickBooleanWidget)
ANTsParametersLayout.addRow("Number Of Cores ", self.setNumberOfCoresWidget)
ANTsParametersLayout.addRow("Radius ", self.setCorrelationRadiusWidget)
parametersAtlasPropagationLayout.addRow(self.groupANTsParametersBoxButtons)
#
# BRAINSFit Parameters
#
self.groupBRAINFitParametersBoxButtons = qt.QGroupBox("BRAINSFit Parameters")
BRAINSFitParametersLayout = qt.QFormLayout()
self.groupBRAINFitParametersBoxButtons.setLayout(BRAINSFitParametersLayout)
#
# Percentage Sampling Area
#
self.setPercSamplingQWidget = qt.QDoubleSpinBox()
self.setPercSamplingQWidget.setDecimals(4)
self.setPercSamplingQWidget.setMaximum(0.1)
self.setPercSamplingQWidget.setMinimum(0.0001)
self.setPercSamplingQWidget.setSingleStep(0.0001)
self.setPercSamplingQWidget.setValue(0.05)
self.setPercSamplingQWidget.setToolTip("Percentage of voxel used in registration.")
#
# BSpline Grid
#
self.setBSplineGridWidget = qt.QLineEdit()
self.setBSplineGridWidget.setText('14,14,10')
self.setBSplineGridWidget.setToolTip("Set the BSpline grid for non linear structural adjustments.")
#
# Initiation Method Area
#
self.setInitiationRegistrationBooleanWidget = ctk.ctkComboBox()
self.setInitiationRegistrationBooleanWidget.addItem("Off")
self.setInitiationRegistrationBooleanWidget.addItem("useMomentsAlign")
self.setInitiationRegistrationBooleanWidget.addItem("useCenterOfHeadAlign")
self.setInitiationRegistrationBooleanWidget.addItem("useGeometryAlign")
self.setInitiationRegistrationBooleanWidget.setToolTip(
"Initialization method used for the MNI152 registration.")
#
# Cost Metric
#
self.setRegistrationCostMetricWidget = ctk.ctkComboBox()
self.setRegistrationCostMetricWidget.addItem("NC")
self.setRegistrationCostMetricWidget.addItem("MMI")
self.setRegistrationCostMetricWidget.addItem("MSE")
self.setRegistrationCostMetricWidget.setToolTip(
"The cost metric to be used during fitting. Defaults to NC. Options are MMI (Mattes Mutual Information), MSE (Mean Square Error), NC"
" (Normalized Correlation).")
#
# Interpolation
#
self.setInterpolationFunctionRegistrationComboBoxWidget = ctk.ctkComboBox()
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("BSpline")
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("Linear")
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("NearestNeighbor")
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("WindowedSinc")
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("Cosine")
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("Welch")
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("Lanczos")
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("Blackman")
self.setInterpolationFunctionRegistrationComboBoxWidget.addItem("Hamming")
self.setInterpolationFunctionRegistrationComboBoxWidget.setToolTip(
"Interpolation functions for registration step.")
BRAINSFitParametersLayout.addRow("Percentage Of Samples ", self.setPercSamplingQWidget)
BRAINSFitParametersLayout.addRow("Spline Grid ", self.setBSplineGridWidget)
BRAINSFitParametersLayout.addRow("Initiation Method ", self.setInitiationRegistrationBooleanWidget)
BRAINSFitParametersLayout.addRow("Cost Metric ", self.setRegistrationCostMetricWidget)
BRAINSFitParametersLayout.addRow("Interpolation ", self.setInterpolationFunctionRegistrationComboBoxWidget)
parametersAtlasPropagationLayout.addRow(self.groupBRAINFitParametersBoxButtons)
#
# Apply Button
#
self.applyButton = qt.QPushButton("Apply")
self.applyButton.toolTip = "Run the algorithm."
self.applyButton.enabled = False
parametersFormLayout.addRow(self.applyButton)
# connections
self.applyButton.connect('clicked(bool)', self.onApplyButton)
self.inputSelector.connect("currentNodeChanged(vtkMRMLNode*)", self.onSelect)
self.brainTissuesSelector.connect("currentNodeChanged(vtkMRMLNode*)", self.onSelect)
# Add vertical spacer
self.layout.addStretch(1)
# Refresh Apply button state
self.onSelect()
def cleanup(self):
pass
def onSelect(self):
self.applyButton.enabled = self.inputSelector.currentNode() and self.brainTissuesSelector.currentNode()
def onApplyButton(self):
logic = BabyBrainSegmentationLogic()
modality = self.setImageModalityBooleanWidget.currentText
estimateBasalGanglia = self.setUseBasalGangliaEstimatorBooleanWidget.isChecked()
splitHemispheres = self.setUseSplitBrainHemispheresBooleanWidget.isChecked()
brainAtlas = self.setBrainAtlasComboBoxWidget.currentText
age = self.setSubjectAgeIntegerWidget.value
if self.setRadioBRAINS.isChecked():
registrationAlgorithm = self.setRadioBRAINS.text
else:
registrationAlgorithm = self.setRadioANTs.text
useQuickRegistration = self.setUseANTSQuickBooleanWidget.isChecked()
numOfCores = self.setNumberOfCoresWidget.value
correlationRadius = self.setCorrelationRadiusWidget.value
sampling = self.setPercSamplingQWidget.value
splineGrid = self.setBSplineGridWidget.text
initMethod = self.setInitiationRegistrationBooleanWidget.currentText
costMetric = self.setRegistrationCostMetricWidget.currentText
interpolationRegistration = self.setInterpolationFunctionRegistrationComboBoxWidget.currentText
voxelResampling = self.setVoxelResolutionLineEditWidget.text
interpolationResampling = self.setInterpolationFunctionResamplingComboBoxWidget.currentText
applyMedianFiltering = self.setApplyMedianFilteringBooleanWidget.isChecked()
neighborSize = self.setNeighborhoodSizeLineEditWidget.text
interations = self.setMedianIterationsWidget.value
logic.run(self.inputSelector.currentNode()
, self.brainTissuesSelector.currentNode()
, modality
, estimateBasalGanglia
, splitHemispheres
, brainAtlas
, age
, registrationAlgorithm
, useQuickRegistration
, numOfCores
, correlationRadius
, sampling
, splineGrid
, initMethod
, costMetric
, interpolationRegistration
, voxelResampling
, interpolationResampling
, applyMedianFiltering
, neighborSize
, interations)
#
# BabyBrainSegmentationLogic
#
class BabyBrainSegmentationLogic(ScriptedLoadableModuleLogic):
"""This class should implement all the actual
computation done by your module. The interface
should be such that other python code can import
this class and make use of the functionality without
requiring an instance of the Widget.
Uses ScriptedLoadableModuleLogic base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def hasImageData(self,volumeNode):
"""This is an example logic method that
returns true if the passed in volume
node has valid image data
"""
if not volumeNode:
logging.debug('hasImageData failed: no volume node')
return False
if volumeNode.GetImageData() is None:
logging.debug('hasImageData failed: no image data in volume node')
return False
return True
def isValidInputOutputData(self, inputVolumeNode, outputVolumeNode):
"""Validates if the output is not the same as input
"""
if not inputVolumeNode:
logging.debug('isValidInputOutputData failed: no input volume node defined')
return False
if not outputVolumeNode:
logging.debug('isValidInputOutputData failed: no output volume node defined')
return False
if inputVolumeNode.GetID()==outputVolumeNode.GetID():
logging.debug('isValidInputOutputData failed: input and output volumes are the same. Create a new volume for output to avoid this error.')
return False
return True
def run(self, inputVolume
, outputVolume
, modality
, estimateBasalGanglia
, splitHemispheres
, brainAtlas
, age
, registrationAlgorithm
, useQuickRegistration
, numOfCores
, correlationRadius
, sampling
, splineGrid
, initMethod
, costMetric
, interpolationRegistration
, voxelResampling
, interpolationResampling
, applyMedianFiltering
, neighborSize
, interations):
"""
Run the actual algorithm
"""
if not self.isValidInputOutputData(inputVolume, outputVolume):
slicer.util.errorDisplay('Input volume is the same as output volume. Choose a different output volume.')
return False
if brainAtlas == "FET2012" and modality == "T1":
slicer.util.errorDisplay('The FET2012 atlas does not have a T1 brain template. Choose a different image modality or a different brain template.')
return False
if brainAtlas == "FET2012" and estimateBasalGanglia:
slicer.util.errorDisplay('The FET2012 atlas does not have an option to estimate deep gray matter structures. Choose a different brain template or leave the Estimate Basal Ganglia Structures option unchecked.')
return False
modulePath = os.path.dirname(slicer.modules.babybrainsegmentation.path)
if platform.system() == "Windows":
databasePath = modulePath + "\\Resources\\atlases"
else:
databasePath = modulePath + "/Resources/atlases"
# Loading BabyBrain color table
ColorTableNode = slicer.vtkMRMLColorTableNode()
slicer.mrmlScene.AddNode(ColorTableNode)
if splitHemispheres:
ColorTableNode = slicer.util.getNode('2018_USP_BabyBrain_Lateralized')
else:
ColorTableNode = slicer.util.getNode('2018_USP_BabyBrain')
if ColorTableNode is None:
if splitHemispheres:
if platform.system() == "Windows":
(readSuccess, ColorTableNode) = slicer.util.loadColorTable(modulePath + "\\Resources\\2018_USP_BabyBrain_Lateralized.ctbl", True)
else:
(readSuccess, ColorTableNode) = slicer.util.loadColorTable(modulePath + "/Resources/2018_USP_BabyBrain_Lateralized.ctbl", True)
else:
if platform.system() == "Windows":
(readSuccess, ColorTableNode) = slicer.util.loadColorTable(modulePath + "\\Resources\\2018_USP_BabyBrain.ctbl", True)
else:
(readSuccess, ColorTableNode) = slicer.util.loadColorTable(modulePath + "/Resources/2018_USP_BabyBrain.ctbl", True)
# Allocating the default path in order to save temporary files into the hard drive
home = ""
if platform.system() == "Windows":
home = expanduser("~") # Note: expanduser does not expand "%userprofile%"; "~" resolves via USERPROFILE on Windows
# Creating temporary folder in home directory
os.system("mkdir " + home + "\\tmpBabyBrainSegmentation")
else:
home = expanduser("~")
# Creating temporary folder in home directory
os.system("mkdir " + home + "/tmpBabyBrainSegmentation")
tmpFolder = home
# Path of the temporary folder in the home directory
if platform.system() == "Windows":
tmpFolder = home + "\\tmpBabyBrainSegmentation"
else:
tmpFolder = home + "/tmpBabyBrainSegmentation"
# Checking if the age is found in the chosen brain template
setAge = age
if brainAtlas == "NEO2012":
if age < 27:
setAge = 27
elif age > 43:
setAge = 43
elif brainAtlas == "FET2012":
if age < 23:
setAge = 23
elif age > 37:
setAge = 37
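# (For example, a requested age of 45 weeks with NEO2012 is clamped to 43, and
# a requested age of 20 weeks with FET2012 is clamped to 23.)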
logging.info('Processing started')
# This will check whether the registration transforms already exist in the Slicer scene, using their generic names.
# If they exist, the most recent ones are adopted as the correct template transforms for the input data.
# This strategy is useful if the user has run the BabyBrainPreparation module, because the registration transforms
# generated there are recycled here.
usedAtlasPropagation = False
regAffine = slicer.util.getNodes('BabyBrain_regMNI2Native_0GenericAffine*')
for t in regAffine:
regAffine = t
regAffine=slicer.util.getNode(regAffine)
regWarp = slicer.util.getNodes('BabyBrain_regMNI2Native_1Warp*')
for t in regWarp:
regWarp = t
regWarp=slicer.util.getNode(regWarp)
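# (slicer.util.getNodes returns a name-to-node dict, so each loop above ends up
# holding the last matching node name, which is then resolved with getNode.)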
if regAffine is None and regWarp is None:
usedAtlasPropagation = True
######################################################################################
# Step - Label propagation using brain atlas.
######################################################################################
# Reading brain template
if platform.system() == "Windows":
readingParameters = {}
readingParameters['name'] = "brain_template"
readingParameters['center'] = True
readingParameters['show'] = False
(readSuccess, brainAtlasNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\templates\\template_" + modality + "_" + str(int(setAge)) +".nii.gz", readingParameters, True)
else:
readingParameters = {}
readingParameters['name'] = "brain_template"
readingParameters['center'] = True
readingParameters['show'] = False
(readSuccess, brainAtlasNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/templates/template_" + modality + "_" + str(int(setAge)) + ".nii.gz", readingParameters, True)
######################################################################################
# Step - Atlas propagation - linear and elastic transformations
######################################################################################
# Image registration with atlas - ANTs or BRAINSfit
# Creating linear transform node
regMNI2NativeLinearTransform = slicer.vtkMRMLLinearTransformNode()
regMNI2NativeLinearTransform.SetName("BabyBrain_regMNI2Native_0GenericAffine")
slicer.mrmlScene.AddNode(regMNI2NativeLinearTransform)
regMNI2NativeBSplineTransform = slicer.vtkMRMLBSplineTransformNode()
regMNI2NativeBSplineTransform.SetName("BabyBrain_regMNI2Native_1Warp")
slicer.mrmlScene.AddNode(regMNI2NativeBSplineTransform)
self.atlasPropagation(registrationAlgorithm
, inputVolume
, brainAtlasNode
, regMNI2NativeLinearTransform
, regMNI2NativeBSplineTransform
, interpolationRegistration
, sampling
, splineGrid
, initMethod
, numOfCores
, useQuickRegistration)
if registrationAlgorithm == "ANTs":
slicer.mrmlScene.RemoveNode(regMNI2NativeLinearTransform)
slicer.mrmlScene.RemoveNode(regMNI2NativeBSplineTransform)
regAffine = slicer.util.getNodes('BabyBrain_regMNI2Native_0GenericAffine*')
for t in regAffine:
regAffine = t
regAffine=slicer.util.getNode(regAffine)
regWarp = slicer.util.getNodes('BabyBrain_regMNI2Native_1Warp*')
for t in regWarp:
regWarp = t
regWarp=slicer.util.getNode(regWarp)
slicer.util.showStatusMessage("Atlas propagation is finished...")
######################################################################################
# Step - Resampling the input volume
######################################################################################
tmpResampledInputNode = slicer.vtkMRMLScalarVolumeNode()
tmpResampledInputNode.SetName("resampled_input")
slicer.mrmlScene.AddNode(tmpResampledInputNode)
self.imageResamplingResolution(inputVolume
, tmpResampledInputNode
, voxelResampling
, interpolationResampling)
slicer.util.showStatusMessage("Image resampling to voxel resolution of [" + str(voxelResampling) + "] is finished...")
######################################################################################
# Step - Remove Cerebellum and brainstem from the input volume
######################################################################################
# Reading the cerebellum region from atlas and adjusting to native space
tmpCerebellumMask = slicer.vtkMRMLLabelMapVolumeNode()
tmpCerebellumMask.SetName("cerebellum_mask")
slicer.mrmlScene.AddNode(tmpCerebellumMask)
# Reading cerebellum volume mask from atlas
if platform.system() == "Windows":
readingParameters = {}
readingParameters['name'] = "cerebellum_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, cerebellumMaskNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\cerebellum\\cerebellum_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "cerebellum_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, cerebellumMaskNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/cerebellum/cerebellum_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
self.applyRegistrationTransforms(registrationAlgorithm
, cerebellumMaskNode
, tmpResampledInputNode
, tmpCerebellumMask
, regAffine
, regWarp
, True)
# Reading the cerebellum prior probability and adjusting to native space
tmpCerebellumPriors = slicer.vtkMRMLScalarVolumeNode()
tmpCerebellumPriors.SetName("cerebellum_priors")
slicer.mrmlScene.AddNode(tmpCerebellumPriors)
# Reading cerebellum volume mask from atlas
if platform.system() == "Windows":
readingParameters = {}
readingParameters['name'] = "cerebellum_template_priors"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = False
(readSuccess, cerebellumPriorsNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\cerebellum\\cerebellum_prob_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "cerebellum_template_priors"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = False
(readSuccess, cerebellumPriorsNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/cerebellum/cerebellum_prob_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
self.applyRegistrationTransforms(registrationAlgorithm
, cerebellumPriorsNode
, tmpResampledInputNode
, tmpCerebellumPriors
, regAffine
, regWarp
, False)
# Saving the cerebellum priors image
if platform.system() == "Windows":
slicer.util.saveNode(tmpCerebellumPriors, tmpFolder + '\\cerebellum_priors.nii.gz')
else:
slicer.util.saveNode(tmpCerebellumPriors, tmpFolder + '/cerebellum_priors.nii.gz')
# Reading the CSF part of the cerebellum prior probability and adjusting to native space
tmpCSFCerebellumPriors = slicer.vtkMRMLScalarVolumeNode()
tmpCSFCerebellumPriors.SetName("csf_cerebellum_priors")
slicer.mrmlScene.AddNode(tmpCSFCerebellumPriors)
# Reading cerebellum volume mask from atlas
if platform.system() == "Windows":
readingParameters = {}
readingParameters['name'] = "csf_cerebellum_template_priors"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = False
(readSuccess, csfCerebellumPriorsNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\csf\\csf_cerebellum_prob_" + str(
int(setAge)) + ".nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "csf_cerebellum_template_priors"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = False
(readSuccess, csfCerebellumPriorsNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/csf/csf_cerebellum_prob_" + str(
int(setAge)) + ".nii.gz", readingParameters,
True)
self.applyRegistrationTransforms(registrationAlgorithm
, csfCerebellumPriorsNode
, tmpResampledInputNode
, tmpCSFCerebellumPriors
, regAffine
, regWarp
, False)
# Saving the cerebellum priors image
if platform.system() == "Windows":
slicer.util.saveNode(tmpCSFCerebellumPriors, tmpFolder + '\\csf_cerebellum_priors.nii.gz')
else:
slicer.util.saveNode(tmpCSFCerebellumPriors, tmpFolder + '/csf_cerebellum_priors.nii.gz')
# Reading the brainstem region from atlas and adjusting to native space
tmpBrainstemMask = slicer.vtkMRMLLabelMapVolumeNode()
tmpBrainstemMask.SetName("brainstem_mask")
slicer.mrmlScene.AddNode(tmpBrainstemMask)
# Reading brainstem volume mask from atlas
if platform.system() == "Windows":
readingParameters = {}
readingParameters['name'] = "brainstem_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, brainstemMaskNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\brainstem\\brainstem_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "brainstem_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, brainstemMaskNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/brainstem/brainstem_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
self.applyRegistrationTransforms(registrationAlgorithm
, brainstemMaskNode
, tmpResampledInputNode
, tmpBrainstemMask
, regAffine
, regWarp
, True)
# Reading the brainstem priors and adjusting to native space
tmpBrainstemPriors = slicer.vtkMRMLScalarVolumeNode()
tmpBrainstemPriors.SetName("brainstem_priors")
slicer.mrmlScene.AddNode(tmpBrainstemPriors)
# Reading brainstem volume mask from atlas
if platform.system() == "Windows":
readingParameters = {}
readingParameters['name'] = "brainstem_template_priors"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = False
(readSuccess, brainstemPriorsNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\brainstem\\brainstem_prob_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "brainstem_template_priors"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = False
(readSuccess, brainstemPriorsNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/brainstem/brainstem_prob_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
self.applyRegistrationTransforms(registrationAlgorithm
, brainstemPriorsNode
, tmpResampledInputNode
, tmpBrainstemPriors
, regAffine
, regWarp
, False)
# Saving the brainstem priors image
if platform.system() == "Windows":
slicer.util.saveNode(tmpBrainstemPriors, tmpFolder + '\\brainstem_priors.nii.gz')
else:
slicer.util.saveNode(tmpBrainstemPriors, tmpFolder + '/brainstem_priors.nii.gz')
# Reading the csf part of the brainstem priors and adjusting to native space
tmpCSFBrainstemPriors = slicer.vtkMRMLScalarVolumeNode()
tmpCSFBrainstemPriors.SetName("csf_brainstem_priors")
slicer.mrmlScene.AddNode(tmpCSFBrainstemPriors)
# Reading brainstem volume mask from atlas
if platform.system() == "Windows":
readingParameters = {}
readingParameters['name'] = "csf_brainstem_template_priors"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = False
(readSuccess, csfBrainstemPriorsNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\csf\\csf_brainstem_prob_" + str(
int(setAge)) + ".nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "csf_brainstem_template_priors"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = False
(readSuccess, csfBrainstemPriorsNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/csf/csf_brainstem_prob_" + str(
int(setAge)) + ".nii.gz", readingParameters,
True)
self.applyRegistrationTransforms(registrationAlgorithm
, csfBrainstemPriorsNode
, tmpResampledInputNode
, tmpCSFBrainstemPriors
, regAffine
, regWarp
, False) # TODO BRAINSResample takes the warp in a different input... check in the CLI which one it is ("Displacement field" error)
# Saving the brainstem priors image
if platform.system() == "Windows":
slicer.util.saveNode(tmpCSFBrainstemPriors, tmpFolder + '\\csf_brainstem_priors.nii.gz')
else:
slicer.util.saveNode(tmpCSFBrainstemPriors, tmpFolder + '/csf_brainstem_priors.nii.gz')
# Removing brainstem and cerebellum from the input image
inputImage = sitkUtils.PullVolumeFromSlicer(tmpResampledInputNode)
brainstemMaskImage = sitkUtils.PullVolumeFromSlicer(tmpBrainstemMask)
brainstemMaskImage = sitk.Cast(brainstemMaskImage, inputImage.GetPixelID()) # This is necessary since the MaskNegated filter requires the same pixel type in both images.
cerebellumMaskImage = sitkUtils.PullVolumeFromSlicer(tmpCerebellumMask)
cerebellumMaskImage = sitk.Cast(cerebellumMaskImage, inputImage.GetPixelID()) # This is necessary since the MaskNegated filter requires the same pixel type in both images.
filter = sitk.MaskNegatedImageFilter()
output_brainOnly_Image = filter.Execute(inputImage,brainstemMaskImage)
output_brainOnly_Image = filter.Execute(output_brainOnly_Image, cerebellumMaskImage)
tmpBrainOnlyNode = slicer.vtkMRMLScalarVolumeNode()
tmpBrainOnlyNode.SetName("resampled_input_brainOnly")
slicer.mrmlScene.AddNode(tmpBrainOnlyNode)
sitkUtils.PushVolumeToSlicer(output_brainOnly_Image, tmpBrainOnlyNode) # TODO Find out how to load these labels without updating the scene... it is not nice to see these intermediate labels during processing
slicer.util.showStatusMessage("Brainstem and cerebellum removal is finished...")
######################################################################################
# Step - Segmenting the cerebellum and brainstem
######################################################################################
# Cerebellum
tmpCerebellumOnlyVolumeNode = slicer.vtkMRMLScalarVolumeNode()
tmpCerebellumOnlyVolumeNode.SetName("cerebellum_volume")
slicer.mrmlScene.AddNode(tmpCerebellumOnlyVolumeNode)
# Extracting the cerebellum from the input image
params = {}
params['InputVolume'] = tmpResampledInputNode.GetID()
params['MaskVolume'] = tmpCerebellumMask.GetID()
params['OutputVolume'] = tmpCerebellumOnlyVolumeNode.GetID()
params['Label'] = 1
slicer.cli.run(slicer.modules.maskscalarvolume, None, params, wait_for_completion=True)
cerebellumOnlyLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
cerebellumOnlyLabelMask.SetName("cerebellum_tissue_mask")
slicer.mrmlScene.AddNode(cerebellumOnlyLabelMask)
definitions = [5,3]
if modality == 'T1':
definitions = [3,5]
# Creating background priors to cerebellum segmentation
listOfTissuesPriors = [tmpCerebellumPriors, tmpCSFCerebellumPriors]
backgroundForCerebellumPrior = slicer.vtkMRMLScalarVolumeNode()
backgroundForCerebellumPrior.SetName("background_cerebellum_priors")
slicer.mrmlScene.AddNode(backgroundForCerebellumPrior)
self.createBackgroundPriors(backgroundForCerebellumPrior, listOfTissuesPriors)
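# (createBackgroundPriors is assumed to build the background prior as the
# complement of the listed tissue priors, so that background, cerebellum and
# CSF probabilities form a partition at each voxel for the Bayes classifier.)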
if platform.system() == "Windows":
slicer.util.saveNode(backgroundForCerebellumPrior, tmpFolder + '\\background_cerebellum_priors.nii.gz')
else:
slicer.util.saveNode(backgroundForCerebellumPrior, tmpFolder + '/background_cerebellum_priors.nii.gz')
if platform.system() == "Windows":
path2CerebellumPriors = tmpFolder + '\\cerebellum_priors.nii.gz'
path2CSFCerebellumPriors = tmpFolder + '\\csf_cerebellum_priors.nii.gz'
path2BackgroundCerebellumPriors = tmpFolder + '\\background_cerebellum_priors.nii.gz'
else:
path2CerebellumPriors=tmpFolder + '/cerebellum_priors.nii.gz'
path2CSFCerebellumPriors=tmpFolder + '/csf_cerebellum_priors.nii.gz'
path2BackgroundCerebellumPriors = tmpFolder + '/background_cerebellum_priors.nii.gz'
listOfTissuesPriors = [path2BackgroundCerebellumPriors, path2CerebellumPriors, path2CSFCerebellumPriors]
if modality == "T1":
listOfTissuesPriors = [path2BackgroundCerebellumPriors, path2CSFCerebellumPriors, path2CerebellumPriors]
self.segmentingTissues(tmpCerebellumOnlyVolumeNode
, modality
, 3
, cerebellumOnlyLabelMask
, inputPriorsFile=listOfTissuesPriors
, definitions=definitions ) # Cerebellum and CSF
# Brainstem
tmpBrainstemOnlyVolumeNode = slicer.vtkMRMLScalarVolumeNode()
tmpBrainstemOnlyVolumeNode.SetName("brainstem_volume")
slicer.mrmlScene.AddNode(tmpBrainstemOnlyVolumeNode)
# Extracting the brainstem from the input image
params = {}
params['InputVolume'] = tmpResampledInputNode.GetID()
params['MaskVolume'] = tmpBrainstemMask.GetID()
params['OutputVolume'] = tmpBrainstemOnlyVolumeNode.GetID()
params['Label'] = 1
slicer.cli.run(slicer.modules.maskscalarvolume, None, params, wait_for_completion=True)
brainstemOnlyLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
brainstemOnlyLabelMask.SetName("brainstem_tissue_mask")
slicer.mrmlScene.AddNode(brainstemOnlyLabelMask)
definitions = [4,3]
if modality == 'T1':
definitions = [3,4]
# Creating background priors to cerebellum segmentation
listOfTissuesPriors = [tmpBrainstemPriors, tmpCSFBrainstemPriors]
backgroundForBrainstemPrior = slicer.vtkMRMLScalarVolumeNode()
backgroundForBrainstemPrior.SetName("background_brainstem_priors")
slicer.mrmlScene.AddNode(backgroundForBrainstemPrior)
self.createBackgroundPriors(backgroundForBrainstemPrior, listOfTissuesPriors)
if platform.system() == "Windows":
slicer.util.saveNode(backgroundForBrainstemPrior, tmpFolder + '\\background_brainstem_priors.nii.gz')
else:
slicer.util.saveNode(backgroundForBrainstemPrior, tmpFolder + '/background_brainstem_priors.nii.gz')
if platform.system() == "Windows":
path2BrainstemPriors = tmpFolder + '\\brainstem_priors.nii.gz'
path2CSFBrainstemPriors = tmpFolder + '\\csf_brainstem_priors.nii.gz'
path2BackgroundPriors = tmpFolder + '\\background_brainstem_priors.nii.gz'
else:
path2BrainstemPriors = tmpFolder + '/brainstem_priors.nii.gz'
path2CSFBrainstemPriors = tmpFolder + '/csf_brainstem_priors.nii.gz'
path2BackgroundPriors = tmpFolder + '/background_brainstem_priors.nii.gz'
listOfTissuesPriors = [path2BackgroundPriors, path2BrainstemPriors, path2CSFBrainstemPriors]
if modality == "T1":
listOfTissuesPriors = [path2BackgroundPriors, path2CSFBrainstemPriors, path2BrainstemPriors]
self.segmentingTissues(tmpBrainstemOnlyVolumeNode
, modality
, 3
, brainstemOnlyLabelMask
, inputPriorsFile=listOfTissuesPriors
, definitions=definitions) # Brainstem and CSF
# Merging both labels into one
brainstemPlusCerebellumLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
brainstemPlusCerebellumLabelMask.SetName("brainstem+cerebellum_tissue_mask")
slicer.mrmlScene.AddNode(brainstemPlusCerebellumLabelMask)
self.combineLabels(brainstemOnlyLabelMask, cerebellumOnlyLabelMask, brainstemPlusCerebellumLabelMask)
slicer.util.showStatusMessage("Brainstem and cerebellum segmentation is finished...")
######################################################################################
# Step - Smoothing posterior fossa segmentation by median filtering
######################################################################################
for i in range(0, 4): # Set an empirical number of iterations for posterior fossa structures
params = {}
params['neighborhood'] = neighborSize
params['inputVolume'] = brainstemPlusCerebellumLabelMask
params['outputVolume'] = brainstemPlusCerebellumLabelMask
slicer.cli.run(slicer.modules.medianimagefilter, None, params, wait_for_completion=True)
######################################################################################
# Step - Brain tissue segmentation (only brain)
######################################################################################
brainOnlyLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
brainOnlyLabelMask.SetName("brainOnly_tissues_mask")
slicer.mrmlScene.AddNode(brainOnlyLabelMask)
definitions = [1,2,3]
if modality == 'T1':
definitions = [3,2,1]
self.segmentingTissues(tmpBrainOnlyNode
, modality
, 4
, brainOnlyLabelMask
, definitions=definitions)
slicer.util.showStatusMessage("Whole brain (first) segmentation is finished...")
######################################################################################
# Step - Correcting the brain ventricules segmentation
######################################################################################
# Correcting the brain ventricules segmentation in the original input tissues labels
# Reading brain ventricules mask from atlas
if platform.system() is "Windows":
readingParameters = {}
readingParameters['name'] = "ventricules_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, ventriculesMaskNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\ventricules\\ventricules_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "ventricules_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, ventriculesMaskNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/ventricules/ventricules_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
# Transforming the ventricules mask to native space
tmpVentriculesLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
tmpVentriculesLabelMask.SetName("tmpVentricules_mask")
slicer.mrmlScene.AddNode(tmpVentriculesLabelMask)
self.applyRegistrationTransforms(registrationAlgorithm
, ventriculesMaskNode
, tmpResampledInputNode
, tmpVentriculesLabelMask
, regAffine
, regWarp
, True)
# Masking the input image with the ventricules label in order to restrict the image information
tmpVentriculesRegion = slicer.vtkMRMLScalarVolumeNode()
tmpVentriculesRegion.SetName("ventricules_region")
slicer.mrmlScene.AddNode(tmpVentriculesRegion)
params = {}
params['InputVolume'] = tmpResampledInputNode.GetID()
params['MaskVolume'] = tmpVentriculesLabelMask.GetID()
params['OutputVolume'] = tmpVentriculesRegion.GetID()
params['Label'] = 1
slicer.cli.run(slicer.modules.maskscalarvolume, None, params, wait_for_completion=True)
ventriculesCorrectionLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
ventriculesCorrectionLabelMask.SetName("ventricules_correction_mask")
slicer.mrmlScene.AddNode(ventriculesCorrectionLabelMask)
definitions = [0, 3]
if modality == 'T1':
definitions = [3, 0]
self.segmentingTissues(tmpVentriculesRegion
, modality
, 3
, ventriculesCorrectionLabelMask
, definitions=definitions)
slicer.util.showStatusMessage("Ventricules segmentation correction is finished...")
# Correcting the ventricules tissue segmentation in the original input tissue labels
self.combineLabels(brainOnlyLabelMask
, ventriculesCorrectionLabelMask
, brainOnlyLabelMask
, firstOverwrites=False)
######################################################################################
# Step - Correcting the CSF tissue
######################################################################################
labelToCorrect=1
if modality=="T1":
labelToCorrect=3
# Correcting GM voxels
self.labelCorrecting(brainOnlyLabelMask
, brainOnlyLabelMask
, labelToCorrect
, 0)
labelToCorrect=2
# Correcting WM voxels
self.labelCorrecting(brainOnlyLabelMask
, brainOnlyLabelMask
, labelToCorrect
, 0)
# Growing the CSF area so that it fits the brain space
inputImage = sitkUtils.PullVolumeFromSlicer(tmpBrainOnlyNode)
inputLabel = sitkUtils.PullVolumeFromSlicer(brainOnlyLabelMask)
threshold = sitk.BinaryThresholdImageFilter()
dilate = sitk.BinaryDilateImageFilter()
mask = sitk.MaskImageFilter()
csf=3
if modality=="T1":
csf=1
brainVolumeMask = threshold.Execute(inputImage, 1, 100000, 1, 0) # Making mask from the input image
csf_only = threshold.Execute(inputLabel, csf, csf, 3, 0) # CSF only mask
if modality=="T1":
csf_only = threshold.Execute(inputLabel, csf, csf, 1, 0) # CSF only mask
dilate.SetKernelRadius(10)
brainVolumeMask = sitk.Cast(brainVolumeMask, csf_only.GetPixelID())
csf_only = dilate.Execute(csf_only, 0, 3, False) # Dilate the CSF only mask
brainCSF = mask.Execute(csf_only, brainVolumeMask) # Cutting out the excess CSF tissue based on the brain volume space
brainGMPlusWM = threshold.Execute(inputLabel, 1, 2, 1, 0) # Masking the GM and WM from the input image
if modality=="T1":
brainGMPlusWM = threshold.Execute(inputLabel, 2, 3, 1, 0)
tmpCSFNode = slicer.vtkMRMLLabelMapVolumeNode()
tmpCSFNode.SetName("tmp_csf_mask")
slicer.mrmlScene.AddNode(tmpCSFNode)
tmpGMPlusWMNode = slicer.vtkMRMLLabelMapVolumeNode()
tmpGMPlusWMNode.SetName("tmp_GMPlusWM_mask")
slicer.mrmlScene.AddNode(tmpGMPlusWMNode)
sitkUtils.PushVolumeToSlicer(brainCSF, tmpCSFNode)
sitkUtils.PushVolumeToSlicer(brainGMPlusWM, tmpGMPlusWMNode)
params = {}
params['InputVolume'] = brainOnlyLabelMask.GetID()
params['MaskVolume'] = tmpGMPlusWMNode.GetID()
params['OutputVolume'] = tmpGMPlusWMNode.GetID()
params['Label'] = 1
slicer.cli.run(slicer.modules.maskscalarvolume, None, params, wait_for_completion=True)
# Finishing the CSF correction
self.combineLabels(tmpCSFNode, tmpGMPlusWMNode, brainOnlyLabelMask, firstOverwrites=False)
######################################################################################
# Step - Correcting the GM and WM tissues
######################################################################################
# Removing CSF tissue in the last step
tmpCSFVolume = slicer.vtkMRMLScalarVolumeNode()
tmpCSFVolume.SetName("csf_volume")
slicer.mrmlScene.AddNode(tmpCSFVolume)
params = {}
params['InputVolume'] = tmpResampledInputNode.GetID()
params['MaskVolume'] = brainOnlyLabelMask.GetID()
params['OutputVolume'] = tmpCSFVolume.GetID()
params['Label'] = 3
if modality=="T1":
params['Label'] = 1
slicer.cli.run(slicer.modules.maskscalarvolume, None, params, wait_for_completion=True)
tmpGMAndWMVolume = slicer.vtkMRMLScalarVolumeNode()
tmpGMAndWMVolume.SetName("gm_and_wm_volume")
slicer.mrmlScene.AddNode(tmpGMAndWMVolume)
params = {}
params['inputVolume1'] = tmpResampledInputNode.GetID()
params['inputVolume2'] = tmpCSFVolume.GetID()
params['outputVolume'] = tmpGMAndWMVolume.GetID()
params['order'] = 0
slicer.cli.run(slicer.modules.subtractscalarvolumes, None, params, wait_for_completion=True)
# Enhancing the signal of the GM and WM volume
self.imageGlobalConstrastEnhancement(tmpGMAndWMVolume
, tmpGMAndWMVolume
, "Logistic"
, 0.04
, 0.96
, 2
, 0
, False)
# Segmenting only GM and WM tissues
definitions = [1, 2]
if modality == 'T1':
definitions = [2, 1]
self.segmentingTissues(tmpGMAndWMVolume, modality, 3, tmpGMPlusWMNode, definitions=definitions)
# Correcting WM outside GM pial surface
self.labelCorrecting(tmpGMPlusWMNode, tmpGMPlusWMNode, 2, 0)
# Summing all tissues together
self.combineLabels(tmpCSFNode, tmpGMPlusWMNode, brainOnlyLabelMask, firstOverwrites=False)
# End segmentation process with only a global tissue segmentation.
if not splitHemispheres: # TODO: Check why the brainstem and cerebellum end up with wrong labels when the split is applied
######################################################################################
# Step - Merging the brainstem, cerebellum and brain hemisphere tissues together
######################################################################################
self.combineLabels(brainOnlyLabelMask, brainstemPlusCerebellumLabelMask, outputVolume, firstOverwrites=False)
# Cleaning up wrong voxels that may appear outside the CSF tissue
if modality == "T1":
self.labelCorrecting(outputVolume, outputVolume, 3, 0) # GM
self.labelCorrecting(outputVolume, outputVolume, 2, 0) # WM
else:
self.labelCorrecting(outputVolume, outputVolume, 1, 0) # GM
self.labelCorrecting(outputVolume, outputVolume, 2, 0) # WM
# Removing small sets of non-connected clusters that do not belong to the major tissue classes.
# Since we are looking for major areas of the brain, a minimum size of 5 ml is used.
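# For reference: at 1 mm isotropic spacing, 5 ml corresponds to roughly 5000 voxels
# (5 ml = 5000 mm^3 and each voxel occupies 1 mm^3).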
if estimateBasalGanglia:
######################################################################################
# Step - Merging the basal ganglia labels with the other tissue labels
######################################################################################
# Reading the deep gray matter (basal ganglia) mask from the atlas
if platform.system() is "Windows":
readingParameters = {}
readingParameters['name'] = "deepGM_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, dgmMaskNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\dgm\\dgm_" + str(int(setAge)) + ".nii.gz",
readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "deepGM_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, dgmMaskNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/dgm/dgm_" + str(int(setAge)) + ".nii.gz",
readingParameters,
True)
# Transforming the basal ganglia mask to native space
tmpDeepGMLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
tmpDeepGMLabelMask.SetName("tmpdeepGM_mask")
slicer.mrmlScene.AddNode(tmpDeepGMLabelMask)
self.applyRegistrationTransforms(registrationAlgorithm
, dgmMaskNode
, tmpResampledInputNode
, tmpDeepGMLabelMask
, regAffine
, regWarp
, True)
self.combineLabels(tmpDeepGMLabelMask, outputVolume, outputVolume)
slicer.util.showStatusMessage("Basal ganglia segmentation is finished...")
######################################################################################
# Step - Cleaning temporary data (Debug mode: Off)
######################################################################################
if usedAtlasPropagation:
slicer.mrmlScene.RemoveNode(brainAtlasNode)
slicer.mrmlScene.RemoveNode(tmpResampledInputNode)
slicer.mrmlScene.RemoveNode(tmpCerebellumMask)
slicer.mrmlScene.RemoveNode(cerebellumOnlyLabelMask)
slicer.mrmlScene.RemoveNode(cerebellumMaskNode)
slicer.mrmlScene.RemoveNode(tmpCerebellumPriors)
slicer.mrmlScene.RemoveNode(cerebellumPriorsNode)
slicer.mrmlScene.RemoveNode(tmpCSFCerebellumPriors)
slicer.mrmlScene.RemoveNode(csfCerebellumPriorsNode)
slicer.mrmlScene.RemoveNode(tmpBrainstemPriors)
slicer.mrmlScene.RemoveNode(brainstemPriorsNode)
slicer.mrmlScene.RemoveNode(tmpCSFBrainstemPriors)
slicer.mrmlScene.RemoveNode(csfBrainstemPriorsNode)
slicer.mrmlScene.RemoveNode(backgroundForCerebellumPrior)
slicer.mrmlScene.RemoveNode(backgroundForBrainstemPrior)
slicer.mrmlScene.RemoveNode(tmpCSFNode)
slicer.mrmlScene.RemoveNode(tmpBrainstemMask)
slicer.mrmlScene.RemoveNode(brainstemMaskNode)
slicer.mrmlScene.RemoveNode(tmpBrainOnlyNode)
slicer.mrmlScene.RemoveNode(tmpCerebellumOnlyVolumeNode)
slicer.mrmlScene.RemoveNode(tmpBrainstemOnlyVolumeNode)
slicer.mrmlScene.RemoveNode(brainstemOnlyLabelMask)
slicer.mrmlScene.RemoveNode(brainstemPlusCerebellumLabelMask)
slicer.mrmlScene.RemoveNode(brainOnlyLabelMask)
slicer.mrmlScene.RemoveNode(ventriculesCorrectionLabelMask)
slicer.mrmlScene.RemoveNode(tmpVentriculesRegion)
slicer.mrmlScene.RemoveNode(ventriculesMaskNode)
slicer.mrmlScene.RemoveNode(tmpVentriculesLabelMask)
slicer.mrmlScene.RemoveNode(tmpGMPlusWMNode)
slicer.mrmlScene.RemoveNode(tmpGMAndWMVolume)
slicer.mrmlScene.RemoveNode(tmpCSFVolume)
# slicer.mrmlScene.RemoveNode(brainOnlyVolume)
if estimateBasalGanglia:
slicer.mrmlScene.RemoveNode(tmpDeepGMLabelMask)
if applyMedianFiltering:
######################################################################################
# Step - Smoothing brain segmentation by median filtering
######################################################################################
for i in range(0, interations):
params = {}
params['neighborhood'] = neighborSize
params['inputVolume'] = outputVolume
params['outputVolume'] = outputVolume
slicer.cli.run(slicer.modules.medianimagefilter, None, params, wait_for_completion=True)
slicer.util.showStatusMessage("OPTIONAL: Brain segmentation smoothing is finished...")
# Setting Color Table with the USP_2018_GlobalBrain pattern
displayBrainLabel = outputVolume.GetDisplayNode()
displayBrainLabel.SetAndObserveColorNodeID(ColorTableNode.GetID())
logging.info('Processing completed')
slicer.util.showStatusMessage("Baby Brain Segmentation is finished")
# Removing the temporary folder with the segmentation files
if platform.system() is "Windows":
os.system("rmdir /S /Q " + tmpFolder)
else:
os.system("rm -R " + tmpFolder)
return True
######################################################################################
# Step - Split brain hemispheres (only adjust the mask on the native space)
######################################################################################
# Reading brain hemispheres mask from atlas
if platform.system() is "Windows":
readingParameters = {}
readingParameters['name'] = "hemisphere_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, hemispheresMaskNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\hemispheres.nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "hemisphere_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, hemispheresMaskNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/hemispheres.nii.gz", readingParameters,
True)
# Transforming the hemispheres mask to native space
tmpHemispheresLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
tmpHemispheresLabelMask.SetName("tmpHemispheres_mask")
slicer.mrmlScene.AddNode(tmpHemispheresLabelMask)
self.applyRegistrationTransforms(registrationAlgorithm
, hemispheresMaskNode
, tmpResampledInputNode
, tmpHemispheresLabelMask
, regAffine
, regWarp
, True)
slicer.util.showStatusMessage("Brain hemispheres space definition is finished...")
######################################################################################
# Step - Setting up the brain hemispheres value (white matter, gray matter and ventricules)
######################################################################################
# Adjusting the brain hemispheres labels, taking care that the ventricules are set to value 10
brainOnlyHemispheresLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
brainOnlyHemispheresLabelMask.SetName("brain_hemispheres_tissue_mask")
slicer.mrmlScene.AddNode(brainOnlyHemispheresLabelMask)
params = {}
params['inputLabel'] = brainOnlyLabelMask.GetID()
params['splitLabel'] = tmpVentriculesLabelMask.GetID()
params['outputLabel'] = brainOnlyHemispheresLabelMask.GetID()
if modality == "T1":
params['labelSideA'] = 9
else:
params['labelSideA'] = 7
params['labelSideB'] = 0
params['doKeepSomeValues'] = True
if modality == 'T1':
params['keepSideA'] = "2,3"
params['keepSideB'] = "1,2,3"
else:
params['keepSideA'] = "1,2"
params['keepSideB'] = "1,2,3"
slicer.cli.run(slicer.modules.splitlabelvalues, None, params, wait_for_completion=True)
slicer.util.showStatusMessage("Brain hemispheres detection and separation is finished...")
# Adjusting the brain hemispheres globally, i.e. right-left white matter, gray matter and ventricules. CSF is constant regardless of laterality
params = {}
params['inputLabel'] = brainOnlyHemispheresLabelMask.GetID()
params['splitLabel'] = tmpHemispheresLabelMask.GetID()
params['outputLabel'] = brainOnlyHemispheresLabelMask.GetID()
params['labelSideA'] = 20
params['labelSideB'] = 40
params['doKeepSomeValues'] = True # This will maintain the CSF intact
csf = 3
if modality == 'T1':
csf = 1
params['keepSideA'] = csf
params['keepSideB'] = csf
slicer.cli.run(slicer.modules.splitlabelvalues, None, params, wait_for_completion=True)
slicer.util.showStatusMessage("Ventricules detection and separation is finished...")
######################################################################################
# Step - Merging the brainstem, cerebellum and brain hemisphere tissues together
######################################################################################
self.combineLabels(brainOnlyHemispheresLabelMask, brainstemPlusCerebellumLabelMask, outputVolume)
# TODO: ERROR when applying the color table: it depends on the modality! T1 and T2 do not end up with the same color pattern... fix the ColorTable or the segmentation?
# Cleaning up wrong voxels that may appear outside the CSF tissue
if modality == "T1":
self.labelCorrecting(outputVolume, outputVolume, 23, 0) # GM-Right
self.labelCorrecting(outputVolume, outputVolume, 43, 0) # GM-Left
self.labelCorrecting(outputVolume, outputVolume, 22, 0) # WM-Right
self.labelCorrecting(outputVolume, outputVolume, 42, 0) # WM-Left
else:
self.labelCorrecting(outputVolume, outputVolume, 21, 0) # GM-Right
self.labelCorrecting(outputVolume, outputVolume, 41, 0) # GM-Left
self.labelCorrecting(outputVolume, outputVolume, 22, 0) # WM-Right
self.labelCorrecting(outputVolume, outputVolume, 42, 0) # WM-Left
slicer.util.showStatusMessage("Brain parcellation is finished...")
if estimateBasalGanglia:
######################################################################################
# Step - Merging the basal ganglia labels with the other tissue labels
######################################################################################
# Reading the deep gray matter (basal ganglia) mask from the atlas
if platform.system() is "Windows":
readingParameters = {}
readingParameters['name'] = "deepGM_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, dgmMaskNode) = slicer.util.loadVolume(databasePath +
"\\" + brainAtlas +
"\\dgm\\dgm_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
else:
readingParameters = {}
readingParameters['name'] = "deepGM_template_mask"
readingParameters['center'] = True
readingParameters['show'] = False
readingParameters['labelmap'] = True
(readSuccess, dgmMaskNode) = slicer.util.loadVolume(databasePath +
"/" + brainAtlas +
"/dgm/dgm_" + str(int(setAge)) + ".nii.gz", readingParameters,
True)
# Transforming the basal ganglia mask to native space
tmpDeepGMLabelMask = slicer.vtkMRMLLabelMapVolumeNode()
tmpDeepGMLabelMask.SetName("tmpdeepGM_mask")
slicer.mrmlScene.AddNode(tmpDeepGMLabelMask)
self.applyRegistrationTransforms(registrationAlgorithm
, dgmMaskNode
, tmpResampledInputNode
, tmpDeepGMLabelMask
, regAffine
, regWarp
, True)
self.combineLabels(tmpDeepGMLabelMask, outputVolume, outputVolume)
slicer.util.showStatusMessage("Basal ganglia segmentation is finished...")
if applyMedianFiltering:
######################################################################################
# Step - Smoothing brain segmentation by median filtering
######################################################################################
for i in range(0, interations):
params = {}
params['neighborhood'] = neighborSize
params['inputVolume'] = outputVolume
params['outputVolume'] = outputVolume
slicer.cli.run(slicer.modules.medianimagefilter, None, params, wait_for_completion=True)
slicer.util.showStatusMessage("OPTIONAL: Brain segmentation smoothing is finished...")
# Setting Color Table with the USP_2018 pattern
displayBrainLabel = outputVolume.GetDisplayNode()
displayBrainLabel.SetAndObserveColorNodeID(ColorTableNode.GetID())
######################################################################################
# Step - Cleaning temporary data (Debug mode: Off)
######################################################################################
if usedAtlasPropagation:
slicer.mrmlScene.RemoveNode(brainAtlasNode)
slicer.mrmlScene.RemoveNode(tmpResampledInputNode)
slicer.mrmlScene.RemoveNode(tmpCerebellumMask)
slicer.mrmlScene.RemoveNode(cerebellumOnlyLabelMask)
slicer.mrmlScene.RemoveNode(cerebellumMaskNode)
slicer.mrmlScene.RemoveNode(tmpCerebellumPriors)
slicer.mrmlScene.RemoveNode(cerebellumPriorsNode)
slicer.mrmlScene.RemoveNode(tmpCSFCerebellumPriors)
slicer.mrmlScene.RemoveNode(csfCerebellumPriorsNode)
slicer.mrmlScene.RemoveNode(tmpBrainstemPriors)
slicer.mrmlScene.RemoveNode(brainstemPriorsNode)
slicer.mrmlScene.RemoveNode(tmpCSFBrainstemPriors)
slicer.mrmlScene.RemoveNode(csfBrainstemPriorsNode)
slicer.mrmlScene.RemoveNode(backgroundForCerebellumPrior)
slicer.mrmlScene.RemoveNode(backgroundForBrainstemPrior)
slicer.mrmlScene.RemoveNode(tmpCSFNode)
slicer.mrmlScene.RemoveNode(tmpBrainstemMask)
slicer.mrmlScene.RemoveNode(brainstemMaskNode)
slicer.mrmlScene.RemoveNode(tmpBrainOnlyNode)
slicer.mrmlScene.RemoveNode(tmpCerebellumOnlyVolumeNode)
slicer.mrmlScene.RemoveNode(tmpBrainstemOnlyVolumeNode) | slicer.mrmlScene.RemoveNode(brainstemPlusCerebellumLabelMask)
slicer.mrmlScene.RemoveNode(brainOnlyLabelMask)
slicer.mrmlScene.RemoveNode(ventriculesCorrectionLabelMask)
slicer.mrmlScene.RemoveNode(tmpVentriculesRegion)
slicer.mrmlScene.RemoveNode(ventriculesMaskNode)
slicer.mrmlScene.RemoveNode(tmpVentriculesLabelMask)
slicer.mrmlScene.RemoveNode(tmpGMPlusWMNode)
slicer.mrmlScene.RemoveNode(tmpGMAndWMVolume)
slicer.mrmlScene.RemoveNode(tmpCSFVolume)
slicer.mrmlScene.RemoveNode(hemispheresMaskNode)
slicer.mrmlScene.RemoveNode(tmpHemispheresLabelMask)
slicer.mrmlScene.RemoveNode(brainOnlyHemispheresLabelMask)
# slicer.mrmlScene.RemoveNode(brainOnlyVolume)
if estimateBasalGanglia:
slicer.mrmlScene.RemoveNode(tmpDeepGMLabelMask)
# Removing the temporary folder with the segmentation files
if platform.system() is "Windows":
os.system("rmdir /S /Q " + tmpFolder)
else:
os.system("rm -R " + tmpFolder)
logging.info('Processing completed')
slicer.util.showStatusMessage("Baby Brain Segmentation is finished")
return True
#
# Atlas Propagation
#
def atlasPropagation(self, registrationAlgorithm
, fixedNode
, movingNode
, transformLinear
, transformElastic
, interpolation
, sampling
, splineGrid
, initMethod
, numberOfCores
, useQuickRegistration):
if registrationAlgorithm == "BRAINSFit":
# Applying the first registration level - Linear (Affine)
regParams = {}
regParams["fixedVolume"] = fixedNode.GetID()
regParams["movingVolume"] = movingNode.GetID()
regParams["outputVolume"] = movingNode.GetID()
regParams["samplingPercentage"] = sampling
regParams["linearTransform"] = transformLinear.GetID()
regParams["initializeTransformMode"] = initMethod
regParams["useRigid"] = True
regParams["useAffine"] = True
regParams["interpolationMode"] = interpolation
slicer.cli.run(slicer.modules.brainsfit, None, regParams, wait_for_completion=True)
# Applying the second registration level - Elastic (Spline)
regParams = {}
regParams["fixedVolume"] = fixedNode.GetID()
regParams["movingVolume"] = movingNode.GetID()
regParams["samplingPercentage"] = sampling
regParams["bsplineTransform"] = transformElastic.GetID()
# regParams['initialTransform'] = transformLinear.GetID()
regParams["initializeTransformMode"] = "Off"
regParams["splineGridSize"] = splineGrid
regParams["useBSpline"] = True
regParams["interpolationMode"] = interpolation
slicer.cli.run(slicer.modules.brainsfit, None, regParams, wait_for_completion=True)
else:
# Create the script calls. Since the ANTs tools are only provided for Unix systems, the path pattern is fixed.
home = expanduser("~")
tmpFolder = home + "/tmpBabyBrainSegmentation"
# Saving the subject image
slicer.util.saveNode(fixedNode, tmpFolder + '/subject.nii.gz')
# Saving the brain template
slicer.util.saveNode(movingNode, tmpFolder + '/template.nii.gz')
# Use ANTs registration
if useQuickRegistration:
os.system("antsRegistrationSyNQuick.sh -d 3 -f " + tmpFolder + "/subject.nii.gz -m " + tmpFolder + "/template.nii.gz -o " + tmpFolder +"/BabyBrain_regMNI2Native_ -n " + str(numberOfCores))
else:
os.system("antsRegistrationSyN.sh -d 3 -f " + tmpFolder + "/subject.nii.gz -m " + tmpFolder + "/template.nii.gz -o " + tmpFolder +"/BabyBrain_regMNI2Native_ -n " + str(numberOfCores))
# Reading registration transforms
(read, regTemplate1Warp) = slicer.util.loadTransform(tmpFolder + '/BabyBrain_regMNI2Native_1Warp.nii.gz', True)
regTemplate1Warp.SetName("BabyBrain_regMNI2Native_1Warp") # brain template to native space (SyN)
(read, regTemplate0GenericAffine) = slicer.util.loadTransform(tmpFolder + '/BabyBrain_regMNI2Native_0GenericAffine.mat', True)
regTemplate0GenericAffine.SetName("BabyBrain_regMNI2Native_0GenericAffine") # brain template to native space (affine)
#
# Registration Transform Application
#
def applyRegistrationTransforms(self, registrationAlgorithm
, inputVolume
, referenceVolume
, outputVolume
, transformLinear
, transformWarp
, isLabel):
params = {}
params["inputVolume"] = inputVolume.GetID()
params["referenceVolume"] = referenceVolume.GetID()
params["outputVolume"] = outputVolume.GetID()
params["warpTransform"] = transformLinear.GetID()
params["inverseTransform"] = False
if isLabel:
params["interpolationMode"] = "NearestNeighbor"
params["pixelType"] = "binary"
else:
params["interpolationMode"] = "Linear"
params["pixelType"] = "float"
slicer.cli.run(slicer.modules.brainsresample, None, params, wait_for_completion=True)
params = {}
params["inputVolume"] = outputVolume.GetID()
params["referenceVolume"] = inputVolume.GetID()
params["outputVolume"] = outputVolume.GetID()
params["warpTransform"] = transformWarp.GetID()
params["inverseTransform"] = False
if isLabel:
params["interpolationMode"] = "NearestNeighbor"
params["pixelType"] = "binary"
else:
params["interpolationMode"] = "Linear"
params["pixelType"] = "float"
slicer.cli.run(slicer.modules.brainsresample, None, params, wait_for_completion=True)
#
# Image Resampling Resolution
#
def imageResamplingResolution(self, inputNode
, outputNode
, resolution
, interpolation):
params = {}
params["InputVolume"] = inputNode.GetID()
params["OutputVolume"] = outputNode.GetID()
params["outputPixelSpacing"] = resolution
params["interpolationType"] = interpolation
slicer.cli.run(slicer.modules.resamplescalarvolume, None, params, wait_for_completion=True)
#
# Generic Brain Tissue Segmentation
#
def segmentingTissues(self, inputVolume
, imageModality
, numberOfTissues
, outputLabel
, oneTissue = False
, inputPriorsFile=""
, tissueValue = 2
, definitions = [1,2,3]):
params = {}
params['inputVolume'] = inputVolume.GetID()
if inputPriorsFile is not "":
listOfPriors = ""
for i in inputPriorsFile:
listOfPriors = listOfPriors + str(i) + ","
params['inputPriorsFile'] = listOfPriors
params['imageModality'] = imageModality
params['numberOfTissues'] = numberOfTissues
params['outputLabel'] = outputLabel.GetID()
params['oneTissue'] = oneTissue
params['typeTissue'] = tissueValue
params['labelsDefinition'] = definitions
slicer.cli.run(slicer.modules.bayesiantissueclassifier, None, params, wait_for_completion=True)
def labelCorrecting(self, inputLabel
, outputLabel
, labelToCorrect
, labelError
, tolerance=0.2
, neighborRadius="2,2,2"):
params = {}
params['inputLabel'] = inputLabel.GetID()
params['outputLabel'] = outputLabel.GetID()
params['labelToCorrect'] = labelToCorrect
params['labelError'] = labelError
params['tolerance'] = tolerance
params['neighborRadius'] = neighborRadius
slicer.cli.run(slicer.modules.locallabelingcorrection, None, params, wait_for_completion=True)
#
# Global Contrast Enhancement
#
def imageGlobalConstrastEnhancement(self, inputNode
, outputNode
, algorithm
, lowerCut
, higherCut
, maximumScaling
, minimumScaling
, flipFunction):
params = {}
params["inputVolume"] = inputNode.GetID()
params["outputVolume"] = outputNode.GetID()
params["algorithm"] = algorithm
params["lowerCut"] = lowerCut
params["higherCut"] = higherCut
params["maximumScaling"] = maximumScaling
params["minimumScaling"] = minimumScaling
params["flipFunction"] = flipFunction
slicer.cli.run(slicer.modules.globalcontrastenhancer, None, params, wait_for_completion=True)
#
# Combine Labels
#
def combineLabels(self, firstLabel
, secondLabel
, outputLabel
, firstOverwrites = True):
params = {}
params['InputLabelMap_A'] = firstLabel
params['InputLabelMap_B'] = secondLabel
params['OutputLabelMap'] = outputLabel
params['FirstOverwrites'] = firstOverwrites
slicer.cli.run(slicer.modules.imagelabelcombine, None, params, wait_for_completion=True)
def createBackgroundPriors(self, backgroundNode
, listOfTissuesPriors
, order=0):
# Making the first sum
params = {}
params['inputVolume1'] = listOfTissuesPriors[0].GetID()
params['inputVolume2'] = listOfTissuesPriors[1].GetID()
params['outputVolume'] = backgroundNode
params['order'] = order
slicer.cli.run(slicer.modules.addscalarvolumes, None, params, wait_for_completion=True)
N=len(listOfTissuesPriors)
if N > 2:
for tissue in range(2,N):
params = {}
params['inputVolume1'] = backgroundNode
params['inputVolume2'] = listOfTissuesPriors[tissue].GetID()
params['outputVolume'] = backgroundNode
params['order'] = order
slicer.cli.run(slicer.modules.addscalarvolumes, None, params, wait_for_completion=True)
# Making the inverse of the summation to infer the background prior
inputImage = sitkUtils.PullVolumeFromSlicer(backgroundNode)
subtractFilter = sitk.SubtractImageFilter()
absFilter = sitk.AbsImageFilter()
backgroundImage = subtractFilter.Execute(inputImage, 1.0)
backgroundImage = absFilter.Execute(backgroundImage)
sitkUtils.PushVolumeToSlicer(backgroundImage, backgroundNode)
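# A minimal, self-contained sketch of the background-prior arithmetic used by
# createBackgroundPriors above: the background probability is the absolute
# complement of the summed tissue priors, i.e. abs(sum(priors) - 1) per voxel.
# (_backgroundPriorExample is a hypothetical helper added for illustration only;
# it is not called anywhere in this module.)
def _backgroundPriorExample():
    import numpy as np
    csf = np.array([0.2, 0.5])  # per-voxel CSF prior
    gm = np.array([0.7, 0.4])   # per-voxel GM prior
    background = np.abs((csf + gm) - 1.0)
    return background  # -> array([0.1, 0.1])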
class BabyBrainSegmentationTest(ScriptedLoadableModuleTest):
"""
This is the test case for your scripted module.
Uses ScriptedLoadableModuleTest base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def setUp(self):
""" Do whatever is needed to reset the state - typically a scene clear will be enough.
"""
slicer.mrmlScene.Clear(0)
def runTest(self):
"""Run as few or as many tests as needed here.
"""
self.setUp()
self.test_BabyBrainSegmentation1()
def test_BabyBrainSegmentation1(self):
""" Ideally you should have several levels of tests. At the lowest level
tests should exercise the functionality of the logic with different inputs
(both valid and invalid). At higher levels your tests should emulate the
way the user would interact with your code and confirm that it still works
the way you intended.
One of the most important features of the tests is that it should alert other
developers when their changes will have an impact on the behavior of your
module. For example, if a developer removes a feature that you depend on,
your test should break so they know that the feature is needed.
"""
self.delayDisplay("Starting the test")
#
# first, get some data
#
try:
    from urllib.request import urlretrieve  # Python 3
except ImportError:
    from urllib import urlretrieve  # Python 2
downloads = (
('http://slicer.kitware.com/midas3/download?items=5767', 'FA.nrrd', slicer.util.loadVolume),
)
for url,name,loader in downloads:
filePath = slicer.app.temporaryPath + '/' + name
if not os.path.exists(filePath) or os.stat(filePath).st_size == 0:
logging.info('Requesting download %s from %s...\n' % (name, url))
urlretrieve(url, filePath)
if loader:
logging.info('Loading %s...' % (name,))
loader(filePath)
self.delayDisplay('Finished with download and loading')
volumeNode = slicer.util.getNode(pattern="FA")
logic = BabyBrainSegmentationLogic()
self.assertIsNotNone( logic.hasImageData(volumeNode) )
self.delayDisplay('Test passed!') | slicer.mrmlScene.RemoveNode(brainstemOnlyLabelMask) |
form_group.rs | use seed::{prelude::*, *};
use std::borrow::Cow;
use std::fmt;
use std::rc::Rc;
pub struct FormGroup<'a, Ms: 'static> {
id: Cow<'a, str>,
label: Option<Cow<'a, str>>,
value: Option<Cow<'a, str>>,
input_event: Option<Rc<dyn Fn(String) -> Ms>>,
input_type: InputType,
is_invalid: bool,
invalid_feedback: Option<Cow<'a, str>>,
is_warning: bool,
warning_feedback: Option<Cow<'a, str>>,
help_text: Vec<Node<Ms>>,
group_attrs: Attrs,
input_attrs: Attrs,
}
impl<'a, Ms> FormGroup<'a, Ms> {
pub fn new(id: impl Into<Cow<'a, str>>) -> Self {
Self {
id: id.into(),
label: None,
value: None,
input_event: None,
input_type: InputType::Text,
is_invalid: false,
invalid_feedback: None,
is_warning: false,
warning_feedback: None,
help_text: Vec::new(),
group_attrs: Attrs::empty(),
input_attrs: Attrs::empty(),
}
}
pub fn label(mut self, label: impl Into<Cow<'a, str>>) -> Self |
pub fn value(mut self, value: impl Into<Cow<'a, str>>) -> Self {
self.value = Some(value.into());
self
}
pub fn on_input(mut self, input_event: impl Fn(String) -> Ms + Clone + 'static) -> Self {
self.input_event = Some(Rc::new(input_event));
self
}
pub fn text(mut self) -> Self {
self.input_type = InputType::Text;
self
}
pub fn number(mut self) -> Self {
self.input_type = InputType::Number;
self
}
pub fn password(mut self) -> Self {
self.input_type = InputType::Password;
self
}
pub fn textarea(mut self) -> Self {
self.input_type = InputType::Textarea;
self
}
pub fn checkbox(mut self) -> Self {
self.input_type = InputType::Checkbox;
self
}
pub fn select(mut self, options: Vec<(String, String)>, include_none_option: bool) -> Self {
self.input_type = InputType::Select {
options,
include_none_option,
};
self
}
pub fn invalid(mut self, is_invalid: bool) -> Self {
self.is_invalid = is_invalid;
self
}
pub fn invalid_feedback(mut self, invalid_feedback: Option<impl Into<Cow<'a, str>>>) -> Self {
self.invalid_feedback = invalid_feedback.map(|s| s.into());
self
}
pub fn warning(mut self, is_warning: bool) -> Self {
self.is_warning = is_warning;
self
}
pub fn warning_feedback(mut self, warning_feedback: Option<impl Into<Cow<'a, str>>>) -> Self {
self.warning_feedback = warning_feedback.map(|s| s.into());
self
}
pub fn help_text(mut self, help_text: impl Into<Cow<'static, str>>) -> Self {
self.help_text = Node::new_text(help_text).into_nodes();
self
}
pub fn help_nodes(mut self, help_nodes: impl IntoNodes<Ms>) -> Self {
self.help_text = help_nodes.into_nodes();
self
}
pub fn group_attrs(mut self, attrs: Attrs) -> Self {
self.group_attrs.merge(attrs);
self
}
pub fn input_attrs(mut self, attrs: Attrs) -> Self {
self.input_attrs.merge(attrs);
self
}
pub fn view(self) -> Node<Ms> {
if self.input_type == InputType::Checkbox {
self.view_checkbox()
} else {
self.view_textfield()
}
}
fn view_checkbox(self) -> Node<Ms> {
let is_checked = self
.value
.as_ref()
.map(|value| value == "true")
.unwrap_or(false);
let click_event_text = if is_checked {
"false".to_string()
} else {
"true".to_string()
};
div![
C!["form-group form-check"],
&self.group_attrs,
input![
C!["form-check-input", IF!(self.is_invalid => "is-invalid")],
&self.input_attrs,
id![&self.id],
attrs![
At::Type => "checkbox",
At::Value => "true",
At::Checked => is_checked.as_at_value()
],
self.input_event.clone().map(|input_event| {
input_ev(Ev::Input, move |_event| input_event(click_event_text))
})
],
self.label.as_ref().map(|label| {
label![
C!["form-check-label"],
attrs![
At::For => self.id
],
label
]
}),
if !self.help_text.is_empty() {
small![C!["form-text text-muted"], &self.help_text]
} else {
empty![]
},
self.invalid_feedback
.as_ref()
.filter(|_| self.is_invalid)
.map(|err| div![C!["invalid-feedback"], err]),
self.warning_feedback
.as_ref()
.filter(|_| self.is_warning)
.map(|err| small![C!["form-text text-warning"], err])
]
}
fn view_textfield(self) -> Node<Ms> {
div![
C!["form-group"],
&self.group_attrs,
self.label.as_ref().map(|label| {
label![
attrs![
At::For => self.id
],
label
]
}),
match &self.input_type {
InputType::Text | InputType::Number | InputType::Password => input![
C!["form-control", IF!(self.is_invalid => "is-invalid")],
&self.input_attrs,
id![&self.id],
attrs![
At::Type => &self.input_type,
],
self.value.as_ref().map(|value| attrs![At::Value => value]),
self.input_event.clone().map(|input_event| {
input_ev(Ev::Input, move |event| input_event(event))
})
],
InputType::Textarea => textarea![
C!["form-control", IF!(self.is_invalid => "is-invalid")],
&self.input_attrs,
id![&self.id],
self.value.as_ref().map(
|value| attrs![At::Value => value, At::Rows => value.split('\n').count(), At::Wrap => "off"]
),
self.input_event.clone().map(|input_event| {
input_ev(Ev::Input, move |event| input_event(event))
})
],
InputType::Select { options, include_none_option } => select![
C!["custom-select", IF!(self.is_invalid => "is-invalid")],
if *include_none_option { option![
attrs! {
At::Value => "",
At::Selected => self.value.is_none().as_at_value()
}
] } else { empty![] },
options.iter().map(|(key, display)| {
option![
attrs! {
At::Value => &key,
At::Selected => self.value.as_ref().map(|value| value == key).unwrap_or(false).as_at_value()
},
&display
]
}),
self.input_event.clone().map(|input_event| {
input_ev(Ev::Input, move |event| input_event(event))
})
],
InputType::Checkbox => empty![],
},
if !self.help_text.is_empty() { small![C!["form-text text-muted"], &self.help_text] } else { empty![] },
self.invalid_feedback
.as_ref()
.filter(|_| self.is_invalid)
.map(|err| div![C!["invalid-feedback"], err]),
self.warning_feedback
.as_ref()
.filter(|_| self.is_warning)
.map(|err| small![C!["form-text text-warning"], err])
]
}
}
impl<Ms> UpdateEl<Ms> for FormGroup<'_, Ms> {
fn update_el(self, el: &mut El<Ms>) {
self.view().update_el(el)
}
}
#[derive(PartialEq)]
enum InputType {
Text,
Number,
Password,
Textarea,
Checkbox,
Select {
options: Vec<(String, String)>,
include_none_option: bool,
},
}
impl fmt::Display for InputType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
match self {
Self::Text => write!(f, "text"),
Self::Number => write!(f, "number"),
Self::Password => write!(f, "password"),
Self::Textarea => write!(f, "textarea"),
Self::Checkbox => write!(f, "checkbox"),
Self::Select { .. } => write!(f, "select"),
}
}
}
| {
self.label = Some(label.into());
self
} |
app.tsx | import { MemberTableRow } from "./member-table-row";
import { MemberTable } from "./member-table";
export const App = () => {
const [members, setMembers] = React.useState<MemberEntity[]>([]);
React.useEffect(() => {
fetch(`https://api.github.com/orgs/lemoncode/members`)
.then((response) => response.json())
.then((json) => setMembers(json));
}, []);
return <MemberTable />;
}; | import React from "react";
import { MemberEntity } from "./model"; |
|
package.py | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyBackcall(PythonPackage):
| """Specifications for callback functions passed in to an API"""
homepage = "https://github.com/takluyver/backcall"
url = "https://pypi.io/packages/source/b/backcall/backcall-0.1.0.tar.gz"
version('0.1.0', '87ce0c7839808e6a3427d57df6a792e7') |
|
helpers.ts | type ObjectWithKeys = { [key: string]: unknown };
export const overrideDefinedPropsOnly = < | T1 extends ObjectWithKeys,
T2 extends ObjectWithKeys
>(
left: T1,
right: T2
): TResult => {
let result: ObjectWithKeys = { ...left };
for (const key of Object.keys(right)) {
const val = right[key];
if (val !== undefined) {
result[key] = val;
}
}
return (result as unknown) as TResult;
};
export const clamp = (value: number, min: number, max: number) => {
if (value < min) return min;
else if (value > max) return max;
else return value;
}; | TResult, |
__init__.py | import unittest
from class_methods import *
from delta import *
from dynamic import *
from indexes import *
from inheritance import *
from instance import *
from json_serialisation import *
from validation import *
if __name__ == '__main__':
unittest.main() | ||
sqlHistoryV2Manager.go | // Copyright (c) 2018 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package sql
import (
"database/sql"
"encoding/json"
"fmt"
"time"
"github.com/go-sql-driver/mysql"
"github.com/uber/cadence/.gen/go/shared"
"github.com/uber/cadence/.gen/go/sqlblobs"
"github.com/uber/cadence/common"
"github.com/uber/cadence/common/log"
p "github.com/uber/cadence/common/persistence"
"github.com/uber/cadence/common/persistence/sql/storage/sqldb"
)
type sqlHistoryV2Manager struct {
sqlStore
shardID int
}
// newHistoryV2Persistence creates an instance of HistoryV2Store
func newHistoryV2Persistence(db sqldb.Interface, logger log.Logger) (p.HistoryV2Store, error) {
return &sqlHistoryV2Manager{
sqlStore: sqlStore{
db: db,
logger: logger,
},
}, nil
}
func (m *sqlHistoryV2Manager) serializeAncestors(ans []*shared.HistoryBranchRange) ([]byte, error) {
ancestors, err := json.Marshal(ans)
if err != nil {
return nil, err
}
return ancestors, nil
}
func (m *sqlHistoryV2Manager) deserializeAncestors(jsonStr []byte) ([]*shared.HistoryBranchRange, error) {
var ans []*shared.HistoryBranchRange
err := json.Unmarshal(jsonStr, &ans)
if err != nil {
return nil, err
}
return ans, nil
}
// AppendHistoryNodes adds (or overrides) a node in a history branch
func (m *sqlHistoryV2Manager) AppendHistoryNodes(request *p.InternalAppendHistoryNodesRequest) error {
branchInfo := request.BranchInfo
beginNodeID := p.GetBeginNodeID(branchInfo)
if request.NodeID < beginNodeID {
return &p.InvalidPersistenceRequestError{
Msg: fmt.Sprintf("cannot append to ancestors' nodes"),
}
}
nodeRow := &sqldb.HistoryNodeRow{
TreeID: sqldb.MustParseUUID(branchInfo.GetTreeID()),
BranchID: sqldb.MustParseUUID(branchInfo.GetBranchID()),
NodeID: request.NodeID,
TxnID: &request.TransactionID,
Data: request.Events.Data,
DataEncoding: string(request.Events.Encoding),
ShardID: request.ShardID,
}
if request.IsNewBranch {
var ancestors []*shared.HistoryBranchRange
for _, anc := range branchInfo.Ancestors {
ancestors = append(ancestors, anc)
}
treeInfo := &sqlblobs.HistoryTreeInfo{
Ancestors: ancestors,
Info: &request.Info,
CreatedTimeNanos: common.TimeNowNanosPtr(),
}
blob, err := historyTreeInfoToBlob(treeInfo)
if err != nil {
return err
}
treeRow := &sqldb.HistoryTreeRow{
ShardID: request.ShardID,
TreeID: sqldb.MustParseUUID(branchInfo.GetTreeID()),
BranchID: sqldb.MustParseUUID(branchInfo.GetBranchID()),
InProgress: false,
Data: blob.Data,
DataEncoding: string(blob.Encoding),
}
return m.txExecute("AppendHistoryNodes", func(tx sqldb.Tx) error {
result, err := tx.InsertIntoHistoryNode(nodeRow)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected != 1 {
return fmt.Errorf("expected 1 row to be affected for node table, got %v", rowsAffected)
}
result, err = tx.InsertIntoHistoryTree(treeRow)
if err != nil {
return err
}
rowsAffected, err = result.RowsAffected()
if err != nil {
return err
}
if rowsAffected != 1 {
return fmt.Errorf("expected 1 row to be affected for tree table, got %v", rowsAffected)
}
return nil
})
}
_, err := m.db.InsertIntoHistoryNode(nodeRow)
if err != nil {
if sqlErr, ok := err.(*mysql.MySQLError); ok && sqlErr.Number == ErrDupEntry {
return &p.ConditionFailedError{Msg: fmt.Sprintf("AppendHistoryNodes: row already exist: %v", err)}
}
return &shared.InternalServiceError{Message: fmt.Sprintf("AppendHistoryEvents: %v", err)}
}
return nil
}
// ReadHistoryBranch returns history node data for a branch
func (m *sqlHistoryV2Manager) ReadHistoryBranch(request *p.InternalReadHistoryBranchRequest) (*p.InternalReadHistoryBranchResponse, error) {
minNodeID := request.MinNodeID
if request.NextPageToken != nil && len(request.NextPageToken) > 0 {
var lastNodeID int64
var err error
if lastNodeID, err = deserializePageToken(request.NextPageToken); err != nil {
return nil, &shared.InternalServiceError{
Message: fmt.Sprintf("invalid next page token %v", request.NextPageToken)}
}
minNodeID = lastNodeID + 1
}
filter := &sqldb.HistoryNodeFilter{
TreeID: sqldb.MustParseUUID(request.TreeID),
BranchID: sqldb.MustParseUUID(request.BranchID),
MinNodeID: &minNodeID,
MaxNodeID: &request.MaxNodeID,
PageSize: &request.PageSize,
ShardID: request.ShardID,
}
rows, err := m.db.SelectFromHistoryNode(filter)
if err == sql.ErrNoRows || (err == nil && len(rows) == 0) {
return &p.InternalReadHistoryBranchResponse{}, nil
}
if err != nil {
return nil, &shared.InternalServiceError{Message: fmt.Sprintf("ReadHistoryBranch: %v", err)}
}
history := make([]*p.DataBlob, 0, int(request.PageSize))
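// Start from -1 sentinels so the first row read always compares as newer than
// anything seen so far.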
lastNodeID := int64(-1)
lastTxnID := int64(-1)
eventBlob := &p.DataBlob{}
for _, row := range rows {
eventBlob.Data = row.Data
eventBlob.Encoding = common.EncodingType(row.DataEncoding)
switch {
case row.NodeID < lastNodeID:
return nil, &shared.InternalServiceError{
Message: fmt.Sprintf("corrupted data, nodeID cannot decrease"),
}
case row.NodeID == lastNodeID:
if *row.TxnID < lastTxnID {
// skip the nodes with smaller txn_id
continue
} else {
return nil, &shared.InternalServiceError{
Message: fmt.Sprintf("corrupted data, same nodeID must have smaller txnID"),
}
}
default: // row.NodeID > lastNodeID:
// NOTE: when row.NodeID > lastNodeID, we expect the row with the largest txnID to come first
lastTxnID = *row.TxnID
lastNodeID = row.NodeID
history = append(history, eventBlob)
eventBlob = &p.DataBlob{}
}
}
var pagingToken []byte
if len(rows) >= request.PageSize {
pagingToken = serializePageToken(lastNodeID)
}
response := &p.InternalReadHistoryBranchResponse{
History: history,
NextPageToken: pagingToken,
}
return response, nil
}
// ForkHistoryBranch forks a new branch from an existing branch
// Note that the application must provide a valid forking nodeID; it must be an existing nodeID in that branch.
// A valid forking nodeID can be an ancestor from the existing branch.
// For example, we have branch B1 with three nodes (1[1,2], 3[3,4,5] and 6[6,7,8]); 1, 3 and 6 are nodeIDs (the first eventID of each batch).
// So B1 looks like this:
// 1[1,2]
// /
// 3[3,4,5]
// /
// 6[6,7,8]
//
// Assume we have branch B2, which contains one ancestor B1 stopping at 6 (exclusive). So B2 inherits nodeIDs 1 and 3 from B1, and has its own nodeIDs 6 and 8.
// Branch B2 looks like this:
// 1[1,2]
// /
// 3[3,4,5]
// \
// 6[6,7]
// \
// 8[8]
//
// Now we want to fork a new branch B3 from B2.
// The only valid forking nodeIDs are 3,6 or 8.
// 1 is not valid because we can't fork from first node.
// 2/4/5 is NOT valid either because they are inside a batch.
//
// Case #1: If we fork from nodeID 6, then B3 will have an ancestor B1 which stops at 6(exclusive).
// As we append a batch of events[6,7,8,9] to B3, it will look like :
// 1[1,2]
// /
// 3[3,4,5]
// \
// 6[6,7,8,9]
//
// Case #2: If we fork from node 8, then B3 will have two ancestors: B1 stops at 6(exclusive) and ancestor B2 stops at 8(exclusive)
// As we append a batch of events[8,9] to B3, it will look like:
// 1[1,2]
// /
// 3[3,4,5]
// /
// 6[6,7]
// \
// 8[8,9]
//
func (m *sqlHistoryV2Manager) ForkHistoryBranch(request *p.InternalForkHistoryBranchRequest) (*p.InternalForkHistoryBranchResponse, error) {
forkB := request.ForkBranchInfo
treeID := *forkB.TreeID
newAncestors := make([]*shared.HistoryBranchRange, 0, len(forkB.Ancestors)+1)
beginNodeID := p.GetBeginNodeID(forkB)
if beginNodeID >= request.ForkNodeID {
// this is the case where the new branch's ancestors don't include the forking branch
for _, br := range forkB.Ancestors {
if *br.EndNodeID >= request.ForkNodeID {
newAncestors = append(newAncestors, &shared.HistoryBranchRange{
BranchID: br.BranchID,
BeginNodeID: br.BeginNodeID,
EndNodeID: common.Int64Ptr(request.ForkNodeID),
})
break
} else {
newAncestors = append(newAncestors, br)
}
}
} else {
// this is the case where the new branch inherits all ancestors from the forking branch
newAncestors = forkB.Ancestors
newAncestors = append(newAncestors, &shared.HistoryBranchRange{
BranchID: forkB.BranchID,
BeginNodeID: common.Int64Ptr(beginNodeID),
EndNodeID: common.Int64Ptr(request.ForkNodeID),
})
}
resp := &p.InternalForkHistoryBranchResponse{
NewBranchInfo: shared.HistoryBranch{
TreeID: &treeID,
BranchID: &request.NewBranchID,
Ancestors: newAncestors,
}}
treeInfo := &sqlblobs.HistoryTreeInfo{
Ancestors: newAncestors,
Info: &request.Info,
CreatedTimeNanos: common.TimeNowNanosPtr(),
}
blob, err := historyTreeInfoToBlob(treeInfo)
if err != nil {
return nil, err
}
row := &sqldb.HistoryTreeRow{
ShardID: request.ShardID,
TreeID: sqldb.MustParseUUID(treeID),
BranchID: sqldb.MustParseUUID(request.NewBranchID),
InProgress: true,
Data: blob.Data,
DataEncoding: string(blob.Encoding),
}
result, err := m.db.InsertIntoHistoryTree(row)
if err != nil {
return nil, err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return nil, err
}
if rowsAffected != 1 {
return nil, fmt.Errorf("expected 1 row to be affected for tree table, got %v", rowsAffected)
}
return resp, nil
}
// DeleteHistoryBranch removes a branch
func (m *sqlHistoryV2Manager) DeleteHistoryBranch(request *p.InternalDeleteHistoryBranchRequest) error {
branch := request.BranchInfo
treeID := *branch.TreeID
brsToDelete := branch.Ancestors
beginNodeID := p.GetBeginNodeID(branch)
brsToDelete = append(brsToDelete, &shared.HistoryBranchRange{
BranchID: branch.BranchID,
BeginNodeID: common.Int64Ptr(beginNodeID),
})
rsp, err := m.GetHistoryTree(&p.GetHistoryTreeRequest{
TreeID: treeID,
ShardID: common.IntPtr(request.ShardID),
})
if err != nil {
return err
}
// We won't delete the branch if any branch fork is in progress; we return an error instead.
if len(rsp.ForkingInProgressBranches) > 0 {
return &p.ConditionFailedError{
Msg: fmt.Sprintf("There are branches in progress of forking"),
}
}
// If no branch fork is in progress at this point, it is safe to calculate the ranges to delete
// based on the current result: the mutableState record was already deleted before we got here,
// so any future fork of this tree should fail.
// validBRsMaxEndNode records, for each branch range still in use, the max nodeID referred to by any valid branch.
validBRsMaxEndNode := map[string]int64{}
for _, b := range rsp.Branches {
for _, br := range b.Ancestors {
curr, ok := validBRsMaxEndNode[*br.BranchID]
if !ok || curr < *br.EndNodeID {
validBRsMaxEndNode[*br.BranchID] = *br.EndNodeID
}
}
}
return m.txExecute("DeleteHistoryBranch", func(tx sqldb.Tx) error {
branchID := sqldb.MustParseUUID(*branch.BranchID)
treeFilter := &sqldb.HistoryTreeFilter{
TreeID: sqldb.MustParseUUID(treeID),
BranchID: &branchID,
ShardID: request.ShardID,
}
_, err = tx.DeleteFromHistoryTree(treeFilter)
if err != nil {
return err
}
done := false
// For each branch range to delete, we iterate bottom up and delete up to the point allowed by validBRsMaxEndNode.
for i := len(brsToDelete) - 1; i >= 0; i-- {
br := brsToDelete[i]
maxReferredEndNodeID, ok := validBRsMaxEndNode[*br.BranchID] | nodeFilter := &sqldb.HistoryNodeFilter{
TreeID: sqldb.MustParseUUID(treeID),
BranchID: sqldb.MustParseUUID(*br.BranchID),
ShardID: request.ShardID,
}
if ok {
// we can only delete from the maxEndNode and stop here
nodeFilter.MinNodeID = &maxReferredEndNodeID
done = true
} else {
// No branch is using this range, so we can delete all of it
nodeFilter.MinNodeID = br.BeginNodeID
}
_, err := tx.DeleteFromHistoryNode(nodeFilter)
if err != nil {
return err
}
if done {
break
}
}
return nil
})
}
// CompleteForkBranch marks a fork as completed on success, or rolls it back (deleting its nodes and tree row) on failure
func (m *sqlHistoryV2Manager) CompleteForkBranch(request *p.InternalCompleteForkBranchRequest) error {
branch := request.BranchInfo
treeID := sqldb.MustParseUUID(*branch.TreeID)
branchID := sqldb.MustParseUUID(*branch.BranchID)
if request.Success {
row := &sqldb.HistoryTreeRow{
TreeID: treeID,
BranchID: branchID,
InProgress: false,
ShardID: request.ShardID,
}
result, err := m.db.UpdateHistoryTree(row)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected != 1 {
return fmt.Errorf("expected 1 row to be affected for tree table, got %v", rowsAffected)
}
return nil
}
// request.Success == false
treeFilter := &sqldb.HistoryTreeFilter{
TreeID: treeID,
BranchID: &branchID,
ShardID: request.ShardID,
}
nodeFilter := &sqldb.HistoryNodeFilter{
TreeID: treeID,
BranchID: branchID,
ShardID: request.ShardID,
MinNodeID: common.Int64Ptr(1),
}
return m.txExecute("CompleteForkBranch", func(tx sqldb.Tx) error {
_, err := tx.DeleteFromHistoryNode(nodeFilter)
if err != nil {
return err
}
// Note: we don't check the result of DeleteFromHistoryNode because it may legitimately delete zero nodes.
result, err := tx.DeleteFromHistoryTree(treeFilter)
if err != nil {
return err
}
rowsAffected, err := result.RowsAffected()
if err != nil {
return err
}
if rowsAffected != 1 {
return fmt.Errorf("expected 1 row to be affected for tree table, got %v", rowsAffected)
}
return nil
})
}
// GetHistoryTree returns all branch information of a tree
func (m *sqlHistoryV2Manager) GetHistoryTree(request *p.GetHistoryTreeRequest) (*p.GetHistoryTreeResponse, error) {
treeID := sqldb.MustParseUUID(request.TreeID)
branches := make([]*shared.HistoryBranch, 0)
forkingBranches := make([]p.ForkingInProgressBranch, 0)
treeFilter := &sqldb.HistoryTreeFilter{
TreeID: treeID,
ShardID: *request.ShardID,
}
rows, err := m.db.SelectFromHistoryTree(treeFilter)
if err == sql.ErrNoRows || (err == nil && len(rows) == 0) {
return &p.GetHistoryTreeResponse{}, nil
}
if err != nil {
return nil, err
}
for _, row := range rows {
treeInfo, err := historyTreeInfoFromBlob(row.Data, row.DataEncoding)
if err != nil {
return nil, err
}
if row.InProgress {
br := p.ForkingInProgressBranch{
BranchID: row.BranchID.String(),
ForkTime: time.Unix(0, treeInfo.GetCreatedTimeNanos()),
Info: treeInfo.GetInfo(),
}
forkingBranches = append(forkingBranches, br)
}
br := &shared.HistoryBranch{
TreeID: &request.TreeID,
BranchID: common.StringPtr(row.BranchID.String()),
Ancestors: treeInfo.Ancestors,
}
branches = append(branches, br)
}
return &p.GetHistoryTreeResponse{
Branches: branches,
ForkingInProgressBranches: forkingBranches,
}, nil
} | |
parser.py | from lib.utils import token, nodes
from lib import errors
#######################################
# PARSE RESULT
#######################################
class ParseResult:
def __init__(self):
self.error = None
self.node = None
self.last_registered_advance_count = 0
self.advanced_count = 0
self.to_reverse_count = 0
def register_advancement(self):
self.advanced_count += 1
self.last_registered_advance_count += 1
def register(self, res):
self.last_registered_advance_count = res.advanced_count
self.advanced_count += res.advanced_count
if res.error: self.error = res.error
return res.node
def try_register(self, res):
if res.error:
self.to_reverse_count = res.advanced_count
return None
return self.register(res)
def success(self, node):
self.node = node
return self
def failure(self, error):
if not self.error or self.advanced_count == 0:
self.error = error
return self
#######################################
# PARSER
#######################################
class Parser:
def __init__(self, tokens):
self.tokens = tokens
self.tok_idx = -1
self.advance()
def advance(self):
self.tok_idx += 1
self.update_current_tok()
return self.current_tok
def reverse(self, amount=1):
self.tok_idx -= amount
self.update_current_tok()
return self.current_tok
def | (self):
if self.tok_idx >= 0 and self.tok_idx < len(self.tokens):
self.current_tok = self.tokens[self.tok_idx]
def parse(self):
res = self.statements()
if not res.error and self.current_tok.type != token.T_EOF:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected '+', '-', '*' or '/'"
))
return res
###################################
def statements(self):
res = ParseResult()
statements = []
pos_start = self.current_tok.pos_start.copy()
while self.current_tok.type == token.T_NEWLINE:
res.register_advancement()
self.advance()
statement = res.register(self.statement())
if res.error: return res
statements.append(statement)
more_statements = True
while True:
newline_count = 0
while self.current_tok.type == token.T_NEWLINE:
res.register_advancement()
self.advance()
newline_count += 1
if newline_count == 0:
more_statements = False
if not more_statements: break
statement = res.try_register(self.statement())
if not statement:
self.reverse(res.to_reverse_count)
more_statements = False
continue
statements.append(statement)
return res.success(nodes.ListNode(
statements, pos_start, self.current_tok.pos_end.copy()
))
def statement(self):
res = ParseResult()
pos_start = self.current_tok.pos_start.copy()
if self.current_tok.matches(token.T_KEYWORD, 'return'):
res.register_advancement()
self.advance()
expr = res.try_register(self.expr())
if not expr:
self.reverse(res.to_reverse_count)
return res.success(nodes.ReturnNode(expr, pos_start, self.current_tok.pos_end.copy()))
if self.current_tok.matches(token.T_KEYWORD, 'continue'):
res.register_advancement()
self.advance()
return res.success(nodes.ContinueNode(pos_start, self.current_tok.pos_end.copy()))
if self.current_tok.matches(token.T_KEYWORD, 'break'):
res.register_advancement()
self.advance()
return res.success(nodes.BreakNode(pos_start, self.current_tok.pos_end.copy()))
expr = res.register(self.expr())
if res.error:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected 'break', 'continue', 'return', 'var', 'if', 'for', 'while', 'func', int, float, identifier, '+', '-', '(', ')' '[' or 'not'"
))
return res.success(expr)
def call(self):
res = ParseResult()
atom = res.register(self.atom())
if res.error: return res
if self.current_tok.type == token.T_LPAREN:
res.register_advancement()
self.advance()
arg_nodes = []
if self.current_tok.type == token.T_RPAREN:
res.register_advancement()
self.advance()
else:
arg_nodes.append(res.register(self.expr()))
if res.error:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ')', 'var', 'if', 'for', 'while', 'func', int, float, identifier, '+', '-', '(', '[' or 'not'"
))
while self.current_tok.type == token.T_COMMA:
res.register_advancement()
self.advance()
arg_nodes.append(res.register(self.expr()))
if res.error: return res
if self.current_tok.type != token.T_RPAREN:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ',' or ')'"
))
res.register_advancement()
self.advance()
return res.success(nodes.CallNode(atom, arg_nodes))
return res.success(atom)
def atom(self):
res = ParseResult()
tok = self.current_tok
if tok.type in (token.T_INT, token.T_FLOAT):
res.register_advancement()
self.advance()
return res.success(nodes.NumberNode(tok))
elif tok.type == token.T_STRING:
res.register_advancement()
self.advance()
return res.success(nodes.StringNode(tok))
elif tok.type == token.T_IDENTIFIER:
res.register_advancement()
self.advance()
return res.success(nodes.VarAccessNode(tok))
elif tok.type == token.T_LPAREN:
res.register_advancement()
self.advance()
expr = res.register(self.expr())
if res.error: return res
if self.current_tok.type == token.T_RPAREN:
res.register_advancement()
self.advance()
return res.success(expr)
else:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ')'"
))
elif tok.type == token.T_LSQUARE:
list_expr = res.register(self.list_expr())
if res.error: return res
return res.success(list_expr)
elif tok.matches(token.T_KEYWORD, 'if'):
if_expr = res.register(self.if_expr())
if res.error: return res
return res.success(if_expr)
elif tok.matches(token.T_KEYWORD, 'for'):
for_expr = res.register(self.for_expr())
if res.error: return res
return res.success(for_expr)
elif tok.matches(token.T_KEYWORD, 'while'):
while_expr = res.register(self.while_expr())
if res.error: return res
return res.success(while_expr)
elif tok.matches(token.T_KEYWORD, 'func'):
func_def = res.register(self.func_def())
if res.error: return res
return res.success(func_def)
return res.failure(errors.InvalidSyntaxError(
tok.pos_start, tok.pos_end,
"Expected int or float, identifier, '+', '-' or '(', , '[', 'if', 'for', 'while' or 'func'"
))
def power(self):
return self.bin_op(self.call, (token.T_POW, ), self.factor)
def list_expr(self):
res = ParseResult()
element_nodes = []
pos_start = self.current_tok.pos_start.copy()
if self.current_tok.type != token.T_LSQUARE:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected '['"
))
res.register_advancement()
self.advance()
if self.current_tok.type == token.T_RSQUARE:
res.register_advancement()
self.advance()
else:
element_nodes.append(res.register(self.expr()))
if res.error:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ']', 'var', 'if', 'for', 'while', 'func', int, float, identifier, '+', '-', '(', '[' or 'not'"
))
while self.current_tok.type == token.T_COMMA:
res.register_advancement()
self.advance()
element_nodes.append(res.register(self.expr()))
if res.error: return res
if self.current_tok.type != token.T_RSQUARE:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ',' or ']'"
))
res.register_advancement()
self.advance()
return res.success(nodes.ListNode(
element_nodes, pos_start, self.current_tok.pos_end.copy()
))
def factor(self):
res = ParseResult()
tok = self.current_tok
if tok.type in (token.T_PLUS, token.T_MINUS):
res.register_advancement()
self.advance()
factor = res.register(self.factor())
if res.error: return res
return res.success(nodes.UnaryOpNode(tok, factor))
return self.power()
def term(self):
return self.bin_op(self.factor, (token.T_MUL, token.T_DIV, token.T_INT_DIV, token.T_REMAINDER))
def arithm_expr(self):
return self.bin_op(self.term, (token.T_PLUS, token.T_MINUS))
def comp_expr(self):
res = ParseResult()
if self.current_tok.matches(token.T_KEYWORD, 'not'):
op_tok = self.current_tok
res.register_advancement()
self.advance()
node = res.register(self.comp_expr())
if res.error: return res
return res.success(nodes.UnaryOpNode(op_tok, node))
node = res.register(self.bin_op(self.arithm_expr, (token.T_EE, token.T_NE, token.T_LT, token.T_GT, token.T_LTE, token.T_GTE)))
if res.error:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected int or float, identifier, '+', '-', '(', '[' or 'not'"
))
return res.success(node)
def expr(self):
res = ParseResult()
if self.current_tok.matches(token.T_KEYWORD, 'var'):
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_IDENTIFIER:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected identifier"
))
var_name = self.current_tok
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_EQ:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected '='"
))
res.register_advancement()
self.advance()
expr = res.register(self.expr())
if res.error: return res
return res.success(nodes.VarAssignNode(var_name, expr))
node = res.register(self.bin_op(self.comp_expr, ((token.T_KEYWORD, 'and'), (token.T_KEYWORD, 'or'))))
if res.error:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected int or float, identifier, 'var', 'if', 'for', 'while', 'func', '+', '-', '(' or '['"
))
return res.success(node)
def if_expr(self):
res = ParseResult()
all_cases = res.register(self.if_expr_cases('if'))
if res.error: return res
cases, else_case = all_cases
return res.success(nodes.IfNode(cases, else_case))
def if_expr_cases(self, case_keyword):
res = ParseResult()
cases = []
else_case = None
if not self.current_tok.matches(token.T_KEYWORD, case_keyword):
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected '{case_keyword}'"
))
res.register_advancement()
self.advance()
condition = res.register(self.expr())
if res.error: return res
if not self.current_tok.type == token.T_COLON:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ':'"
))
res.register_advancement()
self.advance()
if self.current_tok.type == token.T_NEWLINE:
res.register_advancement()
self.advance()
statements = res.register(self.statements())
if res.error: return res
cases.append((condition, statements, True))
if self.current_tok.matches(token.T_KEYWORD, 'end'):
res.register_advancement()
self.advance()
else:
all_cases = res.register(self.if_expr_b_or_c())
if res.error: return res
new_cases, else_case = all_cases
cases.extend(new_cases)
else:
expr = res.register(self.statement())
if res.error: return res
cases.append((condition, expr, False))
all_cases = res.register(self.if_expr_b_or_c())
if res.error: return res
new_cases, else_case = all_cases
cases.extend(new_cases)
return res.success((cases, else_case))
def if_expr_b(self):
return self.if_expr_cases('elif')
def if_expr_c(self):
res = ParseResult()
else_case = None
if self.current_tok.matches(token.T_KEYWORD, 'else'):
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_COLON:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ':'"
))
res.register_advancement()
self.advance()
if self.current_tok.type == token.T_NEWLINE:
res.register_advancement()
self.advance()
statements = res.register(self.statements())
if res.error: return res
else_case = (statements, True)
if self.current_tok.matches(token.T_KEYWORD, 'end'):
res.register_advancement()
self.advance()
else:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected 'end'"
))
else:
expr = res.register(self.statement())
if res.error: return res
else_case = (expr, False)
return res.success(else_case)
def if_expr_b_or_c(self):
res = ParseResult()
cases, else_case = [], None
if self.current_tok.matches(token.T_KEYWORD, 'elif'):
all_cases = res.register(self.if_expr_b())
if res.error: return res
cases, else_case = all_cases
else:
else_case = res.register(self.if_expr_c())
if res.error: return res
return res.success((cases, else_case))
def for_expr(self):
res = ParseResult()
if not self.current_tok.matches(token.T_KEYWORD, 'for'):
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected 'for'"
))
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_IDENTIFIER:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected identifier"
))
var_name = self.current_tok
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_EQ:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected '='"
))
res.register_advancement()
self.advance()
start_value = res.register(self.expr())
if res.error: return res
if not self.current_tok.matches(token.T_KEYWORD, 'to'):
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected 'to'"
))
res.register_advancement()
self.advance()
end_value = res.register(self.expr())
if res.error: return res
if self.current_tok.matches(token.T_KEYWORD, 'step'):
res.register_advancement()
self.advance()
step_value = res.register(self.expr())
if res.error: return res
else:
step_value = None
if not self.current_tok.type == token.T_COLON:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ':'"
))
res.register_advancement()
self.advance()
if self.current_tok.type == token.T_NEWLINE:
res.register_advancement()
self.advance()
body = res.register(self.statements())
if res.error: return res
if not self.current_tok.matches(token.T_KEYWORD, 'end'):
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected 'end'"
))
res.register_advancement()
self.advance()
return res.success(nodes.ForNode(var_name, start_value, end_value, step_value, body, True))
body = res.register(self.statement())
if res.error: return res
return res.success(nodes.ForNode(var_name, start_value, end_value, step_value, body, False))
def while_expr(self):
res = ParseResult()
if not self.current_tok.matches(token.T_KEYWORD, 'while'):
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected 'while'"
))
res.register_advancement()
self.advance()
condition = res.register(self.expr())
if res.error: return res
if not self.current_tok.type == token.T_COLON:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected ':'"
))
res.register_advancement()
self.advance()
if self.current_tok.type == token.T_NEWLINE:
res.register_advancement()
self.advance()
body = res.register(self.statements())
if res.error: return res
if not self.current_tok.matches(token.T_KEYWORD, 'end'):
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected 'end'"
))
res.register_advancement()
self.advance()
return res.success(nodes.WhileNode(condition, body, True))
body = res.register(self.statement())
if res.error: return res
return res.success(nodes.WhileNode(condition, body, False))
def func_def(self):
res = ParseResult()
if not self.current_tok.matches(token.T_KEYWORD, 'func'):
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected 'func'"
))
res.register_advancement()
self.advance()
if self.current_tok.type == token.T_IDENTIFIER:
var_name_tok = self.current_tok
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_LPAREN:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected '('"
))
else:
var_name_tok = None
if self.current_tok.type != token.T_LPAREN:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected identifier or '('"
))
res.register_advancement()
self.advance()
arg_name_toks = []
if self.current_tok.type == token.T_IDENTIFIER:
arg_name_toks.append(self.current_tok)
res.register_advancement()
self.advance()
while self.current_tok.type == token.T_COMMA:
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_IDENTIFIER:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected identifier"
))
arg_name_toks.append(self.current_tok)
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_RPAREN:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected ',' or ')'"
))
else:
if self.current_tok.type != token.T_RPAREN:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected identifier or ')'"
))
res.register_advancement()
self.advance()
if self.current_tok.type == token.T_ARROW:
res.register_advancement()
self.advance()
body = res.register(self.expr())
if res.error: return res
return res.success(nodes.FunctionDefNode(
var_name_tok,
arg_name_toks,
body,
True
))
if self.current_tok.type != token.T_LCURLY:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected '{'"
))
res.register_advancement()
self.advance()
if self.current_tok.type != token.T_NEWLINE:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
f"Expected '->' or a new line"
))
res.register_advancement()
self.advance()
body = res.register(self.statements())
if res.error: return res
if self.current_tok.type != token.T_RCURLY:
return res.failure(errors.InvalidSyntaxError(
self.current_tok.pos_start, self.current_tok.pos_end,
"Expected '}'"
))
res.register_advancement()
self.advance()
return res.success(nodes.FunctionDefNode(
var_name_tok,
arg_name_toks,
body,
False
))
###################################
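# Generic left-associative binary-operator parser: parses func_a (op func_b)*,
# folding the results into a left-leaning chain of BinOpNodes. Passing a
# different func_b (as power() does with factor) lets right-associative
# operators recurse instead.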
def bin_op(self, func_a, ops, func_b=None):
if func_b is None:
func_b = func_a
res = ParseResult()
left = res.register(func_a())
if res.error: return res
while self.current_tok.type in ops or (self.current_tok.type, self.current_tok.value) in ops:
op_tok = self.current_tok
res.register_advancement()
self.advance()
right = res.register(func_b())
if res.error: return res
left = nodes.BinOpNode(left, op_tok, right)
return res.success(left)
| update_current_tok |
handler.go | // Copyright 2017 Michal Witkowski. All Rights Reserved.
// See LICENSE for licensing terms.
package grpc_proxy
import (
"io"
"golang.org/x/net/context"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
)
var (
clientStreamDescForProxying = &grpc.StreamDesc{
ServerStreams: true,
ClientStreams: true,
}
)
// RegisterService sets up a proxy handler for a particular gRPC service and method.
// The behaviour is the same as if you were registering a handler method, e.g. from a code-generated pb.go file.
//
// This can *only* be used if the `server` also uses grpcproxy.CodecForServer() ServerOption.
func RegisterService(server *grpc.Server, director StreamDirector, serviceName string, methodNames ...string) {
streamer := &handler{director}
fakeDesc := &grpc.ServiceDesc{
ServiceName: serviceName,
HandlerType: (*interface{})(nil),
}
for _, m := range methodNames {
streamDesc := grpc.StreamDesc{
StreamName: m,
Handler: streamer.handler,
ServerStreams: true,
ClientStreams: true,
}
fakeDesc.Streams = append(fakeDesc.Streams, streamDesc)
}
server.RegisterService(fakeDesc, streamer)
}
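// Example (illustrative sketch, not part of this package's API): wiring a
// transparent proxy with a hypothetical single-backend director. The address
// "backend:9090" and the dial options are placeholders; the backend connection
// must use this package's raw-frame codec for pass-through to work.
//
//	director := func(ctx context.Context, fullMethodName string) (context.Context, *grpc.ClientConn, error) {
//		conn, err := grpc.DialContext(ctx, "backend:9090", grpc.WithInsecure())
//		return ctx, conn, err
//	}
//	server := grpc.NewServer(grpc.UnknownServiceHandler(TransparentHandler(director)))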
// TransparentHandler returns a handler that attempts to proxy all requests that are not registered in the server.
// The intended use here is as a transparent proxy, where the server doesn't know about the services implemented by the
// backends. It should be used as a `grpc.UnknownServiceHandler`.
//
// This can *only* be used if the `server` also uses grpcproxy.CodecForServer() ServerOption.
func TransparentHandler(director StreamDirector) grpc.StreamHandler |
type handler struct {
director StreamDirector
}
// handler is where the real magic of proxying happens.
// It is invoked like any gRPC server stream and uses the gRPC server framing to get and receive bytes from the wire,
// forwarding it to a ClientStream established against the relevant ClientConn.
func (s *handler) handler(srv interface{}, serverStream grpc.ServerStream) error {
// little bit of gRPC internals never hurt anyone
fullMethodName, ok := grpc.MethodFromServerStream(serverStream)
if !ok {
return grpc.Errorf(codes.Internal, "lowLevelServerStream not exists in context")
}
// We require that the director's returned context inherits from the serverStream.Context().
outgoingCtx, backendConn, err := s.director(serverStream.Context(), fullMethodName)
if err != nil {
return err
}
clientCtx, clientCancel := context.WithCancel(outgoingCtx)
// TODO(mwitkow): Add a `forwarded` header to metadata, https://en.wikipedia.org/wiki/X-Forwarded-For.
clientStream, err := grpc.NewClientStream(clientCtx, clientStreamDescForProxying, backendConn, fullMethodName)
if err != nil {
return err
}
// Explicitly *do not close* s2cErrChan and c2sErrChan, otherwise the select below will not terminate.
// Channels do not have to be closed, it is just a control flow mechanism, see
// https://groups.google.com/forum/#!msg/golang-nuts/pZwdYRGxCIk/qpbHxRRPJdUJ
s2cErrChan := s.forwardServerToClient(serverStream, clientStream)
c2sErrChan := s.forwardClientToServer(clientStream, serverStream)
// We don't know which side is going to stop sending first, so we need a select between the two.
for i := 0; i < 2; i++ {
select {
case s2cErr := <-s2cErrChan:
if s2cErr == io.EOF {
// this is the happy case where the sender has encountered io.EOF and won't be sending anymore;
// the clientStream>serverStream may continue pumping though.
clientStream.CloseSend()
break
} else {
// however, we may have gotten a receive error (stream disconnected, a read error etc) in which case we need
// to cancel the clientStream to the backend, let all of its goroutines be freed up by the CancelFunc and
// exit with an error to the stack
clientCancel()
return grpc.Errorf(codes.Internal, "failed proxying s2c: %v", s2cErr)
}
case c2sErr := <-c2sErrChan:
// This happens when the clientStream has nothing else to offer (io.EOF) or returned a gRPC error. In those two
// cases we may have received Trailers as part of the call. In case of other errors (stream closed) the trailers
// will be nil.
serverStream.SetTrailer(clientStream.Trailer())
// c2sErr will contain RPC error from client code. If not io.EOF return the RPC error as server stream error.
if c2sErr != io.EOF {
return c2sErr
}
return nil
}
}
return grpc.Errorf(codes.Internal, "gRPC proxying should never reach this stage.")
}
func (s *handler) forwardClientToServer(src grpc.ClientStream, dst grpc.ServerStream) chan error {
ret := make(chan error, 1)
go func() {
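// frame (defined alongside this package's proxy codec) holds the raw message
// payload, so bytes are forwarded without being decoded into protobufs.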
f := &frame{}
for i := 0; ; i++ {
if err := src.RecvMsg(f); err != nil {
ret <- err // this can be io.EOF which is happy case
break
}
if i == 0 {
// This is a bit of a hack, but client to server headers are only readable after first client msg is
// received but must be written to server stream before the first msg is flushed.
// This is the only place to do it nicely.
md, err := src.Header()
if err != nil {
ret <- err
break
}
if err := dst.SendHeader(md); err != nil {
ret <- err
break
}
}
if err := dst.SendMsg(f); err != nil {
ret <- err
break
}
}
}()
return ret
}
func (s *handler) forwardServerToClient(src grpc.ServerStream, dst grpc.ClientStream) chan error {
ret := make(chan error, 1)
go func() {
f := &frame{}
for i := 0; ; i++ {
if err := src.RecvMsg(f); err != nil {
ret <- err // this can be io.EOF which is happy case
break
}
if err := dst.SendMsg(f); err != nil {
ret <- err
break
}
}
}()
return ret
}
| {
streamer := &handler{director}
return streamer.handler
} |
_circular_projection.py | from typing import Any, Tuple, Union, Mapping, Callable, Optional, Sequence
from typing_extensions import Literal
from enum import auto
from types import MappingProxyType
from pathlib import Path
import scvelo as scv
from anndata import AnnData
from cellrank import logging as logg
from cellrank.tl import Lineage
from cellrank._key import Key
from scanpy._utils import deprecated_arg_names
from cellrank.tl._enum import ModeEnum
from cellrank.ul._docs import d
from cellrank.pl._utils import _held_karp
from cellrank.tl._utils import save_fig, _unique_order_preserving
from cellrank.ul._utils import _check_collection
from cellrank.tl._lineage import PrimingDegree
import numpy as np
import pandas as pd
from sklearn.metrics import pairwise_distances
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm, LinearSegmentedColormap
from matplotlib.collections import LineCollection
class LineageOrder(ModeEnum): # noqa: D101
DEFAULT = auto()
OPTIMAL = auto()
class LabelRot(ModeEnum): # noqa: D101
|
Metric_T = Union[str, Callable, np.ndarray, pd.DataFrame]
_N = 200
def _get_distances(data: Union[np.ndarray, Lineage], metric: Metric_T) -> np.ndarray:
if isinstance(data, Lineage):
data = data.X
if isinstance(metric, str) or callable(metric):
metric = pairwise_distances(data.T, metric=metric)
elif isinstance(metric, (pd.DataFrame, np.ndarray)):
shape = (data.shape[1], data.shape[1])
if metric.shape != shape:
raise ValueError(
f"Expected an `numpy.array` or `pandas.DataFrame` of shape `{shape}`, found `{metric.shape}`."
)
else:
raise TypeError(
f"Expected either metric defined by `str`, `callable` or a pairwise distance matrix of type"
f" `numpy.ndarray` or `pandas.DataFrame`, found `{type(metric).__name__}`."
)
return np.asarray(metric, dtype=np.float64)
def _get_optimal_order(data: Lineage, metric: Metric_T) -> Tuple[float, np.ndarray]:
"""Solve the TSP using dynamic programming."""
return _held_karp(_get_distances(data, metric))
@d.dedent
@deprecated_arg_names({"labeldistance": "label_distance", "labelrot": "label_rot"})
def circular_projection(
adata: AnnData,
keys: Union[str, Sequence[str]],
backward: bool = False,
lineages: Optional[Union[str, Sequence[str]]] = None,
early_cells: Optional[Union[Mapping[str, Sequence[str]], Sequence[str]]] = None,
lineage_order: Optional[Literal["default", "optimal"]] = None,
metric: Union[str, Callable, np.ndarray, pd.DataFrame] = "correlation",
normalize_by_mean: bool = True,
ncols: int = 4,
space: float = 0.25,
use_raw: bool = False,
text_kwargs: Mapping[str, Any] = MappingProxyType({}),
label_distance: float = 1.25,
label_rot: Union[Literal["default", "best"], float] = "best",
show_edges: bool = True,
key_added: Optional[str] = None,
figsize: Optional[Tuple[float, float]] = None,
dpi: Optional[int] = None,
save: Optional[Union[str, Path]] = None,
**kwargs: Any,
):
r"""
Plot absorption probabilities on a circular embedding as in :cite:`velten:17`.
Parameters
----------
%(adata)s
keys
Keys in :attr:`anndata.AnnData.obs` or :attr:`anndata.AnnData.var_names`. Additional keys are:
- `'kl_divergence'` - as in :cite:`velten:17`, computes KL-divergence between the fate probabilities
of a cell and the average fate probabilities. See ``early_cells`` for more information.
- `'entropy'` - as in :cite:`setty:19`, computes entropy over a cell's fate probabilities.
%(backward)s
lineages
Lineages to plot. If `None`, plot all lineages.
early_cells
Cell ids or a mask marking early cells used to define the average fate probabilities. If `None`, use all cells.
Only used when `'kl_divergence'` is in ``keys``. If a :class:`dict`, key specifies a cluster key in
:attr:`anndata.AnnData.obs` and the values specify cluster labels containing early cells.
lineage_order
Can be one of the following:
- `None` - it will be determined automatically, based on the number of lineages.
- `'optimal'` - order lineages optimally by solving the Travelling salesman problem (TSP).
Recommended for <= `20` lineages.
- `'default'` - use the order as specified by ``lineages``.
metric
Metric to use when constructing pairwise distance matrix when ``lineage_order = 'optimal'``. For available
options, see :func:`sklearn.metrics.pairwise_distances`.
normalize_by_mean
If `True`, normalize each lineage by its mean probability, as done in :cite:`velten:17`.
ncols
Number of columns when plotting multiple ``keys``.
space
Horizontal and vertical space between panels, passed to :func:`matplotlib.pyplot.subplots_adjust`.
use_raw
Whether to access :attr:`anndata.AnnData.raw` when there are ``keys`` in :attr:`anndata.AnnData.var_names`.
text_kwargs
Keyword arguments for :func:`matplotlib.pyplot.text`.
label_distance
Distance at which the lineage labels will be drawn.
label_rot
How to rotate the labels. Valid options are:
- `'best'` - rotate labels so that they are easily readable.
- `'default'` - use :mod:`matplotlib`'s default.
- `None` - same as `'default'`.
If a :class:`float`, all labels will be rotated by this many degrees.
show_edges
Whether to show the edges surrounding the simplex.
key_added
Key in :attr:`anndata.AnnData.obsm` where to add the circular embedding. If `None`, it will be set to
`'X_fate_simplex_{fwd,bwd}'`, based on ``backward``.
%(plotting)s
kwargs
Keyword arguments for :func:`scvelo.pl.scatter`.
Returns
-------
%(just_plots)s
Also updates ``adata`` with the following fields:
- :attr:`anndata.AnnData.obsm` ``['{key_added}']`` - the circular projection.
- :attr:`anndata.AnnData.obs` ``['to_{initial,terminal}_states_{method}']`` - the priming degree,
if a method is present in ``keys``.
"""
if label_distance is not None and label_distance < 0:
raise ValueError(
f"Expected `label_distance` to be positive, found `{label_distance}`."
)
if label_rot is None:
label_rot = LabelRot.DEFAULT
label_rot = LabelRot(label_rot)
suffix = "bwd" if backward else "fwd"
if key_added is None:
key_added = "X_fate_simplex_" + suffix
if isinstance(keys, str):
keys = (keys,)
keys = _unique_order_preserving(keys)
keys_ = _check_collection(
adata, keys, "obs", key_name="Observation", raise_exc=False
) + _check_collection(
adata, keys, "var_names", key_name="Gene", raise_exc=False, use_raw=use_raw
)
haystack = set(PrimingDegree)
keys = keys_ + [k for k in keys if k in haystack]
keys = _unique_order_preserving(keys)
if not len(keys):
raise ValueError("No valid keys have been selected.")
lineage_key = Key.obsm.abs_probs(backward)
if lineage_key not in adata.obsm:
raise KeyError(f"Lineages key `{lineage_key!r}` not found in `adata.obsm`.")
probs: Lineage = adata.obsm[lineage_key]
if isinstance(lineages, str):
lineages = (lineages,)
elif lineages is None:
lineages = probs.names
probs = adata.obsm[lineage_key][lineages]
n_lin = probs.shape[1]
if n_lin < 3:
raise ValueError(f"Expected at least `3` lineages, found `{n_lin}`.")
X = probs.X.copy()
if normalize_by_mean:
X /= np.mean(X, axis=0)[None, :]
X /= X.sum(1)[:, None]
# this happens when a cell's probabilities across the selected lineages sum to 0 (or when a lineage average is 0, which is unlikely)
X = np.nan_to_num(X, nan=1.0 / n_lin, copy=False)
if lineage_order is None:
lineage_order = (
LineageOrder.OPTIMAL if 3 < n_lin <= 20 else LineageOrder.DEFAULT
)
logg.debug(f"Set ordering to `{lineage_order}`")
lineage_order = LineageOrder(lineage_order)
if lineage_order == LineageOrder.OPTIMAL:
logg.info(f"Solving TSP for `{n_lin}` states")
_, order = _get_optimal_order(X, metric=metric)
else:
order = np.arange(n_lin)
probs = probs[:, order]
X = X[:, order]
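# Circular projection: lineage i is pinned at angle theta_i on the unit circle,
# and each cell is placed at the probability-weighted barycenter of the anchors:
# x = sum_i p_i * cos(theta_i), y = sum_i p_i * sin(theta_i).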
angle_vec = np.linspace(0, 2 * np.pi, n_lin, endpoint=False)
angle_vec_sin = np.cos(angle_vec)
angle_vec_cos = np.sin(angle_vec)
x = np.sum(X * angle_vec_sin, axis=1)
y = np.sum(X * angle_vec_cos, axis=1)
adata.obsm[key_added] = np.c_[x, y]
nrows = int(np.ceil(len(keys) / ncols))
fig, ax = plt.subplots(
nrows=nrows,
ncols=ncols,
figsize=(ncols * 5, nrows * 5) if figsize is None else figsize,
dpi=dpi,
)
fig.subplots_adjust(wspace=space, hspace=space)
axes = np.ravel([ax])
text_kwargs = dict(text_kwargs)
text_kwargs["ha"] = "center"
text_kwargs["va"] = "center"
_i = 0
for _i, (k, ax) in enumerate(zip(keys, axes)):
set_lognorm, colorbar = False, kwargs.pop("colorbar", True)
try:
_ = PrimingDegree(k)
logg.debug(f"Calculating priming degree using `method={k}`")
val = probs.priming_degree(method=k, early_cells=early_cells)
k = f"{lineage_key}_{k}"
adata.obs[k] = val
except ValueError:
pass
scv.pl.scatter(
adata,
basis=key_added,
color=k,
show=False,
ax=ax,
use_raw=use_raw,
norm=LogNorm() if set_lognorm else None,
colorbar=colorbar,
**kwargs,
)
if colorbar and set_lognorm:
cbar = ax.collections[0].colorbar
cax = cbar.locator.axis
ticks = cax.minor.locator.tick_values(cbar.vmin, cbar.vmax)
ticks = [ticks[0], ticks[len(ticks) // 2 + 1], ticks[-1]]
cbar.set_ticks(ticks)
cbar.set_ticklabels([f"{t:.2f}" for t in ticks])
cbar.update_ticks()
patches, texts = ax.pie(
np.ones_like(angle_vec),
labeldistance=label_distance,
rotatelabels=True,
labels=probs.names[::-1],
startangle=-360 / len(angle_vec) / 2,
counterclock=False,
textprops=text_kwargs,
)
for patch in patches:
patch.set_visible(False)
# clockwise
for color, text in zip(probs.colors[::-1], texts):
if isinstance(label_rot, (int, float)):
text.set_rotation(label_rot)
elif label_rot == LabelRot.BEST:
rot = text.get_rotation()
text.set_rotation(rot + 90 + (1 - rot // 180) * 180)
elif label_rot != LabelRot.DEFAULT:
raise NotImplementedError(
f"Label rotation `{label_rot}` is not yet implemented."
)
text.set_color(color)
if not show_edges:
continue
for i, color in enumerate(probs.colors):
nxt = (i + 1) % n_lin  # avoid shadowing the builtin `next`
x = 1.04 * np.linspace(angle_vec_sin[i], angle_vec_sin[nxt], _N)
y = 1.04 * np.linspace(angle_vec_cos[i], angle_vec_cos[nxt], _N)
points = np.array([x, y]).T.reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)
cmap = LinearSegmentedColormap.from_list(
"abs_prob_cmap", [color, probs.colors[nxt]], N=_N
)
lc = LineCollection(segments, cmap=cmap, zorder=-1)
lc.set_array(np.linspace(0, 1, _N))
lc.set_linewidth(2)
ax.add_collection(lc)
for j in range(_i + 1, len(axes)):
axes[j].remove()
if save is not None:
save_fig(fig, save)
| DEFAULT = auto()
BEST = auto() |
ingress_test.go | /*
Copyright 2020 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package ingress
import (
"context"
"os"
"testing"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes/scheme"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
mf "github.com/manifestival/manifestival"
servingv1alpha1 "knative.dev/operator/pkg/apis/operator/v1alpha1"
"knative.dev/operator/pkg/reconciler/common"
util "knative.dev/operator/pkg/reconciler/common/testing"
)
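// numberIngressResource is the number of resources expected to be loaded from
// the testdata ingress manifests for a supported version.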
const numberIngressResource = 27
func TestGetIngress(t *testing.T) |
func TestAppendInstalledIngresses(t *testing.T) {
os.Setenv(common.KoEnvKey, "testdata/kodata")
defer os.Unsetenv(common.KoEnvKey)
tests := []struct {
name string
instance servingv1alpha1.KnativeServing
expected bool
expectedResourcesNum int
}{{
name: "Available installed ingresses",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{},
Status: servingv1alpha1.KnativeServingStatus{
Version: "0.18.1",
},
},
expected: true,
expectedResourcesNum: numberIngressResource,
}, {
name: "Available installed ingresses for missing status.version",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
CommonSpec: servingv1alpha1.CommonSpec{
Version: "0.18.1",
},
},
Status: servingv1alpha1.KnativeServingStatus{},
},
expected: true,
expectedResourcesNum: numberIngressResource,
}}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
manifest, _ := mf.ManifestFrom(mf.Slice{})
err := AppendInstalledIngresses(context.TODO(), &manifest, &tt.instance)
util.AssertEqual(t, err == nil, tt.expected)
util.AssertEqual(t, len(manifest.Resources()), tt.expectedResourcesNum)
})
}
}
func TestGetIngressWithFilters(t *testing.T) {
os.Setenv(common.KoEnvKey, "testdata/kodata")
defer os.Unsetenv(common.KoEnvKey)
version := "0.18"
tests := []struct {
name string
instance servingv1alpha1.KnativeServing
expectedManifestPath string
expected bool
}{{
name: "Enabled Istio ingress for target manifests",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
CommonSpec: servingv1alpha1.CommonSpec{
Version: version,
},
Ingress: &servingv1alpha1.IngressConfigs{
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
},
},
},
expected: true,
expectedManifestPath: os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/net-istio.yaml",
}, {
name: "Enabled Contour ingress for target manifests",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
CommonSpec: servingv1alpha1.CommonSpec{
Version: version,
},
Ingress: &servingv1alpha1.IngressConfigs{
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
},
},
},
expected: true,
expectedManifestPath: os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/net-contour.yaml",
}, {
name: "Enabled Kourier ingress for target manifests",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
CommonSpec: servingv1alpha1.CommonSpec{
Version: version,
},
Ingress: &servingv1alpha1.IngressConfigs{
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
},
},
},
expected: true,
expectedManifestPath: os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/kourier.yaml",
}, {
name: "Enabled Contour and Kourier ingress for target manifests",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
CommonSpec: servingv1alpha1.CommonSpec{
Version: version,
},
Ingress: &servingv1alpha1.IngressConfigs{
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
},
},
},
expected: true,
expectedManifestPath: os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/net-contour.yaml" + "," +
os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/kourier.yaml",
}, {
name: "Enabled Istio and Kourier ingress for target manifests",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
CommonSpec: servingv1alpha1.CommonSpec{
Version: version,
},
Ingress: &servingv1alpha1.IngressConfigs{
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
},
},
},
expected: true,
expectedManifestPath: os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/kourier.yaml" + "," +
os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/net-istio.yaml",
}, {
name: "Enabled Istio and Contour ingress for target manifests",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
CommonSpec: servingv1alpha1.CommonSpec{
Version: version,
},
Ingress: &servingv1alpha1.IngressConfigs{
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
},
},
},
expected: true,
expectedManifestPath: os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/net-contour.yaml" + "," +
os.Getenv(common.KoEnvKey) + "/ingress/" + version + "/net-istio.yaml",
}, {
name: "Enabled All ingresses for target manifests",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
CommonSpec: servingv1alpha1.CommonSpec{
Version: version,
},
Ingress: &servingv1alpha1.IngressConfigs{
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
},
},
},
expected: true,
expectedManifestPath: os.Getenv(common.KoEnvKey) + "/ingress/" + version,
}}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
targetIngressManifests, err := common.FetchManifest(tt.expectedManifestPath)
util.AssertEqual(t, err, nil)
manifest, _ := mf.ManifestFrom(mf.Slice{})
err = getIngress(version, &manifest)
util.AssertEqual(t, err == nil, tt.expected)
manifest = manifest.Filter(Filters(&tt.instance))
// The resources loaded for an enabled ingress should match the resources we
// expect from the ingress yaml file.
// The manifest can have more resources than targetIngressManifests, because a resource that is not
// labelled with an ingress provider is always kept. We therefore only verify that all the resources
// in targetIngressManifests exist in the manifest.
util.AssertEqual(t, len(targetIngressManifests.Filter(mf.Not(mf.In(manifest))).Resources()), 0)
})
}
}
func TestIngressFilter(t *testing.T) {
tests := []struct {
name string
ingressName string
label string
expected bool
}{{
name: "Available installed ingresses",
ingressName: "istio",
label: "istio",
expected: true,
}, {
name: "Missing ingress label",
ingressName: "istio",
label: "",
expected: true,
}, {
name: "Wrong ingress label",
ingressName: "istio",
label: "kourier",
expected: false,
}}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
u := makeIngressResource(t, "test-resource", "knative-serving", tt.label)
result := ingressFilter(tt.ingressName)(u)
util.AssertEqual(t, result, tt.expected)
})
}
}
// TestFilters checks whether a certain resource with a network provider label is correctly returned after passing
// the filters. If the resource is not labelled with the network provider label, it will be returned by default,
// regardless of the configuration of the filters.
func TestFilters(t *testing.T) {
servicename := "test-service"
namespace := "knative-serving"
tests := []struct {
name string
instance servingv1alpha1.KnativeServing
// This label is used to mark the tested resource to indicate which ingress it belongs to.
// Empty label means no label for the resource.
labels []string
// The expected result indicates whether the resource is kept or not.
// If it is true, the resource is kept after the filter.
// If it is false, the resource is removed after the filter.
expected []bool
}{{
name: "Enabled Istio ingress for all resources",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
},
},
},
labels: []string{"istio", "contour", "kourier", ""},
expected: []bool{true, false, false, true},
}, {
name: "Default ingress for all resources",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{},
},
labels: []string{"istio", "contour", "kourier", ""},
expected: []bool{true, false, false, true},
}, {
name: "Enabled kourier ingress for all resources",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
},
},
},
labels: []string{"istio", "contour", "kourier", ""},
expected: []bool{false, false, true, true},
}, {
name: "Enabled Contour ingress for all resources",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
},
},
},
labels: []string{"istio", "contour", "kourier", ""},
expected: []bool{false, true, false, true},
}, {
name: "Enabled Contour and Istio ingress for all resources",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
},
},
},
labels: []string{"istio", "contour", "kourier", ""},
expected: []bool{true, true, false, true},
}, {
name: "Enabled Kourier and Istio ingress for all resources",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
},
},
},
labels: []string{"istio", "contour", "kourier", ""},
expected: []bool{true, false, true, true},
}, {
name: "Enabled Kourier and Contour ingress for all resources",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
},
},
},
labels: []string{"istio", "contour", "kourier", ""},
expected: []bool{false, true, true, true},
}, {
name: "Enabled All ingress for all resources",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
},
},
},
labels: []string{"istio", "contour", "kourier", ""},
expected: []bool{true, true, true, true},
}}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
for i, label := range tt.labels {
ingressResource := makeIngressResource(t, servicename, namespace, label)
result := Filters(&tt.instance)(ingressResource)
util.AssertEqual(t, result, tt.expected[i])
}
})
}
}
// TODO: This test only verifies the number of transformers. It should be rewritten as a more thorough test.
func TestTransformers(t *testing.T) {
tests := []struct {
name string
instance servingv1alpha1.KnativeServing
expected int
}{{
name: "Available istio ingress",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
},
},
},
expected: 1,
}, {
name: "Available kourier ingress",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
},
},
},
expected: 2,
}, {
name: "Available contour ingress",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
},
},
},
expected: 0,
}, {
name: "Empty ingress for default istio",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{},
},
expected: 1,
}, {
name: "All ingresses enabled",
instance: servingv1alpha1.KnativeServing{
Spec: servingv1alpha1.KnativeServingSpec{
Ingress: &servingv1alpha1.IngressConfigs{
Contour: servingv1alpha1.ContourIngressConfiguration{
Enabled: true,
},
Kourier: servingv1alpha1.KourierIngressConfiguration{
Enabled: true,
},
Istio: servingv1alpha1.IstioIngressConfiguration{
Enabled: true,
},
},
},
},
expected: 3,
}}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
transformers := Transformers(context.TODO(), &tt.instance)
util.AssertEqual(t, len(transformers), tt.expected)
})
}
}
func makeIngressResource(t *testing.T, name, ns, ingressLabel string) *unstructured.Unstructured {
labels := map[string]string{}
if ingressLabel != "" {
labels = map[string]string{
"networking.knative.dev/ingress-provider": ingressLabel,
}
}
service := &v1.Service{
ObjectMeta: metav1.ObjectMeta{
Name: name,
Namespace: ns,
Labels: labels,
},
}
result := &unstructured.Unstructured{}
err := scheme.Scheme.Convert(service, result, nil)
if err != nil {
t.Fatalf("Could not create unstructured Service: %v, err: %v", service, err)
}
return result
}
| {
os.Setenv(common.KoEnvKey, "testdata/kodata")
defer os.Unsetenv(common.KoEnvKey)
tests := []struct {
name string
targetVersion string
expected bool
expectedResourcesNum int
}{{
name: "Available ingresses",
targetVersion: "0.18",
expected: true,
expectedResourcesNum: numberIngressResource,
}, {
name: "Unavailable ingresses",
targetVersion: "0.16",
expected: false,
expectedResourcesNum: 0,
}, {
name: "Missing version",
targetVersion: "",
expected: true,
expectedResourcesNum: 0,
}}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
manifest, _ := mf.ManifestFrom(mf.Slice{})
err := getIngress(tt.targetVersion, &manifest)
util.AssertEqual(t, err == nil, tt.expected)
util.AssertEqual(t, len(manifest.Resources()), tt.expectedResourcesNum)
})
}
} |
mem_loop.rs | fn main() {
let max = u32::MAX;
let mut v: Vec<u32> = Vec::new();
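// Deliberate memory-stress loop: pushing u32::MAX (~4.29 billion) 4-byte
// values asks for roughly 16 GiB and will likely abort on most machines.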
for _ in 0..max {
v.push(1);
| }
println!("vec: {:?}", v);
} |
|
lexer.rs | use std::iter::Peekable;
use std::str::FromStr;
use decimal::d128;
use crate::Token;
use crate::Operator::{Caret, Divide, LeftParen, Minus, Modulo, Multiply, Plus, RightParen};
use crate::UnaryOperator::{Percent, Factorial};
use crate::TextOperator::{Of, To};
use crate::NamedNumber::*;
use crate::Constant::{E, Pi};
use crate::LexerKeyword::{In, PercentChar, Per, Mercury, Hg, PoundForce, Force, DoubleQuotes, Revolution};
use crate::FunctionIdentifier::{Cbrt, Ceil, Cos, Exp, Abs, Floor, Ln, Log, Round, Sin, Sqrt, Tan};
use crate::units::Unit;
use crate::units::Unit::*;
use unicode_segmentation::{Graphemes, UnicodeSegmentation};
fn is_word_char_str(input: &str) -> bool {
match input {
"A" | "B" | "C" | "D" | "E" | "F" | "G" | "H" | "I" | "J" | "K" | "L"
| "M" | "N" | "O" | "P" | "Q" | "R" | "S" | "T" | "U" | "V" | "W" | "X"
| "Y" | "Z" => true,
"a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" | "k" | "l"
| "m" | "n" | "o" | "p" | "q" | "r" | "s" | "t" | "u" | "v" | "w" | "x"
| "y" | "z" => true,
"Ω" | "Ω" | "µ" | "μ" => true,
_ => false,
}
}
fn is_numeric_str(input: &str) -> bool {
match input {
"." => true,
"0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9" => true,
_ => false,
}
}
/// Read the next characters as a word, otherwise return an empty string.
/// Because whitespace is not skipped, leading whitespace also yields an empty string.
fn read_word_plain(chars: &mut Peekable<Graphemes>) -> String {
let mut word = String::new();
while let Some(next_char) = chars.peek() {
if is_word_char_str(&next_char) {
word += chars.next().unwrap();
} else {
break;
}
}
return word;
}
/// Read the next token as a word, otherwise return an empty string.
/// Leading whitespace is skipped, and a trailing exponent digit (2/3) may be included.
fn read_word(first_c: &str, lexer: &mut Lexer) -> String {
let chars = &mut lexer.chars;
let mut word = first_c.trim().to_owned();
if word == "" {
// skip whitespace
while let Some(current_char) = chars.peek() {
if current_char.trim().is_empty() {
chars.next();
} else {
break;
}
}
}
while let Some(next_char) = chars.peek() {
if is_word_char_str(&next_char) {
word += chars.next().unwrap();
} else {
break;
}
}
if word != "" {
match *chars.peek().unwrap_or(&"") {
"2" | "²" => {
word += "2";
chars.next();
},
"3" | "³" => {
word += "3";
chars.next();
},
_ => {},
}
}
return word;
}
fn parse_t | r, lexer: &mut Lexer) -> Result<(), String> {
let tokens = &mut lexer.tokens;
match c {
value if value.trim().is_empty() => {},
value if is_word_char_str(&value) => {
parse_word(read_word(c, lexer).as_str(), lexer)?;
},
value if is_numeric_str(value) => {
let mut number_string = value.to_owned();
while let Some(number_char) = lexer.chars.peek() {
if is_numeric_str(number_char) {
number_string += number_char;
lexer.chars.next();
} else {
break;
}
}
d128::set_status(decimal::Status::empty());
match d128::from_str(&number_string) {
Ok(number) => {
if d128::get_status().is_empty() {
tokens.push(Token::Number(number));
} else {
return Err(format!("Error lexing d128 number: {}", number_string));
}
},
Err(_e) => {
return Err(format!("Error lexing d128 number: {}", number_string));
}
};
},
"+" => tokens.push(Token::Operator(Plus)),
"-" => tokens.push(Token::Operator(Minus)),
"*" => tokens.push(Token::Operator(Multiply)),
"/" | "÷" => tokens.push(Token::Operator(Divide)),
"%" => tokens.push(Token::LexerKeyword(PercentChar)),
"^" => tokens.push(Token::Operator(Caret)),
"!" => tokens.push(Token::UnaryOperator(Factorial)),
"(" => {
// left_paren_count += 1;
tokens.push(Token::Operator(LeftParen));
},
")" => {
// right_paren_count += 1;
tokens.push(Token::Operator(RightParen));
},
"π" => tokens.push(Token::Constant(Pi)),
"'" => tokens.push(Token::Unit(Foot)),
"\"" | "“" | "”" | "″" => tokens.push(Token::LexerKeyword(DoubleQuotes)),
_ => {
return Err(format!("Invalid character: {}", c));
},
}
Ok(())
}
fn parse_word_if_non_empty(word: &str, lexer: &mut Lexer) -> Result<(), String> {
match word {
"" => Ok(()),
_ => parse_word(word, lexer)
}
}
fn parse_word(word: &str, lexer: &mut Lexer) -> Result<(), String> {
let token = match word {
"to" => Token::TextOperator(To),
"of" => Token::TextOperator(Of),
"hundred" => Token::NamedNumber(Hundred),
"thousand" => Token::NamedNumber(Thousand),
"mil" | "mill" | "million" => Token::NamedNumber(Million),
"bil" | "bill" | "billion" => Token::NamedNumber(Billion),
"tri" | "tril" | "trillion" => Token::NamedNumber(Trillion),
"quadrillion" => Token::NamedNumber(Quadrillion),
"quintillion" => Token::NamedNumber(Quintillion),
"sextillion" => Token::NamedNumber(Sextillion),
"septillion" => Token::NamedNumber(Septillion),
"octillion" => Token::NamedNumber(Octillion),
"nonillion" => Token::NamedNumber(Nonillion),
"decillion" => Token::NamedNumber(Decillion),
"undecillion" => Token::NamedNumber(Undecillion),
"duodecillion" => Token::NamedNumber(Duodecillion),
"tredecillion" => Token::NamedNumber(Tredecillion),
"quattuordecillion" => Token::NamedNumber(Quattuordecillion),
"quindecillion" => Token::NamedNumber(Quindecillion),
"sexdecillion" => Token::NamedNumber(Sexdecillion),
"septendecillion" => Token::NamedNumber(Septendecillion),
"octodecillion" => Token::NamedNumber(Octodecillion),
"novemdecillion" => Token::NamedNumber(Novemdecillion),
"vigintillion" => Token::NamedNumber(Vigintillion),
"centillion" => Token::NamedNumber(Centillion),
"googol" => Token::NamedNumber(Googol),
"pi" => Token::Constant(Pi),
"e" => Token::Constant(E),
"plus" => Token::Operator(Plus),
"minus" => Token::Operator(Minus),
"times" => Token::Operator(Multiply),
"multiplied" => {
match read_word("", lexer).as_str() {
"by" => Token::Operator(Multiply),
string => return Err(format!("Invalid string: {}", string)),
}
},
"divided" => {
match read_word("", lexer).as_str() {
"by" => Token::Operator(Divide),
string => return Err(format!("Invalid string: {}", string)),
}
},
"mod" => Token::Operator(Modulo),
"sqrt" => Token::FunctionIdentifier(Sqrt),
"cbrt" => Token::FunctionIdentifier(Cbrt),
"log" => Token::FunctionIdentifier(Log),
"ln" => Token::FunctionIdentifier(Ln),
"exp" => Token::FunctionIdentifier(Exp),
"round" | "rint" => Token::FunctionIdentifier(Round),
"ceil" => Token::FunctionIdentifier(Ceil),
"floor" => Token::FunctionIdentifier(Floor),
"abs" | "fabs" => Token::FunctionIdentifier(Abs),
"sin" => Token::FunctionIdentifier(Sin),
"cos" => Token::FunctionIdentifier(Cos),
"tan" => Token::FunctionIdentifier(Tan),
"per" => Token::LexerKeyword(Per),
"hg" => Token::LexerKeyword(Hg), // can be hectogram or mercury
"ns" | "nanosec" | "nanosecs" | "nanosecond" | "nanoseconds" => Token::Unit(Nanosecond),
// µ and μ are two different characters
"µs" | "μs" | "microsec" | "microsecs" | "microsecond" | "microseconds" => Token::Unit(Microsecond),
"ms" | "millisec" | "millisecs" | "millisecond" | "milliseconds" => Token::Unit(Millisecond),
"s" | "sec" | "secs" | "second" | "seconds" => Token::Unit(Second),
"min" | "mins" | "minute" | "minutes" => Token::Unit(Minute),
"h" | "hr" | "hrs" | "hour" | "hours" => Token::Unit(Hour),
"day" | "days" => Token::Unit(Day),
"wk" | "wks" | "week" | "weeks" => Token::Unit(Week),
"mo" | "mos" | "month" | "months" => Token::Unit(Month),
"q" | "quarter" | "quarters" => Token::Unit(Quarter),
"yr" | "yrs" | "year" | "years" => Token::Unit(Year),
"decade" | "decades" => Token::Unit(Decade),
"century" | "centuries" => Token::Unit(Century),
"millenium" | "millenia" | "milleniums" => Token::Unit(Millenium),
"mm" | "millimeter" | "millimeters" | "millimetre" | "millimetres" => Token::Unit(Millimeter),
"cm" | "centimeter" | "centimeters" | "centimetre" | "centimetres" => Token::Unit(Centimeter),
"dm" | "decimeter" | "decimeters" | "decimetre" | "decimetres" => Token::Unit(Decimeter),
"m" | "meter" | "meters" | "metre" | "metres" => Token::Unit(Meter),
"km" | "kilometer" | "kilometers" | "kilometre" | "kilometres" => Token::Unit(Kilometer),
"in" => Token::LexerKeyword(In),
"inch" | "inches" => Token::Unit(Inch),
"ft" | "foot" | "feet" => Token::Unit(Foot),
"yd" | "yard" | "yards" => Token::Unit(Yard),
"mi" | "mile" | "miles" => Token::Unit(Mile),
"nmi" => Token::Unit(NauticalMile),
"nautical" => {
match read_word("", lexer).as_str() {
"mile" | "miles" => Token::Unit(NauticalMile),
string => return Err(format!("Invalid string: {}", string)),
}
},
"ly" | "lightyear" | "lightyears" => Token::Unit(LightYear),
"lightsec" | "lightsecs" | "lightsecond" | "lightseconds" => Token::Unit(LightSecond),
"light" => {
match read_word("", lexer).as_str() {
"yr" | "yrs" | "year" | "years" => Token::Unit(LightYear),
"sec" | "secs" | "second" | "seconds" => Token::Unit(LightSecond),
string => return Err(format!("Invalid string: {}", string)),
}
}
"sqmm" | "mm2" | "millimeter2" | "millimeters2" | "millimetre2" | "millimetres2" => Token::Unit(SquareMillimeter),
"sqcm" | "cm2" | "centimeter2" | "centimeters2" | "centimetre2" | "centimetres2" => Token::Unit(SquareCentimeter),
"sqdm" | "dm2" | "decimeter2" | "decimeters2" | "decimetre2" | "decimetres2" => Token::Unit(SquareDecimeter),
"sqm" | "m2" | "meter2" | "meters2" | "metre2" | "metres2" => Token::Unit(SquareMeter),
"sqkm" | "km2" | "kilometer2" | "kilometers2" | "kilometre2" | "kilometres2" => Token::Unit(SquareKilometer),
"sqin" | "in2" | "inch2" | "inches2" => Token::Unit(SquareInch),
"sqft" | "ft2" | "foot2" | "feet2" => Token::Unit(SquareFoot),
"sqyd" | "yd2" | "yard2" | "yards2" => Token::Unit(SquareYard),
"sqmi" | "mi2" | "mile2" | "miles2" => Token::Unit(SquareMile),
"sq" | "square" => {
match read_word("", lexer).as_str() {
"mm" | "millimeter" | "millimeters" | "millimetre" | "millimetres" => Token::Unit(SquareMillimeter),
"cm" | "centimeter" | "centimeters" | "centimetre" | "centimetres" => Token::Unit(SquareCentimeter),
"dm" | "decimeter" | "decimeters" | "decimetre" | "decimetres" => Token::Unit(SquareDecimeter),
"m" | "meter" | "meters" | "metre" | "metres" => Token::Unit(SquareMeter),
"km" | "kilometer" | "kilometers" | "kilometre" | "kilometres" => Token::Unit(SquareKilometer),
"in" | "inch" | "inches" => Token::Unit(SquareInch),
"ft" | "foot" | "feet" => Token::Unit(SquareFoot),
"yd" | "yard" | "yards" => Token::Unit(SquareYard),
"mi" | "mile" | "miles" => Token::Unit(SquareMile),
string => return Err(format!("Invalid string: {}", string)),
}
}
"are" | "ares" => Token::Unit(Are),
"decare" | "decares" => Token::Unit(Decare),
"ha" | "hectare" | "hectares" => Token::Unit(Hectare),
"acre" | "acres" => Token::Unit(Acre),
"mm3" | "millimeter3" | "millimeters3" | "millimetre3" | "millimetres3" => Token::Unit(CubicMillimeter),
"cm3" | "centimeter3" | "centimeters3" | "centimetre3" | "centimetres3" => Token::Unit(CubicCentimeter),
"dm3" | "decimeter3" | "decimeters3" | "decimetre3" | "decimetres3" => Token::Unit(CubicDecimeter),
"m3" | "meter3" | "meters3" | "metre3" | "metres3" => Token::Unit(CubicMeter),
"km3" | "kilometer3" | "kilometers3" | "kilometre3" | "kilometres3" => Token::Unit(CubicKilometer),
"inc3" | "inch3" | "inches3" => Token::Unit(CubicInch),
"ft3" | "foot3" | "feet3" => Token::Unit(CubicFoot),
"yd3" | "yard3" | "yards3" => Token::Unit(CubicYard),
"mi3" | "mile3" | "miles3" => Token::Unit(CubicMile),
"cubic" => {
match read_word("", lexer).as_str() {
"mm" | "millimeter" | "millimeters" | "millimetre" | "millimetres" => Token::Unit(CubicMillimeter),
"cm" | "centimeter" | "centimeters" | "centimetre" | "centimetres" => Token::Unit(CubicCentimeter),
"dm" | "decimeter" | "decimeters" | "decimetre" | "decimetres" => Token::Unit(CubicDecimeter),
"m" | "meter" | "meters" | "metre" | "metres" => Token::Unit(CubicMeter),
"km" | "kilometer" | "kilometers" | "kilometre" | "kilometres" => Token::Unit(CubicKilometer),
"in" | "inch" | "inches" => Token::Unit(CubicInch),
"ft" | "foot" | "feet" => Token::Unit(CubicFoot),
"yd" | "yard" | "yards" => Token::Unit(CubicYard),
"mi" | "mile" | "miles" => Token::Unit(CubicMile),
string => return Err(format!("Invalid string: {}", string)),
}
},
"ml" | "milliliter" | "milliliters" | "millilitre" | "millilitres" => Token::Unit(Milliliter),
"cl" | "centiliter" | "centiliters" | "centilitre" | "centilitres" => Token::Unit(Centiliter),
"dl" | "deciliter" | "deciliters" | "decilitre" | "decilitres" => Token::Unit(Deciliter),
"l" | "liter" | "liters" | "litre" | "litres" => Token::Unit(Liter),
"ts" | "tsp" | "tspn" | "tspns" | "teaspoon" | "teaspoons" => Token::Unit(Teaspoon),
"tbs" | "tbsp" | "tablespoon" | "tablespoons" => Token::Unit(Tablespoon),
"floz" => Token::Unit(FluidOunce),
"fl" | "fluid" => {
match read_word("", lexer).as_str() {
"oz" | "ounce" | "ounces" => Token::Unit(FluidOunce),
string => return Err(format!("Invalid string: {}", string)),
}
},
"cup" | "cups" => Token::Unit(Cup),
"pt" | "pint" | "pints" => Token::Unit(Pint),
"qt" | "quart" | "quarts" => Token::Unit(Quart),
"gal" | "gallon" | "gallons" => Token::Unit(Gallon),
"bbl" => Token::Unit(OilBarrel),
"oil" => {
match read_word("", lexer).as_str() {
"barrel" | "barrels" => Token::Unit(OilBarrel),
string => return Err(format!("Invalid string: {}", string)),
}
},
"metric" => {
match read_word("", lexer).as_str() {
"ton" | "tons" | "tonne" | "tonnes" => Token::Unit(MetricTon),
"hp" | "hps" | "horsepower" | "horsepowers" => Token::Unit(MetricHorsepower),
string => return Err(format!("Invalid string: {}", string)),
}
},
"mg" | "milligram" | "milligrams" => Token::Unit(Milligram),
"g" | "gram" | "grams" => Token::Unit(Gram),
"hectogram" | "hectograms" => Token::Unit(Hectogram),
"kg" | "kilo" | "kilos" | "kilogram" | "kilograms" => Token::Unit(Kilogram),
"t" | "tonne" | "tonnes" => Token::Unit(MetricTon),
"oz" | "ounces" => Token::Unit(Ounce),
"lb" | "lbs" => Token::Unit(Pound),
"pound" | "pounds" => {
match lexer.chars.next() {
Some("-") => {
match read_word_plain(&mut lexer.chars).as_str() {
"force" => Token::LexerKeyword(PoundForce),
other => {
lexer.tokens.push(Token::Unit(Pound));
lexer.tokens.push(Token::Operator(Minus));
parse_word_if_non_empty(&other, lexer)?;
return Ok(());
}
}
},
Some(c) => {
lexer.tokens.push(Token::Unit(Pound));
parse_token(c, lexer)?;
return Ok(());
},
None => {
lexer.tokens.push(Token::Unit(Pound));
return Ok(());
},
}
},
"stone" | "stones" => Token::Unit(Stone),
"st" | "ton" | "tons" => Token::Unit(ShortTon),
"short" => {
match read_word("", lexer).as_str() {
"ton" | "tons" | "tonne" | "tonnes" => Token::Unit(ShortTon),
string => return Err(format!("Invalid string: {}", string)),
}
},
"lt" => Token::Unit(LongTon),
"long" => {
match read_word("", lexer).as_str() {
"ton" | "tons" | "tonne" | "tonnes" => Token::Unit(LongTon),
string => return Err(format!("Invalid string: {}", string)),
}
},
"bit" | "bits" => Token::Unit(Bit),
"kbit" | "kilobit" | "kilobits" => Token::Unit(Kilobit),
"mbit" | "megabit" | "megabits" => Token::Unit(Megabit),
"gbit" | "gigabit" | "gigabits" => Token::Unit(Gigabit),
"tbit" | "terabit" | "terabits" => Token::Unit(Terabit),
"pbit" | "petabit" | "petabits" => Token::Unit(Petabit),
"ebit" | "exabit" | "exabits" => Token::Unit(Exabit),
"zbit" | "zettabit" | "zettabits" => Token::Unit(Zettabit),
"ybit" | "yottabit" | "yottabits" => Token::Unit(Yottabit),
"kibit" | "kibibit" | "kibibits" => Token::Unit(Kibibit),
"mibit" | "mebibit" | "mebibits" => Token::Unit(Mebibit),
"gibit" | "gibibit" | "gibibits" => Token::Unit(Gibibit),
"tibit" | "tebibit" | "tebibits" => Token::Unit(Tebibit),
"pibit" | "pebibit" | "pebibits" => Token::Unit(Pebibit),
"eibit" | "exbibit" | "exbibits" => Token::Unit(Exbibit),
"zibit" | "zebibit" | "zebibits" => Token::Unit(Zebibit),
"yibit" | "yobibit" | "yobibits" => Token::Unit(Yobibit),
"byte" | "bytes" => Token::Unit(Byte),
"kb" | "kilobyte" | "kilobytes" => Token::Unit(Kilobyte),
"mb" | "megabyte" | "megabytes" => Token::Unit(Megabyte),
"gb" | "gigabyte" | "gigabytes" => Token::Unit(Gigabyte),
"tb" | "terabyte" | "terabytes" => Token::Unit(Terabyte),
"pb" | "petabyte" | "petabytes" => Token::Unit(Petabyte),
"eb" | "exabyte" | "exabytes" => Token::Unit(Exabyte),
"zb" | "zettabyte" | "zettabytes" => Token::Unit(Zettabyte),
"yb" | "yottabyte" | "yottabytes" => Token::Unit(Yottabyte),
"kib" | "kibibyte" | "kibibytes" => Token::Unit(Kibibyte),
"mib" | "mebibyte" | "mebibytes" => Token::Unit(Mebibyte),
"gib" | "gibibyte" | "gibibytes" => Token::Unit(Gibibyte),
"tib" | "tebibyte" | "tebibytes" => Token::Unit(Tebibyte),
"pib" | "pebibyte" | "pebibytes" => Token::Unit(Pebibyte),
"eib" | "exbibyte" | "exbibytes" => Token::Unit(Exbibyte),
"zib" | "zebibyte" | "zebibytes" => Token::Unit(Zebibyte),
"yib" | "yobibyte" | "yobibytes" => Token::Unit(Yobibyte),
"millijoule" | "millijoules" => Token::Unit(Millijoule),
"j"| "joule" | "joules" => Token::Unit(Joule),
"nm" => Token::Unit(NewtonMeter),
"newton" => {
match lexer.chars.next() {
Some("-") => {
match read_word_plain(&mut lexer.chars).as_str() {
"meter" | "meters" | "metre" | "metres" => Token::Unit(NewtonMeter),
string => return Err(format!("Invalid string: {}", string)),
}
},
Some(c) => {
match read_word(c, lexer).as_str() {
"meter" | "meters" | "metre" | "metres" => Token::Unit(NewtonMeter),
string => return Err(format!("Invalid string: {}", string)),
}
},
None => return Err(format!("Invalid string: {}", word)),
}
},
"kj" | "kilojoule" | "kilojoules" => Token::Unit(Kilojoule),
"mj" | "megajoule" | "megajoules" => Token::Unit(Megajoule),
"gj" | "gigajoule" | "gigajoules" => Token::Unit(Gigajoule),
"tj" | "terajoule" | "terajoules" => Token::Unit(Terajoule),
"cal" | "calorie" | "calories" => Token::Unit(Calorie),
"kcal" | "kilocalorie" | "kilocalories" => Token::Unit(KiloCalorie),
"btu" => Token::Unit(BritishThermalUnit),
"british" => {
match read_word("", lexer).as_str() {
"thermal" => {
match read_word("", lexer).as_str() {
"unit" | "units" => Token::Unit(BritishThermalUnit),
string => return Err(format!("Invalid string: {}", string)),
}
},
string => return Err(format!("Invalid string: {}", string)),
}
},
"wh" => Token::Unit(WattHour),
"kwh" => Token::Unit(KilowattHour),
"mwh" => Token::Unit(MegawattHour),
"gwh" => Token::Unit(GigawattHour),
"twh" => Token::Unit(TerawattHour),
"pwh" => Token::Unit(PetawattHour),
"milliwatt" | "milliwatts" => Token::Unit(Milliwatt),
"w" | "watts" => Token::Unit(Watt),
"kw" | "kilowatts" => Token::Unit(Kilowatt),
"mw" | "megawatts" => Token::Unit(Megawatt),
"gw" | "gigawatts" => Token::Unit(Gigawatt),
"tw" | "terawatts" => Token::Unit(Terawatt),
"pw" | "petawatts" => Token::Unit(Petawatt),
"hp" | "hps" | "horsepower" | "horsepowers" => Token::Unit(Horsepower),
"mhp" | "hpm" => Token::Unit(MetricHorsepower),
"watt" => {
match read_word("", lexer).as_str() {
"hr" | "hrs" | "hour" | "hours" => Token::Unit(WattHour),
other => {
lexer.tokens.push(Token::Unit(Watt));
parse_word_if_non_empty(other, lexer)?;
return Ok(());
},
}
}
"kilowatt" => {
match read_word("", lexer).as_str() {
"hr" | "hrs" | "hour" | "hours" => Token::Unit(KilowattHour),
other => {
lexer.tokens.push(Token::Unit(Kilowatt));
parse_word_if_non_empty(other, lexer)?;
return Ok(());
},
}
}
"megawatt" => {
match read_word("", lexer).as_str() {
"hr" | "hrs" | "hour" | "hours" => Token::Unit(MegawattHour),
other => {
lexer.tokens.push(Token::Unit(Megawatt));
parse_word_if_non_empty(other, lexer)?;
return Ok(());
},
}
}
"gigawatt" => {
match read_word("", lexer).as_str() {
"hr" | "hrs" | "hour" | "hours" => Token::Unit(GigawattHour),
other => {
lexer.tokens.push(Token::Unit(Gigawatt));
parse_word_if_non_empty(other, lexer)?;
return Ok(());
},
}
}
"terawatt" => {
match read_word("", lexer).as_str() {
"hr" | "hrs" | "hour" | "hours" => Token::Unit(TerawattHour),
other => {
lexer.tokens.push(Token::Unit(Terawatt));
parse_word_if_non_empty(other, lexer)?;
return Ok(());
},
}
}
"petawatt" => {
match read_word("", lexer).as_str() {
"hr" | "hrs" | "hour" | "hours" => Token::Unit(PetawattHour),
other => {
lexer.tokens.push(Token::Unit(Petawatt));
parse_word_if_non_empty(other, lexer)?;
return Ok(());
},
}
}
"ma" | "milliamp" | "milliamps" | "milliampere" | "milliamperes" => Token::Unit(Milliampere),
"a" | "amp" | "amps" | "ampere" | "amperes" => Token::Unit(Ampere),
"ka" | "kiloamp" | "kiloamps" | "kiloampere" | "kiloamperes" => Token::Unit(Kiloampere),
"bi" | "biot" | "biots" | "aba" | "abampere" | "abamperes" => Token::Unit(Abampere),
"mΩ" | "mΩ" | "milliohm" | "milliohms" => Token::Unit(Milliohm),
"Ω" | "Ω" | "ohm" | "ohms" => Token::Unit(Ohm),
"kΩ" | "kΩ" | "kiloohm" | "kiloohms" => Token::Unit(Kiloohm),
"mv" | "millivolt" | "millivolts" => Token::Unit(Millivolt),
"v" | "volt" | "volts" => Token::Unit(Volt),
"kv" | "kilovolt" | "kilovolts" => Token::Unit(Kilovolt),
// for pound-force per square inch
"lbf" => Token::LexerKeyword(PoundForce),
"force" => Token::LexerKeyword(Force),
"pa" | "pascal" | "pascals" => Token::Unit(Pascal),
"kpa" | "kilopascal" | "kilopascals" => Token::Unit(Kilopascal),
"atm" | "atms" | "atmosphere" | "atmospheres" => Token::Unit(Atmosphere),
"mbar" | "mbars" | "millibar" | "millibars" => Token::Unit(Millibar),
"bar" | "bars" => Token::Unit(Bar),
"inhg" => Token::Unit(InchOfMercury),
"mercury" => Token::LexerKeyword(Mercury),
"psi" => Token::Unit(PoundsPerSquareInch),
"torr" | "torrs" => Token::Unit(Torr),
"hz" | "hertz" => Token::Unit(Hertz),
"khz" | "kilohertz" => Token::Unit(Kilohertz),
"mhz" | "megahertz" => Token::Unit(Megahertz),
"ghz" | "gigahertz" => Token::Unit(Gigahertz),
"thz" | "terahertz" => Token::Unit(Terahertz),
"phz" | "petahertz" => Token::Unit(Petahertz),
"rpm" => Token::Unit(RevolutionsPerMinute),
"r" | "rev" | "revolution" | "revolutions" => Token::LexerKeyword(Revolution),
"kph" | "kmh" => Token::Unit(KilometersPerHour),
"mps" => Token::Unit(MetersPerSecond),
"mph" => Token::Unit(MilesPerHour),
"fps" => Token::Unit(FeetPerSecond),
"kn" | "kt" | "knot" | "knots" => Token::Unit(Knot),
"k" | "kelvin" | "kelvins" => Token::Unit(Kelvin),
"c" | "celsius" => Token::Unit(Celsius),
"f" | "fahrenheit" | "fahrenheits" => Token::Unit(Fahrenheit),
"deg" | "degree" | "degrees" => Token::Unit(lexer.default_degree),
string => {
return Err(format!("Invalid string: {}", string));
}
};
lexer.tokens.push(token);
return Ok(());
}
struct Lexer<'a> {
left_paren_count: u16,
right_paren_count: u16,
chars: Peekable<Graphemes<'a>>,
tokens: Vec<Token>,
default_degree: Unit,
}
/// Lexes an input string and returns [`Token`]s.
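///
/// A minimal usage sketch (input hypothetical; `false` keeps any trailing
/// operator, and `Unit::Celsius` is what a bare "deg"/"degree" resolves to):
///
/// ```ignore
/// let tokens = lex("100 kph in mph", false, Unit::Celsius)?;
/// ```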
pub fn lex(input: &str, remove_trailing_operator: bool, default_degree: Unit) -> Result<Vec<Token>, String> {
let mut input = input.replace(",", "").to_ascii_lowercase();
if remove_trailing_operator {
match &input.chars().last().unwrap_or('x') {
'+' | '-' | '*' | '/' | '^' | '(' => {
input.pop();
},
_ => {},
}
}
let mut lexer = Lexer {
left_paren_count: 0,
right_paren_count: 0,
chars: UnicodeSegmentation::graphemes(input.as_str(), true).peekable(),
tokens: Vec::new(),
default_degree,
};
while let Some(c) = lexer.chars.next() {
parse_token(c, &mut lexer)?;
}
let tokens = &mut lexer.tokens;
// auto insert missing parentheses in first and last position
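    // e.g. "(1 + 2" gets a ")" appended, and "1 + 2)" gets a "(" prepended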
if lexer.left_paren_count > lexer.right_paren_count {
let missing_right_parens = lexer.left_paren_count - lexer.right_paren_count;
for _ in 0..missing_right_parens {
tokens.push(Token::Operator(RightParen));
}
} else if lexer.left_paren_count < lexer.right_paren_count {
let missing_left_parens = lexer.right_paren_count - lexer.left_paren_count;
for _ in 0..missing_left_parens {
tokens.insert(0, Token::Operator(LeftParen));
}
}
    if tokens.is_empty() {
        return Err(String::from("Input was empty"));
    }
let mut token_index = 0;
loop {
match tokens[token_index] {
// decide if % is percent or modulo
Token::LexerKeyword(PercentChar) => {
match tokens.get(token_index + 1) {
Some(Token::TextOperator(Of)) => {
// "10% of 1km" should be percentage
tokens[token_index] = Token::UnaryOperator(Percent);
},
Some(Token::Operator(operator)) => {
match operator {
LeftParen => {
// "10%(2)" should be modulo
tokens[token_index] = Token::Operator(Modulo);
},
_ => {
// "10%*2" should be a percentage
tokens[token_index] = Token::UnaryOperator(Percent);
}
}
},
Some(Token::UnaryOperator(_)) => {
// "10%!" should be a percentage
tokens[token_index] = Token::UnaryOperator(Percent);
},
Some(Token::LexerKeyword(PercentChar)) => {
// "10%%" should be a percentage
tokens[token_index] = Token::UnaryOperator(Percent);
},
None => {
// percent if there's no element afterwards
tokens[token_index] = Token::UnaryOperator(Percent);
},
_ => {
// everything else should be modulo, for example if the % is
// before a number, function or constants
tokens[token_index] = Token::Operator(Modulo);
},
}
},
// decide if " is 'inch' or 'inch of mercury'
Token::LexerKeyword(DoubleQuotes) => {
match tokens.get(token_index + 1) {
Some(Token::LexerKeyword(Hg)) => {
// "hg should be inch of mercury
tokens[token_index] = Token::Unit(InchOfMercury);
tokens.remove(token_index + 1);
},
_ => {
// otherwise, Inch
tokens[token_index] = Token::Unit(Inch);
},
}
},
// if hg wasn't already turned into inch of mercury, it's hectogram
Token::LexerKeyword(Hg) => {
tokens[token_index] = Token::Unit(Hectogram);
},
// decide if "in" is Inch or To
Token::LexerKeyword(In) => {
match tokens.get(token_index + 1) {
Some(Token::Unit(_)) => {
// "in" should be To
tokens[token_index] = Token::TextOperator(To);
},
_ => {
// otherwise, Inch
tokens[token_index] = Token::Unit(Inch);
},
}
},
_ => {},
}
// parse units like km/h, lbf per square inch
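        // e.g. [Kilometer, Per (or Divide), Hour] collapses into the single
        // token KilometersPerHour, and the two consumed tokens are removed below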
if token_index >= 2 {
let token1 = &tokens[token_index-2];
let token2 = match &tokens[token_index-1] {
// treat km/h the same as km per h
Token::Operator(Divide) => &Token::LexerKeyword(Per),
_ => &tokens[token_index-1],
};
let token3 = &tokens[token_index];
let mut replaced = true;
match (token1, token2, token3) {
// km/h
(Token::Unit(Kilometer), Token::LexerKeyword(Per), Token::Unit(Hour)) => {
tokens[token_index-2] = Token::Unit(KilometersPerHour);
},
// mi/h
(Token::Unit(Mile), Token::LexerKeyword(Per), Token::Unit(Hour)) => {
tokens[token_index-2] = Token::Unit(MilesPerHour);
},
// m/s
(Token::Unit(Meter), Token::LexerKeyword(Per), Token::Unit(Second)) => {
tokens[token_index-2] = Token::Unit(MetersPerSecond);
},
// ft/s
(Token::Unit(Foot), Token::LexerKeyword(Per), Token::Unit(Second)) => {
tokens[token_index-2] = Token::Unit(FeetPerSecond);
},
// btu/min
(Token::Unit(BritishThermalUnit), Token::LexerKeyword(Per), Token::Unit(Minute)) => {
tokens[token_index-2] = Token::Unit(BritishThermalUnitsPerMinute);
},
// btu/h
(Token::Unit(BritishThermalUnit), Token::LexerKeyword(Per), Token::Unit(Hour)) => {
tokens[token_index-2] = Token::Unit(BritishThermalUnitsPerHour);
},
// lbs/sqin
(Token::LexerKeyword(PoundForce), Token::LexerKeyword(Per), Token::Unit(SquareInch)) => {
tokens[token_index-2] = Token::Unit(PoundsPerSquareInch);
},
// inch of mercury
(Token::Unit(Inch), Token::TextOperator(Of), Token::LexerKeyword(Mercury)) => {
tokens[token_index-2] = Token::Unit(InchOfMercury);
},
// revolutions per minute
(Token::LexerKeyword(Revolution), Token::LexerKeyword(Per), Token::Unit(Minute)) => {
tokens[token_index-2] = Token::Unit(RevolutionsPerMinute);
},
_ => {
replaced = false;
},
}
if replaced {
tokens.remove(token_index);
tokens.remove(token_index-1);
token_index -= 2;
}
}
if token_index == tokens.len()-1 {
break;
} else {
token_index += 1;
}
}
Ok(lexer.tokens)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::numtok;
use regex::Regex;
#[test]
fn test_lex() {
let strip_operator_spacing = Regex::new(r" ([+\-*/]) ").unwrap();
let strip_afterdigit_spacing = Regex::new(r"(\d) ").unwrap();
let run_lex = |input: &str, expected_tokens: Vec<Token>| {
let tokens = match lex(input, false, Unit::Celsius) {
Ok(tokens) => tokens,
Err(e) => {
panic!("lex error: {}\nrun_lex input: {}", e, input);
}
};
let info_msg = format!("run_lex input: {}\nexpected: {:?}\nreceived: {:?}", input, expected_tokens, tokens);
assert!(tokens == expected_tokens, "{}", info_msg);
// Prove we can handle multiple spaces wherever we handle a single space
let input_extra_spaces = input.replace(" ", " ");
let tokens_extra_spaces = lex(&input_extra_spaces, false, Unit::Celsius).unwrap();
assert!(tokens_extra_spaces == expected_tokens, "{}", info_msg);
// Prove we don't need spaces around operators
let input_stripped_spaces = strip_operator_spacing.replace_all(input, "$1");
let tokens_stripped_spaces = lex(&input_stripped_spaces, false, Unit::Celsius).unwrap();
assert!(tokens_stripped_spaces == expected_tokens, "{}", info_msg);
// Prove we don't need a space after a digit
let input_afterdigit_stripped_spaces = strip_afterdigit_spacing.replace_all(input, "$1");
let tokens_afterdigit_stripped_spaces = lex(&input_afterdigit_stripped_spaces, false, Unit::Celsius).unwrap();
assert!(tokens_afterdigit_stripped_spaces == expected_tokens, "{}", info_msg);
};
run_lex("88 kilometres * 2", vec![numtok!(88), Token::Unit(Kilometer), Token::Operator(Multiply), numtok!(2)]);
run_lex("100 nmi", vec![numtok!(100), Token::Unit(NauticalMile)]);
run_lex("101 nautical miles", vec![numtok!(101), Token::Unit(NauticalMile)]);
run_lex("2 lightyears", vec![numtok!(2), Token::Unit(LightYear)]);
run_lex("1 light year", vec![numtok!(1), Token::Unit(LightYear)]);
run_lex("10 lightsec", vec![numtok!(10), Token::Unit(LightSecond)]);
run_lex("12 light secs", vec![numtok!(12), Token::Unit(LightSecond)]);
run_lex("33.3 square meters", vec![numtok!(33.3), Token::Unit(SquareMeter)]);
run_lex("54 m2", vec![numtok!(54), Token::Unit(SquareMeter)]);
run_lex("87 sq miles", vec![numtok!(87), Token::Unit(SquareMile)]);
run_lex("500 feet2", vec![numtok!(500), Token::Unit(SquareFoot)]);
run_lex("500 feet²", vec![numtok!(500), Token::Unit(SquareFoot)]);
run_lex("4 cubic metres", vec![numtok!(4), Token::Unit(CubicMeter)]);
run_lex("34 cubic feet + 23 cubic yards", vec![numtok!(34), Token::Unit(CubicFoot), Token::Operator(Plus), numtok!(23), Token::Unit(CubicYard)]);
run_lex("66 inches3 + 65 millimetre³", vec![numtok!(66), Token::Unit(CubicInch), Token::Operator(Plus), numtok!(65), Token::Unit(CubicMillimeter)]);
run_lex("66 inches³ + 65 millimetre3", vec![numtok!(66), Token::Unit(CubicInch), Token::Operator(Plus), numtok!(65), Token::Unit(CubicMillimeter)]);
run_lex("42 millilitres", vec![numtok!(42), Token::Unit(Milliliter)]);
run_lex("3 tbs", vec![numtok!(3), Token::Unit(Tablespoon)]);
run_lex("6 floz", vec![numtok!(6), Token::Unit(FluidOunce)]);
run_lex("6 fl oz", vec![numtok!(6), Token::Unit(FluidOunce)]);
run_lex("6 fluid ounces", vec![numtok!(6), Token::Unit(FluidOunce)]);
run_lex("3 oil barrels", vec![numtok!(3), Token::Unit(OilBarrel)]);
run_lex("67 kg", vec![numtok!(67), Token::Unit(Kilogram)]);
run_lex("34 oz", vec![numtok!(34), Token::Unit(Ounce)]);
run_lex("34 ounces", vec![numtok!(34), Token::Unit(Ounce)]);
run_lex("210 lb", vec![numtok!(210), Token::Unit(Pound)]);
run_lex("210 lbs", vec![numtok!(210), Token::Unit(Pound)]);
run_lex("210 pound", vec![numtok!(210), Token::Unit(Pound)]);
run_lex("210 pounds", vec![numtok!(210), Token::Unit(Pound)]);
run_lex("210 pounds-force", vec![numtok!(210), Token::LexerKeyword(PoundForce)]);
run_lex("3 ton", vec![numtok!(3), Token::Unit(ShortTon)]);
run_lex("3 short tons", vec![numtok!(3), Token::Unit(ShortTon)]);
run_lex("4 lt", vec![numtok!(4), Token::Unit(LongTon)]);
run_lex("4 long tonnes", vec![numtok!(4), Token::Unit(LongTon)]);
run_lex("234 wh", vec![numtok!(234), Token::Unit(WattHour)]);
run_lex("1 w", vec![numtok!(1), Token::Unit(Watt)]);
run_lex("1 watt", vec![numtok!(1), Token::Unit(Watt)]);
run_lex("1 watts", vec![numtok!(1), Token::Unit(Watt)]);
run_lex("1 watt hour", vec![numtok!(1), Token::Unit(WattHour)]);
run_lex("0 watt + 1 watts", vec![numtok!(0), Token::Unit(Watt), Token::Operator(Plus), numtok!(1), Token::Unit(Watt)]);
run_lex("0 watt * 1", vec![numtok!(0), Token::Unit(Watt), Token::Operator(Multiply), numtok!(1)]);
run_lex("2 watts + 3 watts", vec![numtok!(2), Token::Unit(Watt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 watts * 3", vec![numtok!(2), Token::Unit(Watt), Token::Operator(Multiply), numtok!(3)]);
run_lex("4 watt plus 5 watts", vec![numtok!(4), Token::Unit(Watt), Token::Operator(Plus), numtok!(5), Token::Unit(Watt)]);
run_lex("4 watt times 5", vec![numtok!(4), Token::Unit(Watt), Token::Operator(Multiply), numtok!(5)]);
run_lex("6 watts plus 7 watts", vec![numtok!(6), Token::Unit(Watt), Token::Operator(Plus), numtok!(7), Token::Unit(Watt)]);
run_lex("6 watts times 7", vec![numtok!(6), Token::Unit(Watt), Token::Operator(Multiply), numtok!(7)]);
run_lex("2.3 kwh", vec![numtok!(2.3), Token::Unit(KilowattHour)]);
run_lex("1 kw", vec![numtok!(1), Token::Unit(Kilowatt)]);
run_lex("1 kilowatt", vec![numtok!(1), Token::Unit(Kilowatt)]);
run_lex("1 kilowatts", vec![numtok!(1), Token::Unit(Kilowatt)]);
run_lex("1 kilowatt hour", vec![numtok!(1), Token::Unit(KilowattHour)]);
run_lex("2 kilowatt + 3 watt", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 kilowatt * 4", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Multiply), numtok!(4)]);
run_lex("2 kilowatt times 4", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Multiply), numtok!(4)]);
run_lex("2 kilowatt + 3 watts", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 kilowatts + 3 watt", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 kilowatts + 3 watts", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 kilowatt plus 3 watt", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 kilowatt plus 3 watts", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 kilowatts plus 3 watt", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 kilowatts plus 3 watts", vec![numtok!(2), Token::Unit(Kilowatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("6.6 watts + 4 kilowatts", vec![numtok!(6.6), Token::Unit(Watt), Token::Operator(Plus), numtok!(4), Token::Unit(Kilowatt)]);
run_lex("6.6 watts plus 4 kilowatts", vec![numtok!(6.6), Token::Unit(Watt), Token::Operator(Plus), numtok!(4), Token::Unit(Kilowatt)]);
run_lex("2.3 mwh", vec![numtok!(2.3), Token::Unit(MegawattHour)]);
run_lex("1 mw", vec![numtok!(1), Token::Unit(Megawatt)]);
run_lex("1 megawatt", vec![numtok!(1), Token::Unit(Megawatt)]);
run_lex("1 megawatt hour", vec![numtok!(1), Token::Unit(MegawattHour)]);
run_lex("2 megawatt + 3 watt", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 megawatt * 6", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Multiply), numtok!(6)]);
run_lex("2 megawatt times 6", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Multiply), numtok!(6)]);
run_lex("2 megawatt + 3 watts", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 megawatts + 3 watt", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 megawatts + 3 watts", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 megawatt plus 3 watt", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 megawatt plus 3 watts", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 megawatts plus 3 watt", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("2 megawatts plus 3 watts", vec![numtok!(2), Token::Unit(Megawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Watt)]);
run_lex("6.6 watts + 4 megawatts", vec![numtok!(6.6), Token::Unit(Watt), Token::Operator(Plus), numtok!(4), Token::Unit(Megawatt)]);
run_lex("6.6 watts plus 4 megawatts", vec![numtok!(6.6), Token::Unit(Watt), Token::Operator(Plus), numtok!(4), Token::Unit(Megawatt)]);
run_lex("234 gwh", vec![numtok!(234), Token::Unit(GigawattHour)]);
run_lex("1 gw", vec![numtok!(1), Token::Unit(Gigawatt)]);
run_lex("1 gigawatt", vec![numtok!(1), Token::Unit(Gigawatt)]);
run_lex("1 gigawatts", vec![numtok!(1), Token::Unit(Gigawatt)]);
run_lex("1 gigawatt hour", vec![numtok!(1), Token::Unit(GigawattHour)]);
run_lex("0 gigawatt + 1 gigawatts", vec![numtok!(0), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(1), Token::Unit(Gigawatt)]);
run_lex("0 gigawatt * 1", vec![numtok!(0), Token::Unit(Gigawatt), Token::Operator(Multiply), numtok!(1)]);
run_lex("2 gigawatts + 3 gigawatts", vec![numtok!(2), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(3), Token::Unit(Gigawatt)]);
run_lex("2 gigawatts * 3", vec![numtok!(2), Token::Unit(Gigawatt), Token::Operator(Multiply), numtok!(3)]);
run_lex("4 gigawatt plus 5 watt", vec![numtok!(4), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(5), Token::Unit(Watt)]);
run_lex("4 gigawatt plus 5 megawatt", vec![numtok!(4), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(5), Token::Unit(Megawatt)]);
run_lex("4 gigawatt plus 5 gigawatt", vec![numtok!(4), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(5), Token::Unit(Gigawatt)]);
run_lex("4 gigawatt plus 5 watts", vec![numtok!(4), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(5), Token::Unit(Watt)]);
run_lex("4 gigawatt plus 5 megawatts", vec![numtok!(4), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(5), Token::Unit(Megawatt)]);
run_lex("4 gigawatt plus 5 gigawatts", vec![numtok!(4), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(5), Token::Unit(Gigawatt)]);
run_lex("4 gigawatt times 5", vec![numtok!(4), Token::Unit(Gigawatt), Token::Operator(Multiply), numtok!(5)]);
run_lex("6 gigawatts plus 7 watt", vec![numtok!(6), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(7), Token::Unit(Watt)]);
run_lex("6 gigawatts plus 7 megawatt", vec![numtok!(6), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(7), Token::Unit(Megawatt)]);
run_lex("6 gigawatts plus 7 gigawatt", vec![numtok!(6), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(7), Token::Unit(Gigawatt)]);
run_lex("6 gigawatts plus 7 watts", vec![numtok!(6), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(7), Token::Unit(Watt)]);
run_lex("6 gigawatts plus 7 megawatts", vec![numtok!(6), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(7), Token::Unit(Megawatt)]);
run_lex("6 gigawatts plus 7 gigawatts", vec![numtok!(6), Token::Unit(Gigawatt), Token::Operator(Plus), numtok!(7), Token::Unit(Gigawatt)]);
run_lex("6 gigawatts times 7", vec![numtok!(6), Token::Unit(Gigawatt), Token::Operator(Multiply), numtok!(7)]);
run_lex("88 mw * 3", vec![numtok!(88), Token::Unit(Megawatt), Token::Operator(Multiply), numtok!(3)]);
run_lex("88 mw times 3", vec![numtok!(88), Token::Unit(Megawatt), Token::Operator(Multiply), numtok!(3)]);
run_lex("999 kb", vec![numtok!(999), Token::Unit(Kilobyte)]);
run_lex("200 gb - 100 mb", vec![numtok!(200), Token::Unit(Gigabyte), Token::Operator(Minus), numtok!(100), Token::Unit(Megabyte)]);
run_lex("999 kib", vec![numtok!(999), Token::Unit(Kibibyte)]);
run_lex("200 gib - 100 mib", vec![numtok!(200), Token::Unit(Gibibyte), Token::Operator(Minus), numtok!(100), Token::Unit(Mebibyte)]);
run_lex("45 btu", vec![numtok!(45), Token::Unit(BritishThermalUnit)]);
run_lex("45.5 british thermal unit", vec![numtok!(45.5), Token::Unit(BritishThermalUnit)]);
run_lex("46 british thermal units", vec![numtok!(46), Token::Unit(BritishThermalUnit)]);
run_lex("5432 newton metres", vec![numtok!(5432), Token::Unit(NewtonMeter)]);
run_lex("2345 newton-meters", vec![numtok!(2345), Token::Unit(NewtonMeter)]);
run_lex("20 lbf", vec![numtok!(20), Token::LexerKeyword(PoundForce)]);
run_lex("60 hz", vec![numtok!(60), Token::Unit(Hertz)]);
run_lex("1100 rpm", vec![numtok!(1100), Token::Unit(RevolutionsPerMinute)]);
run_lex("1150 revolutions per minute", vec![numtok!(1150), Token::Unit(RevolutionsPerMinute)]);
run_lex("1 revolution per min", vec![numtok!(1), Token::Unit(RevolutionsPerMinute)]);
run_lex("4 revolution / mins", vec![numtok!(4), Token::Unit(RevolutionsPerMinute)]);
run_lex("1250 r / min", vec![numtok!(1250), Token::Unit(RevolutionsPerMinute)]);
run_lex("1300 rev / min", vec![numtok!(1300), Token::Unit(RevolutionsPerMinute)]);
run_lex("1350 rev / minute", vec![numtok!(1350), Token::Unit(RevolutionsPerMinute)]);
run_lex("1250 r per min", vec![numtok!(1250), Token::Unit(RevolutionsPerMinute)]);
run_lex("1300 rev per min", vec![numtok!(1300), Token::Unit(RevolutionsPerMinute)]);
run_lex("1350 rev per minute", vec![numtok!(1350), Token::Unit(RevolutionsPerMinute)]);
run_lex("100 kph", vec![numtok!(100), Token::Unit(KilometersPerHour)]);
run_lex("100 kmh", vec![numtok!(100), Token::Unit(KilometersPerHour)]);
run_lex("100 kilometers per hour", vec![numtok!(100), Token::Unit(KilometersPerHour)]);
run_lex("100 kilometre / hrs", vec![numtok!(100), Token::Unit(KilometersPerHour)]);
run_lex("3.6 mps", vec![numtok!(3.6), Token::Unit(MetersPerSecond)]);
run_lex("3.6 meters per second", vec![numtok!(3.6), Token::Unit(MetersPerSecond)]);
run_lex("3.6 metre / secs", vec![numtok!(3.6), Token::Unit(MetersPerSecond)]);
run_lex("60 mph", vec![numtok!(60), Token::Unit(MilesPerHour)]);
run_lex("60 miles per hour", vec![numtok!(60), Token::Unit(MilesPerHour)]);
run_lex("60 mile / hr", vec![numtok!(60), Token::Unit(MilesPerHour)]);
run_lex("35 fps", vec![numtok!(35), Token::Unit(FeetPerSecond)]);
run_lex("35 ft / sec", vec![numtok!(35), Token::Unit(FeetPerSecond)]);
run_lex("35 ft per seconds", vec![numtok!(35), Token::Unit(FeetPerSecond)]);
run_lex("35 foot / secs", vec![numtok!(35), Token::Unit(FeetPerSecond)]);
run_lex("35 foot per seconds", vec![numtok!(35), Token::Unit(FeetPerSecond)]);
run_lex("35 feet / sec", vec![numtok!(35), Token::Unit(FeetPerSecond)]);
run_lex("35 feet per second", vec![numtok!(35), Token::Unit(FeetPerSecond)]);
run_lex("30 pa", vec![numtok!(30), Token::Unit(Pascal)]);
run_lex("23 celsius + 4 celsius", vec![numtok!(23), Token::Unit(Celsius), Token::Operator(Plus), numtok!(4), Token::Unit(Celsius)]);
run_lex("54 f - 1.5 fahrenheit", vec![numtok!(54), Token::Unit(Fahrenheit), Token::Operator(Minus), numtok!(1.5), Token::Unit(Fahrenheit)]);
run_lex("50 metric tonnes", vec![numtok!(50), Token::Unit(MetricTon)]);
run_lex("77 metric hps", vec![numtok!(77), Token::Unit(MetricHorsepower)]);
run_lex("100 + 99", vec![numtok!(100), Token::Operator(Plus), numtok!(99)]);
run_lex("100 plus 99", vec![numtok!(100), Token::Operator(Plus), numtok!(99)]);
run_lex("12 - 4", vec![numtok!(12), Token::Operator(Minus), numtok!(4)]);
run_lex("12 minus 4", vec![numtok!(12), Token::Operator(Minus), numtok!(4)]);
run_lex("50.5 * 2", vec![numtok!(50.5), Token::Operator(Multiply), numtok!(2)]);
run_lex("50.5 times 2", vec![numtok!(50.5), Token::Operator(Multiply), numtok!(2)]);
run_lex("50.5 multiplied by 2", vec![numtok!(50.5), Token::Operator(Multiply), numtok!(2)]);
run_lex("6 / 3", vec![numtok!(6), Token::Operator(Divide), numtok!(3)]);
run_lex("50 / 10", vec![numtok!(50), Token::Operator(Divide), numtok!(10)]);
run_lex("52 ÷ 12", vec![numtok!(52), Token::Operator(Divide), numtok!(12)]);
run_lex("6 divided by 3", vec![numtok!(6), Token::Operator(Divide), numtok!(3)]);
run_lex("7 mod 5", vec![numtok!(7), Token::Operator(Modulo), numtok!(5)]);
run_lex("(2 + 3) * 4", vec![Token::Operator(LeftParen), numtok!(2), Token::Operator(Plus), numtok!(3), Token::Operator(RightParen), Token::Operator(Multiply), numtok!(4)]);
run_lex("52 weeks * (12 hrs + 12 hours)", vec![numtok!(52), Token::Unit(Week), Token::Operator(Multiply), Token::Operator(LeftParen), numtok!(12), Token::Unit(Hour), Token::Operator(Plus), numtok!(12), Token::Unit(Hour), Token::Operator(RightParen)]);
run_lex("12 pound+", vec![numtok!(12), Token::Unit(Pound), Token::Operator(Plus)]);
run_lex("5 π m", vec![numtok!(5), Token::Constant(Pi), Token::Unit(Meter)]);
run_lex("5 Ω + 2 mΩ", vec![numtok!(5), Token::Unit(Ohm), Token::Operator(Plus), numtok!(2), Token::Unit(Milliohm)]);
}
}
| oken(c: &st |
main_test.go | package utf8
import (
"testing"
"github.com/diamnet/go/services/aurora/internal/test"
)
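// Scrub is expected to replace invalid UTF-8 byte sequences with the Unicode
// replacement character (U+FFFD), as the final assertion below demonstrates.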
func TestScrub(t *testing.T) {
tt := test.Start(t)
defer tt.Finish()
tt.Assert.Equal("scott", Scrub("scott"))
tt.Assert.Equal("scött", Scrub("scött"))
tt.Assert.Equal("�(", Scrub(string([]byte{0xC3, 0x28})))
}
lines.rs | use nu_test_support::{nu, pipeline};
#[test]
fn lines() {
let actual = nu!(
cwd: "tests/fixtures/formats", pipeline(
r#"
open cargo_sample.toml -r
| lines
| skip-while $it != "[dependencies]"
| skip 1
| first 1
| split column "="
| get Column1
| trim
| echo $it
"#
));
assert_eq!(actual.out, "rustyline");
}
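// The buffering test below reads a fixture whose first line is 8194 characters
// long, presumably chosen to straddle an 8 KiB read buffer, checking that
// `lines` does not split values at internal buffer boundaries.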
#[test]
fn lines_proper_buffering() {
let actual = nu!(
cwd: "tests/fixtures/formats", pipeline(
r#"
open lines_test.txt -r
| lines
| str length
| to json
"#
));
assert_eq!(actual.out, "[8194,4]");
}
file-input.component.ts | import { Component, EventEmitter, HostBinding, Input, OnChanges, OnInit, Output, ViewEncapsulation } from '@angular/core';
import { checkRequiredProp, trackID } from '@helpers/index';
import { Alert } from '@models/index';
import { AbstractControl, FormGroup } from '@angular/forms';
@Component({
selector: 'app-file-input',
templateUrl: './file-input.component.html',
styleUrls: ['./file-input.component.scss'],
encapsulation: ViewEncapsulation.None,
})
export class FileInputComponent implements OnInit, OnChanges {
@HostBinding('class.block') display = true;
@HostBinding('class.w-100') width = true;
@Input() inputAlerts: Alert[] = [];
@Input() inputLabel: string = null;
@Input() inputID: string = null;
@Input() inputForm: AbstractControl = null;
@Input() inputGroup: FormGroup = null;
@Input() inputLabelClass = '';
@Input() inputGroupClass = '';
@Input() inputMessage = '';
@Input() inputRequired = true;
@Input() inputMultiple = false;
@Input() inputValidation = false;
@Input() inputFocus = false;
@Output() whenFileAdd: EventEmitter<any> = new EventEmitter<any>();
computedInputClass: string = null;
computedAriaDescribedBy: string = null;
computedSmallID: string = null;
trackID = null;
constructor() {
this.trackID = trackID;
}
ngOnInit() {
this.checkAllRequiredProp();
this.computeAllProps();
}
ngOnChanges() {
this.checkAllRequiredProp();
this.computeAllProps();
}
computeAriaDescribedBy() {
this.computedAriaDescribedBy = this.inputMessage
? `${this.inputID}Help`
: null;
}
  sendFileName(event: Event) {
    this.whenFileAdd.emit((event.target as HTMLInputElement).files);
  }
computeSmallID() {
this.computedSmallID = `${this.inputID}Help`;
}
computeAllProps() {
this.computeAriaDescribedBy();
this.computeSmallID();
}
checkAllRequiredProp() {
checkRequiredProp(this.inputLabel, 'inputLabel');
checkRequiredProp(this.inputID, 'inputID');
}
}
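// A hypothetical template usage sketch (inputs/outputs as declared above;
// `form` and `onFileAdd` are placeholder names):
//
// <app-file-input
//   inputID="avatar"
//   inputLabel="Avatar"
//   [inputGroup]="form"
//   (whenFileAdd)="onFileAdd($event)">
// </app-file-input>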
list.rs | use super::{DEFAULT, FLAG, MESSAGE, NAME};
use crate::descriptions::prompt;
use clap::{App, Arg};
use cto_ai::ux::prompt::{List, Prompt};
static CHOICES: &str = "choices";
static AUTOCOMPLETE: &str = "autocomplete";
// Init the cli commands for the List prompt
pub fn init_cli_command<'a, 'b>() -> App<'a, 'b> {
App::new("list")
.about(prompt::LIST)
.arg(
Arg::with_name(NAME)
.long(NAME)
.short("n")
.help("Name of the list prompt")
.value_name("NAME")
.required(true),
)
.arg(
Arg::with_name(MESSAGE)
.long(MESSAGE)
.short("m")
.help("Message to be displayed to the user")
.required(true)
.value_name("MESSAGE"),
)
.arg(
Arg::with_name(FLAG)
.long(FLAG)
.short("f")
.help("Command line flag alias associated with this prompt")
.value_name("FLAG"),
)
.arg(
Arg::with_name(CHOICES)
.help("The choices to include in the list")
.required(true)
.value_name("CHOICES")
.multiple(true),
)
.arg(
Arg::with_name(DEFAULT)
.long(DEFAULT)
.short("d")
.help("Sets default selected value in the list")
.value_name("DEFAULT VALUE"),
)
.arg(
Arg::with_name(AUTOCOMPLETE)
.long(AUTOCOMPLETE)
.short("a")
.help("Enables autocomplete on the list"),
        )
}
// Runs the List prompt
pub fn run(matches: &clap::ArgMatches) {
    let name = matches.value_of(NAME).unwrap();
let message = matches.value_of(MESSAGE).unwrap();
let choices = matches
.values_of(CHOICES)
.unwrap()
.map(String::from)
.collect();
let mut list = List::new(name, message, choices);
if let Some(default) = matches.value_of(DEFAULT) {
list = list.default_value(default);
}
if matches.is_present(AUTOCOMPLETE) {
list = list.autocomplete();
}
if let Some(flag) = matches.value_of(FLAG) {
list = list.flag(flag);
}
let final_value = list.execute().unwrap();
println!("{}", final_value);
}
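// A hypothetical invocation sketch (binary name assumed; flag names as
// declared above):
//
//   $ <cli> list -n color -m "Pick a color" red green blue -d green -a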
stdout.rs | //! # Stdout Span Exporter
//!
//! The stdout [`SpanExporter`] writes debug printed [`Span`]s to its configured
//! [`Write`] instance. By default it will write to [`Stdout`].
//!
//! [`SpanExporter`]: super::SpanExporter
//! [`Span`]: crate::trace::Span
//! [`Write`]: std::io::Write
//! [`Stdout`]: std::io::Stdout
//!
//! # Examples
//!
//! ```no_run
//! use opentelemetry::trace::Tracer;
//! use opentelemetry::sdk::export::trace::stdout;
//! use opentelemetry::global::shutdown_tracer_provider;
//!
//! fn main() {
//! let tracer = stdout::new_pipeline()
//! .with_pretty_print(true)
//! .install();
//!
//! tracer.in_span("doing_work", |cx| {
//! // Traced app logic here...
//! });
//!
//! shutdown_tracer_provider(); // sending remaining spans
//! }
//! ```
use crate::{
global, sdk,
sdk::export::{
trace::{ExportResult, SpanData, SpanExporter},
ExportError,
},
trace::TracerProvider,
};
use async_trait::async_trait;
use std::fmt::Debug;
use std::io::{stdout, Stdout, Write};
/// Pipeline builder
#[derive(Debug)]
pub struct PipelineBuilder<W: Write> {
pretty_print: bool,
trace_config: Option<sdk::trace::Config>,
writer: W,
}
/// Create a new stdout exporter pipeline builder.
pub fn new_pipeline() -> PipelineBuilder<Stdout> {
PipelineBuilder::default()
}
impl Default for PipelineBuilder<Stdout> {
/// Return the default pipeline builder.
fn default() -> Self {
Self {
pretty_print: false,
trace_config: None,
writer: stdout(),
}
}
}
impl<W: Write> PipelineBuilder<W> {
/// Specify the pretty print setting.
pub fn with_pretty_print(mut self, pretty_print: bool) -> Self {
self.pretty_print = pretty_print;
self
}
/// Assign the SDK trace configuration.
pub fn with_trace_config(mut self, config: sdk::trace::Config) -> Self {
self.trace_config = Some(config);
self
}
/// Specify the writer to use.
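    ///
    /// For example, output can be captured in an in-memory buffer for testing
    /// (a sketch, not taken from the crate's docs):
    ///
    /// ```ignore
    /// let pipeline = new_pipeline().with_writer(Vec::<u8>::new());
    /// ```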
pub fn with_writer<T: Write>(self, writer: T) -> PipelineBuilder<T> {
PipelineBuilder {
pretty_print: self.pretty_print,
trace_config: self.trace_config,
writer,
}
}
}
impl<W> PipelineBuilder<W>
where
W: Write + Debug + Send + 'static,
{
/// Install the stdout exporter pipeline with the recommended defaults.
pub fn install(mut self) -> sdk::trace::Tracer {
let exporter = Exporter::new(self.writer, self.pretty_print);
let mut provider_builder = sdk::trace::TracerProvider::builder().with_exporter(exporter);
if let Some(config) = self.trace_config.take() {
provider_builder = provider_builder.with_config(config);
}
let provider = provider_builder.build();
let tracer = provider.get_tracer("opentelemetry", Some(env!("CARGO_PKG_VERSION")));
let _ = global::set_tracer_provider(provider);
tracer
}
}
/// A [`SpanExporter`] that writes to [`Stdout`] or other configured [`Write`].
///
/// [`SpanExporter`]: super::SpanExporter
/// [`Write`]: std::io::Write
/// [`Stdout`]: std::io::Stdout
#[derive(Debug)]
pub struct Exporter<W: Write> {
writer: W,
pretty_print: bool,
}
impl<W: Write> Exporter<W> {
/// Create a new stdout `Exporter`.
pub fn new(writer: W, pretty_print: bool) -> Self {
Self {
writer,
pretty_print,
}
}
}
#[async_trait]
impl<W> SpanExporter for Exporter<W>
where
W: Write + Debug + Send + 'static,
{
/// Export spans to stdout
async fn export(&mut self, batch: Vec<SpanData>) -> ExportResult {
for span in batch {
if self.pretty_print {
self.writer
.write_all(format!("{:#?}\n", span).as_bytes())
.map_err::<Error, _>(Into::into)?;
} else {
self.writer
.write_all(format!("{:?}\n", span).as_bytes())
.map_err::<Error, _>(Into::into)?;
}
}
Ok(())
}
}
/// Stdout exporter's error
#[derive(thiserror::Error, Debug)]
#[error(transparent)]
struct Error(#[from] std::io::Error);
impl ExportError for Error {
fn exporter_name(&self) -> &'static str {
"stdout"
}
}
expr.go | // Copyright 2020 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ir
import (
"bytes"
"cmd/compile/internal/base"
"cmd/compile/internal/types"
"cmd/internal/obj"
"cmd/internal/src"
"fmt"
"go/constant"
"go/token"
)
// An Expr is a Node that can appear as an expression.
type Expr interface {
Node
isExpr()
}
// A miniExpr is a miniNode with extra fields common to expressions.
// TODO(rsc): Once we are sure about the contents, compact the bools
// into a bit field and leave extra bits available for implementations
// embedding miniExpr. Right now there are ~60 unused bits sitting here.
type miniExpr struct {
miniNode
typ *types.Type
init Nodes // TODO(rsc): Don't require every Node to have an init
flags bitset8
}
const (
miniExprNonNil = 1 << iota
miniExprTransient
miniExprBounded
miniExprImplicit // for use by implementations; not supported by every Expr
miniExprCheckPtr
)
func (*miniExpr) isExpr() {}
func (n *miniExpr) Type() *types.Type { return n.typ }
func (n *miniExpr) SetType(x *types.Type) { n.typ = x }
func (n *miniExpr) NonNil() bool { return n.flags&miniExprNonNil != 0 }
func (n *miniExpr) MarkNonNil() { n.flags |= miniExprNonNil }
func (n *miniExpr) Transient() bool { return n.flags&miniExprTransient != 0 }
func (n *miniExpr) SetTransient(b bool) { n.flags.set(miniExprTransient, b) }
func (n *miniExpr) Bounded() bool { return n.flags&miniExprBounded != 0 }
func (n *miniExpr) SetBounded(b bool) { n.flags.set(miniExprBounded, b) }
func (n *miniExpr) Init() Nodes { return n.init }
func (n *miniExpr) PtrInit() *Nodes { return &n.init }
func (n *miniExpr) SetInit(x Nodes) { n.init = x }
// An AddStringExpr is a string concatenation List[0] + List[1] + ... + List[len(List)-1].
type AddStringExpr struct {
miniExpr
List Nodes
Prealloc *Name
}
func NewAddStringExpr(pos src.XPos, list []Node) *AddStringExpr {
n := &AddStringExpr{}
n.pos = pos
n.op = OADDSTR
n.List = list
return n
}
// An AddrExpr is an address-of expression &X.
// It may end up being a normal address-of or an allocation of a composite literal.
type AddrExpr struct {
miniExpr
X Node
Prealloc *Name // preallocated storage if any
}
func NewAddrExpr(pos src.XPos, x Node) *AddrExpr {
n := &AddrExpr{X: x}
n.op = OADDR
n.pos = pos
return n
}
func (n *AddrExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *AddrExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (n *AddrExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OADDR, OPTRLIT:
n.op = op
}
}
// A BasicLit is a literal of basic type.
type BasicLit struct {
miniExpr
val constant.Value
}
func NewBasicLit(pos src.XPos, val constant.Value) Node {
n := &BasicLit{val: val}
n.op = OLITERAL
n.pos = pos
if k := val.Kind(); k != constant.Unknown {
n.SetType(idealType(k))
}
return n
}
func (n *BasicLit) Val() constant.Value { return n.val }
func (n *BasicLit) SetVal(val constant.Value) { n.val = val }
// A BinaryExpr is a binary expression X Op Y,
// or Op(X, Y) for builtin functions that do not become calls.
type BinaryExpr struct {
miniExpr
X Node
Y Node
}
func NewBinaryExpr(pos src.XPos, op Op, x, y Node) *BinaryExpr {
n := &BinaryExpr{X: x, Y: y}
n.pos = pos
n.SetOp(op)
return n
}
func (n *BinaryExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OADD, OADDSTR, OAND, OANDNOT, ODIV, OEQ, OGE, OGT, OLE,
OLSH, OLT, OMOD, OMUL, ONE, OOR, ORSH, OSUB, OXOR,
OCOPY, OCOMPLEX, OUNSAFEADD, OUNSAFESLICE,
OEFACE:
n.op = op
}
}
// A CallUse records how the result of the call is used:
type CallUse byte
const (
_ CallUse = iota
CallUseExpr // single expression result is used
CallUseList // list of results are used
CallUseStmt // results not used - call is a statement
)
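// For intuition (illustrative examples, not from the original comments):
// `x := f()` is CallUseExpr, `x, y := f()` is CallUseList, and a bare
// `f()` statement is CallUseStmt.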
// A CallExpr is a function call X(Args).
type CallExpr struct {
miniExpr
origNode
X Node
Args Nodes
KeepAlive []*Name // vars to be kept alive until call returns
IsDDD bool
Use CallUse
NoInline bool
PreserveClosure bool // disable directClosureCall for this call
}
func NewCallExpr(pos src.XPos, op Op, fun Node, args []Node) *CallExpr {
n := &CallExpr{X: fun}
n.pos = pos
n.orig = n
n.SetOp(op)
n.Args = args
return n
}
func (*CallExpr) isStmt() {}
func (n *CallExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OCALL, OCALLFUNC, OCALLINTER, OCALLMETH,
OAPPEND, ODELETE, OGETG, OMAKE, OPRINT, OPRINTN, ORECOVER:
n.op = op
}
}
// A ClosureExpr is a function literal expression.
type ClosureExpr struct {
miniExpr
Func *Func `mknode:"-"`
Prealloc *Name
}
func NewClosureExpr(pos src.XPos, fn *Func) *ClosureExpr {
n := &ClosureExpr{Func: fn}
n.op = OCLOSURE
n.pos = pos
return n
}
// A CompLitExpr is a composite literal Type{Vals}.
// Before type-checking, the type is Ntype.
type CompLitExpr struct {
miniExpr
origNode
Ntype Ntype
List Nodes // initialized values
Prealloc *Name
Len int64 // backing array length for OSLICELIT
}
func NewCompLitExpr(pos src.XPos, op Op, typ Ntype, list []Node) *CompLitExpr {
n := &CompLitExpr{Ntype: typ}
n.pos = pos
n.SetOp(op)
n.List = list
n.orig = n
return n
}
func (n *CompLitExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *CompLitExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (n *CompLitExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OARRAYLIT, OCOMPLIT, OMAPLIT, OSTRUCTLIT, OSLICELIT:
n.op = op
}
}
type ConstExpr struct {
miniExpr
origNode
val constant.Value
}
func NewConstExpr(val constant.Value, orig Node) Node {
n := &ConstExpr{val: val}
n.op = OLITERAL
n.pos = orig.Pos()
n.orig = orig
n.SetType(orig.Type())
n.SetTypecheck(orig.Typecheck())
n.SetDiag(orig.Diag())
return n
}
func (n *ConstExpr) Sym() *types.Sym { return n.orig.Sym() }
func (n *ConstExpr) Val() constant.Value { return n.val }
// A ConvExpr is a conversion Type(X).
// It may end up being a value or a type.
type ConvExpr struct {
miniExpr
X Node
}
func NewConvExpr(pos src.XPos, op Op, typ *types.Type, x Node) *ConvExpr {
n := &ConvExpr{X: x}
n.pos = pos
n.typ = typ
n.SetOp(op)
return n
}
func (n *ConvExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *ConvExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (n *ConvExpr) CheckPtr() bool { return n.flags&miniExprCheckPtr != 0 }
func (n *ConvExpr) SetCheckPtr(b bool) { n.flags.set(miniExprCheckPtr, b) }
func (n *ConvExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OCONV, OCONVIFACE, OCONVNOP, OBYTES2STR, OBYTES2STRTMP, ORUNES2STR, OSTR2BYTES, OSTR2BYTESTMP, OSTR2RUNES, ORUNESTR, OSLICE2ARRPTR:
n.op = op
}
}
// An IndexExpr is an index expression X[Y].
type IndexExpr struct {
miniExpr
X Node
Index Node
Assigned bool
}
func NewIndexExpr(pos src.XPos, x, index Node) *IndexExpr {
n := &IndexExpr{X: x, Index: index}
n.pos = pos
n.op = OINDEX
return n
}
func (n *IndexExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OINDEX, OINDEXMAP:
n.op = op
}
}
// A KeyExpr is a Key: Value composite literal key.
type KeyExpr struct {
miniExpr
Key Node
Value Node
}
func NewKeyExpr(pos src.XPos, key, value Node) *KeyExpr {
n := &KeyExpr{Key: key, Value: value}
n.pos = pos
n.op = OKEY
return n
}
// A StructKeyExpr is a Field: Value composite literal key.
type StructKeyExpr struct {
miniExpr
Field *types.Sym
Value Node
Offset int64
}
func NewStructKeyExpr(pos src.XPos, field *types.Sym, value Node) *StructKeyExpr {
n := &StructKeyExpr{Field: field, Value: value}
n.pos = pos
n.op = OSTRUCTKEY
n.Offset = types.BADWIDTH
return n
}
func (n *StructKeyExpr) Sym() *types.Sym { return n.Field }
// An InlinedCallExpr is an inlined function call.
type InlinedCallExpr struct {
miniExpr
Body Nodes
ReturnVars Nodes
}
func NewInlinedCallExpr(pos src.XPos, body, retvars []Node) *InlinedCallExpr {
n := &InlinedCallExpr{}
n.pos = pos
n.op = OINLCALL
n.Body = body
n.ReturnVars = retvars
return n
}
// A LogicalExpr is an expression X Op Y where Op is && or ||.
// It is separate from BinaryExpr to make room for statements
// that must be executed before Y but after X.
type LogicalExpr struct {
miniExpr
X Node
Y Node
}
func NewLogicalExpr(pos src.XPos, op Op, x, y Node) *LogicalExpr {
n := &LogicalExpr{X: x, Y: y}
n.pos = pos
n.SetOp(op)
return n
}
func (n *LogicalExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OANDAND, OOROR:
n.op = op
}
}
// A MakeExpr is a make expression: make(Type[, Len[, Cap]]).
// Op is OMAKECHAN, OMAKEMAP, OMAKESLICE, or OMAKESLICECOPY,
// but *not* OMAKE (that's a pre-typechecking CallExpr).
type MakeExpr struct {
miniExpr
Len Node
Cap Node
}
func NewMakeExpr(pos src.XPos, op Op, len, cap Node) *MakeExpr {
n := &MakeExpr{Len: len, Cap: cap}
n.pos = pos
n.SetOp(op)
return n
}
func (n *MakeExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OMAKECHAN, OMAKEMAP, OMAKESLICE, OMAKESLICECOPY:
n.op = op
}
}
// A NilExpr represents the predefined untyped constant nil.
// (It may be copied and assigned a type, though.)
type NilExpr struct {
miniExpr
Sym_ *types.Sym // TODO: Remove
}
func NewNilExpr(pos src.XPos) *NilExpr {
n := &NilExpr{}
n.pos = pos
n.op = ONIL
return n
}
func (n *NilExpr) Sym() *types.Sym { return n.Sym_ }
func (n *NilExpr) SetSym(x *types.Sym) { n.Sym_ = x }
// A ParenExpr is a parenthesized expression (X).
// It may end up being a value or a type.
type ParenExpr struct {
miniExpr
X Node
}
func NewParenExpr(pos src.XPos, x Node) *ParenExpr {
n := &ParenExpr{X: x}
n.op = OPAREN
n.pos = pos
return n
}
func (n *ParenExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *ParenExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (*ParenExpr) CanBeNtype() {}
// SetOTYPE changes n to be an OTYPE node returning t,
// like all the type nodes in type.go.
func (n *ParenExpr) SetOTYPE(t *types.Type) {
n.op = OTYPE
n.typ = t
t.SetNod(n)
}
// A ResultExpr represents a direct access to a result.
type ResultExpr struct {
miniExpr
Index int64 // index of the result expr.
}
func NewResultExpr(pos src.XPos, typ *types.Type, index int64) *ResultExpr {
n := &ResultExpr{Index: index}
n.pos = pos
n.op = ORESULT
n.typ = typ
return n
}
// A LinksymOffsetExpr refers to an offset within a global variable.
// It is like a SelectorExpr but without the field name.
type LinksymOffsetExpr struct {
miniExpr
Linksym *obj.LSym
Offset_ int64
}
func NewLinksymOffsetExpr(pos src.XPos, lsym *obj.LSym, offset int64, typ *types.Type) *LinksymOffsetExpr {
n := &LinksymOffsetExpr{Linksym: lsym, Offset_: offset}
n.typ = typ
n.op = OLINKSYMOFFSET
return n
}
// NewLinksymExpr is NewLinksymOffsetExpr, but with offset fixed at 0.
func NewLinksymExpr(pos src.XPos, lsym *obj.LSym, typ *types.Type) *LinksymOffsetExpr {
return NewLinksymOffsetExpr(pos, lsym, 0, typ)
}
// NewNameOffsetExpr is NewLinksymOffsetExpr, but taking a *Name
// representing a global variable instead of an *obj.LSym directly.
func NewNameOffsetExpr(pos src.XPos, name *Name, offset int64, typ *types.Type) *LinksymOffsetExpr {
if name == nil || IsBlank(name) || !(name.Op() == ONAME && name.Class == PEXTERN) {
base.FatalfAt(pos, "cannot take offset of nil, blank name or non-global variable: %v", name)
}
return NewLinksymOffsetExpr(pos, name.Linksym(), offset, typ)
}
// A SelectorExpr is a selector expression X.Sel.
type SelectorExpr struct {
miniExpr
X Node
Sel *types.Sym
Selection *types.Field
Prealloc *Name // preallocated storage for OCALLPART, if any
}
func NewSelectorExpr(pos src.XPos, op Op, x Node, sel *types.Sym) *SelectorExpr {
n := &SelectorExpr{X: x, Sel: sel}
n.pos = pos
n.SetOp(op)
return n
}
func (n *SelectorExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OXDOT, ODOT, ODOTPTR, ODOTMETH, ODOTINTER, OCALLPART, OMETHEXPR:
n.op = op
}
}
func (n *SelectorExpr) Sym() *types.Sym { return n.Sel }
func (n *SelectorExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *SelectorExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (n *SelectorExpr) Offset() int64 { return n.Selection.Offset }
func (n *SelectorExpr) FuncName() *Name {
if n.Op() != OMETHEXPR {
panic(n.no("FuncName"))
}
fn := NewNameAt(n.Selection.Pos, MethodSym(n.X.Type(), n.Sel))
fn.Class = PFUNC
fn.SetType(n.Type())
if n.Selection.Nname != nil {
// TODO(austin): Nname is nil for interface method
// expressions (I.M), so we can't attach a Func to
// those here. reflectdata.methodWrapper generates the
// Func.
fn.Func = n.Selection.Nname.(*Name).Func
}
return fn
}
// Before type-checking, bytes.Buffer is a SelectorExpr.
// After type-checking it becomes a Name.
func (*SelectorExpr) CanBeNtype() {}
// A SliceExpr is a slice expression X[Low:High] or X[Low:High:Max].
type SliceExpr struct {
miniExpr
X Node
Low Node
High Node
Max Node
}
func NewSliceExpr(pos src.XPos, op Op, x, low, high, max Node) *SliceExpr {
n := &SliceExpr{X: x, Low: low, High: high, Max: max}
n.pos = pos
n.op = op
return n
}
func (n *SliceExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OSLICE, OSLICEARR, OSLICESTR, OSLICE3, OSLICE3ARR:
n.op = op
}
}
// IsSlice3 reports whether o is a slice3 op (OSLICE3, OSLICE3ARR).
// o must be a slicing op.
func (o Op) IsSlice3() bool {
switch o {
case OSLICE, OSLICEARR, OSLICESTR:
return false
case OSLICE3, OSLICE3ARR:
return true
}
base.Fatalf("IsSlice3 op %v", o)
return false
}
// A SliceHeader expression constructs a slice header from its parts.
type SliceHeaderExpr struct {
miniExpr
Ptr Node
Len Node
Cap Node
}
func NewSliceHeaderExpr(pos src.XPos, typ *types.Type, ptr, len, cap Node) *SliceHeaderExpr {
n := &SliceHeaderExpr{Ptr: ptr, Len: len, Cap: cap}
n.pos = pos
n.op = OSLICEHEADER
n.typ = typ
return n
}
// A StarExpr is a dereference expression *X.
// It may end up being a value or a type.
type StarExpr struct {
miniExpr
X Node
}
func NewStarExpr(pos src.XPos, x Node) *StarExpr {
n := &StarExpr{X: x}
n.op = ODEREF
n.pos = pos
return n
}
func (n *StarExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
func (n *StarExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
func (*StarExpr) CanBeNtype() {}
// SetOTYPE changes n to be an OTYPE node returning t,
// like all the type nodes in type.go.
func (n *StarExpr) SetOTYPE(t *types.Type) {
n.op = OTYPE
n.X = nil
n.typ = t
t.SetNod(n)
}
// A TypeAssertionExpr is a selector expression X.(Type).
// Before type-checking, the type is Ntype.
type TypeAssertExpr struct {
miniExpr
X Node
Ntype Ntype
// Runtime type information provided by walkDotType for
// assertions from non-empty interface to concrete type.
Itab *AddrExpr `mknode:"-"` // *runtime.itab for Type implementing X's type
}
func NewTypeAssertExpr(pos src.XPos, x Node, typ Ntype) *TypeAssertExpr {
n := &TypeAssertExpr{X: x, Ntype: typ}
n.pos = pos
n.op = ODOTTYPE
return n
}
func (n *TypeAssertExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case ODOTTYPE, ODOTTYPE2:
n.op = op
}
}
// A UnaryExpr is a unary expression Op X,
// or Op(X) for a builtin function that does not end up being a call.
type UnaryExpr struct {
miniExpr
X Node
}
func NewUnaryExpr(pos src.XPos, op Op, x Node) *UnaryExpr {
n := &UnaryExpr{X: x}
n.pos = pos
n.SetOp(op)
return n
}
func (n *UnaryExpr) SetOp(op Op) {
switch op {
default:
panic(n.no("SetOp " + op.String()))
case OBITNOT, ONEG, ONOT, OPLUS, ORECV,
OALIGNOF, OCAP, OCLOSE, OIMAG, OLEN, ONEW,
OOFFSETOF, OPANIC, OREAL, OSIZEOF,
OCHECKNIL, OCFUNC, OIDATA, OITAB, OSPTR, OVARDEF, OVARKILL, OVARLIVE:
n.op = op
}
}
// An InstExpr is a generic function or type instantiation.
type InstExpr struct {
miniExpr
X Node
Targs []Node
}
func NewInstExpr(pos src.XPos, op Op, x Node, targs []Node) *InstExpr {
n := &InstExpr{X: x, Targs: targs}
n.pos = pos
n.op = op
return n
}
func IsZero(n Node) bool {
switch n.Op() {
case ONIL:
return true
case OLITERAL:
switch u := n.Val(); u.Kind() {
case constant.String:
return constant.StringVal(u) == ""
case constant.Bool:
return !constant.BoolVal(u)
default:
return constant.Sign(u) == 0
}
case OARRAYLIT:
n := n.(*CompLitExpr)
for _, n1 := range n.List {
if n1.Op() == OKEY {
n1 = n1.(*KeyExpr).Value
}
if !IsZero(n1) {
return false
}
}
return true
case OSTRUCTLIT:
n := n.(*CompLitExpr)
for _, n1 := range n.List {
n1 := n1.(*StructKeyExpr)
if !IsZero(n1.Value) {
return false
}
}
return true
}
return false
}
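// Illustrative sketch (not from the original file): a caller can use IsZero
// to skip initializers that zeroed memory already provides. Only Node and
// IsZero from this package are assumed.
func dropZeroInits(inits []Node) []Node {
kept := inits[:0]
for _, n := range inits {
if !IsZero(n) {
kept = append(kept, n)
}
}
return kept
}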
// lvalue etc
func IsAddressable(n Node) bool {
switch n.Op() {
case OINDEX:
n := n.(*IndexExpr)
if n.X.Type() != nil && n.X.Type().IsArray() {
return IsAddressable(n.X)
}
if n.X.Type() != nil && n.X.Type().IsString() {
return false
}
fallthrough
case ODEREF, ODOTPTR:
return true
case ODOT:
n := n.(*SelectorExpr)
return IsAddressable(n.X)
case ONAME:
n := n.(*Name)
if n.Class == PFUNC {
return false
}
return true
case OLINKSYMOFFSET:
return true
}
return false
}
func StaticValue(n Node) Node {
for {
if n.Op() == OCONVNOP {
n = n.(*ConvExpr).X
continue
}
n1 := staticValue1(n)
if n1 == nil {
return n
}
n = n1
}
}
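// Illustrative example (an assumption, not from the original): given
// x := 1; y := x; z := y
// StaticValue on the Node for z follows the single-assignment chain
// z -> y -> x via staticValue1 and returns the OLITERAL for 1.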
// staticValue1 implements a simple SSA-like optimization. If n is a local variable
// that is initialized and never reassigned, staticValue1 returns the initializer
// expression. Otherwise, it returns nil.
func staticValue1(nn Node) Node {
if nn.Op() != ONAME {
return nil
}
n := nn.(*Name)
if n.Class != PAUTO {
return nil
}
defn := n.Defn
if defn == nil {
return nil
}
var rhs Node
FindRHS:
switch defn.Op() {
case OAS:
defn := defn.(*AssignStmt)
rhs = defn.Y
case OAS2:
defn := defn.(*AssignListStmt)
for i, lhs := range defn.Lhs {
if lhs == n {
rhs = defn.Rhs[i]
break FindRHS
}
}
base.Fatalf("%v missing from LHS of %v", n, defn)
default:
return nil
}
if rhs == nil {
base.Fatalf("RHS is nil: %v", defn)
}
if reassigned(n) {
return nil
}
return rhs
}
// reassigned takes an ONAME node, walks the function in which it is defined, and reports
// whether the name has any assignments other than its declaration.
// NB: global variables are always considered to be re-assigned.
// TODO: handle initial declaration not including an assignment and followed by a single assignment?
func reassigned(name *Name) bool {
if name.Op() != ONAME {
base.Fatalf("reassigned %v", name)
}
// no way to reliably check for no-reassignment of globals, assume it can be
if name.Curfn == nil {
return true
}
// TODO(mdempsky): This is inefficient and becoming increasingly
// unwieldy. Figure out a way to generalize escape analysis's
// reassignment detection for use by inlining and devirtualization.
// isName reports whether n is a reference to name.
isName := func(x Node) bool {
n, ok := x.(*Name)
return ok && n.Canonical() == name
}
var do func(n Node) bool
do = func(n Node) bool {
switch n.Op() {
case OAS:
n := n.(*AssignStmt)
if isName(n.X) && n != name.Defn {
return true
}
case OAS2, OAS2FUNC, OAS2MAPR, OAS2DOTTYPE, OAS2RECV, OSELRECV2:
n := n.(*AssignListStmt)
for _, p := range n.Lhs {
if isName(p) && n != name.Defn {
return true
}
}
case OADDR:
n := n.(*AddrExpr)
if isName(OuterValue(n.X)) {
return true
}
case OCLOSURE:
n := n.(*ClosureExpr)
if Any(n.Func, do) {
return true
}
}
return false
}
return Any(name.Curfn, do)
}
// IsIntrinsicCall reports whether the compiler back end will treat the call as an intrinsic operation.
var IsIntrinsicCall = func(*CallExpr) bool { return false }
// SameSafeExpr checks whether it is safe to reuse one of l and r
// instead of computing both. SameSafeExpr assumes that l and r are
// used in the same statement or expression. In order for it to be
// safe to reuse l or r, they must:
// * be the same expression
// * not have side-effects (no function calls, no channel ops);
// however, panics are ok
// * not cause inappropriate aliasing; e.g. two string to []byte
// conversions, must result in two distinct slices
//
// The handling of OINDEXMAP is subtle. OINDEXMAP can occur both
// as an lvalue (map assignment) and an rvalue (map access). This is
// currently OK, since the only place SameSafeExpr gets used on an
// lvalue expression is for OSLICE and OAPPEND optimizations, and it
// is correct in those settings.
func SameSafeExpr(l Node, r Node) bool {
if l.Op() != r.Op() || !types.Identical(l.Type(), r.Type()) {
return false
}
switch l.Op() {
case ONAME:
return l == r
case ODOT, ODOTPTR:
l := l.(*SelectorExpr)
r := r.(*SelectorExpr)
return l.Sel != nil && r.Sel != nil && l.Sel == r.Sel && SameSafeExpr(l.X, r.X)
case ODEREF:
l := l.(*StarExpr)
r := r.(*StarExpr)
return SameSafeExpr(l.X, r.X)
case ONOT, OBITNOT, OPLUS, ONEG:
l := l.(*UnaryExpr)
r := r.(*UnaryExpr)
return SameSafeExpr(l.X, r.X)
case OCONVNOP:
l := l.(*ConvExpr)
r := r.(*ConvExpr)
return SameSafeExpr(l.X, r.X)
case OCONV:
l := l.(*ConvExpr)
r := r.(*ConvExpr)
// Some conversions can't be reused, such as []byte(str).
// Allow only numeric-ish types. This is a bit conservative.
return types.IsSimple[l.Type().Kind()] && SameSafeExpr(l.X, r.X)
case OINDEX, OINDEXMAP:
l := l.(*IndexExpr)
r := r.(*IndexExpr)
return SameSafeExpr(l.X, r.X) && SameSafeExpr(l.Index, r.Index)
case OADD, OSUB, OOR, OXOR, OMUL, OLSH, ORSH, OAND, OANDNOT, ODIV, OMOD:
l := l.(*BinaryExpr)
r := r.(*BinaryExpr)
return SameSafeExpr(l.X, r.X) && SameSafeExpr(l.Y, r.Y)
case OLITERAL:
return constant.Compare(l.Val(), token.EQL, r.Val())
case ONIL:
return true
}
return false
}
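// Illustrative example (an assumption): for m[k] = m[k] + 1, walk can call
// SameSafeExpr on the two m[k] expressions; both are OINDEXMAP over the same
// ONAMEs, so it reports true and the map index can be computed once.
// For f() + f() it reports false, since OCALLFUNC is not in the switch above.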
// ShouldCheckPtr reports whether pointer checking should be enabled for
// function fn at a given level. See debugHelpFooter for defined
// levels.
func ShouldCheckPtr(fn *Func, level int) bool {
return base.Debug.Checkptr >= level && fn.Pragma&NoCheckPtr == 0
}
// IsReflectHeaderDataField reports whether l is an expression p.Data
// where p has type reflect.SliceHeader or reflect.StringHeader.
func IsReflectHeaderDataField(l Node) bool {
if l.Type() != types.Types[types.TUINTPTR] {
return false
}
var tsym *types.Sym
switch l.Op() {
case ODOT:
l := l.(*SelectorExpr)
tsym = l.X.Type().Sym()
case ODOTPTR:
l := l.(*SelectorExpr)
tsym = l.X.Type().Elem().Sym()
default:
return false
}
if tsym == nil || l.Sym().Name != "Data" || tsym.Pkg.Path != "reflect" {
return false
}
return tsym.Name == "SliceHeader" || tsym.Name == "StringHeader"
}
func ParamNames(ft *types.Type) []Node {
args := make([]Node, ft.NumParams())
for i, f := range ft.Params().FieldSlice() {
args[i] = AsNode(f.Nname)
}
return args
}
// MethodSym returns the method symbol representing a method name
// associated with a specific receiver type.
//
// Method symbols can be used to distinguish the same method appearing
// in different method sets. For example, T.M and (*T).M have distinct
// method symbols.
//
// The returned symbol will be marked as a function.
func MethodSym(recv *types.Type, msym *types.Sym) *types.Sym {
sym := MethodSymSuffix(recv, msym, "")
sym.SetFunc(true)
return sym
}
// MethodSymSuffix is like MethodSym, but allows attaching a
// distinguisher suffix. To avoid collisions, the suffix must not
// start with a letter, number, or period.
func MethodSymSuffix(recv *types.Type, msym *types.Sym, suffix string) *types.Sym {
if msym.IsBlank() {
base.Fatalf("blank method name")
}
rsym := recv.Sym()
if recv.IsPtr() {
if rsym != nil {
base.Fatalf("declared pointer receiver type: %v", recv)
}
rsym = recv.Elem().Sym()
}
// Find the package the receiver type appeared in. For
// anonymous receiver types (i.e., anonymous structs with
// embedded fields), use the "go" pseudo-package instead.
rpkg := Pkgs.Go
if rsym != nil {
rpkg = rsym.Pkg
}
var b bytes.Buffer
if recv.IsPtr() {
// The parentheses aren't really necessary, but
// they're pretty traditional at this point.
fmt.Fprintf(&b, "(%-S)", recv)
} else {
fmt.Fprintf(&b, "%-S", recv)
}
// A particular receiver type may have multiple non-exported
// methods with the same name. To disambiguate them, include a
// package qualifier for names that came from a different
// package than the receiver type.
if !types.IsExported(msym.Name) && msym.Pkg != rpkg {
b.WriteString(".")
b.WriteString(msym.Pkg.Prefix)
}
b.WriteString(".")
b.WriteString(msym.Name)
b.WriteString(suffix)
return rpkg.LookupBytes(b.Bytes())
}
// MethodExprName returns the ONAME representing the method
// referenced by expression n, which must be a method selector,
// method expression, or method value.
func MethodExprName(n Node) *Name {
name, _ := MethodExprFunc(n).Nname.(*Name)
return name
}
// MethodExprFunc is like MethodExprName, but returns the types.Field instead.
func MethodExprFunc(n Node) *types.Field {
switch n.Op() {
case ODOTMETH, OMETHEXPR, OCALLPART:
return n.(*SelectorExpr).Selection
}
base.Fatalf("unexpected node: %v (%v)", n, n.Op())
panic("unreachable")
}
doc.go | /*
Copyright 2018 The Kubernetes authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package v1alpha1 contains API Schema definitions for the cluster v1alpha1 API group
// +k8s:openapi-gen=true
// +k8s:deepcopy-gen=package,register
// +k8s:conversion-gen=sigs.k8s.io/cluster-api/pkg/apis/cluster
// +k8s:defaulter-gen=TypeMeta
// +groupName=cluster.k8s.io
package v1alpha1
setup.py | #!/usr/bin/env python
from setuptools import setup
try:
from pypandoc import convert_file
read_me = lambda f: convert_file(f, 'rst')
except ImportError:
print('pypandoc is not installed.')
read_me = lambda f: open(f, 'r').read()
setup(name='echonetlite',
version='0.1.0',
description='Echonet Lite',
long_description=read_me('README.md'),
author='Keiichi SHIMA',
author_email='[email protected]',
url='https://github.com/keiichishima/echonetlite',
packages=['echonetlite'],
install_requires=['Twisted>=16.3.0'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3.5',
'Topic :: Home Automation',
'Topic :: System :: Networking',
'Topic :: Software Development :: Libraries :: Python Modules'],
license='BSD License',
)
api.py | # -*- coding: utf-8 -*-
"""
requests.api
~~~~~~~~~~~~
This module implements the Requests API.
:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""
from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
:param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the body of the :class:`Request`.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
to add for the file.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How many seconds to wait for the server to send data
before giving up, as a float, or a :ref:`(connect timeout, read
timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
to a CA bundle to use. Defaults to ``True``.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response
Usage::
>>> import requests
>>> req = requests.request('GET', 'http://httpbin.org/get')
>>> req
<Response [200]>
"""
# By using the 'with' statement we are sure the session is closed, thus we
# avoid leaving sockets open which can trigger a ResourceWarning in some
# cases, and look like a memory leak in others.
with sessions.Session() as session:
return session.request(method=method, url=url, **kwargs)
def get(url, params=None, **kwargs):
r"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send
in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault("allow_redirects", True)
return request("get", url, params=params, **kwargs)
def options(url, **kwargs):
r"""Sends an OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault("allow_redirects", True)
return request("options", url, **kwargs)
def head(url, **kwargs):
r"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault("allow_redirects", False)
return request("head", url, **kwargs)
def post(url, data=None, json=None, **kwargs):
r"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request("post", url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs):
r"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request("put", url, data=data, **kwargs)
def patch(url, data=None, **kwargs):
r"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request("patch", url, data=data, **kwargs)
def delete(url, **kwargs):
r"""Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request("delete", url, **kwargs)
| r"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request("post", url, data=data, json=json, **kwargs) |
BBAM_utils.py | import torch
from torch.autograd import Variable
from torchvision import models
import cv2
import sys
import numpy as np
import os
import math
import torch.nn.functional as F
idx_to_class = {0 : 'aeroplane', 1 : 'bicycle', 2 : 'bird', 3 : 'boat', 4 : 'bottle', 5 : 'bus', 6 : 'car', 7 : 'cat',
8 : 'chair', 9 : 'cow', 10 : 'table', 11 : 'dog', 12 : 'horse', 13 : 'motorbike', 14 : 'person',
15 : 'plant', 16 : 'sheep', 17 : 'sofa', 18 : 'train', 19 : 'monitor'}
def tv_norm(input, tv_beta, diagonal=False, sum=False):
# print(input.shape)
img = input[0, :]
if sum:
row_grad = torch.sum(torch.abs((img[:-1 , :] - img[1 :, :])).pow(tv_beta))
col_grad = torch.sum(torch.abs((img[: , :-1] - img[: , 1 :])).pow(tv_beta))
else:
row_grad = torch.mean(torch.abs((img[:-1, :] - img[1:, :])).pow(tv_beta))
col_grad = torch.mean(torch.abs((img[:, :-1] - img[:, 1:])).pow(tv_beta))
if diagonal:
diag = 0
if sum:
diag += torch.sum(torch.abs((img[:-1, :-1] - img[1:, 1:])).pow(tv_beta))
diag += torch.sum(torch.abs((img[1:, :-1] - img[:-1, 1:])).pow(tv_beta))
diag += torch.sum(torch.abs((img[:-1, 1:] - img[1:, :-1])).pow(tv_beta))
diag += torch.sum(torch.abs((img[1:, 1:] - img[:-1, :-1])).pow(tv_beta))
else:
diag += torch.mean(torch.abs((img[:-1, :-1] - img[1:, 1:])).pow(tv_beta))
diag += torch.mean(torch.abs((img[1:, :-1] - img[:-1, 1:])).pow(tv_beta))
diag += torch.mean(torch.abs((img[:-1, 1:] - img[1:, :-1])).pow(tv_beta))
diag += torch.mean(torch.abs((img[1:, 1:] - img[:-1, :-1])).pow(tv_beta))
return row_grad + col_grad + diag
return row_grad + col_grad
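# Minimal sketch of calling tv_norm (shape and tv_beta are assumptions):
# mask = torch.rand(1, 64, 64) # indexed as input[0, :] above
# smoothness = tv_norm(mask, tv_beta=3, diagonal=True)
# Smaller values indicate a spatially smoother mask.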
def numpy_to_torch(img, requires_grad = True, cuda_device=None):
use_cuda = torch.cuda.is_available()
if len(img.shape) < 3:
output = np.float32([img])
else:
output = np.expand_dims(img, axis=1)
# output = np.transpose(img, (2, 0, 1))
output = torch.from_numpy(output)
if use_cuda:
if cuda_device==None:
output = output.cuda()
else:
output = output.cuda(cuda_device)
# output = output.repeat(3, 1, 1)
v = Variable(output, requires_grad = requires_grad)
# v = v.repeat(3, 1, 1)
return v
color_dicts = [
[0.6, 0, 0.05],
[0.03, 0.19, 0.42],
[0, 0.27, 0.11],
[0.24, 0, 0.49],
[0.5, 0.25, 0.02],
[1, 0.5, 0],
[0.2, 0.2, 0.2],
[1, 0.1, 0.6],
[0.8, 0.8, 0]
]
def save_pred(image, boxes, save_path, image_id):
image[0] += 102.9801
image[1] += 115.9465
image[2] += 122.7717
image = image.data.cpu().numpy().transpose(1, 2, 0).astype('uint8')
for coord_idx, coords in enumerate(boxes):
image = cv2.UMat(image).get()
color = color_dicts[coord_idx%len(color_dicts)]
color = [int(c*255.0) for c in color]
color = color[::-1]
image = cv2.rectangle(image, (int(coords[0]), int(coords[1])),
(int(coords[2]), int(coords[3])), color, 5)
save_name = '%s/%s/box_prediction.jpg' % (save_path, image_id)
cv2.imwrite(save_name, image)
def save_mask(mask, masked_img=None, proposal=None, original_coord=None, perturbed_coord=None, iteration=None, proposal_idx=None, image_id=None, class_name=None, save_path_root=None, single_p_idx=None):
if not (masked_img is None):
masked_img[0] += 102.9801
masked_img[1] += 115.9465
masked_img[2] += 122.7717
masked_img = masked_img.data.cpu().numpy().transpose(1, 2, 0).astype('uint8')
mask = (255*mask.data.cpu().numpy().transpose(1, 2, 0)).astype('uint8')
color = [(255, 0, 0), (0, 255, 0), (0, 0, 255)] # blue: proposal, green: unperturbed, red: perturbed
if (proposal is not None) and (original_coord is not None) and (perturbed_coord is None):
for coord_idx, coords in enumerate([proposal, original_coord]):
coords = coords.detach().data.cpu().numpy()
masked_img = cv2.UMat(masked_img).get()
masked_img = cv2.rectangle(masked_img, (int(coords[0]), int(coords[1])),
(int(coords[2]), int(coords[3])), color[coord_idx], 5)
if not((proposal is None) or (original_coord is None) or (perturbed_coord is None)):
for coord_idx, coords in enumerate([proposal, original_coord, perturbed_coord]):
coords = coords.detach().data.cpu().numpy()
masked_img = cv2.UMat(masked_img).get()
masked_img = cv2.rectangle(masked_img, (int(coords[0]), int(coords[1])),
(int(coords[2]), int(coords[3])), color[coord_idx], 5)
if not (masked_img is None):
masked_img = cv2.resize(masked_img, None, fx=0.5, fy=0.5)
mask = cv2.resize(mask, (masked_img.shape[1], masked_img.shape[0]))
if single_p_idx is None:
save_path = '%s/%s/pidx_%04d_%s/' % (save_path_root, image_id, proposal_idx, class_name)
else:
save_path = '%s/%s/pidx_%04d_%s/' % (save_path_root, image_id, proposal_idx, class_name)
if not os.path.exists(save_path):
os.makedirs(save_path)
if single_p_idx is None:
if not (masked_img is None):
cv2.imwrite('%s/iter_%04d.jpg' % (save_path, iteration), masked_img)
cv2.imwrite('%s/iter_%04d_mask.jpg' % (save_path, iteration), mask)
else:
if not (masked_img is None):
cv2.imwrite('%s/pidx_%04d_img.jpg' % (save_path, single_p_idx), masked_img)
cv2.imwrite('%s/pidx_%04d_mask.jpg' % (save_path, single_p_idx), mask)
def get_max_iou(source, targets):
# target: multiple boxes
maxIoU = 0
for target in targets.bbox:
bb1, bb2 = {}, {}
bb1['x1'], bb1['x2'] = int(source[0]), int(source[2])
bb1['y1'], bb1['y2'] = int(source[1]), int(source[3])
bb2['x1'], bb2['x2'] = int(target[0]), int(target[2])
bb2['y1'], bb2['y2'] = int(target[1]), int(target[3])
# determine the coordinates of the intersection rectangle
x_left = max(bb1['x1'], bb2['x1'])
y_top = max(bb1['y1'], bb2['y1'])
x_right = min(bb1['x2'], bb2['x2'])
y_bottom = min(bb1['y2'], bb2['y2'])
if not(x_right < x_left or y_bottom < y_top):
# The intersection of two axis-aligned bounding boxes is always an
# axis-aligned bounding box
intersection_area = (x_right - x_left) * (y_bottom - y_top)
# compute the area of both AABBs
bb1_area = (bb1['x2'] - bb1['x1']) * (bb1['y2'] - bb1['y1'])
bb2_area = (bb2['x2'] - bb2['x1']) * (bb2['y2'] - bb2['y1'])
# compute the intersection over union by taking the intersection
# area and dividing it by the sum of prediction + ground-truth
# areas - the interesection area
iou = intersection_area / float(bb1_area + bb2_area - intersection_area)
assert iou >= 0.0
assert iou <= 1.0
if maxIoU < iou:
maxIoU = iou
return maxIoU
def get_single_iou(source, target):
# target: multiple boxes
maxIoU = 0
bb1, bb2 = {}, {}
bb1['x1'], bb1['x2'] = int(source[0]), int(source[2])
bb1['y1'], bb1['y2'] = int(source[1]), int(source[3])
bb2['x1'], bb2['x2'] = int(target[0]), int(target[2])
bb2['y1'], bb2['y2'] = int(target[1]), int(target[3])
# determine the coordinates of the intersection rectangle
x_left = max(bb1['x1'], bb2['x1'])
y_top = max(bb1['y1'], bb2['y1'])
x_right = min(bb1['x2'], bb2['x2'])
y_bottom = min(bb1['y2'], bb2['y2'])
if x_right < x_left or y_bottom < y_top:
return 0.0
# The intersection of two axis-aligned bounding boxes is always an
# axis-aligned bounding box
intersection_area = (x_right - x_left) * (y_bottom - y_top)
# compute the area of both AABBs
bb1_area = (bb1['x2'] - bb1['x1']) * (bb1['y2'] - bb1['y1'])
bb2_area = (bb2['x2'] - bb2['x1']) * (bb2['y2'] - bb2['y1'])
# compute the intersection over union by taking the intersection
# area and dividing it by the sum of prediction + ground-truth
# areas - the interesection area
iou = intersection_area / float(bb1_area + bb2_area - intersection_area)
return iou
def selected_positives(ious, pred_classes, displacements, proposal_iter):
ious, pred_classes, displacements = np.array(ious), np.array(pred_classes), np.array(displacements)
top_ious = np.argsort(-ious)
top_displacement = np.argsort(-displacements)
# include top 30%
positive_idxs = list(top_ious[:int(proposal_iter * 0.3)])
for d in top_displacement:
if ious[d] > 0.8:
positive_idxs.append(d)
return positive_idxs[:proposal_iter]
def imsmooth(tensor,
sigma,
stride=1,
padding=0,
padding_mode='constant',
padding_value=0):
"From TorchRay (https://github.com/facebookresearch/TorchRay)"
assert sigma >= 0
width = math.ceil(4 * sigma)
SQRT_TWO_DOUBLE = torch.tensor(math.sqrt(2), dtype=torch.float32)
SQRT_TWO_SINGLE = SQRT_TWO_DOUBLE.to(torch.float32)
EPSILON_SINGLE = torch.tensor(1.19209290E-07, dtype=torch.float32)
filt = (torch.arange(-width,
width + 1,
dtype=torch.float32,
device=tensor.device) /
(SQRT_TWO_SINGLE * sigma + EPSILON_SINGLE))
filt = torch.exp(-filt * filt)
filt /= torch.sum(filt)
num_channels = tensor.shape[1]
width = width + padding
if padding_mode == 'constant' and padding_value == 0:
other_padding = width
x = tensor
else:
# pad: (before, after) pairs starting from last dimension backward
x = F.pad(tensor,
(width, width, width, width),
mode=padding_mode,
value=padding_value)
other_padding = 0
padding = 0
x = F.conv2d(x,
filt.reshape((1, 1, -1, 1)).expand(num_channels, -1, -1, -1),
padding=(other_padding, padding),
stride=(stride, 1),
groups=num_channels)
x = F.conv2d(x,
filt.reshape((1, 1, 1, -1)).expand(num_channels, -1, -1, -1),
padding=(padding, other_padding),
stride=(1, stride),
groups=num_channels)
return x
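# Minimal sketch of imsmooth (sizes and sigma are assumptions): with the
# default zero padding the separable Gaussian keeps the spatial size.
# x = torch.rand(1, 1, 64, 64)
# y = imsmooth(x, sigma=4.0) # y.shape == (1, 1, 64, 64)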
class MaskGenerator:
r"""Mask generator.
The class takes as input the mask parameters and returns
as output a mask.
Args:
shape (tuple of int): output shape.
step (int): parameterization step in pixels.
sigma (float): kernel size.
clamp (bool, optional): whether to clamp the mask to [0,1]. Defaults to True.
pooling_method (str, optional): `'softmax'` (default), `'sum'`, `'sigmoid'`.
Attributes:
shape (tuple): the same as the specified :attr:`shape` parameter.
shape_in (tuple): spatial size of the parameter tensor.
shape_out (tuple): spatial size of the output mask including margin.
"""
def __init__(self, shape, step, sigma, clamp=True, pooling_method='softmax'):
self.shape = shape
self.step = step
self.sigma = sigma
self.coldness = 20
self.clamp = clamp
self.pooling_method = pooling_method
assert int(step) == step
# self.kernel = lambda z: (z < 1).float()
self.kernel = lambda z: torch.exp(-2 * ((z - .5).clamp(min=0)**2))
self.margin = self.sigma
# self.margin = 0
self.padding = 1 + math.ceil((self.margin + sigma) / step)
self.radius = 1 + math.ceil(sigma / step)
self.shape_in = [math.ceil(z / step) for z in self.shape]
self.shape_mid = [
z + 2 * self.padding - (2 * self.radius + 1) + 1
for z in self.shape_in
]
self.shape_up = [self.step * z for z in self.shape_mid]
self.shape_out = [z - step + 1 for z in self.shape_up]
self.weight = torch.zeros((
1,
(2 * self.radius + 1)**2,
self.shape_out[0],
self.shape_out[1]
))
step_inv = [
torch.tensor(zm, dtype=torch.float32) /
torch.tensor(zo, dtype=torch.float32)
for zm, zo in zip(self.shape_mid, self.shape_up)
]
for ky in range(2 * self.radius + 1):
for kx in range(2 * self.radius + 1):
uy, ux = torch.meshgrid(
torch.arange(self.shape_out[0], dtype=torch.float32),
torch.arange(self.shape_out[1], dtype=torch.float32)
)
iy = torch.floor(step_inv[0] * uy) + ky - self.padding
ix = torch.floor(step_inv[1] * ux) + kx - self.padding
delta = torch.sqrt(
(uy - (self.margin + self.step * iy))**2 +
(ux - (self.margin + self.step * ix))**2
)
k = ky * (2 * self.radius + 1) + kx
self.weight[0, k] = self.kernel(delta / sigma)
def generate(self, mask_in):
r"""Generate a mask.
The function takes as input a parameter tensor :math:`\bar m` for
:math:`K` masks, which is a :math:`K\times 1\times H_i\times W_i`
tensor where `H_i\times W_i` are given by :attr:`shape_in`.
Args:
mask_in (:class:`torch.Tensor`): mask parameters.
Returns:
tuple: a pair of mask, cropped and full. The cropped mask is a
:class:`torch.Tensor` with the same spatial shape :attr:`shape`
as specified upon creating this object. The second mask is the same,
but with an additional margin and shape :attr:`shape_out`.
"""
mask = F.unfold(mask_in,
(2 * self.radius + 1,) * 2,
padding=(self.padding,) * 2)
mask = mask.reshape(
len(mask_in), -1, self.shape_mid[0], self.shape_mid[1])
mask = F.interpolate(mask, size=self.shape_up, mode='nearest')
mask = F.pad(mask, (0, -self.step + 1, 0, -self.step + 1))
mask = self.weight * mask
if self.pooling_method == 'sigmoid':
if self.coldness == float('+Inf'):
mask = (mask.sum(dim=1, keepdim=True) - 5 > 0).float()
else:
mask = torch.sigmoid(
self.coldness * mask.sum(dim=1, keepdim=True) - 3
)
elif self.pooling_method == 'softmax':
if self.coldness == float('+Inf'):
mask = mask.max(dim=1, keepdim=True)[0]
else:
mask = (
mask * F.softmax(self.coldness * mask, dim=1)
).sum(dim=1, keepdim=True)
elif self.pooling_method == 'sum':
mask = mask.sum(dim=1, keepdim=True)
else:
assert False, f"Unknown pooling method {self.pooling_method}"
m = round(self.margin)
if self.clamp:
mask = mask.clamp(min=0, max=1)
cropped = mask[:, :, m:m + self.shape[0], m:m + self.shape[1]]
return cropped, mask
def to(self, dev):
"""Switch to another device.
Args:
dev: PyTorch device.
Returns:
MaskGenerator: self.
"""
self.weight = self.weight.to(dev)
return self
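# Minimal usage sketch for MaskGenerator (all sizes are assumptions):
# gen = MaskGenerator(shape=(224, 224), step=8, sigma=8.0)
# param = torch.zeros(1, 1, *gen.shape_in) # K=1 parameter tensor
# cropped, full = gen.generate(param) # cropped matches `shape`, full includes the margin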
prom2json.go | package prom2json
import (
"crypto/tls"
"fmt"
"io"
"mime"
"net/http"
"github.com/matttproud/golang_protobuf_extensions/pbutil"
"github.com/prometheus/common/expfmt"
"github.com/prometheus/log"
dto "github.com/prometheus/client_model/go"
)
const acceptHeader = `application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,text/plain;version=0.0.4;q=0.3`
// Family mirrors the MetricFamily proto message.
type Family struct {
//Time time.Time
Name string `json:"name"`
Help string `json:"help"`
Type string `json:"type"`
Metrics []interface{} `json:"metrics,omitempty"` // Either metric or summary.
}
// Metric is for all "single value" metrics, i.e. Counter, Gauge, and Untyped.
type Metric struct {
Labels map[string]string `json:"labels,omitempty"`
Value string `json:"value"`
}
// Summary mirrors the Summary proto message.
type Summary struct {
Labels map[string]string `json:"labels,omitempty"`
Quantiles map[string]string `json:"quantiles,omitempty"`
Count string `json:"count"`
Sum string `json:"sum"`
}
// Histogram mirrors the Histogram proto message.
type Histogram struct {
Labels map[string]string `json:"labels,omitempty"`
Buckets map[string]string `json:"buckets,omitempty"`
Count string `json:"count"`
Sum string `json:"sum"`
}
// NewFamily consumes a MetricFamily and transforms it to the local Family type.
func NewFamily(dtoMF *dto.MetricFamily) *Family {
mf := &Family{
//Time: time.Now(),
Name: dtoMF.GetName(),
Help: dtoMF.GetHelp(),
Type: dtoMF.GetType().String(),
Metrics: make([]interface{}, len(dtoMF.Metric)),
}
for i, m := range dtoMF.Metric {
if dtoMF.GetType() == dto.MetricType_SUMMARY {
mf.Metrics[i] = Summary{
Labels: makeLabels(m),
Quantiles: makeQuantiles(m),
Count: fmt.Sprint(m.GetSummary().GetSampleCount()),
Sum: fmt.Sprint(m.GetSummary().GetSampleSum()),
}
} else if dtoMF.GetType() == dto.MetricType_HISTOGRAM {
mf.Metrics[i] = Histogram{
Labels: makeLabels(m),
Buckets: makeBuckets(m),
Count: fmt.Sprint(m.GetHistogram().GetSampleCount()),
Sum: fmt.Sprint(m.GetHistogram().GetSampleSum()), // histogram sum, not summary
}
} else {
mf.Metrics[i] = Metric{
Labels: makeLabels(m),
Value: fmt.Sprint(getValue(m)),
}
}
}
return mf
}
func getValue(m *dto.Metric) float64 {
if m.Gauge != nil {
return m.GetGauge().GetValue()
}
if m.Counter != nil {
return m.GetCounter().GetValue()
}
if m.Untyped != nil {
return m.GetUntyped().GetValue()
}
return 0.
}
func makeLabels(m *dto.Metric) map[string]string {
result := map[string]string{}
for _, lp := range m.Label {
result[lp.GetName()] = lp.GetValue()
}
return result
}
func makeQuantiles(m *dto.Metric) map[string]string {
result := map[string]string{}
for _, q := range m.GetSummary().Quantile {
result[fmt.Sprint(q.GetQuantile())] = fmt.Sprint(q.GetValue())
}
return result
}
func makeBuckets(m *dto.Metric) map[string]string {
result := map[string]string{}
for _, b := range m.GetHistogram().Bucket {
result[fmt.Sprint(b.GetUpperBound())] = fmt.Sprint(b.GetCumulativeCount())
}
return result
}
// FetchMetricFamilies retrieves metrics from the provided URL, decodes them
// into MetricFamily proto messages, and sends them to the provided channel. It
// returns after all MetricFamilies have been sent.
func FetchMetricFamilies(
url string, ch chan<- *dto.MetricFamily,
certificate string, key string,
) {
defer close(ch)
var transport *http.Transport
if certificate != "" && key != "" {
cert, err := tls.LoadX509KeyPair(certificate, key)
if err != nil {
log.Fatal(err)
}
tlsConfig := &tls.Config{
Certificates: []tls.Certificate{cert},
}
tlsConfig.BuildNameToCertificate()
transport = &http.Transport{TLSClientConfig: tlsConfig}
} else {
transport = &http.Transport{}
}
client := &http.Client{Transport: transport}
decodeContent(client, url, ch)
}
func decodeContent(client *http.Client, url string, ch chan<- *dto.MetricFamily) {
req, err := http.NewRequest("GET", url, nil)
if err != nil {
log.Fatalf("creating GET request for URL %q failed: %s", url, err)
}
req.Header.Add("Accept", acceptHeader)
resp, err := client.Do(req)
if err != nil {
log.Fatalf("executing GET request for URL %q failed: %s", url, err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
log.Fatalf("GET request for URL %q returned HTTP status %s", url, resp.Status)
}
ParseResponse(resp, ch)
}
// ParseResponse consumes an http.Response and pushes it to the MetricFamily
// channel. It returns when all MetricFamilies are parsed and put on the
// channel.
func ParseResponse(resp *http.Response, ch chan<- *dto.MetricFamily) {
mediatype, params, err := mime.ParseMediaType(resp.Header.Get("Content-Type"))
if err == nil && mediatype == "application/vnd.google.protobuf" &&
params["encoding"] == "delimited" &&
params["proto"] == "io.prometheus.client.MetricFamily" {
for {
mf := &dto.MetricFamily{}
if _, err = pbutil.ReadDelimited(resp.Body, mf); err != nil {
if err == io.EOF {
break
}
log.Fatalln("reading metric family protocol buffer failed:", err)
}
ch <- mf
}
} else {
// We could do further content-type checks here, but the
// fallback for now will anyway be the text format
// version 0.0.4, so just go for it and see if it works.
var parser expfmt.TextParser
metricFamilies, err := parser.TextToMetricFamilies(resp.Body)
if err != nil {
log.Fatalln("reading text format failed:", err)
}
for _, mf := range metricFamilies {
ch <- mf
}
}
}
// AddLabel allows to add key/value labels to an already existing Family.
func (f *Family) AddLabel(key, val string) {
for i, item := range f.Metrics {
switch item.(type) {
case Metric:
m := item.(Metric)
m.Labels[key] = val
f.Metrics[i] = m
}
}
}
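// Illustrative usage sketch (the URL is an assumption):
// ch := make(chan *dto.MetricFamily, 1024)
// go FetchMetricFamilies("http://localhost:9100/metrics", ch, "", "")
// for mf := range ch {
// fmt.Println(NewFamily(mf).Name)
// }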
parsers.rs | use std::mem::size_of;
use anyhow::Context;
use byteorder::{ByteOrder, NativeEndian, BigEndian};
use crate::DecodeError;
pub fn parse_mac(payload: &[u8]) -> Result<[u8; 6], DecodeError> {
if payload.len() != 6 {
return Err(format!("invalid MAC address: {:?}", payload).into());
}
let mut address: [u8; 6] = [0; 6];
for (i, byte) in payload.iter().enumerate() {
address[i] = *byte;
}
Ok(address)
}
pub fn parse_ipv6(payload: &[u8]) -> Result<[u8; 16], DecodeError> {
if payload.len() != 16 {
return Err(format!("invalid IPv6 address: {:?}", payload).into());
}
let mut address: [u8; 16] = [0; 16];
for (i, byte) in payload.iter().enumerate() {
address[i] = *byte;
}
Ok(address)
}
pub fn parse_string(payload: &[u8]) -> Result<String, DecodeError> {
if payload.is_empty() {
return Ok(String::new());
}
// iproute2 is a bit inconsistent with null-terminated strings.
let slice = if payload[payload.len() - 1] == 0 {
&payload[..payload.len() - 1]
} else {
&payload[..payload.len()]
};
let s = String::from_utf8(slice.to_vec()).context("invalid string")?;
Ok(s)
}
pub fn parse_u8(payload: &[u8]) -> Result<u8, DecodeError> {
if payload.len() != 1 {
return Err(format!("invalid u8: {:?}", payload).into());
}
Ok(payload[0])
}
pub fn parse_u32(payload: &[u8]) -> Result<u32, DecodeError> {
if payload.len() != size_of::<u32>() {
return Err(format!("invalid u32: {:?}", payload).into());
}
Ok(NativeEndian::read_u32(payload))
}
pub fn parse_u64(payload: &[u8]) -> Result<u64, DecodeError> {
if payload.len() != size_of::<u64>() {
return Err(format!("invalid u64: {:?}", payload).into());
}
Ok(NativeEndian::read_u64(payload))
}
pub fn parse_u16(payload: &[u8]) -> Result<u16, DecodeError> {
if payload.len() != size_of::<u16>() {
return Err(format!("invalid u16: {:?}", payload).into());
}
Ok(NativeEndian::read_u16(payload))
}
pub fn parse_i32(payload: &[u8]) -> Result<i32, DecodeError> {
if payload.len() != 4 {
return Err(format!("invalid u32: {:?}", payload).into());
}
Ok(NativeEndian::read_i32(payload))
}
pub fn parse_u16_be(payload: &[u8]) -> Result<u16, DecodeError> {
if payload.len() != size_of::<u16>() {
return Err(format!("invalid u16: {:?}", payload).into());
}
Ok(BigEndian::read_u16(payload))
}
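// Illustrative usage sketch (byte values are assumptions):
// let raw = [0x12u8, 0x34];
// assert_eq!(parse_u16(&raw).unwrap(), u16::from_ne_bytes([0x12, 0x34]));
// assert_eq!(parse_u16_be(&raw).unwrap(), 0x1234);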
jquery.pager.js | /*
* jQuery pager plugin
* Version 1.0 (12/22/2008)
* @requires jQuery v1.2.6 or later
*
* Example at: http://jonpauldavies.github.com/JQuery/Pager/PagerDemo.html
*
* Copyright (c) 2008-2009 Jon Paul Davies
* Dual licensed under the MIT and GPL licenses:
* http://www.opensource.org/licenses/mit-license.php
* http://www.gnu.org/licenses/gpl.html
*
* Read the related blog post and contact the author at http://www.j-dee.com/2008/12/22/jquery-pager-plugin/
*
* This version is far from perfect and doesn't manage its own state, therefore contributions are more than welcome!
*
* Usage: .pager({ pagenumber: 1, pagecount: 15, buttonClickCallback: PagerClickTest });
*
* Where pagenumber is the visible page number
* pagecount is the total number of pages to display
* buttonClickCallback is the method to fire when a pager button is clicked.
*
* buttonClickCallback signature is PagerClickTest = function(pageclickednumber)
* Where pageclickednumber is the number of the page clicked in the control.
*
* The included Pager.CSS file is a dependency but can obviously be tweaked to your wishes
* Tested in IE6 IE7 Firefox & Safari. Any browser strangeness, please report.
*/
(function($) {
$.fn.pager = function(options) {
var opts = $.extend({}, $.fn.pager.defaults, options);
return this.each(function() {
// empty out the destination element and then render out the pager with the merged options (opts applies the defaults)
$(this).empty().append(renderpager(parseInt(opts.pagenumber), parseInt(opts.pagecount), opts.buttonClickCallback));
// specify correct cursor activity
$('.pages li').mouseover(function() { document.body.style.cursor = "pointer"; }).mouseout(function() { document.body.style.cursor = "auto"; });
});
};
// render and return the pager with the supplied options
function renderpager(pagenumber, pagecount, buttonClickCallback) {
// setup $pager to hold render
var $pager = $('<ul class="pages"></ul>');
// add in the previous and next buttons
$pager.append(renderButton('首页', pagenumber, pagecount, buttonClickCallback)).append(renderButton('上一页', pagenumber, pagecount, buttonClickCallback));
// pager currently only handles 10 viewable pages ( could be easily parameterized, maybe in next version ) so handle edge cases
var startPoint = 1;
var endPoint = 9;
if (pagenumber > 4) {
startPoint = pagenumber - 4;
endPoint = pagenumber + 4;
}
if (endPoint > pagecount) {
startPoint = pagecount - 8;
endPoint = pagecount;
}
if (startPoint < 1) {
startPoint = 1;
}
// loop thru visible pages and render buttons
for (var page = startPoint; page <= endPoint; page++) {
var currentButton = $('<li class="page-number">' + (page) + '</li>');
page == pagenumber ? currentButton.addClass('pgCurrent') : currentButton.click(function() { buttonClickCallback(this.firstChild.data); });
currentButton.appendTo($pager);
}
// render in the next and last buttons before returning the whole rendered control back.
$pager.append(renderButton('下一页', pagenumber, pagecount, buttonClickCallback)).append(renderButton('末页', pagenumber, pagecount, buttonClickCallback));
return $pager;
}
// renders and returns a 'specialized' button, ie 'next', 'previous' etc. rather than a page number button
function renderButton(buttonLabel, pagenumber, pagecount, buttonClickCallback) {
var $Button = $('<li class="pgNext">' + buttonLabel + '</li>');
var destPage = 1;
// work out destination page for required button type
switch (buttonLabel) {
case "首页":
destPage = 1;
break;
case "上一页":
destPage = pagenumber - 1;
break;
case "下一页":
destPage = pagenumber + 1;
break;
case "末页":
destPage = pagecount;
break;
}
// disable and 'grey' out buttons if not needed.
if (buttonLabel == "首页" || buttonLabel == "上一页") {
pagenumber <= 1 ? $Button.addClass('pgEmpty') : $Button.click(function() { buttonClickCallback(destPage); });
}
else {
pagenumber >= pagecount ? $Button.addClass('pgEmpty') : $Button.click(function() { buttonClickCallback(destPage); });
}
return $Button;
}
// pager defaults. hardly worth bothering with in this case but used as placeholder for expansion in the next version
$.fn.pager.defaults = {
pagenumber: 1,
pagecount: 1
};
})(jQuery);
celery.py |
import os
from celery import Celery
from django.apps import apps, AppConfig
from django.conf import settings
if not settings.configured:
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover
app = Celery('ticketflix')
class CeleryAppConfig(AppConfig):
name = 'ticketflix.taskapp'
verbose_name = 'Celery Config'
def ready(self):
# Using a string here means the worker will not have to
# pickle the object when using Windows.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
installed_apps = [app_config.name for app_config in apps.get_app_configs()]
app.autodiscover_tasks(lambda: installed_apps, force=True)
if hasattr(settings, 'RAVEN_CONFIG'):
# Celery signal registration
# Since raven is required in production only,
# imports might (most surely will) be wiped out
# during PyCharm code clean up started
# in other environments.
# @formatter:off
from raven import Client as RavenClient
from raven.contrib.celery import register_signal as raven_register_signal
from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
# @formatter:on
raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
raven_register_logger_signal(raven_client)
raven_register_signal(raven_client)
@app.task(bind=True)
def debug_task(self):
print(f'Request: {self.request!r}') # pragma: no cover
DDR.py | import ok
from fpga import FPGA
import numpy as np
import time
import matplotlib.pyplot as plot
import struct
from collections import namedtuple
BLOCK_SIZE = (16384)
WRITE_SIZE=(8*1024*1024)
READ_SIZE = (8*1024*1024)
g_nMemSize = (8*1024*1024)
sample_size = (524288)
ep = namedtuple('ep', 'addr bits type')
control = ep(0x00, [i for i in range(32)], 'wi') # note this is active low
# wire outs for "1 deep FIFO"
one_deep_fifo = ep(0x20, [i for i in range(32)], 'wo')
# triggers in
valid = ep(0x40, 0, 'ti')
fpga_reset = ep(0x40, 1, 'ti')
fifo_reset = ep(0x40, 2, 'ti')
#given the amplitude, and the time between each step, returns array to be plotted
def make_flat_voltage(input_voltage):
time_axis = np.arange (0, np.pi*2 , (1/sample_size*2*np.pi) )
amplitude = np.arange (0, np.pi*2 , (1/sample_size*2*np.pi) )
for x in range (len(amplitude)):
amplitude[x] = input_voltage
amplitude = amplitude.astype(np.int32)
return time_axis, amplitude
#Given the amplitude and period, returns an array to be plotted
def make_sin_wave(amplitude_shift, frequency_shift=16):
time_axis = np.arange (0, np.pi*2 , (1/sample_size*2*np.pi) )
print ("length of time axis after creation ", len(time_axis))
amplitude = (amplitude_shift*1000*np.sin(time_axis))
y = len(amplitude)
for x in range (y):
amplitude[x]= amplitude[x]+(10000)
for x in range (y):
amplitude[x]= (int)(amplitude[x]/20000*16384)
amplitude = amplitude.astype(np.int32)
return time_axis, amplitude
#given a buffer, it writes a bytearray to the DDR3
def writeSDRAM(g_buf):
print ("Length of buffer at the top of WriteSDRAM", len(g_buf))
#Reset FIFOs
f.set_wire(0x30, 4)
f.set_wire(0x03, 0)
f.set_wire(0x03, 2)
print ("Writing to DDR...")
time1 = time.time()
#for i in range ((int)(len(g_buf)/WRITE_SIZE)):
r = f.xem.WriteToBlockPipeIn( epAddr= 0x80, blockSize= BLOCK_SIZE,
data= g_buf[0:(len(g_buf))])
print ("The length of the write is ", r)
time2 = time.time()
time3 = (time2-time1)
mbs = (int)(r/1024/1024/ time3)
print ("The speed of the write was ", mbs, " MegaBytes per second")
#below sets the HDL into read mode
f.set_wire(0x03, 4)
f.set_wire(0x03, 0)
f.set_wire(0x03, 1)
#reads to an empty array passed to the function
def readSDRAM():
amplitude = np.zeros((sample_size,), dtype=int)
pass_buf = bytearray(amplitude)
#Reset FIFOs
#below sets the HDL into read mode
f.set_wire(0x03, 4)
f.set_wire(0x03, 0)
f.set_wire(0x03, 1)
print ("Reading from DDR...")
#Address changed to A5
for i in range ((int)(g_nMemSize/WRITE_SIZE)):
r = f.xem.ReadFromBlockPipeOut( epAddr= 0xA0, blockSize= BLOCK_SIZE,
data= pass_buf)
print ("The length of the read is:", r)
return pass_buf
#given a buffer, it unpacks into into human readable float values
def unpack(buf):
unpacked_var = []
for x in range (sample_size):
unpacked_var.append(struct.unpack('i', buf[(x*4):((x+1)*4)]))
return unpacked_var
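# Illustrative round trip for unpack (values are assumptions):
# data = np.arange(sample_size, dtype=np.int32)
# vals = unpack(bytearray(data)) # list of 1-tuples from struct.unpack
# vals[3][0] == 3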
#Given two arrays, plots the x and y axis with hardcoded axis names
def testplot(x_axis, y_axis):
plot.plot(x_axis, y_axis)
plot.title('The outputted wave should look like this')
plot.xlabel('time')
plot.ylabel('amplitude (millivolts)')
plot.grid(True, which = 'both')
plot.axhline(y=0, color = 'k')
plot.show()
#given an amplitude and a period, it will write a waveform to the DDR3
def write_sin_wave (a):
time_axis, g_buf_init = make_sin_wave(a)
print ("The length of the array before casting ", len(g_buf_init))
pass_buf = bytearray(g_buf_init)
writeSDRAM(pass_buf)
#given and amplitude and a period, it will write a step function to the DDR3
def write_flat_voltage(input_voltage):
time_axis, g_buf_init = make_flat_voltage(input_voltage)
pass_buf2 = bytearray(g_buf_init)
writeSDRAM(pass_buf2)
#Reads and prints the contents of the DDR3
def print_DDR3():
g_rbuf = readSDRAM()
unpacked_g_rbuf = np.array(unpack(g_rbuf)).astype('float64')
for x in range (len(unpacked_g_rbuf)):
unpacked_g_rbuf[x] = (unpacked_g_rbuf[x]/1000)
testplot(np.arange (0, sample_size, 1), unpacked_g_rbuf)
def send_trig(ep_bit):
'''
expects a single bit, not yet implement for list of bits
'''
f.xem.ActivateTriggerIn(ep_bit.addr, ep_bit.bits)
if __name__ == "__main__":
f = FPGA(bitfile = '728.bit')
if (False == f.init_device()):
raise SystemExit
#Wait for the configuration
time.sleep(3)
factor = (int)(sample_size/8)
f.xem.SetWireInValue(0x04, factor)
#f.xem.SetWireInValue(0x04, 0xFF)
f.xem.UpdateWireIns()
#Sample rate speed, to bits 18:9
f.xem.SetWireInValue(0x02, 0x0000A000, 0x0003FF00 )
f.xem.UpdateWireIns()
write_sin_wave(2)
f.xem.WriteRegister(0x80000010, 0x00003410)
f.xem.ActivateTriggerIn(0x40, 8)
#f.xem.UpdateWireOuts()
#print (f.xem.GetWireOutValue(0x3E))
'''
time.sleep(2)
dacs = [1,2,3,4]
# SPI Master configuration: divide reg, ctrl reg, SS register
# MSB: 8 - set address, 4 - write data
# creg_val = 0x40003610 # Char length of 16; set both Tx_NEG, Rx_NEG; set ASS, IE. ADS7952
creg_val = 0x40003010 # Char length of 16; clear both Tx_NEG, Rx_NEG; set ASS, IE. AD5453
# val = 0x40001fff # AD5453 (half-scale)
for val in [0x80000051, 0x40000013, # divider (need to look into settings of 1 and 2 didn't show 16 clock cycles)
0x80000041, creg_val, # control register (CHAR_LEN = 16, bits 10,9, 13 and 12)
0x80000061, 0x40000001]: # slave select (just setting bit0)
f.set_wire(0x00, val, mask = 0xffffffff)
send_trig(valid)
# now send SPI command
value = 0x40003FFF # AD5453 (half-scale)
for val in [0x80000001, value, # Tx register, data to send
0x80000041, creg_val | (1 << 8)]: # Control register - GO (bit 8)
f.set_wire(0x00, val, mask = 0xffffffff)
send_trig(valid)
'''
binary_dictionary.rs | use std::fs::File;
use std::io::{BufRead, BufReader, Error as IOError, Seek};
use std::path::Path;
use thiserror::Error;
use super::dictionary_header::{DictionaryHeader, DictionaryHeaderErr};
use super::double_array_lexicon::DoubleArrayLexicon;
use super::grammar::Grammar;
use super::lexicon::LexiconErr;
use super::system_dictionary_version::{
SYSTEM_DICT_VERSION, USER_DICT_VERSION_1, USER_DICT_VERSION_2,
};
#[derive(Error, Debug)]
pub enum ReadDictionaryErr {
#[error("invalid dictionary version")]
InvalidDictionaryVersionErr,
#[error("invalid system dictionary")]
InvalidSystemDictionaryErr,
#[error("invalid user dictionary")]
InvalidUserDictionaryErr,
#[error("not found grammar")]
NotFoundGrammarErr,
#[error("{0}")]
IOError(#[from] IOError),
#[error("{0}")]
DictionaryHeaderErr(#[from] DictionaryHeaderErr),
#[error("{0}")]
LexiconErr(#[from] LexiconErr),
}
pub struct BinaryDictionary {
pub grammar: Grammar,
header: DictionaryHeader,
pub lexicon: DoubleArrayLexicon,
}
impl BinaryDictionary {
fn new(
grammar: Grammar,
header: DictionaryHeader,
lexicon: DoubleArrayLexicon,
) -> BinaryDictionary {
BinaryDictionary {
grammar,
header,
lexicon,
}
}
pub fn read_dictionary_from_reader<R: Seek + BufRead>(
reader: &mut R,
) -> Result<BinaryDictionary, ReadDictionaryErr> {
let header = DictionaryHeader::from_reader(reader)?;
if SYSTEM_DICT_VERSION != header.version
&& USER_DICT_VERSION_1 != header.version
&& USER_DICT_VERSION_2 != header.version
{
return Err(ReadDictionaryErr::InvalidDictionaryVersionErr);
}
if header.version == USER_DICT_VERSION_1 {
return Err(ReadDictionaryErr::NotFoundGrammarErr);
}
let grammar = Grammar::from_reader(reader)?;
let lexicon = DoubleArrayLexicon::from_reader(reader)?;
Ok(BinaryDictionary::new(grammar, header, lexicon))
}
pub fn from_system_dictionary<P: AsRef<Path>>(
filename: P,
) -> Result<BinaryDictionary, ReadDictionaryErr> {
let mut reader = BufReader::new(File::open(filename)?);
let dictionary = BinaryDictionary::read_dictionary_from_reader(&mut reader)?;
if dictionary.header.version != SYSTEM_DICT_VERSION {
return Err(ReadDictionaryErr::InvalidSystemDictionaryErr);
}
Ok(dictionary)
}
pub fn from_user_dictionary<P: AsRef<Path>>(
filename: P,
) -> Result<BinaryDictionary, ReadDictionaryErr> {
let mut reader = BufReader::new(File::open(filename)?);
let dictionary = BinaryDictionary::read_dictionary_from_reader(&mut reader)?;
if USER_DICT_VERSION_1 != dictionary.header.version
&& USER_DICT_VERSION_2 != dictionary.header.version
{
return Err(ReadDictionaryErr::InvalidUserDictionaryErr);
}
Ok(dictionary)
}
}
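// Illustrative usage sketch (the file path is an assumption):
// match BinaryDictionary::from_system_dictionary("system.dic") {
// Ok(dict) => { /* dict.grammar and dict.lexicon are ready to use */ }
// Err(ReadDictionaryErr::InvalidSystemDictionaryErr) => eprintln!("not a system dictionary"),
// Err(e) => eprintln!("failed to load: {}", e),
// }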
pandas.py | from . import get_connector, get_engine
import pandas as pd
def get_dataframe(table_name, limit=None):
# limit query
limit_query=""
if limit:
limit_query="limit {}".format(limit)
# create query
query = "SELECT * FROM {} {}".format(table_name, limit_query)
# get dataframe from sql query
df = pd.read_sql(query, con=get_connector())
return df
def dataframe_to_table(df, table_name):
df.to_sql(table_name,
con=get_engine(),
index=False,
if_exists='replace')
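# Illustrative usage (table names are assumptions):
# df = get_dataframe('users', limit=100) # SELECT * FROM users limit 100
# dataframe_to_table(df, 'users_copy') # replaces users_copy if it exists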
med_kikisu.rs | /**
* Attack (kiki) counts
*/
use consoles::asserts::*;
use kifuwarabe_position::*;
use memory::number_board::*;
use movement_thinks::*;
use std::collections::HashSet;
/**
* Scans the board for squares that are attacked.
*
* Uses: preventing suicide moves, among other things.
*
* TODO: switch to incremental (differential) updates.
*/
pub fn refresh_kikisu(gen_ky: &Position) -> (
[NumberBoard; Sengo::Num as usize],
[NumberBoard; Koma::Num as usize]
){
// attack counts (per side)
let mut local_kiki_su_by_sn : [NumberBoard; Sengo::Num as usize] = [
NumberBoard::new(), NumberBoard::new(),
];
// attack counts (per piece kind, with side attached)
// one board per piece kind, so there are 30 of them
let mut local_kiki_su_by_km : [NumberBoard; Koma::Num as usize] = [
NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(),
NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(),
NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(),
NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(),
NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(),
NumberBoard::new(), NumberBoard::new(), NumberBoard::new(), NumberBoard::new(),
];
// count
for km_dst in &KM_ARRAY
{
for x in SUJI_1..SUJI_10 {// writing this in descending order like 9..0 doesn't work?
for y in DAN_1..DAN_10 {
let ms_dst = suji_dan_to_ms( x, y );
assert_banjo_ms(ms_dst,"think 利き調べ");
// 移動元の升
let mut mv_src_hashset : HashSet<umasu> = HashSet::new();
insert_narazu_src_by_ms_km (&gen_ky, ms_dst, *km_dst, &mut mv_src_hashset);
insert_narumae_src_by_ms_km (&gen_ky, ms_dst, *km_dst, &mut mv_src_hashset);
// drops are not considered, since these are on-board attack counts
let kikisu = mv_src_hashset.len();
let sn = km_to_sn( *km_dst);
// per piece
local_kiki_su_by_km[*km_dst as usize].add_su_by_ms( ms_dst, kikisu as i8 );
// per side
local_kiki_su_by_sn[sn as usize].add_su_by_ms( ms_dst, kikisu as i8 );
}
}
}
(local_kiki_su_by_sn, local_kiki_su_by_km)
}
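// Illustrative usage sketch: given a Position `pos`,
// let (by_side, by_piece) = refresh_kikisu(&pos);
// by_side[sn as usize] and by_piece[km as usize] then hold per-square attack counts.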
|
main.rs | #![type_length_limit = "1232619"]
mod generator;
mod metrics;
mod parsers;
mod tests;
use graph_generator_lib::*;
use grapl_config::{env_helpers::{s3_event_emitters_from_env,
FromEnv},
*};
use grapl_observe::metric_reporter::MetricReporter;
use grapl_service::serialization::GraphDescriptionSerializer;
use log::*;
use rusoto_sqs::SqsClient;
use sqs_executor::{make_ten,
s3_event_emitter::S3ToSqsEventNotifier,
s3_event_retriever::S3PayloadRetriever,
time_based_key_fn};
use crate::{generator::OSQuerySubgraphGenerator,
metrics::OSQuerySubgraphGeneratorMetrics};
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let (env, _guard) = grapl_config::init_grapl_env!();
info!("Starting generic-subgraph-generator");
let sqs_client = SqsClient::from_env();
let cache = &mut event_caches(&env).await;
let metrics = OSQuerySubgraphGeneratorMetrics::new(&env.service_name);
let osquery_subgraph_generator =
&mut make_ten(async { OSQuerySubgraphGenerator::new(cache[0].clone(), metrics.clone()) })
.await;
let serializer = &mut make_ten(async { GraphDescriptionSerializer::default() }).await;
let s3_emitter =
&mut s3_event_emitters_from_env(&env, time_based_key_fn, S3ToSqsEventNotifier::from(&env))
.await;
let s3_payload_retriever = &mut make_ten(async {
S3PayloadRetriever::new(
|region_str| grapl_config::env_helpers::init_s3_client(&region_str),
grapl_service::decoder::NdjsonDecoder::default(),
MetricReporter::new(&env.service_name),
)
})
.await;
info!("Starting process_loop");
sqs_executor::process_loop(
grapl_config::source_queue_url(),
grapl_config::dead_letter_queue_url(),
cache,
sqs_client.clone(),
osquery_subgraph_generator,
s3_payload_retriever,
s3_emitter,
serializer,
MetricReporter::new(&env.service_name),
)
.await;
info!("Exiting");
Ok(())
}
| main |
cram.go | package main
import (
"bytes"
"fmt"
"io/ioutil"
"os"
"strings"
)
// - read file
// - replace newlines with tabs (cram) or tabs with newlines (uncram)
// - write file, appending or stripping the .cram extension
func main() {
if len(os.Args) < 2 {
fmt.Println("Error: needs a filepath")
os.Exit(1) // exit non-zero on error
}
name := os.Args[1]
og, err := ioutil.ReadFile(name)
check(err)
err = os.Remove(name)
check(err)
var d []byte
if bytes.Count(og, []byte{'\n'}) < 1 {
d = uncram(og)
name = strings.Replace(name, ".cram", "", -1)
} else {
d = cram(og)
name = name + ".cram"
}
err = ioutil.WriteFile(name, d, os.ModePerm)
check(err)
}
func cram(og []byte) []byte {
return bytes.Replace(og, []byte{'\n'}, []byte{'\t'}, -1)
}
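// Round-trip sketch with a hypothetical input: cram([]byte("a\nb\nc")) yields
// []byte("a\tb\tc"), and uncram reverses it, so uncram(cram(x)) restores x
// for any input that contains no literal tab characters.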
func uncram(og []byte) []byte |
func check(err error) {
if err != nil {
fmt.Println(err.Error())
os.Exit(1) // exit non-zero on error
}
}
| {
return bytes.Replace(og, []byte{'\t'}, []byte{'\n'}, -1)
} |
auth.js | // The client ID is obtained from the Google Developers Console
// at https://console.developers.google.com/.
// If you run this code from a server other than http://localhost,
// you need to register your own client ID.
var OAUTH2_CLIENT_ID = '483640587877-ghsrf99ff5lrdm5tk4ua08astbbrrja5.apps.googleusercontent.com';
var OAUTH2_SCOPES = [
'https://www.googleapis.com/auth/youtube'
];
// Upon loading, the Google APIs JS client automatically invokes this callback.
googleApiClientReady = function() {
gapi.auth.init(function() {
window.setTimeout(checkAuth, 1);
});
};
// Attempt the immediate OAuth 2.0 client flow as soon as the page loads.
// If the currently logged-in Google Account has previously authorized
// the client specified as the OAUTH2_CLIENT_ID, then the authorization
// succeeds with no user intervention. Otherwise, it fails and the
// user interface that prompts for authorization needs to display.
function checkAuth() {
gapi.auth.authorize({
client_id: OAUTH2_CLIENT_ID,
scope: OAUTH2_SCOPES,
immediate: true
}, handleAuthResult);
}
// Handle the result of a gapi.auth.authorize() call.
function handleAuthResult(authResult) {
if (authResult && !authResult.error) {
// Authorization was successful. Hide authorization prompts and show
// content that should be visible after authorization succeeds.
$('.pre-auth').hide();
$('.post-auth').show();
loadAPIClientInterfaces();
} else {
// Make the #login-link clickable. Attempt a non-immediate OAuth 2.0
// client flow. The current function is called when that flow completes.
$('#login-link').click(function() {
gapi.auth.authorize({
client_id: OAUTH2_CLIENT_ID,
scope: OAUTH2_SCOPES,
immediate: false
}, handleAuthResult);
});
}
}
// Load the client interfaces for the YouTube Analytics and Data APIs, which
// are required to use the Google APIs JS client. More info is available at
// http://code.google.com/p/google-api-javascript-client/wiki/GettingStarted#Loading_the_Client
function | () {
gapi.client.load('youtube', 'v3', function() {
handleAPILoaded();
});
} | loadAPIClientInterfaces |
DeliverableGridSelect.js | import React, { useEffect, useState } from "react";
import Grid from "@mui/material/Grid";
import { useSelector } from "react-redux";
import DeliverablesSkeleton from "./components/DeliverablesSkeleton";
import { Stack } from "@mui/material";
import Link from "@mui/material/Link";
import { DataStore, Predicates, SortDirection } from "aws-amplify";
import * as models from "../../models/index";
import { dataStoreReadyStatusSelector } from "../../redux/Selectors";
import { convertListDataToObject } from "../../utilities"; | import EditableDeliverable from "./components/EditableDeliverable";
import AddableDeliverable from "./components/AddableDeliverable";
import _ from "lodash";
import GetError from "../../ErrorComponents/GetError";
const initialDeliverablesSortedState = {
deliverables: [],
defaults: [],
};
function DeliverableGridSelect(props) {
const [deliverablesSorted, setDeliverablesSorted] = useState(
initialDeliverablesSortedState
);
const [state, setState] = useState({});
const [errorState, setErrorState] = useState(null);
const [truncated, setTruncated] = useState(true);
const [availableDeliverables, setAvailableDeliverables] = useState({});
const dataStoreReadyStatus = useSelector(dataStoreReadyStatusSelector);
const [isFetching, setIsFetching] = useState(false);
function convertExistingDeliverablesToState() {
const result = {};
for (const d of props.deliverables) {
const deliverableType = availableDeliverables[d.deliverableType.id];
result[d.deliverableType.id] = {
count: d.count,
id: d.deliverableType.id,
label: deliverableType ? deliverableType.label : "",
createdAt: d.createdAt,
unit: d.unit,
orderInGrid: d.orderInGrid,
icon: deliverableType ? deliverableType.icon : "",
};
}
setState(result);
}
useEffect(convertExistingDeliverablesToState, [
props.deliverables,
availableDeliverables,
]);
function sortDeliverables() {
const result = {
deliverables: [],
defaults: [],
};
for (const i of Object.values(availableDeliverables)) {
const value = state[i.id];
if (value) {
result.deliverables.push(value);
} else {
result.defaults.push(i);
}
}
result.deliverables = result.deliverables.sort(
(a, b) => parseInt(a.orderInGrid) - parseInt(b.orderInGrid)
);
setDeliverablesSorted(result);
}
useEffect(sortDeliverables, [availableDeliverables, state]);
async function getAvailableDeliverables() {
if (!dataStoreReadyStatus) {
setIsFetching(true);
} else {
try {
const availableDeliverablesResult = await DataStore.query(
models.DeliverableType,
Predicates.ALL,
{
sort: (s) => s.createdAt(SortDirection.ASCENDING),
}
);
setAvailableDeliverables(
convertListDataToObject(availableDeliverablesResult)
);
setIsFetching(false);
} catch (e) {
setErrorState(e);
console.log(e);
}
}
}
useEffect(() => getAvailableDeliverables(), []);
function onAddNewDeliverable(deliverable) {
let orderInGrid = 0;
for (const d of Object.values(state)) {
// take one past the highest existing position in the grid
if (d.orderInGrid >= orderInGrid) {
orderInGrid = parseInt(d.orderInGrid) + 1;
}
}
setState((prevState) => ({
...prevState,
[deliverable.id]: {
...deliverable,
orderInGrid,
},
}));
const {
createdAt,
updatedAt,
icon,
_lastChangedAt,
_deleted,
_version,
...rest
} = deliverable;
props.onChange({ ...rest, orderInGrid });
}
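// Example with a hypothetical state: given existing orderInGrid values 0 and
// 2, the loop above settles on 3, so the new deliverable sorts after both.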
function onChangeUnit(deliverableId, unit) {
const existing = state[deliverableId];
if (existing) {
setState((prevState) => ({
...prevState,
[deliverableId]: { ...prevState[deliverableId], unit },
}));
}
props.onChange({ id: deliverableId, unit });
}
const onChangeCount = (deliverableId, count) => {
const existing = state[deliverableId];
if (existing) {
setState((prevState) => ({
...prevState,
[deliverableId]: { ...prevState[deliverableId], count },
}));
}
props.onChange({ id: deliverableId, count });
};
function onDelete(deliverableId) {
setState((prevState) => _.omit(prevState, deliverableId));
props.onDelete(deliverableId);
}
useEffect(
() => setTruncated(Object.values(availableDeliverables).length > 5),
[availableDeliverables]
);
if (!!errorState) {
return <GetError />;
} else if (isFetching) {
return <DeliverablesSkeleton />;
} else {
let count = 0;
return (
<Stack
spacing={
deliverablesSorted.deliverables.length > 0 &&
deliverablesSorted.defaults.length > 0
? 5
: 0
}
justifyContent={"flex-start"}
direction={"column"}
>
<Stack spacing={1} direction={"column"}>
{deliverablesSorted.deliverables.map((deliverable) => {
count++;
if (count > 5 && truncated) {
return (
<React.Fragment
key={deliverable.id}
></React.Fragment>
);
} else {
return (
<EditableDeliverable
key={deliverable.id}
onChangeCount={onChangeCount}
onChangeUnit={onChangeUnit}
onDelete={onDelete}
deliverable={deliverable}
/>
);
}
})}
</Stack>
<Stack spacing={1} direction={"column"}>
{deliverablesSorted.defaults.map((deliverableType) => {
count++;
if (count > 5 && truncated) {
return (
<React.Fragment
key={deliverableType.id}
></React.Fragment>
);
} else {
return (
<AddableDeliverable
key={deliverableType.id}
onAdd={onAddNewDeliverable}
deliverableType={deliverableType}
/>
);
}
})}
</Stack>
<Link
href="#"
onClick={(e) => {
setTruncated((prevState) => !prevState);
e.preventDefault();
}}
color="inherit"
>
{truncated ? "More..." : "Less..."}
</Link>
</Stack>
);
}
}
DeliverableGridSelect.propTypes = {
deliverables: PropTypes.arrayOf(PropTypes.object),
onChange: PropTypes.func,
onDelete: PropTypes.func,
};
DeliverableGridSelect.defaultProps = {
deliverables: [],
onChange: () => {},
onDelete: () => {},
};
export default DeliverableGridSelect; | import PropTypes from "prop-types"; |
rootmapping_fs_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugofs
import (
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/spf13/viper"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/htesting"
"github.com/spf13/afero"
)
func TestLanguageRootMapping(t *testing.T) {
c := qt.New(t)
v := viper.New()
v.Set("contentDir", "content")
fs := NewBaseFileDecorator(afero.NewMemMapFs())
c.Assert(afero.WriteFile(fs, filepath.Join("content/sv/svdir", "main.txt"), []byte("main sv"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "sv-f.txt"), []byte("some sv blog content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", "en-f.txt"), []byte("some en blog content in a"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myotherenblogcontent", "en-f2.txt"), []byte("some en content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvdocs", "sv-docs.txt"), []byte("some sv docs content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/b/myenblogcontent", "en-b-f.txt"), []byte("some en content"), 0755), qt.IsNil)
rfs, err := NewRootMappingFs(fs,
RootMapping{
From: "content/blog", // Virtual path, first element is one of content, static, layouts etc.
To: "themes/a/mysvblogcontent", // Real path
Meta: FileMeta{"lang": "sv"},
},
RootMapping{
From: "content/blog",
To: "themes/a/myenblogcontent",
Meta: FileMeta{"lang": "en"},
},
RootMapping{
From: "content/blog",
To: "content/sv",
Meta: FileMeta{"lang": "sv"},
},
RootMapping{
From: "content/blog",
To: "themes/a/myotherenblogcontent",
Meta: FileMeta{"lang": "en"},
},
RootMapping{
From: "content/docs",
To: "themes/a/mysvdocs",
Meta: FileMeta{"lang": "sv"},
},
)
c.Assert(err, qt.IsNil)
collected, err := collectFilenames(rfs, "content", "content")
c.Assert(err, qt.IsNil)
c.Assert(collected, qt.DeepEquals, []string{"blog/en-f.txt", "blog/en-f2.txt", "blog/sv-f.txt", "blog/svdir/main.txt", "docs/sv-docs.txt"})
bfs := afero.NewBasePathFs(rfs, "content")
collected, err = collectFilenames(bfs, "", "")
c.Assert(err, qt.IsNil)
c.Assert(collected, qt.DeepEquals, []string{"blog/en-f.txt", "blog/en-f2.txt", "blog/sv-f.txt", "blog/svdir/main.txt", "docs/sv-docs.txt"})
dirs, err := rfs.Dirs(filepath.FromSlash("content/blog"))
c.Assert(err, qt.IsNil)
c.Assert(len(dirs), qt.Equals, 4)
getDirnames := func(name string, rfs *RootMappingFs) []string {
filename := filepath.FromSlash(name)
f, err := rfs.Open(filename)
c.Assert(err, qt.IsNil)
names, err := f.Readdirnames(-1)
f.Close()
c.Assert(err, qt.IsNil)
info, err := rfs.Stat(filename)
c.Assert(err, qt.IsNil)
f2, err := info.(FileMetaInfo).Meta().Open()
c.Assert(err, qt.IsNil)
names2, err := f2.Readdirnames(-1)
c.Assert(err, qt.IsNil)
c.Assert(names2, qt.DeepEquals, names)
f2.Close()
return names
}
rfsEn := rfs.Filter(func(rm RootMapping) bool {
return rm.Meta.Lang() == "en"
})
c.Assert(getDirnames("content/blog", rfsEn), qt.DeepEquals, []string{"en-f.txt", "en-f2.txt"})
rfsSv := rfs.Filter(func(rm RootMapping) bool {
return rm.Meta.Lang() == "sv"
})
c.Assert(getDirnames("content/blog", rfsSv), qt.DeepEquals, []string{"sv-f.txt", "svdir"})
// Make sure we have not messed with the original
c.Assert(getDirnames("content/blog", rfs), qt.DeepEquals, []string{"sv-f.txt", "en-f.txt", "svdir", "en-f2.txt"})
c.Assert(getDirnames("content", rfsSv), qt.DeepEquals, []string{"blog", "docs"})
c.Assert(getDirnames("content", rfs), qt.DeepEquals, []string{"blog", "docs"})
}
func TestRootMappingFsDirnames(t *testing.T) {
c := qt.New(t)
fs := NewBaseFileDecorator(afero.NewMemMapFs())
testfile := "myfile.txt"
c.Assert(fs.Mkdir("f1t", 0755), qt.IsNil)
c.Assert(fs.Mkdir("f2t", 0755), qt.IsNil)
c.Assert(fs.Mkdir("f3t", 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("f2t", testfile), []byte("some content"), 0755), qt.IsNil)
rfs, err := NewRootMappingFsFromFromTo(fs, "static/bf1", "f1t", "static/cf2", "f2t", "static/af3", "f3t")
c.Assert(err, qt.IsNil)
fif, err := rfs.Stat(filepath.Join("static/cf2", testfile))
c.Assert(err, qt.IsNil)
c.Assert(fif.Name(), qt.Equals, "myfile.txt")
fifm := fif.(FileMetaInfo).Meta()
c.Assert(fifm.Filename(), qt.Equals, filepath.FromSlash("f2t/myfile.txt"))
root, err := rfs.Open(filepathSeparator)
c.Assert(err, qt.IsNil)
dirnames, err := root.Readdirnames(-1)
c.Assert(err, qt.IsNil)
c.Assert(dirnames, qt.DeepEquals, []string{"bf1", "cf2", "af3"})
}
func TestRootMappingFsFilename(t *testing.T) {
c := qt.New(t)
workDir, clean, err := htesting.CreateTempDir(Os, "hugo-root-filename")
c.Assert(err, qt.IsNil)
defer clean()
fs := NewBaseFileDecorator(Os)
testfilename := filepath.Join(workDir, "f1t/foo/file.txt")
c.Assert(fs.MkdirAll(filepath.Join(workDir, "f1t/foo"), 0777), qt.IsNil)
c.Assert(afero.WriteFile(fs, testfilename, []byte("content"), 0666), qt.IsNil)
rfs, err := NewRootMappingFsFromFromTo(fs, "static/f1", filepath.Join(workDir, "f1t"), "static/f2", filepath.Join(workDir, "f2t"))
c.Assert(err, qt.IsNil)
fi, err := rfs.Stat(filepath.FromSlash("static/f1/foo/file.txt"))
c.Assert(err, qt.IsNil)
fim := fi.(FileMetaInfo)
c.Assert(fim.Meta().Filename(), qt.Equals, testfilename)
_, err = rfs.Stat(filepath.FromSlash("static/f1"))
c.Assert(err, qt.IsNil)
}
func TestRootMappingFsMount(t *testing.T) {
c := qt.New(t)
fs := NewBaseFileDecorator(afero.NewMemMapFs())
testfile := "test.txt"
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mynoblogcontent", testfile), []byte("some no content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/myenblogcontent", testfile), []byte("some en content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", testfile), []byte("some sv content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/mysvblogcontent", "other.txt"), []byte("some sv content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "no.txt"), []byte("no text"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join("themes/a/singlefiles", "sv.txt"), []byte("sv text"), 0755), qt.IsNil)
bfs := afero.NewBasePathFs(fs, "themes/a").(*afero.BasePathFs)
rm := []RootMapping{
// Directories
RootMapping{
From: "content/blog",
To: "mynoblogcontent",
Meta: FileMeta{"lang": "no"},
},
RootMapping{
From: "content/blog",
To: "myenblogcontent",
Meta: FileMeta{"lang": "en"},
},
RootMapping{
From: "content/blog",
To: "mysvblogcontent",
Meta: FileMeta{"lang": "sv"},
},
// Files
RootMapping{
From: "content/singles/p1.md",
To: "singlefiles/no.txt",
ToBasedir: "singlefiles",
Meta: FileMeta{"lang": "no"},
},
RootMapping{
From: "content/singles/p1.md",
To: "singlefiles/sv.txt",
ToBasedir: "singlefiles",
Meta: FileMeta{"lang": "sv"},
},
}
rfs, err := NewRootMappingFs(bfs, rm...)
c.Assert(err, qt.IsNil)
blog, err := rfs.Stat(filepath.FromSlash("content/blog"))
c.Assert(err, qt.IsNil)
c.Assert(blog.IsDir(), qt.Equals, true)
blogm := blog.(FileMetaInfo).Meta()
c.Assert(blogm.Lang(), qt.Equals, "no") // First match
f, err := blogm.Open()
c.Assert(err, qt.IsNil)
defer f.Close()
dirs1, err := f.Readdirnames(-1)
c.Assert(err, qt.IsNil)
// Union with duplicate dir names filtered.
c.Assert(dirs1, qt.DeepEquals, []string{"test.txt", "test.txt", "other.txt", "test.txt"})
files, err := afero.ReadDir(rfs, filepath.FromSlash("content/blog"))
c.Assert(err, qt.IsNil)
c.Assert(len(files), qt.Equals, 4)
testfilefi := files[1]
c.Assert(testfilefi.Name(), qt.Equals, testfile)
testfilem := testfilefi.(FileMetaInfo).Meta()
c.Assert(testfilem.Filename(), qt.Equals, filepath.FromSlash("themes/a/mynoblogcontent/test.txt"))
tf, err := testfilem.Open()
c.Assert(err, qt.IsNil)
defer tf.Close()
b, err := ioutil.ReadAll(tf)
c.Assert(err, qt.IsNil)
c.Assert(string(b), qt.Equals, "some no content")
// Check file mappings
single, err := rfs.Stat(filepath.FromSlash("content/singles/p1.md"))
c.Assert(err, qt.IsNil)
c.Assert(single.IsDir(), qt.Equals, false)
singlem := single.(FileMetaInfo).Meta()
c.Assert(singlem.Lang(), qt.Equals, "no") // First match
singlesDir, err := rfs.Open(filepath.FromSlash("content/singles"))
c.Assert(err, qt.IsNil)
defer singlesDir.Close()
singles, err := singlesDir.Readdir(-1)
c.Assert(err, qt.IsNil)
c.Assert(singles, qt.HasLen, 2)
for i, lang := range []string{"no", "sv"} {
fi := singles[i].(FileMetaInfo)
c.Assert(fi.Meta().PathFile(), qt.Equals, filepath.FromSlash("themes/a/singlefiles/"+lang+".txt"))
c.Assert(fi.Meta().Lang(), qt.Equals, lang)
c.Assert(fi.Name(), qt.Equals, "p1.md")
}
}
func TestRootMappingFsMountOverlap(t *testing.T) {
c := qt.New(t)
fs := NewBaseFileDecorator(afero.NewMemMapFs())
c.Assert(afero.WriteFile(fs, filepath.FromSlash("da/a.txt"), []byte("some no content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.FromSlash("db/b.txt"), []byte("some no content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.FromSlash("dc/c.txt"), []byte("some no content"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.FromSlash("de/e.txt"), []byte("some no content"), 0755), qt.IsNil)
rm := []RootMapping{
RootMapping{
From: "static",
To: "da",
},
RootMapping{
From: "static/b",
To: "db",
},
RootMapping{
From: "static/b/c",
To: "dc",
},
RootMapping{
From: "/static/e/",
To: "de",
},
}
rfs, err := NewRootMappingFs(fs, rm...)
c.Assert(err, qt.IsNil)
getDirnames := func(name string) []string {
name = filepath.FromSlash(name)
f, err := rfs.Open(name)
c.Assert(err, qt.IsNil)
defer f.Close()
names, err := f.Readdirnames(-1)
c.Assert(err, qt.IsNil)
return names
}
c.Assert(getDirnames("static"), qt.DeepEquals, []string{"a.txt", "b", "e"})
c.Assert(getDirnames("static/b"), qt.DeepEquals, []string{"b.txt", "c"})
c.Assert(getDirnames("static/b/c"), qt.DeepEquals, []string{"c.txt"})
fi, err := rfs.Stat(filepath.FromSlash("static/b/b.txt"))
c.Assert(err, qt.IsNil)
c.Assert(fi.Name(), qt.Equals, "b.txt")
}
func TestRootMappingFsOs(t *testing.T) | {
c := qt.New(t)
fs := afero.NewOsFs()
d, err := ioutil.TempDir("", "hugo-root-mapping")
c.Assert(err, qt.IsNil)
defer func() {
os.RemoveAll(d)
}()
testfile := "myfile.txt"
c.Assert(fs.Mkdir(filepath.Join(d, "f1t"), 0755), qt.IsNil)
c.Assert(fs.Mkdir(filepath.Join(d, "f2t"), 0755), qt.IsNil)
c.Assert(fs.Mkdir(filepath.Join(d, "f3t"), 0755), qt.IsNil)
c.Assert(afero.WriteFile(fs, filepath.Join(d, "f2t", testfile), []byte("some content"), 0755), qt.IsNil)
rfs, err := NewRootMappingFsFromFromTo(fs, "static/bf1", filepath.Join(d, "f1t"), "static/cf2", filepath.Join(d, "f2t"), "static/af3", filepath.Join(d, "f3t"))
c.Assert(err, qt.IsNil)
fif, err := rfs.Stat(filepath.Join("static/cf2", testfile))
c.Assert(err, qt.IsNil)
c.Assert(fif.Name(), qt.Equals, "myfile.txt")
root, err := rfs.Open(filepathSeparator)
c.Assert(err, qt.IsNil)
dirnames, err := root.Readdirnames(-1)
c.Assert(err, qt.IsNil)
c.Assert(dirnames, qt.DeepEquals, []string{"bf1", "cf2", "af3"})
} |
|
spot-order-query.go | package gobinance
import (
"context"
"fmt"
"net/http"
"time"
)
// QueryOrderOption is a function that applies optional parameters / overrides to a query order operation
type QueryOrderOption func(input *queryOrderInput)
// QueryOrderRecvWindow overrides the default receive window for a query order operation
func QueryOrderRecvWindow(d time.Duration) QueryOrderOption |
// QueryOrderByID fetches the order whose ID as assigned by the exchange is orderID
func (c *Client) QueryOrderByID(ctx context.Context, symbol string, orderID int64, opts ...QueryOrderOption) (SpotOrder, error) {
input := queryOrderInput{
Symbol: symbol,
OrderID: orderID,
}
return c.queryOrder(ctx, input, opts)
}
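// Example usage, a sketch (the symbol and order ID are hypothetical):
//   order, err := c.QueryOrderByID(ctx, "BTCUSDT", 12345,
//       QueryOrderRecvWindow(5*time.Second))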
// QueryOrderByClientID fetches the order whose ID as assigned by the client is clientOrderID
func (c *Client) QueryOrderByClientID(ctx context.Context, symbol string, clientOrderID string, opts ...QueryOrderOption) (SpotOrder, error) {
input := queryOrderInput{
Symbol: symbol,
OrigClientOrderID: clientOrderID,
}
return c.queryOrder(ctx, input, opts)
}
func (c *Client) queryOrder(ctx context.Context, input queryOrderInput, opts []QueryOrderOption) (SpotOrder, error) {
applyQueryOrderOptions(&input, opts)
params, err := toURLValues(input)
if err != nil {
return SpotOrder{}, fmt.Errorf("error building request parameters: %w", err)
}
req, err := c.buildSignedRequest(ctx, http.MethodGet, "/api/v3/order", params)
if err != nil {
return SpotOrder{}, fmt.Errorf("error building request: %w", err)
}
var out SpotOrder
err = performRequest(c.Doer, req, &out)
return out, err
}
func applyQueryOrderOptions(in *queryOrderInput, opts []QueryOrderOption) {
for _, o := range opts {
o(in)
}
}
type queryOrderInput struct {
Symbol string `param:"symbol"`
OrderID int64 `param:"orderId,omitempty"`
OrigClientOrderID string `param:"origClientOrderId,omitempty"`
RecvWindow int64 `param:"recvWindow,omitempty"`
}
| {
return func(input *queryOrderInput) {
input.RecvWindow = d.Milliseconds()
}
} |
StarFavorite.tsx | import * as React from 'react'
function SvgStarFavorite(props: React.SVGProps<SVGSVGElement>) {
return (
<svg
data-name='Layer 3'
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 24 24'
width='1em'
height='1em'
{...props}
>
<path d='M0 0h24v24H0z' fill='none' />
<path
d='M7.733 20.829a1.5 1.5 0 01-2.171-1.592l.809-4.637-3.406-3.264a1.5 1.5 0 01.827-2.571l4.729-.676 2.135-4.259a1.5 1.5 0 012.688 0l2.135 4.259 4.729.676a1.5 1.5 0 01.827 2.571L17.629 14.6l.809 4.638a1.5 1.5 0 01-2.171 1.592L12 18.625z'
fill='none'
stroke='currentColor'
strokeLinecap='round'
strokeLinejoin='round'
strokeWidth={1.5}
/> | }
export default SvgStarFavorite | </svg>
) |
Link.tsx | import React from 'react';
import styled from "styled-components";
import {theme} from '../../theme'
import {
space,
layout,
fontWeight,
typography,
color,
} from 'styled-system'
interface LinkProps {
color?: string
fontSize?: number
fontWeight?: number
transition?: string
}
const defaultProps: LinkProps = {
color: theme.colors.grey[3],
fontSize: 3,
fontWeight: 400,
transition: "color 0.2s, border-color 0.2s, opacity 0.2s", | font-family: 'JosefinSans-Regular';
text-decoration: none;
transition: color 0.2s, border-color 0.2s, opacity 0.2s;
&:hover {
color: ${theme.colors.grey[2]};
}
${color}
${space}
${typography}
${fontWeight}
${layout}
`;
Link.defaultProps = defaultProps;
export default Link; | }
const Link: React.FunctionComponent<LinkProps> = styled.a`
cursor: pointer; |
resize.py | from PIL import Image
import os
percent = 0.5
for file_name in os.listdir("../foto/"):
if file_name == "pic8.jpg":
| img = Image.open("../foto/"+str(file_name))
if img.size[0] > img.size[1]:
# landscape photo (note: both branches currently apply the same scale)
hsize = int((float(img.size[0]) * float(percent)))
vsize = int((float(img.size[1]) * float(percent)))
else:
# portrait photo
hsize = int((float(img.size[0]) * float(percent)))
vsize = int((float(img.size[1]) * float(percent)))
img = img.resize((hsize, vsize), Image.ANTIALIAS)
img.save(file_name) |
|
parentheses.ts | /**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
import {
MemberExpression,
TSInferType,
AnyNode,
FlowNullableTypeAnnotation,
UpdateExpression,
ObjectExpression,
DoExpression,
LogicalExpression,
BinaryExpression,
SequenceExpression,
YieldExpression,
ClassExpression,
UnaryExpression,
SpreadElement,
SpreadProperty,
ArrowFunctionExpression,
AssignmentExpression,
ConditionalExpression,
UnionTypeAnnotation,
FlowFunctionTypeAnnotation,
OptionalCallExpression,
} from '@romejs/js-ast';
import {
isFor,
isUnaryLike,
isConditional,
isBinary,
} from '@romejs/js-ast-utils';
const PRECEDENCE = {
'||': 0,
'&&': 1,
'??': 1,
'|': 2,
'^': 3,
'&': 4,
'==': 5,
'===': 5,
'!=': 5,
'!==': 5,
'<': 6,
'>': 6,
'<=': 6,
'>=': 6,
in: 6,
instanceof: 6,
'>>': 7,
'<<': 7,
'>>>': 7,
'+': 8,
'-': 8,
'*': 9,
'/': 9,
'%': 9,
'**': 10,
};
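// Example: when printing `(a + b) * c`, the child `a + b` has
// PRECEDENCE['+'] === 8, which is lower than the parent's
// PRECEDENCE['*'] === 9, so the binary-expression rule below keeps the
// parentheses.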
function isClassExtendsClause(node: AnyNode, parent: AnyNode): boolean {
return (parent.type === 'ClassDeclaration' || parent.type ===
'ClassExpression') && parent.meta.superClass === node;
}
const parens: Map<AnyNode['type'], // rome-suppress-next-line lint/noExplicitAny
(node: any, parent: AnyNode, printStack: Array<AnyNode>) => boolean> = new Map();
export default parens;
parens.set('TSAsExpression', () => {
return true;
});
parens.set('TSTypeAssertion', () => {
return true;
});
parens.set('FlowNullableTypeAnnotation', (
node: FlowNullableTypeAnnotation,
parent: AnyNode,
): boolean => {
return parent.type === 'FlowArrayTypeAnnotation';
});
parens.set('MemberExpression', function MemberExpression(
node: MemberExpression,
parent: AnyNode,
): boolean {
if (node.property.optional) {
return parent.type === 'CallExpression' && parent.callee === node ||
parent.type ===
'MemberExpression' &&
parent.object === node;
} else {
return false;
}
}); | parent: AnyNode,
): boolean {
return (// (foo++).test(), (foo++)[0]
parent.type === 'MemberExpression' && parent.object === node || // (foo++)()
parent.type === 'CallExpression' && parent.callee === node || // new (foo++)()
parent.type === 'NewExpression' && parent.callee === node ||
isClassExtendsClause(node, parent)
);
});
parens.set('ObjectExpression', function ObjectExpression(
node: ObjectExpression,
parent: AnyNode,
printStack: Array<AnyNode>,
): boolean {
return isFirstInStatement(printStack, {considerArrow: true});
});
parens.set('DoExpression', function DoExpression(
node: DoExpression,
parent: AnyNode,
printStack: Array<AnyNode>,
): boolean {
return isFirstInStatement(printStack);
});
function LogicalExpression(
node: BinaryExpression | LogicalExpression,
parent: AnyNode,
): boolean {
if (node.operator === '**' && parent.type === 'BinaryExpression' &&
parent.operator ===
'**') {
return parent.left === node;
}
if (isClassExtendsClause(node, parent)) {
return true;
}
if ((parent.type === 'CallExpression' || parent.type === 'NewExpression') &&
parent.callee ===
node || isUnaryLike(parent) || parent.type === 'MemberExpression' &&
parent.object ===
node || parent.type === 'AwaitExpression') {
return true;
}
if (isBinary(parent)) {
const parentOp = parent.operator;
const parentPos = PRECEDENCE[parentOp];
const nodeOp = node.operator;
const nodePos = PRECEDENCE[nodeOp];
if ( // Logical expressions with the same precedence don't need parens.
parentPos === nodePos && parent.right === node && parent.type !==
'LogicalExpression' || parentPos > nodePos) {
return true;
}
}
switch (node.operator) {
case '||':
if (parent.type === 'LogicalExpression') {
return parent.operator === '??' || parent.operator === '&&';
} else {
return false;
}
case '&&':
return parent.type === 'LogicalExpression' && parent.operator === '??';
case '??':
return parent.type === 'LogicalExpression' && parent.operator !== '??';
default:
return false;
}
}
parens.set('LogicalExpression', LogicalExpression);
parens.set('BinaryExpression', function BinaryExpression(
node: BinaryExpression,
parent: AnyNode,
): boolean {
// let i = (1 in []);
// for ((1 in []);;);
return node.operator === 'in' && (parent.type === 'VariableDeclarator' ||
isFor(parent)) || LogicalExpression(node, parent);
});
parens.set('SequenceExpression', function SequenceExpression(
node: SequenceExpression,
parent: AnyNode,
): boolean {
if ( // Although parentheses wouldn't hurt around sequence
// expressions in the head of for loops, traditional style
// dictates that e.g. i++, j++ should not be wrapped with
// parentheses.
parent.type === 'ForStatement' || parent.type === 'ThrowStatement' ||
parent.type ===
'ReturnStatement' || parent.type === 'IfStatement' && parent.test ===
node || parent.type === 'WhileStatement' && parent.test === node ||
parent.type ===
'ForInStatement' &&
parent.right === node || parent.type === 'SwitchStatement' &&
parent.discriminant ===
node || parent.type === 'ExpressionStatement' && parent.expression ===
node) {
return false;
}
// Otherwise err on the side of overparenthesization, adding
// explicit exceptions above if this proves overzealous.
return true;
});
function YieldExpression(node: YieldExpression, parent: AnyNode): boolean {
return isBinary(parent) || isUnaryLike(parent) || parent.type ===
'MemberExpression' || parent.type === 'CallExpression' &&
parent.callee ===
node || parent.type === 'NewExpression' && parent.callee === node ||
parent.type ===
'AwaitExpression' &&
node.type === 'YieldExpression' ||
parent.type === 'ConditionalExpression' &&
node === parent.test || isClassExtendsClause(node, parent);
}
parens.set('YieldExpression', YieldExpression);
parens.set('AwaitExpression', YieldExpression);
parens.set('OptionalCallExpression', function OptionalCallExpression(
node: OptionalCallExpression,
parent: AnyNode,
): boolean {
return parent.type === 'CallExpression' && parent.callee === node ||
parent.type ===
'MemberExpression' &&
parent.object === node;
});
parens.set('ClassExpression', function ClassExpression(
node: ClassExpression,
parent: AnyNode,
printStack: Array<AnyNode>,
): boolean {
return isFirstInStatement(printStack, {considerDefaultExports: true});
});
function UnaryExpression(
node:
| UnaryExpression
| ArrowFunctionExpression
| AssignmentExpression
| ConditionalExpression
| SpreadElement
| SpreadProperty,
parent: AnyNode,
): boolean {
return parent.type === 'MemberExpression' && parent.object === node ||
parent.type ===
'CallExpression' &&
parent.callee === node || parent.type === 'NewExpression' &&
parent.callee ===
node || parent.type === 'BinaryExpression' && parent.operator === '**' &&
parent.left ===
node || isClassExtendsClause(node, parent);
}
parens.set('UnaryExpression', UnaryExpression);
parens.set('SpreadElement', UnaryExpression);
parens.set('SpreadProperty', UnaryExpression);
parens.set('FunctionExpression', function FunctionExpression(
node: AnyNode,
parent: AnyNode,
printStack: Array<AnyNode>,
): boolean {
return isFirstInStatement(printStack, {considerDefaultExports: true});
});
parens.set('ArrowFunctionExpression', function ArrowFunctionExpression(
node: ArrowFunctionExpression,
parent: AnyNode,
): boolean {
return parent.type === 'ExportLocalDeclaration' || ConditionalExpression(
node,
parent,
);
});
function ConditionalExpression(
node: ArrowFunctionExpression | AssignmentExpression | ConditionalExpression,
parent: AnyNode,
): boolean {
if (isUnaryLike(parent) || isBinary(parent) || parent.type ===
'ConditionalExpression' && parent.test === node || parent.type ===
'AwaitExpression' || parent.type === 'MemberExpression' &&
parent.object ===
node && parent.property.optional || parent.type ===
'OptionalCallExpression' && parent.callee === node || parent.type ===
'TaggedTemplateExpression' || parent.type === 'TSTypeAssertion' ||
parent.type ===
'TSAsExpression') {
return true;
}
return UnaryExpression(node, parent);
}
parens.set('ConditionalExpression', ConditionalExpression);
parens.set('AssignmentExpression', function AssignmentExpression(
node: AssignmentExpression,
parent: AnyNode,
): boolean {
if (node.left.type === 'AssignmentObjectPattern') {
return true;
} else {
return ConditionalExpression(node, parent);
}
});
function UnionTypeAnnotation(node: UnionTypeAnnotation, parent: AnyNode) {
return parent.type === 'FlowArrayTypeAnnotation' || parent.type ===
'FlowNullableTypeAnnotation' || parent.type ===
'IntersectionTypeAnnotation' ||
parent.type === 'UnionTypeAnnotation' || parent.type === 'TSArrayType' ||
parent.type ===
'TSOptionalType';
}
parens.set('UnionTypeAnnotation', UnionTypeAnnotation);
parens.set('IntersectionTypeAnnotation', UnionTypeAnnotation);
parens.set('TSInferType', function TSInferType(
node: TSInferType,
parent: AnyNode,
): boolean {
return parent.type === 'TSArrayType' || parent.type === 'TSOptionalType';
});
parens.set('FlowFunctionTypeAnnotation', function FlowFunctionTypeAnnotation(
node: FlowFunctionTypeAnnotation,
parent: AnyNode,
printStack: Array<AnyNode>,
) {
// Check if we are the return type of an arrow
for (const printNode of printStack) {
if (printNode.type === 'ArrowFunctionExpression' &&
printNode.head.returnType ===
node) {
return true;
}
}
// ((a: () => A) => (a: A) => A)
if (node.returnType !== undefined && node.returnType.type ===
'FlowFunctionTypeAnnotation') {
return true;
}
return (// (() => A) | (() => B)
parent.type === 'UnionTypeAnnotation' || // (() => A) & (() => B)
parent.type === 'IntersectionTypeAnnotation' || // (() => A)[]
parent.type === 'FlowArrayTypeAnnotation'
);
});
// Walk up the print stack to determine if our node can come first
// in statement.
function isFirstInStatement(
printStack: Array<AnyNode>,
{considerArrow = false, considerDefaultExports = false} = {},
): boolean {
let i = printStack.length - 1;
let node = printStack[i];
i--;
let parent = printStack[i];
while (i > 0) {
if (parent.type === 'ExpressionStatement' && parent.expression === node ||
parent.type ===
'TaggedTemplateExpression' || considerDefaultExports &&
parent.type ===
'ExportDefaultDeclaration' && parent.declaration === node ||
considerArrow &&
parent.type === 'ArrowFunctionExpression' &&
parent.body === node) {
return true;
}
if (parent.type === 'CallExpression' && parent.callee === node ||
parent.type ===
'SequenceExpression' &&
parent.expressions[0] === node ||
parent.type === 'MemberExpression' &&
parent.object === node || isConditional(parent) && parent.test ===
node || isBinary(parent) && parent.left === node || parent.type ===
'AssignmentExpression' && parent.left === node) {
node = parent;
i--;
parent = printStack[i];
} else {
return false;
}
}
return false;
} |
parens.set('UpdateExpression', function UpdateExpression(
node: UpdateExpression, |
business.py | # Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Meta information about the service.
Currently this only provides API versioning information
"""
from datetime import datetime
from flask import current_app
from colin_api.exceptions import BusinessNotFoundException
from colin_api.resources.db import DB
from colin_api.utils import convert_to_json_date, convert_to_json_datetime, stringify_list
class Business:
"""Class to contain all model-like functions such as getting and setting from database."""
business = None
def __init__(self):
"""Initialize with all values None."""
def get_corp_num(self):
"""Get corporation number, aka identifier."""
return self.business['identifier']
def as_dict(self):
"""Return dict version of self."""
return {
'business': self.business
}
@classmethod
def _get_last_ar_dates_for_reset(cls, cursor, event_info: list, event_ids: list):
"""Get the previous AR/AGM dates."""
events_by_corp_num = {}
for info in event_info:
if info['corp_num'] not in events_by_corp_num or events_by_corp_num[info['corp_num']] > info['event_id']:
events_by_corp_num[info['corp_num']] = info['event_id']
dates_by_corp_num = []
for corp_num in events_by_corp_num:
cursor.execute(f"""
SELECT event.corp_num, event.event_timestmp, filing.period_end_dt, filing.agm_date, filing.filing_typ_cd
FROM event
JOIN filing on filing.event_id = event.event_id
WHERE event.event_id not in ({stringify_list(event_ids)}) AND event.corp_num=:corp_num
ORDER BY event.event_timestmp desc
""",
corp_num=corp_num
)
dates = {'corp_num': corp_num}
for row in cursor.fetchall():
row = dict(zip([x[0].lower() for x in cursor.description], row))
if 'event_date' not in dates or dates['event_date'] < row['event_timestmp']:
dates['event_date'] = row['event_timestmp']
# set ar_date to closest period_end_dt.
# this is not always the first one that gets returned if 2 were filed on the same day
if row['period_end_dt'] and ('ar_date' not in dates or dates['ar_date'] < row['period_end_dt']):
dates['ar_date'] = row['period_end_dt']
dates['ar_filed_date'] = row['event_timestmp']
# this may be different than ar_date if the last ar had no agm
if row['agm_date'] and ('agm_date' not in dates or dates['agm_date'] < row['agm_date']):
dates['agm_date'] = row['agm_date']
# if there are no ARs for this coop then use date of incorporation
if row['filing_typ_cd'] == 'OTINC' and 'agm_date' not in dates:
dates['agm_date'] = row['event_timestmp']
dates['ar_filed_date'] = row['event_timestmp']
dates_by_corp_num.append(dates)
return dates_by_corp_num
@classmethod
def find_by_identifier(cls, identifier: str = None): # pylint: disable=too-many-statements;
"""Return a Business by identifier."""
business = None
if not identifier:
return None
try:
# get record
cursor = DB.connection.cursor()
cursor.execute("""
select corp.CORP_NUM as identifier, CORP_FROZEN_TYP_CD, corp_typ_cd type,
filing.period_end_dt as last_ar_date, LAST_AR_FILED_DT as last_ar_filed_date, LAST_AGM_DATE,
corp_op_state.full_desc as state, corp_state.state_typ_cd as corp_state,
t_name.corp_nme as legal_name,
t_assumed_name.CORP_NME as assumed_name, RECOGNITION_DTS as founding_date,
BN_15 as business_number, CAN_JUR_TYP_CD, OTHR_JURIS_DESC
from CORPORATION corp
left join CORP_NAME t_name on t_name.corp_num = corp.corp_num and t_name.CORP_NAME_TYP_CD='CO'
AND t_name.END_EVENT_ID is null
left join CORP_NAME t_assumed_name on t_assumed_name.corp_num = corp.corp_num
and t_assumed_name.CORP_NAME_TYP_CD='AS' AND t_assumed_name.END_EVENT_ID is null
join CORP_STATE on CORP_STATE.corp_num = corp.corp_num and CORP_STATE.end_event_id is null
join CORP_OP_STATE on CORP_OP_STATE.state_typ_cd = CORP_STATE.state_typ_cd
left join JURISDICTION on JURISDICTION.corp_num = corp.corp_num
join event on corp.corp_num = event.corp_num
left join filing on event.event_id = filing.event_id and filing.filing_typ_cd = 'OTANN'
where corp_typ_cd = 'CP'
and corp.CORP_NUM=:corp_num
order by last_ar_date desc nulls last""", corp_num=identifier)
business = cursor.fetchone()
if not business:
raise BusinessNotFoundException(identifier=identifier)
# add column names to resultset to build out correct json structure and make manipulation below more robust
# (better than column numbers)
business = dict(zip([x[0].lower() for x in cursor.description], business))
# get last ledger date from EVENT table and add to business record
# note - FILE event type is correct for new filings; CONVOTHER is for events/filings pulled over from COBRS
# during initial data import for Coops.
cursor.execute("""
select max(EVENT_TIMESTMP) as last_ledger_timestamp from EVENT
where EVENT_TYP_CD in('FILE', 'CONVOTHER') and CORP_NUM = '{}'""".format(identifier))
last_ledger_timestamp = cursor.fetchone()[0]
business['last_ledger_timestamp'] = last_ledger_timestamp
# if this is an XPRO, get correct jurisdiction; otherwise, it's BC
if business['type'] == 'XCP':
if business['can_jur_typ_cd'] == 'OT':
business['jurisdiction'] = business['othr_juris_desc']
else:
business['jurisdiction'] = business['can_jur_typ_cd']
else:
business['jurisdiction'] = 'BC'
# set name
if business['assumed_name']:
business['legal_name'] = business['assumed_name']
# set status - In Good Standing if certain criteria met, otherwise use original value
if business['state'] == 'Active' and \
business['last_ar_filed_date'] is not None and \
isinstance(business['last_ar_filed_date'], datetime) and \
business['last_agm_date'] is not None and isinstance(business['last_agm_date'], datetime):
if business['last_ar_filed_date'] > business['last_agm_date']:
business['status'] = 'In Good Standing'
else:
business['status'] = business['state']
else:
business['status'] = business['state']
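# Example with hypothetical dates: last_ar_filed_date=2019-06-01 and
# last_agm_date=2019-03-15 yield 'In Good Standing'; if the AR predates
# the AGM, the raw state description is reported instead.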
# convert dates and date-times to correct json format and convert to camel case for schema names
business['foundingDate'] = convert_to_json_datetime(business['founding_date'])
business['lastAgmDate'] = convert_to_json_date(business['last_agm_date'])
business['lastArDate'] = convert_to_json_date(business['last_ar_date']) if business['last_ar_date'] \
else business['lastAgmDate']
business['lastLedgerTimestamp'] = convert_to_json_datetime(business['last_ledger_timestamp'])
business['businessNumber'] = business['business_number']
business['corpState'] = business['corp_state']
business['legalName'] = business['legal_name']
business['legalType'] = business['type']
# remove unnecessary fields
del business['can_jur_typ_cd']
del business['othr_juris_desc']
del business['assumed_name']
del business['state']
del business['business_number']
del business['corp_frozen_typ_cd']
del business['corp_state']
del business['founding_date']
del business['last_agm_date']
del business['last_ar_filed_date']
del business['last_ledger_timestamp']
del business['legal_name']
del business['type']
del business['last_ar_date']
# add cache_id todo: set to real value
business['cacheId'] = 0
# convert to Business object
business_obj = Business()
business_obj.business = business
return business_obj
except Exception as err:
# general catch-all exception
current_app.logger.error(err.with_traceback(None))
# pass through exception to caller
raise err
@classmethod
def update_corporation(cls, cursor, corp_num: str = None, date: str = None, annual_report: bool = False):
"""Update corporation record.
:param cursor: oracle cursor
:param corp_num: (str) corporation number
:param date: (str) last agm date
:param annual_report: (bool) whether or not this was an annual report
"""
try:
if annual_report:
if date:
cursor.execute("""
UPDATE corporation
SET
LAST_AR_FILED_DT = sysdate,
LAST_AGM_DATE = TO_DATE(:agm_date, 'YYYY-mm-dd'),
LAST_LEDGER_DT = sysdate
WHERE corp_num = :corp_num
""",
agm_date=date,
corp_num=corp_num
)
else:
cursor.execute("""
UPDATE corporation
SET
LAST_AR_FILED_DT = sysdate,
LAST_LEDGER_DT = sysdate
WHERE corp_num = :corp_num
""",
corp_num=corp_num
)
else:
cursor.execute("""
UPDATE corporation
SET
LAST_LEDGER_DT = sysdate
WHERE corp_num = :corp_num
""",
corp_num=corp_num
)
except Exception as err:
current_app.logger.error(err.with_traceback(None))
raise err
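# Example call, a sketch (the corp number is hypothetical):
#   Business.update_corporation(cursor, corp_num='CP0001234',
#                               date='2019-04-15', annual_report=True)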
@classmethod
def update_corp_state(cls, cursor, event_id, corp_num, state='ACT'):
"""Update corporation state.
End previous corp_state record (end event id) and and create new corp_state record.
:param cursor: oracle cursor
:param event_id: (int) event id for corresponding event
:param corp_num: (str) corporation number
:param state: (str) state of corporation
"""
try:
cursor.execute("""
UPDATE corp_state
SET end_event_id = :event_id
WHERE corp_num = :corp_num and end_event_id is NULL
""",
event_id=event_id,
corp_num=corp_num
)
except Exception as err:
current_app.logger.error(err.with_traceback(None))
raise err
try:
cursor.execute("""
INSERT INTO corp_state (corp_num, start_event_id, state_typ_cd)
VALUES (:corp_num, :event_id, :state)
""",
event_id=event_id,
corp_num=corp_num,
state=state
)
except Exception as err:
current_app.logger.error(err.with_traceback(None))
raise err
@classmethod
def reset_corporations(cls, cursor, event_info: list, event_ids: list):
"""Reset the corporations to what they were before the given events."""
if len(event_info) < 1:
return
dates_by_corp_num = cls._get_last_ar_dates_for_reset(cursor=cursor, event_info=event_info, event_ids=event_ids)
for item in dates_by_corp_num:
try:
cursor.execute("""
UPDATE corporation
SET
LAST_AR_FILED_DT = :ar_filed_date,
LAST_AGM_DATE = :agm_date,
LAST_LEDGER_DT = :event_date
WHERE corp_num = :corp_num
""",
agm_date=item['agm_date'] if item['agm_date'] else item['ar_date'],
ar_filed_date=item['ar_filed_date'],
event_date=item['event_date'],
corp_num=item['corp_num']
)
except Exception as err:
current_app.logger.error(f'Error in Business: Failed to reset corporation for {item["corp_num"]}')
raise err
@classmethod
def reset_corp_states(cls, cursor, event_ids: list):
" | ""Reset the corp states to what they were before the given events."""
if len(event_ids) < 1:
return
# delete corp_state rows created on these events
try:
cursor.execute(f"""
DELETE FROM corp_state
WHERE start_event_id in ({stringify_list(event_ids)})
""")
except Exception as err:
current_app.logger.error(f'Error in Business: Failed to delete corp_state rows for events {event_ids}')
raise err
# reset corp_state rows ended on these events
try:
cursor.execute(f"""
UPDATE corp_state
SET end_event_id = null
WHERE end_event_id in ({stringify_list(event_ids)})
""")
except Exception as err:
current_app.logger.error(f'Error in Business: Failed to reset ended corp_state rows for events {event_ids}')
raise err
|
|
rng.rs | use bevy::app::ScheduleRunnerSettings;
use bevy::prelude::*;
use bevy_rng::*;
use std::time::Duration;
fn main() {
// Don't register the plugin (non-deterministic)...
App::build()
.add_resource(ScheduleRunnerSettings::run_once())
.add_plugins(MinimalPlugins)
.add_system(random_number_1.system())
.add_system(random_number_2.system())
.run();
// ...don't provide a seed (same as above)...
App::build()
.add_resource(ScheduleRunnerSettings::run_once())
.add_plugins(MinimalPlugins)
.add_plugin(RngPlugin::default())
.add_system(random_number_1.system())
.add_system(random_number_2.system())
.run();
// ...seed from u64 (deterministic)...
App::build()
.add_resource(ScheduleRunnerSettings::run_once())
.add_plugins(MinimalPlugins)
.add_plugin(RngPlugin::from(42)) | .run();
// ...or from a string (same as above).
App::build()
.add_resource(ScheduleRunnerSettings::run_loop(Duration::from_millis(100)))
.add_plugins(MinimalPlugins)
.add_plugin(RngPlugin::from("your seed here"))
.add_system(random_number_1.system())
.add_system(random_number_2.system())
.run();
}
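// With a fixed seed (42 or the string above) both systems print the same
// sequence on every run; the first two unseeded App runs do not.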
fn random_number_1(mut rng: Local<Rng>) {
println!("1: {}", rng.gen::<f64>());
}
fn random_number_2(mut rng: Local<Rng>) {
println!("2: {}", rng.gen::<f64>());
} | .add_system(random_number_1.system())
.add_system(random_number_2.system()) |
main.go | package main
import (
"context"
"flag"
"fmt"
"io"
"net"
"net/http"
"net/rpc"
"net/rpc/jsonrpc"
"os"
"os/signal"
"path/filepath"
"sync"
"syscall"
dbm "github.com/33cn/chain33/common/db"
logf "github.com/33cn/chain33/common/log"
"github.com/33cn/chain33/common/log/log15"
chain33Types "github.com/33cn/chain33/types"
"github.com/33cn/plugin/plugin/dapp/cross2eth/ebrelayer/relayer"
chain33Relayer "github.com/33cn/plugin/plugin/dapp/cross2eth/ebrelayer/relayer/chain33"
ethRelayer "github.com/33cn/plugin/plugin/dapp/cross2eth/ebrelayer/relayer/ethereum"
"github.com/33cn/plugin/plugin/dapp/cross2eth/ebrelayer/relayer/events"
ebrelayerTypes "github.com/33cn/plugin/plugin/dapp/cross2eth/ebrelayer/types"
relayerTypes "github.com/33cn/plugin/plugin/dapp/cross2eth/ebrelayer/types"
tml "github.com/BurntSushi/toml"
"github.com/btcsuite/btcd/limits"
)
var (
configPath = flag.String("f", "", "configfile")
versionCmd = flag.Bool("s", false, "version")
//IPWhiteListMap ...
IPWhiteListMap = make(map[string]bool)
mainlog = log15.New("relayer manager", "main")
)
func main() {
flag.Parse()
if *versionCmd {
fmt.Println(relayerTypes.Version4Relayer)
return
}
if *configPath == "" {
*configPath = "relayer.toml"
}
err := os.Chdir(pwd())
if err != nil {
panic(err)
}
d, err := os.Getwd()
if err != nil {
panic(err)
}
mainlog.Info("current dir:", "dir", d)
err = limits.SetLimits()
if err != nil {
panic(err)
}
cfg := initCfg(*configPath)
mainlog.Info("Starting FUZAMEI Chain33-X-Ethereum relayer software:", "\n Name: ", cfg.Title)
logf.SetFileLog(convertLogCfg(cfg.Log))
ctx, cancel := context.WithCancel(context.Background())
var wg sync.WaitGroup
mainlog.Info("db info:", " Dbdriver = ", cfg.SyncTxConfig.Dbdriver, ", DbPath = ", cfg.SyncTxConfig.DbPath, ", DbCache = ", cfg.SyncTxConfig.DbCache)
mainlog.Info("deploy info:", "BridgeRegistry", cfg.BridgeRegistry)
mainlog.Info("db info:", " Dbdriver = ", cfg.SyncTxConfig.Dbdriver, ", DbPath = ", cfg.SyncTxConfig.DbPath, ", DbCache = ", cfg.SyncTxConfig.DbCache)
db := dbm.NewDB("relayer_db_service", cfg.SyncTxConfig.Dbdriver, cfg.SyncTxConfig.DbPath, cfg.SyncTxConfig.DbCache)
ethBridgeClaimChan := make(chan *ebrelayerTypes.EthBridgeClaim, 100)
chain33MsgChan := make(chan *events.Chain33Msg, 100)
mainlog.Info("deploy info for chain33:", "cfg.Deploy4Chain33", cfg.Deploy4Chain33)
chain33StartPara := &chain33Relayer.Chain33StartPara{
ChainName: cfg.ChainName,
Ctx: ctx,
SyncTxConfig: cfg.SyncTxConfig,
BridgeRegistryAddr: cfg.BridgeRegistryOnChain33,
DeployInfo: cfg.Deploy4Chain33,
DBHandle: db,
EthBridgeClaimChan: ethBridgeClaimChan,
Chain33MsgChan: chain33MsgChan,
ChainID: cfg.ChainID4Chain33,
}
chain33RelayerService := chain33Relayer.StartChain33Relayer(chain33StartPara)
ethStartPara := ðRelayer.EthereumStartPara{
DbHandle: db,
EthProvider: cfg.EthProvider,
EthProviderHttp: cfg.EthProviderCli,
BridgeRegistryAddr: cfg.BridgeRegistry,
DeployInfo: cfg.Deploy,
Degree: cfg.EthMaturityDegree,
BlockInterval: cfg.EthBlockFetchPeriod,
EthBridgeClaimChan: ethBridgeClaimChan,
Chain33MsgChan: chain33MsgChan,
ProcessWithDraw: cfg.ProcessWithDraw,
}
ethRelayerService := ethRelayer.StartEthereumRelayer(ethStartPara)
relayerManager := relayer.NewRelayerManager(chain33RelayerService, ethRelayerService, db)
mainlog.Info("ebrelayer", "cfg.JrpcBindAddr = ", cfg.JrpcBindAddr)
startRPCServer(cfg.JrpcBindAddr, relayerManager)
ch := make(chan os.Signal, 1)
signal.Notify(ch, syscall.SIGTERM)
go func() {
<-ch
cancel()
wg.Wait()
os.Exit(0)
}()
}
func convertLogCfg(log *relayerTypes.Log) *chain33Types.Log {
return &chain33Types.Log{
Loglevel: log.Loglevel,
LogConsoleLevel: log.LogConsoleLevel,
LogFile: log.LogFile,
MaxFileSize: log.MaxFileSize,
MaxBackups: log.MaxBackups,
MaxAge: log.MaxAge,
LocalTime: log.LocalTime,
Compress: log.Compress,
CallerFile: log.CallerFile,
CallerFunction: log.CallerFunction,
}
}
func pwd() string {
dir, err := filepath.Abs(filepath.Dir(os.Args[0]))
if err != nil {
panic(err)
}
return dir
}
func | (path string) *relayerTypes.RelayerConfig {
var cfg relayerTypes.RelayerConfig
if _, err := tml.DecodeFile(path, &cfg); err != nil {
fmt.Println(err)
os.Exit(-1)
}
//fmt.Println(cfg)
return &cfg
}
//IsIPWhiteListEmpty ...
func IsIPWhiteListEmpty() bool {
return len(IPWhiteListMap) == 0
}
//IsInIPWhitelist reports whether ipAddr is in the IP address whitelist
func IsInIPWhitelist(ipAddrPort string) bool {
ipAddr, _, err := net.SplitHostPort(ipAddrPort)
if err != nil {
return false
}
ip := net.ParseIP(ipAddr)
if ip.IsLoopback() {
return true
}
if _, ok := IPWhiteListMap[ipAddr]; ok {
return true
}
return false
}
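// Example with a hypothetical whitelist: after IPWhiteListMap["10.0.0.5"] = true,
// IsInIPWhitelist("10.0.0.5:9901") is true, IsInIPWhitelist("10.0.0.6:9901") is
// false, and loopback callers such as "127.0.0.1:9901" are always allowed.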
//RPCServer ...
type RPCServer struct {
*rpc.Server
}
//ServeHTTP ...
func (r *RPCServer) ServeHTTP(w http.ResponseWriter, req *http.Request) {
mainlog.Info("ServeHTTP", "request address", req.RemoteAddr)
if !IsIPWhiteListEmpty() {
if !IsInIPWhitelist(req.RemoteAddr) {
mainlog.Info("ServeHTTP", "refuse connect address", req.RemoteAddr)
w.WriteHeader(401)
return
}
}
r.Server.ServeHTTP(w, req)
}
//HandleHTTP ...
func (r *RPCServer) HandleHTTP(rpcPath, debugPath string) {
http.Handle(rpcPath, r)
}
//HTTPConn ...
type HTTPConn struct {
in io.Reader
out io.Writer
}
//Read ...
func (c *HTTPConn) Read(p []byte) (n int, err error) { return c.in.Read(p) }
//Write ...
func (c *HTTPConn) Write(d []byte) (n int, err error) { return c.out.Write(d) }
//Close ...
func (c *HTTPConn) Close() error { return nil }
func startRPCServer(address string, api interface{}) {
listener, err := net.Listen("tcp", address)
if err != nil {
fmt.Println("监听失败,端口可能已经被占用")
panic(err)
}
srv := &RPCServer{rpc.NewServer()}
_ = srv.Server.Register(api)
srv.HandleHTTP(rpc.DefaultRPCPath, rpc.DefaultDebugPath)
var handler http.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/" {
serverCodec := jsonrpc.NewServerCodec(&HTTPConn{in: r.Body, out: w})
w.Header().Set("Content-type", "application/json")
w.WriteHeader(200)
err := srv.ServeRequest(serverCodec)
if err != nil {
mainlog.Debug("http", "Error while serving JSON request: %v", err)
return
}
}
})
_ = http.Serve(listener, handler)
}
| initCfg |
sign-up.controller.ts | import {Request, Response} from 'express';
// DB
import {setActivationToken, setHash} from '../../utils/auth.utils';
import {Profile} from '../../utils/interfaces/Profile';
import {Status} from '../../utils/interfaces/Status';
import {insertProfile} from '../../utils/profile/insertProfile';
import formData from 'form-data';
import Mailgun from 'mailgun.js';
import Client from 'mailgun.js/dist/lib/client';
// Interfaces (represent the DB model and types of the columns associated with a specific DB table)
export async function signupProfileController(request: Request, response: Response): Promise<Response | undefined> {
try {
const mailgun: Mailgun = new Mailgun(formData)
const mailgunClient: Client = mailgun.client({username: "api", key: <string>process.env.MAILGUN_API_KEY})
const {profilePhoto, profileAboutMe, profileJobTitle, profileEmail, profileName, profileUrl, profileSkills, profilePassword} = request.body;
console.log("profile Password", profilePassword)
const profileHash = await setHash(profilePassword);
const profileActivationToken = setActivationToken();
const basePath = `${request.protocol}://${request.get('host')}${request.originalUrl}/activation/${profileActivationToken}`
console.log(profileActivationToken)
const message = `<h2>Welcome to Amarillo Tech Hub.</h2>
<p>Please check your email for an activation link to confirm your account.</p>
<p><a href="${basePath}">${basePath}</a></p>
`
const mailgunMessage = {
from: `Mailgun Sandbox <postmaster@${process.env.MAILGUN_DOMAIN}>`,
to: profileEmail,
subject: 'One step closer to Sticky Head -- Account Activation',
html: message
}
const profile: Profile = {
profileId: null,
profileAboutMe,
profileActivationToken,
profileEmail,
profileHash,
profileJobTitle,
profileName,
profilePhoto: "https://placekitten.com/200/200",
profileUrl,
profileResume: null,
profileSkills
};
await insertProfile(profile)
//await mailgunClient.messages.create(<string>process.env.MAILGUN_DOMAIN, mailgunMessage)
const status: Status = {
status: 200,
message: 'Profile successfully created please check your email.',
data: null
};
return response.json(status)
} catch (error: any) {
console.error(error)
const status: Status = {
status: 500,
message: error.message,
data: null | };
return response.json(status);
}
} | |
mod.rs | // Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Parsing and serialization of Internet Control Message Protocol (ICMP) packets.
#[macro_use]
mod macros;
mod common;
mod icmpv4;
mod icmpv6;
pub mod mld;
pub mod ndp;
#[cfg(test)]
mod testdata;
pub use self::common::*;
pub use self::icmpv4::*;
pub use self::icmpv6::*;
use core::cmp;
use core::convert::{TryFrom, TryInto};
use core::fmt::Debug;
use core::marker::PhantomData;
use core::mem;
use core::ops::Deref;
use byteorder::{ByteOrder, NetworkEndian};
use internet_checksum::Checksum;
use net_types::ip::{Ip, IpAddress, Ipv4, Ipv6};
use never::Never;
use packet::records::options::{Options, OptionsImpl};
use packet::{
AsFragmentedByteSlice, BufferView, FragmentedByteSlice, FromRaw, PacketBuilder,
PacketConstraints, ParsablePacket, ParseMetadata, SerializeBuffer,
};
use zerocopy::{AsBytes, ByteSlice, FromBytes, LayoutVerified, Unaligned};
use crate::error::{ParseError, ParseResult};
use crate::ip::{IpExt, Ipv4Proto, Ipv6Proto};
use crate::ipv4::{self, Ipv4PacketRaw};
use crate::ipv6::Ipv6PacketRaw;
use crate::U16;
#[derive(Copy, Clone, Default, Debug, FromBytes, AsBytes, Unaligned)]
#[repr(C)]
struct HeaderPrefix {
msg_type: u8,
code: u8,
checksum: [u8; 2],
/* NOTE: The "Rest of Header" field is stored in message types rather than
* in the HeaderPrefix. This helps consolidate how callers access data about the
* packet, and is consistent with ICMPv6, which treats the field as part of
* messages rather than the header. */
}
impl HeaderPrefix {
fn set_msg_type<T: Into<u8>>(&mut self, msg_type: T) {
self.msg_type = msg_type.into();
}
}
/// Peek at an ICMP header to see what message type is present.
///
/// Since `IcmpPacket` is statically typed with the message type expected, this
/// type must be known ahead of time before calling `parse`. If multiple
/// different types are valid in a given parsing context, and so the caller
/// cannot know ahead of time which type to use, `peek_message_type` can be used
/// to peek at the header first to figure out which static type should be used
/// in a subsequent call to `parse`.
///
/// Note that `peek_message_type` only inspects certain fields in the header,
/// and so `peek_message_type` succeeding does not guarantee that a subsequent
/// call to `parse` will also succeed.
pub fn peek_message_type<MessageType: TryFrom<u8>>(bytes: &[u8]) -> ParseResult<MessageType> {
let (hdr_pfx, _) = LayoutVerified::<_, HeaderPrefix>::new_unaligned_from_prefix(bytes)
.ok_or_else(debug_err_fn!(ParseError::Format, "too few bytes for header"))?;
MessageType::try_from(hdr_pfx.msg_type).map_err(|_| {
debug_err!(ParseError::NotSupported, "unrecognized message type: {:x}", hdr_pfx.msg_type,)
})
}
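// Usage sketch (hypothetical; `buf` is assumed to hold a serialized ICMPv4
// packet): peek first, then parse with the matching static type.
//
// let msg_type = peek_message_type::<Icmpv4MessageType>(buf.as_ref())?;
// match msg_type {
//     Icmpv4MessageType::EchoRequest => {
//         // buf.parse_with::<_, IcmpPacket<Ipv4, _, IcmpEchoRequest>>(args)
//     }
//     _ => { /* other message types */ }
// }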
/// An extension trait adding ICMP-related associated types to `Ipv4` and `Ipv6`.
///
/// This trait is kept separate from `IcmpIpExt` to not require a type parameter
/// that implements `ByteSlice`.
pub trait IcmpIpTypes: Ip {
/// The type of an ICMP parameter problem code.
///
/// For `Ipv4`, this is `Icmpv4ParameterProblemCode`, and for `Ipv6` this
/// is `Icmpv6ParameterProblemCode`.
type ParameterProblemCode: PartialEq + Send + Sync + Debug;
/// The type of an ICMP parameter problem pointer.
///
/// For `Ipv4`, this is `u8`, and for `Ipv6` this is `u32`.
type ParameterProblemPointer: PartialEq + Send + Sync + Debug;
/// The type of an ICMP parameter header length.
///
/// For `Ipv4`, this is `usize`, and for `Ipv6` this is `()`.
type HeaderLen: PartialEq + Send + Sync + Debug;
}
// A default implementation for any I: Ip. This is to convince the Rust compiler
// that, given an I: Ip, it's guaranteed to implement IcmpIpTypes. We humans know
// that Ipv4 and Ipv6 are the only types implementing Ip and so, since we
// implement IcmpIpTypes for both of these types, this is fine. The compiler isn't
// so smart. This implementation should never actually be used.
impl<I: Ip> IcmpIpTypes for I {
default type ParameterProblemCode = Never;
default type ParameterProblemPointer = Never;
default type HeaderLen = Never;
}
impl IcmpIpTypes for Ipv4 {
type ParameterProblemCode = Icmpv4ParameterProblemCode;
type ParameterProblemPointer = u8;
type HeaderLen = usize;
}
impl IcmpIpTypes for Ipv6 {
type ParameterProblemCode = Icmpv6ParameterProblemCode;
type ParameterProblemPointer = u32;
type HeaderLen = ();
}
/// An extension trait adding ICMP-related functionality to `Ipv4` and `Ipv6`.
pub trait IcmpIpExt: IpExt {
/// The type of ICMP messages.
///
/// For `Ipv4`, this is `Icmpv4MessageType`, and for `Ipv6`, this is
/// `Icmpv6MessageType`.
type IcmpMessageType: IcmpMessageType;
/// The identifier for this ICMP version.
///
/// This value will be found in an IPv4 packet's Protocol field (for ICMPv4
/// packets) or an IPv6 fixed header's or last extension header's Next
/// Header field (for ICMPv6 packets).
const ICMP_IP_PROTO: <Self as IpExt>::Proto;
/// Compute the length of the header of the packet prefix stored in `bytes`.
///
/// Given the prefix of a packet stored in `bytes`, compute the length of
/// the header of that packet, or `bytes.len()` if `bytes` does not contain
/// the entire header. If the version is IPv6, the returned length should
/// include all extension headers.
fn header_len(bytes: &[u8]) -> usize;
}
impl IcmpIpExt for Ipv4 {
type IcmpMessageType = Icmpv4MessageType;
const ICMP_IP_PROTO: Ipv4Proto = Ipv4Proto::Icmp;
fn header_len(bytes: &[u8]) -> usize {
if bytes.len() < ipv4::IPV4_MIN_HDR_LEN {
return bytes.len();
}
let (header_prefix, _) =
LayoutVerified::<_, ipv4::HeaderPrefix>::new_unaligned_from_prefix(bytes).unwrap();
cmp::min(header_prefix.ihl() as usize * 4, bytes.len())
}
}
impl IcmpIpExt for Ipv6 {
type IcmpMessageType = Icmpv6MessageType;
const ICMP_IP_PROTO: Ipv6Proto = Ipv6Proto::Icmpv6;
// TODO: Re-implement this in terms of partial parsing, and then get rid of
// the `header_len` method.
fn header_len(_bytes: &[u8]) -> usize {
// NOTE: We panic here rather than doing log_unimplemented! because
// there's no sane default value for this function. If it's called, it
// doesn't make sense for the program to continue executing; if we did,
// it would cause bugs in the caller.
unimplemented!()
}
}
/// An ICMPv4 or ICMPv6 packet.
///
/// `IcmpPacketType` is implemented by `Icmpv4Packet` and `Icmpv6Packet`.
pub trait IcmpPacketType<B: ByteSlice, I: Ip>:
Sized + ParsablePacket<B, IcmpParseArgs<I::Addr>, Error = ParseError>
{
}
impl<B: ByteSlice> IcmpPacketType<B, Ipv4> for Icmpv4Packet<B> {}
impl<B: ByteSlice> IcmpPacketType<B, Ipv6> for Icmpv6Packet<B> {}
// TODO(joshlf): Once we have generic associated types, refactor this so that we
// don't have to bind B ahead of time. Removing that requirement would make some
// APIs (in particular, IcmpPacketBuilder) simpler by removing the B parameter
// from them as well.
/// `MessageBody` represents the parsed body of the ICMP packet.
///
/// - For messages that expect no body, the `MessageBody` is of type `()`.
/// - For NDP messages, the `MessageBody` is of the type `ndp::Options`.
/// - For all other messages, the `MessageBody` will be of the type
/// `OriginalPacket`, which is a thin wrapper around `B`.
pub trait MessageBody<B>: Sized {
/// Whether or not a message body is expected in an ICMP packet.
const EXPECTS_BODY: bool = true;
/// Parse the MessageBody from the provided bytes.
fn parse(bytes: B) -> ParseResult<Self>
where
B: ByteSlice;
/// The length of the underlying buffer.
fn len(&self) -> usize
where
B: ByteSlice;
/// Is the body empty?
///
/// `b.is_empty()` is equivalent to `b.len() == 0`.
fn is_empty(&self) -> bool
where
B: ByteSlice,
{
self.len() == 0
}
/// Return the underlying bytes.
fn bytes(&self) -> &[u8]
where
B: Deref<Target = [u8]>;
}
impl<B> MessageBody<B> for () {
const EXPECTS_BODY: bool = false;
fn parse(bytes: B) -> ParseResult<()>
where
B: ByteSlice,
{
if !bytes.is_empty() {
return debug_err!(Err(ParseError::Format), "unexpected message body");
}
Ok(())
}
fn len(&self) -> usize {
0
}
fn bytes(&self) -> &[u8] {
&[]
}
}
/// A thin wrapper around B which implements `MessageBody`.
#[derive(Debug)]
pub struct OriginalPacket<B>(B);
impl<B: ByteSlice + Deref<Target = [u8]>> OriginalPacket<B> {
/// Returns the body of the original packet.
pub fn body<I: IcmpIpExt>(&self) -> &[u8] {
// TODO(joshlf): Can these debug_asserts be triggered by external input?
let header_len = I::header_len(&self.0);
debug_assert!(header_len <= self.0.len());
debug_assert!(I::VERSION.is_v6() || self.0.len() - header_len == 8);
&self.0[header_len..]
}
}
impl<B> MessageBody<B> for OriginalPacket<B> {
fn parse(bytes: B) -> ParseResult<OriginalPacket<B>> {
Ok(OriginalPacket(bytes))
}
fn len(&self) -> usize
where
B: ByteSlice,
{
self.0.len()
}
fn bytes(&self) -> &[u8]
where
B: Deref<Target = [u8]>,
{
&self.0
}
}
impl<B, O: for<'a> OptionsImpl<'a>> MessageBody<B> for Options<B, O> {
fn parse(bytes: B) -> ParseResult<Options<B, O>>
where
B: ByteSlice,
{
Self::parse(bytes).map_err(|_e| debug_err!(ParseError::Format, "unable to parse options"))
}
fn len(&self) -> usize
where
B: ByteSlice,
{
self.bytes().len()
}
fn bytes(&self) -> &[u8]
where
B: Deref<Target = [u8]>,
|
}
/// An ICMP message.
pub trait IcmpMessage<I: IcmpIpExt, B: ByteSlice>:
Sized + Copy + FromBytes + AsBytes + Unaligned
{
/// The type of codes used with this message.
///
/// The ICMP header includes an 8-bit "code" field. For a given message
/// type, different values of this field carry different meanings. Not all
/// code values are used - some may be invalid. This type represents a
/// parsed code. For example, for TODO, it is the TODO type.
type Code: Into<u8> + Copy + Debug;
/// The type of the body used with this message.
type Body: MessageBody<B>;
/// The type corresponding to this message type.
///
/// The value of the "type" field in the ICMP header corresponding to
/// messages of this type.
const TYPE: I::IcmpMessageType;
/// Parse a `Code` from an 8-bit number.
///
/// Parse a `Code` from the 8-bit "code" field in the ICMP header. Not all
/// values for this field are valid. If an invalid value is passed,
/// `code_from_u8` returns `None`.
fn code_from_u8(code: u8) -> Option<Self::Code>;
}
/// The type of an ICMP message.
///
/// `IcmpMessageType` is implemented by `Icmpv4MessageType` and
/// `Icmpv6MessageType`.
pub trait IcmpMessageType: TryFrom<u8> + Into<u8> + Copy {
/// Is this an error message?
///
/// For ICMP, this is true for the Destination Unreachable, Redirect, Source
/// Quench, Time Exceeded, and Parameter Problem message types. For ICMPv6,
/// this is true for the Destination Unreachable, Packet Too Big, Time
/// Exceeded, and Parameter Problem message types.
fn is_err(self) -> bool;
}
#[derive(Copy, Clone, Debug, FromBytes, Unaligned)]
#[repr(C)]
struct Header<M> {
prefix: HeaderPrefix,
message: M,
}
// So long as `M: Unaligned`, there will be no padding between the
// `HeaderPrefix` and `M`. Since `HeaderPrefix` itself is `Unaligned`, the
// alignment of `Header<M>` will be 1, meaning that no post-padding will need to
// be added to get to a multiple of the alignment. Since there is no padding,
// then so long as `M: AsBytes`, all of `Header<M>: AsBytes`.
unsafe impl<M: AsBytes + Unaligned> AsBytes for Header<M> {
// We're doing a bad thing, but it's necessary until derive(AsBytes)
// supports type parameters.
fn only_derive_is_allowed_to_implement_this_trait() {}
}
/// A partially parsed and not yet validated ICMP packet.
///
/// An `IcmpPacketRaw` provides minimal parsing of an ICMP packet. Namely, it
/// only requires that the header and message (in ICMPv6, these are both
/// considered part of the header) are present, and that the header has the
/// expected message type. The body may be missing (or an unexpected body may be
/// present). Other than the message type, no header, message, or body field
/// values will be validated.
///
/// [`IcmpPacket`] provides a [`FromRaw`] implementation that can be used to
/// validate an [`IcmpPacketRaw`].
#[derive(Debug)]
pub struct IcmpPacketRaw<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B>> {
header: LayoutVerified<B, Header<M>>,
message_body: B,
_marker: PhantomData<I>,
}
impl<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B>> IcmpPacketRaw<I, B, M> {
/// Get the ICMP message.
pub fn message(&self) -> &M {
&self.header.message
}
}
/// An ICMP packet.
///
/// An `IcmpPacket` shares its underlying memory with the byte slice it was
/// parsed from, meaning that no copying or extra allocation is necessary.
#[derive(Debug)]
pub struct IcmpPacket<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B>> {
header: LayoutVerified<B, Header<M>>,
message_body: M::Body,
_marker: PhantomData<I>,
}
/// Arguments required to parse an ICMP packet.
pub struct IcmpParseArgs<A: IpAddress> {
src_ip: A,
dst_ip: A,
}
impl<A: IpAddress> IcmpParseArgs<A> {
/// Construct a new `IcmpParseArgs`.
pub fn new<S: Into<A>, D: Into<A>>(src_ip: S, dst_ip: D) -> IcmpParseArgs<A> {
IcmpParseArgs { src_ip: src_ip.into(), dst_ip: dst_ip.into() }
}
}
impl<B: ByteSlice, I: IcmpIpExt, M: IcmpMessage<I, B>> ParsablePacket<B, ()>
for IcmpPacketRaw<I, B, M>
{
type Error = ParseError;
fn parse_metadata(&self) -> ParseMetadata {
ParseMetadata::from_packet(self.header.bytes().len(), self.message_body.len(), 0)
}
fn parse<BV: BufferView<B>>(mut buffer: BV, _args: ()) -> ParseResult<Self> {
let header = buffer
.take_obj_front::<Header<M>>()
.ok_or_else(debug_err_fn!(ParseError::Format, "too few bytes for header"))?;
let message_body = buffer.into_rest();
if header.prefix.msg_type != M::TYPE.into() {
return debug_err!(Err(ParseError::NotExpected), "unexpected message type");
}
Ok(IcmpPacketRaw { header, message_body, _marker: PhantomData })
}
}
impl<B: ByteSlice, I: IcmpIpExt, M: IcmpMessage<I, B>>
FromRaw<IcmpPacketRaw<I, B, M>, IcmpParseArgs<I::Addr>> for IcmpPacket<I, B, M>
{
type Error = ParseError;
fn try_from_raw_with(
raw: IcmpPacketRaw<I, B, M>,
args: IcmpParseArgs<I::Addr>,
) -> ParseResult<Self> {
let IcmpPacketRaw { header, message_body, _marker } = raw;
if !M::Body::EXPECTS_BODY && !message_body.is_empty() {
return debug_err!(Err(ParseError::Format), "unexpected message body");
}
let _: M::Code = M::code_from_u8(header.prefix.code).ok_or_else(debug_err_fn!(
ParseError::Format,
"unrecognized code: {}",
header.prefix.code
))?;
let checksum = Self::compute_checksum(&header, &message_body, args.src_ip, args.dst_ip)
.ok_or_else(debug_err_fn!(ParseError::Format, "packet too large"))?;
if checksum != [0, 0] {
return debug_err!(Err(ParseError::Checksum), "invalid checksum");
}
let message_body = M::Body::parse(message_body)?;
Ok(IcmpPacket { header, message_body, _marker })
}
}
impl<B: ByteSlice, I: IcmpIpExt, M: IcmpMessage<I, B>> ParsablePacket<B, IcmpParseArgs<I::Addr>>
for IcmpPacket<I, B, M>
{
type Error = ParseError;
fn parse_metadata(&self) -> ParseMetadata {
ParseMetadata::from_packet(self.header.bytes().len(), self.message_body.len(), 0)
}
fn parse<BV: BufferView<B>>(buffer: BV, args: IcmpParseArgs<I::Addr>) -> ParseResult<Self> {
IcmpPacketRaw::parse(buffer, ()).and_then(|p| IcmpPacket::try_from_raw_with(p, args))
}
}
impl<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B>> IcmpPacket<I, B, M> {
/// Get the ICMP message.
pub fn message(&self) -> &M {
&self.header.message
}
/// Get the ICMP body.
pub fn body(&self) -> &M::Body {
&self.message_body
}
/// Get the ICMP message code.
///
/// The code provides extra details about the message. Each message type has
/// its own set of codes that are allowed.
pub fn code(&self) -> M::Code {
// infallible since it was validated in parse
M::code_from_u8(self.header.prefix.code).unwrap()
}
/// Construct a builder with the same contents as this packet.
pub fn builder(&self, src_ip: I::Addr, dst_ip: I::Addr) -> IcmpPacketBuilder<I, B, M> {
IcmpPacketBuilder { src_ip, dst_ip, code: self.code(), msg: *self.message() }
}
}
fn compute_checksum_fragmented<
I: IcmpIpExt,
B: ByteSlice,
BB: packet::Fragment,
M: IcmpMessage<I, B>,
>(
header: &Header<M>,
message_body: &FragmentedByteSlice<'_, BB>,
src_ip: I::Addr,
dst_ip: I::Addr,
) -> Option<[u8; 2]> {
let mut c = Checksum::new();
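    // ICMPv6 checksums cover an IPv6 pseudo-header (RFC 4443): the source and
    // destination addresses, the 32-bit upper-layer packet length, three zero
    // bytes, and the Next Header value. ICMPv4 has no pseudo-header.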
if I::VERSION.is_v6() {
c.add_bytes(src_ip.bytes());
c.add_bytes(dst_ip.bytes());
let icmpv6_len = mem::size_of::<Header<M>>() + message_body.len();
let mut len_bytes = [0; 4];
NetworkEndian::write_u32(&mut len_bytes, icmpv6_len.try_into().ok()?);
c.add_bytes(&len_bytes[..]);
c.add_bytes(&[0, 0, 0]);
c.add_bytes(&[Ipv6Proto::Icmpv6.into()]);
}
c.add_bytes(&[header.prefix.msg_type, header.prefix.code]);
c.add_bytes(&header.prefix.checksum);
c.add_bytes(header.message.as_bytes());
for p in message_body.iter_fragments() {
c.add_bytes(p);
}
Some(c.checksum())
}
impl<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B>> IcmpPacket<I, B, M> {
/// Compute the checksum, including the checksum field itself.
///
/// `compute_checksum` returns `None` if the version is IPv6 and the total
/// ICMP packet length overflows a u32.
fn compute_checksum(
header: &Header<M>,
message_body: &[u8],
src_ip: I::Addr,
dst_ip: I::Addr,
) -> Option<[u8; 2]> {
let mut body = [message_body];
compute_checksum_fragmented(header, &body.as_fragmented_byte_slice(), src_ip, dst_ip)
}
}
impl<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B, Body = OriginalPacket<B>>>
IcmpPacket<I, B, M>
{
/// Get the body of the packet that caused this ICMP message.
///
/// This ICMP message contains some of the bytes of the packet that caused
/// this message to be emitted. `original_packet_body` returns as much of
/// the body of that packet as is contained in this message. For IPv4, this
/// is guaranteed to be 8 bytes. For IPv6, there are no guarantees about the
/// length.
pub fn original_packet_body(&self) -> &[u8] {
self.message_body.body::<I>()
}
/// Returns the original packet that caused this ICMP message.
///
/// This ICMP message contains some of the bytes of the packet that caused
/// this message to be emitted. `original_packet` returns as much of the
/// body of that packet as is contained in this message. For IPv4, this is
/// guaranteed to be 8 bytes. For IPv6, there are no guarantees about the
/// length.
pub fn original_packet(&self) -> &OriginalPacket<B> {
&self.message_body
}
}
impl<B: ByteSlice, M: IcmpMessage<Ipv4, B, Body = OriginalPacket<B>>> IcmpPacket<Ipv4, B, M> {
/// Attempt to partially parse the original packet as an IPv4 packet.
///
/// `f` will be invoked on the result of calling `Ipv4PacketRaw::parse` on
/// the original packet.
pub fn with_original_packet<O, F: FnOnce(Result<Ipv4PacketRaw<&[u8]>, &[u8]>) -> O>(
&self,
f: F,
) -> O {
let mut bv = self.message_body.0.deref();
f(Ipv4PacketRaw::parse(&mut bv, ()).map_err(|_| self.message_body.0.deref()))
}
}
impl<B: ByteSlice, M: IcmpMessage<Ipv6, B, Body = OriginalPacket<B>>> IcmpPacket<Ipv6, B, M> {
/// Attempt to partially parse the original packet as an IPv6 packet.
///
/// `f` will be invoked on the result of calling `Ipv6PacketRaw::parse` on
/// the original packet.
pub fn with_original_packet<O, F: FnOnce(Result<Ipv6PacketRaw<&[u8]>, &[u8]>) -> O>(
&self,
f: F,
) -> O {
let mut bv = self.message_body.0.deref();
f(Ipv6PacketRaw::parse(&mut bv, ()).map_err(|_| self.message_body.0.deref()))
}
}
impl<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B, Body = ndp::Options<B>>> IcmpPacket<I, B, M> {
/// Get the parsed list of NDP options from the ICMP message.
pub fn ndp_options(&self) -> &ndp::Options<B> {
&self.message_body
}
}
/// A builder for ICMP packets.
#[derive(Debug)]
pub struct IcmpPacketBuilder<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B>> {
src_ip: I::Addr,
dst_ip: I::Addr,
code: M::Code,
msg: M,
}
impl<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B>> IcmpPacketBuilder<I, B, M> {
/// Construct a new `IcmpPacketBuilder`.
pub fn new<S: Into<I::Addr>, D: Into<I::Addr>>(
src_ip: S,
dst_ip: D,
code: M::Code,
msg: M,
) -> IcmpPacketBuilder<I, B, M> {
IcmpPacketBuilder { src_ip: src_ip.into(), dst_ip: dst_ip.into(), code, msg }
}
}
// TODO(joshlf): Figure out a way to split body and non-body message types by
// trait and implement PacketBuilder for some and InnerPacketBuilder for others.
impl<I: IcmpIpExt, B: ByteSlice, M: IcmpMessage<I, B>> PacketBuilder
for IcmpPacketBuilder<I, B, M>
{
fn constraints(&self) -> PacketConstraints {
        // The maximum body length constraint makes sure the body length
        // doesn't overflow the 32-bit length field in the pseudo-header used
        // for calculating the checksum.
//
// Note that, for messages that don't take bodies, it's important that
// we don't just set this to 0. Trying to serialize a body in a message
// type which doesn't take bodies is a programmer error, so we should
// panic in that case. Setting the max_body_len to 0 would surface the
// issue as an MTU error, which would hide the underlying problem.
// Instead, we assert in serialize. Eventually, we will hopefully figure
// out a way to implement InnerPacketBuilder (rather than PacketBuilder)
// for these message types, and this won't be an issue anymore.
PacketConstraints::new(mem::size_of::<Header<M>>(), 0, 0, core::u32::MAX as usize)
}
fn serialize(&self, buffer: &mut SerializeBuffer<'_>) {
use packet::BufferViewMut;
let (mut prefix, message_body, _) = buffer.parts();
// implements BufferViewMut, giving us take_obj_xxx_zero methods
let mut prefix = &mut prefix;
assert!(
M::Body::EXPECTS_BODY || message_body.is_empty(),
"body provided for message that doesn't take a body"
);
// SECURITY: Use _zero constructors to ensure we zero memory to prevent
// leaking information from packets previously stored in this buffer.
let mut header =
prefix.take_obj_front_zero::<Header<M>>().expect("too few bytes for ICMP message");
header.prefix.set_msg_type(M::TYPE);
header.prefix.code = self.code.into();
header.message = self.msg;
let checksum = compute_checksum_fragmented(&header, message_body, self.src_ip, self.dst_ip)
.unwrap_or_else(|| {
panic!(
"total ICMP packet length of {} overflows 32-bit length field of pseudo-header",
header.bytes().len() + message_body.len(),
)
});
header.prefix.checksum = checksum;
}
}
/// The type of ICMP codes that are unused.
///
/// Some ICMP messages do not use codes. In Rust, the `IcmpMessage::Code` type
/// associated with these messages is `IcmpUnusedCode`. The only valid numerical
/// value for this code is 0.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub struct IcmpUnusedCode;
impl From<IcmpUnusedCode> for u8 {
fn from(_: IcmpUnusedCode) -> u8 {
0
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, FromBytes, AsBytes, Unaligned)]
#[repr(C)]
struct IdAndSeq {
id: U16,
seq: U16,
}
impl IdAndSeq {
fn new(id: u16, seq: u16) -> IdAndSeq {
IdAndSeq { id: U16::new(id), seq: U16::new(seq) }
}
}
#[cfg(test)]
mod tests {
use packet::ParseBuffer;
use super::*;
#[test]
fn test_partial_parse() {
// Test various behaviors of parsing the `IcmpPacketRaw` type.
let reference_header = Header {
prefix: HeaderPrefix {
msg_type: <IcmpEchoRequest as IcmpMessage<Ipv4, &[u8]>>::TYPE.into(),
code: 0,
checksum: [0, 0],
},
message: IcmpEchoRequest::new(1, 1),
};
// Test that a too-short header is always rejected even if its contents
// are otherwise valid (the checksum here is probably invalid, but we
// explicitly check that it's a `Format` error, not a `Checksum`
// error).
let mut buf = &reference_header.as_bytes()[..7];
assert_eq!(
buf.parse::<IcmpPacketRaw<Ipv4, _, IcmpEchoRequest>>().unwrap_err(),
ParseError::Format
);
// Test that a properly-sized header is rejected if the message type is wrong.
let mut header = reference_header;
header.prefix.msg_type = <IcmpEchoReply as IcmpMessage<Ipv4, &[u8]>>::TYPE.into();
let mut buf = header.as_bytes();
assert_eq!(
buf.parse::<IcmpPacketRaw<Ipv4, _, IcmpEchoRequest>>().unwrap_err(),
ParseError::NotExpected
);
// Test that an invalid code is accepted.
let mut header = reference_header;
header.prefix.code = 0xFF;
let mut buf = header.as_bytes();
assert!(buf.parse::<IcmpPacketRaw<Ipv4, _, IcmpEchoRequest>>().is_ok());
// Test that an invalid checksum is accepted. Instead of calculating the
// correct checksum, we just provide two different checksums. They can't
// both be valid.
let mut buf = reference_header.as_bytes();
assert!(buf.parse::<IcmpPacketRaw<Ipv4, _, IcmpEchoRequest>>().is_ok());
let mut header = reference_header;
header.prefix.checksum = [1, 1];
let mut buf = header.as_bytes();
assert!(buf.parse::<IcmpPacketRaw<Ipv4, _, IcmpEchoRequest>>().is_ok());
}
}
| {
self.bytes()
} |
request.rs | use crate::client::Graph;
use graph_http::types::Collection;
use graph_http::types::Content;
use graph_http::GraphResponse;
use graph_http::IntoResponse;
use handlebars::*;
use reqwest::Method;
register_client!(ItemRequest,);
register_client!(ItemsRequest, ());
register_client!(VersionsRequest,);
impl<'a, Client> ItemRequest<'a, Client>
where
Client: graph_http::RequestClient,
{
pub fn id<ID: AsRef<str>>(&self, id: ID) -> ItemsRequest<'a, Client> |
get!({
doc: "# Get items from sites",
name: list_items,
response: Collection<serde_json::Value>,
path: "/items",
params: 0,
has_body: false
});
post!({
doc: "# Create new navigation property to items for sites",
name: create_items,
response: serde_json::Value,
path: "/items",
params: 0,
has_body: true
});
}
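// Usage sketch (hypothetical; the parent path builder that yields an
// `ItemRequest` is generated elsewhere, so the call chain below is assumed):
//
// let client = Graph::new("ACCESS_TOKEN");
// let items = client.v1()
//     .sites()          // assumed parent builder
//     .item()
//     .list_items()
//     .send()
//     .await?;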
impl<'a, Client> ItemsRequest<'a, Client>
where
Client: graph_http::RequestClient,
{
pub fn versions(&self) -> VersionsRequest<'a, Client> {
VersionsRequest::new(self.client)
}
get!({
doc: "# Get items from sites",
name: get_items,
response: serde_json::Value,
path: "/items/{{RID}}",
params: 0,
has_body: false
});
patch!({
doc: "# Update the navigation property items in sites",
name: update_items,
response: GraphResponse<Content>,
path: "/items/{{RID}}",
params: 0,
has_body: true
});
get!({
doc: "# Get analytics from sites",
name: get_analytics,
response: serde_json::Value,
path: "/items/{{RID}}/analytics",
params: 0,
has_body: false
});
get!({
doc: "# Get driveItem from sites",
name: get_drive_item,
response: serde_json::Value,
path: "/items/{{RID}}/driveItem",
params: 0,
has_body: false
});
patch!({
doc: "# Update the navigation property driveItem in sites",
name: update_drive_item,
response: GraphResponse<Content>,
path: "/items/{{RID}}/driveItem",
params: 0,
has_body: true
});
get!({
doc: "# Get fields from sites",
name: get_fields,
response: serde_json::Value,
path: "/items/{{RID}}/fields",
params: 0,
has_body: false
});
patch!({
doc: "# Update the navigation property fields in sites",
name: update_fields,
response: GraphResponse<Content>,
path: "/items/{{RID}}/fields",
params: 0,
has_body: true
});
get!({
doc: "# Get versions from sites",
name: list_versions,
response: Collection<serde_json::Value>,
path: "/items/{{RID}}/versions",
params: 0,
has_body: false
});
post!({
doc: "# Create new navigation property to versions for sites",
name: create_versions,
response: serde_json::Value,
path: "/items/{{RID}}/versions",
params: 0,
has_body: true
});
get!({
doc: "# Get versions from sites",
name: get_versions,
response: serde_json::Value,
path: "/items/{{RID}}/versions/{{id}}",
params: 1,
has_body: false
});
patch!({
doc: "# Update the navigation property versions in sites",
name: update_versions,
response: GraphResponse<Content>,
path: "/items/{{RID}}/versions/{{id}}",
params: 1,
has_body: true
});
}
impl<'a, Client> VersionsRequest<'a, Client>
where
Client: graph_http::RequestClient,
{
get!({
doc: "# Get fields from sites",
name: get_fields,
response: serde_json::Value,
path: "/items/{{RID}}/versions/{{id}}/fields",
params: 1,
has_body: false
});
patch!({
doc: "# Update the navigation property fields in sites",
name: update_fields,
response: GraphResponse<Content>,
path: "/items/{{RID}}/versions/{{id}}/fields",
params: 1,
has_body: true
});
post!({
doc: "# Invoke action restoreVersion",
name: restore_version,
response: GraphResponse<Content>,
path: "/items/{{RID}}/versions/{{id}}/restoreVersion",
params: 1,
has_body: false
});
}
| {
ItemsRequest::new(id.as_ref(), self.client)
} |
main.rs | extern crate config;
use state::LocalStorage;
use std::error::Error;
use std::fs;
use serenity::{
async_trait,
model::{channel::Message, gateway::Ready},
prelude::*,
};
struct Configuration {
channel_id: String,
discord_token: String
}
static CONFIG: LocalStorage<Configuration> = LocalStorage::new();
struct Handler;
#[async_trait]
impl EventHandler for Handler {
async fn message(&self, _ctx: Context, msg: Message) {
let channel_id = match CONFIG.get().channel_id.parse::<u64>() {
Ok(val) => val,
Err(_e) => return
};
if msg.channel_id.as_u64() != &channel_id {
return;
}
fs::write("currentsong.txt", &msg.content).expect("Unable to write file");
println!("Updated song: {}", msg.content);
}
async fn ready(&self, _: Context, ready: Ready) {
println!("{} is connected!", ready.user.name);
}
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
let mut settings = config::Config::default();
settings
// Add in `./config.toml`
.merge(config::File::with_name("config.toml")).unwrap() |
CONFIG.set(move || Configuration {
discord_token: settings.get_str("DISCORD_TOKEN").expect("DISCORD_TOKEN is required in config.toml"),
channel_id: settings.get_str("CHANNEL_ID").expect("CHANNEL_ID is required in config.toml")
});
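    // Example config.toml consumed above (values are placeholders):
    //
    //   DISCORD_TOKEN = "your-bot-token"
    //   CHANNEL_ID = "123456789012345678"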
// Create a new instance of the Client, logging in as a bot. This will
// automatically prepend your bot token with "Bot ", which is a requirement
// by Discord for bot users.
let token = &CONFIG.get().discord_token;
let mut client = Client::builder(token)
.event_handler(Handler)
.await
.expect("Err creating client");
if let Err(why) = client.start().await {
println!("Client error: {:?}", why);
}
Ok(())
} | // Add in settings from the environment (with a prefix of APP)
// Eg.. `APP_DEBUG=1 ./target/app` would set the `debug` key
.merge(config::Environment::with_prefix("APP")).unwrap();
|
modelEpochs.py | import copy
import numpy as np
from numpy import log10
import os
from toolz import pipe as p
from tensorboardX import SummaryWriter
import torch
import torch.nn as nn
import preprocessing as pp
def findParam(model, name_filter):
if callable(name_filter):
fn = name_filter
else:
name_filter = [name_filter] if type(name_filter) is str else name_filter
fn = lambda param_name: all(
component in param_name for component in name_filter)
return [(pn, pv) for (pn, pv) in model.named_parameters() if fn(pn)]
def setParameterRequiresGrad(model, requires_grad = False, params = None):
params = model.parameters() if params is None else params
for param in params:
param.requires_grad = requires_grad
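# Usage sketch (hypothetical model): pick out the classifier head's
# parameters, e.g. to unfreeze only the final fully connected layer.
#
# fc_params = [pv for (_, pv) in findParam(model, 'fc')]
# setParameterRequiresGrad(model, requires_grad=True, params=fc_params)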
def runEpochs(
model, criterion,
dataloaders, dataset_sizes, device,
log_params_verbose, num_epochs,
optimizer, scheduler,
writer):
best_model_wts = copy.deepcopy(model.state_dict())
best_acc = 0.0
prev_model_wts = best_model_wts
for epoch in range(num_epochs):
epoch_acc, model_wts = _run_epoch(
model,
criterion, dataloaders, dataset_sizes, device,
epoch, log_params_verbose, num_epochs,
optimizer, scheduler, writer)
_log_coef_diffs(writer, epoch, prev_model_wts, model_wts)
prev_model_wts = model_wts
if epoch_acc > best_acc:
best_acc = epoch_acc
best_model_wts = model_wts
# load best model weights
model.load_state_dict(best_model_wts)
return (model, best_acc)
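# Usage sketch (names are illustrative; any torch optimizer/scheduler works):
#
# writer = SummaryWriter('runs/experiment1')
# model, best_acc = runEpochs(
#     model, nn.CrossEntropyLoss(),
#     dataloaders, dataset_sizes, device,
#     log_params_verbose=False, num_epochs=25,
#     optimizer=optimizer, scheduler=scheduler, writer=writer)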
def viewParamsToBeUpdated(model):
return [n for (n,p) in model.named_parameters() if p.requires_grad == True]
def add_graph_model(writer, model, dataloaders, device):
inputs, classes = p(dataloaders['train'], iter, next)
inputs = inputs.to(device)
classes = classes.to(device)
writer.add_graph(model, inputs)
def _run_epoch(model,
criterion, dataloaders, dataset_sizes, device,
epoch, log_params_verbose, num_epochs,
optimizer, scheduler, writer):
print('Epoch {}/{}'.format(epoch, num_epochs - 1))
print('-' * 10)
n_samples = {'train': 0, 'val': 0}
# Each epoch has a training and validation phase
for phase in ['train', 'val']:
is_train = phase == 'train'
if is_train:
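            # NOTE: PyTorch >= 1.1 expects scheduler.step() to run after
            # optimizer.step(); stepping here at the start of the training
            # phase preserves this script's original (pre-1.1) behavior.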
scheduler.step()
model.train()
else:
model.eval()
running_loss = 0.0
running_corrects = 0
for inputs, labels in dataloaders[phase]:
n_samples[phase] = n_samples[phase] + len(labels)
inputs = inputs.to(device)
labels = labels.to(device)
preds, loss = _take_step(
model, criterion, optimizer, inputs, labels, is_train)
# statistics
running_loss += loss.item() * inputs.size(0)
running_corrects += torch.sum(preds == labels.data)
epoch_loss = running_loss / dataset_sizes[phase]
epoch_acc = running_corrects.double() / dataset_sizes[phase]
_log_epoch_phase_stats(writer, epoch, phase, epoch_loss, epoch_acc)
if log_params_verbose:
_log_model_params_verbose(writer, model, epoch, phase)
# deep copy the model
model_wts = copy.deepcopy(model.state_dict())
_log_lr(writer, epoch, scheduler)
print('# training samples')
print(n_samples['train'])
print('# val samples')
print(n_samples['val'])
return epoch_acc, model_wts
def _take_step(model, criterion, optimizer, inputs, labels, is_train):
# zero the parameter gradients
|
def _add_scope(scope, k):
return scope + '/' + k
def _add_scope_gen(scope):
return lambda k: _add_scope(scope, k)
def _log_model_params_verbose(writer, model, run_num, scope, use_hist = False):
def write(tag, param):
fn = writer.add_histogram if use_hist else writer.add_scalar
param = param if use_hist else param.abs().mean()
return fn(tag, param, run_num)
with torch.no_grad():
for (name, param) in model.named_parameters():
p(name,
_add_scope_gen(scope),
lambda tag: write(tag, param)
)
def _log_lr(writer, epoch, scheduler):
lr = p(scheduler.get_lr(), np.array)[0]
p('lr',
_add_scope_gen('lr'),
lambda _: writer.add_scalar(_, lr, epoch)
)
p('log10_lr',
_add_scope_gen('lr'),
lambda _: writer.add_scalar(_, log10(lr), epoch)
)
def _log_epoch_phase_stats(writer, epoch, scope, epoch_loss, epoch_acc):
log_measure = lambda k, v: p(k,
_add_scope_gen(scope),
lambda _ : writer.add_scalar(_, v, epoch)
)
log_measure('loss', epoch_loss)
log_measure('accuracy', epoch_acc)
print('{} Loss: {:.4f} Acc: {:.4f}'.format(
scope, epoch_loss, epoch_acc))
def _log_coef_diffs(writer, epoch, prev_model_state, curr_model_state):
def write(name, curr):
diff = curr - prev_model_state[name]
p(name,
_add_scope_gen('params'),
lambda _: writer.add_scalar(
_ + '.diff', diff.abs().mean(), epoch)
)
with torch.no_grad():
for name in curr_model_state:
if ('weight' in name or 'bias' in name):
write(name, curr_model_state[name])
| optimizer.zero_grad()
# forward
# track history if only in train
with torch.set_grad_enabled(is_train):
outputs = model(inputs)
_, preds = torch.max(outputs, 1)
loss = criterion(outputs, labels)
# backward + optimize only if in training phase
if is_train:
loss.backward()
optimizer.step()
return preds, loss |
dma_int_st.rs | #[doc = "Register `DMA_INT_ST` reader"]
pub struct R(crate::R<DMA_INT_ST_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<DMA_INT_ST_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<DMA_INT_ST_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<DMA_INT_ST_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Field `DMA_INFIFO_FULL_ERR_INT_ST` reader - The status bit for SPI_DMA_INFIFO_FULL_ERR_INT interrupt."]
pub struct DMA_INFIFO_FULL_ERR_INT_ST_R(crate::FieldReader<bool, bool>);
impl DMA_INFIFO_FULL_ERR_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
DMA_INFIFO_FULL_ERR_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for DMA_INFIFO_FULL_ERR_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DMA_OUTFIFO_EMPTY_ERR_INT_ST` reader - The status bit for SPI_DMA_OUTFIFO_EMPTY_ERR_INT interrupt."]
pub struct DMA_OUTFIFO_EMPTY_ERR_INT_ST_R(crate::FieldReader<bool, bool>);
impl DMA_OUTFIFO_EMPTY_ERR_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
DMA_OUTFIFO_EMPTY_ERR_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for DMA_OUTFIFO_EMPTY_ERR_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_EX_QPI_INT_ST` reader - The status bit for SPI slave Ex_QPI interrupt."]
pub struct SLV_EX_QPI_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_EX_QPI_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_EX_QPI_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_EX_QPI_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_EN_QPI_INT_ST` reader - The status bit for SPI slave En_QPI interrupt."]
pub struct SLV_EN_QPI_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_EN_QPI_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_EN_QPI_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_EN_QPI_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_CMD7_INT_ST` reader - The status bit for SPI slave CMD7 interrupt."]
pub struct SLV_CMD7_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_CMD7_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_CMD7_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_CMD7_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_CMD8_INT_ST` reader - The status bit for SPI slave CMD8 interrupt."]
pub struct SLV_CMD8_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_CMD8_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_CMD8_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_CMD8_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_CMD9_INT_ST` reader - The status bit for SPI slave CMD9 interrupt."]
pub struct SLV_CMD9_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_CMD9_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_CMD9_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_CMD9_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_CMDA_INT_ST` reader - The status bit for SPI slave CMDA interrupt."]
pub struct SLV_CMDA_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_CMDA_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_CMDA_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_CMDA_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_RD_DMA_DONE_INT_ST` reader - The status bit for SPI_SLV_RD_DMA_DONE_INT interrupt."]
pub struct SLV_RD_DMA_DONE_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_RD_DMA_DONE_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_RD_DMA_DONE_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_RD_DMA_DONE_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_WR_DMA_DONE_INT_ST` reader - The status bit for SPI_SLV_WR_DMA_DONE_INT interrupt."]
pub struct SLV_WR_DMA_DONE_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_WR_DMA_DONE_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_WR_DMA_DONE_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_WR_DMA_DONE_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_RD_BUF_DONE_INT_ST` reader - The status bit for SPI_SLV_RD_BUF_DONE_INT interrupt."]
pub struct SLV_RD_BUF_DONE_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_RD_BUF_DONE_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_RD_BUF_DONE_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_RD_BUF_DONE_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_WR_BUF_DONE_INT_ST` reader - The status bit for SPI_SLV_WR_BUF_DONE_INT interrupt."]
pub struct SLV_WR_BUF_DONE_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_WR_BUF_DONE_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_WR_BUF_DONE_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_WR_BUF_DONE_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `TRANS_DONE_INT_ST` reader - The status bit for SPI_TRANS_DONE_INT interrupt."]
pub struct TRANS_DONE_INT_ST_R(crate::FieldReader<bool, bool>);
impl TRANS_DONE_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
TRANS_DONE_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for TRANS_DONE_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DMA_SEG_TRANS_DONE_INT_ST` reader - The status bit for SPI_DMA_SEG_TRANS_DONE_INT interrupt."]
pub struct DMA_SEG_TRANS_DONE_INT_ST_R(crate::FieldReader<bool, bool>);
impl DMA_SEG_TRANS_DONE_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
DMA_SEG_TRANS_DONE_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for DMA_SEG_TRANS_DONE_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SEG_MAGIC_ERR_INT_ST` reader - The status bit for SPI_SEG_MAGIC_ERR_INT interrupt."]
pub struct SEG_MAGIC_ERR_INT_ST_R(crate::FieldReader<bool, bool>);
impl SEG_MAGIC_ERR_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SEG_MAGIC_ERR_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SEG_MAGIC_ERR_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_BUF_ADDR_ERR_INT_ST` reader - The status bit for SPI_SLV_BUF_ADDR_ERR_INT interrupt."]
pub struct SLV_BUF_ADDR_ERR_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_BUF_ADDR_ERR_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_BUF_ADDR_ERR_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_BUF_ADDR_ERR_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SLV_CMD_ERR_INT_ST` reader - The status bit for SPI_SLV_CMD_ERR_INT interrupt."]
pub struct SLV_CMD_ERR_INT_ST_R(crate::FieldReader<bool, bool>);
impl SLV_CMD_ERR_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SLV_CMD_ERR_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SLV_CMD_ERR_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `MST_RX_AFIFO_WFULL_ERR_INT_ST` reader - The status bit for SPI_MST_RX_AFIFO_WFULL_ERR_INT interrupt."]
pub struct MST_RX_AFIFO_WFULL_ERR_INT_ST_R(crate::FieldReader<bool, bool>);
impl MST_RX_AFIFO_WFULL_ERR_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
MST_RX_AFIFO_WFULL_ERR_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for MST_RX_AFIFO_WFULL_ERR_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `MST_TX_AFIFO_REMPTY_ERR_INT_ST` reader - The status bit for SPI_MST_TX_AFIFO_REMPTY_ERR_INT interrupt."]
pub struct MST_TX_AFIFO_REMPTY_ERR_INT_ST_R(crate::FieldReader<bool, bool>);
impl MST_TX_AFIFO_REMPTY_ERR_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
MST_TX_AFIFO_REMPTY_ERR_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for MST_TX_AFIFO_REMPTY_ERR_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `APP2_INT_ST` reader - The status bit for SPI_APP2_INT interrupt."]
pub struct APP2_INT_ST_R(crate::FieldReader<bool, bool>);
impl APP2_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
APP2_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for APP2_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `APP1_INT_ST` reader - The status bit for SPI_APP1_INT interrupt."]
pub struct APP1_INT_ST_R(crate::FieldReader<bool, bool>);
impl APP1_INT_ST_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
APP1_INT_ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for APP1_INT_ST_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl R {
#[doc = "Bit 0 - The status bit for SPI_DMA_INFIFO_FULL_ERR_INT interrupt."]
#[inline(always)]
pub fn dma_infifo_full_err_int_st(&self) -> DMA_INFIFO_FULL_ERR_INT_ST_R {
DMA_INFIFO_FULL_ERR_INT_ST_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - The status bit for SPI_DMA_OUTFIFO_EMPTY_ERR_INT interrupt."]
#[inline(always)]
pub fn dma_outfifo_empty_err_int_st(&self) -> DMA_OUTFIFO_EMPTY_ERR_INT_ST_R {
DMA_OUTFIFO_EMPTY_ERR_INT_ST_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - The status bit for SPI slave Ex_QPI interrupt."]
#[inline(always)]
pub fn slv_ex_qpi_int_st(&self) -> SLV_EX_QPI_INT_ST_R {
SLV_EX_QPI_INT_ST_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - The status bit for SPI slave En_QPI interrupt."]
#[inline(always)]
pub fn slv_en_qpi_int_st(&self) -> SLV_EN_QPI_INT_ST_R {
SLV_EN_QPI_INT_ST_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - The status bit for SPI slave CMD7 interrupt."]
#[inline(always)]
pub fn slv_cmd7_int_st(&self) -> SLV_CMD7_INT_ST_R {
SLV_CMD7_INT_ST_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - The status bit for SPI slave CMD8 interrupt."]
#[inline(always)]
pub fn slv_cmd8_int_st(&self) -> SLV_CMD8_INT_ST_R {
SLV_CMD8_INT_ST_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - The status bit for SPI slave CMD9 interrupt."]
#[inline(always)]
pub fn slv_cmd9_int_st(&self) -> SLV_CMD9_INT_ST_R {
SLV_CMD9_INT_ST_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - The status bit for SPI slave CMDA interrupt."]
#[inline(always)]
pub fn slv_cmda_int_st(&self) -> SLV_CMDA_INT_ST_R {
SLV_CMDA_INT_ST_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 8 - The status bit for SPI_SLV_RD_DMA_DONE_INT interrupt."]
#[inline(always)]
pub fn slv_rd_dma_done_int_st(&self) -> SLV_RD_DMA_DONE_INT_ST_R {
SLV_RD_DMA_DONE_INT_ST_R::new(((self.bits >> 8) & 0x01) != 0)
}
#[doc = "Bit 9 - The status bit for SPI_SLV_WR_DMA_DONE_INT interrupt."]
#[inline(always)]
pub fn slv_wr_dma_done_int_st(&self) -> SLV_WR_DMA_DONE_INT_ST_R {
SLV_WR_DMA_DONE_INT_ST_R::new(((self.bits >> 9) & 0x01) != 0)
}
#[doc = "Bit 10 - The status bit for SPI_SLV_RD_BUF_DONE_INT interrupt."]
#[inline(always)]
pub fn slv_rd_buf_done_int_st(&self) -> SLV_RD_BUF_DONE_INT_ST_R {
SLV_RD_BUF_DONE_INT_ST_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 11 - The status bit for SPI_SLV_WR_BUF_DONE_INT interrupt."]
#[inline(always)]
pub fn slv_wr_buf_done_int_st(&self) -> SLV_WR_BUF_DONE_INT_ST_R {
SLV_WR_BUF_DONE_INT_ST_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 12 - The status bit for SPI_TRANS_DONE_INT interrupt."]
#[inline(always)]
pub fn trans_done_int_st(&self) -> TRANS_DONE_INT_ST_R {
TRANS_DONE_INT_ST_R::new(((self.bits >> 12) & 0x01) != 0) | DMA_SEG_TRANS_DONE_INT_ST_R::new(((self.bits >> 13) & 0x01) != 0)
}
#[doc = "Bit 14 - The status bit for SPI_SEG_MAGIC_ERR_INT interrupt."]
#[inline(always)]
pub fn seg_magic_err_int_st(&self) -> SEG_MAGIC_ERR_INT_ST_R {
SEG_MAGIC_ERR_INT_ST_R::new(((self.bits >> 14) & 0x01) != 0)
}
#[doc = "Bit 15 - The status bit for SPI_SLV_BUF_ADDR_ERR_INT interrupt."]
#[inline(always)]
pub fn slv_buf_addr_err_int_st(&self) -> SLV_BUF_ADDR_ERR_INT_ST_R {
SLV_BUF_ADDR_ERR_INT_ST_R::new(((self.bits >> 15) & 0x01) != 0)
}
#[doc = "Bit 16 - The status bit for SPI_SLV_CMD_ERR_INT interrupt."]
#[inline(always)]
pub fn slv_cmd_err_int_st(&self) -> SLV_CMD_ERR_INT_ST_R {
SLV_CMD_ERR_INT_ST_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 17 - The status bit for SPI_MST_RX_AFIFO_WFULL_ERR_INT interrupt."]
#[inline(always)]
pub fn mst_rx_afifo_wfull_err_int_st(&self) -> MST_RX_AFIFO_WFULL_ERR_INT_ST_R {
MST_RX_AFIFO_WFULL_ERR_INT_ST_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 18 - The status bit for SPI_MST_TX_AFIFO_REMPTY_ERR_INT interrupt."]
#[inline(always)]
pub fn mst_tx_afifo_rempty_err_int_st(&self) -> MST_TX_AFIFO_REMPTY_ERR_INT_ST_R {
MST_TX_AFIFO_REMPTY_ERR_INT_ST_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 19 - The status bit for SPI_APP2_INT interrupt."]
#[inline(always)]
pub fn app2_int_st(&self) -> APP2_INT_ST_R {
APP2_INT_ST_R::new(((self.bits >> 19) & 0x01) != 0)
}
#[doc = "Bit 20 - The status bit for SPI_APP1_INT interrupt."]
#[inline(always)]
pub fn app1_int_st(&self) -> APP1_INT_ST_R {
APP1_INT_ST_R::new(((self.bits >> 20) & 0x01) != 0)
}
}
#[doc = "SPI DMA interrupt status register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dma_int_st](index.html) module"]
pub struct DMA_INT_ST_SPEC;
impl crate::RegisterSpec for DMA_INT_ST_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [dma_int_st::R](R) reader structure"]
impl crate::Readable for DMA_INT_ST_SPEC {
type Reader = R;
}
#[doc = "`reset()` method sets DMA_INT_ST to value 0"]
impl crate::Resettable for DMA_INT_ST_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
} | }
#[doc = "Bit 13 - The status bit for SPI_DMA_SEG_TRANS_DONE_INT interrupt."]
#[inline(always)]
pub fn dma_seg_trans_done_int_st(&self) -> DMA_SEG_TRANS_DONE_INT_ST_R { |
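// Usage sketch (the peripheral handle is assumed; svd2rust exposes this
// register through the owning peripheral struct):
//
// let st = spi.dma_int_st.read();
// if st.trans_done_int_st().bit_is_set() {
//     // handle the transfer-done interrupt
// }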