file_name (3-137 chars) | prefix (0-918k chars) | suffix (0-962k chars) | middle (0-812k chars)
---|---|---|---|
birth_death_simulator_test.py | import unittest
import networkx as nx
import numpy as np
from typing import List, Tuple
from cassiopeia.data.CassiopeiaTree import CassiopeiaTree
from cassiopeia.mixins import TreeSimulatorError
from cassiopeia.simulator.BirthDeathFitnessSimulator import (
BirthDeathFitnessSimulator,
)
import cassiopeia.data.utilities as utilities
def extract_tree_statistics(
tree: CassiopeiaTree,
) -> Tuple[List[float], int, bool]:
"""A helper function for testing simulated trees.
Outputs the total lived time for each extant lineage, the number of extant
lineages, and whether the tree has the expected node degrees (to ensure
unifurcations were collapsed).
Args:
tree: The tree to test
Returns:
The total time lived for each leaf, the number of leaves, and whether
all non-root nodes have out-degree 0 or 2
"""
times = []
out_degrees = []
for i in tree.nodes:
if tree.is_leaf(i):
times.append(tree.get_time(i))
out_degrees.append(len(tree.children(i)))
out_degrees.pop(0)
correct_degrees = all(x == 2 or x == 0 for x in out_degrees)
return times, len(times), correct_degrees
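# Usage sketch:
#   times, num_leaves, degrees_ok = extract_tree_statistics(tree)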
class BirthDeathSimulatorTest(unittest.TestCase):
def test_bad_waiting_distributions(self):
"""Ensures errors when invalid distributions are given."""
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: -1, 1, experiment_time=1
)
tree = bd_sim.simulate_tree()
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(lambda _: 0, 1, num_extant=4)
tree = bd_sim.simulate_tree()
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 1, 1, lambda: -1, num_extant=1
)
tree = bd_sim.simulate_tree()
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 1, 1, lambda: 0, experiment_time=1
)
tree = bd_sim.simulate_tree()
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 1,
1,
lambda: 0,
mutation_distribution=lambda: -1,
fitness_distribution=lambda: 1,
experiment_time=1,
)
tree = bd_sim.simulate_tree()
def test_bad_stopping_conditions(self):
"""Ensures errors when an invalid stopping conditions are given."""
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(lambda _: 1, 1, lambda: 2)
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 1, 1, lambda: 2, num_extant=0.5
)
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 1, 1, lambda: 2, num_extant=-1
)
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 1, 1, lambda: 2, num_extant=0
)
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 1, 1, lambda: 2, experiment_time=-1
)
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 1, 1, lambda: 2, experiment_time=0
)
def test_dead_at_start(self):
"""Ensures errors in base case where all lineages die on first event."""
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 2, 1, lambda: 1, num_extant=4
)
tree = bd_sim.simulate_tree()
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
lambda _: 2, 1, lambda: 1, experiment_time=4
)
tree = bd_sim.simulate_tree()
def test_dead_before_end(self):
"""Ensures errors when all lineages die before stopping condition."""
birth_wd = lambda scale: np.random.exponential(scale)
death_wd = lambda: np.random.exponential(0.6)
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
birth_wd, 0.5, death_wd, num_extant=8, random_seed=5
)
tree = bd_sim.simulate_tree()
with self.assertRaises(TreeSimulatorError):
bd_sim = BirthDeathFitnessSimulator(
birth_wd, 0.5, death_wd, experiment_time=2, random_seed=5
)
tree = bd_sim.simulate_tree()
def test_single_lineage(self):
"""Tests base case that stopping conditions work before divisions."""
bd_sim = BirthDeathFitnessSimulator(lambda _: 1, 1, num_extant=1)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
self.assertEqual(results[1], 1)
self.assertEqual(tree.get_branch_length("0", "1"), 1.0)
self.assertEqual(results[0], [1])
bd_sim = BirthDeathFitnessSimulator(lambda _: 1, 1, experiment_time=1)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
self.assertEqual(results[1], 1)
self.assertEqual(tree.get_branch_length("0", "1"), 1.0)
self.assertEqual(results[0], [1])
def test_constant_yule(self):
"""Tests small case without death with constant waiting times."""
bd_sim = BirthDeathFitnessSimulator(lambda _: 1, 1, num_extant=32)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
for i in results[0]:
self.assertEqual(i, 6)
self.assertEqual(results[1], 32)
self.assertTrue(results[2])
bd_sim = BirthDeathFitnessSimulator(lambda _: 1, 1, experiment_time=6)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
for i in results[0]:
self.assertEqual(i, 6)
self.assertEqual(results[1], 32)
self.assertTrue(results[2])
def test_nonconstant_yule(self):
"""Tests case without death with variable waiting times."""
birth_wd = lambda scale: np.random.exponential(scale)
bd_sim = BirthDeathFitnessSimulator(
birth_wd, 1, num_extant=16, random_seed=54
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
self.assertTrue(all(np.isclose(x, results[0][0]) for x in results[0]))
self.assertEqual(results[1], 16)
self.assertTrue(results[2])
self.assertEqual(max([int(i) for i in tree.nodes]), 31)
bd_sim = BirthDeathFitnessSimulator(
birth_wd, 1, experiment_time=2, random_seed=54
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
for i in results[0]:
self.assertEqual(i, 2)
self.assertTrue(results[2])
def test_nonconstant_birth_death(self):
"""Tests case with with variable birth and death waiting times.
Also, tests pruning dead lineages and unifurcation collapsing."""
birth_wd = lambda scale: np.random.exponential(scale)
death_wd = lambda: np.random.exponential(1.5)
bd_sim = BirthDeathFitnessSimulator(
birth_wd, 0.5, death_wd, num_extant=8, random_seed=1234
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
self.assertTrue(all(np.isclose(x, results[0][0]) for x in results[0]))
self.assertEqual(results[1], 8)
self.assertTrue(results[2])
self.assertNotIn("9", tree.nodes)
self.assertNotIn("2", tree.nodes)
bd_sim = BirthDeathFitnessSimulator(
birth_wd, 0.5, death_wd, experiment_time=2, random_seed=1234
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
for i in results[0]:
self.assertTrue(np.isclose(i, 2))
self.assertTrue(results[2])
self.assertNotIn("9", tree.nodes)
self.assertNotIn("2", tree.nodes)
def test_nonconstant_birth_death_no_unifurcation_collapsing(self):
"""Tests case with with variable birth and death waiting times.
Checks that unifurcations are not collapsed."""
birth_wd = lambda scale: np.random.exponential(scale)
death_wd = lambda: np.random.exponential(1.5)
bd_sim = BirthDeathFitnessSimulator(
birth_wd,
0.5,
death_wd,
num_extant=8,
collapse_unifurcations=False,
random_seed=12,
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
self.assertTrue(all(np.isclose(x, results[0][0]) for x in results[0]))
self.assertEqual(results[1], 8)
self.assertFalse(results[2])
self.assertNotIn("3", tree.nodes)
self.assertIn("2", tree.nodes)
self.assertIn("6", tree.nodes)
bd_sim = BirthDeathFitnessSimulator(
birth_wd,
0.5,
death_wd,
experiment_time=1.3,
collapse_unifurcations=False,
random_seed=12,
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
for i in results[0]:
self.assertTrue(np.isclose(i, 1.3))
self.assertFalse(results[2])
self.assertNotIn("3", tree.nodes)
self.assertIn("2", tree.nodes)
self.assertIn("6", tree.nodes)
def test_nonconstant_birth_death_both_stopping_conditions(self):
"""Tests case with with variable birth and death waiting times.
Checks that using both stopping conditions works fine."""
birth_wd = lambda scale: np.random.exponential(scale)
death_wd = lambda: np.random.exponential(1.5)
bd_sim = BirthDeathFitnessSimulator(
birth_wd,
0.5,
death_wd,
num_extant=8,
experiment_time=2,
random_seed=17,
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
self.assertTrue(all(np.isclose(x, results[0][0]) for x in results[0]))
self.assertTrue(all(x > 1 for x in results[0]))
self.assertEqual(results[1], 8)
self.assertTrue(results[2])
bd_sim = BirthDeathFitnessSimulator(
birth_wd,
0.5,
death_wd,
num_extant=8,
experiment_time=1,
random_seed=17,
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
for i in results[0]:
self.assertTrue(np.isclose(i, 1))
self.assertEqual(results[1], 3)
self.assertTrue(results[2])
def test_nonconstant_yule_with_predictable_fitness(self):
"""Tests case with birth and death with constant fitness."""
def check_fitness_values_as_expected(tree: nx.DiGraph):
"""Checks if the fitness value stored at each node is what we
expect given deterministic fitness evolution"""
tree = tree.copy()
for u, v in tree.edges:
tree[u][v]["val"] = 1
tree.nodes["0"]["depth"] = 0
for u, v in nx.dfs_edges(tree, source="0"):
tree.nodes[v]["depth"] = (
tree.nodes[u]["depth"] + tree[u][v]["val"]
)
leaves = [n for n in tree if tree.out_degree(n) == 0]
for i in tree.nodes:
if i in leaves:
self.assertTrue(
np.isclose(
tree.nodes[i]["birth_scale"],
0.5 * 0.98 ** (2 * (tree.nodes[i]["depth"] - 1)),
)
)
else:
self.assertTrue(
np.isclose(
tree.nodes[i]["birth_scale"],
0.5 * 0.98 ** (2 * tree.nodes[i]["depth"]),
)
)
birth_wd = lambda scale: np.random.exponential(scale)
bd_sim = BirthDeathFitnessSimulator(
birth_wd,
0.5,
mutation_distribution=lambda: 2,
fitness_distribution=lambda: 1,
fitness_base=0.98,
num_extant=8,
random_seed=1234,
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
self.assertTrue(all(np.isclose(x, results[0][0]) for x in results[0]))
self.assertEqual(results[1], 8)
self.assertTrue(results[2])
check_fitness_values_as_expected(tree.get_tree_topology())
bd_sim = BirthDeathFitnessSimulator(
birth_wd,
0.5,
mutation_distribution=lambda: 2,
fitness_distribution=lambda: 1,
fitness_base=0.98,
experiment_time=0.6,
random_seed=1234,
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
for i in results[0]:
self.assertTrue(np.isclose(i, 0.6))
self.assertTrue(results[2])
check_fitness_values_as_expected(tree.get_tree_topology()) | """Tests a case with variable birth and death waiting times, as well
as variable fitness evolution. Also tests pruning and collapsing."""
birth_wd = lambda scale: np.random.exponential(scale)
death_wd = lambda: np.random.exponential(0.6)
mut_dist = lambda: 1 if np.random.uniform() < 0.2 else 0
fit_dist = lambda: np.random.uniform(-1, 1)
bd_sim = BirthDeathFitnessSimulator(
birth_wd,
0.5,
death_wd,
mut_dist,
fit_dist,
1.5,
num_extant=8,
random_seed=12364,
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
self.assertTrue(all(np.isclose(x, results[0][0]) for x in results[0]))
self.assertEqual(results[1], 8)
self.assertTrue(results[2])
self.assertNotIn(2, tree.nodes)
self.assertNotIn(3, tree.nodes)
bd_sim = BirthDeathFitnessSimulator(
birth_wd,
0.5,
death_wd,
mut_dist,
fit_dist,
1.5,
experiment_time=3,
random_seed=12364,
)
tree = bd_sim.simulate_tree()
results = extract_tree_statistics(tree)
for i in results[0]:
self.assertTrue(np.isclose(i, 3))
self.assertTrue(results[2])
self.assertNotIn(2, tree.nodes)
self.assertNotIn(3, tree.nodes)
if __name__ == "__main__":
unittest.main() |
def test_nonconstant_birth_death_with_variable_fitness(self): |
game.rs | use crate::joystick::Direction;
use hal::time::Instant;
use heapless::{consts::*, Vec};
use wyhash::wyrng;
#[derive(Debug, PartialEq, Clone, Copy)]
struct Point {
x: u8,
y: u8,
}
struct | {
body: Vec<Point, U64>, // TODO: Make size of snake (and map) configurable
direction: Direction,
}
impl Snake {
pub fn new(start_point: Point, start_direction: Direction) -> Self {
let mut body = Vec::new();
body.push(start_point)
.expect("Unable to add element to empty vec");
Snake {
body,
direction: start_direction,
}
}
pub fn slither(&mut self, new_direction: Option<Direction>, ate_fruit: bool) {
// Update the snake's direction if supplied
if let Some(dir) = new_direction {
if let Some(dir) = Snake::valid_direction(dir) {
// Don't let the snake turn 180 degrees back on itself.
if !dir.opposite(&self.direction) {
self.direction = dir;
}
}
}
// Given the current heading, we want to add a segment to the front of the snake
let next_head = self.next_head(self.direction, self.head());
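// heapless::Vec has no push-front, so reverse + push + reverse
// prepends the new head segment.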
self.body.reverse();
if !self.is_full() {
self.body.push(next_head).expect("Snake has grown too long");
}
self.body.reverse();
if !ate_fruit {
self.body.pop(); // Remove segment from tail of the snake
}
}
pub fn render(&self) -> [u8; 8] {
let mut snake = [0, 0, 0, 0, 0, 0, 0, 0];
for &p in self.body.iter() {
snake[p.y as usize] |= (1 << p.x) as u8;
}
snake
}
pub fn is_full(&self) -> bool {
self.body.len() == self.body.capacity()
}
pub fn len(&self) -> usize {
self.body.len()
}
pub fn head(&self) -> Point {
self.body[0]
}
pub fn collided_with_tail(&self) -> bool {
let head = self.head();
for &body in self.body.iter().skip(1) {
if body == head {
return true;
}
}
false
}
fn next_head(&self, direction: Direction, current_head: Point) -> Point {
let mut next = current_head;
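// Note: East/West movement is mirrored here (East decrements x),
// presumably to match the bit ordering used by render().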
match direction {
Direction::North => {
next.y = Snake::bounded_subtract_one(current_head.y.into(), 8) as u8
}
Direction::East => next.x = Snake::bounded_subtract_one(current_head.x.into(), 8) as u8,
Direction::South => next.y = Snake::bounded_add_one(current_head.y.into(), 8) as u8,
Direction::West => next.x = Snake::bounded_add_one(current_head.x.into(), 8) as u8,
_ => panic!("Unhandled direction: {:?}", direction),
}
next
}
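// Wrap-around arithmetic: moving off one edge of the 8x8 grid re-enters
// from the opposite edge.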
fn bounded_add_one(val: u32, bound: u32) -> u32 {
(val + 1) % bound
}
fn bounded_subtract_one(val: u32, bound: u32) -> u32 {
if val == 0 {
bound - 1
} else {
val - 1
}
}
fn valid_direction(direction: Direction) -> Option<Direction> {
// Keep processing as simple as possible by ignoring some points of the compass
match direction {
Direction::NorthWest
| Direction::SouthEast
| Direction::NorthEast
| Direction::SouthWest => None,
_ => Some(direction),
}
}
}
#[derive(PartialEq, Debug, Clone, Copy)]
pub enum GameState {
Running,
GameOver,
}
pub struct Game {
snake: Snake,
fruit: Point,
seed: Instant,
state: GameState,
}
impl Game {
pub fn new(seed: Instant) -> Self {
Game {
seed,
snake: Snake::new(Game::random_point(seed), Direction::West),
fruit: Game::random_point(seed),
state: GameState::Running,
}
}
/// Tick the game forward, returning the state of the game after the 'tick'.
///
pub fn tick(&mut self, user_input: Option<Direction>) -> GameState {
if self.state == GameState::GameOver {
return self.state;
}
// Check if the snake's head is on top of the fruit then slither forward.
let ate_fruit: bool = self.snake.head() == self.fruit;
self.snake.slither(user_input, ate_fruit);
// Check for gameover state
if self.is_game_over() {
self.state = GameState::GameOver;
return self.state;
}
if ate_fruit {
self.fruit = Game::random_point(self.seed);
}
GameState::Running
}
/// Return a representation of the game world
///
pub fn render(&mut self) -> [u8; 8] {
let mut world = self.snake.render();
world[self.fruit.y as usize] |= (1 << self.fruit.x) as u8;
world
}
/// Reset the snake's length, the location of fruit, and the direction of the snake
///
pub fn reset(&mut self) {
self.snake = Snake::new(Game::random_point(self.seed), Direction::West);
self.fruit = Game::random_point(self.seed);
self.state = GameState::Running;
}
/// Check for game over conditions
///
pub fn is_game_over(&self) -> bool {
self.snake.collided_with_tail() || self.snake.is_full()
}
/// Get the current score.
///
pub fn score(&self) -> usize {
// Subtract 1, as the player always starts with at least one segment (the snake's head)
self.snake.len() - 1
}
/// Generate a random x / y co-ordinate.
///
fn random_point(seed: Instant) -> Point {
Point {
x: wyrng(&mut (seed.elapsed() as u64)) as u8 % 8,
y: wyrng(&mut (seed.elapsed() as u64)) as u8 % 8,
}
}
}
| Snake |
news_spider.py | import scrapy
from os import path
INDEX_PATH = './resources/index'
RECORD_PATH = './resources/record'
class | (scrapy.Spider):
name = "index"
start_urls = ['http://fund.10jqka.com.cn/smxw_list/index_1.shtml']
def parse_record(self, response):
filename: str = response.url.split('/')[-1]
with open(path.join(RECORD_PATH, filename), 'wb') as f:
f.write(response.body)
self.log(f"Save file {filename}")
def parse(self, response):
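# Note: lstrip/rstrip strip *character sets*, not prefixes/suffixes; this
# works for 'index_<n>.shtml' URLs because the digits are not in either set.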
page = int(response.url.split('/')[-1].lstrip('index_').rstrip('.shtml'))
filename = f"index-{page}.shtml"
with open(path.join(INDEX_PATH, filename), 'wb') as f:
f.write(response.body)
self.log(f'Saved file {filename}')
for href in response.css('div.listNews > div > h1 > a::attr(href)'):
yield response.follow(href, self.parse_record)
next_page = response.css('.nextpage > a::attr(href)').get()
if next_page is not None:
yield scrapy.Request(next_page, callback=self.parse)
| NewsSpider |
generate_load.py | import barnum, random, time, json, requests, math, os
from mysql.connector import connect, Error
from kafka import KafkaProducer
# CONFIG
userSeedCount = 10000
itemSeedCount = 1000
purchaseGenCount = 500000
purchaseGenEveryMS = 100
pageviewMultiplier = 75 # Translates to 75x purchases, currently 750/sec or 65M/day
itemInventoryMin = 1000
itemInventoryMax = 5000
itemPriceMin = 5
itemPriceMax = 500
mysqlHost = 'mysql'
mysqlPort = '3306'
mysqlUser = 'root'
mysqlPass = 'debezium'
kafkaHostPort = os.getenv('KAFKA_ADDR', 'kafka:9092')
kafkaTopic = 'pageviews'
debeziumHostPort = 'debezium:8083'
channels = ['organic search', 'paid search', 'referral', 'social', 'display']
categories = ['widgets', 'gadgets', 'doodads', 'clearance']
# INSERT TEMPLATES
item_insert = "INSERT INTO shop.items (name, category, price, inventory) VALUES ( %s, %s, %s, %s )"
user_insert = "INSERT INTO shop.users (email, is_vip) VALUES ( %s, %s )"
purchase_insert = "INSERT INTO shop.purchases (user_id, item_id, quantity, purchase_price) VALUES ( %s, %s, %s, %s )"
# Initialize Debezium (Kafka Connect component)
requests.post(('http://%s/connectors' % debeziumHostPort),
json={
"name": "mysql-connector",
"config": {
"connector.class": "io.debezium.connector.mysql.MySqlConnector",
"database.hostname": mysqlHost,
"database.port": mysqlPort,
"database.user": mysqlUser,
"database.password": mysqlPass,
"database.server.name": mysqlHost,
"database.server.id": '1234',
"database.history.kafka.bootstrap.servers": kafkaHostPort,
"database.history.kafka.topic": "mysql-history",
"time.precision.mode": "connect"
}
}
)
# Initialize Kafka
producer = KafkaProducer(bootstrap_servers=[kafkaHostPort],
value_serializer=lambda x:
json.dumps(x).encode('utf-8'))
def generatePageview(viewer_id, target_id, page_type):
|
try:
with connect(
host=mysqlHost,
user=mysqlUser,
password=mysqlPass,
) as connection:
with connection.cursor() as cursor:
print("Initializing shop database...")
cursor.execute('CREATE DATABASE IF NOT EXISTS shop;')
cursor.execute(
"""CREATE TABLE IF NOT EXISTS shop.users
(
id SERIAL PRIMARY KEY,
email VARCHAR(255),
is_vip BOOLEAN DEFAULT FALSE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
);"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS shop.items
(
id SERIAL PRIMARY KEY,
name VARCHAR(100),
category VARCHAR(100),
price DECIMAL(7,2),
inventory INT,
inventory_updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
);"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS shop.purchases
(
id SERIAL PRIMARY KEY,
user_id BIGINT UNSIGNED REFERENCES user(id),
item_id BIGINT UNSIGNED REFERENCES item(id),
status TINYINT UNSIGNED DEFAULT 1,
quantity INT UNSIGNED DEFAULT 1,
purchase_price DECIMAL(12,2),
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
);"""
)
connection.commit()
print("Seeding data...")
cursor.executemany(
item_insert,
[
(
barnum.create_nouns(),
random.choice(categories),
random.randint(itemPriceMin*100,itemPriceMax*100)/100,
random.randint(itemInventoryMin,itemInventoryMax)
) for i in range(itemSeedCount)
]
)
cursor.executemany(
user_insert,
[
(
barnum.create_email(),
(random.randint(0,10) > 8)
) for i in range(userSeedCount)
]
)
connection.commit()
print("Getting item ID and PRICEs...")
cursor.execute("SELECT id, price FROM shop.items")
item_prices = [(row[0], row[1]) for row in cursor]
print("Preparing to loop + seed kafka pageviews and purchases")
for i in range(purchaseGenCount):
# Get a user and item to purchase
purchase_item = random.choice(item_prices)
purchase_user = random.randint(0,userSeedCount-1)
purchase_quantity = random.randint(1,5)
# Write purchaser pageview
producer.send(kafkaTopic, key=str(purchase_user).encode('ascii'), value=generatePageview(purchase_user, purchase_item[0], 'products'))
# Write random pageviews to products or profiles
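# The pageview volume oscillates +/-50 around the base multiplier on a
# slow sine wave, so traffic is not perfectly uniform.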
pageviewOscillator = int(pageviewMultiplier + (math.sin(time.time()/1000)*50))
for i in range(pageviewOscillator):
rand_user = random.randint(0,userSeedCount)
rand_page_type = random.choice(['products', 'profiles'])
target_id_max_range = itemSeedCount if rand_page_type == 'products' else userSeedCount
producer.send(kafkaTopic, key=str(rand_user).encode('ascii'), value=generatePageview(rand_user, random.randint(0,target_id_max_range), rand_page_type))
# Write purchase row
cursor.execute(
purchase_insert,
(
purchase_user,
purchase_item[0],
purchase_quantity,
purchase_item[1] * purchase_quantity
)
)
connection.commit()
# Pause
time.sleep(purchaseGenEveryMS/1000)
connection.close()
except Error as e:
print(e)
| return {
"user_id": viewer_id,
"url": f'/{page_type}/{target_id}',
"channel": random.choice(channels),
"received_at": int(time.time())
} |
cartesian_constraint.rs | use na::{DVector, Real};
use std::ops::Range;
use crate::joint::JointConstraint;
use crate::math::{AngularVector, Point, ANGULAR_DIM, Rotation};
use crate::object::{BodyPartHandle, BodySet};
use crate::solver::helper;
use crate::solver::{ConstraintSet, GenericNonlinearConstraint, IntegrationParameters,
NonlinearConstraintGenerator};
/// A constraint that removes all relative angular motion between two body parts.
pub struct CartesianConstraint<N: Real> {
b1: BodyPartHandle,
b2: BodyPartHandle,
anchor1: Point<N>,
ref_frame1: Rotation<N>,
anchor2: Point<N>,
ref_frame2: Rotation<N>,
ang_impulses: AngularVector<N>,
bilateral_ground_rng: Range<usize>,
bilateral_rng: Range<usize>,
}
impl<N: Real> CartesianConstraint<N> {
/// Creates a cartesian constraint between two body parts.
///
/// This ensures that the rotational parts of the frames identified by `ref_frame1` and
/// `ref_frame2`, attached to the corresponding bodies, will coincide.
pub fn new(
b1: BodyPartHandle,
b2: BodyPartHandle,
anchor1: Point<N>,
ref_frame1: Rotation<N>,
anchor2: Point<N>,
ref_frame2: Rotation<N>,
) -> Self {
CartesianConstraint {
b1,
b2,
anchor1,
ref_frame1,
anchor2,
ref_frame2,
ang_impulses: AngularVector::zeros(),
bilateral_ground_rng: 0..0,
bilateral_rng: 0..0,
}
}
/// Changes the reference frame for the first body part.
pub fn set_reference_frame_1(&mut self, ref_frame1: Rotation<N>) {
self.ref_frame1 = ref_frame1
}
/// Changes the reference frame for the second body part.
pub fn set_reference_frame_2(&mut self, frame2: Rotation<N>) |
/// Changes the attach point for the first body part.
pub fn set_anchor_1(&mut self, anchor1: Point<N>) {
self.anchor1 = anchor1
}
/// Changes the attach point for the second body part.
pub fn set_anchor_2(&mut self, anchor2: Point<N>) {
self.anchor2 = anchor2
}
}
impl<N: Real> JointConstraint<N> for CartesianConstraint<N> {
fn num_velocity_constraints(&self) -> usize {
ANGULAR_DIM
}
fn anchors(&self) -> (BodyPartHandle, BodyPartHandle) {
(self.b1, self.b2)
}
fn velocity_constraints(
&mut self,
_: &IntegrationParameters<N>,
bodies: &BodySet<N>,
ext_vels: &DVector<N>,
ground_j_id: &mut usize,
j_id: &mut usize,
jacobians: &mut [N],
constraints: &mut ConstraintSet<N>,
) {
let body1 = try_ret!(bodies.body(self.b1.0));
let body2 = try_ret!(bodies.body(self.b2.0));
let part1 = try_ret!(body1.part(self.b1.1));
let part2 = try_ret!(body2.part(self.b2.1));
let pos1 = body1.position_at_material_point(part1, &self.anchor1) * self.ref_frame1;
let pos2 = body2.position_at_material_point(part2, &self.anchor2) * self.ref_frame2;
let anchor1 = Point::from(pos1.translation.vector);
let anchor2 = Point::from(pos2.translation.vector);
let assembly_id1 = body1.companion_id();
let assembly_id2 = body2.companion_id();
let first_bilateral_ground = constraints.velocity.bilateral_ground.len();
let first_bilateral = constraints.velocity.bilateral.len();
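// Remember where this constraint's bilateral rows start; the ranges
// recorded below let cache_impulses() read the solver's impulses back.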
helper::cancel_relative_angular_velocity(
body1,
part1,
body2,
part2,
assembly_id1,
assembly_id2,
&anchor1,
&anchor2,
ext_vels,
&self.ang_impulses,
0,
ground_j_id,
j_id,
jacobians,
constraints,
);
self.bilateral_ground_rng =
first_bilateral_ground..constraints.velocity.bilateral_ground.len();
self.bilateral_rng = first_bilateral..constraints.velocity.bilateral.len();
}
fn cache_impulses(&mut self, constraints: &ConstraintSet<N>) {
for c in &constraints.velocity.bilateral_ground[self.bilateral_ground_rng.clone()] {
self.ang_impulses[c.impulse_id] = c.impulse;
}
for c in &constraints.velocity.bilateral[self.bilateral_rng.clone()] {
self.ang_impulses[c.impulse_id] = c.impulse;
}
}
}
impl<N: Real> NonlinearConstraintGenerator<N> for CartesianConstraint<N> {
fn num_position_constraints(&self, bodies: &BodySet<N>) -> usize {
// FIXME: calling this at each iteration of the non-linear resolution is costly.
if self.is_active(bodies) {
1
} else {
0
}
}
fn position_constraint(
&self,
params: &IntegrationParameters<N>,
_: usize,
bodies: &mut BodySet<N>,
jacobians: &mut [N],
) -> Option<GenericNonlinearConstraint<N>> {
let body1 = bodies.body(self.b1.0)?;
let body2 = bodies.body(self.b2.0)?;
let part1 = body1.part(self.b1.1)?;
let part2 = body2.part(self.b2.1)?;
let pos1 = body1.position_at_material_point(part1, &self.anchor1) * self.ref_frame1;
let pos2 = body2.position_at_material_point(part2, &self.anchor2) * self.ref_frame2;
let anchor1 = Point::from(pos1.translation.vector);
let anchor2 = Point::from(pos2.translation.vector);
let rotation1 = pos1.rotation;
let rotation2 = pos2.rotation;
helper::cancel_relative_rotation(
params,
body1,
part1,
body2,
part2,
&anchor1,
&anchor2,
&rotation1,
&rotation2,
jacobians,
)
}
}
| {
self.ref_frame2 = frame2
} |
node.rs | // Copyright Materialize, Inc. and contributors. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//! A driver for interacting with Maelstrom
//!
//! This translates input into requests and requests into output. It also
//! handles issuing Maelstrom [service] requests. It's very roughly based on
//! the node in Maelstrom's [ruby examples].
//!
//! [service]: https://github.com/jepsen-io/maelstrom/blob/v0.2.1/doc/services.md
//! [ruby examples]: https://github.com/jepsen-io/maelstrom/blob/v0.2.1/demo/ruby/node.rb
use std::collections::HashMap;
use std::io::{BufRead, Write};
use std::sync::{Arc, Mutex};
use anyhow::anyhow;
use async_trait::async_trait;
use serde_json::Value;
use tokio::sync::oneshot;
use tracing::{info, trace};
use mz_persist::location::ExternalError;
use mz_persist_client::ShardId;
use crate::maelstrom::api::{Body, ErrorCode, MaelstromError, Msg, MsgId, NodeId};
use crate::maelstrom::Args;
/// An implementor of a Maelstrom [workload].
///
/// [workload]: https://github.com/jepsen-io/maelstrom/blob/v0.2.1/doc/workload.md
#[async_trait]
pub trait Service: Sized + Send + Sync {
/// Construct this service.
///
/// Maelstrom services are available via the [Handle].
async fn init(args: &Args, handle: &Handle) -> Result<Self, MaelstromError>;
/// Respond to a single request.
///
/// Implementations must either panic or respond by calling
/// [Handle::send_res] exactly once.
async fn eval(&self, handle: Handle, src: NodeId, req: Body);
}
/// Runs the RPC loop, accepting Maelstrom workload requests, issuing responses,
/// and communicating with Maelstrom services.
pub fn run<R, W, S>(args: Args, read: R, write: W) -> Result<(), anyhow::Error>
where
R: BufRead,
W: Write + Send + Sync + 'static,
S: Service + 'static,
{
let mut node = Node::<S>::new(args, write);
for line in read.lines() {
let line = line.map_err(|err| anyhow!("req read failed: {}", err))?;
trace!("raw: [{}]", line);
let req: Msg = line
.parse()
.map_err(|err| anyhow!("invalid req {}: {}", err, line))?;
if req.body.in_reply_to().is_none() {
info!("req: {}", req);
} else {
trace!("req: {}", req);
}
node.handle(req);
}
Ok(())
}
struct Node<S>
where
S: Service + 'static,
{
args: Args,
core: Arc<Mutex<Core>>,
node_id: Option<NodeId>,
service: Arc<AsyncInitOnceWaitable<Arc<S>>>,
}
impl<S> Node<S>
where
S: Service + 'static,
{
fn new<W>(args: Args, write: W) -> Self
where
W: Write + Send + Sync + 'static,
{
let core = Core {
write: Box::new(write),
next_msg_id: MsgId(0),
callbacks: HashMap::new(),
};
Node {
args,
core: Arc::new(Mutex::new(core)),
node_id: None,
service: Arc::new(AsyncInitOnceWaitable::new()),
}
}
pub fn handle(&mut self, msg: Msg) {
// If we've been initialized (i.e. have a NodeId), respond to the
// message.
if let Some(node_id) = self.node_id.as_ref() {
// This message is not for us
if node_id != &msg.dest {
return;
}
let handle = Handle {
node_id: node_id.clone(),
core: Arc::clone(&self.core),
};
let body = match handle.maybe_handle_service_res(&msg.src, msg.body) {
Ok(()) => return,
Err(x) => x,
};
let service = Arc::clone(&self.service);
let _ = mz_ore::task::spawn(|| format!("maelstrom::handle"), async move {
let service = service.get().await;
let () = service.eval(handle, msg.src, body).await;
});
return;
}
// Otherwise, if we haven't yet been initialized, then the only message
// type we are allowed to process is ReqInit.
match msg.body {
Body::ReqInit {
msg_id, node_id, ..
} => {
// Set the NodeId.
self.node_id = Some(node_id.clone());
let handle = Handle {
node_id,
core: Arc::clone(&self.core),
};
// Respond to the init req.
//
// NB: This must come _before_ service init! We want service
// init to be able to use Maelstrom services, but Maelstrom
// doesn't make services available to nodes that haven't yet
// responded to init.
let in_reply_to = msg_id;
handle.send_res(msg.src, move |msg_id| Body::ResInit {
msg_id,
in_reply_to,
});
// Tricky! Run the service init in a task in case it uses
// Maelstrom services. This is because Maelstrom services return
// responses on stdin, which means we need to be processing the
// run loop concurrently with this. This is also the reason for
// the AsyncInitOnceWaitable nonsense.
let args = self.args.clone();
let service_init = Arc::clone(&self.service);
let _ = mz_ore::task::spawn(|| format!("maelstrom::init"), async move {
let service = match S::init(&args, &handle).await {
Ok(x) => x,
Err(err) => {
// If service initialization fails, there's nothing
// to do but panic. Any retries should be pushed
// into the impl of `init`.
panic!("service initialization failed: {}", err);
}
};
service_init.init_once(Arc::new(service)).await;
});
}
// All other reqs are a no-op. We can't even error without a NodeId.
_ => {}
}
}
}
struct Core {
write: Box<dyn Write + Send + Sync>,
next_msg_id: MsgId,
callbacks: HashMap<MsgId, oneshot::Sender<Body>>,
}
impl std::fmt::Debug for Core {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
// Destructure the struct to be defensive against new fields.
let Core {
write: _,
next_msg_id,
callbacks,
} = self;
f.debug_struct("Core")
.field("next_msg_id", &next_msg_id)
.field("callbacks", &callbacks.keys().collect::<Vec<_>>())
.finish_non_exhaustive()
}
}
impl Core {
fn alloc_msg_id(&mut self) -> MsgId {
self.next_msg_id = self.next_msg_id.next();
self.next_msg_id
}
}
/// A handle to interact with Node.
#[derive(Debug, Clone)]
pub struct Handle {
node_id: NodeId,
core: Arc<Mutex<Core>>,
}
impl Handle {
/// Send a response to Maelstrom.
///
/// `dest` should be the `src` of the response. To make a service request,
/// use [Self::send_service_req] instead.
pub fn send_res<BodyFn: FnOnce(MsgId) -> Body>(&self, dest: NodeId, res_fn: BodyFn) {
let mut core = self.core.lock().expect("mutex poisoned");
let msg_id = core.alloc_msg_id();
let res = Msg {
src: self.node_id.clone(),
dest,
body: res_fn(msg_id),
};
info!("res: {}", res);
write!(core.write.as_mut(), "{}\n", res).expect("res write failed");
}
/// Issue a service request to Maelstrom.
///
/// `dest` should be the service name. To respond to a request, use
/// [Self::send_res] instead.
pub async fn send_service_req<BodyFn: FnOnce(MsgId) -> Body>(
&self,
dest: NodeId,
req_fn: BodyFn,
) -> Body {
let (tx, rx) = oneshot::channel();
{
let mut core = self.core.lock().expect("mutex poisoned");
let msg_id = core.alloc_msg_id();
core.callbacks.insert(msg_id, tx);
let req = Msg {
src: self.node_id.clone(),
dest,
body: req_fn(msg_id),
};
trace!("svc: {}", req);
write!(core.write.as_mut(), "{}\n", req).expect("req write failed");
}
rx.await.expect("internal error: callback oneshot dropped")
}
/// Attempts to handle a msg as a service response, returning it back if it
/// isn't one.
pub fn maybe_handle_service_res(&self, src: &NodeId, msg: Body) -> Result<(), Body> {
let in_reply_to = match msg.in_reply_to() {
Some(x) => x,
None => return Err(msg),
};
let mut core = self.core.lock().expect("mutex poisoned");
let callback = match core.callbacks.remove(&in_reply_to) {
Some(x) => x,
None => {
self.send_res(src.clone(), |msg_id| Body::Error {
msg_id: Some(msg_id),
in_reply_to,
code: ErrorCode::MalformedRequest,
text: format!("no callback expected for {:?}", in_reply_to),
});
return Ok(());
}
};
if let Err(_) = callback.send(msg) {
// The caller is no longer listening. This is safe to ignore.
return Ok(());
}
Ok(())
}
/// Returns a [ShardId] for this Maelstrom run.
///
/// Uses Maelstrom services to ensure all nodes end up with the same id.
pub async fn maybe_init_shard_id(&self) -> Result<ShardId, MaelstromError> {
let proposal = ShardId::new();
let key = "SHARD";
loop {
let from = Value::Null;
let to = Value::from(proposal.to_string());
match self
.lin_kv_compare_and_set(Value::from(key), from, to, Some(true))
.await
{
Ok(()) => {
info!("initialized maelstrom shard to {}", proposal);
return Ok(proposal);
}
Err(MaelstromError {
code: ErrorCode::PreconditionFailed,
..
}) => match self.lin_kv_read(Value::from(key)).await? {
Some(value) => {
let value = value.as_str().ok_or_else(|| {
ExternalError::from(anyhow!("invalid SHARD {}", value))
})?;
let shard_id = value.parse::<ShardId>().map_err(|err| {
ExternalError::from(anyhow!("invalid SHARD {}: {}", value, err))
})?;
info!("fetched maelstrom shard id {}", shard_id);
return Ok(shard_id);
}
None => continue,
},
Err(err) => return Err(err),
}
}
}
/// Issues a Maelstrom lin-kv service read request.
pub async fn lin_kv_read(&self, key: Value) -> Result<Option<Value>, MaelstromError> {
let dest = NodeId("lin-kv".to_string());
let res = self
.send_service_req(dest, move |msg_id| Body::ReqLinKvRead { msg_id, key })
.await;
match res {
Body::Error {
code: ErrorCode::KeyDoesNotExist,
..
} => Ok(None),
Body::Error { code, text, .. } => Err(MaelstromError { code, text }),
Body::ResLinKvRead { value, .. } => Ok(Some(value)),
res => unimplemented!("unsupported res: {:?}", res),
}
}
/// Issues a Maelstrom lin-kv service write request.
pub async fn lin_kv_write(&self, key: Value, value: Value) -> Result<(), MaelstromError> {
let dest = NodeId("lin-kv".to_string());
let res = self
.send_service_req(dest, move |msg_id| Body::ReqLinKvWrite {
msg_id,
key,
value,
})
.await;
match res {
Body::Error { code, text, .. } => Err(MaelstromError { code, text }),
Body::ResLinKvWrite { .. } => Ok(()),
res => unimplemented!("unsupported res: {:?}", res),
}
}
/// Issues a Maelstrom lin-kv service cas request.
pub async fn lin_kv_compare_and_set(
&self,
key: Value,
from: Value,
to: Value,
create_if_not_exists: Option<bool>,
) -> Result<(), MaelstromError> {
trace!(
"lin_kv_compare_and_set key={:?} from={:?} to={:?} create_if_not_exists={:?}",
key,
from,
to,
create_if_not_exists
);
let dest = NodeId("lin-kv".to_string());
let res = self
.send_service_req(dest, move |msg_id| Body::ReqLinKvCaS {
msg_id,
key,
from,
to, | .await;
match res {
Body::Error { code, text, .. } => Err(MaelstromError { code, text }),
Body::ResLinKvCaS { .. } => Ok(()),
res => unimplemented!("unsupported res: {:?}", res),
}
}
}
/// A helper for a value that is initialized once, but used from many async
/// places.
///
/// This name sure is a mouthful. Anyone have a suggestion?
#[derive(Debug)]
struct AsyncInitOnceWaitable<T: Clone> {
core: tokio::sync::Mutex<(Option<T>, Vec<oneshot::Sender<T>>)>,
}
impl<T: Clone> AsyncInitOnceWaitable<T> {
pub fn new() -> Self {
let core = (None, Vec::new());
AsyncInitOnceWaitable {
core: tokio::sync::Mutex::new(core),
}
}
pub async fn init_once(&self, t: T) {
let mut core = self.core.lock().await;
assert!(core.0.is_none(), "init called more than once");
core.0 = Some(t.clone());
for tx in core.1.drain(..) {
let _ = tx.send(t.clone());
}
}
pub async fn get(&self) -> T {
let rx = {
let mut core = self.core.lock().await;
if let Some(x) = core.0.as_ref() {
return x.clone();
}
let (tx, rx) = tokio::sync::oneshot::channel();
core.1.push(tx);
rx
};
rx.await.expect("internal error: waiter oneshot dropped")
}
} | create_if_not_exists,
}) |
lib.rs | use pmutil::{q, IdentExt, SpanExt};
use proc_macro2::Span;
use syn::{
parse, punctuated::Punctuated, spanned::Spanned, Arm, BinOp, Block, Data, DeriveInput, Expr,
ExprBinary, ExprBlock, Field, FieldPat, Fields, Ident, Index, Member, Pat, PatIdent, PatStruct,
PatTuple, Path, Stmt, Token,
};
/// Derives `swc_common::TypeEq`.
///
/// - Fields annotated with `#[use_eq]` will be compared using `==`.
/// - Fields annotated with `#[not_type]` will be ignored.
#[proc_macro_derive(TypeEq, attributes(not_type, use_eq, use_eq_ignore_span))]
pub fn derive_type_eq(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
Deriver {
trait_name: Ident::new("TypeEq", Span::call_site()),
method_name: Ident::new("type_eq", Span::call_site()),
ignore_field: Box::new(|field| {
// Search for `#[not_type]`.
for attr in &field.attrs {
if attr.path.is_ident("not_type") {
return true;
}
}
false
}),
}
.derive(item)
}
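// A usage sketch (hypothetical type; assumes this derive is in scope):
//
//   #[derive(TypeEq)]
//   struct Ty {
//       kind: Kind,   // compared recursively with type_eq
//       #[not_type]
//       span: Span,   // ignored
//   }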
/// Derives `swc_common::EqIgnoreSpan`.
///
///
/// Fields annotated with `#[not_spanned]` or `#[use_eq]` will use `==` instead
/// of `eq_ignore_span`.
#[proc_macro_derive(EqIgnoreSpan, attributes(not_spanned, use_eq))]
pub fn derive_eq_ignore_span(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
Deriver {
trait_name: Ident::new("EqIgnoreSpan", Span::call_site()),
method_name: Ident::new("eq_ignore_span", Span::call_site()),
ignore_field: Box::new(|_field| {
// We call eq_ignore_span for all fields.
false
}),
}
.derive(item)
}
struct Deriver {
trait_name: Ident,
method_name: Ident,
ignore_field: Box<dyn Fn(&Field) -> bool>,
}
impl Deriver {
fn derive(&self, item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let input: DeriveInput = parse(item).unwrap();
let body = self.make_body(&input.data);
q!(
Vars {
TraitName: &self.trait_name,
Type: &input.ident,
method_name: &self.method_name,
body,
},
{
#[automatically_derived]
impl ::swc_common::TraitName for Type {
#[allow(non_snake_case)]
fn method_name(&self, other: &Self) -> bool {
body
}
}
}
)
.into()
}
fn make_body(&self, data: &Data) -> Expr {
match data {
Data::Struct(s) => {
let arm = self.make_arm_from_fields(q!({ Self }).parse(), &s.fields);
q!(Vars { arm }, (match (self, other) { arm })).parse()
}
Data::Enum(e) => {
//
let mut arms = Punctuated::<_, Token![,]>::default();
for v in &e.variants {
let arm = self.make_arm_from_fields(
q!(Vars { Variant: &v.ident }, { Self::Variant }).parse(),
&v.fields,
);
arms.push(arm);
}
arms.push(
q!({
_ => false
})
.parse(),
);
q!(Vars { arms }, (match (self, other) { arms })).parse()
} |
fn make_arm_from_fields(&self, pat_path: Path, fields: &Fields) -> Arm {
let mut l_pat_fields = Punctuated::<_, Token![,]>::default();
let mut r_pat_fields = Punctuated::<_, Token![,]>::default();
let mut exprs = vec![];
for (i, field) in fields
.iter()
.enumerate()
.filter(|(_, f)| !(self.ignore_field)(f))
{
let method_name = if field
.attrs
.iter()
.any(|attr| attr.path.is_ident("not_spanned") || attr.path.is_ident("use_eq"))
{
Ident::new("eq", Span::call_site())
} else if field
.attrs
.iter()
.any(|attr| attr.path.is_ident("use_eq_ignore_span"))
{
Ident::new("eq_ignore_span", Span::call_site())
} else {
self.method_name.clone()
};
let base = field
.ident
.clone()
.unwrap_or_else(|| Ident::new(&format!("_{}", i), field.ty.span()));
//
let l_binding_ident = base.new_ident_with(|base| format!("_l_{}", base));
let r_binding_ident = base.new_ident_with(|base| format!("_r_{}", base));
let make_pat_field = |ident: &Ident| FieldPat {
attrs: Default::default(),
member: match &field.ident {
Some(v) => Member::Named(v.clone()),
None => Member::Unnamed(Index {
index: i as _,
span: field.ty.span(),
}),
},
colon_token: Some(ident.span().as_token()),
pat: Box::new(Pat::Ident(PatIdent {
attrs: Default::default(),
by_ref: Some(field.ident.span().as_token()),
mutability: None,
ident: ident.clone(),
subpat: None,
})),
};
l_pat_fields.push(make_pat_field(&l_binding_ident));
r_pat_fields.push(make_pat_field(&r_binding_ident));
exprs.push(
q!(
Vars {
method_name: &method_name,
l: &l_binding_ident,
r: &r_binding_ident
},
{ l.method_name(r) }
)
.parse::<Expr>(),
);
}
// true && a.type_eq(&other.a) && b.type_eq(&other.b)
let mut expr: Expr = q!({ true }).parse();
for expr_el in exprs {
expr = Expr::Binary(ExprBinary {
attrs: Default::default(),
left: Box::new(expr),
op: BinOp::And(Span::call_site().as_token()),
right: Box::new(expr_el),
});
}
Arm {
attrs: Default::default(),
pat: Pat::Tuple(PatTuple {
attrs: Default::default(),
paren_token: Span::call_site().as_token(),
elems: {
let mut elems = Punctuated::default();
elems.push(Pat::Struct(PatStruct {
attrs: Default::default(),
path: pat_path.clone(),
brace_token: Span::call_site().as_token(),
fields: l_pat_fields,
dot2_token: Some(Span::call_site().as_token()),
}));
elems.push(Pat::Struct(PatStruct {
attrs: Default::default(),
path: pat_path,
brace_token: Span::call_site().as_token(),
fields: r_pat_fields,
dot2_token: Some(Span::call_site().as_token()),
}));
elems
},
}),
guard: Default::default(),
fat_arrow_token: Span::call_site().as_token(),
body: Box::new(Expr::Block(ExprBlock {
attrs: Default::default(),
label: Default::default(),
block: Block {
brace_token: Span::call_site().as_token(),
stmts: vec![Stmt::Expr(expr)],
},
})),
comma: Default::default(),
}
}
} | Data::Union(_) => unimplemented!("union"),
}
} |
smoke_test.go | // +build integration
package cognitoidentityprovider
| "testing"
"time"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/cognitoidentityprovider"
"github.com/aws/aws-sdk-go-v2/service/internal/integrationtest"
"github.com/awslabs/smithy-go"
)
func TestInteg_00_ListUserPools(t *testing.T) {
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
cfg, err := integrationtest.LoadConfigWithDefaultRegion("us-west-2")
if err != nil {
t.Fatalf("failed to load config, %v", err)
}
client := cognitoidentityprovider.NewFromConfig(cfg)
params := &cognitoidentityprovider.ListUserPoolsInput{
MaxResults: aws.Int32(10),
}
_, err = client.ListUserPools(ctx, params)
if err != nil {
t.Errorf("expect no error, got %v", err)
}
}
func TestInteg_01_DescribeUserPool(t *testing.T) {
ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
defer cancelFn()
cfg, err := integrationtest.LoadConfigWithDefaultRegion("us-west-2")
if err != nil {
t.Fatalf("failed to load config, %v", err)
}
client := cognitoidentityprovider.NewFromConfig(cfg)
params := &cognitoidentityprovider.DescribeUserPoolInput{
UserPoolId: aws.String("us-east-1:aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"),
}
_, err = client.DescribeUserPool(ctx, params)
if err == nil {
t.Fatalf("expect request to fail")
}
var apiErr smithy.APIError
if !errors.As(err, &apiErr) {
t.Fatalf("expect error to be API error, was not, %v", err)
}
if len(apiErr.ErrorCode()) == 0 {
t.Errorf("expect non-empty error code")
}
if len(apiErr.ErrorMessage()) == 0 {
t.Errorf("expect non-empty error message")
}
} | import (
"context"
"errors" |
hash_ext.rs | use crate::assert_length;
use crate::encode;
use crate::hash_type;
use crate::HashType;
use crate::HashableContent;
use crate::HashableContentBytes;
use crate::AiHash;
use crate::AiHashOf;
use crate::AiHashed;
use crate::AI_HASH_CORE_LEN;
use hash_type::HashTypeAsync;
use hash_type::HashTypeSync;
/// The maximum size to hash synchronously. Anything larger than this will
/// take too long to hash within a single tokio context.
pub const MAX_HASHABLE_CONTENT_LEN: usize = 16 * 1000 * 1000; // 16 MB
impl<T: HashTypeSync> AiHash<T> {
/// Synchronously hash a reference to the given content to produce a AiHash
/// If the content is larger than MAX_HASHABLE_CONTENT_LEN, this will **panic**!
pub fn with_data_sync<C: HashableContent<HashType = T>>(content: &C) -> AiHash<T> {
hash_from_content(content)
}
}
impl<T, C> AiHashed<C>
where
T: HashTypeSync,
C: HashableContent<HashType = T>,
{
/// Compute the hash of this content and store it alongside
pub fn from_content_sync(content: C) -> Self {
let hash: AiHashOf<C> = AiHash::<T>::with_data_sync(&content);
Self { content, hash }
}
/// Verify that the cached hash matches the content.
/// Important to run this after e.g. deserialization.
pub fn verify_hash_sync(&self) -> Result<(), AiHash<T>> {
let hash = AiHash::<T>::with_data_sync(&self.content);
if self.hash == hash {
Ok(())
} else {
Err(hash)
}
}
}
impl<T: HashTypeAsync> AiHash<T> {
/// Asynchronously hash a reference to the given content to produce a AiHash
// TODO: this needs to be pushed onto a background thread if the content is large
pub async fn with_data<C: HashableContent<HashType = T>>(content: &C) -> AiHash<T> |
}
impl<T, C> AiHashed<C>
where
T: HashTypeAsync,
C: HashableContent<HashType = T>,
{
/// Compute the hash of this content and store it alongside
pub async fn from_content(content: C) -> Self {
let hash: AiHashOf<C> = AiHash::<T>::with_data(&content).await;
Self { content, hash }
}
/// Verify that the cached hash matches the content.
/// Important to run this after e.g. deserialization.
pub async fn verify_hash(&self) -> Result<(), AiHash<T>> {
let hash = AiHash::<T>::with_data(&self.content).await;
if self.hash == hash {
Ok(())
} else {
Err(hash)
}
}
}
fn hash_from_content<T: HashType, C: HashableContent<HashType = T>>(content: &C) -> AiHash<T> {
match content.hashable_content() {
HashableContentBytes::Content(sb) => {
let bytes: Vec<u8> = aingle_middleware_bytes::UnsafeBytes::from(sb).into();
let hash = encode::blake2b_256(&bytes);
assert_length!(AI_HASH_CORE_LEN, &hash);
AiHash::<T>::from_raw_32_and_type(hash, content.hash_type())
}
HashableContentBytes::Prehashed39(bytes) => AiHash::from_raw_39_panicky(bytes),
}
}
| {
hash_from_content(content)
} |
tailwind.config.js | module.exports = {
purge: ["./src/**/*.{js,jsx,ts,tsx}", "./public/index.html"],
darkMode: false, // or 'media' or 'class' | colors: {
primary: "#000000",
secondary: "#20852D",
footer: "#1A1A1A",
btn: "#F49E0B",
},
fontFamily: {
Opensans: ['"Open Sans"', "cursive"],
Boston: ['"Boston"', "cursive"],
Know: ['"Know"', "cursive"],
},
},
},
variants: {
extend: {},
},
plugins: [],
}; | theme: {
extend: { |
ffmpeg.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
import io
import math
import os
import shutil
from typing import Any, Dict, Optional, Union
import augly.audio.utils as audutils
import ffmpeg
import numpy as np
from augly.utils import pathmgr, SILENT_AUDIO_PATH
from augly.utils.ffmpeg import FFMPEG_PATH, FFPROBE_PATH
from ffmpeg.nodes import FilterableStream
def combine_frames_and_audio_to_file(
raw_frames: str,
audio: Optional[Union[str, io.BytesIO]],
output_path: str,
framerate: float,
) -> None:
frame_dir = os.path.dirname(raw_frames)
if not os.path.isdir(frame_dir):
raise RuntimeError(
f"Got raw frames glob path of {raw_frames}, but {frame_dir} is not "
"a directory"
)
video_stream = ffmpeg.input(raw_frames, pattern_type="glob", framerate=framerate)
video_stream = video_stream.filter(
"pad", **{"width": "ceil(iw/2)*2", "height": "ceil(ih/2)*2"}
)
merge_video_and_audio(video_stream, audio, output_path)
def extract_audio_to_file(video_path: str, output_audio_path: str) -> None:
audio_info = get_audio_info(video_path)
sample_rate = str(audio_info["sample_rate"])
codec = audio_info["codec_name"]
if os.path.splitext(output_audio_path)[-1] == ".aac":
(
ffmpeg.input(video_path, loglevel="quiet")
.output(output_audio_path, acodec=codec, ac=1)
.overwrite_output()
.run(cmd=FFMPEG_PATH)
)
else:
out, err = (
ffmpeg.input(video_path, loglevel="quiet")
.output("-", format="f32le", acodec="pcm_f32le", ac=1, ar=sample_rate)
.run(cmd=FFMPEG_PATH, capture_stdout=True, capture_stderr=True)
)
audio = np.frombuffer(out, np.float32)
audutils.ret_and_save_audio(audio, output_audio_path, int(sample_rate))
def extract_frames_to_dir(
video_path: str,
output_dir: str,
output_pattern: str = "raw_frame%08d.jpg",
quality: int = 0,
scale: float = 1,
) -> None:
video_info = get_video_info(video_path)
(
ffmpeg.input(video_path, ss=0, loglevel="quiet")
.filter("scale", f"iw*{scale}", f"ih*{scale}")
.output(
os.path.join(output_dir, output_pattern),
vframes=video_info["nb_frames"],
**{"qscale:v": quality},
)
.overwrite_output()
.run(cmd=FFMPEG_PATH)
)
def get_audio_info(media_path: str) -> Dict[str, Any]:
"""
Returns whatever ffprobe returns. Of particular use are things such as the
encoder ("codec_name") used for audio encoding, the sample rate ("sample_rate"),
and length in seconds ("duration")
Accepts as input either an audio or video path.
"""
try:
local_media_path = pathmgr.get_local_path(media_path)
except RuntimeError:
raise FileNotFoundError(f"Provided media path {media_path} does not exist")
probe = ffmpeg.probe(local_media_path, cmd=FFPROBE_PATH)
audio_info = next(
(stream for stream in probe["streams"] if stream["codec_type"] == "audio"),
None,
)
assert (
audio_info is not None
), "Error retrieving audio metadata, please verify that an audio stream exists"
return audio_info
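# A minimal usage sketch (assumes a local "clip.mp4" with an audio stream):
#
#   info = get_audio_info("clip.mp4")
#   print(info["codec_name"], info["sample_rate"], info["duration"])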
def get_video_fps(video_path: str) -> Optional[float]:
video_info = get_video_info(video_path)
try:
frame_rate = video_info["avg_frame_rate"]
# ffmpeg often returns fractional framerates, e.g. 225480/7523
if "/" in frame_rate:
num, denom = (float(f) for f in frame_rate.split("/"))
return num / denom
else:
return float(frame_rate)
except Exception:
return None
def get_video_info(video_path: str) -> Dict[str, Any]:
"""
Returns whatever ffprobe returns. Of particular use are things such as the FPS
("avg_frame_rate"), number of raw frames ("nb_frames"), height and width of each
frame ("height", "width") and length in seconds ("duration")
"""
try:
local_video_path = pathmgr.get_local_path(video_path)
except RuntimeError:
raise FileNotFoundError(f"Provided video path {video_path} does not exist")
probe = ffmpeg.probe(local_video_path, cmd=FFPROBE_PATH)
video_info = next(
(stream for stream in probe["streams"] if stream["codec_type"] == "video"),
None,
)
assert (
video_info is not None
), "Error retrieving video metadata, please verify that the video file exists"
return video_info
def has_audio_stream(video_path: str) -> bool:
streams = ffmpeg.probe(video_path, cmd=FFPROBE_PATH)["streams"]
for stream in streams:
if stream["codec_type"] == "audio":
return True
return False
def add_silent_audio(
video_path: str,
output_path: Optional[str] = None,
duration: Optional[float] = None,
) -> None:
local_video_path = pathmgr.get_local_path(video_path)
if local_video_path != video_path:
assert (
output_path is not None
), "If remote video_path is provided, an output_path must be provided" | if has_audio_stream(video_path):
if video_path != output_path:
shutil.copy(video_path, output_path)
return
duration = duration or float(get_video_info(video_path)["duration"])
video = ffmpeg.input(video_path).video
silent_audio_path = pathmgr.get_local_path(SILENT_AUDIO_PATH)
audio = ffmpeg.input(silent_audio_path, stream_loop=math.ceil(duration)).audio
output = ffmpeg.output(video, audio, output_path, pix_fmt="yuv420p", t=duration)
output.overwrite_output().run(cmd=FFMPEG_PATH)
def merge_video_and_audio(
video_stream: FilterableStream,
audio: Optional[Union[str, io.BytesIO]],
output_path: str,
) -> None:
kwargs = {"c:v": "libx264", "c:a": "copy", "bsf:a": "aac_adtstoasc"}
if audio:
audio_stream = ffmpeg.input(audio, loglevel="quiet")
output = ffmpeg.output(
video_stream, audio_stream, output_path, pix_fmt="yuv420p", **kwargs
).overwrite_output()
else:
output = ffmpeg.output(
video_stream, output_path, pix_fmt="yuv420p", **kwargs
).overwrite_output()
output.run(cmd=FFMPEG_PATH) | video_path = local_video_path
output_path = output_path or video_path
|
vk_physical_device_fragment_shader_barycentric_features.rs | // Generated by `scripts/generate.js`
use std::os::raw::c_char;
use std::ops::Deref;
use std::ptr;
use std::cmp;
use std::mem;
use utils::c_bindings::*;
use utils::vk_convert::*;
use utils::vk_null::*;
use utils::vk_ptr::*;
use utils::vk_traits::*;
use vulkan::vk::*;
use vulkan::vk::{VkStructureType,RawVkStructureType};
/// Wrapper for [VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV.html).
#[derive(Debug, Clone)]
pub struct VkPhysicalDeviceFragmentShaderBarycentricFeatures {
pub fragment_shader_barycentric: bool,
}
#[doc(hidden)]
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct RawVkPhysicalDeviceFragmentShaderBarycentricFeatures {
pub s_type: RawVkStructureType,
pub next: *mut c_void,
pub fragment_shader_barycentric: u32,
}
impl VkWrappedType<RawVkPhysicalDeviceFragmentShaderBarycentricFeatures> for VkPhysicalDeviceFragmentShaderBarycentricFeatures {
fn vk_to_raw(src: &VkPhysicalDeviceFragmentShaderBarycentricFeatures, dst: &mut RawVkPhysicalDeviceFragmentShaderBarycentricFeatures) {
dst.s_type = vk_to_raw_value(&VkStructureType::PhysicalDeviceFragmentShaderBarycentricFeaturesNv);
dst.next = ptr::null_mut();
dst.fragment_shader_barycentric = vk_to_raw_value(&src.fragment_shader_barycentric);
}
}
impl VkRawType<VkPhysicalDeviceFragmentShaderBarycentricFeatures> for RawVkPhysicalDeviceFragmentShaderBarycentricFeatures {
fn vk_to_wrapped(src: &RawVkPhysicalDeviceFragmentShaderBarycentricFeatures) -> VkPhysicalDeviceFragmentShaderBarycentricFeatures {
VkPhysicalDeviceFragmentShaderBarycentricFeatures {
fragment_shader_barycentric: u32::vk_to_wrapped(&src.fragment_shader_barycentric),
}
}
}
impl Default for VkPhysicalDeviceFragmentShaderBarycentricFeatures {
fn default() -> VkPhysicalDeviceFragmentShaderBarycentricFeatures {
VkPhysicalDeviceFragmentShaderBarycentricFeatures {
fragment_shader_barycentric: false, | }
}
}
impl VkSetup for VkPhysicalDeviceFragmentShaderBarycentricFeatures {
fn vk_setup(&mut self, fn_table: *mut VkFunctionTable) {
}
}
impl VkFree for RawVkPhysicalDeviceFragmentShaderBarycentricFeatures {
fn vk_free(&self) {
}
} | |
completion.go | /*
Copyright © 2021 NAME HERE <EMAIL ADDRESS>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. | import (
"os"
"github.com/spf13/cobra"
)
// completionCmd represents the completion command
var completionCmd = &cobra.Command{
Use: "completion [bash|zsh|fish|powershell]",
Short: "Generate completion script",
Long: `To load completions:
Bash:
$ source <(ecsher completion bash)
# To load completions for each session, execute once:
# Linux:
$ ecsher completion bash > /etc/bash_completion.d/ecsher
# macOS:
$ ecsher completion bash > /usr/local/etc/bash_completion.d/ecsher
Zsh:
# If shell completion is not already enabled in your environment,
# you will need to enable it. You can execute the following once:
$ echo "autoload -U compinit; compinit" >> ~/.zshrc
# To load completions for each session, execute once:
$ ecsher completion zsh > "${fpath[1]}/_ecsher"
# You will need to start a new shell for this setup to take effect.
fish:
$ ecsher completion fish | source
# To load completions for each session, execute once:
$ ecsher completion fish > ~/.config/fish/completions/ecsher.fish
PowerShell:
PS> ecsher completion powershell | Out-String | Invoke-Expression
# To load completions for every new session, run:
PS> ecsher completion powershell > ecsher.ps1
# and source this file from your PowerShell profile.
`,
DisableFlagsInUseLine: true,
ValidArgs: []string{"bash", "zsh", "fish", "powershell"},
Args: cobra.ExactValidArgs(1),
Run: func(cmd *cobra.Command, args []string) {
err := runCompletion(cmd, args[0])
cobra.CheckErr(err)
},
}
func init() {
rootCmd.AddCommand(completionCmd)
}
func runCompletion(cmd *cobra.Command, arg string) error {
switch arg {
case "bash":
return cmd.Root().GenBashCompletion(os.Stdout)
case "zsh":
return cmd.Root().GenZshCompletion(os.Stdout)
case "fish":
return cmd.Root().GenFishCompletion(os.Stdout, true)
case "powershell":
return cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout)
default:
return nil
}
}
|
dashboardPieChart.directive.js | /**
* @author v.lugovksy
* created on 16.12.2015
*/
(function () {
'use strict';
angular.module('BlurAdmin.pages.smartWifi')
.directive('dashboardPieChart', dashboardPieChart);
/** @ngInject */
function dashboardPieChart() {
return {
restrict: 'E',
controller: 'DashboardPieChartCtrl',
templateUrl: 'app/pages/smartWifi/dashboard/dashboardPieChart/dashboardPieChart.html'
};
}
})();
invoice_files.py | import os
def get_invoice_files(invoices, year=False):
for invoice in invoices:
if invoice.invoice_file:
# Get folder for this invoice and create it if it doesn't exist
if not invoice.afa:
folder = invoice.invoice_type.name
else:
folder = 'afa'
if not os.path.exists(folder):
os.mkdir(folder)
invoice_name = '{}-{}-{}.{}'.format(
invoice.contact_alias,
invoice.invoice_number,
invoice.date.isoformat(),
invoice.invoice_file_type,
)
path = os.path.join(folder, invoice_name)
with open(path, "wb") as invoice_file:
invoice_file.write(invoice.invoice_file)
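# A minimal usage sketch (hypothetical: assumes an iterable of invoice objects
# exposing the attributes accessed above, e.g. rows from an ORM query):
#
#   invoices = Invoice.select().where(Invoice.year == 2020)  # illustrative only
#   get_invoice_files(invoices)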
services.py | # Copyright (c) 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import posixpath
from muranoclient.common import base
def normalize_path(f):
@functools.wraps(f)
def f_normalize_path(*args, **kwargs):
path = args[2] if len(args) >= 3 else kwargs['path']
# the path is formally just an absolute unix path
if not posixpath.isabs(path):
    raise ValueError("Parameter 'path' should start with '/'")
args = list(args)
if len(args) >= 3:
args[2] = args[2][1:]
else:
kwargs['path'] = kwargs['path'][1:]
return f(*args, **kwargs)
return f_normalize_path
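# For example (illustrative): manager.get(env_id, 'services/web') raises the
# ValueError above, while manager.get(env_id, '/services/web') has its leading
# '/' stripped so the '/v1/environments/{0}/services/{1}' templates below do
# not end up with a doubled slash.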
class Service(base.Resource):
def __repr__(self):
return '<Service %s>' % self._info
def data(self, **kwargs):
return self.manager.data(self, **kwargs)
def _add_details(self, info):
if isinstance(info, dict):
for k, v in info.items():
setattr(self, k, v)
class ServiceManager(base.Manager):
resource_class = Service
def list(self, environment_id, session_id=None):
if session_id:
headers = {'X-Configuration-Session': session_id}
else:
headers = {}
return self._list("/v1/environments/{0}/services".
format(environment_id), headers=headers)
@normalize_path
def get(self, environment_id, path, session_id=None):
if session_id:
headers = {'X-Configuration-Session': session_id}
else:
headers = {}
return self._get('/v1/environments/{0}/services/{1}'.
format(environment_id, path), headers=headers)
@normalize_path
def post(self, environment_id, path, data, session_id):
headers = {'X-Configuration-Session': session_id}
result = self._create('/v1/environments/{0}/services/{1}'.
format(environment_id, path), data,
headers=headers, return_raw=True)
if isinstance(result, list):
return [self.resource_class(self, item) for item in result]
else:
return self.resource_class(self, result)
@normalize_path
def put(self, environment_id, path, data, session_id):
headers = {'X-Configuration-Session': session_id}
return self._update('/v1/environments/{0}/services/{1}'.
format(environment_id, path), data,
headers=headers)
@normalize_path
def delete(self, environment_id, path, session_id):
headers = {'X-Configuration-Session': session_id}
path = '/v1/environments/{0}/services/{1}'.format(environment_id, path)
return self._delete(path, headers=headers)
wsgi.py | """
WSGI config for Finetooth project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.wsgi import get_wsgi_application
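# A ".development" marker file in the working directory selects the plain WSGI
# application; otherwise dj_static's Cling wrapper also serves static files.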
if os.path.exists(".development"):
application = get_wsgi_application()
else:
from dj_static import Cling
application = Cling(get_wsgi_application())
driver.rs | /*!
Implementation of [`ChainDriver`].
*/
use core::str::FromStr;
use core::time::Duration;
use eyre::eyre;
use semver::Version;
use serde_json as json;
use std::fs;
use std::path::PathBuf;
use std::process::{Command, Stdio};
use std::str;
use toml;
use tracing::debug;
use ibc::core::ics24_host::identifier::ChainId;
use ibc_relayer::keyring::{HDPath, KeyEntry, KeyFile};
use crate::chain::exec::{simple_exec, ExecOutput};
use crate::chain::version::get_chain_command_version;
use crate::error::{handle_generic_error, Error};
use crate::ibc::denom::Denom;
use crate::types::env::{EnvWriter, ExportEnv};
use crate::types::process::ChildProcess;
use crate::types::wallet::{Wallet, WalletAddress, WalletId};
use crate::util::file::pipe_to_file;
use crate::util::random::random_u32;
use crate::util::retry::assert_eventually_succeed;
pub mod interchain;
pub mod query_txs;
pub mod tagged;
pub mod transfer;
/**
Number of times (in seconds) to try querying a wallet until it reaches
the target amount, as used by [`assert_eventual_wallet_amount`].
We set this to around 60 seconds to make sure that the tests still
pass in slower environments like the CI.
If you encounter retry errors, try increasing this constant. If the
test is taking much longer to reach eventual consistency, it might
be an indication of some underlying performance issue.
*/
const WAIT_WALLET_AMOUNT_ATTEMPTS: u16 = 60;
const COSMOS_HD_PATH: &str = "m/44'/118'/0'/0/0";
/**
A driver for interacting with a chain full nodes through command line.
The name `ChainDriver` is inspired by
[WebDriver](https://developer.mozilla.org/en-US/docs/Web/WebDriver),
which is the term used to describe programs that control spawning of the
web browsers. In our case, the ChainDriver is used to spawn and manage
chain full nodes.
Currently the `ChainDriver` is hardcoded to support only a single version
of Gaia chain. In the future, we will want to turn this into one or more
`ChainDriver` traits so that they can be used to spawn multiple chain
implementations other than a single version of Gaia.
*/
#[derive(Debug, Clone)]
pub struct ChainDriver {
/**
The filesystem path to the Gaia CLI. Defaults to `gaiad`.
*/
pub command_path: String,
pub command_version: Option<Version>,
/**
The ID of the chain.
*/
pub chain_id: ChainId,
/**
The home directory for the full node to store data files.
*/
pub home_path: String,
/**
The port used for RPC.
*/
pub rpc_port: u16,
/**
The port used for GRPC.
*/
pub grpc_port: u16,
pub grpc_web_port: u16,
/**
The port used for P2P. (Currently unused other than for setup)
*/
pub p2p_port: u16,
}
impl ExportEnv for ChainDriver {
fn export_env(&self, writer: &mut impl EnvWriter) {
writer.write_env("CMD", &self.command_path);
writer.write_env("HOME", &self.home_path);
writer.write_env("RPC_ADDR", &self.rpc_address());
writer.write_env("GRPC_ADDR", &self.grpc_address());
}
}
impl ChainDriver {
/// Create a new [`ChainDriver`]
pub fn create(
command_path: String,
chain_id: ChainId,
home_path: String,
rpc_port: u16,
grpc_port: u16,
grpc_web_port: u16,
p2p_port: u16,
) -> Result<Self, Error> {
// Assume we're on Gaia 6 if we can't get a version
// (eg. with `icad`, which returns an empty string).
let command_version = get_chain_command_version(&command_path)?;
Ok(Self {
command_path,
command_version,
chain_id,
home_path,
rpc_port,
grpc_port,
grpc_web_port,
p2p_port,
})
}
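// A minimal usage sketch (command path, chain ID, home path and ports are
// illustrative only):
//
//     let driver = ChainDriver::create(
//         "gaiad".to_string(),
//         ChainId::from_string("ibc-0"),
//         "/tmp/chain-a".to_string(),
//         26657, // rpc_port
//         9090,  // grpc_port
//         9091,  // grpc_web_port
//         26656, // p2p_port
//     )?;
//     let _node = driver.start()?;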
/// Returns the full URL for the RPC address.
pub fn rpc_address(&self) -> String {
format!("http://localhost:{}", self.rpc_port)
}
/// Returns the full URL for the WebSocket address.
pub fn websocket_address(&self) -> String {
format!("ws://localhost:{}/websocket", self.rpc_port)
}
/// Returns the full URL for the GRPC address.
pub fn grpc_address(&self) -> String {
format!("http://localhost:{}", self.grpc_port)
}
/**
Returns the full URL for the RPC address to listen to when starting
the full node.
This is slightly different from [`rpc_address`](ChainDriver::rpc_address)
as it requires the `"tcp://"` scheme.
*/
pub fn rpc_listen_address(&self) -> String {
format!("tcp://localhost:{}", self.rpc_port)
}
/**
Returns the full URL for the GRPC address to listen to when starting
the full node.
This is slightly different from [`grpc_address`](ChainDriver::grpc_address)
as it requires no scheme to be specified.
*/
pub fn grpc_listen_address(&self) -> String {
format!("localhost:{}", self.grpc_port)
}
/**
Execute the gaiad command with the given command line arguments, and
return the STDOUT result as a String.
This is not the most efficient way of interacting with the CLI, but
it is sufficient for testing purposes of interacting with the `gaiad`
command.
The function also outputs debug logs that show what command is being
executed, so that users can manually re-run the commands by
copying them from the logs.
*/
pub fn exec(&self, args: &[&str]) -> Result<ExecOutput, Error> {
simple_exec(self.chain_id.as_str(), &self.command_path, args)
}
/**
Initializes the chain data stores.
This is used by
[`bootstrap_single_node`](crate::bootstrap::single::bootstrap_single_node).
*/
pub fn initialize(&self) -> Result<(), Error> {
self.exec(&[
"--home",
&self.home_path,
"--chain-id",
self.chain_id.as_str(),
"init",
self.chain_id.as_str(),
])?;
Ok(())
}
/**
Modify the Gaia genesis file.
*/
pub fn update_genesis_file(
&self,
file: &str,
cont: impl FnOnce(&mut serde_json::Value) -> Result<(), Error>,
) -> Result<(), Error> {
let config1 = self.read_file(&format!("config/{}", file))?;
let mut config2 = serde_json::from_str(&config1).map_err(handle_generic_error)?;
cont(&mut config2)?;
let config3 = serde_json::to_string_pretty(&config2).map_err(handle_generic_error)?;
self.write_file("config/genesis.json", &config3)?;
Ok(())
}
/**
Write the string content to a file path relative to the chain home
directory.
This is not efficient but is sufficient for testing purposes.
*/
pub fn write_file(&self, file_path: &str, content: &str) -> Result<(), Error> {
let full_path = PathBuf::from(&self.home_path).join(file_path);
let full_path_str = format!("{}", full_path.display());
fs::write(full_path, content)?;
debug!("created new file {:?}", full_path_str);
Ok(())
}
/**
Read the content at a file path relative to the chain home
directory, and return the result as a string.
This is not efficient but is sufficient for testing purposes.
*/
pub fn read_file(&self, file_path: &str) -> Result<String, Error> {
let full_path = PathBuf::from(&self.home_path).join(file_path);
let res = fs::read_to_string(full_path)?;
Ok(res)
}
/**
Add a wallet with random ID to the full node's keyring.
*/
pub fn add_random_wallet(&self, prefix: &str) -> Result<Wallet, Error> {
let num = random_u32();
let wallet_id = format!("{}-{:x}", prefix, num);
self.add_wallet(&wallet_id)
}
/**
Add a wallet with the given ID to the full node's keyring.
*/
pub fn add_wallet(&self, wallet_id: &str) -> Result<Wallet, Error> {
let output = self.exec(&[
"--home",
self.home_path.as_str(),
"keys",
"add",
wallet_id,
"--keyring-backend",
"test",
"--output",
"json",
])?;
// gaia6 somehow displays result in stderr instead of stdout
let seed_content = if output.stdout.is_empty() {
output.stderr
} else {
output.stdout
};
let json_val: json::Value = json::from_str(&seed_content).map_err(handle_generic_error)?;
let wallet_address = json_val
.get("address")
.ok_or_else(|| eyre!("expect address string field to be present in json result"))?
.as_str()
.ok_or_else(|| eyre!("expect address string field to be present in json result"))?
.to_string();
let seed_path = format!("{}-seed.json", wallet_id);
self.write_file(&seed_path, &seed_content)?;
let hd_path = HDPath::from_str(COSMOS_HD_PATH)
.map_err(|e| eyre!("failed to create HDPath: {:?}", e))?;
let key_file: KeyFile = json::from_str(&seed_content).map_err(handle_generic_error)?;
let key = KeyEntry::from_key_file(key_file, &hd_path).map_err(handle_generic_error)?;
Ok(Wallet::new(wallet_id.to_string(), wallet_address, key))
}
/**
Add a wallet address to the genesis account list for an uninitialized
full node.
*/
pub fn add_genesis_account(
&self,
wallet: &WalletAddress,
amounts: &[(&Denom, u64)],
) -> Result<(), Error> {
let amounts_str = itertools::join(
amounts
.iter()
.map(|(denom, amount)| format!("{}{}", amount, denom)),
",",
);
self.exec(&[
"--home",
&self.home_path,
"add-genesis-account",
&wallet.0,
&amounts_str,
])?;
Ok(())
}
/**
Add a wallet ID with the given stake amount to be the genesis validator
for an uninitialized chain.
*/
pub fn add_genesis_validator(
&self,
wallet_id: &WalletId,
denom: &Denom,
amount: u64,
) -> Result<(), Error> {
let amount_str = format!("{}{}", amount, denom);
self.exec(&[
"--home",
&self.home_path,
"gentx",
&wallet_id.0,
"--keyring-backend",
"test",
"--chain-id",
self.chain_id.as_str(),
&amount_str,
])?;
Ok(())
}
/**
Call `gaiad collect-gentxs` to generate the genesis transactions.
*/
pub fn collect_gen_txs(&self) -> Result<(), Error> {
self.exec(&["--home", &self.home_path, "collect-gentxs"])?;
Ok(())
}
/**
Modify the Gaia chain config which is saved in toml format.
*/
pub fn update_chain_config(
&self,
file: &str,
cont: impl FnOnce(&mut toml::Value) -> Result<(), Error>,
) -> Result<(), Error> {
let config1 = self.read_file(&format!("config/{}", file))?;
let mut config2 = toml::from_str(&config1).map_err(handle_generic_error)?;
cont(&mut config2)?;
let config3 = toml::to_string_pretty(&config2).map_err(handle_generic_error)?;
self.write_file("config/config.toml", &config3)?;
Ok(())
}
pub fn is_v6_or_later(&self) -> bool {
match &self.command_version {
Some(version) => version.major >= 6,
None => true,
}
}
/**
Start a full node by running in the background `gaiad start`.
Returns a [`ChildProcess`] that stops the full node process when the
value is dropped.
*/
pub fn start(&self) -> Result<ChildProcess, Error> {
let base_args = [
"--home",
&self.home_path,
"start",
"--pruning",
"nothing",
"--grpc.address",
&self.grpc_listen_address(),
"--rpc.laddr",
&self.rpc_listen_address(),
];
// Gaia v6 requires the GRPC web port to be unique,
// but the argument is not available in earlier version
let extra_args = [
"--grpc-web.address",
&format!("localhost:{}", self.grpc_web_port),
];
let args: Vec<&str> = if self.is_v6_or_later() {
let mut list = base_args.to_vec();
list.extend_from_slice(&extra_args);
list
} else {
base_args.to_vec()
};
let mut child = Command::new(&self.command_path)
.args(&args)
.stdin(Stdio::null())
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;
let stdout = child
.stdout
.take()
.ok_or_else(|| eyre!("expected stdout to be present in child process"))?;
let stderr = child
.stderr
.take()
.ok_or_else(|| eyre!("expected stderr to be present in child process"))?;
pipe_to_file(stdout, &format!("{}/stdout.log", self.home_path))?;
pipe_to_file(stderr, &format!("{}/stderr.log", self.home_path))?;
Ok(ChildProcess::new(child))
}
/**
Query for the balances for a given wallet address and denomination
*/
pub fn query_balance(&self, wallet_id: &WalletAddress, denom: &Denom) -> Result<u64, Error> {
let res = self
.exec(&[
"--node",
&self.rpc_listen_address(),
"query",
"bank",
"balances",
&wallet_id.0,
"--denom",
denom.as_str(),
"--output",
"json",
])?
.stdout;
let amount_str = json::from_str::<json::Value>(&res)
.map_err(handle_generic_error)?
.get("amount")
.ok_or_else(|| eyre!("expected amount field"))?
.as_str()
.ok_or_else(|| eyre!("expected string field"))?
.to_string();
let amount = u64::from_str(&amount_str).map_err(handle_generic_error)?;
Ok(amount)
}
/**
Assert that a wallet should eventually have the expected amount in the
given denomination.
*/
pub fn assert_eventual_wallet_amount(
&self,
wallet: &WalletAddress,
target_amount: u64,
denom: &Denom,
) -> Result<(), Error> {
assert_eventually_succeed(
&format!("wallet reach {} amount {} {}", wallet, target_amount, denom),
WAIT_WALLET_AMOUNT_ATTEMPTS,
Duration::from_secs(1),
|| {
let amount = self.query_balance(wallet, denom)?;
if amount == target_amount {
Ok(())
} else {
Err(Error::generic(eyre!(
"current balance of account {} with amount {} does not match the target amount {}",
wallet,
amount,
target_amount
)))
}
},
)?;
Ok(())
}
}
runner.go | package runner
import (
"Compiler/lib"
"errors"
"strconv"
)
type Runner struct {
variables map[string]int
}
func NewRunner() Runner {
return Runner{variables: make(map[string]int)}
}
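// A minimal usage sketch (assumes an AST produced elsewhere, e.g. by this
// compiler's parser; the source snippet is illustrative only):
//
//	runner := NewRunner()
//	ret, err := runner.Evaluate(ast) // for "int a = 1 + 2;" this leaves Variables()["a"] == 3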
// Local variables
func (runner *Runner) Variables() map[string]int {
return runner.variables
}
// Evaluate an expression
func (runner *Runner) Evaluate(ast *lib.ASTNode) (ret int, err error) {
switch ast.Type {
case lib.ASTNodeType_Programm: // program entry point
for _, child := range ast.Children {
if ret, err = runner.Evaluate(child); err != nil {
return 0, err
}
}
case lib.ASTNodeType_IntDeclaration: // integer variable declaration: 'int' Id ( = additive) ';'
runner.variables[ast.Text] = 0 // initialize the variable
if len(ast.Children) > 0 { // an initializer expression is present
if ret, err = runner.Evaluate(ast.Children[0]); err != nil {
return 0, err
}
runner.variables[ast.Text] = ret
}
case lib.ASTNodeType_ExpressionStmt: // expression statement: additive ';'
if ret, err = runner.Evaluate(ast.Children[0]); err != nil {
return 0, err
}
case lib.ASTNodeType_AssignmentStmt: // assignment statement: assignmentStatement -> id = additive ';'
var ok bool
if ret, ok = runner.variables[ast.Text]; !ok {
return 0, errors.New("变量" + ast.Text + "未定义")
}
if ret, err = runner.Evaluate(ast.Children[0]); err != nil {
return 0, err
}
runner.variables[ast.Text] = ret
case lib.ASTNodeType_Primary: // primary expression; derivation never yields a primary node itself (the child node is returned directly), so this case never executes
case lib.ASTNodeType_Multiplicative: // multiplicative expression:
var l, r int
if l, err = runner.Evaluate(ast.Children[0]); err != nil {
return 0, err
}
if r, err = runner.Evaluate(ast.Children[1]); err != nil {
return 0, err
}
if ast.Text == "*" { //乘法
ret = l * r
} else { //除法
ret = l / r
}
case lib | //加法表达式
var l, r int
if l, err = runner.Evaluate(ast.Children[0]); err != nil {
return 0, err
}
if r, err = runner.Evaluate(ast.Children[1]); err != nil {
return 0, err
}
if ast.Text == "+" { //加法
ret = l + r
} else { //减法
ret = l - r
}
case lib.ASTNodeType_Identifier: // identifier
var ok bool
if ret, ok = runner.variables[ast.Text]; !ok {
return 0, errors.New("变量" + ast.Text + "未定义")
}
case lib.ASTNodeType_IntLiteral: // integer literal
if ret, err = strconv.Atoi(ast.Text); err != nil {
return 0, err
}
}
return
}
class.go | /*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package output
import (
"io"
"strings"
"github.com/kubernetes-incubator/service-catalog/pkg/svcat/service-catalog"
)
func getScope(class servicecatalog.Class) string {
if class.GetNamespace() != "" {
return servicecatalog.NamespaceScope
}
return servicecatalog.ClusterScope
}
func writeClassListTable(w io.Writer, classes []servicecatalog.Class) {
t := NewListTable(w)
t.SetHeader([]string{
"Name",
"Namespace",
"Description",
})
t.SetVariableColumn(3)
for _, class := range classes {
t.Append([]string{
class.GetExternalName(),
class.GetNamespace(),
class.GetDescription(),
})
}
t.Render()
}
// WriteClassList prints a list of classes in the specified output format.
func WriteClassList(w io.Writer, outputFormat string, classes ...servicecatalog.Class) {
switch outputFormat {
case FormatJSON:
writeJSON(w, classes)
case FormatYAML:
writeYAML(w, classes, 0)
case FormatTable:
writeClassListTable(w, classes)
}
}
// WriteClass prints a single class in the specified output format.
func WriteClass(w io.Writer, outputFormat string, class servicecatalog.Class) {
switch outputFormat {
case FormatJSON:
writeJSON(w, class)
case FormatYAML:
writeYAML(w, class, 0)
case FormatTable:
writeClassListTable(w, []servicecatalog.Class{class})
}
}
// WriteClassDetails prints details for a single class.
func WriteClassDetails(w io.Writer, class servicecatalog.Class) {
scope := getScope(class)
spec := class.GetSpec()
t := NewDetailsTable(w)
t.Append([]string{"Name:", spec.ExternalName})
if class.GetNamespace() != "" {
t.Append([]string{"Namespace:", class.GetNamespace()})
}
t.AppendBulk([][]string{
{"Scope:", scope},
{"Description:", spec.Description},
{"UUID:", class.GetName()},
{"Status:", class.GetStatusText()},
{"Tags:", strings.Join(spec.Tags, ", ")},
{"Broker:", class.GetServiceBrokerName()},
})
t.Render()
}
// WriteClassAndPlanDetails prints details for multiple classes and plans
func WriteClassAndPlanDetails(w io.Writer, classes []servicecatalog.Class, plans [][]servicecatalog.Plan) {
t := NewListTable(w)
t.SetHeader([]string{
"Class",
"Plans",
"Description",
})
for i, class := range classes {
for j, plan := range plans[i] {
if j == 0 {
t.Append([]string{
class.GetExternalName(),
plan.GetExternalName(),
class.GetSpec().Description,
})
} else {
t.Append([]string{
"",
plan.GetExternalName(),
"",
})
}
}
}
t.table.SetAutoWrapText(true)
t.SetVariableColumn(3)
t.Render()
}
cart-item.tsx | import React from 'react';
import { gql, useQuery } from '@apollo/client';
import LaunchTile from '../components/launch-tile';
import { LAUNCH_TILE_DATA } from '../pages/launches';
import * as LaunchDetailTypes from '../pages/__generated__/LaunchDetails';
export const GET_LAUNCH = gql`
query GetLaunch($launchId: ID!) {
launch(id: $launchId) {
...LaunchTile
}
}
${LAUNCH_TILE_DATA}
`;
interface CartItemProps extends LaunchDetailTypes.LaunchDetailsVariables {}
const CartItem: React.FC<CartItemProps> = ({ launchId }) => {
const { data, loading, error } = useQuery<LaunchDetailTypes.LaunchDetails, LaunchDetailTypes.LaunchDetailsVariables>(
GET_LAUNCH,
{ variables: { launchId } }
);
if (loading) return <p>Loading...</p>;
if (error) return <p>ERROR: {error.message}</p>;
if (!data) return <p>Not found</p>;
return data.launch && <LaunchTile launch={data.launch} />;
}
export default CartItem;
test_compress_string_in_memory.py | import unittest
from hypothesis import given, settings
from hypothesis.strategies import text
from sys import getsizeof
from compressStr import compress_string, decompress_string
class TestMyGzip(unittest.TestCase):
"""
Unit tests for the compressStr module.
"""
@given(string=text())
@settings(max_examples=100)
def test_combined(self, string):
"""
Test to confirm UTF-8 text is not modified by this process.
:param string: UTF-8 Characters
"""
self.assertEqual(string, decompress_string(compress_string(string)))
@given(string=text(min_size=25))
@settings(max_examples=10)
def test_compression(self, string):
"""
Test to confirm the compressed string is smaller than the original.
Using min_size because an empty string is smaller than an empty
compressed object. This min size setting ensures that we do not get
false-positive failures.
:param string: UTF-8 Characters
"""
self.assertLess(getsizeof(compress_string(string)), getsizeof(string))
if __name__ == '__main__':
unittest.main()
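# A minimal sketch of the functions under test, assuming a gzip-based
# implementation (the real compressStr module is not shown here):
#
#   import gzip
#
#   def compress_string(string: str) -> bytes:
#       return gzip.compress(string.encode('utf-8'))
#
#   def decompress_string(data: bytes) -> str:
#       return gzip.decompress(data).decode('utf-8')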
test_default_api.py | # coding: utf-8
"""
axxell-api
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import os
import sys
import unittest
import AxxellClient
from AxxellClient.rest import ApiException
from AxxellClient.apis.default_api import DefaultApi
class TestDefaultApi(unittest.TestCase):
""" DefaultApi unit test stubs """
def setUp(self):
self.api = AxxellClient.apis.default_api.DefaultApi()
def tearDown(self):
pass
def test_aggregate_count_events(self):
"""
Test case for aggregate_count_events
"""
pass
def test_aggregate_effective(self):
"""
Test case for aggregate_effective
"""
pass
def test_aggregate_events(self):
"""
Test case for aggregate_events
"""
pass
def test_aggregate_recent(self):
"""
Test case for aggregate_recent
"""
pass
def test_aggregate_top(self):
"""
Test case for aggregate_top
"""
pass
def test_auth_store(self):
"""
Test case for auth_store
"""
pass
def test_delete_all_events(self):
"""
Test case for delete_all_events
"""
pass
def test_delete_all_items(self):
"""
Test case for delete_all_items
"""
pass
def test_delete_item(self):
"""
Test case for delete_item
"""
pass
def test_recommend_interesting(self):
"""
Test case for recommend_interesting
"""
pass
def test_recommend_similar(self):
"""
Test case for recommend_similar
"""
pass
def test_register_event(self):
"""
Test case for register_event
"""
pass
def test_register_item(self):
"""
Test case for register_item
"""
pass
def test_register_store(self):
"""
Test case for register_store
"""
pass
def test_retrieve_events(self):
"""
Test case for retrieve_events
"""
pass
def test_retrieve_items(self):
"""
Test case for retrieve_items
"""
pass
if __name__ == '__main__':
unittest.main()
const.py | # -*- coding: utf-8 -*-
#
# Copyright 2017 Ricequant, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from enum import Enum
class CustomEnum(Enum):
    def __repr__(self):
        return "%s.%s" % (
            self.__class__.__name__, self._name_)
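# With this __repr__, e.g. repr(SIDE.BUY) renders as "SIDE.BUY"
# instead of the default "<SIDE.BUY: 'BUY'>".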
# noinspection PyPep8Naming
class EXECUTION_PHASE(CustomEnum):
GLOBAL = "[全局]"
ON_INIT = "[程序初始化]"
BEFORE_TRADING = "[日内交易前]"
ON_BAR = "[盘中 handle_bar 函数]"
ON_TICK = "[盘中 handle_tick 函数]"
AFTER_TRADING = "[日内交易后]"
FINALIZED = "[程序结束]"
SCHEDULED = "[scheduler函数内]"
# noinspection PyPep8Naming
class RUN_TYPE(CustomEnum):
# TODO: drop RUN_TYPE and instead control how a strategy runs by choosing which Mods are enabled
# Back Test
BACKTEST = "BACKTEST"
# Paper Trading
PAPER_TRADING = "PAPER_TRADING"
# Live Trading
LIVE_TRADING = 'LIVE_TRADING'
# noinspection PyPep8Naming
class DEFAULT_ACCOUNT_TYPE(CustomEnum):
"""
* Regarding ACCOUNT_TYPE: it currently refers to trading accounts. STOCK / FUTURE / OPTION all refer to the corresponding trading accounts in China.
* ACCOUNT_TYPE does not distinguish between exchanges; for example, A-shares trade on both the Shanghai and Shenzhen exchanges but map to a single account, hence the unified STOCK.
* No other DEFAULT_ACCOUNT_TYPE values are added for now. To add custom accounts and types, see https://github.com/ricequant/rqalpha/issues/160
"""
TOTAL = 0
BENCHMARK = 1
# Stocks
STOCK = 2
# Futures
FUTURE = 3
# Options
OPTION = 4
# noinspection PyPep8Naming
class BAR_STATUS(CustomEnum):
LIMIT_UP = "LIMIT_UP"
LIMIT_DOWN = "LIMIT_DOWN"
NORMAL = "NORMAL"
ERROR = "ERROR"
# noinspection PyPep8Naming
class MATCHING_TYPE(CustomEnum):
CURRENT_BAR_CLOSE = "CURRENT_BAR_CLOSE"
NEXT_BAR_OPEN = "NEXT_BAR_OPEN"
NEXT_TICK_LAST = "NEXT_TICK_LAST"
NEXT_TICK_BEST_OWN = "NEXT_TICK_BEST_OWN"
NEXT_TICK_BEST_COUNTERPARTY = "NEXT_TICK_BEST_COUNTERPARTY"
# noinspection PyPep8Naming
class ORDER_TYPE(CustomEnum):
MARKET = "MARKET"
LIMIT = "LIMIT"
# noinspection PyPep8Naming
class ORDER_STATUS(CustomEnum):
PENDING_NEW = "PENDING_NEW"
ACTIVE = "ACTIVE"
FILLED = "FILLED"
REJECTED = "REJECTED"
PENDING_CANCEL = "PENDING_CANCEL"
CANCELLED = "CANCELLED"
# noinspection PyPep8Naming
class SIDE(CustomEnum):
BUY = "BUY"
SELL = "SELL"
# noinspection PyPep8Naming
class POSITION_EFFECT(CustomEnum):
OPEN = "OPEN"
CLOSE = "CLOSE"
CLOSE_TODAY = "CLOSE_TODAY"
# noinspection PyPep8Naming
class EXC_TYPE(CustomEnum):
USER_EXC = "USER_EXC"
SYSTEM_EXC = "SYSTEM_EXC"
NOTSET = "NOTSET"
# noinspection PyPep8Naming
class INSTRUMENT_TYPE(CustomEnum):
CS = "CS"
FUTURE = "FUTURE"
OPTION = "OPTION"
ETF = "ETF"
LOF = "LOF"
INDX = "INDX"
FENJI_MU = "FENJI_MU"
FENJI_A = "FENJI_A"
FENJI_B = "FENJI_B"
PUBLIC_FUND = 'PublicFund'
# noinspection PyPep8Naming
class PERSIST_MODE(CustomEnum):
ON_CRASH = "ON_CRASH"
REAL_TIME = "REAL_TIME"
ON_NORMAL_EXIT = "ON_NORMAL_EXIT"
# noinspection PyPep8Naming
class MARGIN_TYPE(CustomEnum):
BY_MONEY = "BY_MONEY"
BY_VOLUME = "BY_VOLUME"
# noinspection PyPep8Naming
class COMMISSION_TYPE(CustomEnum):
BY_MONEY = "BY_MONEY"
BY_VOLUME = "BY_VOLUME"
# noinspection PyPep8Naming
class EXIT_CODE(CustomEnum):
EXIT_SUCCESS = "EXIT_SUCCESS"
EXIT_USER_ERROR = "EXIT_USER_ERROR"
EXIT_INTERNAL_ERROR = "EXIT_INTERNAL_ERROR"
# noinspection PyPep8Naming
class HEDGE_TYPE(CustomEnum):
HEDGE = "hedge"
SPECULATION = "speculation"
ARBITRAGE = "arbitrage"
# noinspection PyPep8Naming
class DAYS_CNT(object):
DAYS_A_YEAR = 365
TRADING_DAYS_A_YEAR = 252
class CURRENCY(CustomEnum):
CNY = "CNY" # 人民币
USD = "USD" # 美元
EUR = "EUR" # 欧元
HKD = "HKD" # 港币
GBP = "GBP" # 英镑
JPY = "JPY" # 日元
KRW = "KWR" # 韩元
CAD = "CAD" # 加元
AUD = "AUD" # 澳元
CHF = "CHF" # 瑞郎
SGD = "SGD" # 新加坡元
MYR = "MYR" # 马拉西亚币
IDR = "IDR" # 印尼币
NZD = "NZD" # 新西兰币
VND = "VND" # 越南盾
THB = "THB" # 泰铢
PHP = "PHP" # 菲律宾币
UNDERLYING_SYMBOL_PATTERN = r"([a-zA-Z]+)\d+"
NIGHT_TRADING_NS = ["CU", "AL", "ZN", "PB", "SN", "NI", "RB", "HC", "BU", "RU", "AU", "AG", "Y", "M", "A", "B", "P",
"J", "JM", "I", "CF", "SR", "OI", "MA", "ZC", "FG", "RM", "CY", "TA"]
| def __repr__(self):
return "%s.%s" % (
self.__class__.__name__, self._name_) |
arrow-line.js | let data = {
"body": "<path d=\"M27.66 15.61L18 6l-9.66 9.61A1 1 0 1 0 9.75 17L17 9.81v19.13a1 1 0 1 0 2 0V9.81L26.25 17a1 1 0 0 0 1.41-1.42z\" class=\"clr-i-outline clr-i-outline-path-1\" fill=\"currentColor\"/>",
"width": 36,
"height": 36 | };
export default data;
ha-thingtalk-placeholders.ts | import { HassEntity } from "home-assistant-js-websocket";
import {
css,
CSSResult,
customElement,
html,
internalProperty,
LitElement,
property,
PropertyValues,
TemplateResult,
} from "lit-element";
import { fireEvent } from "../../../../common/dom/fire_event";
import { computeDomain } from "../../../../common/entity/compute_domain";
import { applyPatch, getPath } from "../../../../common/util/patch";
import "../../../../components/device/ha-area-devices-picker";
import "../../../../components/entity/ha-entity-picker";
import {
AreaRegistryEntry,
subscribeAreaRegistry,
} from "../../../../data/area_registry";
import {
DeviceRegistryEntry,
subscribeDeviceRegistry,
} from "../../../../data/device_registry";
import { subscribeEntityRegistry } from "../../../../data/entity_registry";
import { domainToName } from "../../../../data/integration";
import { SubscribeMixin } from "../../../../mixins/subscribe-mixin";
import { PolymerChangedEvent } from "../../../../polymer-types";
import { haStyleDialog } from "../../../../resources/styles";
import { HomeAssistant } from "../../../../types";
import { Placeholder, PlaceholderContainer } from "./dialog-thingtalk";
declare global {
// for fire event
interface HASSDomEvents {
"placeholders-filled": { value: PlaceholderValues };
}
}
export interface PlaceholderValues {
[key: string]: {
[index: number]: {
[index: number]: { device_id?: string; entity_id?: string };
};
};
}
export interface ExtraInfo {
[key: string]: {
[index: number]: {
[index: number]: {
area_id?: string;
device_ids?: string[];
manualEntity: boolean;
};
};
};
}
interface DeviceEntitiesLookup {
[deviceId: string]: string[];
}
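// Shape example (device ID and entity names are illustrative only):
//   { "abc123": ["light.kitchen", "sensor.kitchen_temperature"] }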
@customElement("ha-thingtalk-placeholders")
export class ThingTalkPlaceholders extends SubscribeMixin(LitElement) {
@property({ attribute: false }) public hass!: HomeAssistant;
@property() public opened!: boolean;
public skip!: () => void;
@property() public placeholders!: PlaceholderContainer;
@internalProperty() private _error?: string;
private _deviceEntityLookup: DeviceEntitiesLookup = {};
@internalProperty() private _extraInfo: ExtraInfo = {};
@internalProperty() private _placeholderValues: PlaceholderValues = {};
private _devices?: DeviceRegistryEntry[];
private _areas?: AreaRegistryEntry[];
private _search = false;
public hassSubscribe() {
return [
subscribeEntityRegistry(this.hass.connection, (entries) => {
for (const entity of entries) {
if (!entity.device_id) {
continue;
}
if (!(entity.device_id in this._deviceEntityLookup)) {
this._deviceEntityLookup[entity.device_id] = [];
}
if (
!this._deviceEntityLookup[entity.device_id].includes(
entity.entity_id
)
) {
this._deviceEntityLookup[entity.device_id].push(entity.entity_id);
}
}
}),
subscribeDeviceRegistry(this.hass.connection!, (devices) => {
this._devices = devices;
this._searchNames();
}),
subscribeAreaRegistry(this.hass.connection!, (areas) => {
this._areas = areas;
this._searchNames();
}),
];
}
protected updated(changedProps: PropertyValues) {
if (changedProps.has("placeholders")) {
this._search = true;
this._searchNames();
}
}
protected render(): TemplateResult {
return html`
<ha-paper-dialog
modal
with-backdrop
.opened=${this.opened}
@opened-changed="${this._openedChanged}"
>
<h2>
${this.hass.localize(
`ui.panel.config.automation.thingtalk.link_devices.header`
)}
</h2>
<paper-dialog-scrollable>
${this._error ? html` <div class="error">${this._error}</div> ` : ""}
${Object.entries(this.placeholders).map(
([type, placeholders]) =>
html`
<h3>
${this.hass.localize(
`ui.panel.config.automation.editor.${type}s.name`
)}:
</h3>
${placeholders.map((placeholder) => {
if (placeholder.fields.includes("device_id")) {
const extraInfo = getPath(this._extraInfo, [
type,
placeholder.index,
]);
return html`
<ha-area-devices-picker
.type=${type}
.placeholder=${placeholder}
@value-changed=${this._devicePicked}
.hass=${this.hass}
.area=${extraInfo ? extraInfo.area_id : undefined}
.devices=${extraInfo && extraInfo.device_ids
? extraInfo.device_ids
: undefined}
.includeDomains=${placeholder.domains}
.includeDeviceClasses=${placeholder.device_classes}
.label=${this._getLabel(
placeholder.domains,
placeholder.device_classes
)}
></ha-area-devices-picker>
${extraInfo && extraInfo.manualEntity
? html`
<h3>
${this.hass.localize(
`ui.panel.config.automation.thingtalk.link_devices.ambiguous_entities`
)}
</h3>
${Object.keys(extraInfo.manualEntity).map(
(idx) => html`
<ha-entity-picker
id="device-entity-picker"
.type=${type}
.placeholder=${placeholder}
.index=${idx}
@change=${this._entityPicked}
.includeDomains=${placeholder.domains}
.includeDeviceClasses=${placeholder.device_classes}
.hass=${this.hass}
.label=${`${this._getLabel(
placeholder.domains,
placeholder.device_classes
)} of device ${this._getDeviceName(
getPath(this._placeholderValues, [
type,
placeholder.index,
idx,
"device_id",
])
)}`}
.entityFilter=${(state: HassEntity) => {
const devId = this._placeholderValues[type][
placeholder.index
][idx].device_id;
return this._deviceEntityLookup[
devId
].includes(state.entity_id);
}}
></ha-entity-picker>
`
)}
`
: ""}
`;
}
if (placeholder.fields.includes("entity_id")) {
return html`
<ha-entity-picker
.type=${type}
.placeholder=${placeholder}
@change=${this._entityPicked}
.includeDomains=${placeholder.domains}
.includeDeviceClasses=${placeholder.device_classes}
.hass=${this.hass}
.label=${this._getLabel(
placeholder.domains,
placeholder.device_classes
)}
></ha-entity-picker>
`;
}
return html`
<div class="error">
${this.hass.localize(
`ui.panel.config.automation.thingtalk.link_devices.unknown_placeholder`
)}<br />
${placeholder.domains}<br />
${placeholder.fields.map(
(field) => html` ${field}<br /> `
)}
</div>
`;
})}
`
)}
</paper-dialog-scrollable>
<div class="paper-dialog-buttons">
<mwc-button class="left" @click="${this.skip}">
${this.hass.localize(`ui.common.skip`)}
</mwc-button>
<mwc-button @click="${this._done}" .disabled=${!this._isDone}>
${this.hass.localize(`ui.panel.config.automation.thingtalk.create`)}
</mwc-button>
</div>
</ha-paper-dialog>
`;
}
private _getDeviceName(deviceId: string): string {
if (!this._devices) {
return "";
}
const foundDevice = this._devices.find((device) => device.id === deviceId);
if (!foundDevice) {
return "";
}
return foundDevice.name_by_user || foundDevice.name || "";
}
private _searchNames() {
if (!this._search || !this._areas || !this._devices) {
return;
}
this._search = false;
Object.entries(this.placeholders).forEach(([type, placeholders]) =>
placeholders.forEach((placeholder) => {
if (!placeholder.name) {
return;
}
const name = placeholder.name;
const foundArea = this._areas!.find((area) =>
area.name.toLowerCase().includes(name)
);
if (foundArea) {
applyPatch(
this._extraInfo,
[type, placeholder.index, "area_id"],
foundArea.area_id
);
this.requestUpdate("_extraInfo");
return;
}
const foundDevices = this._devices!.filter((device) => {
const deviceName = device.name_by_user || device.name;
if (!deviceName) {
return false;
}
return deviceName.toLowerCase().includes(name);
});
if (foundDevices.length) {
applyPatch(
this._extraInfo,
[type, placeholder.index, "device_ids"],
foundDevices.map((device) => device.id)
);
this.requestUpdate("_extraInfo");
}
})
);
}
private get _isDone(): boolean {
return Object.entries(this.placeholders).every(([type, placeholders]) =>
placeholders.every((placeholder) =>
placeholder.fields.every((field) => {
const entries: {
[key: number]: { device_id?: string; entity_id?: string };
} = getPath(this._placeholderValues, [type, placeholder.index]);
if (!entries) {
return false;
}
const values = Object.values(entries);
return values.every(
(entry) => entry[field] !== undefined && entry[field] !== ""
);
})
)
);
}
private _getLabel(domains: string[], deviceClasses?: string[]) {
return `${domains
.map((domain) => domainToName(this.hass.localize, domain))
.join(", ")}${
deviceClasses ? ` of type ${deviceClasses.join(", ")}` : ""
}`;
}
private _devicePicked(ev: CustomEvent): void {
const value: string[] = ev.detail.value;
if (!value) {
return;
}
const target = ev.target as any;
const placeholder = target.placeholder as Placeholder;
const type = target.type;
let oldValues = getPath(this._placeholderValues, [type, placeholder.index]);
if (oldValues) {
oldValues = Object.values(oldValues);
}
const oldExtraInfo = getPath(this._extraInfo, [type, placeholder.index]);
if (this._placeholderValues[type]) {
delete this._placeholderValues[type][placeholder.index];
}
if (this._extraInfo[type]) {
delete this._extraInfo[type][placeholder.index];
}
if (!value.length) {
this.requestUpdate("_placeholderValues");
return;
}
value.forEach((deviceId, index) => {
let oldIndex;
if (oldValues) {
const oldDevice = oldValues.find((oldVal, idx) => {
oldIndex = idx;
return oldVal.device_id === deviceId;
});
if (oldDevice) {
applyPatch(
this._placeholderValues,
[type, placeholder.index, index],
oldDevice
);
if (oldExtraInfo) {
applyPatch(
this._extraInfo,
[type, placeholder.index, index],
oldExtraInfo[oldIndex]
);
}
return;
}
}
applyPatch(
this._placeholderValues,
[type, placeholder.index, index, "device_id"],
deviceId
);
if (!placeholder.fields.includes("entity_id")) {
return;
}
const devEntities = this._deviceEntityLookup[deviceId];
const entities = devEntities.filter((eid) => {
if (placeholder.device_classes) {
const stateObj = this.hass.states[eid];
if (!stateObj) {
return false;
}
return (
placeholder.domains.includes(computeDomain(eid)) &&
stateObj.attributes.device_class &&
placeholder.device_classes.includes(
stateObj.attributes.device_class
)
);
}
return placeholder.domains.includes(computeDomain(eid));
});
if (entities.length === 0) {
// Should not happen because we filter the device picker on domain
this._error = `No ${placeholder.domains
.map((domain) => domainToName(this.hass.localize, domain))
.join(", ")} entities found in this device.`;
} else if (entities.length === 1) {
applyPatch(
this._placeholderValues,
[type, placeholder.index, index, "entity_id"],
entities[0]
);
this.requestUpdate("_placeholderValues");
} else {
delete this._placeholderValues[type][placeholder.index][index]
.entity_id;
applyPatch(
this._extraInfo,
[type, placeholder.index, "manualEntity", index],
true
);
this.requestUpdate("_placeholderValues");
}
});
fireEvent(
this.shadowRoot!.querySelector("ha-paper-dialog")! as HTMLElement,
"iron-resize"
);
}
private _entityPicked(ev: Event): void {
const target = ev.target as any;
const placeholder = target.placeholder as Placeholder;
const value = target.value;
const type = target.type;
const index = target.index || 0;
applyPatch(
this._placeholderValues,
[type, placeholder.index, index, "entity_id"],
value
);
this.requestUpdate("_placeholderValues");
}
private _done(): void {
fireEvent(this, "placeholders-filled", { value: this._placeholderValues });
}
private _openedChanged(ev: PolymerChangedEvent<boolean>): void {
// The opened-changed event doesn't leave the shadow DOM so we re-dispatch it
this.dispatchEvent(new CustomEvent(ev.type, ev));
}
static get styles(): CSSResult[] {
return [
haStyleDialog,
css`
ha-paper-dialog {
max-width: 500px;
}
mwc-button.left {
margin-right: auto;
}
paper-dialog-scrollable {
margin-top: 10px;
}
h3 {
margin: 10px 0 0 0;
font-weight: 500;
}
.error {
color: var(--error-color);
}
`,
];
}
}
declare global {
interface HTMLElementTagNameMap {
"ha-thingtalk-placeholders": ThingTalkPlaceholders;
}
}
jit.rs | //! The JIT driver uses [`cranelift_simplejit`] to JIT execute programs without writing any object
//! files.
use std::cell::RefCell;
use std::ffi::CString;
use std::os::raw::{c_char, c_int};
use rustc_codegen_ssa::CrateInfo;
use rustc_middle::mir::mono::MonoItem;
use cranelift_jit::{JITBuilder, JITModule};
use crate::prelude::*;
use crate::{CodegenCx, CodegenMode};
thread_local! {
pub static CURRENT_MODULE: RefCell<Option<JITModule>> = RefCell::new(None);
}
pub(super) fn run_jit(tcx: TyCtxt<'_>, codegen_mode: CodegenMode) -> ! {
if !tcx.sess.opts.output_types.should_codegen() {
tcx.sess.fatal("JIT mode doesn't work with `cargo check`.");
}
#[cfg(unix)]
unsafe {
// When not using our custom driver rustc will open us without the RTLD_GLOBAL flag, so
// __cg_clif_global_atomic_mutex will not be exported. We fix this by opening ourself again
// as global.
// FIXME remove once atomic_shim is gone
let mut dl_info: libc::Dl_info = std::mem::zeroed();
assert_ne!(
libc::dladdr(run_jit as *const libc::c_void, &mut dl_info),
0
);
assert_ne!(
libc::dlopen(dl_info.dli_fname, libc::RTLD_NOW | libc::RTLD_GLOBAL),
std::ptr::null_mut(),
);
}
let imported_symbols = load_imported_symbols_for_jit(tcx);
let mut jit_builder = JITBuilder::with_isa(
crate::build_isa(tcx.sess),
cranelift_module::default_libcall_names(),
);
jit_builder.hotswap(matches!(codegen_mode, CodegenMode::JitLazy));
jit_builder.symbols(imported_symbols);
let mut jit_module = JITModule::new(jit_builder);
assert_eq!(pointer_ty(tcx), jit_module.target_config().pointer_type());
let sig = Signature {
params: vec![
AbiParam::new(jit_module.target_config().pointer_type()),
AbiParam::new(jit_module.target_config().pointer_type()),
],
returns: vec![AbiParam::new(
jit_module.target_config().pointer_type(), /*isize*/
)],
call_conv: CallConv::triple_default(&crate::target_triple(tcx.sess)),
};
let main_func_id = jit_module
.declare_function("main", Linkage::Import, &sig)
.unwrap();
let (_, cgus) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
let mono_items = cgus
.iter()
.map(|cgu| cgu.items_in_deterministic_order(tcx).into_iter())
.flatten()
.collect::<FxHashMap<_, (_, _)>>()
.into_iter()
.collect::<Vec<(_, (_, _))>>();
let mut cx = crate::CodegenCx::new(tcx, jit_module, false, false);
super::time(tcx, "codegen mono items", || {
super::predefine_mono_items(&mut cx, &mono_items);
for (mono_item, (linkage, visibility)) in mono_items {
let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
match mono_item {
MonoItem::Fn(inst) => match codegen_mode {
CodegenMode::Aot => unreachable!(),
CodegenMode::Jit => {
cx.tcx.sess.time("codegen fn", || {
crate::base::codegen_fn(&mut cx, inst, linkage)
});
}
CodegenMode::JitLazy => codegen_shim(&mut cx, inst),
},
MonoItem::Static(def_id) => {
crate::constant::codegen_static(&mut cx.constants_cx, def_id);
}
MonoItem::GlobalAsm(item_id) => {
let item = cx.tcx.hir().item(item_id);
tcx.sess.span_fatal(item.span, "Global asm is not supported in JIT mode");
}
}
}
});
let (mut jit_module, global_asm, _debug, mut unwind_context) =
tcx.sess.time("finalize CodegenCx", || cx.finalize());
jit_module.finalize_definitions();
if !global_asm.is_empty() {
tcx.sess.fatal("Inline asm is not supported in JIT mode");
}
crate::main_shim::maybe_create_entry_wrapper(tcx, &mut jit_module, &mut unwind_context, true);
crate::allocator::codegen(tcx, &mut jit_module, &mut unwind_context);
tcx.sess.abort_if_errors();
jit_module.finalize_definitions();
let _unwind_register_guard = unsafe { unwind_context.register_jit(&jit_module) };
let finalized_main: *const u8 = jit_module.get_finalized_function(main_func_id);
println!("Rustc codegen cranelift will JIT run the executable, because -Cllvm-args=mode=jit was passed");
let f: extern "C" fn(c_int, *const *const c_char) -> c_int =
unsafe { ::std::mem::transmute(finalized_main) };
let args = ::std::env::var("CG_CLIF_JIT_ARGS").unwrap_or_else(|_| String::new());
let args = std::iter::once(&*tcx.crate_name(LOCAL_CRATE).as_str().to_string())
.chain(args.split(' '))
.map(|arg| CString::new(arg).unwrap())
.collect::<Vec<_>>();
let mut argv = args.iter().map(|arg| arg.as_ptr()).collect::<Vec<_>>();
// Push a null pointer as a terminating argument. This is required by POSIX and
// useful as some dynamic linkers use it as a marker to jump over.
argv.push(std::ptr::null());
CURRENT_MODULE
.with(|current_module| assert!(current_module.borrow_mut().replace(jit_module).is_none()));
let ret = f(args.len() as c_int, argv.as_ptr());
std::process::exit(ret);
}
#[no_mangle]
extern "C" fn | (instance_ptr: *const Instance<'static>) -> *const u8 {
rustc_middle::ty::tls::with(|tcx| {
// lift is used to ensure the correct lifetime for instance.
let instance = tcx.lift(unsafe { *instance_ptr }).unwrap();
CURRENT_MODULE.with(|jit_module| {
let mut jit_module = jit_module.borrow_mut();
let jit_module = jit_module.as_mut().unwrap();
let mut cx = crate::CodegenCx::new(tcx, jit_module, false, false);
let name = tcx.symbol_name(instance).name.to_string();
let sig = crate::abi::get_function_sig(tcx, cx.module.isa().triple(), instance);
let func_id = cx
.module
.declare_function(&name, Linkage::Export, &sig)
.unwrap();
cx.module.prepare_for_function_redefine(func_id).unwrap();
tcx.sess.time("codegen fn", || {
crate::base::codegen_fn(&mut cx, instance, Linkage::Export)
});
let (jit_module, global_asm, _debug_context, unwind_context) = cx.finalize();
assert!(global_asm.is_empty());
jit_module.finalize_definitions();
std::mem::forget(unsafe { unwind_context.register_jit(&jit_module) });
jit_module.get_finalized_function(func_id)
})
})
}
fn load_imported_symbols_for_jit(tcx: TyCtxt<'_>) -> Vec<(String, *const u8)> {
use rustc_middle::middle::dependency_format::Linkage;
let mut dylib_paths = Vec::new();
let crate_info = CrateInfo::new(tcx);
let formats = tcx.dependency_formats(LOCAL_CRATE);
let data = &formats
.iter()
.find(|(crate_type, _data)| *crate_type == rustc_session::config::CrateType::Executable)
.unwrap()
.1;
for &(cnum, _) in &crate_info.used_crates_dynamic {
let src = &crate_info.used_crate_source[&cnum];
match data[cnum.as_usize() - 1] {
Linkage::NotLinked | Linkage::IncludedFromDylib => {}
Linkage::Static => {
let name = tcx.crate_name(cnum);
let mut err = tcx
.sess
.struct_err(&format!("Can't load static lib {}", name.as_str()));
err.note("rustc_codegen_cranelift can only load dylibs in JIT mode.");
err.emit();
}
Linkage::Dynamic => {
dylib_paths.push(src.dylib.as_ref().unwrap().0.clone());
}
}
}
let mut imported_symbols = Vec::new();
for path in dylib_paths {
use object::{Object, ObjectSymbol};
let lib = libloading::Library::new(&path).unwrap();
let obj = std::fs::read(path).unwrap();
let obj = object::File::parse(&obj).unwrap();
imported_symbols.extend(obj.dynamic_symbols().filter_map(|symbol| {
let name = symbol.name().unwrap().to_string();
if name.is_empty() || !symbol.is_global() || symbol.is_undefined() {
return None;
}
let dlsym_name = if cfg!(target_os = "macos") {
// On macOS `dlsym` expects the name without leading `_`.
assert!(name.starts_with('_'), "{:?}", name);
&name[1..]
} else {
&name
};
let symbol: libloading::Symbol<'_, *const u8> =
unsafe { lib.get(dlsym_name.as_bytes()) }.unwrap();
Some((name, *symbol))
}));
std::mem::forget(lib)
}
tcx.sess.abort_if_errors();
imported_symbols
}
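/// Codegen a trampoline for `inst` that calls `__clif_jit_fn` to JIT-compile
/// the real function and then forwards its arguments to the returned pointer.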
pub(super) fn codegen_shim<'tcx>(cx: &mut CodegenCx<'tcx, impl Module>, inst: Instance<'tcx>) {
let tcx = cx.tcx;
let pointer_type = cx.module.target_config().pointer_type();
let name = tcx.symbol_name(inst).name.to_string();
let sig = crate::abi::get_function_sig(tcx, cx.module.isa().triple(), inst);
let func_id = cx
.module
.declare_function(&name, Linkage::Export, &sig)
.unwrap();
let instance_ptr = Box::into_raw(Box::new(inst));
let jit_fn = cx
.module
.declare_function(
"__clif_jit_fn",
Linkage::Import,
&Signature {
call_conv: cx.module.target_config().default_call_conv,
params: vec![AbiParam::new(pointer_type)],
returns: vec![AbiParam::new(pointer_type)],
},
)
.unwrap();
let mut trampoline = Function::with_name_signature(ExternalName::default(), sig.clone());
let mut builder_ctx = FunctionBuilderContext::new();
let mut trampoline_builder = FunctionBuilder::new(&mut trampoline, &mut builder_ctx);
let jit_fn = cx
.module
.declare_func_in_func(jit_fn, trampoline_builder.func);
let sig_ref = trampoline_builder.func.import_signature(sig);
let entry_block = trampoline_builder.create_block();
trampoline_builder.append_block_params_for_function_params(entry_block);
let fn_args = trampoline_builder
.func
.dfg
.block_params(entry_block)
.to_vec();
trampoline_builder.switch_to_block(entry_block);
let instance_ptr = trampoline_builder
.ins()
.iconst(pointer_type, instance_ptr as u64 as i64);
let jitted_fn = trampoline_builder.ins().call(jit_fn, &[instance_ptr]);
let jitted_fn = trampoline_builder.func.dfg.inst_results(jitted_fn)[0];
let call_inst = trampoline_builder
.ins()
.call_indirect(sig_ref, jitted_fn, &fn_args);
let ret_vals = trampoline_builder.func.dfg.inst_results(call_inst).to_vec();
trampoline_builder.ins().return_(&ret_vals);
cx.module
.define_function(
func_id,
&mut Context::for_function(trampoline),
&mut cranelift_codegen::binemit::NullTrapSink {},
)
.unwrap();
}
gesture.ts | export function handleGesture(
velocity: number,
diffY: number,
itemHeight: number,
y: number,
isLoop: boolean,
maxCount: number,
current: number,
minCount: number
): Promise<{ movingCount: number; isSkipAnimation: boolean }> {
const ratio = velocity < 1 ? 1 : velocity / 2;
const isMovingSlowly = velocity < 0.2 && diffY > itemHeight;
let movingCount = Math.abs((y / itemHeight) * ratio);
if (isLoop) {
movingCount = movingCount > maxCount ? maxCount - 1 / ratio : movingCount;
}
if (y > 0) {
if (!isLoop && current + movingCount > maxCount) {
movingCount = maxCount - current + 1;
}
} else {
if (!isLoop && current - movingCount < minCount) {
movingCount = current - minCount + 1;
}
movingCount = movingCount * -1;
}
return Promise.resolve({ movingCount, isSkipAnimation: isMovingSlowly });
}
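// A usage sketch (all argument values are illustrative only):
//
//   const { movingCount, isSkipAnimation } = await handleGesture(
//     0.8,   // velocity
//     40,    // diffY
//     32,    // itemHeight
//     96,    // y
//     false, // isLoop
//     10,    // maxCount
//     3,     // current
//     0,     // minCount
//   );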
collapse_vars.rs | use super::Optimizer;
use crate::mode::Mode;
use swc_ecma_ast::*;
use swc_ecma_utils::ident::IdentLike;
/// Methods related to the option `collapse_vars`.
impl<M> Optimizer<'_, M>
where
M: Mode,
{
pub(super) fn collapse_assignment_to_vars(&mut self, e: &mut Expr) {
if !self.options.collapse_vars {
return;
}
if self.ctx.in_try_block || self.ctx.executed_multiple_time || self.ctx.in_cond {
return;
}
match &*e {
Expr::Assign(assign @ AssignExpr { op: op!("="), .. }) => {
//
let left = match &assign.left {
PatOrExpr::Expr(_) => return,
PatOrExpr::Pat(left) => match &**left {
Pat::Ident(i) => i,
_ => return,
},
};
if let Some(usage) = self
.data
.as_ref()
.and_then(|data| data.vars.get(&left.to_id()))
{
if !usage.declared
|| !usage.is_fn_local
|| usage.assign_count != 1
|| usage.var_kind == Some(VarDeclKind::Const)
{
return;
}
if usage.used_in_loop || usage.used_in_cond {
match &*assign.right {
Expr::Lit(..) | Expr::Ident(..) => {}
_ => return,
}
}
if usage.usage_count >= 2 {
match &*assign.right {
Expr::Lit(..) => {}
_ => return,
}
}
}
let value = match &*assign.right {
Expr::Lit(Lit::Str(s)) if s.value.len() > 3 => return,
Expr::Lit(..)
| Expr::Member(MemberExpr {
computed: false, ..
}) => assign.right.clone(),
_ => return,
};
tracing::debug!(
"collpase_vars: Decided to inline {}{:?}",
left.id.sym,
left.id.span.ctxt
);
self.lits.insert(left.to_id(), value);
}
_ => {}
}
}
}
net_test.go | package stats
import (
"context"
"testing"
"time"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
)
func TestNetStatsBasic(t *testing.T) {
logrus.SetLevel(logrus.DebugLevel)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
ps := NewNetStats(ctx, 120*time.Second, 2*time.Second, 1)
time.Sleep(4 * time.Second)
assert.GreaterOrEqual(t, len(ps.NICs), 1)
for _, n := range ps.NICs {
_, ok := n.BytesRecv.Timeseries.Last()
assert.True(t, ok)
_, ok = n.PacketsRecv.Timeseries.Last()
assert.True(t, ok)
}
}
func TestNetTop(t *testing.T) {
logrus.SetLevel(logrus.DebugLevel)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
ps := NewNetStats(ctx, 120*time.Second, 2*time.Second, 1)
time.Sleep(5 * time.Second)
td := ps.TopByteRate(true)
assert.Greater(t, len(td), 0)
v, ok := td[0].BytesRecv.Rate(2 * time.Second)
assert.True(t, ok)
assert.Less(t, v, 1000000.0)
td = ps.TopByteRate(false)
assert.Greater(t, len(td), 0)
td = ps.TopPacketRate(true)
assert.Greater(t, len(td), 0)
v, ok = td[0].PacketsRecv.Rate(2 * time.Second)
assert.True(t, ok)
assert.Less(t, v, 100.0)
td = ps.TopPacketRate(false)
assert.Greater(t, len(td), 0)
td = ps.TopErrorsRate(true)
assert.Greater(t, len(td), 0)
v, ok = td[0].ErrIn.Rate(2 * time.Second)
assert.True(t, ok)
assert.Less(t, v, 10.0)
td = ps.TopErrorsRate(false)
assert.Greater(t, len(td), 0)
}
|
recent_counter.py | """
space : O(n)
time : O(n)
"""
class RecentCounter:
def __init__(self):
self.history = []
def ping(self, t: int) -> int:
self.history.append(t)
s = t - 3000
while self.history[0] < s:
self.history.pop(0)
return len(self.history) |
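# Example (ping times in milliseconds):
#
#   rc = RecentCounter()
#   rc.ping(1)     # -> 1
#   rc.ping(100)   # -> 2
#   rc.ping(3001)  # -> 3 (1 is still inside the window [1, 3001])
#   rc.ping(3002)  # -> 3 (1 falls outside the window [2, 3002])
#
# A collections.deque would make the O(n) list.pop(0) calls O(1).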
|
csat.go | package command
import (
"SchoolDay/env"
"SchoolDay/extension"
"time"
"github.com/bwmarrin/discordgo"
)
func Csat(s *discordgo.Session, m *discordgo.MessageCreate, args []string) {
channelId := m.ChannelID
date, err := extension.NtpTimeKorea()
if err != nil {
log.Warningln(err)
return
}
csatDate, err := time.Parse("20060102", env.CsatDate)
if err != nil {
log.Fatal(err)
return
}
dDay := csatDate.Sub(date)
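// For example, with 49h30m remaining the message below breaks this down as
// 49/24 = 2 days, 49%24 = 1 hour, 2970%60 = 30 minutes, and 0 seconds.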
if dDay.Microseconds() > 0 {
extension.ChannelMessageSend(s, channelId, "%d 대학수학능력시험까지 **%d일 %d시간 %d분 %d초**", csatDate.Year()+1, int(dDay.Hours())/24, int(dDay.Hours())%24, int(dDay.Minutes())%60, int(dDay.Seconds())%60)
} else {
extension.ChannelMessageSend(s, channelId, "%d 대학수학능력시험 **D-Day**", csatDate.Year()+1)
}
}
|
|
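// Reminder on the layout string used above (standard library behavior, not
// project code): Go layouts are written against the fixed reference time
// Mon Jan 2 15:04:05 MST 2006, so "20060102" means YYYYMMDD. For example,
// time.Parse("20060102", "20231116") yields 2023-11-16 00:00:00 UTC.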
handler.go | package rox
import (
"github.com/nomos/go-lokas" | "net/http"
)
type Handler func(w ResponseWriter,r *http.Request,a lokas.IProcess)
func CreateHandler(h Handler)Handler {
return h
} | |
contextMenu.js | var currentEditableItem;
var oldText;
var currentListItem;
const {
remote
} = require('electron')
const {
Menu,
MenuItem
} = remote
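// Note: `_.each` below assumes lodash/underscore is available in this
// renderer (e.g. loaded globally); it is not required in this file.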
function setupContextMenu(items) {
const contextMenu = new Menu()
| contextMenu.append(new MenuItem({
label: item.repoName,
click() {
$('#gitRepoUrl').val(item.repoUrl);
}
}))
})
window.addEventListener('contextmenu', (e) => {
e.preventDefault();
var x = e.clientX;
var y = e.clientY;
var el = document.elementFromPoint(x, y);
if ($(el).hasClass('gitRepoUrl')) {
currentListItem = el;
contextMenu.popup({
window: remote.getCurrentWindow()
})
}
}, false)
} | _.each(items, function(item){ |
credentials.go | // Code generated by go-bindata.
// sources:
// config.yaml
// DO NOT EDIT!
package config
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, clErr
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info os.FileInfo
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _configYaml = []byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x2a\x49\xcd\x49\x4d\x2f\x4a\xcc\x8d\xcf\x4e\xad\xb4\x52\x30\xb6\xb0\x34\xb1\x30\xb4\x34\x37\xb2\x72\x74\x74\x4f\xc9\x2a\x74\xcb\x4e\x71\xd5\x0d\xca\x4b\x71\x8f\xaf\x08\x49\xab\xac\x4a\xf3\xf3\xcb\xcc\xcf\x72\xf3\x32\xa9\x2a\x32\xf2\x72\x04\x04\x00\x00\xff\xff\x60\x6c\xd8\xaf\x3b\x00\x00\x00")
func configYamlBytes() ([]byte, error) {
return bindataRead(
_configYaml,
"config.yaml",
)
}
func configYaml() (*asset, error) {
bytes, err := configYamlBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{name: "config.yaml", size: 59, mode: os.FileMode(420), modTime: time.Unix(1503001114, 0)}
a := &asset{bytes: bytes, info: info}
return a, nil
}
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
func Asset(name string) ([]byte, error) {
canonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[canonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, fmt.Errorf("Asset %s not found", name)
}
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
func | (name string) (os.FileInfo, error) {
canonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[canonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, fmt.Errorf("AssetInfo %s not found", name)
}
// AssetNames returns the names of the assets.
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
// _bindata is a table, holding each asset generator, mapped to its name.
var _bindata = map[string]func() (*asset, error){
"config.yaml": configYaml,
}
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
canonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(canonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, fmt.Errorf("Asset %s not found", name)
}
}
}
if node.Func != nil {
return nil, fmt.Errorf("Asset %s not found", name)
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{nil, map[string]*bintree{
"config.yaml": &bintree{configYaml, map[string]*bintree{}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
err = os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
if err != nil {
return err
}
return nil
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
canonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(canonicalName, "/")...)...)
}
| AssetInfo |
simwords.py | # -*- coding: utf-8 -*-
"""
Created on Thu Jun 3 17:03:45 2021
@author: shangfr
"""
import joblib
import operator
from functools import reduce
from whoosh.index import open_dir
from whoosh.qparser import QueryParser
import pkg_resources
INDEX_DIR = pkg_resources.resource_filename('cnsyn', 'query')
ix = open_dir(INDEX_DIR)
words_tree = joblib.load(INDEX_DIR+'/words_tree_mini.m')
tree_model = words_tree['model']
words_id = words_tree['words']
words_emb = tree_model.get_arrays()[0]
def search(word, topK=10, origin='all'):
'''
Parameters
----------
word : str
The query word.
topK : int, optional
Number of synonyms to return. The default is 10.
origin : str, optional
Lexicon the synonyms come from. The default is 'all'.
Returns
-------
sim_words : list
List of synonyms.
'''
word_list = []
with ix.searcher() as searcher:
query = QueryParser("words", ix.schema).parse(word)
results = searcher.search(query, limit=None)
if len(results) > 0:
for result in results:
if origin == 'all':
word_list.append(result.fields()['words'].split())
elif origin == result.fields()['origin']:
word_list.append(result.fields()['words'].split())
else:
pass
if word_list != []:
sim_words = reduce(operator.add, word_list)
sim_words = list(set(sim_words))
else:
sim_words = word_list
return sim_words[0:topK]
def anns(word, topK=10):
'''
Para | rs
----------
word : str
The query word.
topK : int, optional
k nearest neighbors. The default is 10.
Returns
-------
sim_words : list
List of synonyms.
'''
word_key = [x[0] for x in words_id.items() if word == x[1]]
if word_key == []:
sim_words = []
else:
word_emb = words_emb[word_key]
ind = tree_model.query(
word_emb, k=topK, return_distance=False)
sim_words = [words_id.get(i) for i in ind.ravel()]
return sim_words
if __name__ == '__main__':
# Query synonyms (all lexicons)
word = '中山广场'
search(word)
search(word, topK=3)
# Use the wiki lexicon
search(word, origin='wiki')
# Use the Chinese synonym dictionary lexicon
search(word, origin='cndict')
# Vector-based synonym lookup
anns(word)
anns(word, topK=3)
word = input("Please input the word you want to search: ")
step_result = "Something went wrong......"
try:
step_result = search(word)
finally:
print(step_result)
| mete |
app.module.ts | import {NgModule} from '@angular/core';
import {BrowserModule} from '@angular/platform-browser';
import {AppComponent} from './app.component';
import {HomePageComponent} from './component/home-page/home-page.component';
import {ProductTableComponent} from './component/product-table/product-table.component';
import {ProductDialogComponent} from './component/product-dialog/product-dialog.component';
import {BrowserAnimationsModule} from '@angular/platform-browser/animations';
import {MatToolbarModule} from "@angular/material/toolbar";
import {MatButtonModule} from "@angular/material/button";
import {MatCardModule} from "@angular/material/card";
import {MatIconModule} from "@angular/material/icon";
import {MatTableModule} from "@angular/material/table";
import {MatDialogModule} from "@angular/material/dialog";
import {MatFormFieldModule} from "@angular/material/form-field";
import {ReactiveFormsModule} from "@angular/forms";
import {MatInputModule} from "@angular/material/input";
import {MatSelectModule} from "@angular/material/select"; | import {MatSnackBarModule} from "@angular/material/snack-bar";
@NgModule({
declarations: [
AppComponent,
HomePageComponent,
ProductTableComponent,
ProductDialogComponent,
NotificationComponent,
],
imports: [
BrowserModule,
BrowserAnimationsModule,
HttpClientModule,
MatToolbarModule,
MatButtonModule,
MatCardModule,
MatIconModule,
MatTableModule,
MatDialogModule,
MatFormFieldModule,
MatSnackBarModule,
ReactiveFormsModule,
MatInputModule,
MatSelectModule
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule {
} | import {HttpClientModule} from "@angular/common/http";
import {NotificationComponent} from './component/notification/notification.component'; |
dice.rs | //! Generators for random test data.
use dicetest::prelude::*;
use crate::{Dim, Infinitesimal, Jet, Number};
#[cfg(any(test, feature = "big-rational-number"))]
/// Generates an arbitrary [`BigRational`].
pub fn big_rational_number() -> impl Die<num_rational::BigRational> {
use num_traits::{One, Zero};
let special_number_die = dice::one_of().three(
num_rational::BigRational::zero(),
num_rational::BigRational::one(),
-num_rational::BigRational::one(),
);
let number_die = dice::from_fn(|mut fate| {
let numerator = fate.roll(dice::u32(..)).into();
let denominator = fate.roll(dice::i32(1..)).into();
num_rational::BigRational::new(numerator, denominator)
});
dice::weighted_one_of_die().two((1, special_number_die), (7, number_die))
}
#[cfg(any(test, feature = "big-rational-number"))]
/// Generates an arbitrary non-zero [`BigRational`].
pub fn big_rational_non_zero_number() -> impl Die<num_rational::BigRational> {
use num_traits::One;
let special_number_die = dice::one_of().two(
num_rational::BigRational::one(),
-num_rational::BigRational::one(),
);
let number_die = dice::from_fn(|mut fate| {
let numerator = fate.roll(dice::u32(1..)).into();
let denominator = fate.roll(dice::i32(1..)).into();
num_rational::BigRational::new(numerator, denominator)
});
dice::weighted_one_of_die().two((1, special_number_die), (7, number_die))
}
/// Generates an arbitrary [`Dim`] that is limited by [`dicetest::Limit`].
pub fn dim() -> impl Die<Dim> {
dice::length(..).map(Dim)
}
/// Generates an [`Infinitesimal`] using [`Infinitesimal::one`].
///
/// # Panic
///
/// This function panics if the given dimension count is zero.
pub fn infinitesimal_one<N: Number, I: Infinitesimal<N>>(dim: Dim) -> impl Die<I> {
assert!(
dim.0 != 0,
"Generator infinitesimal_one must not be used with dimension count 0"
);
dice::from_fn(move |mut fate| {
let idx = fate.roll(dice::uni_usize(dim.indices()));
I::one(idx, dim)
})
}
/// Generates an [`Infinitesimal`] with arbitrary elements.
pub fn infinitesimal<N: Number, I: Infinitesimal<N>, NDI: Die<N>>(
dim: Dim,
infinitesimal_number_die: NDI,
) -> impl Die<I> {
dice::from_fn(move |mut fate| {
let infinitesimal_zeros_die = dice::from_fn(|_| I::zeros(dim));
let safe_infinitesimal_one_die = dice::from_fn(|mut fate| {
if dim.0 == 0 {
I::zeros(Dim(0))
} else {
fate.roll(infinitesimal_one(dim))
}
});
let infinitesimal_dense_die = dice::from_fn(|mut fate| {
let elems = fate.roll(dice::vec(&infinitesimal_number_die, dim.0));
I::from_dense(elems)
});
let infinitesimal_sparse_die = dice::from_fn(|mut fate| {
if dim.0 == 0 {
I::zeros(Dim(0))
} else {
let number_with_index_die =
dice::zip().two(dice::uni_usize(dim.indices()), &infinitesimal_number_die);
let elems = fate.roll(dice::vec(number_with_index_die, 0..=dim.0));
I::from_sparse(elems, dim)
}
});
// Compose different generators because each of them has a different distribution
let infinitesimal_die = dice::one_of_die().three(
dice::one_of_die().two(&infinitesimal_zeros_die, &safe_infinitesimal_one_die),
&infinitesimal_dense_die,
&infinitesimal_sparse_die,
);
fate.roll(infinitesimal_die)
})
}
/// Generates a [`Jet`] using [`Jet::constant`].
pub fn jet_constant<N: Number, I: Infinitesimal<N>, NDR: Die<N>>(
dim: Dim,
real_number_die: NDR,
) -> impl Die<Jet<N, I>> {
dice::from_fn(move |mut fate| {
let real = fate.roll(&real_number_die);
Jet::constant(real, dim)
})
}
/// Generates a [`Jet`] using [`Jet::variable`].
///
/// # Panics
///
/// This function panics if the given dimension count is zero.
pub fn jet_variable<N: Number, I: Infinitesimal<N>, NDR: Die<N>>(
dim: Dim,
real_number_die: NDR,
) -> impl Die<Jet<N, I>> {
assert!(
dim.0 != 0,
"Generator jet_variable must not be used with dimension count 0"
);
dice::from_fn(move |mut fate| {
let real = fate.roll(&real_number_die);
let idx = fate.roll(dice::uni_usize(dim.indices()));
Jet::variable(real, idx, dim)
})
}
/// Generates a [`Jet`] with arbitrary values for the real part and the infinitesimal part.
pub fn | <N: Number, I: Infinitesimal<N>, NDR: Die<N>, NDI: Die<N>>(
dim: Dim,
real_number_die: NDR,
infinitesimal_number_die: NDI,
) -> impl Die<Jet<N, I>> {
dice::from_fn(move |mut fate| {
let any_jet_die = dice::zip()
.two(
&real_number_die,
infinitesimal(dim, &infinitesimal_number_die),
)
.map_once(|(real, infinitesimal)| Jet::new(real, infinitesimal));
let safe_variable_die = dice::from_fn(|mut fate| {
if dim.0 == 0 {
fate.roll(jet_constant(dim, &real_number_die))
} else {
fate.roll(jet_variable(dim, &real_number_die))
}
});
// Compose different generators because each of them has a different distribution
let jet_die = dice::one_of_die_once().three(
jet_constant(dim, &real_number_die),
safe_variable_die,
any_jet_die,
);
fate.roll(jet_die)
})
}
| jet |
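// Hedged usage sketch (test-side; not part of this module). API names are
// taken from dicetest's prelude as I understand it, so treat this as a
// sketch rather than verified code:
//
//     use dicetest::prelude::*;
//
//     #[test]
//     fn roll_some_values() {
//         Dicetest::repeatedly().run(|mut fate| {
//             let d = fate.roll(dim());
//             let q = fate.roll(big_rational_number()); // needs the cfg gate above
//             // ... assert properties of `d` and `q` here ...
//             let _ = (d, q);
//         })
//     }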
pdac_w1_0_35.rs | #[doc = "Reader of register PDAC_W1_0_35"]
pub type R = crate::R<u32, super::PDAC_W1_0_35>;
#[doc = "Writer for register PDAC_W1_0_35"]
pub type W = crate::W<u32, super::PDAC_W1_0_35>;
#[doc = "Register PDAC_W1_0_35 `reset()`'s with value 0"]
impl crate::ResetValue for super::PDAC_W1_0_35 {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Exclusive Access Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EAL_A {
#[doc = "0: Lock disabled"]
EAL_0,
#[doc = "1: Lock disabled until next reset"]
EAL_1,
#[doc = "2: Lock enabled, lock state = available"]
EAL_2,
#[doc = "3: Lock enabled, lock state = not available"]
EAL_3,
}
impl From<EAL_A> for u8 {
#[inline(always)]
fn from(variant: EAL_A) -> Self {
match variant {
EAL_A::EAL_0 => 0,
EAL_A::EAL_1 => 1,
EAL_A::EAL_2 => 2,
EAL_A::EAL_3 => 3,
}
}
}
#[doc = "Reader of field `EAL`"]
pub type EAL_R = crate::R<u8, EAL_A>;
impl EAL_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> EAL_A {
match self.bits {
0 => EAL_A::EAL_0,
1 => EAL_A::EAL_1,
2 => EAL_A::EAL_2,
3 => EAL_A::EAL_3,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `EAL_0`"]
#[inline(always)]
pub fn is_eal_0(&self) -> bool {
*self == EAL_A::EAL_0
}
#[doc = "Checks if the value of the field is `EAL_1`"]
#[inline(always)]
pub fn is_eal_1(&self) -> bool {
*self == EAL_A::EAL_1
}
#[doc = "Checks if the value of the field is `EAL_2`"]
#[inline(always)]
pub fn is_eal_2(&self) -> bool {
*self == EAL_A::EAL_2
}
#[doc = "Checks if the value of the field is `EAL_3`"]
#[inline(always)]
pub fn is_eal_3(&self) -> bool {
*self == EAL_A::EAL_3
}
}
#[doc = "Write proxy for field `EAL`"]
pub struct EAL_W<'a> {
w: &'a mut W,
}
impl<'a> EAL_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: EAL_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Lock disabled"]
#[inline(always)]
pub fn eal_0(self) -> &'a mut W {
self.variant(EAL_A::EAL_0)
}
#[doc = "Lock disabled until next reset"]
#[inline(always)]
pub fn eal_1(self) -> &'a mut W {
self.variant(EAL_A::EAL_1)
}
#[doc = "Lock enabled, lock state = available"]
#[inline(always)]
pub fn eal_2(self) -> &'a mut W {
self.variant(EAL_A::EAL_2)
}
#[doc = "Lock enabled, lock state = not available"]
#[inline(always)]
pub fn eal_3(self) -> &'a mut W {
self.variant(EAL_A::EAL_3)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 24)) | (((value as u32) & 0x03) << 24);
self.w
}
}
#[doc = "Lock\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LK2_A {
#[doc = "0: Entire PDACs can be written."]
LK2_0,
#[doc = "1: Entire PDACs can be written."]
LK2_1,
#[doc = "2: Domain x can only update the DxACP field and the LK2 field; no other PDACs fields can be written."]
LK2_2,
#[doc = "3: PDACs is locked (read-only) until the next reset."]
LK2_3,
}
impl From<LK2_A> for u8 {
#[inline(always)]
fn from(variant: LK2_A) -> Self {
match variant {
LK2_A::LK2_0 => 0,
LK2_A::LK2_1 => 1,
LK2_A::LK2_2 => 2,
LK2_A::LK2_3 => 3,
}
}
}
#[doc = "Reader of field `LK2`"]
pub type LK2_R = crate::R<u8, LK2_A>;
impl LK2_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LK2_A {
match self.bits {
0 => LK2_A::LK2_0,
1 => LK2_A::LK2_1,
2 => LK2_A::LK2_2,
3 => LK2_A::LK2_3,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `LK2_0`"]
#[inline(always)]
pub fn is_lk2_0(&self) -> bool {
*self == LK2_A::LK2_0
}
#[doc = "Checks if the value of the field is `LK2_1`"]
#[inline(always)]
pub fn is_lk2_1(&self) -> bool {
*self == LK2_A::LK2_1
}
#[doc = "Checks if the value of the field is `LK2_2`"]
#[inline(always)]
pub fn is_lk2_2(&self) -> bool {
*self == LK2_A::LK2_2
}
#[doc = "Checks if the value of the field is `LK2_3`"]
#[inline(always)]
pub fn is_lk2_3(&self) -> bool {
*self == LK2_A::LK2_3
}
}
#[doc = "Write proxy for field `LK2`"]
pub struct LK2_W<'a> {
w: &'a mut W,
}
impl<'a> LK2_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LK2_A) -> &'a mut W {
{
self.bits(variant.into())
}
}
#[doc = "Entire PDACs can be written."]
#[inline(always)]
pub fn lk2_0(self) -> &'a mut W {
self.variant(LK2_A::LK2_0)
}
#[doc = "Entire PDACs can be written."]
#[inline(always)]
pub fn lk2_1(self) -> &'a mut W {
self.variant(LK2_A::LK2_1)
}
#[doc = "Domain x can only update the DxACP field and the LK2 field; no other PDACs fields can be written."]
#[inline(always)]
pub fn lk2_2(self) -> &'a mut W {
self.variant(LK2_A::LK2_2)
}
#[doc = "PDACs is locked (read-only) until the next reset."]
#[inline(always)]
pub fn lk2_3(self) -> &'a mut W {
self.variant(LK2_A::LK2_3)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x03 << 29)) | (((value as u32) & 0x03) << 29);
self.w
}
}
#[doc = "Valid\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum VLD_A {
#[doc = "0: The PDACs assignment is invalid."]
VLD_0,
#[doc = "1: The PDACs assignment is valid."]
VLD_1,
}
impl From<VLD_A> for bool {
#[inline(always)]
fn from(variant: VLD_A) -> Self {
match variant {
VLD_A::VLD_0 => false,
VLD_A::VLD_1 => true,
}
}
}
#[doc = "Reader of field `VLD`"]
pub type VLD_R = crate::R<bool, VLD_A>;
impl VLD_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> VLD_A {
match self.bits {
false => VLD_A::VLD_0,
true => VLD_A::VLD_1,
}
}
#[doc = "Checks if the value of the field is `VLD_0`"]
#[inline(always)]
pub fn is_vld_0(&self) -> bool {
*self == VLD_A::VLD_0
}
#[doc = "Checks if the value of the field is `VLD_1`"]
#[inline(always)]
pub fn is_vld_1(&self) -> bool {
*self == VLD_A::VLD_1
}
}
#[doc = "Write proxy for field `VLD`"]
pub struct VLD_W<'a> {
w: &'a mut W,
}
impl<'a> VLD_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: VLD_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The PDACs assignment is invalid."]
#[inline(always)]
pub fn vld_0(self) -> &'a mut W {
self.variant(VLD_A::VLD_0)
}
#[doc = "The PDACs assignment is valid."]
#[inline(always)]
pub fn vld_1(self) -> &'a mut W {
self.variant(VLD_A::VLD_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
self.w
}
}
impl R {
#[doc = "Bits 24:25 - Exclusive Access Lock"]
#[inline(always)]
pub fn eal(&self) -> EAL_R {
EAL_R::new(((self.bits >> 24) & 0x03) as u8)
}
#[doc = "Bits 29:30 - Lock"]
#[inline(always)]
pub fn lk2(&self) -> LK2_R {
LK2_R::new(((self.bits >> 29) & 0x03) as u8)
} | #[inline(always)]
pub fn vld(&self) -> VLD_R {
VLD_R::new(((self.bits >> 31) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 24:25 - Exclusive Access Lock"]
#[inline(always)]
pub fn eal(&mut self) -> EAL_W {
EAL_W { w: self }
}
#[doc = "Bits 29:30 - Lock"]
#[inline(always)]
pub fn lk2(&mut self) -> LK2_W {
LK2_W { w: self }
}
#[doc = "Bit 31 - Valid"]
#[inline(always)]
pub fn vld(&mut self) -> VLD_W {
VLD_W { w: self }
}
} | #[doc = "Bit 31 - Valid"] |
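// Hedged usage sketch (the peripheral/register path is an assumption; the
// read-modify-write shape is the usual svd2rust pattern):
//
//     // periph.pdac_w1_0_35.modify(|_, w| {
//     //     w.eal().eal_2()      // enable exclusive-access lock
//     //      .lk2().lk2_2()      // restrict writes to DxACP/LK2
//     //      .vld().set_bit()    // mark the assignment valid
//     // });
//     // let locked = periph.pdac_w1_0_35.read().lk2().is_lk2_3();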
ars_sim_obstacles_detector_ros_node.py | #!/usr/bin/env python
import numpy as np
from numpy import *
import os
import rospy
from ars_sim_obstacles_detector_ros import *
def main():
|
''' MAIN '''
if __name__ == '__main__':
main() | ars_sim_obstacles_detector_ros = ArsSimObstaclesDetectorRos()
ars_sim_obstacles_detector_ros.init()
ars_sim_obstacles_detector_ros.open()
try:
ars_sim_obstacles_detector_ros.run()
except rospy.ROSInterruptException:
pass
return 0 |
fscache.rs | use notify::{RecommendedWatcher, Watcher, DebouncedEvent, RecursiveMode};
use async_value::{Async, Stale};
use std::sync::{Arc, RwLock};
use std::sync::mpsc::{channel, Sender, Receiver};
use std::collections::{HashMap, HashSet};
use std::time::Duration;
use std::path::PathBuf;
use crate::files::{Files, File, SortBy};
use crate::widget::Events;
use crate::fail::{HResult, HError, ErrorLog, Backtrace, ArcBacktrace};
#[derive(Debug, Clone)]
pub struct DirSettings {
sort: SortBy,
dirs_first: bool,
reverse: bool,
show_hidden: bool,
filter: Option<String>,
}
impl DirSettings {
fn new() -> DirSettings {
DirSettings {
sort: SortBy::Name,
dirs_first: true,
reverse: false,
show_hidden: true,
filter: None
}
}
}
#[derive(Debug, Clone)]
pub struct TabSettings {
selection: Option<File>,
multi_selections: Vec<File>,
dir_settings: DirSettings,
}
impl TabSettings {
fn new() -> TabSettings {
TabSettings {
selection: None,
multi_selections: vec![],
dir_settings: DirSettings::new()
}
}
}
impl std::fmt::Debug for FsCache {
fn fmt(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(formatter,
"{:?}\n{:?}\n{:?}",
self.tab_settings,
self.watched_dirs,
self.files)
}
}
unsafe impl Sync for FsCache {}
#[derive(Clone)]
pub struct FsCache {
files: Arc<RwLock<HashMap<File, Files>>>,
pub tab_settings: Arc<RwLock<HashMap<File, TabSettings>>>,
watched_dirs: Arc<RwLock<HashSet<File>>>,
watcher: Arc<RwLock<RecommendedWatcher>>,
pub fs_changes: Arc<RwLock<Vec<(File, Option<File>, Option<File>)>>>,
sender: Sender<Events>,
}
impl FsCache {
pub fn new(sender: Sender<Events>) -> FsCache {
let (tx_fs_event, rx_fs_event) = channel();
let watcher = RecommendedWatcher::new(tx_fs_event,
Duration::from_secs(2)).unwrap();
let fs_cache = FsCache {
files: Arc::new(RwLock::new(HashMap::new())),
tab_settings: Arc::new(RwLock::new(HashMap::new())),
watched_dirs: Arc::new(RwLock::new(HashSet::new())),
watcher: Arc::new(RwLock::new(watcher)),
fs_changes: Arc::new(RwLock::new(vec![])),
sender: sender.clone(),
};
watch_fs(rx_fs_event,
fs_cache.files.clone(),
fs_cache.fs_changes.clone(),
sender.clone());
fs_cache
}
pub fn new_client(&self, settings: HashMap<File, TabSettings>) -> HResult<FsCache> {
let mut cache = self.clone();
cache.tab_settings = Arc::new(RwLock::new(settings));
Ok(cache)
}
}
pub type CachedFiles = (Option<File>, Async<Files>);
impl FsCache {
pub fn get_files(&self, dir: &File, stale: Stale) -> HResult<CachedFiles> {
if self.files.read()?.contains_key(dir) {
self.get_cached_files(dir)
} else {
let dir = dir.clone();
let selection = self.get_selection(&dir).ok();
let cache = self.clone();
let files = Async::new(move |_| {
let mut files = Files::new_from_path_cancellable(&dir.path, stale)?;
cache.add_watch(&dir).log();
FsCache::apply_settingss(&cache, &mut files).ok();
Ok(files)
});
Ok((selection, files))
}
}
pub fn get_files_sync(&self, dir: &File) -> HResult<Files> {
let files = self.get_files(&dir, Stale::new())?.1;
let mut files = files.run_sync()?;
FsCache::apply_settingss(&self, &mut files).ok();
let files = FsCache::ensure_not_empty(files)?;
self.add_watch(&dir).log();
Ok(files)
}
pub fn get_selection(&self, dir: &File) -> HResult<File> {
Ok(self.tab_settings.read()?.get(&dir).as_ref()?.selection.as_ref()?.clone())
}
pub fn save_settings(&self, files: &Files, selection: Option<File>) -> HResult<()> {
let dir = files.directory.clone();
let tab_settings = FsCache::extract_tab_settings(&files, selection);
self.tab_settings.write()?.insert(dir, tab_settings);
Ok(())
}
pub fn put_files(&self, files: &Files, selection: Option<File>) -> HResult<()> |
pub fn is_cached(&self, dir: &File) -> HResult<bool> {
Ok(self.files.read()?.contains_key(dir))
}
pub fn watch_only(&self, open_dirs: HashSet<File>) -> HResult<()> {
let removable = self.watched_dirs
.read()?
.difference(&open_dirs)
.map(|dir| dir.clone())
.collect::<Vec<File>>();
for watch in removable {
self.remove_watch(&watch).log();
}
Ok(())
}
fn add_watch(&self, dir: &File) -> HResult<()> {
if !self.watched_dirs.read()?.contains(&dir) {
self.watcher.write()?.watch(&dir.path, RecursiveMode::NonRecursive)?;
self.watched_dirs.write()?.insert(dir.clone());
}
Ok(())
}
fn remove_watch(&self, dir: &File) -> HResult<()> {
if self.watched_dirs.read()?.contains(&dir) {
self.watched_dirs.write()?.remove(dir);
self.watcher.write()?.unwatch(&dir.path)?
}
Ok(())
}
fn get_cached_files(&self, dir: &File) -> HResult<CachedFiles> {
let tab_settings = match self.tab_settings.read()?.get(&dir) {
Some(tab_settings) => tab_settings.clone(),
None => TabSettings::new()
};
let selection = tab_settings.selection.clone();
let file_cache = self.files.clone();
let dir = dir.clone();
let files = Async::new(move |_| {
let mut files = file_cache.read()
.map_err(|e| HError::from(e))?
.get(&dir)
.ok_or(HError::NoneError(Backtrace::new_arced()))?
.clone();
let tab_settings = &tab_settings;
files.sort = tab_settings.dir_settings.sort;
files.dirs_first = tab_settings.dir_settings.dirs_first;
files.reverse = tab_settings.dir_settings.reverse;
files.show_hidden = tab_settings.dir_settings.show_hidden;
files.filter = tab_settings.dir_settings.filter.clone();
if tab_settings.multi_selections.len() > 0 {
for file in &mut files.files {
for selected_files in &tab_settings.multi_selections {
if file.path == selected_files.path {
file.selected = true;
}
}
}
}
files.sort();
let files = FsCache::ensure_not_empty(files)?;
Ok(files)
});
Ok((selection, files))
}
pub fn apply_settings(cache: &FsCache,
files: &mut Files)
-> HResult<()> {
let dir = &files.directory;
let tab_settings = cache.tab_settings.read()?.get(&dir).cloned();
if tab_settings.is_none() { return Ok(()) }
let tab_settings = tab_settings?;
files.sort = tab_settings.dir_settings.sort;
files.dirs_first = tab_settings.dir_settings.dirs_first;
files.reverse = tab_settings.dir_settings.reverse;
files.show_hidden = tab_settings.dir_settings.show_hidden;
files.filter = tab_settings.dir_settings.filter.clone();
if tab_settings.multi_selections.len() > 0 {
for file in &mut files.files {
for selected_files in &tab_settings.multi_selections {
if file.path == selected_files.path {
file.selected = true;
}
}
}
}
files.sort();
Ok(())
}
pub fn ensure_not_empty(mut files: Files) -> HResult<Files> {
if files.len() == 0 {
let path = &files.directory.path;
let placeholder = File::new_placeholder(&path)?;
files.files.push(placeholder);
}
Ok(files)
}
fn extract_tab_settings(files: &Files, selection: Option<File>) -> TabSettings {
TabSettings {
selection: selection,
multi_selections: files.get_selected().into_iter().cloned().collect(),
dir_settings: DirSettings {
sort: files.sort,
dirs_first: files.dirs_first,
reverse: files.reverse,
show_hidden: files.show_hidden,
filter: files.filter.clone(),
}
}
}
}
fn watch_fs(rx_fs_events: Receiver<DebouncedEvent>,
fs_cache: Arc<RwLock<HashMap<File, Files>>>,
fs_changes: Arc<RwLock<Vec<(File, Option<File>, Option<File>)>>>,
sender: Sender<Events>) {
std::thread::spawn(move || -> HResult<()> {
for event in rx_fs_events.iter() {
apply_event(&fs_cache, &fs_changes, event).log();
sender.send(Events::WidgetReady).ok();
}
Ok(())
});
}
fn apply_event(_fs_cache: &Arc<RwLock<HashMap<File, Files>>>,
fs_changes: &Arc<RwLock<Vec<(File, Option<File>, Option<File>)>>>,
event: DebouncedEvent)
-> HResult<()> {
let path = &event.get_source_path()?;
let dirpath = path.parent()
.map(|path| path.to_path_buf())
.unwrap_or_else(|| PathBuf::from("/"));
let dir = File::new_from_path(&dirpath, None)?;
let old_file = File::new_from_path(&path, None)?;
let mut new_file = match event {
DebouncedEvent::Remove(_) => None,
_ => Some(File::new_from_path(&path, None)?)
};
new_file.as_mut().map(|file| file.meta_sync());
fs_changes.write()?.push((dir,
Some(old_file),
new_file));
// for dir in fs_cache.write()?.values_mut() {
// if dir.path_in_here(&path).unwrap_or(false) {
// let old_file = dir.find_file_with_path(&path).cloned();
// let dirty_meta = old_file
// .as_ref()
// .map(|f| f.dirty_meta.clone())
// .unwrap_or(None);
// let mut new_file = match event {
// DebouncedEvent::Remove(_) => None,
// _ => Some(File::new_from_path(&path, dirty_meta)?)
// };
// new_file.as_mut().map(|file| file.meta_sync());
// dir.replace_file(old_file.as_ref(), new_file.clone()).log();
// fs_changes.write()?.push((dir.directory.clone(),
// old_file,
// new_file));
// }
// }
Ok(())
}
trait PathFromEvent {
fn get_source_path(&self) -> HResult<&PathBuf>;
}
impl PathFromEvent for DebouncedEvent {
fn get_source_path(&self) -> HResult<&PathBuf> {
match self {
DebouncedEvent::Create(path) |
DebouncedEvent::Write(path) |
DebouncedEvent::Chmod(path) |
DebouncedEvent::Remove(path) |
DebouncedEvent::NoticeWrite(path) |
DebouncedEvent::NoticeRemove(path) => Ok(path),
DebouncedEvent::Rename(old_path, _) => Ok(old_path),
DebouncedEvent::Error(err, path)
=> Err(HError::INotifyError(format!("{}, {:?}", err, path),
Backtrace::new_arced())),
DebouncedEvent::Rescan
=> Err(HError::INotifyError("Need to rescan".to_string(),
Backtrace::new_arced()))
}
}
}
| {
let dir = files.directory.clone();
let tab_settings = FsCache::extract_tab_settings(&files, selection);
self.tab_settings.write()?.insert(dir.clone(), tab_settings);
// let mut file_cache = self.files.write()?;
// if file_cache.contains_key(&files.directory) {
// if files.meta_updated {
// let mut files = files.clone();
// files.meta_updated = false;
// file_cache.insert(dir, files);
// }
// } else {
// file_cache.insert(dir, files.clone());
// }
Ok(())
} |
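// Hedged usage sketch (assumes an event `sender` channel and a directory `File`):
//
//     // let cache = FsCache::new(sender);
//     // let (prev_selection, async_files) = cache.get_files(&dir, Stale::new())?;
//     // let files = async_files.run_sync()?; // or poll the Async elsewhere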
upsert.js | require(`dotenv`).config()
const fs = require(`fs`)
const path = require(`path`)
const yaml = require(`js-yaml`)
const mongodb = require(`mongodb`)
const { MongoClient } = mongodb
module.exports = () => {
MongoClient
.connect(`mongodb+srv://${process.env.MONGODB_HOST}`, {
ssl: true,
auth: {
user: process.env.MONGODB_USERNAME,
password: process.env.MONGODB_PASSWORD,
},
w: `majority`,
useUnifiedTopology: true,
})
.then((client) => {
const db = client
.db(process.env.MONGODB_DB)
try {
const entries = fs.readdirSync(path.resolve(`./quest-log/entry`))
const threads = fs.readdirSync(path.resolve(`./quest-log/thread`))
const dialogs = fs.readdirSync(path.resolve(`./quest-log/dialog`))
const entryCollection = db.collection(process.env.MONGODB_ENTRY_COLLECTION)
const threadCollection = db.collection(process.env.MONGODB_THREAD_COLLECTION)
const dialogCollection = db.collection(process.env.MONGODB_DIALOG_COLLECTION)
Promise.all([
entryCollection
.deleteMany({})
.then(() => entryCollection.insertMany(entries.map((entry) => JSON.parse(fs.readFileSync(`./quest-log/entry/${entry}`).toString())),
{
ordered: false,
}))
.then(({ result }) => {
if (result.ok) {
console.log(`entries - done!`)
} else {
console.log(`there was a problem inserting entries...`)
}
}),
threadCollection
.deleteMany({})
.then(() => threadCollection.insertMany(threads.map((thread) => JSON.parse(fs.readFileSync(`./quest-log/thread/${thread}`).toString())),
{
ordered: false,
}))
.then(({ result }) => {
if (result.ok) { | }),
dialogCollection
.deleteMany({})
.then(() => dialogCollection.insertMany(dialogs.map((dialog) => yaml.safeLoad(fs.readFileSync(`./quest-log/dialog/${dialog}`, `utf8`)))))
.then(({ result }) => {
if (result.ok) {
console.log(`dialog - done!`)
} else {
console.log(`there was a problem inserting dialog...`)
}
}),
])
.catch((err) => {
console.error(err)
process.exit(1)
})
.finally(() => client.close())
} catch(error) {
throw error;
}
})
.catch((err) => {
console.error(err)
process.exit(1)
})
} | console.log(`threads - done!`)
} else {
console.log(`there was a problem inserting threads...`)
} |
testupload.go | package main
import (
"bytes"
"errors"
"fmt"
"github.com/git-lfs/git-lfs/tools"
)
// "upload" - all missing
func uploadAllMissing(oidsExist, oidsMissing []TestObject) error {
retobjs, err := callBatchApi("upload", oidsMissing)
if err != nil {
return err
}
if len(retobjs) != len(oidsMissing) {
return fmt.Errorf("Incorrect number of returned objects, expected %d, got %d", len(oidsMissing), len(retobjs))
}
var errbuf bytes.Buffer
for _, o := range retobjs {
_, ok := o.Rel("upload")
if !ok {
errbuf.WriteString(fmt.Sprintf("Missing upload link for %s\n", o.Oid))
}
// verify link is optional so don't check
}
if errbuf.Len() > 0 {
return errors.New(errbuf.String())
}
return nil
}
// "upload" - all present
func uploadAllExists(oidsExist, oidsMissing []TestObject) error {
retobjs, err := callBatchApi("upload", oidsExist)
if err != nil {
return err
}
if len(retobjs) != len(oidsExist) {
return fmt.Errorf("Incorrect number of returned objects, expected %d, got %d", len(oidsExist), len(retobjs))
}
var errbuf bytes.Buffer
for _, o := range retobjs {
link, ok := o.Rel("upload")
if ok {
errbuf.WriteString(fmt.Sprintf("Upload link should not exist for %s, was %s\n", o.Oid, link))
}
}
if errbuf.Len() > 0 {
return errors.New(errbuf.String())
}
return nil
}
// "upload" - mix of missing & present
func uploadMixed(oidsExist, oidsMissing []TestObject) error {
existSet := tools.NewStringSetWithCapacity(len(oidsExist))
for _, o := range oidsExist {
existSet.Add(o.Oid)
} | }
calloids := interleaveTestData(oidsExist, oidsMissing)
retobjs, err := callBatchApi("upload", calloids)
if err != nil {
return err
}
count := len(oidsExist) + len(oidsMissing)
if len(retobjs) != count {
return fmt.Errorf("Incorrect number of returned objects, expected %d, got %d", count, len(retobjs))
}
var errbuf bytes.Buffer
for _, o := range retobjs {
link, ok := o.Rel("upload")
if existSet.Contains(o.Oid) {
if ok {
errbuf.WriteString(fmt.Sprintf("Upload link should not exist for %s, was %s\n", o.Oid, link))
}
}
if missingSet.Contains(o.Oid) && !ok {
errbuf.WriteString(fmt.Sprintf("Missing upload link for %s\n", o.Oid))
}
}
if errbuf.Len() > 0 {
return errors.New(errbuf.String())
}
return nil
}
func uploadEdgeCases(oidsExist, oidsMissing []TestObject) error {
errorCases := make([]TestObject, 0, 5)
errorCodeMap := make(map[string]int, 5)
errorReasonMap := make(map[string]string, 5)
validCases := make([]TestObject, 0, 1)
validReasonMap := make(map[string]string, 5)
// Invalid SHAs - code 422
// Too short
sha := "a345cde"
errorCases = append(errorCases, TestObject{Oid: sha, Size: 99})
errorCodeMap[sha] = 422
errorReasonMap[sha] = "SHA is too short"
// Too long
sha = "1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"
errorCases = append(errorCases, TestObject{Oid: sha, Size: 99})
errorCodeMap[sha] = 422
errorReasonMap[sha] = "SHA is too long"
// Invalid characters -----!---------------------------------!
sha = "bf3e3e2af9366a3b704ax0c31de5afa64193ebabffde2091936ad2G7510bc03a"
errorCases = append(errorCases, TestObject{Oid: sha, Size: 99})
errorCodeMap[sha] = 422
errorReasonMap[sha] = "SHA contains invalid characters"
// Invalid size - code 422
sha = "e3bf3e2af9366a3b704af0c31de5afa64193ebabffde2091936ad237510bc03a"
errorCases = append(errorCases, TestObject{Oid: sha, Size: -1})
errorCodeMap[sha] = 422
errorReasonMap[sha] = "Negative size"
sha = "d2983e2af9366a3b704af0c31de5afa64193ebabffde2091936ad237510bc03a"
errorCases = append(errorCases, TestObject{Oid: sha, Size: -125})
errorCodeMap[sha] = 422
errorReasonMap[sha] = "Negative size"
// Zero size - should be allowed
sha = "159f6ac723b9023b704af0c31de5afa64193ebabffde2091936ad237510bc03a"
validCases = append(validCases, TestObject{Oid: sha, Size: 0})
validReasonMap[sha] = "Zero size should be allowed"
calloids := interleaveTestData(errorCases, validCases)
retobjs, err := callBatchApi("upload", calloids)
if err != nil {
return err
}
count := len(errorCases) + len(validCases)
if len(retobjs) != count {
return fmt.Errorf("Incorrect number of returned objects, expected %d, got %d", count, len(retobjs))
}
var errbuf bytes.Buffer
for _, o := range retobjs {
link, ok := o.Rel("upload")
if code, iserror := errorCodeMap[o.Oid]; iserror {
reason, _ := errorReasonMap[o.Oid]
if ok {
errbuf.WriteString(fmt.Sprintf("Upload link should not exist for %s, was %s, reason %s\n", o.Oid, link, reason))
}
if o.Error == nil {
errbuf.WriteString(fmt.Sprintf("Upload should include an error for invalid object %s, reason %s", o.Oid, reason))
} else if o.Error.Code != code {
errbuf.WriteString(fmt.Sprintf("Upload error code for missing object %s should be %d, got %d, reason %s\n", o.Oid, code, o.Error.Code, reason))
}
}
if reason, reasonok := validReasonMap[o.Oid]; reasonok {
if !ok {
errbuf.WriteString(fmt.Sprintf("Missing upload link for %s, should be present because %s\n", o.Oid, reason))
}
}
}
if errbuf.Len() > 0 {
return errors.New(errbuf.String())
}
return nil
}
func init() {
addTest("Test upload: all missing", uploadAllMissing)
addTest("Test upload: all present", uploadAllExists)
addTest("Test upload: mixed", uploadMixed)
addTest("Test upload: edge cases", uploadEdgeCases)
} | missingSet := tools.NewStringSetWithCapacity(len(oidsMissing))
for _, o := range oidsMissing {
missingSet.Add(o.Oid) |
hcdcmds_test.go | // Copyright (c) 2015-2017 The Decred developers
// Copyright (c) 2018-2020 The Hc developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package hcjson_test
import (
"bytes"
"encoding/json"
"fmt"
"reflect"
"testing"
"github.com/HcashOrg/hcd/hcjson"
)
// TestDcrdCmds tests that all of the dcrd extended commands marshal and
// unmarshal into valid results, including handling of optional fields being
// omitted in the marshalled command, while optional fields with defaults have
// the default assigned on unmarshalled commands.
func | (t *testing.T) {
t.Parallel()
testID := int(1)
tests := []struct {
name string
newCmd func() (interface{}, error)
staticCmd func() interface{}
marshalled string
unmarshalled interface{}
}{
{
name: "debuglevel",
newCmd: func() (interface{}, error) {
return hcjson.NewCmd("debuglevel", "trace")
},
staticCmd: func() interface{} {
return hcjson.NewDebugLevelCmd("trace")
},
marshalled: `{"jsonrpc":"1.0","method":"debuglevel","params":["trace"],"id":1}`,
unmarshalled: &hcjson.DebugLevelCmd{
LevelSpec: "trace",
},
},
{
name: "getstakeversions",
newCmd: func() (interface{}, error) {
return hcjson.NewCmd("getstakeversions", "deadbeef", 1)
},
staticCmd: func() interface{} {
return hcjson.NewGetStakeVersionsCmd("deadbeef", 1)
},
marshalled: `{"jsonrpc":"1.0","method":"getstakeversions","params":["deadbeef",1],"id":1}`,
unmarshalled: &hcjson.GetStakeVersionsCmd{
Hash: "deadbeef",
Count: 1,
},
},
{
name: "getvoteinfo",
newCmd: func() (interface{}, error) {
return hcjson.NewCmd("getvoteinfo", 1)
},
staticCmd: func() interface{} {
return hcjson.NewGetVoteInfoCmd(1)
},
marshalled: `{"jsonrpc":"1.0","method":"getvoteinfo","params":[1],"id":1}`,
unmarshalled: &hcjson.GetVoteInfoCmd{
Version: 1,
},
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Marshal the command as created by the new static command
// creation function.
marshalled, err := hcjson.MarshalCmd(testID, test.staticCmd())
if err != nil {
t.Errorf("MarshalCmd #%d (%s) unexpected error: %v", i,
test.name, err)
continue
}
if !bytes.Equal(marshalled, []byte(test.marshalled)) {
t.Errorf("Test #%d (%s) unexpected marshalled data - "+
"got %s, want %s", i, test.name, marshalled,
test.marshalled)
continue
}
// Ensure the command is created without error via the generic
// new command creation function.
cmd, err := test.newCmd()
if err != nil {
t.Errorf("Test #%d (%s) unexpected NewCmd error: %v ",
i, test.name, err)
}
// Marshal the command as created by the generic new command
// creation function.
marshalled, err = hcjson.MarshalCmd(testID, cmd)
if err != nil {
t.Errorf("MarshalCmd #%d (%s) unexpected error: %v", i,
test.name, err)
continue
}
if !bytes.Equal(marshalled, []byte(test.marshalled)) {
t.Errorf("Test #%d (%s) unexpected marshalled data - "+
"got %s, want %s", i, test.name, marshalled,
test.marshalled)
continue
}
var request hcjson.Request
if err := json.Unmarshal(marshalled, &request); err != nil {
t.Errorf("Test #%d (%s) unexpected error while "+
"unmarshalling JSON-RPC request: %v", i,
test.name, err)
continue
}
cmd, err = hcjson.UnmarshalCmd(&request)
if err != nil {
t.Errorf("UnmarshalCmd #%d (%s) unexpected error: %v", i,
test.name, err)
continue
}
if !reflect.DeepEqual(cmd, test.unmarshalled) {
t.Errorf("Test #%d (%s) unexpected unmarshalled command "+
"- got %s, want %s", i, test.name,
fmt.Sprintf("(%T) %+[1]v", cmd),
fmt.Sprintf("(%T) %+[1]v\n", test.unmarshalled))
continue
}
}
}
| TestDcrdCmds |
0016_auto_20200407_2042.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2020-04-07 17:42
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('crawl', '0015_remove_article_news_source'),
]
operations = [
migrations.RenameField( | old_name='source',
new_name='news_source',
),
] | model_name='article', |
p868.rs | use crate::lc::Solution;
impl Solution {
pub fn binary_gap(n: i32) -> i32 {
let mut v = n;
while v & 1 == 0 {
v = v >> 1;
}
v = v >> 1;
let mut shift = 1;
let mut max = 0;
while v > 0 {
if v & 1 == 1 {
if shift > max {
max = shift;
}
shift = 0;
}
v = v >> 1;
shift += 1;
}
max
}
}
#[test]
fn test() | {
assert_eq!(2, Solution::binary_gap(1000));
assert_eq!(2, Solution::binary_gap(22));
assert_eq!(0, Solution::binary_gap(8));
assert_eq!(2, Solution::binary_gap(5));
} |
|
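// Worked trace (illustrative): binary_gap(22), where 22 = 0b10110. The
// trailing zero is stripped (v = 0b1011), the lowest set bit is consumed
// (v = 0b101, shift = 1); the loop then measures gaps of 1 and 2 between
// consecutive set bits, so the answer is 2, matching the assertion above.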
sf_aes_key_r0_5.rs | #[doc = "Register `sf_aes_key_r0_5` reader"]
pub struct R(crate::R<SF_AES_KEY_R0_5_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<SF_AES_KEY_R0_5_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<SF_AES_KEY_R0_5_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<SF_AES_KEY_R0_5_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `sf_aes_key_r0_5` writer"]
pub struct W(crate::W<SF_AES_KEY_R0_5_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<SF_AES_KEY_R0_5_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0 | }
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<SF_AES_KEY_R0_5_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<SF_AES_KEY_R0_5_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `sf_aes_key_r0_5` reader - "]
pub struct SF_AES_KEY_R0_5_R(crate::FieldReader<u32, u32>);
impl SF_AES_KEY_R0_5_R {
#[inline(always)]
pub(crate) fn new(bits: u32) -> Self {
SF_AES_KEY_R0_5_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SF_AES_KEY_R0_5_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `sf_aes_key_r0_5` writer - "]
pub struct SF_AES_KEY_R0_5_W<'a> {
w: &'a mut W,
}
impl<'a> SF_AES_KEY_R0_5_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = value;
self.w
}
}
impl R {
#[doc = "Bits 0:31"]
#[inline(always)]
pub fn sf_aes_key_r0_5(&self) -> SF_AES_KEY_R0_5_R {
SF_AES_KEY_R0_5_R::new(self.bits)
}
}
impl W {
#[doc = "Bits 0:31"]
#[inline(always)]
pub fn sf_aes_key_r0_5(&mut self) -> SF_AES_KEY_R0_5_W {
SF_AES_KEY_R0_5_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "sf_aes_key_r0_5.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [sf_aes_key_r0_5](index.html) module"]
pub struct SF_AES_KEY_R0_5_SPEC;
impl crate::RegisterSpec for SF_AES_KEY_R0_5_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [sf_aes_key_r0_5::R](R) reader structure"]
impl crate::Readable for SF_AES_KEY_R0_5_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [sf_aes_key_r0_5::W](W) writer structure"]
impl crate::Writable for SF_AES_KEY_R0_5_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets sf_aes_key_r0_5 to value 0"]
impl crate::Resettable for SF_AES_KEY_R0_5_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
} | |
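// Hedged usage sketch (the peripheral handle `sf` is an assumption; the
// read/write API itself is listed in the register doc above):
//
//     // sf.sf_aes_key_r0_5
//     //     .write(|w| unsafe { w.sf_aes_key_r0_5().bits(0xdead_beef) });
//     // let word = sf.sf_aes_key_r0_5.read().sf_aes_key_r0_5().bits();
//
// `bits` is unsafe because any 32-bit value is accepted unchecked.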
seeder.module.ts | import { Module } from '@nestjs/common';
import { UserQuestionnaireAnswerFactory } from './userQuestionnaireAnswerFactory';
import { DatabaseSeeder } from './databaseSeeder';
import {
loadConfigModule,
loadMikroOrmModule,
} from '../shared/utils/loadModule';
import { SeedConfig } from './seed.config';
import { UserInvestmentQuestionnaireModule } from '../user-investment-questionnaire/userInvestmentQuestionnaire.module';
import { InvestmentQuestionnaireModule } from '../investment-questionnaire/investmentQuestionnaire.module';
import { RiskLevelModule } from '../risk-level/riskLevel.module';
import { UserRiskLevelFactory } from './userRiskLevelFactory';
import { PortfolioModule } from '../portfolio/portfolio.module';
import { UserRecommendedPortfolioFactory } from './userRecommendedPortfolioFactory';
import { InvestmentValueModule } from '../investment-value/investmentValue.module';
import { UserInvestmentValueFactory } from './userInvestmentValueFactory';
import { EventEmitterModule } from '@nestjs/event-emitter';
import { UserAssetClassFactory } from './userAssetClassFactory';
import { UserAssetClassModule } from '../user-asset-class/userAssetClass.module';
import { AssetClassModule } from '../asset-class/assetClass.module';
import { ManagedPortfolioFactory } from './managedPortfolioFactory';
import { ManagedPortfolioModule } from '../managed-portfolio/managedPortfolio.module';
import { OptimizerModule } from '../optimizer/optimizer.module';
import { DrivewealthModule } from '../drivewealth/drivewealth.module';
import { UserInvestmentProfileModule } from '../user-investment-profile/userInvestmentProfile.module';
import { UserInvestmentProfileFactory } from './userInvestmentProfileFactory';
@Module({
imports: [
loadMikroOrmModule(),
loadConfigModule(),
UserInvestmentQuestionnaireModule,
InvestmentQuestionnaireModule,
RiskLevelModule,
PortfolioModule,
InvestmentValueModule,
UserAssetClassModule,
AssetClassModule,
EventEmitterModule.forRoot(),
ManagedPortfolioModule,
OptimizerModule,
DrivewealthModule,
UserInvestmentProfileModule,
],
providers: [
DatabaseSeeder,
UserQuestionnaireAnswerFactory,
UserRiskLevelFactory,
UserRecommendedPortfolioFactory,
UserInvestmentValueFactory,
UserAssetClassFactory,
SeedConfig,
ManagedPortfolioFactory,
UserInvestmentProfileFactory,
],
})
export class | {}
| SeederModule |
main.rs | fn main() {
let tup = [(2,4), (4,5), (4,5)];
println!("{:?}", tup[0]);
}
fn | () {
} | find_concatenate_numbers |
helpers.py | import re
from subprocess import CalledProcessError
from typing import Any, List, Optional, Tuple
from avionix._process_utils import custom_check_output
def _space_split(output_line: str):
return [
value
for value in re.split(r"(\t| +)", output_line)
if not re.match(r"^\s*$", value)
]
def _get_name_locations(names: List[str], name_string: str):
locs: List[Any] = []
last_pos = 0
for name in names:
last_pos = name_string.find(name, last_pos)
locs.append(last_pos)
for i, loc in enumerate(locs):
if i + 1 < len(locs):
locs[i] = (loc, locs[i + 1])
continue
locs[i] = (loc, len(name_string))
return locs
def _split_using_locations(locations: List[Tuple[int, int]], values_string: str):
vals = []
for i, loc in enumerate(locations):
start = loc[0]
end = loc[1]
if i == len(locations) - 1:
vals.append(values_string[start:].strip())
continue
vals.append(values_string[start:end].strip())
return vals
def | (output: str):
output_lines = output.split("\n")
# Remove warning lines
if output_lines[0].find('WARNING') != -1:
output_lines.pop(0)
names = _space_split(output_lines[0])
value_locations = _get_name_locations(names, output_lines[0])
value_rows = []
for line in output_lines[1:]:
if line.strip():
values = _split_using_locations(value_locations, line)
value_rows.append(values)
return {names[i]: row for i, row in enumerate(zip(*value_rows))}
class KubectlGetException(Exception):
def __init__(self, msg: str):
super().__init__(msg)
def kubectl_get(resource: str, namespace: Optional[str] = None, wide: bool = False):
try:
command = f"kubectl get {resource}"
if namespace:
command += f" -n {namespace}"
if wide:
command += " -o wide"
return parse_output_to_dict(custom_check_output(command))
except CalledProcessError as err:
raise KubectlGetException(err.output)
| parse_output_to_dict |
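# Hedged example of the column parsing above (output text is illustrative):
#
#   raw = ("NAME    READY   STATUS\n"
#          "web-1   1/1     Running\n"
#          "web-2   0/1     Pending\n")
#   parse_output_to_dict(raw)
#   # -> {'NAME': ('web-1', 'web-2'),
#   #     'READY': ('1/1', '0/1'),
#   #     'STATUS': ('Running', 'Pending')}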
set-role.component.ts | import { Component, OnInit, ViewChild } from '@angular/core';
import { NzModalRef, NzMessageService } from 'ng-zorro-antd';
import { _HttpClient } from '@delon/theme';
import { SFComponent, SFSchema, SFUISchema } from '@delon/form';
import { CacheService } from '@delon/cache';
import { zip } from 'rxjs';
@Component({
selector: 'app-sys-organization-set-role',
templateUrl: './set-role.component.html',
})
export class SysOrganizationSetRoleComponent implements OnInit {
record: any = {}; | properties: {
sysRoleIdList: {
type: 'string',
title: '角色',
enum: [],
ui: {
widget: 'transfer',
titles: ['未拥有', '已拥有'],
showSearch: true,
// listStyle:{ 'width.px': 300, 'height.px': 300 }
},
default: []
},
},
required: [],
}
ui: SFUISchema = {};
constructor(
private modal: NzModalRef,
private msgSrv: NzMessageService,
public http: _HttpClient,
private cacheService: CacheService
) {
}
ngOnInit(): void {
zip(
this.http.get(`/chen/admin/sys/organization/${this.record.id}/sysRole`),
this.http.get(`/chen/admin/online/loginUser/sysRoleList`),
).subscribe(([sysOrganizationRoleList, sysRoleList]: any[]) => {
const defaultSysOrganizationRoleIdList = sysOrganizationRoleList.map((value, index, array) => {
return value.id;
});
const sysRoleListEnum = sysRoleList.map((value, index, array) => {
return {title: value.name, value: value.id, disabled: value.status === 'DISABLED' };
});
this.schema.properties.sysRoleIdList.enum = sysRoleListEnum;
this.sysOrganizationRole = {sysRoleIdList: defaultSysOrganizationRoleIdList};
try {
this.sf.refreshSchema();
} catch (e) {
// With a cached schema this throws "ERROR Error: Invalid Schema"
}
});
}
save(value: any) {
this.http.put(`/chen/admin/sys/organization/${this.record.id}/setSysRole`, value).subscribe(res => {
this.msgSrv.success('保存成功');
this.modal.close(true);
});
}
close() {
this.modal.destroy();
}
} | sysOrganizationRole: any;
@ViewChild('sf', { static: true }) sf: SFComponent;
schema: SFSchema = { |
0009_auto_20171025_0558.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-10-25 10:58
from __future__ import unicode_literals
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
| dependencies = [
('flexible_reports', '0008_auto_20171025_0553'),
]
operations = [
migrations.AlterField(
model_name='reportelement',
name='datasource',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='flexible_reports.Datasource', verbose_name='Datasource'),
),
] |
|
message-bubbles.min.js | !function(){function | (i){s.msg=void 0===s.msg?0:s.msg,s.msg<i.length&&(i[s.msg].addEventListener("animationend",function(){s(i)}),i[s.msg].classList.contains("msg-visible")||(s.msg<i.length-1&&(i[s.msg].classList.contains("right-msg")&&i[s.msg+1].classList.contains("right-msg")||!i[s.msg].classList.contains("right-msg")&&!i[s.msg+1].classList.contains("right-msg")&&!i[s.msg+1].classList.contains("time"))&&i[s.msg].classList.add("no-tail"),i[s.msg].classList.add("msg-visible")),s.msg++)}window.addEventListener("load",function(){var i=document.querySelectorAll("ul.rounded-messages.reveal-messages li");i.length>0&&s(i)})}();
| s |
mesh.rs | use std::error::Error;
use std::ops::{Deref, DerefMut};
use std::path::PathBuf;
use fenris::connectivity::Segment2d2Connectivity;
use fenris::geometry::polymesh::{PolyMesh2d, PolyMesh3d};
use fenris::mesh::{Mesh2d, QuadMesh2d, TriangleMesh2d};
use fenris::model::{FiniteElementInterpolator, MakeInterpolator};
use fenris::nalgebra::{Point2, U2, U3};
use hamilton::storages::VecStorage;
use hamilton::Component;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PolyMesh2dComponent {
/// Mesh name used for the output file as: {entity_name}_{mesh_name}_polymesh_{sequence_name}.{file_extension}
pub mesh_name: String,
/// Optional subfolder (relative to output directory) for the output of the meshes
pub subfolder: Option<PathBuf>,
/// The polymesh to write to a file
pub mesh: PolyMesh2d<f64>,
/// Optional interpolator to interpolate the polymesh with on every output write
pub interpolator: Option<FiniteElementInterpolator<f64>>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PolyMesh3dComponent {
/// Mesh name used for the output file as: {entity_name}_{mesh_name}_polymesh_{sequence_name}.{file_extension}
pub mesh_name: String,
/// Optional subfolder (relative to output directory) for the output of the meshes
pub subfolder: Option<PathBuf>,
/// The polymesh to write to a file
pub mesh: PolyMesh3d<f64>,
/// Optional interpolator to interpolate the polymesh with on every output write
pub interpolator: Option<FiniteElementInterpolator<f64>>,
}
impl PolyMesh3dComponent {
/// Creates a PolyMesh3dComponent for static geometry
pub fn new<S: Into<String>>(mesh_name: S, mesh: PolyMesh3d<f64>) -> Self {
PolyMesh3dComponent {
mesh_name: mesh_name.into(),
subfolder: None,
mesh,
interpolator: None,
}
}
/// Attaches an interpolator to this PolyMesh3dComponent
pub fn with_interpolator<M: MakeInterpolator<f64, U3>>(mut self, model: &M) -> Result<Self, Box<dyn Error>> {
self.interpolator = Some(model.make_interpolator(self.mesh.vertices())?);
Ok(self)
}
/// Attaches a subfolder to this PolyMesh3dComponent
pub fn with_subfolder<P: Into<PathBuf>>(mut self, subfolder: P) -> Self {
self.subfolder = Some(subfolder.into());
self
}
}
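// A minimal usage sketch (the names `poly_mesh` and `fe_model` are hypothetical;
// the model is assumed to implement `MakeInterpolator<f64, U3>`):
//
// let comp = PolyMesh3dComponent::new("embedded", poly_mesh)
//     .with_subfolder("polymeshes")
//     .with_interpolator(&fe_model)?;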
impl PolyMesh2dComponent {
/// Creates a PolyMesh2dComponent for static geometry
pub fn new<S: Into<String>>(mesh_name: S, mesh: PolyMesh2d<f64>) -> Self {
PolyMesh2dComponent {
mesh_name: mesh_name.into(),
subfolder: None,
mesh,
interpolator: None,
}
} | pub fn with_interpolator<M: MakeInterpolator<f64, U2>>(mut self, model: &M) -> Result<Self, Box<dyn Error>> {
self.interpolator = Some(model.make_interpolator(self.mesh.vertices())?);
Ok(self)
}
/// Attaches a subfolder to this PolyMesh2dComponent
pub fn with_subfolder<P: Into<PathBuf>>(mut self, subfolder: P) -> Self {
self.subfolder = Some(subfolder.into());
self
}
}
/// Component storing interpolators for arbitrary 2D polymeshes
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PolyMesh2dCollection(pub Vec<PolyMesh2dComponent>);
/// Component storing interpolators for arbitrary 3D polymeshes
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PolyMesh3dCollection(pub Vec<PolyMesh3dComponent>);
impl Deref for PolyMesh2dCollection {
type Target = Vec<PolyMesh2dComponent>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Component for PolyMesh2dCollection {
type Storage = VecStorage<Self>;
}
impl Deref for PolyMesh3dCollection {
type Target = Vec<PolyMesh3dComponent>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Component for PolyMesh3dCollection {
type Storage = VecStorage<Self>;
}
/// Component storing an interpolator for a set of points
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct PointInterpolator {
pub reference_points: Vec<Point2<f64>>,
pub interpolator: FiniteElementInterpolator<f64>,
}
impl Component for PointInterpolator {
type Storage = VecStorage<Self>;
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
// TODO: Replace this with polygonal/polyhedral meshes later on
pub enum VolumeMesh2d {
QuadMesh(QuadMesh2d<f64>),
TriMesh(TriangleMesh2d<f64>),
}
impl From<QuadMesh2d<f64>> for VolumeMesh2d {
fn from(mesh: QuadMesh2d<f64>) -> Self {
Self::QuadMesh(mesh)
}
}
impl From<TriangleMesh2d<f64>> for VolumeMesh2d {
fn from(mesh: TriangleMesh2d<f64>) -> Self {
Self::TriMesh(mesh)
}
}
impl VolumeMesh2d {
pub fn vertices(&self) -> &[Point2<f64>] {
match self {
Self::QuadMesh(ref mesh) => mesh.vertices(),
Self::TriMesh(ref mesh) => mesh.vertices(),
}
}
pub fn vertices_mut(&mut self) -> &mut [Point2<f64>] {
match self {
Self::QuadMesh(ref mut mesh) => mesh.vertices_mut(),
Self::TriMesh(ref mut mesh) => mesh.vertices_mut(),
}
}
}
impl Component for VolumeMesh2d {
type Storage = VecStorage<Self>;
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SurfaceMesh2d(pub Mesh2d<f64, Segment2d2Connectivity>);
impl From<Mesh2d<f64, Segment2d2Connectivity>> for SurfaceMesh2d {
fn from(mesh: Mesh2d<f64, Segment2d2Connectivity>) -> Self {
Self(mesh)
}
}
impl Component for SurfaceMesh2d {
type Storage = VecStorage<Self>;
}
impl Deref for SurfaceMesh2d {
type Target = Mesh2d<f64, Segment2d2Connectivity>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for SurfaceMesh2d {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct VolumeMesh3d(pub PolyMesh3d<f64>);
impl Component for VolumeMesh3d {
type Storage = VecStorage<Self>;
}
impl Deref for VolumeMesh3d {
type Target = PolyMesh3d<f64>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for VolumeMesh3d {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<P> From<P> for VolumeMesh3d
where
P: Into<PolyMesh3d<f64>>,
{
fn from(into_poly: P) -> Self {
Self(into_poly.into())
}
} |
/// Attaches an interpolator to this PolyMesh2dComponent |
count_request_builder.go | package count
import (
i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f "github.com/microsoft/kiota-abstractions-go"
ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a "github.com/microsoftgraph/msgraph-sdk-go/models/odataerrors"
)
// CountRequestBuilder provides operations to count the resources in the collection.
type CountRequestBuilder struct {
// Path parameters for the request
pathParameters map[string]string
// The request adapter to use to execute the requests.
requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter
// Url template to use to build the URL for the current request builder
urlTemplate string
}
// CountRequestBuilderGetRequestConfiguration configuration for the request such as headers, query parameters, and middleware options.
type CountRequestBuilderGetRequestConfiguration struct {
// Request headers
Headers map[string]string
// Request options
Options []i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestOption
}
// NewCountRequestBuilderInternal instantiates a new CountRequestBuilder and sets the default values.
func NewCountRequestBuilderInternal(pathParameters map[string]string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*CountRequestBuilder) {
m := &CountRequestBuilder{
}
m.urlTemplate = "{+baseurl}/groups/{group%2Did}/team/channels/{channel%2Did}/members/$count";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
m.pathParameters = urlTplParams;
m.requestAdapter = requestAdapter;
return m
}
// NewCountRequestBuilder instantiates a new CountRequestBuilder and sets the default values.
func NewCountRequestBuilder(rawUrl string, requestAdapter i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestAdapter)(*CountRequestBuilder) {
urlParams := make(map[string]string)
urlParams["request-raw-url"] = rawUrl
return NewCountRequestBuilderInternal(urlParams, requestAdapter)
}
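// Illustrative call chain (the fluent accessor names are an assumption inferred
// from the URL template above, not verified against the generated SDK surface):
//
//	count, err := client.GroupsById(groupId).Team().ChannelsById(channelId).Members().Count().Get()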
// CreateGetRequestInformation get the number of the resource
func (m *CountRequestBuilder) CreateGetRequestInformation()(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
return m.CreateGetRequestInformationWithRequestConfiguration(nil);
}
// CreateGetRequestInformationWithRequestConfiguration get the number of the resource
func (m *CountRequestBuilder) CreateGetRequestInformationWithRequestConfiguration(requestConfiguration *CountRequestBuilderGetRequestConfiguration)(*i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.RequestInformation, error) {
requestInfo := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.GET
if requestConfiguration != nil |
return requestInfo, nil
}
// Get get the number of the resource
func (m *CountRequestBuilder) Get()(*int32, error) {
return m.GetWithRequestConfigurationAndResponseHandler(nil, nil);
}
// GetWithRequestConfigurationAndResponseHandler get the number of the resource
func (m *CountRequestBuilder) GetWithRequestConfigurationAndResponseHandler(requestConfiguration *CountRequestBuilderGetRequestConfiguration, responseHandler i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ResponseHandler)(*int32, error) {
requestInfo, err := m.CreateGetRequestInformationWithRequestConfiguration(requestConfiguration);
if err != nil {
return nil, err
}
errorMapping := i2ae4187f7daee263371cb1c977df639813ab50ffa529013b7437480d1ec0158f.ErrorMappings {
"4XX": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,
"5XX": ia572726a95efa92ddd544552cd950653dc691023836923576b2f4bf716cf204a.CreateODataErrorFromDiscriminatorValue,
}
res, err := m.requestAdapter.SendPrimitiveAsync(requestInfo, "int32", responseHandler, errorMapping)
if err != nil {
return nil, err
}
return res.(*int32), nil
}
| {
requestInfo.AddRequestHeaders(requestConfiguration.Headers)
requestInfo.AddRequestOptions(requestConfiguration.Options)
} |
resource_citrixadc_sslprofile_sslcipher_binding.go | package citrixadc
import (
"github.com/chiradeep/go-nitro/config/ssl"
"github.com/chiradeep/go-nitro/netscaler"
"github.com/hashicorp/terraform/helper/schema"
"fmt"
"log"
"strings"
)
func resourceCitrixAdcSslprofile_sslcipher_binding() *schema.Resource {
return &schema.Resource{
SchemaVersion: 1,
Create: createSslprofile_sslcipher_bindingFunc,
Read: readSslprofile_sslcipher_bindingFunc,
Delete: deleteSslprofile_sslcipher_bindingFunc,
Importer: &schema.ResourceImporter{
State: schema.ImportStatePassthrough,
},
Schema: map[string]*schema.Schema{
"ciphername": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"cipherpriority": &schema.Schema{
Type: schema.TypeInt, | },
"name": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
},
}
}
func createSslprofile_sslcipher_bindingFunc(d *schema.ResourceData, meta interface{}) error {
log.Printf("[DEBUG] citrixadc-provider: In createSslprofile_sslcipher_bindingFunc")
client := meta.(*NetScalerNitroClient).client
profileName := d.Get("name").(string)
cipherName := d.Get("ciphername").(string)
// Use `,` as the separator since it is invalid character for adc entity strings
bindingId := fmt.Sprintf("%s,%s", profileName, cipherName)
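// For example, name "tf_sslprofile" and ciphername "HIGH" yield the id
// "tf_sslprofile,HIGH", which the read and delete functions split back apart.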
sslprofile_sslcipher_binding := ssl.Sslprofilesslcipherbinding{
Ciphername: d.Get("ciphername").(string),
Cipherpriority: d.Get("cipherpriority").(int),
Name: d.Get("name").(string),
}
err := client.UpdateUnnamedResource(netscaler.Sslprofile_sslcipher_binding.Type(), &sslprofile_sslcipher_binding)
if err != nil {
return err
}
d.SetId(bindingId)
err = readSslprofile_sslcipher_bindingFunc(d, meta)
if err != nil {
log.Printf("[ERROR] netscaler-provider: ?? we just created this sslprofile_sslcipher_binding but we can't read it ?? %s", bindingId)
return nil
}
return nil
}
func readSslprofile_sslcipher_bindingFunc(d *schema.ResourceData, meta interface{}) error {
log.Printf("[DEBUG] citrixadc-provider: In readSslprofile_sslcipher_bindingFunc")
client := meta.(*NetScalerNitroClient).client
bindingId := d.Id()
idSlice := strings.Split(bindingId, ",")
if len(idSlice) < 2 {
return fmt.Errorf("Cannot deduce ciphername from id string")
}
if len(idSlice) > 2 {
return fmt.Errorf("Too many separators \",\" in id string")
}
profileName := idSlice[0]
cipherName := idSlice[1]
findParams := netscaler.FindParams{
ResourceType: "sslprofile_sslcipher_binding",
ResourceName: profileName,
ResourceMissingErrorCode: 3248,
}
dataArr, err := client.FindResourceArrayWithParams(findParams)
if err != nil {
if strings.Contains(err.Error(), "\"errorcode\": 3248") {
return nil
} else {
// Unexpected error
log.Printf("[DEBUG] citrixadc-provider: Error during FindResourceArrayWithParams %s", err.Error())
return err
}
}
// Resource is missing
if len(dataArr) == 0 {
log.Printf("[DEBUG] citrixadc-provider: FindResourceArrayWithParams returned empty array")
log.Printf("[WARN] citrixadc-provider: Clearing sslprofile_sslcipher_binding state %s", bindingId)
d.SetId("")
return nil
}
// Iterate through results to find the one with the right policy name
foundIndex := -1
for i, v := range dataArr {
if v["cipheraliasname"].(string) == cipherName {
foundIndex = i
break
}
}
// Resource is missing
if foundIndex == -1 {
log.Printf("[DEBUG] citrixadc-provider: FindResourceArrayWithParams cipher name not found in array")
log.Printf("[WARN] citrixadc-provider: Clearing sslprofile_sslcipher_binding state %s", bindingId)
d.SetId("")
return nil
}
// Fallthrough
data := dataArr[foundIndex]
d.Set("name", data["name"])
d.Set("ciphername", data["cipheraliasname"])
d.Set("cipherpriority", data["cipherpriority"])
return nil
}
func deleteSslprofile_sslcipher_bindingFunc(d *schema.ResourceData, meta interface{}) error {
log.Printf("[DEBUG] citrixadc-provider: In deleteSslprofile_sslcipher_bindingFunc")
client := meta.(*NetScalerNitroClient).client
bindingId := d.Id()
idSlice := strings.Split(bindingId, ",")
profileName := idSlice[0]
cipherName := idSlice[1]
args := make([]string, 0)
args = append(args, fmt.Sprintf("ciphername:%v", cipherName))
err := client.DeleteResourceWithArgs(netscaler.Sslprofile_sslcipher_binding.Type(), profileName, args)
if err != nil {
return err
}
d.SetId("")
return nil
} | Required: true,
ForceNew: true, |
test_view_password_change.py | from django.contrib.auth import views as auth_views
from django.contrib.auth.forms import PasswordChangeForm
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls import resolve, reverse
class PasswordChangeTests(TestCase):
def setUp(self):
username = 'john'
password = 'secret123'
User.objects.create_user(username=username, email='[email protected]', password=password)
url = reverse('password_change')
self.client.login(username=username, password=password)
self.response = self.client.get(url)
def test_status_code(self):
self.assertEqual(self.response.status_code, 200)
def test_url_resolves_correct_view(self):
view = resolve('/settings/password/')
self.assertEqual(view.func.view_class, auth_views.PasswordChangeView)
def test_csrf(self):
self.assertContains(self.response, 'csrfmiddlewaretoken')
def test_contains_form(self):
form = self.response.context.get('form')
self.assertIsInstance(form, PasswordChangeForm)
| def test_form_inputs(self):
'''
The view must contain four inputs: csrf, old_password, new_password1, new_password2
'''
self.assertContains(self.response, '<input', 4)
self.assertContains(self.response, 'type="password"', 3)
class LoginRequiredPasswordChangeTests(TestCase):
def test_redirection(self):
url = reverse('password_change')
login_url = reverse('login')
response = self.client.get(url)
self.assertRedirects(response, f'{login_url}?next={url}')
class PasswordChangeTestCase(TestCase):
'''
Base test case for form processing
accepts a `data` dict to POST to the view.
'''
def setUp(self, data=None):
data = data or {}
self.user = User.objects.create_user(username='john', email='[email protected]', password='old_password')
self.url = reverse('password_change')
self.client.login(username='john', password='old_password')
self.response = self.client.post(self.url, data)
class SuccessfulPasswordChangeTests(PasswordChangeTestCase):
def setUp(self):
super().setUp({
'old_password': 'old_password',
'new_password1': 'new_password',
'new_password2': 'new_password',
})
def test_redirection(self):
'''
A valid form submission should redirect the user
'''
self.assertRedirects(self.response, reverse('password_change_done'))
def test_password_changed(self):
'''
refresh the user instance from database to get the new password
hash updated by the change password view.
'''
self.user.refresh_from_db()
self.assertTrue(self.user.check_password('new_password'))
def test_user_authentication(self):
'''
Create a new request to an arbitrary page.
The resulting response should now have a `user` in its context, after a successful password change.
'''
response = self.client.get(reverse('home'))
user = response.context.get('user')
self.assertTrue(user.is_authenticated)
class InvalidPasswordChangeTests(PasswordChangeTestCase):
def test_status_code(self):
'''
An invalid form submission should return to the same page
'''
self.assertEqual(self.response.status_code, 200)
def test_form_errors(self):
form = self.response.context.get('form')
self.assertTrue(form.errors)
def test_didnt_change_password(self):
'''
refresh the user instance from the database to make
sure we have the latest data.
'''
self.user.refresh_from_db()
self.assertTrue(self.user.check_password('old_password')) | |
package.py | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack import *
class Sparskit(MakefilePackage):
"""SPARSKIT: A basic tool-kit for sparse matrix computations (Version 2).
Made by Yousef Saad, University of Minnesota.
"""
homepage = "https://www-users.cse.umn.edu/~saad/software/SPARSKIT/"
version('develop', sha256='ecdd0a9968d6b45153a328710a42fe87600f0bba0e3c53896090b8ae1c113b7a',
url='http://www-users.cs.umn.edu/~saad/software/SPARSKIT/SPARSKIT2.tar.gz')
# The library uses blas routine which needs to be known when the lib is used.
# A dependent package should add self.spec['blas'].libs.ld_flags
# at the end of its link line.
# But, as is, this package compiles without needing to know about it.
# depends_on('blas', type='run')
variant('pic', default=True,
description='Compile with position independent code.')
variant('debug', default=False,
description='Builds a debug version of the library')
# We provide the standard Make flags here:
# https://spack.readthedocs.io/en/latest/packaging_guide.html?highlight=flag_handler#compiler-flags
def flag_handler(self, name, flags):
spec = self.spec
if '+pic' in spec:
if name == 'fflags':
flags.append(self.compiler.fc_pic_flag)
if name == 'fflags':
if 'gfortran' in self.compiler.fc:
flags.append('-std=legacy')
flags.append('-Wall')
if '+debug' in spec:
if '-g' in self.compiler.debug_flags:
flags.append('-g')
if '-O0' in self.compiler.opt_flags:
flags.append('-O0')
elif '-O' in self.compiler.opt_flags:
flags.append('-O')
else:
if '-O3' in self.compiler.opt_flags:
flags.append('-O3')
elif '-O2' in self.compiler.opt_flags:
flags.append('-O2')
return (None, flags, None)
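# Illustrative result (an assumption, assuming gfortran as the Fortran compiler):
# a '+pic +debug' build ends up with fflags roughly equal to
# ['-fPIC', '-std=legacy', '-Wall', '-g', '-O0'].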
def | (self, spec, prefix):
mkfile = FileFilter('makefile')
mkfile.filter(r'^(OPT).*=.+', r'\1= -c $(FFLAGS)')
if os.path.exists('libskit.a'):
os.unlink('libskit.a')
def build(self, spec, prefix):
make('clean')
make('F77={0}'.format(spack_fc))
def install(self, spec, prefix):
mkdirp(prefix.lib)
install('libskit.*', prefix.lib)
@property
def libs(self):
return find_libraries(
"libskit*", root=self.prefix, shared=False, recursive=True
)
| edit |
log.py | # sqlalchemy/log.py
# Copyright (C) 2006-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# Includes alterations by Vinay Sajip [email protected]
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Logging control and utilities.
Control of logging for SA can be performed from the regular python logging
module. The regular dotted module namespace is used, starting at
'sqlalchemy'. For class-level logging, the class name is appended.
The "echo" keyword parameter, available on SQLA :class:`_engine.Engine`
and :class:`_pool.Pool` objects, corresponds to a logger specific to that
instance only.
"""
from __future__ import annotations
import logging
import sys
from typing import Any
from typing import Optional
from typing import overload
from typing import Set
from typing import Type
from typing import TypeVar
from typing import Union
from .util import py311
from .util import py38
from .util.typing import Literal
if py38:
STACKLEVEL = True
# needed as of py3.11.0b1
# #8019
STACKLEVEL_OFFSET = 2 if py311 else 1
else:
STACKLEVEL = False
STACKLEVEL_OFFSET = 0
_IT = TypeVar("_IT", bound="Identified")
_EchoFlagType = Union[None, bool, Literal["debug"]]
# set initial level to WARN. This is so that
# log statements don't occur in the absence of explicit
# logging being enabled for 'sqlalchemy'.
rootlogger = logging.getLogger("sqlalchemy")
if rootlogger.level == logging.NOTSET:
rootlogger.setLevel(logging.WARN)
def _add_default_handler(logger: logging.Logger) -> None:
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(
logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s")
)
logger.addHandler(handler)
_logged_classes: Set[Type["Identified"]] = set()
def _qual_logger_name_for_cls(cls: Type["Identified"]) -> str:
return (
getattr(cls, "_sqla_logger_namespace", None)
or cls.__module__ + "." + cls.__name__
)
def class_logger(cls: Type[_IT]) -> Type[_IT]:
logger = logging.getLogger(_qual_logger_name_for_cls(cls))
cls._should_log_debug = lambda self: logger.isEnabledFor( # type: ignore[assignment] # noqa: E501
logging.DEBUG
)
cls._should_log_info = lambda self: logger.isEnabledFor( # type: ignore[assignment] # noqa: E501
logging.INFO
)
cls.logger = logger
_logged_classes.add(cls)
return cls
_IdentifiedLoggerType = Union[logging.Logger, "InstanceLogger"]
class Identified:
__slots__ = ()
logging_name: Optional[str] = None
logger: _IdentifiedLoggerType
_echo: _EchoFlagType
def _should_log_debug(self) -> bool:
return self.logger.isEnabledFor(logging.DEBUG)
def _should_log_info(self) -> bool:
return self.logger.isEnabledFor(logging.INFO)
class InstanceLogger:
"""A logger adapter (wrapper) for :class:`.Identified` subclasses.
This allows multiple instances (e.g. Engine or Pool instances)
to share a logger, but have its verbosity controlled on a
per-instance basis.
The basic functionality is to return a logging level
which is based on an instance's echo setting.
Default implementation is:
'debug' -> logging.DEBUG
True -> logging.INFO
False -> Effective level of underlying logger (
logging.WARNING by default)
None -> same as False
"""
# Map echo settings to logger levels
_echo_map = {
None: logging.NOTSET,
False: logging.NOTSET,
True: logging.INFO,
"debug": logging.DEBUG,
}
_echo: _EchoFlagType
__slots__ = ("echo", "logger")
def __init__(self, echo: _EchoFlagType, name: str):
self.echo = echo
self.logger = logging.getLogger(name)
# if echo flag is enabled and no handlers,
# add a handler to the list
if self._echo_map[echo] <= logging.INFO and not self.logger.handlers:
_add_default_handler(self.logger)
#
# Boilerplate convenience methods
#
def debug(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate a debug call to the underlying logger."""
self.log(logging.DEBUG, msg, *args, **kwargs)
def info(self, msg: str, *args: Any, **kwargs: Any) -> None:
|
def warning(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate a warning call to the underlying logger."""
self.log(logging.WARNING, msg, *args, **kwargs)
warn = warning
def error(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""
Delegate an error call to the underlying logger.
"""
self.log(logging.ERROR, msg, *args, **kwargs)
def exception(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate an exception call to the underlying logger."""
kwargs["exc_info"] = 1
self.log(logging.ERROR, msg, *args, **kwargs)
def critical(self, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate a critical call to the underlying logger."""
self.log(logging.CRITICAL, msg, *args, **kwargs)
def log(self, level: int, msg: str, *args: Any, **kwargs: Any) -> None:
"""Delegate a log call to the underlying logger.
The level here is determined by the echo
flag as well as that of the underlying logger, and
logger._log() is called directly.
"""
# inline the logic from isEnabledFor(),
# getEffectiveLevel(), to avoid overhead.
if self.logger.manager.disable >= level:
return
selected_level = self._echo_map[self.echo]
if selected_level == logging.NOTSET:
selected_level = self.logger.getEffectiveLevel()
if level >= selected_level:
if STACKLEVEL:
kwargs["stacklevel"] = (
kwargs.get("stacklevel", 1) + STACKLEVEL_OFFSET
)
self.logger._log(level, msg, args, **kwargs)
def isEnabledFor(self, level: int) -> bool:
"""Is this logger enabled for level 'level'?"""
if self.logger.manager.disable >= level:
return False
return level >= self.getEffectiveLevel()
def getEffectiveLevel(self) -> int:
"""What's the effective level for this logger?"""
level = self._echo_map[self.echo]
if level == logging.NOTSET:
level = self.logger.getEffectiveLevel()
return level
def instance_logger(
instance: Identified, echoflag: _EchoFlagType = None
) -> None:
"""create a logger for an instance that implements :class:`.Identified`."""
if instance.logging_name:
name = "%s.%s" % (
_qual_logger_name_for_cls(instance.__class__),
instance.logging_name,
)
else:
name = _qual_logger_name_for_cls(instance.__class__)
instance._echo = echoflag # type: ignore
logger: Union[logging.Logger, InstanceLogger]
if echoflag in (False, None):
# if no echo setting or False, return a Logger directly,
# avoiding overhead of filtering
logger = logging.getLogger(name)
else:
# if a specified echo flag, return an EchoLogger,
# which checks the flag, overrides normal log
# levels by calling logger._log()
logger = InstanceLogger(echoflag, name)
instance.logger = logger # type: ignore
class echo_property:
__doc__ = """\
When ``True``, enable log output for this element.
This has the effect of setting the Python logging level for the namespace
of this element's class and object reference. A value of boolean ``True``
indicates that the loglevel ``logging.INFO`` will be set for the logger,
whereas the string value ``debug`` will set the loglevel to
``logging.DEBUG``.
"""
@overload
def __get__(
self, instance: Literal[None], owner: Type[Identified]
) -> echo_property:
...
@overload
def __get__(
self, instance: Identified, owner: Type[Identified]
) -> _EchoFlagType:
...
def __get__(
self, instance: Optional[Identified], owner: Type[Identified]
) -> Union[echo_property, _EchoFlagType]:
if instance is None:
return self
else:
return instance._echo
def __set__(self, instance: Identified, value: _EchoFlagType) -> None:
instance_logger(instance, echoflag=value)
| """Delegate an info call to the underlying logger."""
self.log(logging.INFO, msg, *args, **kwargs) |
label.go | // @generated AUTO GENERATED - DO NOT EDIT! 117d51fa2854b0184adc875246a35929bbbf0a91
// Copyright (c) 2018 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package labels
import (
"strings"
)
// Label represents an immutable label which consists of a list of names. Since a label is a list of names, labels
// form a hierarchy, e.g. the labels foo.bar and foo.baz can be thought of as being nested under the label foo.*.
type Label struct {
// names is a list of names, e.g. foo, bar and baz
names []string
// simpleName is a concatenation of names with . in between, e.g. foo.bar.baz
simpleName string
// wildcard is true iff the label contains a wildcard
wildcard bool
}
// NewLabel creates a new label from the given names.
func NewLabel(names ...string) *Label {
simpleName := strings.Join(names, ".")
return &Label{
names: names,
simpleName: simpleName,
wildcard: strings.Contains(simpleName, "*"),
}
}
// Wildcard returns true iff the label contains a wildcard.
func (label *Label) Wildcard() bool {
return label.wildcard
}
// Names returns a copy of the names in the label.
func (label *Label) Names() []string {
result := make([]string, 0, len(label.names))
result = append(result, label.names...)
return result
}
// String returns a concatenation of the names in the label with dot as a separator.
func (label *Label) String() string {
return label.simpleName
}
func (label *Label) nameMatch(name1, name2 string) bool {
if name1 == "*" || name2 == "*" {
return true
}
return name1 == name2
}
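// Illustrative behavior: NewLabel("foo", "*").Match(NewLabel("foo", "bar")) is
// true, while NewLabel("foo").Match(NewLabel("foo", "bar")) is false because
// the name lists differ in length.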
// Match returns true iff the label matches the other label or vice versa taking wildcards into account. | func (label *Label) Match(other *Label) bool {
if label == other {
return true
}
if !label.wildcard && !other.wildcard {
return label.simpleName == other.simpleName
}
if len(label.names) != len(other.names) {
return false
}
for i := range label.names {
if !label.nameMatch(label.names[i], other.names[i]) {
return false
}
}
return true
} | // When matching one label to another label then a wildcard will match any name in the other label at the same position. |
issue-29861.rs | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub trait MakeRef<'a> {
type Ref;
}
impl<'a, T: 'a> MakeRef<'a> for T {
type Ref = &'a T;
}
pub trait MakeRef2 {
type Ref2;
}
impl<'a, T: 'a> MakeRef2 for T {
//~^ ERROR the lifetime parameter `'a` is not constrained
type Ref2 = <T as MakeRef<'a>>::Ref;
}
fn foo() -> <String as MakeRef2>::Ref2 { &String::from("foo") }
fn | () {
println!("{}", foo());
}
| main |
bfs.go | package main
import "fmt"
func bfs(url string, maxDepth int) Set {
depth := 0
visited := NewSet()
queue := NewSet()
queue.Push(url)
// Note: depth counts processed URLs rather than true BFS levels, so
// maxDepth effectively bounds the number of pages visited.
for len(queue) > 0 && depth < maxDepth {
curr := queue.Pop()
if visited.Contains(curr) {
continue
}
visited.Push(curr)
for _, link := range parseUrl(curr) {
queue.Push(link)
}
depth++
fmt.Println("queue: ", queue)
fmt.Println("visited: ", visited)
}
return visited
}
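// Sketch of a call site (parseUrl is assumed to be defined elsewhere in this
// package):
//
//	visited := bfs("https://example.com", 100)
//	fmt.Println("crawled", len(visited), "pages")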
type Set []string
func NewSet() Set {
return make([]string, 0, 10)
}
func (s *Set) Push(value string) {
*s = append(*s, value)
}
func (s *Set) Pop() string {
if len(*s) > 0 |
return ""
}
func (s *Set) Contains(value string) bool {
for i := range *s {
if (*s)[i] == value {
return true
}
}
return false
}
func (s *Set) Extend(a Set) {
// Append every element of a that is not already present.
for _, v := range a {
if !s.Contains(v) {
s.Push(v)
}
}
}
| {
val := (*s)[0]
*s = (*s)[1:]
return val
} |
test_image_helpers.py | import pytest
from PIL import Image
from yoga.image import helpers
class Test_image_have_alpha(object):
@pytest.mark.parametrize(
"image_path",
[
"test/images/image1.jpg",
"test/images/unused-alpha.png",
"test/images/indexed.png",
"test/images/grayscale.png",
],
)
def test_image_without_alpha(self, image_path):
image = Image.open(image_path)
assert not helpers.image_have_alpha(image)
def test_image_with_alpha(self):
image = Image.open("test/images/alpha.png")
assert helpers.image_have_alpha(image)
@pytest.mark.parametrize(
"image_path, threshold, is_alpha",
[
("test/images/threshold.png", 0xEF, True),
("test/images/threshold.png", 0xE0, False),
],
)
def test_alpha_threshold(self, image_path, threshold, is_alpha):
image = Image.open("test/images/threshold.png")
if is_alpha:
assert helpers.image_have_alpha(image, threshold)
else:
assert not helpers.image_have_alpha(image, threshold)
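# Note (an inference from the parameters above, not verified against the
# fixture): threshold.png's most transparent pixel appears to have an alpha
# between 0xE0 and 0xEF, so moving the threshold across it flips the result.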
| [
("test/images/image1.jpg", "jpeg"),
("test/images/alpha.png", "png"),
],
)
def test_supported_image_format(self, image_path, expected_format):
image_bytes = open(image_path, "rb").read()
assert helpers.guess_image_format(image_bytes) == expected_format
def test_unsupported_image_format(self):
with open("test/images/alpha.svg", "rb") as f:
image_bytes = f.read()
with pytest.raises(ValueError):
helpers.guess_image_format(image_bytes) |
class Test_guess_image_format(object):
@pytest.mark.parametrize(
"image_path, expected_format", |
Section.js | import React from 'react';
import { View, Text, StyleSheet } from 'react-native';
export default function Section({ title, children }) {
return (
<View style={styles.container}>
<Text style={[styles.title]}>
{title}
</Text>
<View style={styles.separator} />
{children}
</View>
);
}
const styles = StyleSheet.create({
container: {
// flex: 1,
paddingBottom: 20,
},
title: {
paddingBottom: 10,
fontSize: 14,
alignSelf: 'center',
},
separator: {
height: 0.5, | marginBottom: 10,
backgroundColor: '#a6a6a6',
},
}); | |
V1RunKind.spec.js | // Copyright 2018-2021 Polyaxon, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* Polyaxon SDKs and REST API specification.
* Polyaxon SDKs and REST API specification.
*
* The version of the OpenAPI document: 1.14.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*
*/
(function(root, factory) {
if (typeof define === 'function' && define.amd) {
// AMD.
define(['expect.js', process.cwd()+'/src/index'], factory);
} else if (typeof module === 'object' && module.exports) {
// CommonJS-like environments that support module.exports, like Node.
factory(require('expect.js'), require(process.cwd()+'/src/index'));
} else {
// Browser globals (root is window)
factory(root.expect, root.PolyaxonSdk); | 'use strict';
var instance;
beforeEach(function() {
});
var getProperty = function(object, getter, property) {
// Use getter method if present; otherwise, get the property directly.
if (typeof object[getter] === 'function')
return object[getter]();
else
return object[property];
}
var setProperty = function(object, setter, property, value) {
// Use setter method if present; otherwise, set the property directly.
if (typeof object[setter] === 'function')
object[setter](value);
else
object[property] = value;
}
describe('V1RunKind', function() {
it('should create an instance of V1RunKind', function() {
// uncomment below and update the code to test V1RunKind
//var instance = new PolyaxonSdk.V1RunKind();
//expect(instance).to.be.a(PolyaxonSdk.V1RunKind);
});
});
})); | }
}(this, function(expect, PolyaxonSdk) { |
dynamic-grid-spec.js | /* eslint-disable no-unused-expressions */
// eslint-disable-next-line import/no-extraneous-dependencies
const resizeTo = require('terra-toolkit/lib/nightwatch/responsive-helpers').resizeTo;
module.exports = resizeTo(['medium'], {
'Displays a default dynamic-grid': (browser) => {
browser
.url(`${browser.launchUrl}/#/raw/tests/dynamic-grid/default-dynamic-grid`)
.expect.element('#defaultDynamicGrid').to.be.present; | browser.expect.element('#defaultDynamicGrid').to.have.css('grid-template-rows');
browser.expect.element('#region1').to.have.css('grid-row-start');
browser.expect.element('#region1').to.have.css('grid-column-start');
browser.expect.element('#region1').text.to.equal('R1');
browser.expect.element('#region2').to.have.css('grid-row-start');
browser.expect.element('#region2').to.have.css('grid-column-start');
browser.expect.element('#region2').text.to.equal('R2');
},
}); |
browser.expect.element('#defaultDynamicGrid').to.have.css('display').which.matches(/^(-ms-)?grid$/);
browser.expect.element('#defaultDynamicGrid').to.have.css('grid-gap');
browser.expect.element('#defaultDynamicGrid').to.have.css('grid-template-columns'); |
face.rs | use crate::errors::Error;
use crate::wire::EdgeIter;
use crate::*;
use rustc_hash::FxHashMap as HashMap;
impl<P, C, S> Face<P, C, S> {
/// Creates a new face by a wire.
/// # Failure
/// All wires in `boundaries` must be non-empty, simple and closed. If not, returns the following errors:
/// * If a wire is empty, then returns [`Error::EmptyWire`].
/// * If a wire is not closed, then returns [`Error::NotClosedWire`].
/// * If a wire is closed but not simple, then returns [`Error::NotSimpleWire`].
///
/// [`Error::EmptyWire`]: errors/enum.Error.html#variant.EmptyWire
/// [`Error::NotClosedWire`]: errors/enum.Error.html#variant.NotClosedWire
/// [`Error::NotSimpleWire`]: errors/enum.Error.html#variant.NotSimpleWire
/// # Examples
/// ```
/// # use truck_topology::*;
/// # use errors::Error;
/// let v = Vertex::news(&[(); 4]);
/// let mut wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[3], ()),
/// Edge::new(&v[3], &v[0], ()),
/// ]);
/// assert!(Face::try_new(vec![wire], ()).is_ok());
/// ```
#[inline(always)]
pub fn try_new(boundaries: Vec<Wire<P, C>>, surface: S) -> Result<Face<P, C, S>> {
for wire in &boundaries {
if wire.is_empty() {
return Err(Error::EmptyWire);
} else if !wire.is_closed() {
return Err(Error::NotClosedWire);
} else if !wire.is_simple() {
return Err(Error::NotSimpleWire);
}
}
if !Wire::disjoint_wires(&boundaries) {
Err(Error::NotSimpleWire)
} else {
Ok(Face::new_unchecked(boundaries, surface))
}
}
/// Creates a new face by a wire.
/// # Panic
/// All wires in `boundaries` must be non-empty, simple and closed.
#[inline(always)]
pub fn new(boundaries: Vec<Wire<P, C>>, surface: S) -> Face<P, C, S> {
Face::try_new(boundaries, surface).remove_try()
}
/// Creates a new face by a wire.
/// # Remarks
/// This method is prepared only for performance-critical development and is not recommended.
/// This method does NOT check the regularity conditions of `Face::try_new()`.
/// The programmer must guarantee this condition before using this method.
#[inline(always)]
pub fn new_unchecked(boundaries: Vec<Wire<P, C>>, surface: S) -> Face<P, C, S> {
Face {
boundaries,
orientation: true,
surface: Arc::new(Mutex::new(surface)),
}
}
/// Creates a new face by a wire.
/// # Remarks
/// This method checks the regularity conditions of `Face::try_new()` in debug mode.
/// The programmer must guarantee this condition before using this method.
#[inline(always)]
pub fn debug_new(boundaries: Vec<Wire<P, C>>, surface: S) -> Face<P, C, S> {
match cfg!(debug_assertions) {
true => Face::new(boundaries, surface),
false => Face::new_unchecked(boundaries, surface),
}
}
/// Returns the boundaries of the face.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(); 3]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let mut face = Face::new(vec![wire], ());
/// let boundaries = face.boundaries();
/// for (i, vert) in boundaries[0].vertex_iter().enumerate() {
/// assert_eq!(vert, v[i]);
/// }
///
/// // If the face is inverted, the boundaries are also inverted.
/// face.invert();
/// assert_eq!(boundaries[0].inverse(), face.boundaries()[0]);
/// ```
#[inline(always)]
pub fn boundaries(&self) -> Vec<Wire<P, C>> {
match self.orientation {
true => self.boundaries.clone(),
false => self.boundaries.iter().map(|wire| wire.inverse()).collect(),
}
}
/// Consumes `self` and returns the entity of its boundaries.
/// ```
/// # use truck_topology::*;
/// let v = Vertex::news(&[(), (), ()]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let mut face = Face::new(vec![wire], ());
/// let boundaries = face.clone().into_boundaries();
/// for (i, vert) in boundaries[0].vertex_iter().enumerate() {
/// assert_eq!(vert, v[i]);
/// }
///
/// // If the face is inverted, the boundaries are also inverted.
/// face.invert();
/// assert_eq!(boundaries[0].inverse(), face.into_boundaries()[0]);
/// ```
#[inline(always)]
pub fn into_boundaries(self) -> Vec<Wire<P, C>> {
match self.orientation {
true => self.boundaries,
false => self.boundaries(),
}
}
/// Returns the reference of the boundaries wire which is generated by constructor.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), ()]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let mut face = Face::new(vec![wire], ());
/// let boundaries = face.boundaries();
/// face.invert();
///
/// // The result of face.boundaries() is already inverted.
/// assert_eq!(face.boundaries()[0], boundaries[0].inverse());
///
/// // The absolute boundaries never change.
/// assert_eq!(face.absolute_boundaries(), &boundaries);
/// ```
#[inline(always)]
pub fn absolute_boundaries(&self) -> &Vec<Wire<P, C>> { &self.boundaries }
/// Returns an iterator over all edges in the boundaries.
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), ()]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let mut face = Face::new(vec![wire], ());
/// face.invert();
/// let boundaries = face.boundaries().clone();
/// let edge_iter0 = boundaries.iter().flat_map(Wire::edge_iter);
/// let edge_iter1 = face.boundary_iters().into_iter().flatten();
/// for (edge0, edge1) in edge_iter0.zip(edge_iter1) {
/// assert_eq!(edge0, &edge1);
/// }
/// ```
#[inline(always)]
pub fn boundary_iters(&self) -> Vec<BoundaryIter<P, C>> {
self.boundaries
.iter()
.map(|wire| BoundaryIter {
edge_iter: wire.edge_iter(),
orientation: self.orientation,
})
.collect()
}
#[inline(always)]
fn renew_pointer(&mut self)
where S: Clone {
let surface = self.get_surface();
self.surface = Arc::new(Mutex::new(surface));
}
/// Adds a boundary to the face.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), (), (), (), ()]);
/// let wire0 = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let wire1 = Wire::from(vec![
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[4], &v[5], ()),
/// Edge::new(&v[5], &v[3], ()),
/// ]);
/// let mut face0 = Face::new(vec![wire0.clone()], ());
/// face0.try_add_boundary(wire1.clone()).unwrap();
/// let face1 = Face::new(vec![wire0, wire1], ());
/// assert_eq!(face0.boundaries(), face1.boundaries());
/// ```
/// # Remarks
/// 1. If the face is inverted, then the added wire is inverted as an absolute boundary.
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), (), (), (), ()]);
/// let wire0 = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let wire1 = Wire::from(vec![
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[5], &v[4], ()).inverse(),
/// Edge::new(&v[5], &v[3], ()),
/// ]);
/// let mut face = Face::new(vec![wire0], ());
/// face.invert();
/// face.try_add_boundary(wire1.clone()).unwrap();
///
/// // The boundary is added in a way compatible with the face orientation.
/// assert_eq!(face.boundaries()[1], wire1);
///
/// // The absolute boundary is inverted!
/// let iter0 = face.absolute_boundaries()[1].edge_iter();
/// let iter1 = wire1.edge_iter().rev();
/// for (edge0, edge1) in iter0.zip(iter1) {
/// assert_eq!(edge0.id(), edge1.id());
/// assert_eq!(edge0.orientation(), !edge1.orientation());
/// }
/// ```
/// 2. This method renews the face id.
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), (), (), (), ()]);
/// let wire0 = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let wire1 = Wire::from(vec![
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[5], &v[4], ()).inverse(),
/// Edge::new(&v[5], &v[3], ()),
/// ]);
/// let mut face0 = Face::new(vec![wire0], ());
/// let face1 = face0.clone();
/// assert_eq!(face0.id(), face1.id());
/// face0.try_add_boundary(wire1).unwrap();
/// assert_ne!(face0.id(), face1.id());
/// ```
#[inline(always)]
pub fn try_add_boundary(&mut self, mut wire: Wire<P, C>) -> Result<()>
where S: Clone {
if wire.is_empty() {
return Err(Error::EmptyWire);
} else if !wire.is_closed() {
return Err(Error::NotClosedWire);
} else if !wire.is_simple() {
return Err(Error::NotSimpleWire);
}
if !self.orientation {
wire.invert();
}
self.boundaries.push(wire);
self.renew_pointer();
if !Wire::disjoint_wires(&self.boundaries) {
self.boundaries.pop();
return Err(Error::NotDisjointWires);
}
Ok(())
}
/// Adds a boundary to the face.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), (), (), (), ()]);
/// let wire0 = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let wire1 = Wire::from(vec![
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[4], &v[5], ()),
/// Edge::new(&v[5], &v[3], ()),
/// ]);
/// let mut face0 = Face::new(vec![wire0.clone()], ());
/// face0.add_boundary(wire1.clone());
/// let face1 = Face::new(vec![wire0, wire1], ());
/// assert_eq!(face0.boundaries(), face1.boundaries());
/// ```
/// # Remarks
/// 1. If the face is inverted, then the added wire is inverted as an absolute boundary.
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), (), (), (), ()]);
/// let wire0 = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let wire1 = Wire::from(vec![
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[5], &v[4], ()).inverse(),
/// Edge::new(&v[5], &v[3], ()),
/// ]);
/// let mut face = Face::new(vec![wire0], ());
/// face.invert();
/// face.add_boundary(wire1.clone());
///
/// // The boundary is added in a way compatible with the face orientation.
/// assert_eq!(face.boundaries()[1], wire1);
///
/// // The absolute boundary is inverted!
/// let iter0 = face.absolute_boundaries()[1].edge_iter();
/// let iter1 = wire1.edge_iter().rev();
/// for (edge0, edge1) in iter0.zip(iter1) {
/// assert_eq!(edge0.id(), edge1.id());
/// assert_eq!(edge0.orientation(), !edge1.orientation());
/// }
/// ```
/// 2. This method renews the face id.
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), (), (), (), ()]);
/// let wire0 = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let wire1 = Wire::from(vec![
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[5], &v[4], ()).inverse(),
/// Edge::new(&v[5], &v[3], ()),
/// ]);
/// let mut face0 = Face::new(vec![wire0], ());
/// let face1 = face0.clone();
/// assert_eq!(face0.id(), face1.id());
/// face0.add_boundary(wire1);
/// assert_ne!(face0.id(), face1.id());
/// ```
#[inline(always)]
pub fn add_boundary(&mut self, wire: Wire<P, C>)
where S: Clone {
self.try_add_boundary(wire).remove_try()
}
/// Returns a new face whose surface is mapped by `surface_mapping`,
/// curves are mapped by `curve_mapping` and points are mapped by `point_mapping`.
/// # Remarks
/// Accessing geometry elements directly in the closure will result in a deadlock.
/// So, this method does not appear to the document.
#[doc(hidden)]
pub fn try_mapped<Q, D, T>(
&self,
mut point_mapping: impl FnMut(&P) -> Option<Q>,
mut curve_mapping: impl FnMut(&C) -> Option<D>,
mut surface_mapping: impl FnMut(&S) -> Option<T>,
) -> Option<Face<Q, D, T>> {
let wires = self
.absolute_boundaries()
.iter()
.map(|wire| wire.try_mapped(&mut point_mapping, &mut curve_mapping))
.collect::<Option<Vec<_>>>()?;
let surface = surface_mapping(&*self.surface.lock().unwrap())?;
let mut face = Face::debug_new(wires, surface);
if !self.orientation() {
face.invert();
}
Some(face)
}
/// Returns a new face whose surface is mapped by `surface_mapping`,
/// curves are mapped by `curve_mapping` and points are mapped by `point_mapping`.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[0, 1, 2, 3, 4, 5, 6]);
/// let wire0 = Wire::from(vec![
/// Edge::new(&v[0], &v[1], 100),
/// Edge::new(&v[1], &v[2], 200),
/// Edge::new(&v[2], &v[3], 300),
/// Edge::new(&v[3], &v[0], 400),
/// ]);
/// let wire1 = Wire::from(vec![
/// Edge::new(&v[4], &v[5], 500),
/// Edge::new(&v[6], &v[5], 600).inverse(),
/// Edge::new(&v[6], &v[4], 700),
/// ]);
/// let face0 = Face::new(vec![wire0, wire1], 10000);
/// let face1 = face0.mapped(
/// &move |i: &usize| *i + 10,
/// &move |j: &usize| *j + 1000,
/// &move |k: &usize| *k + 100000,
/// );
/// # for wire in face1.boundaries() {
/// # assert!(wire.is_closed());
/// # assert!(wire.is_simple());
/// # }
///
/// assert_eq!(
/// face0.get_surface() + 100000,
/// face1.get_surface(),
/// );
/// let biters0 = face0.boundary_iters();
/// let biters1 = face1.boundary_iters();
/// for (biter0, biter1) in biters0.into_iter().zip(biters1) {
/// for (edge0, edge1) in biter0.zip(biter1) {
/// assert_eq!(
/// edge0.front().get_point() + 10,
/// edge1.front().get_point(),
/// );
/// assert_eq!(
/// edge0.back().get_point() + 10,
/// edge1.back().get_point(),
/// );
/// assert_eq!(edge0.orientation(), edge1.orientation());
/// assert_eq!(
/// edge0.get_curve() + 1000,
/// edge1.get_curve(),
/// );
/// }
/// }
/// ```
/// # Remarks
/// Accessing geometry elements directly in the closure will result in a deadlock.
/// So, this method does not appear to the document.
#[doc(hidden)]
pub fn mapped<Q, D, T>(
&self,
mut point_mapping: impl FnMut(&P) -> Q,
mut curve_mapping: impl FnMut(&C) -> D,
mut surface_mapping: impl FnMut(&S) -> T,
) -> Face<Q, D, T> {
let wires: Vec<_> = self
.absolute_boundaries()
.iter()
.map(|wire| wire.mapped(&mut point_mapping, &mut curve_mapping))
.collect();
let surface = surface_mapping(&*self.surface.lock().unwrap());
let mut face = Face::debug_new(wires, surface);
if !self.orientation() {
face.invert();
}
face
}
/// Returns the orientation of face.
///
/// The result of this method is the same as `self.boundaries() == self.absolute_boundaries().clone()`.
/// Moreover, if this method returns false, `self.boundaries() == self.absolute_boundaries().inverse()`.
#[inline(always)]
pub fn orientation(&self) -> bool { self.orientation }
/// Returns the clone of surface of face.
#[inline(always)]
pub fn get_surface(&self) -> S
where S: Clone {
self.surface.lock().unwrap().clone()
}
/// Sets the surface of face.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), ()]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let face0 = Face::new(vec![wire], 0);
/// let face1 = face0.clone();
///
/// // Two faces have the same content.
/// assert_eq!(face0.get_surface(), 0);
/// assert_eq!(face1.get_surface(), 0);
///
/// // Set surface
/// face0.set_surface(1);
///
/// // The contents of two vertices are synchronized.
/// assert_eq!(face0.get_surface(), 1);
/// assert_eq!(face1.get_surface(), 1);
/// ```
#[inline(always)]
pub fn set_surface(&self, surface: S) { *self.surface.lock().unwrap() = surface; }
/// Inverts the direction of the face.
/// # Examples
/// ```
/// use truck_topology::*;
/// use truck_topology::errors::Error;
/// let v = Vertex::news(&[(), (), ()]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let mut face = Face::new(vec![wire], ());
/// let org_face = face.clone();
/// let org_bdry = face.boundaries();
/// face.invert();
///
/// // Two faces are the same face.
/// face.is_same(&org_face);
///
/// // The boundaries are inverted.
/// let inversed_edge_iter = org_bdry[0].inverse().edge_into_iter();
/// let face_edge_iter = &mut face.boundary_iters()[0];
/// for (edge0, edge1) in inversed_edge_iter.zip(face_edge_iter) {
/// assert_eq!(edge0, edge1);
/// }
/// ```
#[inline(always)]
pub fn invert(&mut self) -> &mut Self {
self.orientation = !self.orientation;
self
}
/// Returns whether two faces are the same. Returns `true` even if the orientations are different.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(); 3]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let face0 = Face::new(vec![wire], ());
/// let face1 = face0.inverse();
/// assert_ne!(face0, face1);
/// assert!(face0.is_same(&face1));
/// ```
#[inline(always)]
pub fn is_same(&self, other: &Self) -> bool {
std::ptr::eq(Arc::as_ptr(&self.surface), Arc::as_ptr(&other.surface))
}
/// Returns the id that does not depend on the direction of the face.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(); 3]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let face0 = Face::new(vec![wire.clone()], ());
/// let face1 = face0.inverse();
/// let face2 = Face::new(vec![wire], ());
/// assert_ne!(face0, face1);
/// assert_ne!(face0, face2);
/// assert_eq!(face0.id(), face1.id());
/// assert_ne!(face0.id(), face2.id());
/// ```
#[inline(always)]
pub fn id(&self) -> FaceID<S> { ID::new(Arc::as_ptr(&self.surface)) }
/// Returns how many clones of this face exist, including itself.
///
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(); 3]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
///
/// // Create one face
/// let face0 = Face::new(vec![wire.clone()], ());
/// assert_eq!(face0.count(), 1);
/// // Create another face, independent from face0
/// let face1 = Face::new(vec![wire.clone()], ());
/// assert_eq!(face0.count(), 1);
/// // Clone face0, the result will be 2.
/// let face2 = face0.clone();
/// assert_eq!(face0.count(), 2);
/// assert_eq!(face2.count(), 2);
/// // drop face2, the result will be 1.
/// drop(face2);
/// assert_eq!(face0.count(), 1);
/// ```
#[inline(always)]
pub fn count(&self) -> usize { Arc::strong_count(&self.surface) }
/// Returns the inverse face.
/// # Examples
/// ```
/// use truck_topology::*;
/// use truck_topology::errors::Error;
/// let v = Vertex::news(&[(), (), ()]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let mut face = Face::new(vec![wire], ());
/// let inverted = face.inverse();
///
/// // Two faces are the same face.
/// assert!(face.is_same(&inverted));
///
/// // Two faces has the same id.
/// assert_eq!(face.id(), inverted.id());
///
/// // The boundaries are inverted.
/// let mut inversed_edge_iter = face.boundaries()[0].inverse().edge_into_iter();
/// let face_edge_iter = &mut inverted.boundary_iters()[0];
/// for (edge0, edge1) in inversed_edge_iter.zip(face_edge_iter) {
/// assert_eq!(edge0, edge1);
/// }
/// ```
#[inline(always)]
pub fn inverse(&self) -> Face<P, C, S> {
let mut face = self.clone();
face.invert();
face
}
/// Returns whether two faces `self` and `other` have a shared edge.
/// # Examples
/// ```
/// use truck_topology::*;
/// use std::iter::FromIterator;
/// let v = Vertex::news(&[(); 4]);
/// let shared_edge = Edge::new(&v[0], &v[1], ());
/// let another_edge = Edge::new(&v[0], &v[1], ());
/// let inversed_edge = shared_edge.inverse();
/// let wire = vec![
/// Wire::from_iter(vec![&Edge::new(&v[2], &v[0], ()), &shared_edge, &Edge::new(&v[1], &v[2], ())]),
/// Wire::from_iter(vec![&Edge::new(&v[2], &v[0], ()), &another_edge, &Edge::new(&v[1], &v[2], ())]),
/// Wire::from_iter(vec![&Edge::new(&v[3], &v[0], ()), &shared_edge, &Edge::new(&v[1], &v[3], ())]),
/// Wire::from_iter(vec![&Edge::new(&v[3], &v[1], ()), &inversed_edge, &Edge::new(&v[0], &v[3], ())]),
/// ];
/// let face: Vec<_> = wire.into_iter().map(|w| Face::new(vec![w], ())).collect();
/// assert!(face[0].border_on(&face[2]));
/// assert!(!face[1].border_on(&face[2]));
/// assert!(face[0].border_on(&face[3]));
/// ```
pub fn border_on(&self, other: &Face<P, C, S>) -> bool {
let mut hashmap = HashMap::default();
let edge_iter = self.boundary_iters().into_iter().flatten();
edge_iter.for_each(|edge| {
hashmap.insert(edge.id(), edge);
});
let mut edge_iter = other.boundary_iters().into_iter().flatten();
edge_iter.any(|edge| hashmap.insert(edge.id(), edge).is_some())
}
/// Cuts a face with only one boundary by an edge.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), (), ()]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[3], ()),
/// Edge::new(&v[3], &v[0], ()),
/// ]);
/// let mut face0 = Face::new(vec![wire], ());
///
/// let face1 = face0.cut_by_edge(Edge::new(&v[1], &v[3], ())).unwrap();
///
/// // The front vertex of face0's boundary becomes the back of the cutting edge.
/// let v0: Vec<Vertex<()>> = face0.boundaries()[0].vertex_iter().collect();
/// assert_eq!(v0, vec![v[3].clone(), v[0].clone(), v[1].clone()]);
///
/// let v1: Vec<Vertex<()>> = face1.boundaries()[0].vertex_iter().collect();
/// assert_eq!(v1, vec![v[1].clone(), v[2].clone(), v[3].clone()]);
/// ```
/// # Failures
/// Returns `None` if:
/// - `self` has several boundaries, or
    /// - `self` does not include both end vertices of `edge`.
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(); 6]);
/// let wire0 = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// ]);
/// let wire1 = Wire::from(vec![
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[4], &v[5], ()),
/// Edge::new(&v[5], &v[3], ()),
/// ]);
/// let mut face = Face::new(vec![wire0, wire1], ());
/// assert!(face.cut_by_edge(Edge::new(&v[1], &v[2], ())).is_none());
/// ```
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(), (), (), (), ()]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[3], ()),
/// Edge::new(&v[3], &v[0], ()),
/// ]);
/// let mut face0 = Face::new(vec![wire], ());
    /// assert!(face0.cut_by_edge(Edge::new(&v[1], &v[4], ())).is_none());
    /// ```
pub fn cut_by_edge(&mut self, edge: Edge<P, C>) -> Option<Self>
where S: Clone {
if self.boundaries.len() != 1 {
return None;
}
let wire = &mut self.boundaries[0];
let i = wire
.edge_iter()
.enumerate()
.find(|(_, e)| e.front() == edge.back())
.map(|(i, _)| i)?;
let j = wire
.edge_iter()
.enumerate()
.find(|(_, e)| e.back() == edge.front())
.map(|(i, _)| i)?;
wire.rotate_left(i);
let j = (j + wire.len() - i) % wire.len();
let mut new_wire = wire.split_off(j + 1);
wire.push_back(edge.clone());
new_wire.push_back(edge.inverse());
self.renew_pointer();
debug_assert!(Face::try_new(self.boundaries.clone(), ()).is_ok());
debug_assert!(Face::try_new(vec![new_wire.clone()], ()).is_ok());
Some(Face {
boundaries: vec![new_wire],
orientation: self.orientation,
surface: Arc::new(Mutex::new(self.get_surface())),
})
}
    /// Glues two faces along their boundaries.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(); 8]);
/// let edge = vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[4], &v[5], ()),
/// Edge::new(&v[5], &v[3], ()),
/// Edge::new(&v[6], &v[2], ()),
/// Edge::new(&v[1], &v[6], ()),
/// Edge::new(&v[7], &v[5], ()),
/// Edge::new(&v[4], &v[7], ()),
/// ];
/// let wire0 = Wire::from(vec![
/// edge[0].clone(),
/// edge[1].clone(),
/// edge[2].clone(),
/// ]);
/// let wire1 = Wire::from(vec![
/// edge[3].clone(),
/// edge[4].clone(),
/// edge[5].clone(),
/// ]);
/// let wire2 = Wire::from(vec![
/// edge[6].clone(),
/// edge[1].inverse(),
/// edge[7].clone(),
/// ]);
/// let wire3 = Wire::from(vec![
/// edge[8].clone(),
/// edge[4].inverse(),
/// edge[9].clone(),
/// ]);
/// let face0 = Face::new(vec![wire0, wire1], ());
/// let face1 = Face::new(vec![wire2, wire3], ());
/// let face = face0.glue_at_boundaries(&face1).unwrap();
/// let boundaries = face.boundary_iters();
/// assert_eq!(boundaries.len(), 2);
/// assert_eq!(boundaries[0].len(), 4);
/// assert_eq!(boundaries[1].len(), 4);
/// ```
pub fn glue_at_boundaries(&self, other: &Self) -> Option<Self>
where
S: Clone + PartialEq,
Wire<P, C>: Debug, {
let surface = self.get_surface();
if surface != other.get_surface() || self.orientation() != other.orientation() {
return None;
}
let mut vemap: HashMap<VertexID<P>, &Edge<P, C>> = self
.absolute_boundaries()
.iter()
.flatten()
.map(|edge| (edge.front().id(), edge))
.collect();
other
.absolute_boundaries()
.iter()
.flatten()
.try_for_each(|edge| {
if let Some(edge0) = vemap.get(&edge.back().id()) {
if edge.front() == edge0.back() {
if edge.is_same(edge0) {
vemap.remove(&edge.back().id());
return Some(());
} else {
return None;
}
}
}
vemap.insert(edge.front().id(), edge);
Some(())
})?;
if vemap.is_empty() {
return None;
}
let mut boundaries = Vec::new();
while !vemap.is_empty() {
let mut wire = Wire::new();
let v = *vemap.iter().next().unwrap().0;
let mut edge = vemap.remove(&v).unwrap();
wire.push_back(edge.clone());
while let Some(edge0) = vemap.remove(&edge.back().id()) {
wire.push_back(edge0.clone());
edge = edge0;
}
boundaries.push(wire);
}
debug_assert!(Face::try_new(boundaries.clone(), ()).is_ok());
Some(Face {
boundaries,
orientation: self.orientation(),
surface: Arc::new(Mutex::new(surface)),
})
}
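    // Sketch of the strategy above: the boundary edges of `self` are keyed by
    // their front vertex; an edge of `other` cancels a stored edge when both
    // run between the same vertices and are the same edge entity, and the
    // surviving edges are re-chained into new boundary wires by repeatedly
    // following back-vertex -> front-vertex links.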
/// Creates display struct for debugging the face.
///
/// # Examples
/// ```
/// use truck_topology::*;
/// use FaceDisplayFormat as FDF;
/// let v = Vertex::news(&[0, 1, 2, 3, 4, 5]);
/// let edge = vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[0], ()),
/// Edge::new(&v[3], &v[4], ()),
/// Edge::new(&v[4], &v[5], ()),
/// Edge::new(&v[5], &v[3], ()),
/// ];
/// let wire0 = Wire::from(vec![
/// edge[0].clone(),
/// edge[1].clone(),
/// edge[2].clone(),
/// ]);
/// let wire1 = Wire::from(vec![
/// edge[3].clone(),
/// edge[4].clone(),
/// edge[5].clone(),
/// ]);
/// let face = Face::new(vec![wire0, wire1], 120);
///
/// let vertex_format = VertexDisplayFormat::AsPoint;
/// let edge_format = EdgeDisplayFormat::VerticesTuple { vertex_format };
/// let wire_format = WireDisplayFormat::EdgesList { edge_format };
///
/// assert_eq!(
/// format!("{:?}", face.display(FDF::Full { wire_format })),
/// format!("Face {{ id: {:?}, boundaries: [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]], entity: 120 }}", face.id()),
/// );
/// assert_eq!(
/// format!("{:?}", face.display(FDF::BoundariesAndID { wire_format })),
/// format!("Face {{ id: {:?}, boundaries: [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]] }}", face.id()),
/// );
/// assert_eq!(
/// &format!("{:?}", face.display(FDF::BoundariesAndSurface { wire_format })),
/// "Face { boundaries: [[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]], entity: 120 }",
/// );
/// assert_eq!(
/// &format!("{:?}", face.display(FDF::LoopsListTuple { wire_format })),
/// "Face([[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]])",
/// );
/// assert_eq!(
/// &format!("{:?}", face.display(FDF::LoopsList { wire_format })),
/// "[[(0, 1), (1, 2), (2, 0)], [(3, 4), (4, 5), (5, 3)]]",
/// );
/// assert_eq!(
/// &format!("{:?}", face.display(FDF::AsSurface)),
/// "120",
/// );
/// ```
#[inline(always)]
pub fn display(&self, format: FaceDisplayFormat) -> DebugDisplay<Self, FaceDisplayFormat> {
DebugDisplay {
entity: self,
format,
}
}
}
impl<P, C, S: Clone + Invertible> Face<P, C, S> {
    /// Returns a clone of the surface of the face.
    /// If the face is inverted, then the returned surface is also inverted.
#[inline(always)]
pub fn oriented_surface(&self) -> S {
match self.orientation {
true => self.surface.lock().unwrap().clone(),
false => self.surface.lock().unwrap().inverse(),
}
}
}
impl<P, C, S> Face<P, C, S>
where
P: Tolerance,
C: ParametricCurve<Point = P>,
S: IncludeCurve<C>,
{
    /// Returns whether the geometry of the face is consistent, i.e. whether each
    /// boundary edge is geometrically consistent and its curve lies on the surface.
#[inline(always)]
pub fn is_geometric_consistent(&self) -> bool {
let surface = &*self.surface.lock().unwrap();
self.boundary_iters().into_iter().flatten().all(|edge| {
let edge_consist = edge.is_geometric_consistent();
let curve = &*edge.curve.lock().unwrap();
let curve_consist = surface.include(curve);
edge_consist && curve_consist
})
}
}
impl<P, C, S> Clone for Face<P, C, S> {
#[inline(always)]
    fn clone(&self) -> Face<P, C, S> {
        Face {
            boundaries: self.boundaries.clone(),
            orientation: self.orientation,
            surface: Arc::clone(&self.surface),
        }
    }
}
impl<P, C, S> PartialEq for Face<P, C, S> {
#[inline(always)]
fn eq(&self, other: &Self) -> bool {
std::ptr::eq(Arc::as_ptr(&self.surface), Arc::as_ptr(&other.surface))
&& self.orientation == other.orientation
}
}
impl<P, C, S> Eq for Face<P, C, S> {}
impl<P, C, S> Hash for Face<P, C, S> {
#[inline(always)]
fn hash<H: Hasher>(&self, state: &mut H) {
std::ptr::hash(Arc::as_ptr(&self.surface), state);
self.orientation.hash(state);
}
}
/// An iterator over the edges in the boundaries of a face.
/// # Examples
/// ```
/// use truck_topology::*;
/// let v = Vertex::news(&[(); 4]);
/// let wire = Wire::from(vec![
/// Edge::new(&v[0], &v[1], ()),
/// Edge::new(&v[1], &v[2], ()),
/// Edge::new(&v[2], &v[3], ()),
/// Edge::new(&v[3], &v[0], ()),
/// ]);
/// let face = Face::new(vec![wire.clone()], ());
///
/// let iter = &mut face.boundary_iters()[0];
/// assert_eq!(iter.next().as_ref(), Some(&wire[0]));
/// assert_eq!(iter.next_back().as_ref(), Some(&wire[3])); // double ended
/// assert_eq!(iter.next().as_ref(), Some(&wire[1]));
/// assert_eq!(iter.next().as_ref(), Some(&wire[2]));
/// assert_eq!(iter.next_back().as_ref(), None);
/// assert_eq!(iter.next().as_ref(), None); // fused
/// ```
#[derive(Clone, Debug)]
pub struct BoundaryIter<'a, P, C> {
edge_iter: EdgeIter<'a, P, C>,
orientation: bool,
}
impl<'a, P, C> Iterator for BoundaryIter<'a, P, C> {
type Item = Edge<P, C>;
#[inline(always)]
fn next(&mut self) -> Option<Edge<P, C>> {
match self.orientation {
true => self.edge_iter.next().cloned(),
false => self.edge_iter.next_back().map(|edge| edge.inverse()),
}
}
#[inline(always)]
fn size_hint(&self) -> (usize, Option<usize>) { (self.len(), Some(self.len())) }
#[inline(always)]
fn last(mut self) -> Option<Edge<P, C>> { self.next_back() }
}
impl<'a, P, C> DoubleEndedIterator for BoundaryIter<'a, P, C> {
#[inline(always)]
fn next_back(&mut self) -> Option<Edge<P, C>> {
match self.orientation {
true => self.edge_iter.next_back().cloned(),
false => self.edge_iter.next().map(|edge| edge.inverse()),
}
}
}
impl<'a, P, C> ExactSizeIterator for BoundaryIter<'a, P, C> {
#[inline(always)]
fn len(&self) -> usize { self.edge_iter.len() }
}
impl<'a, P, C> std::iter::FusedIterator for BoundaryIter<'a, P, C> {}
impl<'a, P: Debug, C: Debug, S: Debug> Debug
for DebugDisplay<'a, Face<P, C, S>, FaceDisplayFormat>
{
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
match self.format {
FaceDisplayFormat::Full { wire_format } => f
.debug_struct("Face")
.field("id", &self.entity.id())
.field(
"boundaries",
&self
.entity
.boundaries()
.iter()
.map(|wire| wire.display(wire_format))
.collect::<Vec<_>>(),
)
.field("entity", &MutexFmt(&self.entity.surface))
.finish(),
FaceDisplayFormat::BoundariesAndID { wire_format } => f
.debug_struct("Face")
.field("id", &self.entity.id())
.field(
"boundaries",
&self
.entity
.boundaries()
.iter()
.map(|wire| wire.display(wire_format))
.collect::<Vec<_>>(),
)
.finish(),
FaceDisplayFormat::BoundariesAndSurface { wire_format } => f
.debug_struct("Face")
.field(
"boundaries",
&self
.entity
.boundaries()
.iter()
.map(|wire| wire.display(wire_format))
.collect::<Vec<_>>(),
)
.field("entity", &MutexFmt(&self.entity.surface))
.finish(),
FaceDisplayFormat::LoopsListTuple { wire_format } => f
.debug_tuple("Face")
.field(
&self
.entity
.boundaries()
.iter()
.map(|wire| wire.display(wire_format))
.collect::<Vec<_>>(),
)
.finish(),
FaceDisplayFormat::LoopsList { wire_format } => f
.debug_list()
.entries(
self.entity
.boundaries()
.iter()
.map(|wire| wire.display(wire_format)),
)
.finish(),
FaceDisplayFormat::AsSurface => {
f.write_fmt(format_args!("{:?}", &MutexFmt(&self.entity.surface)))
}
}
}
}
#[test]
fn invert_mapped_face() {
let v = Vertex::news(&[0, 1, 2, 3, 4, 5, 6]);
let wire0 = Wire::from(vec![
Edge::new(&v[0], &v[1], 100),
Edge::new(&v[1], &v[2], 200),
Edge::new(&v[2], &v[3], 300),
Edge::new(&v[3], &v[0], 400),
]);
let wire1 = Wire::from(vec![
Edge::new(&v[4], &v[5], 500),
Edge::new(&v[6], &v[5], 600).inverse(),
Edge::new(&v[6], &v[4], 700),
]);
let face0 = Face::new(vec![wire0, wire1], 10000).inverse();
let face1 = face0.mapped(
&move |i: &usize| *i + 10,
&move |j: &usize| *j + 1000,
&move |k: &usize| *k + 100000,
);
assert_eq!(face0.get_surface() + 100000, face1.get_surface(),);
assert_eq!(face0.orientation(), face1.orientation());
let biters0 = face0.boundary_iters();
let biters1 = face1.boundary_iters();
for (biter0, biter1) in biters0.into_iter().zip(biters1) {
for (edge0, edge1) in biter0.zip(biter1) {
assert_eq!(edge0.front().get_point() + 10, edge1.front().get_point(),);
assert_eq!(edge0.back().get_point() + 10, edge1.back().get_point(),);
assert_eq!(edge0.get_curve() + 1000, edge1.get_curve(),);
}
}
}
main.rs | #![feature(proc_macro_hygiene)]
extern crate failure;
use std::io::Write;
use clap::{clap_app, crate_version};
use pulldown_cmark::{Parser, Options, html};
use std::fs;
use std::fmt;
use maud::html;
use std::fs::File;
use failure::{Fail};
#[derive(Debug, Fail)]
pub enum ErrorEnum {
#[fail(display = "Empty file provided - {} bytes", size)]
EmptyFile {
size: u32
},
#[fail(display = "Small file provided {} bytes", size)]
SmallFile {
size: u32
},
#[fail(display = "Medium size file provided {} bytes", size)]
MediumSizeFile {
size: u32
},
#[fail(display = "Large file provided {} bytes", size)]
LargeFile {
size: u32
}
}
pub struct AppErrors {
code: String,
reason: String,
file_size: ErrorEnum
}
impl fmt::Display for AppErrors {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "({}, {}, {})", self.code, self.reason, self.file_size)
}
}
impl fmt::Debug for AppErrors {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Debug ({}, {}, {})", self.code, self.reason, self.file_size)
}
}
impl Fail for AppErrors {
}
impl From<std::io::Error> for AppErrors {
fn from(error: std::io::Error) -> Self {
AppErrors {
code: "0".to_string(),
reason: error.to_string(),
file_size: ErrorEnum::EmptyFile {
size: 0
}
}
}
}
fn wrap_block_in_html(partial_html: &str, css: Option<&str>) -> String {
let res = html! {
(maud::DOCTYPE)
html {
head {
meta charset="utf-8";
@if let Some(css_link) = css {
link rel="stylesheet" type="text/css" href=(css_link) {}
}
}
body {
(maud::PreEscaped(partial_html))
}
}
};
res.into_string()
}
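// Illustrative: wrap_block_in_html("<h1>Hi</h1>", Some("style.css")) yields a
// full document roughly like
//   <!DOCTYPE html><html><head><meta charset="utf-8">
//   <link rel="stylesheet" type="text/css" href="style.css"></link>
//   </head><body><h1>Hi</h1></body></html>
// (maud controls the exact serialization and whitespace).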
pub fn read_file (input_filename: String) -> Result<String, AppErrors> {
let string = fs::read_to_string(input_filename)?;
Ok(string)
}
fn main() -> Result<(), AppErrors> {
let args_handler = clap_app!(pull_down_cmark =>
(version: crate_version!())
(author: "vadhri")
        (@arg input: -i --input +takes_value +multiple +required "The input filename to look for markup. Use spaces to separate multiple items.")
        (@arg events: -e --events "Prints each parser event (verbose; consider redirecting output to a file).")
        (@arg output: -o --output +takes_value "Output file to write the HTML to")
        (@arg css: -c --css +takes_value "Path to a CSS file to link in the output")
).get_matches();
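    // Example invocation (hypothetical file names):
    //   pull_down_cmark -i notes.md README.md -o out.html -c style.css
    // Each input produces its own output file, e.g. "notes.md_out.html".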
let input_filenames = args_handler.values_of("input").unwrap();
let events_print = args_handler.is_present("events");
let output_file_print = args_handler.is_present("output");
let input_css = args_handler.is_present("css");
for item in input_filenames {
println!("Processing ... {:?}", item.split('/').last().unwrap());
let infile = read_file(item.to_string())?;
let mut outcome = String::new();
let mut options = Options::empty();
options.insert(Options::ENABLE_STRIKETHROUGH);
options.insert(Options::ENABLE_TABLES);
options.insert(Options::ENABLE_FOOTNOTES);
let parser = Parser::new_ext(&infile, options);
if events_print {
for item in parser.clone().into_iter() {
println!("Event {:?}", item);
}
}
html::push_html(&mut outcome, parser);
if input_css {
outcome = wrap_block_in_html(&outcome, Some(args_handler.value_of("css").unwrap()));
}
if output_file_print {
let output_filename_provided = args_handler.value_of("output").unwrap().to_string();
let mut output_filename_augmented_for_input = item.split('/').last().unwrap().to_string();
output_filename_augmented_for_input.push('_');
output_filename_augmented_for_input.push_str(&output_filename_provided);
let mut output_file_fd = File::create(output_filename_augmented_for_input).unwrap();
let _ignore = output_file_fd.write_all(&outcome.into_bytes());
} else {
println!("{:?}", outcome);
}
}
Ok(())
}
sync.py | # Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import errno
import os
import uuid
from swift import gettext_ as _
from time import ctime, time
from random import choice, random
from struct import unpack_from
from eventlet import sleep, Timeout
import swift.common.db
from swift.common.db import DatabaseConnectionError
from swift.container.backend import ContainerBroker
from swift.container.sync_store import ContainerSyncStore
from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.internal_client import (
delete_object, put_object, head_object,
InternalClient, UnexpectedResponse)
from swift.common.exceptions import ClientException
from swift.common.ring import Ring
from swift.common.ring.utils import is_local_device
from swift.common.utils import (
clean_content_type, config_true_value,
FileLikeIter, get_logger, hash_path, quote, urlparse, validate_sync_to,
whataremyips, Timestamp, decode_timestamps)
from swift.common.daemon import Daemon
from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND
from swift.common.wsgi import ConfigString
# The default internal client config body is to support upgrades without
# requiring deployment of the new /etc/swift/internal-client.conf
ic_conf_body = """
[DEFAULT]
# swift_dir = /etc/swift
# user = swift
# You can specify default log routing here if you want:
# log_name = swift
# log_facility = LOG_LOCAL0
# log_level = INFO
# log_address = /dev/log
#
# comma separated list of functions to call to setup custom log handlers.
# functions get passed: conf, name, log_to_console, log_route, fmt, logger,
# adapted_logger
# log_custom_handlers =
#
# If set, log_udp_host will override log_address
# log_udp_host =
# log_udp_port = 514
#
# You can enable StatsD logging here:
# log_statsd_host =
# log_statsd_port = 8125
# log_statsd_default_sample_rate = 1.0
# log_statsd_sample_rate_factor = 1.0
# log_statsd_metric_prefix =
[pipeline:main]
pipeline = catch_errors proxy-logging cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
# See proxy-server.conf-sample for options
[filter:cache]
use = egg:swift#memcache
# See proxy-server.conf-sample for options
[filter:proxy-logging]
use = egg:swift#proxy_logging
[filter:catch_errors]
use = egg:swift#catch_errors
# See proxy-server.conf-sample for options
""".lstrip()
class ContainerSync(Daemon):
"""
Daemon to sync syncable containers.
This is done by scanning the local devices for container databases and
checking for x-container-sync-to and x-container-sync-key metadata values.
If they exist, newer rows since the last sync will trigger PUTs or DELETEs
to the other container.
The actual syncing is slightly more complicated to make use of the three
(or number-of-replicas) main nodes for a container without each trying to
do the exact same work but also without missing work if one node happens to
be down.
Two sync points are kept per container database. All rows between the two
sync points trigger updates. Any rows newer than both sync points cause
updates depending on the node's position for the container (primary nodes
do one third, etc. depending on the replica count of course). After a sync
run, the first sync point is set to the newest ROWID known and the second
sync point is set to newest ROWID for which all updates have been sent.
An example may help. Assume replica count is 3 and perfectly matching
ROWIDs starting at 1.
First sync run, database has 6 rows:
* SyncPoint1 starts as -1.
* SyncPoint2 starts as -1.
* No rows between points, so no "all updates" rows.
* Six rows newer than SyncPoint1, so a third of the rows are sent
by node 1, another third by node 2, remaining third by node 3.
* SyncPoint1 is set as 6 (the newest ROWID known).
* SyncPoint2 is left as -1 since no "all updates" rows were synced.
Next sync run, database has 12 rows:
* SyncPoint1 starts as 6.
* SyncPoint2 starts as -1.
* The rows between -1 and 6 all trigger updates (most of which
should short-circuit on the remote end as having already been
done).
* Six more rows newer than SyncPoint1, so a third of the rows are
sent by node 1, another third by node 2, remaining third by node
3.
* SyncPoint1 is set as 12 (the newest ROWID known).
* SyncPoint2 is set as 6 (the newest "all updates" ROWID).
In this way, under normal circumstances each node sends its share of
updates each run and just sends a batch of older updates to ensure nothing
was missed.
:param conf: The dict of configuration values from the [container-sync]
section of the container-server.conf
:param container_ring: If None, the <swift_dir>/container.ring.gz will be
loaded. This is overridden by unit tests.
"""
def __init__(self, conf, container_ring=None, logger=None):
#: The dict of configuration values from the [container-sync] section
#: of the container-server.conf.
self.conf = conf
#: Logger to use for container-sync log lines.
self.logger = logger or get_logger(conf, log_route='container-sync')
#: Path to the local device mount points.
self.devices = conf.get('devices', '/srv/node')
#: Indicates whether mount points should be verified as actual mount
#: points (normally true, false for tests and SAIO).
self.mount_check = config_true_value(conf.get('mount_check', 'true'))
#: Minimum time between full scans. This is to keep the daemon from
#: running wild on near empty systems.
self.interval = int(conf.get('interval', 300))
#: Maximum amount of time to spend syncing a container before moving on
#: to the next one. If a container sync hasn't finished in this time,
#: it'll just be resumed next scan.
self.container_time = int(conf.get('container_time', 60))
#: ContainerSyncCluster instance for validating sync-to values.
self.realms_conf = ContainerSyncRealms(
os.path.join(
conf.get('swift_dir', '/etc/swift'),
'container-sync-realms.conf'),
self.logger)
#: The list of hosts we're allowed to send syncs to. This can be
#: overridden by data in self.realms_conf
self.allowed_sync_hosts = [
h.strip()
for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',')
if h.strip()]
self.http_proxies = [
a.strip()
for a in conf.get('sync_proxy', '').split(',')
if a.strip()]
#: ContainerSyncStore instance for iterating over synced containers
self.sync_store = ContainerSyncStore(self.devices,
self.logger,
self.mount_check)
#: Number of containers with sync turned on that were successfully
#: synced.
self.container_syncs = 0
#: Number of successful DELETEs triggered.
self.container_deletes = 0
#: Number of successful PUTs triggered.
self.container_puts = 0
#: Number of containers whose sync has been turned off, but
#: are not yet cleared from the sync store.
self.container_skips = 0
#: Number of containers that had a failure of some type.
self.container_failures = 0
#: Time of last stats report.
self.reported = time()
self.swift_dir = conf.get('swift_dir', '/etc/swift')
#: swift.common.ring.Ring for locating containers.
self.container_ring = container_ring or Ring(self.swift_dir,
ring_name='container')
bind_ip = conf.get('bind_ip', '0.0.0.0')
self._myips = whataremyips(bind_ip)
self._myport = int(conf.get('bind_port', 6001))
swift.common.db.DB_PREALLOCATION = \
config_true_value(conf.get('db_preallocation', 'f'))
self.conn_timeout = float(conf.get('conn_timeout', 5))
request_tries = int(conf.get('request_tries') or 3)
internal_client_conf_path = conf.get('internal_client_conf_path')
if not internal_client_conf_path:
self.logger.warning(
_('Configuration option internal_client_conf_path not '
'defined. Using default configuration, See '
'internal-client.conf-sample for options'))
internal_client_conf = ConfigString(ic_conf_body)
else:
internal_client_conf = internal_client_conf_path
try:
self.swift = InternalClient(
internal_client_conf, 'Swift Container Sync', request_tries)
except IOError as err:
if err.errno != errno.ENOENT:
raise
raise SystemExit(
_('Unable to load internal client from config: %r (%s)') %
(internal_client_conf_path, err))
def run_forever(self, *args, **kwargs):
"""
Runs container sync scans until stopped.
"""
sleep(random() * self.interval)
while True:
begin = time()
for path in self.sync_store.synced_containers_generator():
self.container_sync(path)
if time() - self.reported >= 3600: # once an hour
                self.report()
elapsed = time() - begin
if elapsed < self.interval:
sleep(self.interval - elapsed)
def run_once(self, *args, **kwargs):
"""
Runs a single container sync scan.
"""
self.logger.info(_('Begin container sync "once" mode'))
begin = time()
for path in self.sync_store.synced_containers_generator():
self.container_sync(path)
if time() - self.reported >= 3600: # once an hour
self.report()
self.report()
elapsed = time() - begin
self.logger.info(
_('Container sync "once" mode completed: %.02fs'), elapsed)
def report(self):
"""
Writes a report of the stats to the logger and resets the stats for the
next report.
"""
self.logger.info(
_('Since %(time)s: %(sync)s synced [%(delete)s deletes, %(put)s '
'puts], %(skip)s skipped, %(fail)s failed'),
{'time': ctime(self.reported),
'sync': self.container_syncs,
'delete': self.container_deletes,
'put': self.container_puts,
'skip': self.container_skips,
'fail': self.container_failures})
self.reported = time()
self.container_syncs = 0
self.container_deletes = 0
self.container_puts = 0
self.container_skips = 0
self.container_failures = 0
def container_sync(self, path):
"""
Checks the given path for a container database, determines if syncing
is turned on for that database and, if so, sends any updates to the
other container.
:param path: the path to a container db
"""
broker = None
try:
broker = ContainerBroker(path)
# The path we pass to the ContainerBroker is a real path of
# a container DB. If we get here, however, it means that this
# path is linked from the sync_containers dir. In rare cases
            # of races or process failures the link can be stale and
# the get_info below will raise a DB doesn't exist exception
# In this case we remove the stale link and raise an error
# since in most cases the db should be there.
try:
info = broker.get_info()
except DatabaseConnectionError as db_err:
if str(db_err).endswith("DB doesn't exist"):
self.sync_store.remove_synced_container(broker)
raise
x, nodes = self.container_ring.get_nodes(info['account'],
info['container'])
for ordinal, node in enumerate(nodes):
if is_local_device(self._myips, self._myport,
node['ip'], node['port']):
break
else:
return
if not broker.is_deleted():
sync_to = None
user_key = None
sync_point1 = info['x_container_sync_point1']
sync_point2 = info['x_container_sync_point2']
for key, (value, timestamp) in broker.metadata.items():
if key.lower() == 'x-container-sync-to':
sync_to = value
elif key.lower() == 'x-container-sync-key':
user_key = value
if not sync_to or not user_key:
self.container_skips += 1
self.logger.increment('skips')
return
err, sync_to, realm, realm_key = validate_sync_to(
sync_to, self.allowed_sync_hosts, self.realms_conf)
if err:
self.logger.info(
_('ERROR %(db_file)s: %(validate_sync_to_err)s'),
{'db_file': str(broker),
'validate_sync_to_err': err})
self.container_failures += 1
self.logger.increment('failures')
return
stop_at = time() + self.container_time
next_sync_point = None
while time() < stop_at and sync_point2 < sync_point1:
rows = broker.get_items_since(sync_point2, 1)
if not rows:
break
row = rows[0]
if row['ROWID'] > sync_point1:
break
# This node will only initially sync out one third of the
# objects (if 3 replicas, 1/4 if 4, etc.) and will skip
# problematic rows as needed in case of faults.
# This section will attempt to sync previously skipped
# rows in case the previous attempts by any of the nodes
# didn't succeed.
if not self.container_sync_row(
row, sync_to, user_key, broker, info, realm,
realm_key):
if not next_sync_point:
next_sync_point = sync_point2
sync_point2 = row['ROWID']
broker.set_x_container_sync_points(None, sync_point2)
if next_sync_point:
broker.set_x_container_sync_points(None, next_sync_point)
while time() < stop_at:
rows = broker.get_items_since(sync_point1, 1)
if not rows:
break
row = rows[0]
key = hash_path(info['account'], info['container'],
row['name'], raw_digest=True)
# This node will only initially sync out one third of the
# objects (if 3 replicas, 1/4 if 4, etc.). It'll come back
# around to the section above and attempt to sync
# previously skipped rows in case the other nodes didn't
# succeed or in case it failed to do so the first time.
if unpack_from('>I', key)[0] % \
len(nodes) == ordinal:
self.container_sync_row(
row, sync_to, user_key, broker, info, realm,
realm_key)
sync_point1 = row['ROWID']
broker.set_x_container_sync_points(sync_point1, None)
self.container_syncs += 1
self.logger.increment('syncs')
except (Exception, Timeout):
self.container_failures += 1
self.logger.increment('failures')
self.logger.exception(_('ERROR Syncing %s'),
broker if broker else path)
def _update_sync_to_headers(self, name, sync_to, user_key,
realm, realm_key, method, headers):
"""
Updates container sync headers
:param name: The name of the object
:param sync_to: The URL to the remote container.
:param user_key: The X-Container-Sync-Key to use when sending requests
to the other container.
:param realm: The realm from self.realms_conf, if there is one.
If None, fallback to using the older allowed_sync_hosts
way of syncing.
:param realm_key: The realm key from self.realms_conf, if there
is one. If None, fallback to using the older
allowed_sync_hosts way of syncing.
:param method: HTTP method to create sig with
:param headers: headers to update with container sync headers
"""
if realm and realm_key:
nonce = uuid.uuid4().hex
path = urlparse(sync_to).path + '/' + quote(name)
sig = self.realms_conf.get_sig(method, path,
headers.get('x-timestamp', 0),
nonce, realm_key,
user_key)
headers['x-container-sync-auth'] = '%s %s %s' % (realm,
nonce,
sig)
else:
headers['x-container-sync-key'] = user_key
def _object_in_remote_container(self, name, sync_to, user_key,
realm, realm_key, timestamp):
"""
Performs head object on remote to eliminate extra remote put and
local get object calls
:param name: The name of the object in the updated row in the local
database triggering the sync update.
:param sync_to: The URL to the remote container.
:param user_key: The X-Container-Sync-Key to use when sending requests
to the other container.
:param realm: The realm from self.realms_conf, if there is one.
If None, fallback to using the older allowed_sync_hosts
way of syncing.
:param realm_key: The realm key from self.realms_conf, if there
is one. If None, fallback to using the older
allowed_sync_hosts way of syncing.
:param timestamp: last modified date of local object
:returns: True if object already exists in remote
"""
headers = {'x-timestamp': timestamp.internal}
self._update_sync_to_headers(name, sync_to, user_key, realm,
realm_key, 'HEAD', headers)
try:
metadata, _ = head_object(sync_to, name=name,
headers=headers,
proxy=self.select_http_proxy(),
logger=self.logger,
retries=0)
remote_ts = Timestamp(metadata.get('x-timestamp', 0))
self.logger.debug("remote obj timestamp %s local obj %s" %
(timestamp.internal, remote_ts.internal))
if timestamp <= remote_ts:
return True
# Object in remote should be updated
return False
except ClientException as http_err:
# Object not in remote
if http_err.http_status == 404:
return False
raise http_err
def container_sync_row(self, row, sync_to, user_key, broker, info,
realm, realm_key):
"""
Sends the update the row indicates to the sync_to container.
Update can be either delete or put.
:param row: The updated row in the local database triggering the sync
update.
:param sync_to: The URL to the remote container.
:param user_key: The X-Container-Sync-Key to use when sending requests
to the other container.
:param broker: The local container database broker.
:param info: The get_info result from the local container database
broker.
:param realm: The realm from self.realms_conf, if there is one.
If None, fallback to using the older allowed_sync_hosts
way of syncing.
:param realm_key: The realm key from self.realms_conf, if there
is one. If None, fallback to using the older
allowed_sync_hosts way of syncing.
:returns: True on success
"""
try:
start_time = time()
# extract last modified time from the created_at value
ts_data, ts_ctype, ts_meta = decode_timestamps(
row['created_at'])
if row['deleted']:
# when sync'ing a deleted object, use ts_data - this is the
# timestamp of the source tombstone
try:
headers = {'x-timestamp': ts_data.internal}
self._update_sync_to_headers(row['name'], sync_to,
user_key, realm, realm_key,
'DELETE', headers)
delete_object(sync_to, name=row['name'], headers=headers,
proxy=self.select_http_proxy(),
logger=self.logger,
timeout=self.conn_timeout)
except ClientException as err:
if err.http_status != HTTP_NOT_FOUND:
raise
self.container_deletes += 1
self.logger.increment('deletes')
self.logger.timing_since('deletes.timing', start_time)
else:
# when sync'ing a live object, use ts_meta - this is the time
# at which the source object was last modified by a PUT or POST
if self._object_in_remote_container(row['name'],
sync_to, user_key, realm,
realm_key, ts_meta):
return True
exc = None
# look up for the newest one
headers_out = {'X-Newest': True,
'X-Backend-Storage-Policy-Index':
str(info['storage_policy_index'])}
try:
source_obj_status, headers, body = \
self.swift.get_object(info['account'],
info['container'], row['name'],
headers=headers_out,
acceptable_statuses=(2, 4))
except (Exception, UnexpectedResponse, Timeout) as err:
headers = {}
body = None
exc = err
timestamp = Timestamp(headers.get('x-timestamp', 0))
if timestamp < ts_meta:
if exc:
raise exc
raise Exception(
_('Unknown exception trying to GET: '
'%(account)r %(container)r %(object)r'),
{'account': info['account'],
'container': info['container'],
'object': row['name']})
for key in ('date', 'last-modified'):
if key in headers:
del headers[key]
if 'etag' in headers:
headers['etag'] = headers['etag'].strip('"')
if 'content-type' in headers:
headers['content-type'] = clean_content_type(
headers['content-type'])
self._update_sync_to_headers(row['name'], sync_to, user_key,
realm, realm_key, 'PUT', headers)
put_object(sync_to, name=row['name'], headers=headers,
contents=FileLikeIter(body),
proxy=self.select_http_proxy(), logger=self.logger,
timeout=self.conn_timeout)
self.container_puts += 1
self.logger.increment('puts')
self.logger.timing_since('puts.timing', start_time)
except ClientException as err:
if err.http_status == HTTP_UNAUTHORIZED:
self.logger.info(
_('Unauth %(sync_from)r => %(sync_to)r'),
{'sync_from': '%s/%s' %
(quote(info['account']), quote(info['container'])),
'sync_to': sync_to})
elif err.http_status == HTTP_NOT_FOUND:
self.logger.info(
_('Not found %(sync_from)r => %(sync_to)r \
- object %(obj_name)r'),
{'sync_from': '%s/%s' %
(quote(info['account']), quote(info['container'])),
'sync_to': sync_to, 'obj_name': row['name']})
else:
self.logger.exception(
_('ERROR Syncing %(db_file)s %(row)s'),
{'db_file': str(broker), 'row': row})
self.container_failures += 1
self.logger.increment('failures')
return False
except (Exception, Timeout) as err:
self.logger.exception(
_('ERROR Syncing %(db_file)s %(row)s'),
{'db_file': str(broker), 'row': row})
self.container_failures += 1
self.logger.increment('failures')
return False
return True
def select_http_proxy(self):
return choice(self.http_proxies) if self.http_proxies else None
item.rs | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Walks the crate looking for items/impl-items/trait-items that have
//! either a `rustc_symbol_name` or `rustc_item_path` attribute and
//! generates an error giving, respectively, the symbol name or
//! item-path. This is used for unit testing the code that generates
//! paths etc in all kinds of annoying scenarios.
use monomorphize::Instance;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::session::config::OptLevel;
use rustc::ty::{self, Ty, TyCtxt, ClosureSubsts, GeneratorSubsts};
use rustc::ty::subst::Substs;
use syntax::ast;
use syntax::attr::InlineAttr;
use std::fmt::{self, Write};
use std::iter;
use rustc::mir::mono::Linkage;
use syntax_pos::symbol::Symbol;
use syntax::source_map::Span;
pub use rustc::mir::mono::MonoItem;
/// Describes how a monomorphization will be instantiated in object files.
#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
pub enum InstantiationMode {
/// There will be exactly one instance of the given MonoItem. It will have
/// external linkage so that it can be linked to from other codegen units.
GloballyShared {
/// In some compilation scenarios we may decide to take functions that
/// are typically `LocalCopy` and instead move them to `GloballyShared`
/// to avoid codegenning them a bunch of times. In this situation,
/// however, our local copy may conflict with other crates also
/// inlining the same function.
///
/// This flag indicates that this situation is occurring, and informs
/// symbol name calculation that some extra mangling is needed to
/// avoid conflicts. Note that this may eventually go away entirely if
/// ThinLTO enables us to *always* have a globally shared instance of a
/// function within one crate's compilation.
may_conflict: bool,
},
/// Each codegen unit containing a reference to the given MonoItem will
/// have its own private copy of the function (with internal linkage).
LocalCopy,
}
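// Illustrative mapping (see `instantiation_mode` below): the entry function and
// items with an explicit linkage attribute end up `GloballyShared`, while an
// inlined generic function typically becomes `LocalCopy` when inlining into all
// codegen units is enabled.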
pub trait MonoItemExt<'a, 'tcx>: fmt::Debug {
fn as_mono_item(&self) -> &MonoItem<'tcx>;
fn is_generic_fn(&self) -> bool {
match *self.as_mono_item() {
MonoItem::Fn(ref instance) => {
instance.substs.types().next().is_some()
}
MonoItem::Static(..) |
MonoItem::GlobalAsm(..) => false,
}
}
fn symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::SymbolName {
match *self.as_mono_item() {
MonoItem::Fn(instance) => tcx.symbol_name(instance),
MonoItem::Static(def_id) => {
tcx.symbol_name(Instance::mono(tcx, def_id))
}
MonoItem::GlobalAsm(node_id) => {
let def_id = tcx.hir.local_def_id(node_id);
ty::SymbolName {
name: Symbol::intern(&format!("global_asm_{:?}", def_id)).as_interned_str()
}
}
}
}
fn instantiation_mode(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> InstantiationMode {
let inline_in_all_cgus =
tcx.sess.opts.debugging_opts.inline_in_all_cgus.unwrap_or_else(|| {
tcx.sess.opts.optimize != OptLevel::No
}) && !tcx.sess.opts.cg.link_dead_code;
match *self.as_mono_item() {
MonoItem::Fn(ref instance) => {
let entry_def_id =
tcx.sess.entry_fn.borrow().map(|(id, _, _)| tcx.hir.local_def_id(id));
// If this function isn't inlined or otherwise has explicit
// linkage, then we'll be creating a globally shared version.
if self.explicit_linkage(tcx).is_some() ||
!instance.def.requires_local(tcx) ||
Some(instance.def_id()) == entry_def_id
{
return InstantiationMode::GloballyShared { may_conflict: false }
}
// At this point we don't have explicit linkage and we're an
// inlined function. If we're inlining into all CGUs then we'll
// be creating a local copy per CGU
if inline_in_all_cgus {
return InstantiationMode::LocalCopy
}
// Finally, if this is `#[inline(always)]` we're sure to respect
// that with an inline copy per CGU, but otherwise we'll be
// creating one copy of this `#[inline]` function which may
// conflict with upstream crates as it could be an exported
// symbol.
match tcx.codegen_fn_attrs(instance.def_id()).inline {
InlineAttr::Always => InstantiationMode::LocalCopy,
_ => {
InstantiationMode::GloballyShared { may_conflict: true }
}
}
}
MonoItem::Static(..) |
MonoItem::GlobalAsm(..) => {
InstantiationMode::GloballyShared { may_conflict: false }
}
}
}
fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Linkage> {
let def_id = match *self.as_mono_item() {
MonoItem::Fn(ref instance) => instance.def_id(),
MonoItem::Static(def_id) => def_id,
MonoItem::GlobalAsm(..) => return None,
};
let codegen_fn_attrs = tcx.codegen_fn_attrs(def_id);
codegen_fn_attrs.linkage
}
/// Returns whether this instance is instantiable - whether it has no unsatisfied
/// predicates.
///
/// In order to codegen an item, all of its predicates must hold, because
/// otherwise the item does not make sense. Type-checking ensures that
/// the predicates of every item that is *used by* a valid item *do*
/// hold, so we can rely on that.
///
/// However, we codegen collector roots (reachable items) and functions
/// in vtables when they are seen, even if they are not used, and so they
/// might not be instantiable. For example, a programmer can define this
/// public function:
///
/// pub fn foo<'a>(s: &'a mut ()) where &'a mut (): Clone {
/// <&mut () as Clone>::clone(&s);
/// }
///
/// That function can't be codegened, because the method `<&mut () as Clone>::clone`
/// does not exist. Luckily for us, that function can't ever be used,
/// because that would require for `&'a mut (): Clone` to hold, so we
/// can just not emit any code, or even a linker reference for it.
///
/// Similarly, if a vtable method has such a signature, and therefore can't
/// be used, we can just not emit it and have a placeholder (a null pointer,
/// which will never be accessed) in its place.
fn is_instantiable(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool {
debug!("is_instantiable({:?})", self);
let (def_id, substs) = match *self.as_mono_item() {
MonoItem::Fn(ref instance) => (instance.def_id(), instance.substs),
MonoItem::Static(def_id) => (def_id, Substs::empty()),
// global asm never has predicates
MonoItem::GlobalAsm(..) => return true
};
tcx.substitute_normalize_and_test_predicates((def_id, &substs))
}
fn to_string(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
return match *self.as_mono_item() {
MonoItem::Fn(instance) => {
to_string_internal(tcx, "fn ", instance)
},
MonoItem::Static(def_id) => {
let instance = Instance::new(def_id, tcx.intern_substs(&[]));
to_string_internal(tcx, "static ", instance)
},
MonoItem::GlobalAsm(..) => {
"global_asm".to_string()
}
};
fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
prefix: &str,
instance: Instance<'tcx>)
-> String {
let mut result = String::with_capacity(32);
result.push_str(prefix);
let printer = DefPathBasedNames::new(tcx, false, false);
printer.push_instance_as_string(instance, &mut result);
result
}
}
fn local_span(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option<Span> {
match *self.as_mono_item() {
MonoItem::Fn(Instance { def, .. }) => {
tcx.hir.as_local_node_id(def.def_id())
}
MonoItem::Static(def_id) => {
tcx.hir.as_local_node_id(def_id)
}
MonoItem::GlobalAsm(node_id) => {
Some(node_id)
}
}.map(|node_id| tcx.hir.span(node_id))
}
}
impl<'a, 'tcx> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> {
fn as_mono_item(&self) -> &MonoItem<'tcx> {
self
}
}
//=-----------------------------------------------------------------------------
// MonoItem String Keys
//=-----------------------------------------------------------------------------
// The code below allows for producing a unique string key for a mono item.
// These keys are used by the handwritten auto-tests, so they need to be
// predictable and human-readable.
//
// Note: A lot of this code looks very similar to what's already in the
// ppaux module. It would be good to refactor things so we only have one
// parameterizable implementation for printing types.
/// Helper for producing unique, predictable string names for items and types,
/// based on their def-paths; results are pushed onto a given `output` buffer.
pub struct DefPathBasedNames<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
omit_disambiguators: bool,
omit_local_crate_name: bool,
}
impl<'a, 'tcx> DefPathBasedNames<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
omit_disambiguators: bool,
omit_local_crate_name: bool)
-> Self {
DefPathBasedNames {
tcx,
omit_disambiguators,
omit_local_crate_name,
}
}
pub fn push_type_name(&self, t: Ty<'tcx>, output: &mut String) {
match t.sty {
ty::TyBool => output.push_str("bool"),
ty::TyChar => output.push_str("char"),
ty::TyStr => output.push_str("str"),
ty::TyNever => output.push_str("!"),
ty::TyInt(ast::IntTy::Isize) => output.push_str("isize"),
ty::TyInt(ast::IntTy::I8) => output.push_str("i8"),
ty::TyInt(ast::IntTy::I16) => output.push_str("i16"),
ty::TyInt(ast::IntTy::I32) => output.push_str("i32"),
ty::TyInt(ast::IntTy::I64) => output.push_str("i64"),
ty::TyInt(ast::IntTy::I128) => output.push_str("i128"),
ty::TyUint(ast::UintTy::Usize) => output.push_str("usize"),
ty::TyUint(ast::UintTy::U8) => output.push_str("u8"),
ty::TyUint(ast::UintTy::U16) => output.push_str("u16"),
ty::TyUint(ast::UintTy::U32) => output.push_str("u32"),
ty::TyUint(ast::UintTy::U64) => output.push_str("u64"),
ty::TyUint(ast::UintTy::U128) => output.push_str("u128"),
ty::TyFloat(ast::FloatTy::F32) => output.push_str("f32"),
ty::TyFloat(ast::FloatTy::F64) => output.push_str("f64"),
ty::TyAdt(adt_def, substs) => {
self.push_def_path(adt_def.did, output);
self.push_type_params(substs, iter::empty(), output);
},
ty::TyTuple(component_types) => {
output.push('(');
for &component_type in component_types {
self.push_type_name(component_type, output);
output.push_str(", ");
}
if !component_types.is_empty() {
output.pop();
output.pop();
}
output.push(')');
},
ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => {
output.push('*');
match mutbl {
hir::MutImmutable => output.push_str("const "),
hir::MutMutable => output.push_str("mut "),
}
self.push_type_name(inner_type, output);
},
ty::TyRef(_, inner_type, mutbl) => {
output.push('&');
if mutbl == hir::MutMutable {
output.push_str("mut ");
}
self.push_type_name(inner_type, output);
},
ty::TyArray(inner_type, len) => {
output.push('[');
self.push_type_name(inner_type, output);
write!(output, "; {}", len.unwrap_usize(self.tcx)).unwrap();
output.push(']');
},
ty::TySlice(inner_type) => {
output.push('[');
self.push_type_name(inner_type, output);
output.push(']');
},
ty::TyDynamic(ref trait_data, ..) => {
if let Some(principal) = trait_data.principal() {
self.push_def_path(principal.def_id(), output);
self.push_type_params(principal.skip_binder().substs,
trait_data.projection_bounds(),
output);
}
},
ty::TyForeign(did) => self.push_def_path(did, output),
ty::TyFnDef(..) |
ty::TyFnPtr(_) => {
let sig = t.fn_sig(self.tcx);
if sig.unsafety() == hir::Unsafety::Unsafe {
output.push_str("unsafe ");
}
let abi = sig.abi();
if abi != ::rustc_target::spec::abi::Abi::Rust {
output.push_str("extern \"");
output.push_str(abi.name());
output.push_str("\" ");
}
output.push_str("fn(");
let sig = self.tcx.normalize_erasing_late_bound_regions(
ty::ParamEnv::reveal_all(),
&sig,
);
if !sig.inputs().is_empty() {
for ¶meter_type in sig.inputs() {
self.push_type_name(parameter_type, output);
output.push_str(", ");
}
output.pop();
output.pop();
}
if sig.variadic {
if !sig.inputs().is_empty() {
output.push_str(", ...");
} else {
output.push_str("...");
}
}
output.push(')');
if !sig.output().is_nil() {
output.push_str(" -> ");
self.push_type_name(sig.output(), output);
}
},
ty::TyGenerator(def_id, GeneratorSubsts { ref substs }, _) |
ty::TyClosure(def_id, ClosureSubsts { ref substs }) => {
self.push_def_path(def_id, output);
let generics = self.tcx.generics_of(self.tcx.closure_base_def_id(def_id));
let substs = substs.truncate_to(self.tcx, generics);
self.push_type_params(substs, iter::empty(), output);
}
ty::TyError |
ty::TyInfer(_) |
ty::TyProjection(..) |
ty::TyParam(_) |
ty::TyGeneratorWitness(_) |
ty::TyAnon(..) => {
bug!("DefPathBasedNames: Trying to create type name for \
unexpected type: {:?}", t);
}
}
}
pub fn push_def_path(&self,
def_id: DefId,
                         output: &mut String) {
        let def_path = self.tcx.def_path(def_id);
        // some_crate::
        if !(self.omit_local_crate_name && def_id.is_local()) {
            output.push_str(&self.tcx.crate_name(def_path.krate).as_str());
            output.push_str("::");
        }
        // foo::bar::ItemName::
        for part in self.tcx.def_path(def_id).data {
            if self.omit_disambiguators {
                write!(output, "{}::", part.data.as_interned_str()).unwrap();
            } else {
                write!(output, "{}[{}]::",
                       part.data.as_interned_str(),
                       part.disambiguator).unwrap();
            }
        }
        // remove final "::"
        output.pop();
        output.pop();
    }
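    // Illustrative output (hypothetical item): for `fn bar` in module `foo` of
    // crate `my_crate`, push_def_path produces "my_crate::foo[0]::bar[0]", or
    // "my_crate::foo::bar" when `omit_disambiguators` is set.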
fn push_type_params<I>(&self,
substs: &Substs<'tcx>,
projections: I,
output: &mut String)
where I: Iterator<Item=ty::PolyExistentialProjection<'tcx>>
{
let mut projections = projections.peekable();
if substs.types().next().is_none() && projections.peek().is_none() {
return;
}
output.push('<');
for type_parameter in substs.types() {
self.push_type_name(type_parameter, output);
output.push_str(", ");
}
for projection in projections {
let projection = projection.skip_binder();
let name = &self.tcx.associated_item(projection.item_def_id).ident.as_str();
output.push_str(name);
output.push_str("=");
self.push_type_name(projection.ty, output);
output.push_str(", ");
}
output.pop();
output.pop();
output.push('>');
}
pub fn push_instance_as_string(&self,
instance: Instance<'tcx>,
output: &mut String) {
self.push_def_path(instance.def_id(), output);
self.push_type_params(instance.substs, iter::empty(), output);
}
}
progress.rs | //! Build progress tracking and reporting, for the purpose of display to the
//! user.
use std::collections::VecDeque;
use std::io::Write;
use std::time::Duration;
use std::time::Instant;
use crate::graph::Build;
use crate::graph::BuildId;
use crate::task::TaskResult;
use crate::task::Termination;
use crate::work::BuildState;
use crate::work::StateCounts;
#[cfg(unix)]
#[allow(clippy::uninit_assumed_init)]
pub fn get_terminal_cols() -> Option<usize> {
unsafe {
let mut winsize: libc::winsize = std::mem::MaybeUninit::uninit().assume_init();
if libc::ioctl(0, libc::TIOCGWINSZ, &mut winsize) < 0 {
return None;
}
Some(winsize.ws_col as usize)
}
}
#[cfg(windows)]
#[allow(clippy::uninit_assumed_init)]
pub fn get_terminal_cols() -> Option<usize> {
extern crate winapi;
extern crate kernel32;
use kernel32::{GetConsoleScreenBufferInfo, GetStdHandle};
let console = unsafe { GetStdHandle(winapi::um::winbase::STD_OUTPUT_HANDLE) };
if console == winapi::um::handleapi::INVALID_HANDLE_VALUE {
return None;
}
unsafe {
let mut csbi = ::std::mem::MaybeUninit::uninit().assume_init();
if GetConsoleScreenBufferInfo(console, &mut csbi) == 0 {
return None;
}
Some(csbi.dwSize.X as usize)
}
}
/// Compute the message to display on the console for a given build.
pub fn build_message(build: &Build) -> &str {
build
.desc
.as_ref()
.unwrap_or_else(|| build.cmdline.as_ref().unwrap())
}
/// Trait for build progress notifications.
pub trait Progress {
/// Called as individual build tasks progress through build states.
fn update(&mut self, counts: &StateCounts);
/// Called when we expect to be waiting for a while before another update.
fn flush(&mut self);
/// Called when a task starts or completes.
fn task_state(&mut self, id: BuildId, build: &Build, result: Option<&TaskResult>);
/// Called when the overall build has completed (success or failure), to allow
/// cleaning up the display.
fn finish(&mut self);
}
/// Currently running build task, as tracked for progress updates.
struct Task {
id: BuildId,
/// When the task started running.
start: Instant,
/// Build status message for the task.
message: String,
}
/// Console progress pretty-printer.
/// Each time it prints, it clears from the cursor to the end of the console,
/// prints the status text, and then moves the cursor back up to the
/// start position. This means on errors etc. we can clear any status by
/// clearing the console too.
pub struct ConsoleProgress {
/// Last time we updated the console, used to throttle updates.
last_update: Instant,
/// Counts of tasks in each state. TODO: pass this as function args?
counts: StateCounts,
/// Build tasks that are currently executing.
/// Pushed to as tasks are started, so it's always in order of age.
tasks: VecDeque<Task>,
/// Whether to print command lines of completed programs.
verbose: bool,
/// Whether to print a progress bar and currently running tasks.
fancy_terminal: bool,
}
#[allow(clippy::new_without_default)]
impl ConsoleProgress {
pub fn new(verbose: bool, fancy_terminal: bool) -> Self {
ConsoleProgress {
// Act like our last update was now, so that we delay slightly
// before our first print. This reduces flicker in the case where
// the work immediately completes.
last_update: Instant::now(),
counts: StateCounts::new(),
tasks: VecDeque::new(),
verbose,
fancy_terminal,
}
}
}
impl Progress for ConsoleProgress {
fn update(&mut self, counts: &StateCounts) {
self.counts = counts.clone();
self.maybe_print_progress();
}
fn task_state(&mut self, id: BuildId, build: &Build, result: Option<&TaskResult>) {
match result {
None => {
// Task starting.
let message = build_message(build);
self.tasks.push_back(Task {
id,
start: Instant::now(),
message: message.to_string(),
});
}
Some(result) => {
// Task completed.
self.tasks
.remove(self.tasks.iter().position(|t| t.id == id).unwrap());
self.print_result(build, result);
}
}
self.maybe_print_progress();
}
    fn flush(&mut self) {
self.print_progress();
}
fn finish(&mut self) {
self.clear_progress();
}
}
impl ConsoleProgress {
fn progress_bar(&self) -> String {
let bar_size = 40;
let mut bar = String::with_capacity(bar_size);
let mut sum: usize = 0;
let total = self.counts.total();
for (count, ch) in [
(
self.counts.get(BuildState::Done) + self.counts.get(BuildState::Failed),
'=',
),
(
self.counts.get(BuildState::Queued)
+ self.counts.get(BuildState::Running)
+ self.counts.get(BuildState::Ready),
'-',
),
(self.counts.get(BuildState::Want), ' '),
] {
sum += count;
while bar.len() <= (sum * bar_size / total) {
bar.push(ch);
}
}
bar
}
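    // Illustrative: midway through a build the 40-column bar reads roughly
    // "=============-------------              ", i.e. '=' for finished work,
    // '-' for queued/running/ready work, and spaces for work not yet started.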
fn clear_progress(&self) {
if !self.fancy_terminal {
return;
}
// If the user hit ctl-c, it may have printed something on the line.
// So \r to go to first column first, then clear anything below.
std::io::stdout().write_all(b"\r\x1b[J").unwrap();
}
fn print_progress(&self) {
if !self.fancy_terminal {
return;
}
self.clear_progress();
let failed = self.counts.get(BuildState::Failed);
let mut progress_line = format!(
"[{}] {}/{} done, ",
self.progress_bar(),
self.counts.get(BuildState::Done) + failed,
self.counts.total()
);
if failed > 0 {
progress_line.push_str(&format!("{} failed, ", failed));
}
progress_line.push_str(&format!(
"{}/{} running",
self.tasks.len(),
self.counts.get(BuildState::Queued) + self.tasks.len(),
));
println!("{}", progress_line);
let max_cols = get_terminal_cols().unwrap_or(80);
let mut lines = 1;
let max_lines = 8;
let now = Instant::now();
for task in self.tasks.iter().take(max_lines) {
if lines == max_lines && self.tasks.len() > max_lines {
println!("...and {} more", self.tasks.len() - max_lines + 1);
} else {
let delta = now.duration_since(task.start).as_secs();
let line = format!("{}s {}", delta, task.message);
if line.len() >= max_cols {
println!("{}...", &line[0..max_cols - 4]);
} else {
println!("{}", line);
}
}
lines += 1;
}
// Move cursor up to the first printed line, for overprinting.
print!("\x1b[{}A", lines);
}
fn maybe_print_progress(&mut self) {
let now = Instant::now();
let delta = now.duration_since(self.last_update);
if delta < Duration::from_millis(50) {
return;
}
self.print_progress();
self.last_update = now;
}
fn print_result(&mut self, build: &Build, result: &TaskResult) {
// By default we don't want to print anything when a task completes,
// but we do want to print the completed task when:
        // - the task failed
        // - we opted in to verbose output
        // - we aren't doing fancy terminal progress display
        // - the task had output (even in non-failing cases)
if result.termination == Termination::Success
&& !self.verbose
&& self.fancy_terminal
&& result.output.is_empty()
{
return;
}
self.clear_progress();
let status = match result.termination {
Termination::Success => "",
Termination::Interrupted => "interrupted: ",
Termination::Failure => "failed: ",
};
let message = if self.verbose {
build.cmdline.as_ref().unwrap()
} else {
build_message(build)
};
println!("{}{}", status, message);
if !result.output.is_empty() {
std::io::stdout().write_all(&result.output).unwrap();
}
}
}
base.py | # -*- coding: utf-8 -*-
"""
eve.io.base
~~~~~~~~~~~
Standard interface implemented by Eve data layers.
:copyright: (c) 2014 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
import datetime
import simplejson as json
from copy import copy
from flask import request, abort
from eve.utils import date_to_str
from eve.auth import auth_field_and_value
from eve.utils import config, debug_error_message, auto_fields
class BaseJSONEncoder(json.JSONEncoder):
""" Proprietary JSONEconder subclass used by the json render function.
This is needed to address the encoding of special values.
"""
def default(self, obj):
if isinstance(obj, datetime.datetime):
# convert any datetime to RFC 1123 format
return date_to_str(obj)
elif isinstance(obj, (datetime.time, datetime.date)):
            # this should not happen, since the only date-like format
            # supported at the domain schema level is 'datetime'.
return obj.isoformat()
return json.JSONEncoder.default(self, obj)
class ConnectionException(Exception):
""" Raised when DataLayer subclasses cannot find/activate to their
database connection.
:param driver_exception: the original exception raised by the source db
driver
"""
def __init__(self, driver_exception=None):
self.driver_exception = driver_exception
def __str__(self):
msg = ("Error initializing the driver. Make sure the database server"
"is running. ")
if self.driver_exception:
msg += "Driver exception: %s" % repr(self.driver_exception)
return msg
class DataLayer(object):
""" Base data layer class. Defines the interface that actual data-access
classes, being subclasses, must implement. Implemented as a Flask
extension.
Admittedly, this interface is a Mongo rip-off. See the io.mongo
package for an implementation example.
.. versionchanged:: 0.2
Allow subclasses to provide their own specialized json encoder.
.. versionchanged:: 0.1.1
'serializers' dictionary added.
.. versionchanged:: 0.1.0
Support for PUT method.
.. versionchanged:: 0.0.6
support for 'projections' has been added. For more information see
http://docs.mongodb.org/manual/reference/glossary/#term-projection.
While typically a MongoDB feature, other subclasses could decide to
provide support for their own projection syntax.
.. versionchanged:: 0.0.4
the _datasource helper function has been added.
"""
# if custom serialize functions are needed, add them to the 'serializers'
# dictionary, eg:
# serializers = {'objectid': ObjectId, 'datetime': serialize_date}
serializers = {}
# json.JSONEncoder subclass for serializing data to json.
# Subclasses should provide their own specialized encoder (see
# eve.io.mongo.MongoJSONEncoder).
json_encoder_class = BaseJSONEncoder
def __init__(self, app):
""" Implements the Flask extension pattern.
.. versionchanged:: 0.2
Explicit initialize self.driver to None.
"""
self.driver = None
if app is not None:
self.app = app
self.init_app(self.app)
else:
self.app = None
def init_app(self, app):
""" This is where you want to initialize the db driver so it will be
alive through the whole instance lifespan.
"""
raise NotImplementedError
def find(self, resource, req, sub_resource_lookup):
""" Retrieves a set of documents (rows), matching the current request.
Consumed when a request hits a collection/document endpoint
(`/people/`).
:param resource: resource being accessed. You should then use
the ``_datasource`` helper function to retrieve both
the db collection/table and base query (filter), if
any.
:param req: an instance of ``eve.utils.ParsedRequest``. This contains
all the constraints that must be fulfilled in order to
satisfy the original request (where and sort parts, paging,
                    etc). Be warned that `where` and `sort` expressions will
need proper parsing, according to the syntax that you want
to support with your driver. For example ``eve.io.Mongo``
supports both Python and Mongo-like query syntaxes.
:param sub_resource_lookup: sub-resource lookup from the endpoint url.
.. versionchanged:: 0.3
Support for sub-resources.
"""
raise NotImplementedError
def find_one(self, resource, req, **lookup):
""" Retrieves a single document/record. Consumed when a request hits an
item endpoint (`/people/id/`).
:param resource: resource being accessed. You should then use the
``_datasource`` helper function to retrieve both the
db collection/table and base query (filter), if any.
:param req: an instance of ``eve.utils.ParsedRequest``. This contains
all the constraints that must be fulfilled in order to
satisfy the original request (where and sort parts, paging,
etc). As we are going to only look for one document here,
the only req attribute that you want to process here is
``req.projection``.
:param **lookup: the lookup fields. This will most likely be a record
id or, if alternate lookup is supported by the API,
the corresponding query.
.. versionchanged:: 0.4
Added the 'req' argument.
"""
raise NotImplementedError
def find_one_raw(self, resource, _id):
""" Retrieves a single, raw document. No projections or datasource
filters are being applied here. Just looking up the document by unique
id.
:param resource: resource name.
:param id: unique id.
.. versionadded:: 0.4
"""
raise NotImplementedError
def find_list_of_ids(self, resource, ids, client_projection=None):
""" Retrieves a list of documents based on a list of primary keys
The primary key is the field defined in `ID_FIELD`.
This is a separate function to allow us to use per-database
optimizations for this type of query. | :param ids: a list of ids corresponding to the documents
to retrieve
:param client_projection: a specific projection to use
:return: a list of documents matching the ids in `ids` from the
collection specified in `resource`
.. versionadded:: 0.1.0
"""
raise NotImplementedError
def insert(self, resource, doc_or_docs):
""" Inserts a document into a resource collection/table.
:param resource: resource being accessed. You should then use
                         the ``_datasource`` helper function to retrieve
the actual datasource name.
:param doc_or_docs: json document or list of json documents to be added
to the database.
.. versionchanged:: 0.0.6
'document' param renamed to 'doc_or_docs', making support for bulk
inserts apparent.
"""
raise NotImplementedError
def update(self, resource, id_, updates):
""" Updates a collection/table document/row.
:param resource: resource being accessed. You should then use
the ``_datasource`` helper function to retrieve
the actual datasource name.
:param id_: the unique id of the document.
:param updates: json updates to be performed on the database document
(or row).
"""
raise NotImplementedError
def replace(self, resource, id_, document):
""" Replaces a collection/table document/row.
:param resource: resource being accessed. You should then use
the ``_datasource`` helper function to retrieve
the actual datasource name.
:param id_: the unique id of the document.
:param document: the new json document
.. versionadded:: 0.1.0
"""
raise NotImplementedError
def remove(self, resource, lookup={}):
""" Removes a document/row or an entire set of documents/rows from a
database collection/table.
:param resource: resource being accessed. You should then use
the ``_datasource`` helper function to retrieve
the actual datasource name.
:param lookup: a dict with the query that documents must match in order
to qualify for deletion. For single document deletes,
this is usually the unique id of the document to be
removed.
.. versionchanged:: 0.3
'_id' arg removed; replaced with 'lookup'.
"""
raise NotImplementedError
def combine_queries(self, query_a, query_b):
""" Takes two db queries and applies db-specific syntax to produce
the intersection.
.. versionadded: 0.1.0
Support for intelligent combination of db queries
"""
raise NotImplementedError
def get_value_from_query(self, query, field_name):
""" Parses the given potentially-complex query and returns the value
being assigned to the field given in `field_name`.
This mainly exists to deal with more complicated compound queries
.. versionadded: 0.1.0
Support for parsing values embedded in compound db queries
"""
raise NotImplementedError
def query_contains_field(self, query, field_name):
""" For the specified field name, does the query contain it?
        Used to know whether we need to parse a compound query.
.. versionadded: 0.1.0
Support for parsing values embedded in compound db queries
"""
raise NotImplementedError
def is_empty(self, resource):
""" Returns True if the collection is empty; False otherwise. While
a user could rely on self.find() method to achieve the same result,
this method can probably take advantage of specific datastore features
to provide better perfomance.
Don't forget, a 'resource' could have a pre-defined filter. If that is
the case, it will have to be taken into consideration when performing
the is_empty() check (see eve.io.mongo.mongo.py implementation).
:param resource: resource being accessed. You should then use
the ``_datasource`` helper function to retrieve
the actual datasource name.
.. versionadded: 0.3
"""
raise NotImplementedError
def _datasource(self, resource):
""" Returns a tuple with the actual name of the database
collection/table, base query and projection for the resource being
accessed.
:param resource: resource being accessed.
.. versionchanged:: 0.5
If allow_unknown is enabled for the resource, don't return any
projection for the document. Addresses #397 and #250.
.. versionchanged:: 0.4
Return copies to avoid accidental tampering. Fix #258.
.. versionchanged:: 0.2
Support for 'default_sort'.
"""
dsource = config.SOURCES[resource]
source = copy(dsource['source'])
filter_ = copy(dsource['filter'])
sort = copy(dsource['default_sort'])
# if allow_unknown is enabled for the resource, then don't return
# the default or client projection so all document fields can be
# returned to the client (regardless of the resource schema).
allow_unknown = config.DOMAIN[resource]['allow_unknown']
projection = copy(dsource['projection']) if not allow_unknown else None
return source, filter_, projection, sort,
def _datasource_ex(self, resource, query=None, client_projection=None,
client_sort=None):
""" Returns both db collection and exact query (base filter included)
to which an API resource refers to.
.. versionchanged:: 0.4
Always return required/auto fields (issue 282.)
.. versionchanged:: 0.3
Field exclusion support in client projections.
Honor auth_field even when client query is missing.
Only inject auth_field in queries when we are not creating new
documents.
            'auth_field' and 'request_auth_value' fetching is now delegated to
            auth.auth_field_and_value().
.. versionchanged:: 0.2
Difference between resource and item endpoints is now determined
by the presence of a '|' in request.endpoint.
Support for 'default_sort'.
.. versionchanged:: 0.1.1
auth.request_auth_value is now used to store the auth_field value.
.. versionchanged:: 0.1.0
Calls `combine_queries` to merge query and filter_
Updated logic performing `auth_field` check
.. versionchanged:: 0.0.9
Storing self.app.auth.userid in auth_field when 'user-restricted
resource access' is enabled.
Support for Python 3.3.
.. versionchanged:: 0.0.6
'auth_username_field' is injected even in empty queries.
Projection queries ('?projection={"name": 1}')
.. versionchanged:: 0.0.5
Support for 'user-restricted resource access'.
.. versionadded:: 0.0.4
"""
datasource, filter_, projection_, sort_ = self._datasource(resource)
if client_sort:
sort = client_sort
else:
# default sort is activated only if 'sorting' is enabled for the
# resource.
# TODO Consider raising a validation error on startup instead?
sort = sort_ if sort_ and config.DOMAIN[resource]['sorting'] else \
None
if filter_:
if query:
# Can't just dump one set of query operators into another
# e.g. if the dataset contains a custom datasource pattern
# 'filter': {'username': {'$exists': True}}
# and we try to filter on the field `username`,
# which is correct?
# Solution: call the db driver `combine_queries` operation
# which will apply db-specific syntax to produce the
# intersection of the two queries
query = self.combine_queries(query, filter_)
else:
query = filter_
fields = projection_
if client_projection:
# only allow fields which are included with the standard projection
# for the resource (avoid sniffing of private fields)
keep_fields = auto_fields(resource)
if 0 not in client_projection.values():
                # inclusive projection - all fields default to 0 unless auto or
                # explicitly specified below
fields = dict([(field, field in keep_fields) for field in
fields.keys()])
for field, value in client_projection.items():
field_base = field.split('.')[0]
if field_base not in keep_fields and field_base in fields:
fields[field] = value
fields = dict([(field, 1) for field, value in fields.items() if
value])
# If the current HTTP method is in `public_methods` or
# `public_item_methods`, skip the `auth_field` check
# Only inject the auth_field in the query when not creating new
# documents.
if request and request.method not in ('POST', 'PUT'):
auth_field, request_auth_value = auth_field_and_value(resource)
if auth_field and request.authorization and request_auth_value:
if query:
# If the auth_field *replaces* a field in the query,
# and the values are /different/, deny the request
# This prevents the auth_field condition from
# overwriting the query (issue #77)
auth_field_in_query = \
self.app.data.query_contains_field(query, auth_field)
if auth_field_in_query and \
self.app.data.get_value_from_query(
query, auth_field) != request_auth_value:
abort(401, description=debug_error_message(
'Incompatible User-Restricted Resource request. '
'Request was for "%s"="%s" but `auth_field` '
'requires "%s"="%s".' % (
auth_field,
self.app.data.get_value_from_query(
query, auth_field),
auth_field,
request_auth_value)
))
else:
query = self.app.data.combine_queries(
query, {auth_field: request_auth_value}
)
else:
query = {auth_field: request_auth_value}
return datasource, query, fields, sort |
:param resource: resource name. |
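# A minimal, hypothetical sketch of a DataLayer subclass backed by an in-memory
# dict, illustrating the contract documented above. The class name and storage
# layout are assumptions for illustration only; real subclasses (e.g.
# eve.io.mongo) are considerably more involved.
class MemoryDataLayer(DataLayer):
    def init_app(self, app):
        # "driver" maps a datasource name to a list of stored documents.
        self.driver = {}

    def find_one(self, resource, req, **lookup):
        datasource, filter_, projection, sort = self._datasource(resource)
        for doc in self.driver.get(datasource, []):
            if all(doc.get(k) == v for k, v in lookup.items()):
                return doc
        return None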
CreateTable.py | from Instrucciones.Declaracion import Declaracion
from Instrucciones.Sql_create.Tipo_Constraint import Tipo_Dato_Constraint
from Instrucciones.TablaSimbolos.Tipo import Tipo
from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.TablaSimbolos.Tabla import Tabla
from Instrucciones.Excepcion import Excepcion
from storageManager.jsonMode import *
from Instrucciones.Tablas.Tablas import Tablas
class CreateTable(Instruccion):
def __init__(self, tabla, tipo, campos, herencia, linea, columna):
Instruccion.__init__(self,tipo,linea,columna)
self.tabla = tabla
self.campos = campos
self.herencia = herencia
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
        # Scope for the table
        tablaLocal = Tabla(tabla)
        # Check that a database has been selected
        if arbol.bdUsar != None:
            # Check whether the table inherits from another table
            if self.herencia!=None:
                # Check whether the inherited table exists
htabla = arbol.devolverBaseDeDatos().getTabla(self.herencia)
if htabla != None:
tabla_temp=[]
                    # Walk all columns of the inherited table, merging duplicated fields
                    for campo_her in htabla.lista_de_campos:
                        bandera_campo=True
                        # enumerate tracks each column's position so the matching
                        # column is the one removed from self.campos
                        for indice, campo_nuevo in enumerate(self.campos):
                            if campo_her.nombre==campo_nuevo.nombre:
                                tabla_temp.append(campo_nuevo)
                                arbol.consola.append(f"NOTICE: mezclando la columna <<{campo_nuevo.nombre}>> con la definición heredada.")
                                self.campos.pop(indice)
                                bandera_campo=False
                                break
                        if bandera_campo:
                            tabla_temp.append(campo_her)
tabla_temp = tabla_temp + self.campos
self.campos= tabla_temp
else:
error = Excepcion(f"42P01","Semantico","No existe la relación <<{self.herencia}>>.",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return
            # Create a scope for the table
tablaNueva = Tablas(self.tabla,None)
            # Populate the in-memory table
for camp in self.campos:
if isinstance(camp.tipo,Tipo):
if camp.constraint != None:
for s in camp.constraint:
if s.tipo == Tipo_Dato_Constraint.CHECK:
objeto = Declaracion(camp.nombre, camp.tipo, s.expresion)
checkBueno = objeto.ejecutar(tablaLocal, arbol)
if not isinstance(checkBueno,Excepcion):
#tablaNueva.agregarColumna(camp.nombre,camp.tipo.toString(),None, camp.constraint)
#continue
pass
else:
#arbol.consola.append(checkBueno.toString())
return
tablaNueva.agregarColumna(camp.nombre,camp.tipo.toString(),None, camp.constraint)
else:
tablaNueva.agregarColumna(camp.nombre,camp.tipo,None, camp.constraint)
            # Create the table on disk
ctable = createTable(arbol.bdUsar,self.tabla,len(self.campos))
            if ctable==0: # the table was created successfully
arbol.consola.append(f"La Tabla: <<{self.tabla}>> se creo correctamente.")
arbol.agregarTablaABd(tablaNueva)
            elif ctable==3: # the table already exists
error = Excepcion("100","Semantico","La Tabla ya Existe.",self.linea,self.columna) | arbol.consola.append(error.toString())
            elif ctable==2: # the table could not be created due to an internal error
error = Excepcion("100","Semantico","Error Interno.",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
else:
error = Excepcion("100","Semantico","No ha seleccionado ninguna Base de Datos.",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
'''
instruccion = CreateTable("hola mundo",None, 1,2)
instruccion.ejecutar(None,None)
''' | arbol.excepciones.append(error) |
handler.go | package evidence
import (
sdk "github.com/mydexchain/chain-sdk/types"
sdkerrors "github.com/mydexchain/chain-sdk/types/errors"
"github.com/mydexchain/chain-sdk/x/evidence/keeper"
"github.com/mydexchain/chain-sdk/x/evidence/types"
)
// NewHandler returns a handler for evidence messages.
func NewHandler(k keeper.Keeper) sdk.Handler { |
return func(ctx sdk.Context, msg sdk.Msg) (*sdk.Result, error) {
ctx = ctx.WithEventManager(sdk.NewEventManager())
switch msg := msg.(type) {
case *types.MsgSubmitEvidence:
res, err := msgServer.SubmitEvidence(sdk.WrapSDKContext(ctx), msg)
return sdk.WrapServiceResult(ctx, res, err)
default:
return nil, sdkerrors.Wrapf(sdkerrors.ErrUnknownRequest, "unrecognized %s message type: %T", types.ModuleName, msg)
}
}
} | msgServer := keeper.NewMsgServerImpl(k) |
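// A hypothetical sketch (the function name is an assumption) showing how the
// handler returned by NewHandler above is typically invoked for one message.
func handleOne(ctx sdk.Context, k keeper.Keeper, msg sdk.Msg) (*sdk.Result, error) {
	handler := NewHandler(k)
	return handler(ctx, msg)
}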
main.py | import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from datetime import datetime
from api_request import Weather
builder = Gtk.Builder()
builder.add_from_file('./glade/main.glade')
class | :
def __init__(self, *args, **kwargs):
super(Handler, self).__init__(*args, **kwargs)
self.weather_instance = Weather()
self.entry = builder.get_object('entry')
self.btn_search = builder.get_object('btn_search')
self.city_name = builder.get_object('city_name')
self.city_text = builder.get_object('city_text')
self.main_temp = builder.get_object('main_temp')
self.which_temp_simbol_is = 'Celsius'
self.weekday_name = builder.get_object('weekday_name')
self.weekday_name_today = builder.get_object('weekday_name_today')
self.temp_today_max = builder.get_object('today_max')
self.temp_today_min = builder.get_object('today_min')
self.hour_1_now = builder.get_object('hour_1_now')
self.hour_1_chance_of_rain = builder.get_object('hour_1_chance_of_rain')
self.hour_1_icon = builder.get_object('hour_1_icon')
self.hour_1_temp = builder.get_object('hour_1_temp')
self.hour_2_clock = builder.get_object('hour_2_clock')
self.hour_2_chance_of_rain = builder.get_object('hour_2_chance_of_rain')
self.hour_2_icon = builder.get_object('hour_2_icon')
self.hour_2_temp = builder.get_object('hour_2_temp')
self.hour_3_clock = builder.get_object('hour_3_clock')
self.hour_3_chance_of_rain = builder.get_object('hour_3_chance_of_rain')
self.hour_3_icon = builder.get_object('hour_3_icon')
self.hour_3_temp = builder.get_object('hour_3_temp')
self.hour_4_clock = builder.get_object('hour_4_clock')
self.hour_4_chance_of_rain = builder.get_object('hour_4_chance_of_rain')
self.hour_4_icon = builder.get_object('hour_4_icon')
self.hour_4_temp = builder.get_object('hour_4_temp')
self.hour_5_clock = builder.get_object('hour_5_clock')
self.hour_5_chance_of_rain = builder.get_object('hour_5_chance_of_rain')
self.hour_5_icon = builder.get_object('hour_5_icon')
self.hour_5_temp = builder.get_object('hour_5_temp')
self.day_1_name = builder.get_object('day_1_name')
self.day_1_icon = builder.get_object('day_1_icon')
self.day_1_temp_max = builder.get_object('day_1_temp_max')
self.day_1_temp_min = builder.get_object('day_1_temp_min')
self.day_2_name = builder.get_object('day_2_name')
self.day_2_icon = builder.get_object('day_2_icon')
self.day_2_temp_max = builder.get_object('day_2_temp_max')
self.day_2_temp_min = builder.get_object('day_2_temp_min')
def onDestroy(self, *args):
Gtk.main_quit()
def on_button_search_clicked(self, widget):
        # now.strftime('%A') gives the weekday name
import re, unicodedata
word = unicodedata.normalize('NFD', self.entry.get_text())
word = re.sub('[\u0300-\u036f]', '', word)
try:
now = datetime.now()
current_hour = int(now.strftime('%H'))
current_search = self.weather_instance.get_weather_info(word, current_hour=current_hour)
self.city_name.set_text(current_search['location']['name'] + '/' + current_search['location']['region'])
self.city_text.set_text(current_search['current']['condition']['text'])
self.main_temp.set_text(str(int(current_search['current']['temp_c'])) + '°')
weekday = now.strftime('%A')
self.weekday_name.set_text(weekday)
self.weekday_name_today.set_text('Today')
today_max_temp = str(int(current_search['forecast']['forecastday'][0]['day']['maxtemp_c']))
today_min_temp = str(int(current_search['forecast']['forecastday'][0]['day']['mintemp_c']))
self.temp_today_max.set_text(today_max_temp)
self.temp_today_min.set_text(today_min_temp)
            ### Hourly information ######################################################
def is_available(increase: int) -> bool:
return not (current_hour + increase > 23)
if is_available(0):
self.hour_1_now.set_text('Now')
if int(chance_of_rain := current_search['forecast']['forecastday'][0]['hour'][current_hour]['chance_of_rain'])>0:
self.hour_1_chance_of_rain.set_text(str(chance_of_rain) + '%')
self.hour_1_temp.set_text(str(int(current_search['forecast']['forecastday'][0]['hour'][current_hour]['temp_c'])))
else:
self.hour_1_now.set_text('unavailable')
self.hour_1_temp.set_text('tomorrow')
self.hour_1_icon.set_from_file('./images/hour_icon/1.png')
if is_available(1):
self.hour_2_clock.set_text(str(int(now.strftime('%I'))+1) + now.strftime('%p'))
if int(chance_of_rain := current_search['forecast']['forecastday'][0]['hour'][current_hour+1]['chance_of_rain'])>0:
                    self.hour_2_chance_of_rain.set_text(str(chance_of_rain) + '%')
self.hour_2_temp.set_text(str(int(current_search['forecast']['forecastday'][0]['hour'][current_hour+1]['temp_c'])))
else:
self.hour_2_clock.set_text('unavailable')
self.hour_2_temp.set_text('tomorrow')
self.hour_2_icon.set_from_file('./images/hour_icon/2.png')
if is_available(2):
self.hour_3_clock.set_text(str(int(now.strftime('%I'))+2) + now.strftime('%p'))
if int(chance_of_rain := current_search['forecast']['forecastday'][0]['hour'][current_hour+2]['chance_of_rain'])>0:
self.hour_3_chance_of_rain.set_text(str(chance_of_rain) + '%')
self.hour_3_temp.set_text(str(int(current_search['forecast']['forecastday'][0]['hour'][current_hour+2]['temp_c'])))
else:
self.hour_3_clock.set_text('unavailable')
self.hour_3_temp.set_text('tomorrow')
self.hour_3_icon.set_from_file('./images/hour_icon/3.png')
if is_available(3):
self.hour_4_clock.set_text(str(int(now.strftime('%I'))+3) + now.strftime('%p'))
if int(chance_of_rain := current_search['forecast']['forecastday'][0]['hour'][current_hour+3]['chance_of_rain'])>0:
self.hour_4_chance_of_rain.set_text(str(chance_of_rain) + '%')
self.hour_4_temp.set_text(str(int(current_search['forecast']['forecastday'][0]['hour'][current_hour+3]['temp_c'])))
else:
self.hour_4_clock.set_text('unavailable')
self.hour_4_temp.set_text('tomorrow')
self.hour_4_icon.set_from_file('./images/hour_icon/4.png')
if is_available(4):
self.hour_5_clock.set_text(str(int(now.strftime('%I'))+4) + now.strftime('%p'))
                if int(chance_of_rain := current_search['forecast']['forecastday'][0]['hour'][current_hour+4]['chance_of_rain'])>0:
self.hour_5_chance_of_rain.set_text(str(chance_of_rain) + '%')
self.hour_5_temp.set_text(str(int(current_search['forecast']['forecastday'][0]['hour'][current_hour+4]['temp_c'])))
else:
self.hour_5_clock.set_text('unavailable')
self.hour_5_temp.set_text('tomorrow')
self.hour_5_icon.set_from_file('./images/hour_icon/5.png')
            ### Daily information ######################################################
self.day_1_name.set_text(datetime.fromisoformat(current_search['forecast']['forecastday'][1]['date']).strftime('%A'))
self.day_1_icon.set_from_file('./images/days_icon/1.png')
self.day_1_temp_max.set_text(str(int(current_search['forecast']['forecastday'][1]['day']['maxtemp_c'])))
self.day_1_temp_min.set_text(str(int(current_search['forecast']['forecastday'][1]['day']['mintemp_c'])))
self.day_2_name.set_text(datetime.fromisoformat(current_search['forecast']['forecastday'][2]['date']).strftime('%A'))
self.day_2_icon.set_from_file('./images/days_icon/2.png')
self.day_2_temp_max.set_text(str(int(current_search['forecast']['forecastday'][2]['day']['maxtemp_c'])))
self.day_2_temp_min.set_text(str(int(current_search['forecast']['forecastday'][2]['day']['mintemp_c'])))
except Exception as error:
print(f'error {error}')
builder.connect_signals(Handler())
window = builder.get_object('window')
window.show_all()
Gtk.main() | Handler |
index.js | /*
* Tencent is pleased to support the open source community by making
* Hippy available.
*
* Copyright (C) 2017-2019 THL A29 Limited, a Tencent company.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import AnimationComponent from './animation';
import DialogComponent from './dialog';
import ListRefreshComponent from './ul-refresh';
import SwiperComponent from './swiper';
import PullsComponents from './pulls';
import WaterfallComponent from './waterfall';
/**
* Register all of native components
*/
const HippyVueNativeComponents = {
install(Vue) {
AnimationComponent(Vue);
DialogComponent(Vue);
ListRefreshComponent(Vue);
SwiperComponent(Vue);
PullsComponents(Vue);
WaterfallComponent(Vue);
},
};
export default HippyVueNativeComponents;
// Export specific components for tree shaking.
export {
AnimationComponent,
DialogComponent,
ListRefreshComponent,
SwiperComponent,
PullsComponents, | WaterfallComponent,
}; |
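// A hypothetical usage sketch (the Vue import path is an assumption): install
// every native component through the plugin, or register one for a smaller bundle.
import Vue from 'vue';
import HippyVueNativeComponents, { SwiperComponent } from './index';

Vue.use(HippyVueNativeComponents); // register everything at once
// or, for finer-grained bundles: SwiperComponent(Vue);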
|
resource_aws_spot_datafeed_subscription_test.go | package aws
import (
"fmt"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/ec2"
"github.com/hashicorp/aws-sdk-go-base/tfawserr"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)
func TestAccAWSSpotDatafeedSubscription_serial(t *testing.T) {
cases := map[string]func(t *testing.T){
"basic": testAccAWSSpotDatafeedSubscription_basic,
"disappears": testAccAWSSpotDatafeedSubscription_disappears,
}
for name, tc := range cases {
t.Run(name, func(t *testing.T) {
tc(t)
})
}
}
func testAccAWSSpotDatafeedSubscription_basic(t *testing.T) {
var subscription ec2.SpotDatafeedSubscription
resourceName := "aws_spot_datafeed_subscription.test"
rName := acctest.RandomWithPrefix("tf-acc-test")
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSSpotDatafeedSubscription(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSSpotDatafeedSubscriptionDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSSpotDatafeedSubscription(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSSpotDatafeedSubscriptionExists(resourceName, &subscription),
),
},
{
ResourceName: resourceName,
ImportState: true,
ImportStateVerify: true,
},
},
})
}
func testAccCheckAWSSpotDatafeedSubscriptionDisappears(subscription *ec2.SpotDatafeedSubscription) resource.TestCheckFunc {
return func(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).ec2conn
_, err := conn.DeleteSpotDatafeedSubscription(&ec2.DeleteSpotDatafeedSubscriptionInput{})
if err != nil {
return err
}
return resource.Retry(40*time.Minute, func() *resource.RetryError {
_, err := conn.DescribeSpotDatafeedSubscription(&ec2.DescribeSpotDatafeedSubscriptionInput{})
if err != nil {
cgw, ok := err.(awserr.Error)
if ok && cgw.Code() == "InvalidSpotDatafeed.NotFound" {
return nil
}
return resource.NonRetryableError(
fmt.Errorf("Error retrieving Spot Datafeed Subscription: %s", err))
} | }
func testAccAWSSpotDatafeedSubscription_disappears(t *testing.T) {
var subscription ec2.SpotDatafeedSubscription
resourceName := "aws_spot_datafeed_subscription.test"
rName := acctest.RandomWithPrefix("tf-acc-test")
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t); testAccPreCheckAWSSpotDatafeedSubscription(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckAWSSpotDatafeedSubscriptionDestroy,
Steps: []resource.TestStep{
{
Config: testAccAWSSpotDatafeedSubscription(rName),
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSSpotDatafeedSubscriptionExists(resourceName, &subscription),
testAccCheckAWSSpotDatafeedSubscriptionDisappears(&subscription),
),
ExpectNonEmptyPlan: true,
},
},
})
}
func testAccCheckAWSSpotDatafeedSubscriptionExists(n string, subscription *ec2.SpotDatafeedSubscription) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("Not found: %s", n)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No policy ID is set")
}
conn := testAccProvider.Meta().(*AWSClient).ec2conn
resp, err := conn.DescribeSpotDatafeedSubscription(&ec2.DescribeSpotDatafeedSubscriptionInput{})
if err != nil {
return err
}
*subscription = *resp.SpotDatafeedSubscription
return nil
}
}
func testAccCheckAWSSpotDatafeedSubscriptionDestroy(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).ec2conn
for _, rs := range s.RootModule().Resources {
if rs.Type != "aws_spot_datafeed_subscription" {
continue
}
_, err := conn.DescribeSpotDatafeedSubscription(&ec2.DescribeSpotDatafeedSubscriptionInput{})
if tfawserr.ErrCodeEquals(err, "InvalidSpotDatafeed.NotFound") {
continue
}
if err != nil {
return fmt.Errorf("error descripting EC2 Spot Datafeed Subscription: %w", err)
}
}
return nil
}
func testAccPreCheckAWSSpotDatafeedSubscription(t *testing.T) {
conn := testAccProvider.Meta().(*AWSClient).ec2conn
input := &ec2.DescribeSpotDatafeedSubscriptionInput{}
_, err := conn.DescribeSpotDatafeedSubscription(input)
if testAccPreCheckSkipError(err) {
t.Skipf("skipping acceptance testing: %s", err)
}
if tfawserr.ErrCodeEquals(err, "InvalidSpotDatafeed.NotFound") {
return
}
if err != nil {
t.Fatalf("unexpected PreCheck error: %s", err)
}
}
func testAccAWSSpotDatafeedSubscription(rName string) string {
return fmt.Sprintf(`
data "aws_canonical_user_id" "current" {}
resource "aws_s3_bucket" "test" {
bucket = %[1]q
grant {
id = data.aws_canonical_user_id.current.id
permissions = ["FULL_CONTROL"]
type = "CanonicalUser"
}
grant {
id = "c4c1ede66af53448b93c283ce9448c4ba468c9432aa01d700d3878632f77d2d0" # EC2 Account
permissions = ["FULL_CONTROL"]
type = "CanonicalUser"
}
}
resource "aws_spot_datafeed_subscription" "test" {
bucket = aws_s3_bucket.test.bucket
}
`, rName)
} | return resource.RetryableError(fmt.Errorf("Waiting for Spot Datafeed Subscription"))
})
} |
VRMFirstPersonImporter.d.ts | import { GLTF } from 'three/examples/jsm/loaders/GLTFLoader';
import { VRMHumanoid } from '../humanoid';
import { VRMFirstPerson } from './VRMFirstPerson';
/**
* An importer that imports a [[VRMFirstPerson]] from a VRM extension of a GLTF.
*/
export declare class VRMFirstPersonImporter {
/**
* Import a [[VRMFirstPerson]] from a VRM.
*
* @param gltf A parsed result of GLTF taken from GLTFLoader | import(gltf: GLTF, humanoid: VRMHumanoid): Promise<VRMFirstPerson | null>;
} | * @param humanoid A [[VRMHumanoid]] instance that represents the VRM
*/ |
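// A hypothetical usage sketch: the surrounding loader code is assumed, but the
// import(gltf, humanoid) call matches the declaration above.
async function loadFirstPerson(
  gltf: GLTF,
  humanoid: VRMHumanoid,
): Promise<VRMFirstPerson | null> {
  const importer = new VRMFirstPersonImporter();
  return importer.import(gltf, humanoid);
}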
textarea.js | /*
Copyright (c) 2018 Uber Technologies, Inc.
This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
*/
// @flow
import * as React from 'react';
import type {TextareaPropsT} from './types.js';
import {mergeOverrides} from '../helpers/overrides.js';
import {BaseInput, SIZE, CUSTOM_INPUT_TYPE} from '../input/index.js';
import {
Textarea as StyledTextarea,
TextareaContainer as StyledTextareaContainer,
} from './styled-components.js';
class Textarea extends React.Component<TextareaPropsT> {
static defaultProps = {
autoFocus: false,
disabled: false,
error: false,
inputRef: (React.createRef(): {current: ?HTMLInputElement}),
name: '',
onBlur: () => {},
onChange: () => {},
onKeyDown: () => {},
onKeyPress: () => {},
onKeyUp: () => {},
onFocus: () => {},
overrides: {},
placeholder: '',
required: false,
rows: 3,
size: SIZE.default,
value: '',
};
render() {
const overrides = mergeOverrides(
{
Input: StyledTextarea,
InputContainer: StyledTextareaContainer,
},
this.props.overrides,
);
return (
<BaseInput
data-baseweb="textarea"
{...this.props}
type={CUSTOM_INPUT_TYPE.textarea}
overrides={overrides}
/>
);
} | export default Textarea; | }
|
config.rs | // Copyright (c) Microsoft. All rights reserved.
use failure::ResultExt;
use docker::models::{AuthConfig, ContainerCreateBody};
use edgelet_utils::{ensure_not_empty_with_context, serde_clone};
use crate::error::{ErrorKind, Result};
#[derive(Debug, serde_derive::Serialize, serde_derive::Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct DockerConfig {
image: String,
#[serde(skip_serializing_if = "Option::is_none")]
#[serde(rename = "imageHash")]
image_id: Option<String>,
#[serde(default = "ContainerCreateBody::new")]
create_options: ContainerCreateBody,
#[serde(skip_serializing_if = "Option::is_none")]
auth: Option<AuthConfig>,
}
impl DockerConfig {
pub fn new(
image: String,
create_options: ContainerCreateBody,
auth: Option<AuthConfig>,
) -> Result<Self> {
ensure_not_empty_with_context(&image, || ErrorKind::InvalidImage(image.clone()))?;
let config = DockerConfig {
image,
image_id: None,
create_options,
auth,
};
Ok(config)
}
pub fn clone_create_options(&self) -> Result<ContainerCreateBody> {
Ok(serde_clone(&self.create_options).context(ErrorKind::CloneCreateOptions)?)
}
pub fn image(&self) -> &str {
&self.image
}
pub fn with_image(mut self, image: String) -> Self {
self.image = image;
self
}
pub fn image_id(&self) -> Option<&str> {
self.image_id.as_ref().map(AsRef::as_ref)
}
pub fn with_image_id(mut self, image_id: String) -> Self {
self.image_id = Some(image_id);
self
}
pub fn create_options(&self) -> &ContainerCreateBody {
&self.create_options
}
pub fn with_create_options(mut self, create_options: ContainerCreateBody) -> Self {
self.create_options = create_options;
self
}
pub fn set_create_options(&mut self, create_options: ContainerCreateBody) {
self.create_options = create_options;
}
pub fn auth(&self) -> Option<&AuthConfig> {
self.auth.as_ref()
}
pub fn with_auth(mut self, auth: AuthConfig) -> Self {
self.auth = Some(auth);
self
}
}
#[cfg(test)]
mod tests {
use std::collections::HashMap;
use docker::models::{ContainerCreateBody, HostConfig, HostConfigPortBindings};
use serde_json::json;
use super::{AuthConfig, DockerConfig};
#[test]
fn empty_image_fails() {
let _ = DockerConfig::new("".to_string(), ContainerCreateBody::new(), None).unwrap_err();
}
#[test]
fn white_space_image_fails() {
let _ =
DockerConfig::new(" ".to_string(), ContainerCreateBody::new(), None).unwrap_err();
}
#[test] | fn docker_config_ser() {
let mut labels = HashMap::new();
labels.insert("k1".to_string(), "v1".to_string());
labels.insert("k2".to_string(), "v2".to_string());
let mut port_bindings = HashMap::new();
port_bindings.insert(
"27017/tcp".to_string(),
vec![HostConfigPortBindings::new().with_host_port("27017".to_string())],
);
let create_options = ContainerCreateBody::new()
.with_host_config(HostConfig::new().with_port_bindings(port_bindings))
.with_labels(labels);
let config = DockerConfig::new("ubuntu".to_string(), create_options, None)
.unwrap()
.with_image_id("42".to_string());
let actual_json = serde_json::to_string(&config).unwrap();
let expected_json = json!({
"image": "ubuntu",
"imageHash": "42",
"createOptions": {
"Labels": {
"k1": "v1",
"k2": "v2"
},
"HostConfig": {
"PortBindings": {
"27017/tcp": [
{
"HostPort": "27017"
}
]
}
}
}
});
assert_eq!(
serde_json::from_str::<serde_json::Value>(&actual_json).unwrap(),
expected_json
);
}
#[test]
fn docker_config_ser_auth() {
let mut labels = HashMap::new();
labels.insert("k1".to_string(), "v1".to_string());
labels.insert("k2".to_string(), "v2".to_string());
let mut port_bindings = HashMap::new();
port_bindings.insert(
"27017/tcp".to_string(),
vec![HostConfigPortBindings::new().with_host_port("27017".to_string())],
);
let create_options = ContainerCreateBody::new()
.with_host_config(HostConfig::new().with_port_bindings(port_bindings))
.with_labels(labels);
let auth_config = AuthConfig::new()
.with_username("username".to_string())
.with_password("password".to_string())
.with_serveraddress("repo.azurecr.io".to_string());
let config =
DockerConfig::new("ubuntu".to_string(), create_options, Some(auth_config)).unwrap();
let actual_json = serde_json::to_string(&config).unwrap();
let expected_json = json!({
"image": "ubuntu",
"createOptions": {
"Labels": {
"k1": "v1",
"k2": "v2"
},
"HostConfig": {
"PortBindings": {
"27017/tcp": [
{
"HostPort": "27017"
}
]
}
}
},
"auth": {
"username": "username",
"password": "password",
"serveraddress": "repo.azurecr.io"
}
});
assert_eq!(
serde_json::from_str::<serde_json::Value>(&actual_json).unwrap(),
expected_json
);
}
#[test]
fn docker_config_deser_no_create_options() {
let input_json = json!({
"image": "ubuntu"
});
let config = serde_json::from_str::<DockerConfig>(&input_json.to_string()).unwrap();
assert_eq!(config.image, "ubuntu");
}
#[test]
fn docker_config_deser_from_map() {
let input_json = json!({
"image": "ubuntu",
"createOptions": {
"Labels": {
"k1": "v1",
"k2": "v2"
},
"HostConfig": {
"PortBindings": {
"27017/tcp": [
{
"HostPort": "27017"
}
]
}
}
},
"auth": {
"username": "username",
"password": "password",
"serveraddress": "repo.azurecr.io"
}
});
let config = serde_json::from_str::<DockerConfig>(&input_json.to_string()).unwrap();
assert_eq!(config.image, "ubuntu");
assert_eq!(&config.create_options.labels().unwrap()["k1"], "v1");
assert_eq!(&config.create_options.labels().unwrap()["k2"], "v2");
let port_binding = &config
.create_options
.host_config()
.unwrap()
.port_bindings()
.unwrap()["27017/tcp"];
assert_eq!(
port_binding.iter().next().unwrap().host_port().unwrap(),
"27017"
);
assert_eq!("username", config.auth().unwrap().username().unwrap());
assert_eq!("password", config.auth().unwrap().password().unwrap());
assert_eq!(
"repo.azurecr.io",
config.auth().unwrap().serveraddress().unwrap()
);
}
#[test]
fn docker_config_deser_from_str() {
let input_json = json!({
"image": "ubuntu",
"createOptions": {
"Labels": {
"k1": "v1",
"k2": "v2"
},
"HostConfig": {
"PortBindings": {
"27017/tcp": [
{
"HostPort": "27017"
}
]
}
}
}
});
let config: DockerConfig = serde_json::from_str(&input_json.to_string()).unwrap();
assert_eq!(config.image, "ubuntu");
assert_eq!(&config.create_options.labels().unwrap()["k1"], "v1");
assert_eq!(&config.create_options.labels().unwrap()["k2"], "v2");
let port_binding = &config
.create_options
.host_config()
.unwrap()
.port_bindings()
.unwrap()["27017/tcp"];
assert_eq!(
port_binding.iter().next().unwrap().host_port().unwrap(),
"27017"
);
}
} | |
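// A small, hypothetical usage sketch of the builder-style API above; the image
// name and id are illustrative values only.
fn example_config() -> Result<DockerConfig> {
    let config = DockerConfig::new("ubuntu".to_string(), ContainerCreateBody::new(), None)?
        .with_image_id("42".to_string());
    assert_eq!(config.image(), "ubuntu");
    assert_eq!(config.image_id(), Some("42"));
    Ok(config)
}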
Model_HM_RWS.py |
import site
site.addsitedir('..')
import torch
from pytorch_probgraph import BernoulliLayer
from pytorch_probgraph import InteractionLinear
from pytorch_probgraph import HelmholtzMachine
from itertools import chain
from tqdm import tqdm
class Model_HM_RWS(torch.nn.Module):
| def __init__(self):
super().__init__()
layer0 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 1, 28, 28]), requires_grad=True))
layer1 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
layer2 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
interactionUp1 = InteractionLinear(layer0.bias.shape[1:], layer1.bias.shape[1:])
interactionDown1 = InteractionLinear(layer1.bias.shape[1:], layer0.bias.shape[1:])
interactionUp2 = InteractionLinear(layer1.bias.shape[1:], layer2.bias.shape[1:])
interactionDown2 = InteractionLinear(layer2.bias.shape[1:], layer1.bias.shape[1:])
parameters = chain(*[m.parameters() for m in [layer0, layer1, layer2, interactionUp1, interactionUp2, interactionDown1, interactionDown2]])
opt = torch.optim.Adam(parameters)
self.model = HelmholtzMachine([layer0, layer1, layer2],
[interactionUp1, interactionUp2],
[interactionDown1, interactionDown2],
optimizer=opt)
#print(interaction.weight.shape)
def train(self, data, epochs=1, device=None):
for epoch in range(epochs):
for dat in data:
self.model.trainReweightedWS(dat.to(device), ksamples=5)
if isinstance(data, tqdm):
data = tqdm(data)
#print(torch.sum(self.model.interaction.weight))
def loglikelihood(self, data):
return self.model.loglikelihood(data, ksamples=100).cpu().detach()
def generate(self, N=1):
return self.model.sampleAll(N=N)[0][0].cpu() |
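# A hypothetical usage sketch: the random tensors stand in for a real binarized
# MNIST loader. Trains briefly on CPU, then draws a few samples.
if __name__ == '__main__':
    model = Model_HM_RWS()
    data = [torch.bernoulli(0.5 * torch.ones(64, 1, 28, 28)) for _ in range(10)]
    model.train(data, epochs=1)
    samples = model.generate(N=4)
    print(samples.shape)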
|
export_d3po.py | import json
import os
from astropy.table import Table, Column
from ..config import exporters
from ..qt.widgets import ScatterWidget, HistogramWidget
from ..core import Subset
def save_page(page, page_number, label, subset):
|
def save_plot_base(plot, index):
result = {}
result['gridPosition'] = [0, index]
return result
def save_plot(plot, index):
dispatch = {ScatterWidget: save_scatter,
HistogramWidget: save_histogram}
typ = type(plot)
return dispatch[typ](plot, index)
def save_scatter(plot, index):
""" Convert a single glue scatter plot to a D3PO plot
    :param plot: Glue scatter plot
    :type plot: :class:`~glue.qt.widgets.scatter_widget.ScatterWidget`
:param index: 1D index of plot on the page
:type index: int
:rtype: json-serializable dict
"""
result = save_plot_base(plot, index)
props = plot.properties
result['type'] = 'scatter'
result['xAxis'] = dict(columnName=props['xatt'].label,
range=[props['xmin'], props['xmax']])
result['yAxis'] = dict(columnName=props['yatt'].label,
range=[props['ymin'], props['ymax']])
# XXX log scales
return result
def save_histogram(plot, index):
""" Convert a single histogram to a D3PO plot
:param plot: Glue histogram
:type plot: :class:`~glue.qt.widgets.histogram_widget.HistogramWidget`
:param index: 1D index of plot on the page
:type index: int
:rtype: json-serializable dict
"""
result = save_plot_base(plot, index)
props = plot.properties
result['type'] = 'histogram'
result['xAxis'] = dict(columnName=props['component'].label,
bins=props['nbins'],
range=[props['xmin'], props['xmax']])
    # XXX normed, cumulative, log
return result
def stage_subsets(application):
"""
Return a tuple of the subset to use for each stage/tab,
or None if the tab has no subset
If more than one subset is used per stage/tab, returns None
"""
result = []
for page in application.viewers:
subset = None
for viewer in page:
for layer_artist in viewer.layers:
if not layer_artist.visible:
continue
s = layer_artist.layer
if not isinstance(s, Subset):
continue
if subset is not None and s is not subset:
return None
if subset is None:
subset = s
result.append(subset)
return tuple(result)
def can_save_d3po(application):
"""
Check whether an application can be exported to D3PO.
Raises an exception if not
"""
dc = application.session.data_collection
if len(dc) != 1:
raise ValueError("D3PO Export only supports a single dataset")
data = dc[0]
for tab in application.viewers:
for viewer in tab:
if not isinstance(viewer, (ScatterWidget, HistogramWidget)):
raise ValueError("D3PO Export only supports scatter "
"and histogram plots")
if sum(len(tab) for tab in application.viewers) == 0:
raise ValueError("D3PO Export requires at least one scatterplot "
"or histogram")
if stage_subsets(application) is None:
raise ValueError("D3PO Export restricted to 0 or 1 subsets visible "
"in each tab")
def make_data_file(data, subsets, path):
"""
Create the data.csv file, given Data and tuple of subsets
"""
data_path = os.path.join(path, 'data.csv')
t = Table([data[c] for c in data.components],
names=[c.label for c in data.components])
for i, subset in enumerate(subsets):
if subset is None:
continue
c = Column(data=subset.to_mask().astype('i'), name='selection_%i' % i)
t.add_column(c)
t.write(data_path, format='ascii', delimiter=',')
def save_d3po(application, path):
"""Save a Glue session to a D3PO bundle.
Currently, this has the following restrictions:
- The Glue session must have only one dataset open, and 0 or 1 subsets
- Only scatter plots or histograms are present
- At least one plot is present
    :param application: Glue application to save
:param path: Path to directory to save in. Will be created if needed
"""
if os.path.exists(path) and not os.path.isdir(path):
os.unlink(path)
if not os.path.exists(path):
os.mkdir(path)
data = application.session.data_collection[0]
subsets = stage_subsets(application)
viewers = application.viewers
# data.csv
make_data_file(data, subsets, path)
# states.json
result = {}
result['filename'] = 'data.csv' # XXX don't think this is needed?
result['title'] = "Glue export of %s" % data.label
result['states'] = map(save_page, application.viewers,
range(len(viewers)),
application.tab_names,
subsets)
state_path = os.path.join(path, 'states.json')
with open(state_path, 'w') as outfile:
json.dump(result, outfile, indent=2)
# index.html
html_path = os.path.join(path, 'index.html')
with open(html_path, 'w') as outfile:
outfile.write(HTML)
# show the result
launch(path)
def launch(path):
"""Start a server to view an exported D3PO bundle, and open a browser.
:param path: The TLD of the bundle
"""
from SocketServer import TCPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
from random import randrange
from socket import error
import webbrowser
from threading import Thread
os.chdir(path)
while True:
try:
PORT = randrange(8000, 9000)
server = TCPServer(("", PORT), SimpleHTTPRequestHandler, False)
server.allow_reuse_address = True
server.server_bind()
break
except error: # port already taken
pass
print 'Serving D3PO on port 0.0.0.0:%i' % PORT
server.server_activate()
thread = Thread(target=server.serve_forever)
thread.setDaemon(True) # do not prevent shutdown
thread.start()
webbrowser.open('http://0.0.0.0:%i' % PORT)
exporters.add('D3PO', save_d3po, can_save_d3po, outmode='directory')
HTML = """
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<link rel="stylesheet" type="text/css" href="http://d3po.org/static/css/style.css">
<link rel="stylesheet" type="text/css" href="http://d3po.org/static/css/d3po.css">
<link href='http://fonts.googleapis.com/css?family=Source+Sans+Pro:100,200,300,400,700' rel='stylesheet' type='text/css'>
<style>
#footer {
position: fixed;
bottom: 0;
right: 0;
}
</style>
<!-- not to be confused with Planet Telex -->
<!-- Javscript dependencies -->
<script src="http://d3js.org/d3.v3.min.js" charset="utf-8"></script>
<script src="http://d3po.org/static/js/util.js"></script>
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"></script>
<script src="http://d3po.org/static/js/d3po.js"></script>
<script src="http://d3po.org/static/js/d3po.init.js"></script>
</head>
<body>
<div id="svg"><svg></svg></div>
<div id="controls">
<ul class="navigation">
</ul>
</div>
<div id="caption"></div>
<div id="footer">
More information: <a href="http://d3po.org">d3po.org</a>
</div>
<script type="text/javascript">
$(document).ready(function() {
initialize('states.json', 'data.csv');
}
);
</script>
</body>
</html>
"""
| """ Convert a tab of a glue session into a D3PO page
:param page: Tuple of data viewers to save
:param label: Tab label
"""
result = {}
# layout settings
result['grid'] = {'nRows': 1, 'nColumns': len(page)}
result['name'] = str(label)
result['caption'] = 'Generated by Glue'
# style settings
d = page[0]._data[0]
unselected = dict(opacity=d.style.alpha,
size=d.style.markersize / 2,
color=d.style.color)
result['markerStyle'] = dict(unselected=unselected)
if subset is not None:
s = subset.style
selected = dict(opacity=s.alpha, size=s.markersize / 2, color=s.color)
result['markerStyle']['selected'] = selected
result['selection'] = {'type': 'booleanColumn',
'columnName': 'selection_%i' % page_number}
result['histogramStyle'] = result['markerStyle']
# save each plot
result['plots'] = map(save_plot, page, range(len(page)))
return result |
consumers.py | import json
from asgiref.sync import async_to_sync
from channels.generic.websocket import WebsocketConsumer
Rooms = {}
def AddToRoom(RoomName, UserID):
    # Create the room on first use, then add the user if not already present.
    room = Rooms.setdefault(RoomName, [])
    if UserID not in room:
        room.append(UserID)
def RemoveFromRoom(RoomName, UserID):
if RoomName in Rooms:
if UserID in Rooms[RoomName]:
Rooms[RoomName].remove(UserID)
class ChatConsumer(WebsocketConsumer):
def connect(self):
RoomName = self.scope['url_route']['kwargs']['room_name']
UserID = self.scope['url_route']['kwargs']['user_id']
self.room_name = RoomName
self.room_group_name = 'Chat_%s' % self.room_name
async_to_sync(self.channel_layer.group_add)(
self.room_group_name,
self.channel_name
)
self.accept()
AddToRoom(RoomName, UserID)
if RoomName in Rooms:
async_to_sync(self.channel_layer.group_send)(
self.room_group_name,
{
'type': "UsersCount",
'UsersCount': len(Rooms[RoomName])
}
)
else:
async_to_sync(self.channel_layer.group_send)(
self.room_group_name,
{
'type': "UsersCount",
'UsersCount': 0
}
)
def disconnect(self, close_code):
RoomName = self.scope['url_route']['kwargs']['room_name']
UserID = self.scope['url_route']['kwargs']['user_id']
async_to_sync(self.channel_layer.group_discard)(
self.room_group_name,
self.channel_name
)
RemoveFromRoom(RoomName, UserID)
if RoomName in Rooms:
async_to_sync(self.channel_layer.group_send)(
self.room_group_name,
{
'type': "UsersCount",
'UsersCount': len(Rooms[RoomName])
}
)
else:
async_to_sync(self.channel_layer.group_send)(
self.room_group_name,
{
'type': "UsersCount",
'UsersCount': 0
}
)
def receive(self, text_data):
text_data_json = json.loads(text_data)
if text_data_json['Type'] == "Text":
async_to_sync(self.channel_layer.group_send)(
self.room_group_name,
{
'type': "Text",
'Message': text_data_json['Message'],
"Date": text_data_json['Date'],
"From": text_data_json['From'],
"Read": text_data_json['Read'],
"Time": text_data_json['Time'],
"Type": text_data_json['Type'],
"_id": text_data_json['_id'],
}
)
elif text_data_json['Type'] == "File":
async_to_sync(self.channel_layer.group_send)(
self.room_group_name,
{
'type': "File",
'Message': text_data_json['Message'],
"Date": text_data_json['Date'],
"DateOBJ": text_data_json['DateOBJ'],
"File": text_data_json['File'],
"From": text_data_json['From'],
"Mime": text_data_json["Mime"],
"PreviewFile": text_data_json['PreviewFile'],
"Read": text_data_json['Read'],
"Time": text_data_json['Time'],
"Type": text_data_json['Type'],
"_id": text_data_json['_id'],
}
)
def Text(self, event):
|
def File(self, event):
self.send(text_data=json.dumps({
'Message': event['Message'],
"Date": event['Date'],
"DateOBJ": event['DateOBJ'],
"File": event['File'],
"From": event['From'],
"Mime": event["Mime"],
"PreviewFile": event['PreviewFile'],
"Read": event['Read'],
"Time": event['Time'],
"Type": event['Type'],
"_id": event['_id'],
}))
def UsersCount(self, event):
self.send(text_data=json.dumps({
'UsersCount': event['UsersCount'],
"Type": 'UsersCount',
}))
| self.send(text_data=json.dumps({
'Message': event['Message'],
"Date": event['Date'],
"From": event['From'],
"Read": event['Read'],
"Time": event['Time'],
"Type": event['Type'],
"_id": event['_id'],
})) |
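# A hypothetical routing sketch (the URL pattern is an assumption) showing how
# ChatConsumer is typically wired into a Channels URL router. On Channels 2.x,
# reference the consumer class directly instead of calling as_asgi().
from django.urls import re_path

websocket_urlpatterns = [
    re_path(r'ws/chat/(?P<room_name>\w+)/(?P<user_id>\w+)/$',
            ChatConsumer.as_asgi()),
]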
blob_test.rs | #[cfg(test)]
mod tests {
use blob::{
blob_data::Data, blob_handler_client::BlobHandlerClient, BlobData, BlobInfo, FileInfo,
};
use dotenv::dotenv;
use futures::stream::iter;
use services::BlobService;
pub mod blob {
tonic::include_proto!("blob");
}
struct TestBlobClient {
client: BlobHandlerClient<tonic::transport::Channel>,
blob_service: BlobService,
files_path: String,
}
impl TestBlobClient {
async fn new(dst: String, files_path: String) -> Self {
let client = BlobHandlerClient::connect(dst).await.unwrap();
let blob_service = BlobService::new(files_path.clone()).unwrap();
TestBlobClient {
client,
blob_service,
files_path,
}
}
async fn upload_image(
&mut self,
test_file: &str,
) -> Result<BlobInfo, Box<dyn std::error::Error>> {
// Copy a file so we know it exists
tokio::fs::copy(&test_file, format!("{}/{}", self.files_path, &test_file))
.await
.unwrap();
let mut reader = self.blob_service.reader(&test_file).await.unwrap();
let mut arr = Vec::new();
while let Some(chunk) = reader.read().await.unwrap() {
arr.push(BlobData {
data: Some(Data::ChunkData(chunk)),
});
}
arr.push(BlobData {
data: Some(Data::Info(FileInfo {
extension: ".jpeg".to_string(),
file_name: "".to_string(), // ignored for now
meta_text: "Smooth".to_string(), // ignored for now
})),
});
let res = self.client.upload(tonic::Request::new(iter(arr))).await?;
Ok(res.into_inner())
}
async fn | (&mut self, blob_id: &str) -> (String, Option<FileInfo>) {
let stream = self
.client
.download(BlobInfo {
blob_id: blob_id.to_string(),
})
.await
.unwrap();
let mut stream = stream.into_inner();
let mut blob = self.blob_service.writer().create_blob().await.unwrap();
let mut file_info: Option<FileInfo> = None;
while let Some(req) = stream.message().await.unwrap() {
if let Some(d) = req.data {
match d {
Data::Info(info) => file_info = Some(info),
Data::ChunkData(chunk) => blob.append(chunk).await.unwrap(),
}
}
}
(blob.finalize(".test.jpeg").await.unwrap(), file_info)
}
}
#[tokio::test]
async fn upload_download_works() {
dotenv().ok();
let file = "test_img.jpeg";
let path = dotenv::var("UPLOAD_PATH").unwrap();
let mut test_client =
TestBlobClient::new("http://0.0.0.0:50051".to_string(), path.clone()).await;
let upload = test_client.upload_image(&file).await.unwrap();
assert!(!upload.blob_id.is_empty());
let (file_id, info) = test_client.download_image(&upload.blob_id).await;
assert!(info.is_some());
assert!(std::fs::read(format!("{}/{}", path, file_id)).is_ok());
}
}
| download_image |
base_trainer_test.py | # Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow_models.core.trainers.trainer."""
# pylint: disable=g-direct-tensorflow-import
import os
from absl.testing import parameterized
import tensorflow as tf
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import strategy_combinations
from official.core import base_trainer as trainer_lib
from official.core import train_lib
from official.modeling.hyperparams import config_definitions as cfg
from official.utils.testing import mock_task
def all_strategy_combinations():
return combinations.combine(
distribution=[
strategy_combinations.default_strategy,
strategy_combinations.tpu_strategy,
strategy_combinations.one_device_strategy_gpu,
],
mode='eager',
)
class TrainerTest(tf.test.TestCase, parameterized.TestCase):
def setUp(self):
super().setUp()
self._config = cfg.ExperimentConfig(
trainer=cfg.TrainerConfig(
optimizer_config=cfg.OptimizationConfig({
'optimizer': {
'type': 'sgd'
},
'learning_rate': {
'type': 'constant'
}
})))
def create_test_trainer(self, config, model_dir=None):
task = mock_task.MockTask(config.task, logging_dir=model_dir)
ckpt_exporter = train_lib.maybe_create_best_ckpt_exporter(config, model_dir)
trainer = trainer_lib.Trainer(
config,
task,
model=task.build_model(),
optimizer=trainer_lib.create_optimizer(config.trainer, config.runtime),
checkpoint_exporter=ckpt_exporter)
return trainer
@combinations.generate(all_strategy_combinations())
def test_trainer_train(self, distribution):
with distribution.scope():
trainer = self.create_test_trainer(self._config)
logs = trainer.train(tf.convert_to_tensor(5, dtype=tf.int32))
self.assertIn('training_loss', logs)
self.assertIn('learning_rate', logs)
@combinations.generate(all_strategy_combinations())
def test_trainer_validate(self, distribution):
with distribution.scope():
trainer = self.create_test_trainer(self._config)
logs = trainer.evaluate(tf.convert_to_tensor(5, dtype=tf.int32))
self.assertIn('validation_loss', logs)
self.assertEqual(logs['acc'], 5. * distribution.num_replicas_in_sync)
@combinations.generate(
combinations.combine(
mixed_precision_dtype=['float32', 'bfloat16', 'float16'],
loss_scale=[None, 'dynamic', 128, 256],
))
def test_configure_optimizer(self, mixed_precision_dtype, loss_scale):
config = cfg.ExperimentConfig(
runtime=cfg.RuntimeConfig(
mixed_precision_dtype=mixed_precision_dtype, loss_scale=loss_scale),
trainer=cfg.TrainerConfig(
optimizer_config=cfg.OptimizationConfig({
'optimizer': {
'type': 'sgd'
},
'learning_rate': {
'type': 'constant'
}
})))
trainer = self.create_test_trainer(config)
if mixed_precision_dtype != 'float16':
self.assertIsInstance(trainer.optimizer, tf.keras.optimizers.SGD)
elif mixed_precision_dtype == 'float16' and loss_scale is None:
self.assertIsInstance(trainer.optimizer, tf.keras.optimizers.SGD)
else:
self.assertIsInstance(
trainer.optimizer,
tf.keras.mixed_precision.experimental.LossScaleOptimizer)
metrics = trainer.train(tf.convert_to_tensor(5, dtype=tf.int32))
self.assertIn('training_loss', metrics)
@combinations.generate(all_strategy_combinations())
def test_export_best_ckpt(self, distribution):
|
if __name__ == '__main__':
tf.test.main()
| config = cfg.ExperimentConfig(
trainer=cfg.TrainerConfig(
best_checkpoint_export_subdir='best_ckpt',
best_checkpoint_eval_metric='acc',
optimizer_config=cfg.OptimizationConfig({
'optimizer': {
'type': 'sgd'
},
'learning_rate': {
'type': 'constant'
}
})))
model_dir = self.get_temp_dir()
trainer = self.create_test_trainer(config, model_dir=model_dir)
trainer.train(tf.convert_to_tensor(1, dtype=tf.int32))
trainer.evaluate(tf.convert_to_tensor(1, dtype=tf.int32))
self.assertTrue(
tf.io.gfile.exists(os.path.join(model_dir, 'best_ckpt', 'info.json'))) |
page.js | (function(){'use strict';function a(a){const b=B.createContextualFragment(a);return b.children[0]}function b(a){return(a+'').replace(/[&<>"'\/]/g,(a)=>C[a])}function c(a,...c){return c=c.map((a)=>b(a)),a.reduce((a,b,d)=>a+=b+(c[d]||''),'')}function d(a){return new Response(a).text()}function e({removeClass:a=!1}={}){return function(b,c='active',d='transition'){if(a){if(!b.classList.contains(c))return Promise.resolve();}else if(b.classList.contains(c))return Promise.resolve();const e=new Promise((e)=>{const f=(a)=>{a.target!=b||(b.removeEventListener('webkitTransitionEnd',f),b.removeEventListener('transitionend',f),b.classList.remove(d),e())};b.classList.add(d),requestAnimationFrame(()=>{b.addEventListener('webkitTransitionEnd',f),b.addEventListener('transitionend',f),b.classList[a?'remove':'add'](c)})}),f=new Promise((a)=>setTimeout(a,1e3));return Promise.race([e,f])}}function f(a){return{x:a.pageX,y:a.pageY}}function g(a,b){var c=Math.abs;const d=c(b.x-a.x),e=c(b.y-a.y);return Math.sqrt(d*d+e*e)}function h(a,b){return{x:(a.x+b.x)/2,y:(a.y+b.y)/2}}function i(a){return a.touches?Array.from(a.touches).map((a)=>f(a)):[f(a)]}function j(a,b){const c=Math.pow(10,b);return Math.floor(Math.round(a*c))/c}function k(a){return 1024>a?a+' bytes':j(a/1024,2)+'k'}function l(){}function m(){m.init.call(this)}function n(a){return void 0===a._maxListeners?m.defaultMaxListeners:a._maxListeners}function o(a,b,c){if(b)a.call(c);else for(var d=a.length,e=y(a,d),f=0;f<d;++f)e[f].call(c)}function p(a,b,c,d){if(b)a.call(c,d);else for(var e=a.length,f=y(a,e),g=0;g<e;++g)f[g].call(c,d)}function q(a,b,c,d,e){if(b)a.call(c,d,e);else for(var f=a.length,g=y(a,f),h=0;h<f;++h)g[h].call(c,d,e)}function r(a,b,c,d,e,f){if(b)a.call(c,d,e,f);else for(var g=a.length,h=y(a,g),j=0;j<g;++j)h[j].call(c,d,e,f)}function s(a,b,c,d){if(b)a.apply(c,d);else for(var e=a.length,f=y(a,e),g=0;g<e;++g)f[g].apply(c,d)}function t(a,b,c,d){var e,f,g;if('function'!=typeof c)throw new TypeError('"listener" argument must be a function');if(f=a._events,f?(f.newListener&&(a.emit('newListener',b,c.listener?c.listener:c),f=a._events),g=f[b]):(f=a._events=new l,a._eventsCount=0),!g)g=f[b]=c,++a._eventsCount;else if('function'==typeof g?g=f[b]=d?[c,g]:[g,c]:d?g.unshift(c):g.push(c),!g.warned&&(e=n(a),e&&0<e&&g.length>e)){g.warned=!0;var h=new Error('Possible EventEmitter memory leak detected. '+g.length+' '+b+' listeners added. 
Use emitter.setMaxListeners() to increase limit');h.name='MaxListenersExceededWarning',h.emitter=a,h.type=b,h.count=g.length,u(h)}return a}function u(a){'function'==typeof console.warn?console.warn(a):console.log(a)}function v(a,b,c){function d(){a.removeListener(b,d),e||(e=!0,c.apply(a,arguments))}var e=!1;return d.listener=c,d}function w(a){var b=this._events;if(b){var c=b[a];if('function'==typeof c)return 1;if(c)return c.length}return 0}function x(a,b){for(var c=b,d=c+1,e=a.length;d<e;c+=1,d+=1)a[c]=a[d];a.pop()}function y(a,b){for(var c=Array(b);b--;)c[b]=a[b];return c}function z(a){for(var b=Array(a.length),c=0;c<b.length;++c)b[c]=a[c].listener||a[c];return b}const A=new Promise((a)=>{function b(){'loading'!=document.readyState&&a()}document.addEventListener('readystatechange',b),b()}),B=document.createRange();B.selectNode(document.documentElement);const C={"&":'&',"<":'<',">":'>','"':'"',"'":''',"/":'/'},D=e(),E=e({removeClass:!0});let F=(()=>{function a(){return c||(c=new Promise((a,b)=>{const c=indexedDB.open('svgo-keyval',1);c.onerror=()=>{b(c.error)},c.onupgradeneeded=()=>{c.result.createObjectStore('keyval')},c.onsuccess=()=>{a(c.result)}})),c}async function b(b,c){const d=await a();return new Promise((a,e)=>{const f=d.transaction('keyval',b);f.oncomplete=()=>a(),f.onerror=()=>e(f.error),c(f.objectStore('keyval'))})}let c;return{async get(a){let c;return await b('readonly',(b)=>{c=b.get(a)}),c.result},set(a,c){return b('readwrite',(b)=>{b.put(c,a)})},delete(a){return b('readwrite',(b)=>{b.delete(a)})}}})();self.indexedDB||(F={get:(a)=>Promise.resolve(localStorage.getItem(a)),set:(a,b)=>Promise.resolve(localStorage.setItem(a,b)),delete:(a)=>Promise.resolve(localStorage.removeItem(a))}),'use strict';class G{constructor(a){this._requestId=0,this._pending={},this._url=a,this._worker=new Worker(this._url),this._worker.onmessage=(a)=>this._onMessage(a)}async release(){this._worker&&(this._worker.terminate(),this._worker=null);for(const a of Object.keys(this._pending))this._fulfillPending(id,null,new Error('Worker terminated: '+this._url))}_postMessage(a){this._worker.postMessage(a)}_onMessage(a){return a.data.id?void this._fulfillPending(a.data.id,a.data.result,a.data.error):void console.log('Unexpected message',a)}_fulfillPending(a,b,c){const d=this._pending[a];return d?(delete this._pending[a],c?void d[1](new Error(c)):void d[0](b)):void console.log('No resolver for',{id:a,result:b,error:c})}_requestResponse(a){return new Promise((b,c)=>{a.id=++this._requestId,this._pending[a.id]=[b,c],this._postMessage(a)})}}const H=new class extends G{constructor(){super('js/gzip-worker.js')}compress(a){return this._requestResponse({data:a})}};class I{constructor(a,b,c){this.text=a,this._compressedSize=null,this._url='',this._blob=null,this.width=b,this.height=c}async size({compress:a}){return a?(this._compressedSize||(this._compressedSize=H.compress(this.text).then((a)=>a.byteLength)),this._compressedSize):this.text.length}_create(){this._blob=new Blob([this.text],{type:'image/svg+xml'}),this._url=URL.createObjectURL(this._blob)}get blob(){return this._blob||this._create(),this._blob}get url(){return this._url||this._create(),this._url}release(){this._url&&(this._blob=null,URL.revokeObjectURL(this._url))}}class 
J{constructor(a,{eventArea:b=a,shouldCaptureFunc:c=()=>!0}={}){this._target=a,this._shouldCaptureFunc=c,this._dx=0,this._dy=0,this._scale=1,this._active=0,this._lastPoints=[],['_onPointerDown','_onPointerMove','_onPointerUp'].forEach((a)=>{this[a]=this[a].bind(this)}),b.addEventListener('mousedown',this._onPointerDown),b.addEventListener('touchstart',this._onPointerDown),b.addEventListener('wheel',(a)=>this._onWheel(a))}reset(){this._dx=0,this._dy=0,this._scale=1,this._update()}_onWheel(a){if(!this._shouldCaptureFunc(a.target))return;a.preventDefault();const b=this._target.getBoundingClientRect();let c=a.deltaY;1===a.deltaMode&&(c*=15),c=Math.max(Math.min(c,60),-60);const d=c/300+1;0.05>this._scale*d||(this._scale*=d,this._dx-=(a.pageX-b.left)*(d-1),this._dy-=(a.pageY-b.top)*(d-1),this._update())}_onFirstPointerDown(){document.addEventListener('mousemove',this._onPointerMove),document.addEventListener('mouseup',this._onPointerUp),document.addEventListener('touchmove',this._onPointerMove),document.addEventListener('touchend',this._onPointerUp)}_onPointerDown(a){('mousedown'!=a.type||1==a.which)&&this._shouldCaptureFunc(a.target)&&(a.preventDefault(),this._lastPoints=i(a),this._active++,1===this._active&&this._onFirstPointerDown(a))}_onPointerMove(a){a.preventDefault();const b=i(a),c=b.reduce(h),d=this._lastPoints.reduce(h),e=this._target.getBoundingClientRect();if(this._dx+=c.x-d.x,this._dy+=c.y-d.y,b[1]){const a=g(b[0],b[1])/g(this._lastPoints[0],this._lastPoints[1]);this._scale*=a,this._dx-=(c.x-e.left)*(a-1),this._dy-=(c.y-e.top)*(a-1)}this._update(),this._lastPoints=b}_update(){this._target.style.WebkitTransform=this._target.style.transform=`translate3d(${this._dx}px, ${this._dy}px, 0) scale(${this._scale})`}_onPointerUp(a){return a.preventDefault(),this._active--,this._lastPoints.pop(),this._active?void(this._lastPoints=i(a)):void(document.removeEventListener('mousemove',this._onPointerMove),document.removeEventListener('mouseup',this._onPointerUp),document.removeEventListener('touchmove',this._onPointerMove),document.removeEventListener('touchend',this._onPointerUp))}}class K{constructor(){this.container=a('<div class="svg-output"><div class="svg-container"><iframe class="svg-frame" sandbox="allow-scripts"></iframe></div><div class="svg-clickjacker"></div></div>'),this._svgFrame=this.container.querySelector('.svg-frame'),this._svgFrame.scrolling='no',this._svgContainer=this.container.querySelector('.svg-container'),A.then(()=>{this._panZoom=new J(this._svgContainer,{eventArea:this.container})})}setSvg(a){const b=this._nextLoadPromise();return this._svgFrame.src='data:image/svg+xml;charset=utf-8,'+encodeURIComponent(a.text),this._svgFrame.width=a.width,this._svgFrame.height=a.height,b}reset(){this._svgFrame.src='about:blank',this._panZoom.reset()}_nextLoadPromise(){return new Promise((a)=>{const b=()=>{this._svgFrame.removeEventListener('load',b),a()};this._svgFrame.addEventListener('load',b)})}}const L=new class extends G{constructor(){super('js/prism-worker.js')}highlight(a){return this._requestResponse({data:a})}};class M{constructor(){this.container=a('<div class="code-output"><pre><code></code></pre></div>'),this._codeEl=this.container.querySelector('code')}async setSvg(a){this._codeEl.innerHTML=await L.highlight(a.text)}reset(){this._codeEl.innerHTML=''}}class N{constructor(){this.container=a('<div class="output-switcher"></div>'),this._types={image:new K,code:new M},this._svgFile=null,this._switchQueue=Promise.resolve(),this.set('image',{noAnimate:!0})}update(a){return 
this._svgFile=a,this._types[this._activeType].setSvg(a)}reset(){this._types[this._activeType].reset()}set(a,{noAnimate:b=!1}={}){return this._switchQueue=this._switchQueue.then(async()=>{const c=this._activeType&&this._types[this._activeType].container;this._activeType=a;const d=this._types[this._activeType].container;if(this.container.appendChild(d),this._svgFile&&(await this.update(this._svgFile)),b)d.classList.add('active'),c&&c.classList.remove('active');else{const a=[D(d)];c&&a.push(E(c)),await Promise.all(a)}c&&this.container.removeChild(c)})}}class O{constructor(){this.container=a('<div class="ripple"></div>')}animate(){this.container.classList.remove('animate'),this.container.offsetLeft,this.container.classList.add('animate')}}class P{constructor(){this.container=a('<div class="spinner"><div class="spinner-container"><div class="spinner-layer"><div class="circle-clipper left"><div class="circle"></div></div><div class="gap-patch"><div class="circle"></div></div><div class="circle-clipper right"><div class="circle"></div></div></div></div></div>'),this._showTimeout=null,this.container.style.display='none';const b=(a)=>{a.target==this.container&&(this.container.style.display='none')};this.container.addEventListener('webkitAnimationEnd',b),this.container.addEventListener('animationend',b)}show(a=300){clearTimeout(this._showTimeout),this.container.style.display='none',this.container.classList.remove('cooldown'),this._showTimeout=setTimeout(()=>{this.container.style.display=''},a)}hide(){clearTimeout(this._showTimeout),this.container.classList.add('cooldown')}}class Q{constructor({title:b,href:c,iconSvg:d,classList:e,minor:f}){this.container=a((c?'<a>':'<div role="button" tabindex="0">')+d+(c?'</a>':'</div>')+''),c&&(this.container.href=c),b&&this.container.setAttribute('title',b),this.container.classList.add(f?'minor-floating-action-button':'floating-action-button'),e&&e.forEach((a)=>{this.container.classList.add(a)}),this._ripple=new O,this.container.appendChild(this._ripple.container),this._spinner=new P,this.container.appendChild(this._spinner.container),this.container.addEventListener('click',(a)=>this._onClick(a))}_onClick(){this._ripple.animate()}working(){this._spinner.show(500)}done(){this._spinner.hide()}}class R extends Q{constructor(){const a='Download';super({title:a,href:'./',iconSvg:'<svg viewBox="0 0 24 24" class="icon">'+`<title>${a}</title>`+'<path d="M19 9h-4V3H9v6H5l7 7 7-7zM5 18v2h14v-2H5z"/></svg>'}),this._svgFile=null}_onClick(a){super._onClick(a),'msSaveBlob'in navigator&&(a.preventDefault(),navigator.msSaveBlob(this._svgFile.blob,this._svgFile.filename))}setDownload(a,b){this.container.download=a,this.container.href=b.url,this._svgFile=b}}const S=document.queryCommandSupported&&document.queryCommandSupported('copy');class T extends Q{constructor(){const a='Copy as text';super({title:a,iconSvg:'<svg viewBox="0 0 24 24" class="icon">'+`<title>${a}</title>`+'<path d="M16 1H4C3 1 2 2 2 3v14h2V3h12V1zm3 4H8C7 5 6 6 6 7v14c0 1 1 2 2 2h11c1 0 2-1 2-2V7c0-1-1-2-2-2zm0 16H8V7h11v14z"/></svg>',minor:!0}),this._text=null,this._pre=document.createElement('pre')}_onClick(a){super._onClick(a),this._pre.textContent=this._text,document.body.appendChild(this._pre),getSelection().removeAllRanges();const b=document.createRange();b.selectNode(this._pre),window.getSelection().addRange(b),document.execCommand('copy'),getSelection().removeAllRanges(),document.body.removeChild(this._pre)}setCopyText(a){this._text=a}}class U extends Q{constructor(){const a='Preview on vivid 
background';super({title:a,iconSvg:'<svg viewBox="0 0 24 24" class="icon">'+`<title>${a}</title>`+'<path d="M21.143 9.667c-.733-1.392-1.914-3.05-3.617-4.753-2.977-2.978-5.478-3.914-6.785-3.914-.414 0-.708.094-.86.246l-1.361 1.36c-1.899-.236-3.42.106-4.294.983-.876.875-1.164 2.159-.792 3.523.492 1.806 2.305 4.049 5.905 5.375.038.323.157.638.405.885.588.588 1.535.586 2.121 0s.588-1.533.002-2.119c-.588-.587-1.537-.588-2.123-.001l-.17.256c-2.031-.765-3.395-1.828-4.232-2.9l3.879-3.875c.496 2.73 6.432 8.676 9.178 9.178l-7.115 7.107c-.234.153-2.798-.316-6.156-3.675-3.393-3.393-3.175-5.271-3.027-5.498l1.859-1.856c-.439-.359-.925-1.103-1.141-1.689l-2.134 2.131c-.445.446-.685 1.064-.685 1.82 0 1.634 1.121 3.915 3.713 6.506 2.764 2.764 5.58 4.243 7.432 4.243.648 0 1.18-.195 1.547-.562l8.086-8.078c.91.874-.778 3.538-.778 4.648 0 1.104.896 1.999 2 1.999 1.105 0 2-.896 2-2 0-3.184-1.425-6.81-2.857-9.34zm-16.209-5.371c.527-.53 1.471-.791 2.656-.761l-3.209 3.206c-.236-.978-.049-1.845.553-2.445zm9.292 4.079l-.03-.029c-1.292-1.292-3.803-4.356-3.096-5.063.715-.715 3.488 1.521 5.062 3.096.862.862 2.088 2.247 2.937 3.458-1.717-1.074-3.491-1.469-4.873-1.462z"/></svg>',classList:['fillAB'],minor:!0})}_onClick(a){super._onClick(a),this.container.classList.contains('active')?(this.container.classList.remove('active'),this.setColor('transparent')):(this.container.classList.add('active'),this.setColor('rgba(0, 0, 0, 0.7)'))}setColor(a){document.documentElement.style.backgroundColor=a}}class V{constructor(){this.container=a('<div class="results"><span class="size"></span> <span class="diff"></span></div>'),this._sizeEl=this.container.querySelector('.size'),this._newSizeEl=this.container.querySelector('.newSize'),this._diffEl=this.container.querySelector('.diff')}update({size:a,comparisonSize:b}){return this._sizeEl.textContent=b?k(b)+' \u2192 '+k(a):k(a),this._diffEl.classList.remove('decrease','increase'),b?void(a===b?this._diffEl.textContent='100%':(this._diffEl.textContent=j(100*(a/b),2)+'%',this._diffEl.classList.add(a>b?'increase':'decrease'))):void(this._diffEl.textContent='')}}var W;l.prototype=Object.create(null),m.EventEmitter=m,m.usingDomains=!1,m.prototype.domain=void 0,m.prototype._events=void 0,m.prototype._maxListeners=void 0,m.defaultMaxListeners=10,m.init=function(){this.domain=null,m.usingDomains&&W.active&&!(this instanceof W.Domain)&&(this.domain=W.active),this._events&&this._events!==Object.getPrototypeOf(this)._events||(this._events=new l,this._eventsCount=0),this._maxListeners=this._maxListeners||void 0},m.prototype.setMaxListeners=function(a){if('number'!=typeof a||0>a||isNaN(a))throw new TypeError('"n" argument must be a positive number');return this._maxListeners=a,this},m.prototype.getMaxListeners=function(){return n(this)},m.prototype.emit=function(a){var b,c,d,e,f,g,h,i='error'===a;if(g=this._events,g)i=i&&null==g.error;else if(!i)return!1;if(h=this.domain,i){if(b=arguments[1],h)b||(b=new Error('Uncaught, unspecified "error" event')),b.domainEmitter=this,b.domain=h,b.domainThrown=!1,h.emit('error',b);else if(b instanceof Error)throw b;else{var j=new Error('Uncaught, unspecified "error" event. 
('+b+')');throw j.context=b,j}return!1}if(c=g[a],!c)return!1;var k='function'==typeof c;switch(d=arguments.length,d){case 1:o(c,k,this);break;case 2:p(c,k,this,arguments[1]);break;case 3:q(c,k,this,arguments[1],arguments[2]);break;case 4:r(c,k,this,arguments[1],arguments[2],arguments[3]);break;default:for(e=Array(d-1),f=1;f<d;f++)e[f-1]=arguments[f];s(c,k,this,e);}return!1,!0},m.prototype.addListener=function(a,b){return t(this,a,b,!1)},m.prototype.on=m.prototype.addListener,m.prototype.prependListener=function(a,b){return t(this,a,b,!0)},m.prototype.once=function(a,b){if('function'!=typeof b)throw new TypeError('"listener" argument must be a function');return this.on(a,v(this,a,b)),this},m.prototype.prependOnceListener=function(a,b){if('function'!=typeof b)throw new TypeError('"listener" argument must be a function');return this.prependListener(a,v(this,a,b)),this},m.prototype.removeListener=function(a,b){var c,d,e,f,g;if('function'!=typeof b)throw new TypeError('"listener" argument must be a function');if(d=this._events,!d)return this;if(c=d[a],!c)return this;if(c===b||c.listener&&c.listener===b)0==--this._eventsCount?this._events=new l:(delete d[a],d.removeListener&&this.emit('removeListener',a,c.listener||b));else if('function'!=typeof c){for(e=-1,f=c.length;0<f--;)if(c[f]===b||c[f].listener&&c[f].listener===b){g=c[f].listener,e=f;break}if(0>e)return this;if(1===c.length){if(c[0]=void 0,0==--this._eventsCount)return this._events=new l,this;delete d[a]}else x(c,e);d.removeListener&&this.emit('removeListener',a,g||b)}return this},m.prototype.removeAllListeners=function(a){var b,c;if(c=this._events,!c)return this;if(!c.removeListener)return 0===arguments.length?(this._events=new l,this._eventsCount=0):c[a]&&(0==--this._eventsCount?this._events=new l:delete c[a]),this;if(0===arguments.length){for(var d,e=Object.keys(c),f=0;f<e.length;++f)d=e[f],'removeListener'===d||this.removeAllListeners(d);return this.removeAllListeners('removeListener'),this._events=new l,this._eventsCount=0,this}if(b=c[a],'function'==typeof b)this.removeListener(a,b);else if(b)do this.removeListener(a,b[b.length-1]);while(b[0]);return this},m.prototype.listeners=function(a){var b,c,d=this._events;return d?(b=d[a],c=b?'function'==typeof b?[b.listener||b]:z(b):[]):c=[],c},m.listenerCount=function(a,b){return'function'==typeof a.listenerCount?a.listenerCount(b):w.call(a,b)},m.prototype.listenerCount=w,m.prototype.eventNames=function(){return 0<this._eventsCount?Reflect.ownKeys(this._events):[]};class X{constructor(b){this.container=a(`
<div class="material-slider">
<div class="track">
<div class="track-on"></div>
<div class="handle">
<div class="arrow"></div>
<div class="val"></div>
</div>
</div>
</div> | `),this.range=b,this._handle=this.container.querySelector('.handle'),this._trackOn=this.container.querySelector('.track-on'),this._val=this.container.querySelector('.val'),b.parentNode.insertBefore(this.container,b),this.container.insertBefore(b,this.container.firstChild),b.addEventListener('input',()=>this._onInputChange()),this.range.addEventListener('mousedown',()=>this._onRangeMouseDown()),this.range.addEventListener('touchstart',()=>this._onRangeTouchStart()),this.range.addEventListener('touchend',()=>this._onRangeTouchEnd()),this._setPosition()}_onRangeTouchStart(){this.range.focus()}_onRangeTouchEnd(){this.range.blur()}_onRangeMouseDown(){this.range.classList.add('active');const a=()=>{requestAnimationFrame(()=>{this.range.blur()}),this.range.classList.remove('active'),document.removeEventListener('mouseup',a)};document.addEventListener('mouseup',a)}set value(a){this.range.value=a,this._update()}_onInputChange(){this._update()}_update(){requestAnimationFrame(()=>this._setPosition())}_setPosition(){const{min:a,max:b,value:c}=this.range;this._trackOn.style.width=this._handle.style.left=100*((+c-a)/(b-a))+'%',this._val.textContent=c}}class Y extends m{constructor(){super(),this._throttleTimeout=null,A.then(()=>{this._pluginInputs=Array.from(document.querySelectorAll('.settings .plugins input')),this._globalInputs=Array.from(document.querySelectorAll('.settings .global input')),this._resetRipple=new O,this._resetBtn=document.querySelector('.setting-reset'),this._resetBtn.appendChild(this._resetRipple.container),this._sliderMap=new WeakMap,Array.from(document.querySelectorAll('.settings input[type=range]')).forEach((a)=>this._sliderMap.set(a,new X(a))),this.container=document.querySelector('.settings'),this._scroller=document.querySelector('.settings-scroller'),this.container.addEventListener('change',(a)=>this._onChange(a)),this.container.addEventListener('input',(a)=>this._onChange(a)),this._scroller.addEventListener('wheel',(a)=>this._onMouseWheel(a)),this._resetBtn.addEventListener('click',(a)=>this._onReset(a)),this._scroller.addEventListener('mousedown',(a)=>{a.target.closest('input[type=range]')||a.preventDefault()})})}_onMouseWheel(a){a.deltaMode||(a.preventDefault(),a.currentTarget.scrollTop+=a.deltaY)}_onChange(a){clearTimeout(this._throttleTimeout),'range'==a.target.type?this._throttleTimeout=setTimeout(()=>this.emit('change'),150):this.emit('change')}_onReset(){this._resetRipple.animate();const a=this.getSettings();for(const a of this._globalInputs)'checkbox'==a.type?a.checked=a.hasAttribute('checked'):'range'==a.type&&(this._sliderMap.get(a).value=a.getAttribute('value'));for(const a of this._pluginInputs)a.checked=a.hasAttribute('checked');this.emit('reset',a),this.emit('change')}setSettings(a){for(const b of this._globalInputs)b.name in a&&('checkbox'==b.type?b.checked=a[b.name]:'range'==b.type&&(this._sliderMap.get(b).value=a[b.name]));for(const b of this._pluginInputs)b.name in a.plugins&&(b.checked=a.plugins[b.name])}getSettings(){const a=[],b={plugins:{}};return this._globalInputs.forEach((c)=>{'gzip'!=c.name&&'original'!=c.name&&('checkbox'==c.type?a.push(+c.checked):a.push('|'+c.value+'|')),b[c.name]='checkbox'==c.type?c.checked:c.value}),this._pluginInputs.forEach((c)=>{a.push(+c.checked),b.plugins[c.name]=c.checked}),b.fingerprint=a.join(),b}}class Z extends m{constructor(){super(),this.allowHide=!1,this._spinner=new 
P,A.then(()=>{this.container=document.querySelector('.main-menu'),this._loadFileInput=document.querySelector('.load-file-input'),this._pasteInput=document.querySelector('.paste-input'),this._loadDemoBtn=document.querySelector('.load-demo'),this._loadFileBtn=document.querySelector('.load-file'),this._pasteLabel=document.querySelector('.menu-input'),this._overlay=this.container.querySelector('.overlay'),this._menu=this.container.querySelector('.menu'),document.querySelector('.menu-btn').addEventListener('click',(a)=>this._onMenuButtonClick(a)),this._overlay.addEventListener('click',(a)=>this._onOverlayClick(a)),this._loadFileBtn.addEventListener('click',(a)=>this._onLoadFileClick(a)),this._loadDemoBtn.addEventListener('click',(a)=>this._onLoadDemoClick(a)),this._loadFileInput.addEventListener('change',(a)=>this._onFileInputChange(a)),this._pasteInput.addEventListener('input',(a)=>this._onTextInputChange(a))})}show(){this.container.classList.remove('hidden'),E(this._overlay,'hidden'),E(this._menu,'hidden')}hide(){this.allowHide&&(this.stopSpinner(),this.container.classList.add('hidden'),D(this._overlay,'hidden'),D(this._menu,'hidden'))}stopSpinner(){this._spinner.hide()}showFilePicker(){this._loadFileInput.click()}_onOverlayClick(a){a.preventDefault(),this.hide()}_onMenuButtonClick(a){a.preventDefault(),this.show()}_onTextInputChange(){const a=this._pasteInput.value.trim();a.includes('</svg>')&&(this._pasteInput.value='',this._pasteInput.blur(),this._pasteLabel.appendChild(this._spinner.container),this._spinner.show(),this.emit('svgDataLoad',{data:a,filename:'image.svg'}))}_onLoadFileClick(a){a.preventDefault(),a.target.blur(),this.showFilePicker()}async _onFileInputChange(){const a=this._loadFileInput.files[0];a&&(this._loadFileBtn.appendChild(this._spinner.container),this._spinner.show(),this.emit('svgDataLoad',{data:await d(a),filename:a.name}))}async _onLoadDemoClick(a){a.preventDefault(),a.target.blur(),this._loadDemoBtn.appendChild(this._spinner.container),this._spinner.show();try{this.emit('svgDataLoad',{data:await fetch('test-svgs/car-lite.svg').then((a)=>a.text()),filename:'car-lite.svg'})}catch(a){{this.stopSpinner();let a;a='serviceWorker'in navigator&&navigator.serviceWorker.controller?Error('Demo not available offline'):Error('Couldn\'t fetch demo SVG'),this.emit('error',{error:a})}}}}class ${constructor(b,c,d){this.container=a('<div class="toast"><div class="toast-content"></div></div>'),this._content=this.container.querySelector('.toast-content'),this._content.textContent=b,this._answerResolve,this._hideTimeout,this.answer=new Promise((a)=>this._answerResolve=a),d.forEach((a)=>{var b=document.createElement('button');b.className='unbutton',b.textContent=a,b.addEventListener('click',()=>{this._answerResolve(a)}),this.container.appendChild(b)}),c&&(this._hideTimeout=setTimeout(()=>this.hide(),c))}hide(){return clearTimeout(this._hideTimeout),this._answerResolve(),D(this.container,'hide')}}class _{constructor(){this.container=a('<div class=\'toasts\'></div>')}show(a,{duration:b=0,buttons:c=['dismiss']}={}){const d=new $(a,b,c);return this.container.appendChild(d.container),d.answer.then(()=>d.hide()).then(()=>{this.container.removeChild(d.container)}),d}}class aa extends m{constructor(){super(),this.container=a('<div class="drop-overlay">Drop 
it!</div>'),this._activeEnters=0,this._currentEnteredElement=null,A.then(()=>{document.addEventListener('dragover',(a)=>a.preventDefault()),document.addEventListener('dragenter',(a)=>this._onDragEnter(a)),document.addEventListener('dragleave',(a)=>this._onDragLeave(a)),document.addEventListener('drop',(a)=>this._onDrop(a))})}_onDragEnter(a){this._currentEnteredElement==a.target||(this._currentEnteredElement=a.target,!this._activeEnters++&&D(this.container))}_onDragLeave(){this._currentEnteredElement=null,--this._activeEnters||E(this.container)}async _onDrop(a){a.preventDefault(),this._activeEnters=0,E(this.container);const b=a.dataTransfer.files[0];b&&this.emit('svgDataLoad',{data:await d(b),filename:b.name})}}class ba{constructor(){A.then(()=>{this.container=document.querySelector('.preloader'),this.activated=this.container.classList.contains('active'),this.hide()})}async hide(){await E(this.container,'active'),this.container.style.display='none'}}class ca{constructor(b){this.container=a('<section class="changelog"></section>'),this._loadedVersion=b}async showLogFrom(b){if(b==this._loadedVersion)return;const d=await fetch('changelog.json').then((a)=>a.json());let e=0,f=0;for(var g=0;g<d.length;g++){const a=d[g];if(a.version===this._loadedVersion)e=g;else if(a.version===b)break;f=g+1}const h=d.slice(e,f).reduce((a,b)=>a.concat(b.changes),[]).map((a)=>c`<li>${a}</li>`);this.container.appendChild(a('<h1>Updated!</h1>')),this.container.appendChild(a('<ul>'+h.join('')+'</ul>')),await A,D(this.container)}}class da{constructor(a){this._results=a,A.then(()=>{this._mobileContainer=document.querySelector('.results-container-mobile'),this._container=document.querySelector('.results-container'),this._query=matchMedia('(min-width: 640px)'),this._query.addListener(()=>this._positionResults()),this._positionResults()})}_positionResults(){this._query.matches?this._container.appendChild(this._results.container):this._mobileContainer.appendChild(this._results.container)}}class ea extends m{constructor(){super(),this.container=null,A.then(()=>{this.container=document.querySelector('.view-toggler'),this.container.output[0].checked=!0,this.container.addEventListener('change',(a)=>this._onChange(a))})}_onChange(){let a=this.container.output.value;a||(a=Array.from(this.container.output).reduce((a,b)=>a||(b.checked?b.value:''),'')),this.emit('change',{value:a})}}class fa{constructor(a){this._size=a,this.purge()}purge(){this._fingerprints=[],this._items=[],this._index=0}add(a,b){const c=this._items[this._index];c&&c.release(),this._fingerprints[this._index]=a,this._items[this._index]=b,this._index=(this._index+1)%this._size}match(a){return this._items[this._fingerprints.indexOf(a)]}}class ga{constructor(...a){this._activated=!1,this._toActivate=a}activate(){if(!this._activated)return this._activated=!0,Promise.all(this._toActivate.map((a)=>D(a)))}}const ha=new class extends G{constructor(){super('js/svgo-worker.js'),this._abortOnNextIteration=!1,this._currentJob=Promise.resolve()}async load(a){const{width:b,height:c}=await this._requestResponse({action:'load',data:a});return new I(a,b,c)}process(a,b){return this._currentJob=this.abortCurrent().then(async()=>{this._abortOnNextIteration=!1;let c=await this._requestResponse({action:'process',settings:a});var d=new I(c.data,c.dimensions.width,c.dimensions.height);if(b(d),a.multipass)for(;c=await this.nextPass();){if(this._abortOnNextIteration)throw Error('abort');d=new I(c.data,c.dimensions.width,c.dimensions.height),b(d)}return d})}nextPass(){return 
this._requestResponse({action:'nextPass'})}async abortCurrent(){this._abortOnNextIteration=!0,await this._currentJob}async release(){await this.abortCurrent(),super.release()}};(function(){var a='mouse';document.body.addEventListener('focus',(b)=>{b.target.classList.add('key'==a?'key-focused':'mouse-focused')},!0),document.body.addEventListener('blur',(a)=>{a.target.classList.remove('key-focused'),a.target.classList.remove('mouse-focused')},!0),document.body.addEventListener('keydown',()=>{a='key'},!0),document.body.addEventListener('mousedown',()=>{a='mouse'},!0)})(),new class{constructor(){this._container=null,this._mainUi=null,this._outputUi=new N,this._downloadButtonUi=new R,this._copyButtonUi=new T,this._bgFillUi=new U,this._resultsUi=new V,this._settingsUi=new Y,this._mainMenuUi=new Z,this._toastsUi=new _,this._dropUi=new aa,this._preloaderUi=new ba,this._changelogUi=new ca(self.version),this._resultsContainerUi=new da(this._resultsUi),this._viewTogglerUi=new ea,this._settingsUi.on('change',()=>this._onSettingsChange()),this._settingsUi.on('reset',(a)=>this._onSettingsReset(a)),this._mainMenuUi.on('svgDataLoad',(a)=>this._onInputChange(a)),this._dropUi.on('svgDataLoad',(a)=>this._onInputChange(a)),this._mainMenuUi.on('error',({error:a})=>this._handleError(a)),this._viewTogglerUi.on('change',(a)=>this._onViewSelectionChange(a)),window.addEventListener('keydown',(a)=>this._onGlobalKeyDown(a)),this._inputItem=null,this._cache=new fa(10),this._latestCompressJobId=0,this._userHasInteracted=!1,this._reloading=!1,'serviceWorker'in navigator&&navigator.serviceWorker.register('sw.js',{scope:'./'}).then((a)=>{a.addEventListener('updatefound',()=>this._onUpdateFound(a))}),F.get('last-seen-version').then((a)=>{a&&this._changelogUi.showLogFrom(a),F.set('last-seen-version',self.version)}),A.then(()=>{this._container=document.querySelector('.app-output'),this._mainUi=new ga(document.querySelector('.toolbar'),document.querySelector('.action-button-container'),this._outputUi.container,this._settingsUi.container);const a=document.querySelector('.action-button-container'),b=document.querySelector('.minor-action-container');b.appendChild(this._bgFillUi.container),S&&b.appendChild(this._copyButtonUi.container),a.appendChild(this._downloadButtonUi.container),document.querySelector('.output').appendChild(this._outputUi.container),this._container.appendChild(this._toastsUi.container),this._container.appendChild(this._dropUi.container),document.querySelector('.menu-extra').appendChild(this._changelogUi.container),this._loadSettings(),this._preloaderUi.activated&&this._toastsUi.show('Ready now!',{duration:3e3})})}_onGlobalKeyDown(a){'o'===a.key&&(a.ctrlKey||a.metaKey)&&(a.preventDefault(),this._mainMenuUi.showFilePicker())}_onViewSelectionChange(a){this._outputUi.set(a.value)}_onUpdateFound(a){const b=a.installing;a.installing.addEventListener('statechange',async()=>{if(!this._reloading){if('activated'==b.state&&!navigator.serviceWorker.controller)return void this._toastsUi.show('Ready to work offline',{duration:5e3});if('activated'==b.state&&navigator.serviceWorker.controller){if(!this._userHasInteracted)return this._reloading=!0,void location.reload();const a=this._toastsUi.show('Update available',{buttons:['reload','dismiss']}),b=await a.answer;'reload'==b&&(this._reloading=!0,location.reload())}}})}_onSettingsChange(){const a=this._settingsUi.getSettings();this._saveSettings(a),this._compressSvg(a)}async _onSettingsReset(a){const b=this._toastsUi.show('Settings 
reset',{buttons:['undo','dismiss'],duration:5e3});'undo'===(await b.answer)&&(this._settingsUi.setSettings(a),this._onSettingsChange())}async _onInputChange(a){const b=this._settingsUi.getSettings();this._userHasInteracted=!0;try{this._inputItem=await ha.load(a.data),this._inputFilename=a.filename}catch(a){{const b=new Error('Load failed: '+a.message);return this._mainMenuUi.stopSpinner(),void this._handleError(b)}}this._cache.purge();let c=!0;const d=()=>{c&&(this._outputUi.reset(),this._mainUi.activate(),this._mainMenuUi.allowHide=!0,this._mainMenuUi.hide(),c=!1)};this._compressSvg(b,()=>d()),c&&d()}_handleError(a){this._toastsUi.show(a.message),console.error(a)}async _loadSettings(){const a=await F.get('settings');a&&this._settingsUi.setSettings(a)}_saveSettings(a){const b=Object.assign({},a);delete b.original,F.set('settings',b)}async _compressSvg(a,b=function(){}){const c=this._latestCompressJobId=Math.random();if(await ha.abortCurrent(),c==this._latestCompressJobId){if(a.original)return void this._updateForFile(this._inputItem,{compress:a.gzip});const c=this._cache.match(a.fingerprint);if(c)return void this._updateForFile(c,{compareToFile:this._inputItem,compress:a.gzip});this._downloadButtonUi.working();try{const c=await ha.process(a,(c)=>{b(c),this._updateForFile(c,{compareToFile:this._inputItem,compress:a.gzip})});this._cache.add(a.fingerprint,c)}catch(a){if('abort'==a.message)return;a.message='Minifying error: '+a.message,this._handleError(a)}finally{this._downloadButtonUi.done()}}}async _updateForFile(a,{compareToFile:b,compress:c}){this._outputUi.update(a),this._downloadButtonUi.setDownload(this._inputFilename,a),this._copyButtonUi.setCopyText(a.text),this._resultsUi.update({comparisonSize:b&&(await b.size({compress:c})),size:await a.size({compress:c})})}}})();
//# sourceMappingURL=page.js.map | |
Upload64View.tsx | import * as React from 'react'
import { Helmet } from 'react-helmet'
import { connect } from 'react-redux'
import { resolve } from 'url'
import { ScreenSize } from '../../reducers/mediaQuery'
import { setCoursesSelf64, deleteCourseSelf64, deleteCourseUploaded64 } from '../../actions'
import { Course64Panel } from '../panels/Course64Panel'
import { ProgressPanel } from '../panels/ProgressPanel'
import { UploadArea } from '../areas/UploadArea'
const LIMIT = 10
class Upload64View extends React.PureComponent<any, any> {
public constructor (props: any) {
super(props)
this.fetchCourses = this.fetchCourses.bind(this)
this.renderCourses = this.renderCourses.bind(this)
this.onCourseDelete = this.onCourseDelete.bind(this)
this.onCourseDeleteRecent = this.onCourseDeleteRecent.bind(this)
this.handleScroll = this.handleScroll.bind(this)
}
// eslint-disable-next-line | public UNSAFE_componentWillMount (): void {
if (process.env.IS_SERVER) return
this.props.setFetchCourses(this.fetchCourses)
if (this.props.accountData.get('id')) {
this.fetchCourses()
}
}
// eslint-disable-next-line
public UNSAFE_componentWillReceiveProps (nextProps: any): any {
if (nextProps.accountData === this.props.accountData || !nextProps.accountData.get('id')) return
this.fetchCourses(false, LIMIT, nextProps)
}
private async fetchCourses (shouldConcat = false, limit = LIMIT, props = this.props): Promise<void> {
const accountData = props.accountData
if (!accountData.get('id')) return
try {
const apiKey = accountData.get('apikey')
const response = await fetch(resolve(process.env.DOMAIN || '', `/api/getcourses64?uploader=${accountData.get('username')}&limit=${limit}&start=${shouldConcat ? this.props.courses.size : 0}`), {
headers: {
'Authorization': `APIKEY ${apiKey}`
}
})
if (!response.ok) throw new Error(response.statusText)
const courses = await response.json()
if (courses && courses.length > 0) {
props.dispatch(setCoursesSelf64(courses, shouldConcat))
}
} catch (err) {
console.error(err)
}
}
private renderCourses (uploaded = false): JSX.Element[] {
const courses = uploaded ? this.props.uploads.toList().merge(this.props.uploadedCourses) : this.props.courses
const accountData = this.props.accountData
const onCourseDelete = uploaded ? this.onCourseDeleteRecent : this.onCourseDelete
return Array.from((function * (): IterableIterator<JSX.Element> {
let i = 0
for (let course of courses) {
const courseId = course.get('id')
if (course.get('eta') != null) {
yield (
<ProgressPanel course={course} key={courseId} />
)
} else {
yield (
<Course64Panel
key={courseId}
canEdit isSelf uploaded={uploaded} course={course}
apiKey={accountData.get('apikey')} id={i} onCourseDelete={onCourseDelete}
/>
)
}
i++
}
})())
}
private onCourseDelete (courseId: any): void {
this.props.dispatch(deleteCourseSelf64(courseId))
}
private onCourseDeleteRecent (courseId: any): void {
this.props.dispatch(deleteCourseUploaded64(courseId))
}
private handleScroll (e: any): void {
this.props.shouldUpdate(e.target)
}
public render (): JSX.Element {
const { screenSize } = this.props
const accountData = this.props.accountData.toJS()
const uploads = this.props.uploads.toList().toJS()
const uploadedCourses = this.props.uploadedCourses.toJS()
const styles: any = {
main: {
height: '100%',
display: 'flex',
flexDirection: 'column',
alignItems: 'center'
},
upload: {
maxWidth: screenSize < ScreenSize.MEDIUM ? '100%' : 'calc(100% - 120px)',
overflowY: screenSize >= ScreenSize.MEDIUM ? 'auto' : '',
zIndex: '10',
flex: '1',
color: '#fff'
},
flex: {
overflow: 'hidden',
display: screenSize >= ScreenSize.MEDIUM ? 'flex' : 'block',
flexDirection: screenSize >= ScreenSize.MEDIUM ? 'column' : '',
height: 'auto',
alignItems: 'center'
},
line: {
height: '5px',
backgroundColor: '#000',
margin: '10px 0'
},
text: {
height: 'auto',
display: 'block',
top: '50%',
transform: 'translateY(-50%)'
},
content: {
display: 'flex',
flexWrap: 'wrap',
justifyContent: 'space-around',
alignItems: 'flex-start',
color: '#000',
fontSize: '15px',
height: 'auto'
}
}
const content =
<div style={styles.content}>
{
(uploadedCourses.length > 0 || uploads.length > 0) &&
<div style={styles.content}>
<div style={{display: 'block', height: 'auto'}}>Recently uploaded:</div>
{
this.renderCourses(true)
}
<div style={styles.line} />
All uploads:
</div>
}
{
this.renderCourses()
}
</div>
return (
<div style={styles.main}>
<Helmet>
<title>SMMDB - Uploads 64</title>
</Helmet>
<div style={styles.upload} id='scroll' onScroll={this.handleScroll}>
{
accountData.id ? (
<div style={styles.flex}>
<UploadArea is64 />
<div>
{ content }
</div>
</div>
) : (
<div style={styles.text}>You are not logged in</div>
)
}
</div>
</div>
)
}
}
export default connect((state: any): any => ({
screenSize: state.getIn(['mediaQuery', 'screenSize']),
accountData: state.getIn(['userData', 'accountData']),
courses: state.getIn(['courseData', 'self64']),
uploads: state.get('uploads64'),
uploadedCourses: state.getIn(['courseData', 'uploaded64'])
}))(Upload64View) as any | |
home.js | var managment_View = require('managment_View');
var qr = require('qrLibrary');
var utils = require('utils');
show();
/* ***********************************************************
* Public functions
* ***********************************************************/
/* ***********************************************************
* Private functions
* ***********************************************************/
function | (){
    // Add the current container to the navigation object
Alloy.Globals.ActualContainer = $.viewHome;
qr.createContentQR_READER($.containerHome);
    // Change the header title
var evtData = {
title: L('text_9')
};
Ti.App.fireEvent('changeHeaderTitle', evtData);
Alloy.Globals.ActualSection = 'home';
Ti.App.fireEvent('changeSection');
Ti.App.fireEvent('closeLoading');
}
| show |
models.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""Created with Pycharm IDEA
@Create on 2015/9/12 16:31
@my_story models.py
@author : OmegaMiao"""
from app import db, loginManager
from datetime import datetime
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin  # the flask.ext.* import paths were removed in Flask 1.0
class Story(db.Model):
__tablename__ = 'story'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(30), nullable=False)
content = db.Column(db.String(500), nullable=False)
    create_time = db.Column(db.DateTime, default=datetime.now)  # pass the callable so each insert gets its own timestamp; the old strftime call returned a str evaluated once at import time
category_id = db.Column(db.Integer, db.ForeignKey('category.id'))
author_id = db.Column(db.Integer, db.ForeignKey('author.id'))
def __init__(self, title, content):
self.title = title
self.content = content
def __repr__(self):
return "<Story %r title %r>" % (self.id, self.title)
def to_json(self):
return {
"id": self.id,
"title": self.title,
"content": self.content,
"create_time": self.create_time.strftime('%Y-%m-%d %H:%M:%S')
}
class Category(db.Model):
__tablename__ = 'category'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(20), nullable=False, unique=True)
storys = db.relationship('Story', backref='category', lazy='joined')
def __init__(self, name): | return "<Category %r name %r>" % (self.id, self.name)
class Author(db.Model):
__tablename__ = 'author'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(20), nullable=False)
nick_name = db.Column(db.String(20), nullable=False, unique=True)
storys = db.relationship('Story', backref='author', lazy='joined')
def __init__(self, name, nick_name):
self.name = name
self.nick_name = nick_name
def __repr__(self):
return "<Author id: %r Name: %r nickName:%r>" % (self.id, self.name, self.nick_name)
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(64), unique=True, index=True)
username = db.Column(db.String(64), unique=True, index=True)
password_hash = db.Column(db.String(128))
role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
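    # Illustrative usage (not part of the model): `user.password = 'secret'`
    # stores only generate_password_hash('secret') in password_hash; reading
    # `user.password` then raises AttributeError, while
    # user.verify_password('secret') returns True.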
def __repr__(self):
return '<User %r>' % self.username
class Role(db.Model):
__tablename__ = 'roles'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
def __repr__(self):
return '<Role %r>' % self.name
@loginManager.user_loader
def load_user(user_id):
return User.query.get(int(user_id)) | self.name = name
def __repr__(self): |
main.rs | use std::error::Error;
use std::path::{Path, PathBuf};
use yaml_rust::{Yaml, YamlEmitter, YamlLoader};
/// List of directories containing files to expand. The first tuple element is the source
/// directory, while the second tuple element is the destination directory.
#[rustfmt::skip]
static TO_EXPAND: &[(&str, &str)] = &[
("src/ci/github-actions", ".github/workflows"),
];
/// Name of a special key that will be removed from all the maps in expanded configuration files.
/// This key can then be used to contain shared anchors.
static REMOVE_MAP_KEY: &str = "x--expand-yaml-anchors--remove";
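// Illustrative sketch (not taken from the real workflow sources): a source
// file can stash shared anchors under the removed key and merge them in
// elsewhere:
//
//     x--expand-yaml-anchors--remove:
//       - &shared-env
//         CI: "true"
//     jobs:
//       linux:
//         env:
//           <<: *shared-env
//
// The YAML parser resolves the anchors, `yaml_merge_keys` (used in `expand`
// below) resolves the `<<` merge keys, and `filter_document` then strips the
// special key from the emitted output.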
/// Message that will be included at the top of all the expanded files. {source} will be replaced
/// with the source filename relative to the base path.
static HEADER_MESSAGE: &str = "\
#############################################################
# WARNING: automatically generated file, DO NOT CHANGE! #
#############################################################
# This file was automatically generated by the expand-yaml-anchors tool. The
# source file that generated this one is:
#
# {source}
#
# Once you make changes to that file you need to run:
#
# ./x.py run src/tools/expand-yaml-anchors/
#
# The CI build will fail if the tool is not run after changes to this file.
";
enum Mode {
Check,
Generate,
}
struct App {
mode: Mode,
base: PathBuf,
}
impl App {
fn from_args() -> Result<Self, Box<dyn Error>> {
// Parse CLI arguments
let args = std::env::args().skip(1).collect::<Vec<_>>();
let (mode, base) = match args.iter().map(|s| s.as_str()).collect::<Vec<_>>().as_slice() {
["generate", ref base] => (Mode::Generate, PathBuf::from(base)),
["check", ref base] => (Mode::Check, PathBuf::from(base)),
_ => {
eprintln!("usage: expand-yaml-anchors <source-dir> <dest-dir>");
std::process::exit(1);
}
};
Ok(App { mode, base })
}
fn run(&self) -> Result<(), Box<dyn Error>> {
for (source, dest) in TO_EXPAND {
let source = self.base.join(source);
let dest = self.base.join(dest);
for entry in std::fs::read_dir(&source)? {
let path = entry?.path();
if !path.is_file() || path.extension().and_then(|e| e.to_str()) != Some("yml") {
continue;
}
let dest_path = dest.join(path.file_name().unwrap());
self.expand(&path, &dest_path).with_context(|| match self.mode {
Mode::Generate => format!(
"failed to expand {} into {}",
self.path(&path),
self.path(&dest_path)
),
Mode::Check => format!("{} is not up to date", self.path(&dest_path)),
})?;
}
}
Ok(())
}
fn expand(&self, source: &Path, dest: &Path) -> Result<(), Box<dyn Error>> {
let content = std::fs::read_to_string(source)
.with_context(|| format!("failed to read {}", self.path(source)))?;
let mut buf = HEADER_MESSAGE.replace("{source}", &self.path(source).to_string());
let documents = YamlLoader::load_from_str(&content)
.with_context(|| format!("failed to parse {}", self.path(source)))?;
for mut document in documents.into_iter() {
document = yaml_merge_keys::merge_keys(document)
.with_context(|| format!("failed to expand {}", self.path(source)))?;
document = filter_document(document);
YamlEmitter::new(&mut buf).dump(&document).map_err(|err| WithContext {
context: "failed to serialize the expanded yaml".into(),
source: Box::new(err),
})?;
buf.push('\n');
}
match self.mode {
Mode::Check => {
let old = std::fs::read_to_string(dest)
.with_context(|| format!("failed to read {}", self.path(dest)))?;
if old != buf {
return Err(Box::new(StrError(format!(
"{} and {} are different",
self.path(source),
self.path(dest),
))));
}
}
Mode::Generate => {
std::fs::write(dest, buf.as_bytes())
.with_context(|| format!("failed to write to {}", self.path(dest)))?;
}
}
Ok(())
}
fn path<'a>(&self, path: &'a Path) -> impl std::fmt::Display + 'a |
}
fn filter_document(document: Yaml) -> Yaml {
match document {
Yaml::Hash(map) => Yaml::Hash(
map.into_iter()
.filter(|(key, _)| {
if let Yaml::String(string) = &key { string != REMOVE_MAP_KEY } else { true }
})
.map(|(key, value)| (filter_document(key), filter_document(value)))
.collect(),
),
Yaml::Array(vec) => Yaml::Array(vec.into_iter().map(filter_document).collect()),
other => other,
}
}
fn main() {
if let Err(err) = App::from_args().and_then(|app| app.run()) {
eprintln!("error: {}", err);
let mut source = err.as_ref() as &dyn Error;
while let Some(err) = source.source() {
eprintln!("caused by: {}", err);
source = err;
}
std::process::exit(1);
}
}
#[derive(Debug)]
struct StrError(String);
impl Error for StrError {}
impl std::fmt::Display for StrError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.0, f)
}
}
#[derive(Debug)]
struct WithContext {
context: String,
source: Box<dyn Error>,
}
impl std::fmt::Display for WithContext {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.context)
}
}
impl Error for WithContext {
fn source(&self) -> Option<&(dyn Error + 'static)> {
Some(self.source.as_ref())
}
}
pub(crate) trait ResultExt<T> {
fn with_context<F: FnOnce() -> String>(self, f: F) -> Result<T, Box<dyn Error>>;
}
impl<T, E: Into<Box<dyn Error>>> ResultExt<T> for Result<T, E> {
fn with_context<F: FnOnce() -> String>(self, f: F) -> Result<T, Box<dyn Error>> {
match self {
Ok(ok) => Ok(ok),
Err(err) => Err(WithContext { source: err.into(), context: f() }.into()),
}
}
}
| {
path.strip_prefix(&self.base).unwrap_or(path).display()
} |
presenters.go | // Package presenters allow for the specification and result
// of a Job, its associated TaskSpecs, and every JobRun and TaskRun
// to be returned in a user friendly human readable format.
package presenters
import (
"bytes"
"encoding/json"
"fmt"
"math/big"
"net/url"
"reflect"
"strconv"
"strings"
"time"
"github.com/smartcontractkit/chainlink/core/assets"
"github.com/smartcontractkit/chainlink/core/auth"
"github.com/smartcontractkit/chainlink/core/logger"
"github.com/smartcontractkit/chainlink/core/services/synchronization"
"github.com/smartcontractkit/chainlink/core/store"
"github.com/smartcontractkit/chainlink/core/store/models"
"github.com/smartcontractkit/chainlink/core/store/orm"
"github.com/smartcontractkit/chainlink/core/utils"
"github.com/ethereum/go-ethereum/accounts"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/common/hexutil"
"github.com/tidwall/gjson"
"gopkg.in/guregu/null.v3"
)
// ETHKey holds the hex representation of the address plus its ETH & LINK balances
type ETHKey struct {
Address string `json:"address"`
EthBalance *assets.Eth `json:"ethBalance"`
LinkBalance *assets.Link `json:"linkBalance"`
NextNonce *int64 `json:"nextNonce"`
LastUsed *time.Time `json:"lastUsed"`
IsFunding bool `json:"isFunding"`
CreatedAt time.Time `json:"createdAt"`
UpdatedAt time.Time `json:"updatedAt"`
DeletedAt null.Time `json:"deletedAt"`
}
// GetID returns the ID of this structure for jsonapi serialization.
func (k ETHKey) GetID() string {
return k.Address
}
// SetID is used to set the ID of this structure when deserializing from jsonapi documents.
func (k *ETHKey) SetID(value string) error {
k.Address = value
return nil
}
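// Illustrative only: with a jsonapi encoder (assumed here to be
// manyminds/api2go/jsonapi, which this codebase uses for serialization),
// GetID/SetID let an ETHKey round-trip as a resource whose "id" is the
// hex address, e.g.
//
//	payload, err := jsonapi.Marshal(ETHKey{Address: "0xDEADbeef..."})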
// ConfigPrinter holds the non-secret values of the node.
//
// If you add an entry here, you should update NewConfigPrinter and
// ConfigPrinter#String accordingly (see the illustrative note after the
// struct).
type ConfigPrinter struct {
AccountAddress string `json:"accountAddress"`
EnvPrinter
}
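// Illustrative only (an assumption, not code from this package): adding a
// hypothetical field
//
//	SomeNewFlag bool `json:"someNewFlag"`
//
// to EnvPrinter would also mean populating it in NewConfigPrinter, e.g.
// SomeNewFlag: config.SomeNewFlag(), and rendering it in ConfigPrinter#String.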
// EnvPrinter contains the supported environment variables
type EnvPrinter struct {
AllowOrigins string `json:"allowOrigins"`
BalanceMonitorEnabled bool `json:"balanceMonitorEnabled"`
BlockBackfillDepth uint64 `json:"blockBackfillDepth"`
BridgeResponseURL string `json:"bridgeResponseURL,omitempty"`
ChainID *big.Int `json:"ethChainId"`
ClientNodeURL string `json:"clientNodeUrl"`
DatabaseTimeout models.Duration `json:"databaseTimeout"`
DatabaseMaximumTxDuration time.Duration `json:"databaseMaximumTxDuration"`
DefaultHTTPLimit int64 `json:"defaultHttpLimit"`
DefaultHTTPTimeout models.Duration `json:"defaultHttpTimeout"`
Dev bool `json:"chainlinkDev"`
EnableExperimentalAdapters bool `json:"enableExperimentalAdapters"`
EthBalanceMonitorBlockDelay uint16 `json:"ethBalanceMonitorBlockDelay"`
EthereumDisabled bool `json:"ethereumDisabled"`
EthFinalityDepth uint `json:"ethFinalityDepth"`
EthGasBumpThreshold uint64 `json:"ethGasBumpThreshold"`
EthGasBumpTxDepth uint16 `json:"ethGasBumpTxDepth"`
EthGasBumpWei *big.Int `json:"ethGasBumpWei"`
EthGasLimitDefault uint64 `json:"ethGasLimitDefault"`
EthGasPriceDefault *big.Int `json:"ethGasPriceDefault"`
EthHeadTrackerHistoryDepth uint `json:"ethHeadTrackerHistoryDepth"`
EthHeadTrackerMaxBufferSize uint `json:"ethHeadTrackerMaxBufferSize"`
EthMaxGasPriceWei *big.Int `json:"ethMaxGasPriceWei"`
EthereumURL string `json:"ethUrl"`
EthereumSecondaryURLs []url.URL `json:"ethSecondaryURLs"`
ExplorerURL string `json:"explorerUrl"`
FeatureExternalInitiators bool `json:"featureExternalInitiators"`
FeatureFluxMonitor bool `json:"featureFluxMonitor"`
FeatureOffchainReporting bool `json:"featureOffchainReporting"`
FlagsContractAddress string `json:"flagsContractAddress"`
GasUpdaterBlockDelay uint16 `json:"gasUpdaterBlockDelay"`
GasUpdaterBlockHistorySize uint16 `json:"gasUpdaterBlockHistorySize"`
GasUpdaterEnabled bool `json:"gasUpdaterEnabled"`
GasUpdaterTransactionPercentile uint16 `json:"gasUpdaterTransactionPercentile"`
InsecureFastScrypt bool `json:"insecureFastScrypt"`
TriggerFallbackDBPollInterval time.Duration `json:"jobPipelineDBPollInterval"`
JobPipelineMaxTaskDuration time.Duration `json:"jobPipelineMaxTaskDuration"`
JobPipelineParallelism uint8 `json:"jobPipelineParallelism"`
JobPipelineReaperInterval time.Duration `json:"jobPipelineReaperInterval"`
JobPipelineReaperThreshold time.Duration `json:"jobPipelineReaperThreshold"`
JSONConsole bool `json:"jsonConsole"`
LinkContractAddress string `json:"linkContractAddress"`
LogLevel orm.LogLevel `json:"logLevel"`
LogSQLMigrations bool `json:"logSqlMigrations"`
LogSQLStatements bool `json:"logSqlStatements"`
LogToDisk bool `json:"logToDisk"`
MaximumServiceDuration models.Duration `json:"maximumServiceDuration"`
MinIncomingConfirmations uint32 `json:"minIncomingConfirmations"`
MinRequiredOutgoingConfirmations uint64 `json:"minOutgoingConfirmations"`
MinimumServiceDuration models.Duration `json:"minimumServiceDuration"`
MinimumContractPayment *assets.Link `json:"minimumContractPayment"`
MinimumRequestExpiration uint64 `json:"minimumRequestExpiration"`
OCRBootstrapCheckInterval time.Duration `json:"ocrBootstrapCheckInterval"`
OCRContractTransmitterTransmitTimeout time.Duration `json:"ocrContractTransmitterTransmitTimeout"`
OCRDatabaseTimeout time.Duration `json:"ocrDatabaseTimeout"`
P2PListenIP string `json:"ocrListenIP"`
P2PListenPort uint16 `json:"ocrListenPort"`
OCRIncomingMessageBufferSize int `json:"ocrIncomingMessageBufferSize"`
OCROutgoingMessageBufferSize int `json:"ocrOutgoingMessageBufferSize"`
OCRNewStreamTimeout time.Duration `json:"ocrNewStreamTimeout"`
OCRDHTLookupInterval int `json:"ocrDHTLookupInterval"`
OCRTraceLogging bool `json:"ocrTraceLogging"`
OperatorContractAddress common.Address `json:"oracleContractAddress"`
Port uint16 `json:"chainlinkPort"`
ReaperExpiration models.Duration `json:"reaperExpiration"`
ReplayFromBlock int64 `json:"replayFromBlock"`
RootDir string `json:"root"`
SecureCookies bool `json:"secureCookies"`
SessionTimeout models.Duration `json:"sessionTimeout"`
TLSHost string `json:"chainlinkTLSHost"`
TLSPort uint16 `json:"chainlinkTLSPort"`
TLSRedirect bool `json:"chainlinkTLSRedirect"`
TxAttemptLimit uint16 `json:"txAttemptLimit"`
}
// NewConfigPrinter creates an instance of ConfigPrinter
func | (store *store.Store) (ConfigPrinter, error) {
config := store.Config
explorerURL := ""
if config.ExplorerURL() != nil {
explorerURL = config.ExplorerURL().String()
}
return ConfigPrinter{
EnvPrinter: EnvPrinter{
AllowOrigins: config.AllowOrigins(),
BalanceMonitorEnabled: config.BalanceMonitorEnabled(),
BlockBackfillDepth: config.BlockBackfillDepth(),
BridgeResponseURL: config.BridgeResponseURL().String(),
ChainID: config.ChainID(),
ClientNodeURL: config.ClientNodeURL(),
DatabaseTimeout: config.DatabaseTimeout(),
DefaultHTTPLimit: config.DefaultHTTPLimit(),
DefaultHTTPTimeout: config.DefaultHTTPTimeout(),
DatabaseMaximumTxDuration: config.DatabaseMaximumTxDuration(),
Dev: config.Dev(),
EnableExperimentalAdapters: config.EnableExperimentalAdapters(),
EthBalanceMonitorBlockDelay: config.EthBalanceMonitorBlockDelay(),
EthereumDisabled: config.EthereumDisabled(),
EthFinalityDepth: config.EthFinalityDepth(),
EthGasBumpThreshold: config.EthGasBumpThreshold(),
EthGasBumpTxDepth: config.EthGasBumpTxDepth(),
EthGasBumpWei: config.EthGasBumpWei(),
EthGasLimitDefault: config.EthGasLimitDefault(),
EthGasPriceDefault: config.EthGasPriceDefault(),
EthHeadTrackerHistoryDepth: config.EthHeadTrackerHistoryDepth(),
EthHeadTrackerMaxBufferSize: config.EthHeadTrackerMaxBufferSize(),
EthMaxGasPriceWei: config.EthMaxGasPriceWei(),
EthereumURL: config.EthereumURL(),
EthereumSecondaryURLs: config.EthereumSecondaryURLs(),
ExplorerURL: explorerURL,
FeatureExternalInitiators: config.FeatureExternalInitiators(),
FeatureFluxMonitor: config.FeatureFluxMonitor(),
FeatureOffchainReporting: config.FeatureOffchainReporting(),
FlagsContractAddress: config.FlagsContractAddress(),
GasUpdaterBlockDelay: config.GasUpdaterBlockDelay(),
GasUpdaterBlockHistorySize: config.GasUpdaterBlockHistorySize(),
GasUpdaterEnabled: config.GasUpdaterEnabled(),
GasUpdaterTransactionPercentile: config.GasUpdaterTransactionPercentile(),
InsecureFastScrypt: config.InsecureFastScrypt(),
TriggerFallbackDBPollInterval: config.TriggerFallbackDBPollInterval(),
JobPipelineMaxTaskDuration: config.JobPipelineMaxTaskDuration(),
JobPipelineParallelism: config.JobPipelineParallelism(),
JobPipelineReaperInterval: config.JobPipelineReaperInterval(),
JobPipelineReaperThreshold: config.JobPipelineReaperThreshold(),
JSONConsole: config.JSONConsole(),
LinkContractAddress: config.LinkContractAddress(),
LogLevel: config.LogLevel(),
LogSQLMigrations: config.LogSQLMigrations(),
LogSQLStatements: config.LogSQLStatements(),
LogToDisk: config.LogToDisk(),
MaximumServiceDuration: config.MaximumServiceDuration(),
MinIncomingConfirmations: config.MinIncomingConfirmations(),
MinRequiredOutgoingConfirmations: config.MinRequiredOutgoingConfirmations(),
MinimumServiceDuration: config.MinimumServiceDuration(),
MinimumContractPayment: config.MinimumContractPayment(),
MinimumRequestExpiration: config.MinimumRequestExpiration(),
OCRBootstrapCheckInterval: config.OCRBootstrapCheckInterval(),
OCRContractTransmitterTransmitTimeout: config.OCRContractTransmitterTransmitTimeout(),
OCRDatabaseTimeout: config.OCRDatabaseTimeout(),
P2PListenIP: config.P2PListenIP().String(),
P2PListenPort: config.P2PListenPort(),
OCRIncomingMessageBufferSize: config.OCRIncomingMessageBufferSize(),
OCROutgoingMessageBufferSize: config.OCROutgoingMessageBufferSize(),
OCRNewStreamTimeout: config.OCRNewStreamTimeout(),
OCRDHTLookupInterval: config.OCRDHTLookupInterval(),
OCRTraceLogging: config.OCRTraceLogging(),
OperatorContractAddress: config.OperatorContractAddress(),
Port: config.Port(),
ReaperExpiration: config.ReaperExpiration(),
ReplayFromBlock: config.ReplayFromBlock(),
RootDir: config.RootDir(),
SecureCookies: config.SecureCookies(),
SessionTimeout: config.SessionTimeout(),
TLSHost: config.TLSHost(),
TLSPort: config.TLSPort(),
TLSRedirect: config.TLSRedirect(),
TxAttemptLimit: config.TxAttemptLimit(),
},
}, nil
}
// String returns the values as a newline delimited string
func (c ConfigPrinter) String() string {
var buffer bytes.Buffer
buffer.WriteString(fmt.Sprintf("ACCOUNT_ADDRESS: %v\n", c.AccountAddress))
schemaT := reflect.TypeOf(orm.ConfigSchema{})
cwlT := reflect.TypeOf(c.EnvPrinter)
cwlV := reflect.ValueOf(c.EnvPrinter)
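// Walk the EnvPrinter fields via reflection: each field must exist in
// orm.ConfigSchema (panic otherwise), and the schema field's `env` struct tag
// supplies the variable name printed below; fields without an `env` tag are skipped.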
for index := 0; index < cwlT.NumField(); index++ {
item := cwlT.FieldByIndex([]int{index})
schemaItem, ok := schemaT.FieldByName(item.Name)
if !ok {
logger.Panicf("Field %s missing from store.Schema", item.Name)
}
envName, ok := schemaItem.Tag.Lookup("env")
if !ok {
continue
}
field := cwlV.FieldByIndex(item.Index)
buffer.WriteString(envName)
buffer.WriteString(": ")
if stringer, ok := field.Interface().(fmt.Stringer); ok {
if stringer != reflect.Zero(reflect.TypeOf(stringer)).Interface() {
buffer.WriteString(stringer.String())
}
} else {
buffer.WriteString(fmt.Sprintf("%v", field))
}
buffer.WriteString("\n")
}
return buffer.String()
}
// GetID generates a new ID for jsonapi serialization.
func (c ConfigPrinter) GetID() string {
return utils.NewBytes32ID()
}
// SetID is used to conform to the UnmarshalIdentifier interface for
// deserializing from jsonapi documents.
func (c *ConfigPrinter) SetID(value string) error {
return nil
}
// JobSpec holds the JobSpec definition together with
// the total link earned from that job
type JobSpec struct {
models.JobSpec
Errors []models.JobSpecError `json:"errors"`
Earnings *assets.Link `json:"earnings"`
}
// MarshalJSON returns the JSON data of the Job and its Initiators.
func (job JobSpec) MarshalJSON() ([]byte, error) {
type Alias JobSpec
pis := make([]Initiator, len(job.Initiators))
for i, modelInitr := range job.Initiators {
pis[i] = Initiator{modelInitr}
}
return json.Marshal(&struct {
Initiators []Initiator `json:"initiators"`
Alias
}{
pis,
Alias(job),
})
}
// FriendlyCreatedAt returns a human-readable string of the Job's
// CreatedAt field.
func (job JobSpec) FriendlyCreatedAt() string {
return utils.ISO8601UTC(job.CreatedAt)
}
// FriendlyStartAt returns a human-readable string of the Job's
// StartAt field.
func (job JobSpec) FriendlyStartAt() string {
if job.StartAt.Valid {
return utils.ISO8601UTC(job.StartAt.Time)
}
return ""
}
// FriendlyEndAt returns a human-readable string of the Job's
// EndAt field.
func (job JobSpec) FriendlyEndAt() string {
if job.EndAt.Valid {
return utils.ISO8601UTC(job.EndAt.Time)
}
return ""
}
// FriendlyMinPayment returns a formatted string of the Job's
// Minimum Link Payment threshold
func (job JobSpec) FriendlyMinPayment() string {
return job.MinPayment.Text(10)
}
// FriendlyInitiators returns the list of Initiator types as
// a comma separated string.
func (job JobSpec) FriendlyInitiators() string {
var initrs []string
for _, i := range job.Initiators {
initrs = append(initrs, i.Type)
}
return strings.Join(initrs, "\n")
}
// FriendlyTasks returns the list of Task types as a comma
// separated string.
func (job JobSpec) FriendlyTasks() string {
var tasks []string
for _, t := range job.Tasks {
tasks = append(tasks, t.Type.String())
}
return strings.Join(tasks, "\n")
}
// Initiator holds the Job definition's Initiator.
type Initiator struct {
models.Initiator
}
// MarshalJSON returns the JSON data of the Initiator based
// on its Initiator Type.
func (i Initiator) MarshalJSON() ([]byte, error) {
p, err := initiatorParams(i)
if err != nil {
return []byte{}, err
}
return json.Marshal(&struct {
Type string `json:"type"`
Params interface{} `json:"params"`
}{i.Type, p})
}
func initiatorParams(i Initiator) (interface{}, error) {
switch i.Type {
case models.InitiatorWeb:
return struct{}{}, nil
case models.InitiatorCron:
return struct {
Schedule models.Cron `json:"schedule"`
}{i.Schedule}, nil
case models.InitiatorRunAt:
return struct {
Time models.AnyTime `json:"time"`
Ran bool `json:"ran"`
}{models.NewAnyTime(i.Time.Time), i.Ran}, nil
case models.InitiatorEthLog:
fallthrough
case models.InitiatorRunLog:
return struct {
Address common.Address `json:"address"`
}{i.Address}, nil
case models.InitiatorExternal:
return struct {
Name string `json:"name"`
}{i.Name}, nil
case models.InitiatorFluxMonitor:
return struct {
Address common.Address `json:"address"`
RequestData models.JSON `json:"requestData"`
Feeds models.JSON `json:"feeds"`
Threshold float32 `json:"threshold"`
AbsoluteThreshold float32 `json:"absoluteThreshold"`
Precision int32 `json:"precision"`
PollTimer models.PollTimerConfig `json:"pollTimer,omitempty"`
IdleTimer models.IdleTimerConfig `json:"idleTimer,omitempty"`
}{i.Address, i.RequestData, i.Feeds, i.Threshold, i.AbsoluteThreshold,
i.Precision, i.PollTimer, i.IdleTimer}, nil
case models.InitiatorRandomnessLog:
return struct{ Address common.Address }{i.Address}, nil
default:
return nil, fmt.Errorf("cannot marshal unsupported initiator type '%v'", i.Type)
}
}
// FriendlyRunAt returns a human-readable string for Cron Initiator types.
func (i Initiator) FriendlyRunAt() string {
if i.Type == models.InitiatorRunAt {
return utils.ISO8601UTC(i.Time.Time)
}
return ""
}
// FriendlyAddress returns the Ethereum address if present, and a blank
// string if not.
func (i Initiator) FriendlyAddress() string {
if i.IsLogInitiated() {
return utils.LogListeningAddress(i.Address)
}
return ""
}
// JobRun presents an API friendly version of the data.
type JobRun struct {
models.JobRun
}
// MarshalJSON returns the JSON data of the JobRun and its Initiator.
func (jr JobRun) MarshalJSON() ([]byte, error) {
type Alias JobRun
return json.Marshal(&struct {
Alias
Initiator Initiator `json:"initiator"`
}{
Alias(jr),
Initiator{jr.Initiator},
})
}
// TaskSpec holds a task specified in the Job definition.
type TaskSpec struct {
models.TaskSpec
}
// FriendlyParams returns a map of the TaskSpec's parameters.
func (t TaskSpec) FriendlyParams() (string, string) {
keys := []string{}
values := []string{}
t.Params.ForEach(func(key, value gjson.Result) bool {
if key.String() != "type" {
keys = append(keys, key.String())
values = append(values, value.String())
}
return true
})
return strings.Join(keys, "\n"), strings.Join(values, "\n")
}
// FriendlyBigInt returns a string printing the integer in both
// decimal and hexadecimal formats.
func FriendlyBigInt(n *big.Int) string {
return fmt.Sprintf("#%[1]v (0x%[1]x)", n)
}
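// For example, FriendlyBigInt(big.NewInt(255)) returns "#255 (0xff)".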
// ServiceAgreement presents an API friendly version of the data.
type ServiceAgreement struct {
models.ServiceAgreement
}
type ServiceAgreementPresentation struct {
ID string `json:"id"`
CreatedAt string `json:"createdAt"`
Encumbrance models.Encumbrance `json:"encumbrance"`
EncumbranceID int64 `json:"encumbranceID"`
RequestBody string `json:"requestBody"`
Signature string `json:"signature"`
JobSpec models.JobSpec `json:"jobSpec"`
JobSpecID string `json:"jobSpecId"`
}
// MarshalJSON presents the ServiceAgreement as public JSON data
func (sa ServiceAgreement) MarshalJSON() ([]byte, error) {
return json.Marshal(ServiceAgreementPresentation{
ID: sa.ID,
CreatedAt: utils.ISO8601UTC(sa.CreatedAt),
Encumbrance: sa.Encumbrance,
EncumbranceID: sa.EncumbranceID,
RequestBody: sa.RequestBody,
Signature: sa.Signature.String(),
JobSpec: sa.JobSpec,
JobSpecID: sa.JobSpecID.String(),
})
}
// FriendlyCreatedAt returns the ServiceAgreement's created at time in a human
// readable format.
func (sa ServiceAgreement) FriendlyCreatedAt() string {
return utils.ISO8601UTC(sa.CreatedAt)
}
// FriendlyExpiration returns the ServiceAgreement's Encumbrance expiration time
// in a human readable format.
func (sa ServiceAgreement) FriendlyExpiration() string {
return fmt.Sprintf("%v seconds", sa.Encumbrance.Expiration)
}
// FriendlyPayment returns the ServiceAgreement's Encumbrance payment amount in
// a human readable format.
func (sa ServiceAgreement) FriendlyPayment() string {
return fmt.Sprintf("%v LINK", sa.Encumbrance.Payment.String())
}
// FriendlyAggregator returns the ServiceAgreement's aggregator address,
// in a human readable format.
func (sa ServiceAgreement) FriendlyAggregator() string {
return sa.Encumbrance.Aggregator.String()
}
// FriendlyAggregatorInitMethod returns the ServiceAgreement's aggregator initialization
// method's function selector, in a human readable format.
func (sa ServiceAgreement) FriendlyAggregatorInitMethod() string {
return sa.Encumbrance.AggInitiateJobSelector.String()
}
// FriendlyAggregatorFulfillMethod returns the ServiceAgreement's aggregator
// fulfillment (oracle reporting) method's function selector, in a human
// readable format.
func (sa ServiceAgreement) FriendlyAggregatorFulfillMethod() string {
return sa.Encumbrance.AggFulfillSelector.String()
}
// UserPresenter wraps the user record for shipping as a jsonapi response in
// the API.
type UserPresenter struct {
*models.User
}
// GetID returns the jsonapi ID.
func (u UserPresenter) GetID() string {
return u.User.Email
}
// GetName returns the collection name for jsonapi.
func (u UserPresenter) GetName() string {
return "users"
}
// MarshalJSON returns the User as json.
func (u UserPresenter) MarshalJSON() ([]byte, error) {
return json.Marshal(&struct {
Email string `json:"email"`
CreatedAt string `json:"createdAt"`
}{
Email: u.User.Email,
CreatedAt: utils.ISO8601UTC(u.User.CreatedAt),
})
}
// NewAccount is a jsonapi wrapper for an Ethereum account.
type NewAccount struct {
*accounts.Account
}
// GetID returns the jsonapi ID.
func (a NewAccount) GetID() string {
return a.Address.String()
}
// GetName returns the collection name for jsonapi.
func (a NewAccount) GetName() string {
return "keys"
}
// EthTx is a jsonapi wrapper for an Ethereum Transaction.
type EthTx struct {
ID int64 `json:"-"`
State string `json:"state,omitempty"`
Data hexutil.Bytes `json:"data,omitempty"`
From *common.Address `json:"from,omitempty"`
GasLimit string `json:"gasLimit,omitempty"`
GasPrice string `json:"gasPrice,omitempty"`
Hash common.Hash `json:"hash,omitempty"`
Hex string `json:"rawHex,omitempty"`
Nonce string `json:"nonce,omitempty"`
SentAt string `json:"sentAt,omitempty"`
To *common.Address `json:"to,omitempty"`
Value string `json:"value,omitempty"`
}
func NewEthTxFromAttempt(txa models.EthTxAttempt) EthTx {
return newEthTxWithAttempt(txa.EthTx, txa)
}
func newEthTxWithAttempt(tx models.EthTx, txa models.EthTxAttempt) EthTx {
ethTX := EthTx{
Data: hexutil.Bytes(tx.EncodedPayload),
From: &tx.FromAddress,
GasLimit: strconv.FormatUint(tx.GasLimit, 10),
GasPrice: txa.GasPrice.String(),
Hash: txa.Hash,
Hex: hexutil.Encode(txa.SignedRawTx),
ID: tx.ID,
State: string(tx.State),
To: &tx.ToAddress,
Value: tx.Value.String(),
}
if tx.Nonce != nil {
ethTX.Nonce = strconv.FormatUint(uint64(*tx.Nonce), 10)
}
if txa.BroadcastBeforeBlockNum != nil {
ethTX.SentAt = strconv.FormatUint(uint64(*txa.BroadcastBeforeBlockNum), 10)
}
return ethTX
}
// GetID returns the jsonapi ID.
func (t EthTx) GetID() string {
return t.Hash.Hex()
}
// GetName returns the collection name for jsonapi.
func (EthTx) GetName() string {
return "transactions"
}
// SetID is used to conform to the UnmarshalIdentifier interface for
// deserializing from jsonapi documents.
func (t *EthTx) SetID(hex string) error {
t.Hash = common.HexToHash(hex)
return nil
}
// ExternalInitiatorAuthentication includes initiator and authentication details.
type ExternalInitiatorAuthentication struct {
Name string `json:"name,omitempty"`
URL models.WebURL `json:"url,omitempty"`
AccessKey string `json:"incomingAccessKey,omitempty"`
Secret string `json:"incomingSecret,omitempty"`
OutgoingToken string `json:"outgoingToken,omitempty"`
OutgoingSecret string `json:"outgoingSecret,omitempty"`
}
// NewExternalInitiatorAuthentication creates an instance of ExternalInitiatorAuthentication.
func NewExternalInitiatorAuthentication(
ei models.ExternalInitiator,
eia auth.Token,
) *ExternalInitiatorAuthentication {
var result = &ExternalInitiatorAuthentication{
Name: ei.Name,
AccessKey: ei.AccessKey,
Secret: eia.Secret,
OutgoingToken: ei.OutgoingToken,
OutgoingSecret: ei.OutgoingSecret,
}
if ei.URL != nil {
result.URL = *ei.URL
}
return result
}
// GetID returns the jsonapi ID.
func (ei *ExternalInitiatorAuthentication) GetID() string {
return ei.Name
}
// GetName returns the collection name for jsonapi.
func (*ExternalInitiatorAuthentication) GetName() string {
return "external initiators"
}
// SetID is used to conform to the UnmarshalIdentifier interface for
// deserializing from jsonapi documents.
func (ei *ExternalInitiatorAuthentication) SetID(name string) error {
ei.Name = name
return nil
}
// ExplorerStatus represents the connected server and status of the connection
type ExplorerStatus struct {
Status string `json:"status"`
URL string `json:"url"`
}
// NewExplorerStatus returns an initialized ExplorerStatus from the store
func NewExplorerStatus(statsPusher synchronization.StatsPusher) ExplorerStatus {
url := statsPusher.GetURL()
return ExplorerStatus{
Status: string(statsPusher.GetStatus()),
URL: url.String(),
}
}
| NewConfigPrinter |
main.rs | use std::io::{stdin, Read};
fn main() | {
let mut input = String::new();
stdin().read_to_string(&mut input).unwrap();
let lines: Vec<&str> = input.lines().collect();
let depart_time = lines[0].parse::<u64>().unwrap();
// Part 1
let bus_ids: Vec<u64> = lines[1]
.split(',')
.filter(|x| *x != "x")
.map(|x| x.parse::<u64>().unwrap())
.collect();
let delays: Vec<(u64, u64)> = bus_ids
.iter()
// minutes until the next departure of bus x
.map(|&x| ((x * ((depart_time / x) + 1) - depart_time), x))
.collect();
let choice = delays
.iter()
.min_by(|(a1, _), (a2, _)| a1.cmp(&a2))
.unwrap();
println!("Part 1: the product is {}", choice.0 * choice.1);
// Part 2
// Use the large base only for the puzzle input, due to the hint
//let base = 1;
let base = 100_000_000_000;
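// Sieve for the answer CRT-style: `result` always satisfies every constraint seen
// so far, and `multiple` is the product of the bus IDs folded in (pairwise coprime
// for these inputs), so stepping by `multiple` preserves earlier constraints.
// E.g. with base = 1, the example schedule "7,13,x,x,59,x,31,19" converges on 1068781.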
let (result, _) = lines[1]
.split(',')
.enumerate()
.filter(|(_, bus_id)| *bus_id != "x")
.map(|(offset, bus_id)| (offset, bus_id.parse::<u64>().unwrap()))
.fold((base, 1), |(mut result, multiple), (offset, bus_id)| {
//println!("({}, {}), ({}, {})", result, multiple, offset, bus_id);
while (result + offset as u64) % bus_id != 0 {
result += multiple;
}
(result, multiple * bus_id)
});
println!("Part 2: the earliest timestamp is {}", result);
} |
|
userModel.js | const crypto = require('crypto');
const mongoose = require('mongoose');
const validator = require('validator');
const bcrypt = require('bcryptjs');
const userSchema = new mongoose.Schema({
name: {
type: String,
required: [true, 'Please tell us your name!']
},
email: {
type: String,
required: [true, 'Please provide your email!'],
unique: true,
lowercase: true,
validate: [validator.isEmail, 'Please provide a valid email!']
},
photo: {
type: String,
default: 'default.jpg'
},
role: {
type: String,
enum: ['user', 'guide', 'lead-guide', 'admin'],
default: 'user'
},
password: {
type: String,
required: [true, 'Please provide a password!'],
minlength: 8,
select: false
},
passwordConfirm: {
type: String,
required: [true, 'Please confirm your password!'],
validate: {
// This only works on CREATE and SAVE!
validator: function(el) {
return el === this.password;
},
message: 'Passwords are not the same!'
}
},
passwordChangedAt: Date,
passwordResetToken: String,
passwordResetExpires: Date, | active: {
type: Boolean,
default: true,
select: false
}
});
userSchema.pre('save', async function(next) {
// Only run this function if password was actually modified
if (!this.isModified('password')) return next();
// Hash the password with cost of 12
this.password = await bcrypt.hash(this.password, 12);
// Remove the passwordConfirm field; it is only needed for validation
this.passwordConfirm = undefined;
next();
});
userSchema.pre('save', function(next) {
if (!this.isModified('password') || this.isNew) return next();
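// Backdate by 1s: the JWT for this request may be signed before the save hits
// the DB, and changedPasswordAfter must not treat that token as pre-dating the change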
this.passwordChangedAt = Date.now() - 1000;
next();
});
userSchema.pre(/^find/, function(next) {
// this points to the current query
this.find({ active: { $ne: false } });
next();
});
userSchema.methods.correctPassword = async function(
candidatePassword,
userPassword
) {
return await bcrypt.compare(candidatePassword, userPassword);
};
userSchema.methods.changedPasswordAfter = function(JWTTimestamp) {
if (this.passwordChangedAt) {
const changedTimestamp = parseInt(
this.passwordChangedAt.getTime() / 1000,
10
);
// console.log(changedTimestamp, JWTTimestamp);
return JWTTimestamp < changedTimestamp;
}
// False means NOT changed
return false;
};
userSchema.methods.createPasswordResetToken = function() {
const resetToken = crypto.randomBytes(32).toString('hex');
this.passwordResetToken = crypto
.createHash('sha256')
.update(resetToken)
.digest('hex');
// console.log({ resetToken }, this.passwordResetToken);
this.passwordResetExpires = Date.now() + 10 * 60 * 1000;
return resetToken;
};
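// Minimal usage sketch (hypothetical controller code, not part of this model):
//
//   const user = await User.findOne({ email: req.body.email });
//   const resetToken = user.createPasswordResetToken();
//   await user.save({ validateBeforeSave: false });
//   // email the plain resetToken to the user; only its SHA-256 hash is stored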
const User = mongoose.model('User', userSchema);
module.exports = User; | |
walking_turns.rs | use geom::{Distance, PolyLine, Pt2D, Ring, EPSILON_DIST};
use crate::{
Direction, DrivingSide, Intersection, IntersectionID, Lane, LaneID, Map, Turn, TurnID, TurnType,
};
/// Looks at all sidewalks (or lack thereof) in counter-clockwise order around an intersection.
/// Based on adjacency, create a SharedSidewalkCorner or a Crosswalk.
/// UnmarkedCrossings are not generated here; another process later "downgrades" crosswalks to
/// unmarked.
pub fn make_walking_turns(map: &Map, i: &Intersection) -> Vec<Turn> {
let driving_side = map.config.driving_side;
// Consider all roads in counter-clockwise order. Every road has up to two sidewalks. Gather
// those in order, remembering what roads don't have them.
let mut lanes: Vec<Option<&Lane>> = Vec::new();
let mut sorted_roads = i.get_roads_sorted_by_incoming_angle(map);
// And for left-handed driving, we need to walk around in the opposite order.
if driving_side == DrivingSide::Left {
sorted_roads.reverse();
}
for r in sorted_roads {
let road = map.get_r(r);
let mut fwd = None;
let mut back = None;
for l in &road.lanes {
if l.lane_type.is_walkable() {
if l.dir == Direction::Fwd {
fwd = Some(l);
} else {
back = Some(l);
}
}
}
let (in_lane, out_lane) = if road.src_i == i.id {
(back, fwd)
} else {
(fwd, back)
};
// Don't add None entries for footways even if they only have one lane
if map.get_r(r).is_footway() {
if in_lane.is_some() {
lanes.push(in_lane);
}
if out_lane.is_some() {
lanes.push(out_lane);
}
} else {
lanes.push(in_lane);
lanes.push(out_lane);
}
}
// If there are 0 or 1 sidewalks there are no turns to be made
if lanes.iter().filter(|l| l.is_some()).count() <= 1 {
return Vec::new();
}
// At a deadend make only one SharedSidewalkCorner
if i.is_deadend() {
let (l1, l2) = (lanes[0].unwrap(), lanes[1].unwrap());
return vec![Turn {
id: turn_id(i.id, l1.id, l2.id),
turn_type: TurnType::SharedSidewalkCorner,
geom: make_shared_sidewalk_corner(i, l1, l2),
}];
}
// Make sure we start with a sidewalk.
while lanes[0].is_none() {
lanes.rotate_left(1);
}
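// Walk around the intersection pairing consecutive sidewalks. Two adjacent
// sidewalks on different roads meet at a SharedSidewalkCorner; otherwise (a gap
// of roads with no sidewalk, or the two sides of one road) the pedestrian has to
// cross a roadway, so make a Crosswalk.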
let mut result: Vec<Turn> = Vec::new();
let mut from: Option<&Lane> = lanes[0];
let mut adj = true;
for l in lanes.iter().skip(1).chain(lanes.iter().take(1)) {
if from.is_none() {
from = *l;
adj = true;
continue;
}
let l1 = from.unwrap();
if l.is_none() {
adj = false;
continue;
}
let l2 = l.unwrap();
if adj && l1.id.road != l2.id.road {
result.push(Turn {
id: turn_id(i.id, l1.id, l2.id),
turn_type: TurnType::SharedSidewalkCorner,
geom: make_shared_sidewalk_corner(i, l1, l2),
});
from = Some(l2);
// adj stays true
} else {
result.push(Turn {
id: turn_id(i.id, l1.id, l2.id),
turn_type: TurnType::Crosswalk,
geom: make_crosswalk(i, l1, l2),
});
from = Some(l2);
adj = true;
}
}
// If there are exactly two crosswalks they must be connected, so delete one.
if result
.iter()
.filter(|t| t.turn_type == TurnType::Crosswalk)
.count()
== 2
{
result.remove(
result
.iter()
.position(|t| t.turn_type == TurnType::Crosswalk)
.unwrap(),
);
}
result
}
/// Filter out crosswalks on really short roads. In reality, these roads are usually located within
/// an intersection, which isn't a valid place for a pedestrian crossing.
///
/// And if the road is marked as having no crosswalks at an end, downgrade them to unmarked
/// crossings.
pub fn filter_turns(mut input: Vec<Turn>, map: &Map, i: &Intersection) -> Vec<Turn> {
for r in &i.roads {
if map.get_r(*r).is_extremely_short() {
input.retain(|t| {
!(t.id.src.road == *r && t.id.dst.road == *r && t.turn_type.pedestrian_crossing())
});
}
}
for turn in &mut input {
if let Some(dr) = turn.crosswalk_over_road(map) {
let road = map.get_r(dr.road);
let keep = if dr.dir == Direction::Fwd {
road.crosswalk_forward
} else {
road.crosswalk_backward
};
if !keep {
turn.turn_type = TurnType::UnmarkedCrossing;
}
} else if turn.turn_type.pedestrian_crossing() {
// We have a crosswalk over multiple roads (or sometimes, just one road that only has a
// walkable lane on one side of it). We can't yet detect all the roads crossed. So for
// now, it's more often correct to assume that if any nearby roads don't have a
// crossing snapped to both ends, then there's probably no crosswalk here.
for l in [turn.id.src, turn.id.dst] {
let road = map.get_parent(l);
if !road.crosswalk_forward || !road.crosswalk_backward {
turn.turn_type = TurnType::UnmarkedCrossing;
}
}
}
}
input
}
fn make_crosswalk(i: &Intersection, l1: &Lane, l2: &Lane) -> PolyLine {
let l1_line = l1.end_line(i.id);
let l2_line = l2.end_line(i.id);
// Jut out a bit into the intersection, cross over, then jut back in.
// Put degenerate intersection crosswalks in the middle (DEGENERATE_HALF_LENGTH).
PolyLine::deduping_new(vec![
l1_line.pt2(),
l1_line.unbounded_dist_along(
l1_line.length()
+ if i.is_degenerate() {
Distance::const_meters(2.5)
} else {
l1.width / 2.0
},
),
l2_line.unbounded_dist_along(
l2_line.length()
+ if i.is_degenerate() {
Distance::const_meters(2.5)
} else {
l2.width / 2.0
},
),
l2_line.pt2(),
])
.unwrap_or_else(|_| baseline_geometry(l1.endpoint(i.id), l2.endpoint(i.id)))
}
// TODO This doesn't handle sidewalk/shoulder transitions
fn make_shared_sidewalk_corner(i: &Intersection, l1: &Lane, l2: &Lane) -> PolyLine {
// We'll fallback to this if the fancier geometry fails.
let baseline = baseline_geometry(l1.endpoint(i.id), l2.endpoint(i.id));
// Is point2 counter-clockwise of point1?
let dir = if i
.polygon
.center()
.angle_to(l1.endpoint(i.id))
.simple_shortest_rotation_towards(i.polygon.center().angle_to(l2.endpoint(i.id)))
> 0.0
{
1.0
} else {
-1.0
// For deadends, go the long way around
} * if i.is_deadend() { -1.0 } else { 1.0 };
// Find all of the points on the intersection polygon between the two sidewalks. Assumes
// sidewalks are the same length.
let corner1 = l1
.end_line(i.id)
.shift_either_direction(dir * l1.width / 2.0)
.pt2();
let corner2 = l2
.end_line(i.id)
.shift_either_direction(-dir * l2.width / 2.0)
.pt2();
// TODO Something like this will be MUCH simpler and avoid going around the long way sometimes.
if false {
return Ring::must_new(i.polygon.points().clone())
.get_shorter_slice_btwn(corner1, corner2)
.unwrap();
}
// The order of the points here seems backwards, but it's because we scan from corner2
// to corner1 below.
let mut pts_between = vec![l2.endpoint(i.id)];
// Intersection polygons are constructed in clockwise order, so do corner2 to corner1.
let mut i_pts = i.polygon.points().clone();
// last pt = first_pt
i_pts.pop();
if dir < 0.0 {
i_pts.reverse();
}
for _ in 0..i_pts.len() {
if i_pts[0].approx_eq(corner2, Distance::meters(0.5)) {
break;
}
i_pts.rotate_left(1);
}
for idx in 0..i_pts.len() {
if i_pts[idx].approx_eq(corner1, Distance::meters(0.5)) {
i_pts.truncate(idx + 1);
break;
}
}
if i_pts.len() < 2 {
// no intermediate points, so just do a straight line
return baseline;
}
if let Ok(pl) =
PolyLine::must_new(i_pts).shift_either_direction(dir * l1.width.min(l2.width) / 2.0)
{
// The first and last points should be approximately l2's and l1's endpoints
pts_between.extend(pl.points().iter().take(pl.points().len() - 1).skip(1));
} else {
warn!(
"SharedSidewalkCorner between {} and {} has weird collapsing geometry, so \
just doing straight line",
l1.id, l2.id
);
return baseline;
}
pts_between.push(l1.endpoint(i.id));
pts_between.dedup();
pts_between.reverse();
if abstutil::contains_duplicates(
&pts_between
.iter()
.map(|pt| pt.to_hashable())
.collect::<Vec<_>>(),
) || pts_between.len() < 2
{
warn!(
"SharedSidewalkCorner between {} and {} has weird duplicate geometry, so just doing \
straight line",
l1.id, l2.id
);
return baseline;
}
let result = PolyLine::must_new(pts_between);
if result.length() > 10.0 * baseline.length() {
warn!(
"SharedSidewalkCorner between {} and {} explodes to {} long, so just doing straight \
line",
l1.id,
l2.id,
result.length()
);
return baseline;
}
result
}
// Never in any circumstance should we produce a polyline with only one point (or two points
// that're equal), because it'll just crash downstream rendering logic and make a mess elsewhere.
// Avoid that here. The result is unlikely to look correct (or be easily visible at all).
//
// TODO Proper fix is likely to make a turn's geometry optional.
fn baseline_geometry(pt1: Pt2D, pt2: Pt2D) -> PolyLine { | PolyLine::new(vec![pt1, pt2]).unwrap_or_else(|_| {
PolyLine::must_new(vec![
pt1,
pt1.offset(EPSILON_DIST.inner_meters(), EPSILON_DIST.inner_meters()),
])
})
}
fn turn_id(parent: IntersectionID, src: LaneID, dst: LaneID) -> TurnID {
TurnID { parent, src, dst }
} | |
cap.go | /*
Copyright 2014 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package s2
import (
"fmt"
"io"
"math"
"github.com/golang/geo/r1"
"github.com/golang/geo/s1"
)
var (
// centerPoint is the default center for Caps
centerPoint = PointFromCoords(1.0, 0, 0)
)
// Cap represents a disc-shaped region defined by a center and radius.
// Technically this shape is called a "spherical cap" (rather than disc)
// because it is not planar; the cap represents a portion of the sphere that
// has been cut off by a plane. The boundary of the cap is the circle defined
// by the intersection of the sphere and the plane. For containment purposes,
// the cap is a closed set, i.e. it contains its boundary.
//
// For the most part, you can use a spherical cap wherever you would use a
// disc in planar geometry. The radius of the cap is measured along the
// surface of the sphere (rather than the straight-line distance through the
// interior). Thus a cap of radius π/2 is a hemisphere, and a cap of radius
// π covers the entire sphere.
//
// The center is a point on the surface of the unit sphere. (Hence the need for
// it to be of unit length.)
//
// A cap can also be defined by its center point and height. The height is the
// distance from the center point to the cutoff plane. There is also support for
// "empty" and "full" caps, which contain no points and all points respectively.
//
// Here are some useful relationships between the cap height (h), the cap
// radius (r), the maximum chord length from the cap's center (d), and the
// radius of cap's base (a).
//
// h = 1 - cos(r)
// = 2 * sin^2(r/2)
// d^2 = 2 * h
// = a^2 + h^2
//
// The zero value of Cap is an invalid cap. Use EmptyCap to get a valid empty cap.
type Cap struct {
center Point
radius s1.ChordAngle
}
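// Plugging into the relationships above: a hemisphere (r = π/2) has squared
// chord length 2, so Height() = 1 and Area() = 2π; a full cap (r = π) has
// squared chord length 4, Height() = 2, and Area() = 4π.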
// CapFromPoint constructs a cap containing a single point.
func CapFromPoint(p Point) Cap {
return CapFromCenterChordAngle(p, 0)
}
// CapFromCenterAngle constructs a cap with the given center and angle.
func CapFromCenterAngle(center Point, angle s1.Angle) Cap {
return CapFromCenterChordAngle(center, s1.ChordAngleFromAngle(angle))
}
// CapFromCenterChordAngle constructs a cap where the angle is expressed as an
// s1.ChordAngle. This constructor is more efficient than using an s1.Angle.
func CapFromCenterChordAngle(center Point, radius s1.ChordAngle) Cap {
return Cap{
center: center,
radius: radius,
}
}
// CapFromCenterHeight constructs a cap with the given center and height. A
// negative height yields an empty cap; a height of 2 or more yields a full cap.
// The center should be unit length.
func CapFromCenterHeight(center Point, height float64) Cap {
return CapFromCenterChordAngle(center, s1.ChordAngleFromSquaredLength(2*height))
}
// CapFromCenterArea constructs a cap with the given center and surface area.
// Note that the area can also be interpreted as the solid angle subtended by the
// cap (because the sphere has unit radius). A negative area yields an empty cap;
// an area of 4*π or more yields a full cap.
func CapFromCenterArea(center Point, area float64) Cap {
return CapFromCenterChordAngle(center, s1.ChordAngleFromSquaredLength(area/math.Pi))
}
// EmptyCap returns a cap that contains no points.
func EmptyCap() Cap {
return CapFromCenterChordAngle(centerPoint, s1.NegativeChordAngle)
}
// FullCap returns a cap that contains all points.
func FullCap() Cap {
return CapFromCenterChordAngle(centerPoint, s1.StraightChordAngle)
}
// IsValid reports whether the Cap is considered valid.
func (c Cap) IsValid() bool {
return c.center.Vector.IsUnit() && c.radius <= s1.StraightChordAngle
}
// IsEmpty reports whether the cap is empty, i.e. it contains no points.
func (c Cap) IsEmpty() bool {
return c.radius < 0
}
// IsFull reports whether the cap is full, i.e. it contains all points.
func (c Cap) IsFull() bool {
return c.radius == s1.StraightChordAngle
}
// Center returns the cap's center point.
func (c Cap) Center() Point {
return c.center
}
// Height returns the height of the cap. This is the distance from the center
// point to the cutoff plane.
func (c Cap) Height() float64 {
return float64(0.5 * c.radius)
}
// Radius returns the cap radius as an s1.Angle. (Note that the cap angle
// is stored internally as a ChordAngle, so this method requires a trigonometric
// operation and may yield a slightly different result than the value passed
// to CapFromCenterAngle).
func (c Cap) Radius() s1.Angle {
return c.radius.Angle()
}
// Area returns the surface area of the Cap on the unit sphere.
func (c Cap) Area() float64 {
return 2.0 * math.Pi * math.Max(0, c.Height())
}
// Contains reports whether this cap contains the other.
func (c Cap) Contains(other Cap) bool {
// In a set containment sense, every cap contains the empty cap.
if c.IsFull() || other.IsEmpty() {
return true
}
return c.radius >= ChordAngleBetweenPoints(c.center, other.center).Add(other.radius)
}
// Intersects reports whether this cap intersects the other cap.
// i.e. whether they have any points in common.
func (c Cap) Intersects(other Cap) bool {
if c.IsEmpty() || other.IsEmpty() {
return false
}
return c.radius.Add(other.radius) >= ChordAngleBetweenPoints(c.center, other.center)
}
// InteriorIntersects reports whether this cap's interior intersects the other cap.
func (c Cap) InteriorIntersects(other Cap) bool {
// Make sure this cap has an interior and the other cap is non-empty.
if c.radius <= 0 || other.IsEmpty() {
return false
}
return c.radius.Add(other.radius) > ChordAngleBetweenPoints(c.center, other.center)
}
// ContainsPoint reports whether this cap contains the point.
func (c Cap) ContainsPoint(p Point) bool {
return ChordAngleBetweenPoints(c.center, p) <= c.radius
}
// InteriorContainsPoint reports whether the point is within the interior of this cap.
func (c Cap) InteriorContainsPoint(p Point) bool {
return c.IsFull() || ChordAngleBetweenPoints(c.center, p) < c.radius
}
// Complement returns the complement of the interior of the cap. A cap and its
// complement have the same boundary but do not share any interior points.
// The complement operator is not a bijection because the complement of a
// singleton cap (containing a single point) is the same as the complement
// of an empty cap.
func (c Cap) Complement() Cap {
if c.IsFull() {
return EmptyCap()
}
if c.IsEmpty() {
return FullCap()
}
return CapFromCenterChordAngle(Point{c.center.Mul(-1)}, s1.StraightChordAngle.Sub(c.radius))
}
// CapBound returns a bounding spherical cap. This is not guaranteed to be exact.
func (c Cap) CapBound() Cap {
return c
}
// RectBound returns a bounding latitude-longitude rectangle.
// The bounds are not guaranteed to be tight.
func (c Cap) RectBound() Rect {
if c.IsEmpty() {
return EmptyRect()
}
capAngle := c.Radius().Radians()
allLongitudes := false
lat := r1.Interval{
Lo: latitude(c.center).Radians() - capAngle,
Hi: latitude(c.center).Radians() + capAngle,
}
lng := s1.FullInterval()
// Check whether cap includes the south pole.
if lat.Lo <= -math.Pi/2 {
lat.Lo = -math.Pi / 2
allLongitudes = true
}
// Check whether cap includes the north pole.
if lat.Hi >= math.Pi/2 {
lat.Hi = math.Pi / 2
allLongitudes = true
}
if !allLongitudes {
// Compute the range of longitudes covered by the cap. We use the law
// of sines for spherical triangles. Consider the triangle ABC where
// A is the north pole, B is the center of the cap, and C is the point
// of tangency between the cap boundary and a line of longitude. Then
// C is a right angle, and letting a,b,c denote the sides opposite A,B,C,
// we have sin(a)/sin(A) = sin(c)/sin(C), or sin(A) = sin(a)/sin(c).
// Here "a" is the cap angle, and "c" is the colatitude (90 degrees
// minus the latitude). This formula also works for negative latitudes.
//
// The formula for sin(a) follows from the relationship h = 1 - cos(a).
sinA := c.radius.Sin()
sinC := math.Cos(latitude(c.center).Radians())
if sinA <= sinC {
angleA := math.Asin(sinA / sinC)
lng.Lo = math.Remainder(longitude(c.center).Radians()-angleA, math.Pi*2)
lng.Hi = math.Remainder(longitude(c.center).Radians()+angleA, math.Pi*2)
}
}
return Rect{lat, lng}
}
// Equal reports whether this cap is equal to the other cap.
func (c Cap) Equal(other Cap) bool {
return (c.radius == other.radius && c.center == other.center) ||
(c.IsEmpty() && other.IsEmpty()) ||
(c.IsFull() && other.IsFull())
}
// ApproxEqual reports whether this cap is equal to the other cap within the given tolerance.
func (c Cap) ApproxEqual(other Cap) bool {
const epsilon = 1e-14
r2 := float64(c.radius)
otherR2 := float64(other.radius)
return c.center.ApproxEqual(other.center) &&
math.Abs(r2-otherR2) <= epsilon ||
c.IsEmpty() && otherR2 <= epsilon ||
other.IsEmpty() && r2 <= epsilon ||
c.IsFull() && otherR2 >= 2-epsilon ||
other.IsFull() && r2 >= 2-epsilon
}
// AddPoint increases the cap if necessary to include the given point. If this cap is empty,
// then the center is set to the point with a zero height. p must be unit-length.
func (c Cap) AddPoint(p Point) Cap {
if c.IsEmpty() {
c.center = p
c.radius = 0
return c
}
// After calling cap.AddPoint(p), cap.Contains(p) must be true. However
// we don't need to do anything special to achieve this because Contains()
// does exactly the same distance calculation that we do here.
if newRad := ChordAngleBetweenPoints(c.center, p); newRad > c.radius {
c.radius = newRad
}
return c
}
// AddCap increases the cap height if necessary to include the other cap. If this cap is empty,
// it is set to the other cap.
func (c Cap) AddCap(other Cap) Cap {
if c.IsEmpty() {
return other
}
if other.IsEmpty() {
return c
}
// We round up the distance to ensure that the cap is actually contained.
// TODO(roberts): Do some error analysis in order to guarantee this.
dist := ChordAngleBetweenPoints(c.center, other.center).Add(other.radius)
if newRad := dist.Expanded(dblEpsilon * float64(dist)); newRad > c.radius {
c.radius = newRad
}
return c
}
// Expanded returns a new cap expanded by the given angle. If the cap is empty,
// it returns an empty cap.
func (c Cap) Expanded(distance s1.Angle) Cap {
if c.IsEmpty() {
return EmptyCap()
}
return CapFromCenterChordAngle(c.center, c.radius.Add(s1.ChordAngleFromAngle(distance)))
}
func (c Cap) String() string {
return fmt.Sprintf("[Center=%v, Radius=%f]", c.center.Vector, c.Radius().Degrees())
}
// radiusToHeight converts an s1.Angle into the height of the cap.
func radiusToHeight(r s1.Angle) float64 {
if r.Radians() < 0 {
return float64(s1.NegativeChordAngle)
}
if r.Radians() >= math.Pi {
return float64(s1.RightChordAngle)
}
return float64(0.5 * s1.ChordAngleFromAngle(r))
}
// ContainsCell reports whether the cap contains the given cell.
func (c Cap) ContainsCell(cell Cell) bool {
// If the cap does not contain all cell vertices, return false.
var vertices [4]Point
for k := 0; k < 4; k++ {
vertices[k] = cell.Vertex(k)
if !c.ContainsPoint(vertices[k]) {
return false
}
}
// Otherwise, return true if the complement of the cap does not intersect the cell.
return !c.Complement().intersects(cell, vertices)
}
// IntersectsCell reports whether the cap intersects the cell.
func (c Cap) IntersectsCell(cell Cell) bool {
// If the cap contains any cell vertex, return true.
var vertices [4]Point
for k := 0; k < 4; k++ {
vertices[k] = cell.Vertex(k)
if c.ContainsPoint(vertices[k]) {
return true
}
}
return c.intersects(cell, vertices)
}
// intersects reports whether the cap intersects any point of the cell excluding
// its vertices (which are assumed to already have been checked).
func (c Cap) intersects(cell Cell, vertices [4]Point) bool {
// If the cap is a hemisphere or larger, the cell and the complement of the cap
// are both convex. Therefore since no vertex of the cell is contained, no other
// interior point of the cell is contained either.
if c.radius >= s1.RightChordAngle {
return false
}
// We need to check for empty caps due to the center check just below.
if c.IsEmpty() {
return false
}
// Optimization: return true if the cell contains the cap center. This allows half
// of the edge checks below to be skipped.
if cell.ContainsPoint(c.center) {
| // At this point we know that the cell does not contain the cap center, and the cap
// does not contain any cell vertex. The only way that they can intersect is if the
// cap intersects the interior of some edge.
sin2Angle := c.radius.Sin2()
for k := 0; k < 4; k++ {
edge := cell.Edge(k).Vector
dot := c.center.Vector.Dot(edge)
if dot > 0 {
// The center is in the interior half-space defined by the edge. We do not need
// to consider these edges, since if the cap intersects this edge then it also
// intersects the edge on the opposite side of the cell, because the center is
// not contained with the cell.
continue
}
// The Norm2() factor is necessary because "edge" is not normalized.
if dot*dot > sin2Angle*edge.Norm2() {
return false
}
// Otherwise, the great circle containing this edge intersects the interior of the cap. We just
// need to check whether the point of closest approach occurs between the two edge endpoints.
dir := edge.Cross(c.center.Vector)
if dir.Dot(vertices[k].Vector) < 0 && dir.Dot(vertices[(k+1)&3].Vector) > 0 {
return true
}
}
return false
}
// CellUnionBound computes a covering of the Cap. In general the covering
// consists of at most 4 cells except for very large caps, which may need
// up to 6 cells. The output is not sorted.
func (c Cap) CellUnionBound() []CellID {
// TODO(roberts): The covering could be made quite a bit tighter by mapping
// the cap to a rectangle in (i,j)-space and finding a covering for that.
// Find the maximum level such that the cap contains at most one cell vertex
// and such that CellID.AppendVertexNeighbors() can be called.
level := MinWidthMetric.MaxLevel(c.Radius().Radians()) - 1
// If level < 0, more than three face cells are required.
if level < 0 {
cellIDs := make([]CellID, 6)
for face := 0; face < 6; face++ {
cellIDs[face] = CellIDFromFace(face)
}
return cellIDs
}
// The covering consists of the 4 cells at the given level that share the
// cell vertex that is closest to the cap center.
return cellIDFromPoint(c.center).VertexNeighbors(level)
}
// Centroid returns the true centroid of the cap multiplied by its surface area
// The result lies on the ray from the origin through the cap's center, but it
// is not unit length. Note that if you just want the "surface centroid", i.e.
// the normalized result, then it is simpler to call Center.
//
// The reason for multiplying the result by the cap area is to make it
// easier to compute the centroid of more complicated shapes. The centroid
// of a union of disjoint regions can be computed simply by adding their
// Centroid() results. Caveat: for caps that contain a single point
// (i.e., zero radius), this method always returns the origin (0, 0, 0).
// This is because shapes with no area don't affect the centroid of a
// union whose total area is positive.
func (c Cap) Centroid() Point {
// From symmetry, the centroid of the cap must be somewhere on the line
// from the origin to the center of the cap on the surface of the sphere.
// When a sphere is divided into slices of constant thickness by a set of
// parallel planes, all slices have the same surface area. This implies
// that the radial component of the centroid is simply the midpoint of the
// range of radial distances spanned by the cap. That is easily computed
// from the cap height.
if c.IsEmpty() {
return Point{}
}
r := 1 - 0.5*c.Height()
return Point{c.center.Mul(r * c.Area())}
}
// Union returns the smallest cap which encloses this cap and other.
func (c Cap) Union(other Cap) Cap {
// If the other cap is larger, swap c and other for the rest of the computations.
if c.radius < other.radius {
c, other = other, c
}
if c.IsFull() || other.IsEmpty() {
return c
}
// TODO: This calculation would be more efficient using s1.ChordAngles.
cRadius := c.Radius()
otherRadius := other.Radius()
distance := c.center.Distance(other.center)
if cRadius >= distance+otherRadius {
return c
}
resRadius := 0.5 * (distance + cRadius + otherRadius)
resCenter := InterpolateAtDistance(0.5*(distance-cRadius+otherRadius), c.center, other.center)
return CapFromCenterAngle(resCenter, resRadius)
}
// Encode encodes the Cap.
func (c Cap) Encode(w io.Writer) error {
e := &encoder{w: w}
c.encode(e)
return e.err
}
func (c Cap) encode(e *encoder) {
e.writeFloat64(c.center.X)
e.writeFloat64(c.center.Y)
e.writeFloat64(c.center.Z)
e.writeFloat64(float64(c.radius))
}
| return true
}
|
azurepage.go | package targets
import (
"fmt"
"time"
"github.com/Azure/azure-sdk-for-go/storage"
"github.com/Azure/blobporter/pipeline"
"github.com/Azure/blobporter/util"
)
////////////////////////////////////////////////////////////
///// AzurePage Target
////////////////////////////////////////////////////////////
//AzurePage represents an Azure Page Blob target
type AzurePage struct {
Creds *pipeline.StorageAccountCredentials
Container string
StorageClient *storage.BlobStorageClient
}
//NewAzurePage creates a new Azure Page Blob target
func NewAzurePage(accountName string, accountKey string, container string) pipeline.TargetPipeline |
//PageSize size of page in Azure Page Blob storage
const PageSize int64 = 512
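//For example, a 1 MiB source (1048576 bytes = 2048 pages) passes the alignment
//check in PreProcessSourceInfo below, while a 1000-byte source is rejected since
//it is not a multiple of 512.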
//PreProcessSourceInfo implementation of PreProcessSourceInfo from the pipeline.TargetPipeline interface.
//Creates the empty page blob sized for the transfer after validating page-size constraints.
func (t AzurePage) PreProcessSourceInfo(source *pipeline.SourceInfo) (err error) {
size := int64(source.Size)
if size%PageSize != 0 {
return fmt.Errorf("Invalid size for a page blob. The size of the file %v (%v) is not a multiple of %v", source.SourceName, source.Size, PageSize)
}
if size > int64(util.TB) {
return fmt.Errorf("The file %v is too big (%v). Tha maximum size of a page blob is %v ", source.SourceName, source.Size, util.TB)
}
//if the max retries is exceeded, panic will happen, hence no error is returned.
util.RetriableOperation(func(r int) error {
if err := (*t.StorageClient).PutPageBlob(t.Container, (*source).TargetAlias, size, nil); err != nil {
return err
}
return nil
})
return nil
}
//CommitList implements CommitList from the pipeline.TargetPipeline interface.
//Passthrough no need to a commit for page blob.
func (t AzurePage) CommitList(listInfo *pipeline.TargetCommittedListInfo, NumberOfBlocks int, targetName string) (msg string, err error) {
msg = "Page blob committed"
err = nil
return
}
//ProcessWrittenPart implements ProcessWrittenPart from the pipeline.TargetPipeline interface.
//Passthrough no need to process a written part when transferring to a page blob.
func (t AzurePage) ProcessWrittenPart(result *pipeline.WorkerResult, listInfo *pipeline.TargetCommittedListInfo) (requeue bool, err error) {
requeue = false
err = nil
return
}
//WritePart implements WritePart from the pipeline.TargetPipeline interface.
//Performs a PUT page operation with the data contained in the part.
//This assumes the part.BytesToRead is a multiple of the PageSize
func (t AzurePage) WritePart(part *pipeline.Part) (duration time.Duration, startTime time.Time, numOfRetries int, err error) {
offset := int64(part.Offset)
endByte := int64(part.Offset + uint64(part.BytesToRead) - 1)
//if the max retries is exceeded, panic will happen, hence no error is returned.
duration, startTime, numOfRetries = util.RetriableOperation(func(r int) error {
if err := (*t.StorageClient).PutPage(t.Container, part.TargetAlias, offset, endByte, "update", part.Data, nil); err != nil {
if util.Verbose {
fmt.Printf("EH|S|%v|%v|%v|%v\n", part.Offset, len(part.Data), part.TargetAlias, err)
}
return err
}
if util.Verbose {
fmt.Printf("OKA|S|%v|%v|%v|%v\n", part.Offset, len(part.Data), part.TargetAlias, err)
}
return nil
})
return
}
| {
util.CreateContainerIfNotExists(container, accountName, accountKey)
creds := pipeline.StorageAccountCredentials{AccountName: accountName, AccountKey: accountKey}
client := util.GetBlobStorageClient(creds.AccountName, creds.AccountKey)
return AzurePage{Creds: &creds, Container: container, StorageClient: &client}
} |
clipboard_simple.rs | //! Simple clipboard example
//!
//! From https://developer.gnome.org/gtkmm-tutorial/stable/sec-clipboard-examples.html.en
extern crate gdk;
extern crate gio;
extern crate gtk;
use std::cell::RefCell;
use std::env::args;
use gio::prelude::*;
use gtk::prelude::*;
struct | {
pub button_a1: gtk::ToggleButton,
pub button_a2: gtk::ToggleButton,
pub button_b1: gtk::ToggleButton,
pub button_b2: gtk::ToggleButton,
}
// Declare a new thread local storage key
thread_local!(
static GLOBAL: RefCell<Option<Ui>> = RefCell::new(None)
);
fn build_ui(application: >k::Application) {
let window = gtk::ApplicationWindow::new(application);
// Create the whole window
window.set_title("gtk::Clipboard Simple Example");
window.connect_delete_event(|window, _| {
window.destroy();
Inhibit(false)
});
// Create the button grid
let grid = gtk::Grid::new();
grid.set_row_homogeneous(true);
grid.set_column_homogeneous(true);
let button_a1 = gtk::ToggleButton::new_with_label("A1");
grid.attach(&button_a1, 0, 0, 1, 1);
let button_a2 = gtk::ToggleButton::new_with_label("A2");
grid.attach(&button_a2, 1, 0, 1, 1);
let button_b1 = gtk::ToggleButton::new_with_label("B1");
grid.attach(&button_b1, 0, 1, 1, 1);
let button_b2 = gtk::ToggleButton::new_with_label("B2");
grid.attach(&button_b2, 1, 1, 1, 1);
// Add in the action buttons
let copy_button = gtk::Button::new_with_mnemonic("_Copy");
let paste_button = gtk::Button::new_with_mnemonic("_Paste");
let button_box = gtk::ButtonBox::new(gtk::Orientation::Horizontal);
button_box.set_layout(gtk::ButtonBoxStyle::End);
button_box.pack_start(©_button, false, false, 0);
button_box.pack_start(&paste_button, false, false, 0);
// Pack widgets into the window and display everything
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
vbox.set_spacing(6);
let label = gtk::Label::new(Some(
"Select cells in the grid, click Copy, then \
open a second instance of this example to try \
pasting the copied data.",
));
vbox.pack_start(&label, true, true, 0);
vbox.pack_start(&grid, true, true, 0);
vbox.pack_start(&button_box, true, true, 0);
window.add(&vbox);
window.show_all();
// Save out UI in thread-local storage so we can use it in callbacks later
GLOBAL.with(move |global| {
*global.borrow_mut() = Some(Ui {
button_a1: button_a1,
button_a2: button_a2,
button_b1: button_b1,
button_b2: button_b2,
})
});
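// The clipboard payload is four '0'/'1' chars in the order A1, A2, B1, B2;
// e.g. with only A1 and B2 toggled on, Copy places "1001" on the clipboard.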
// Attach signal handlers
copy_button.connect_clicked(|_| {
let mut s = String::new();
GLOBAL.with(|global| {
if let Some(ref ui) = *global.borrow() {
if ui.button_a1.get_active() {
s.push_str("1");
} else {
s.push_str("0");
}
if ui.button_a2.get_active() {
s.push_str("1");
} else {
s.push_str("0");
}
if ui.button_b1.get_active() {
s.push_str("1");
} else {
s.push_str("0");
}
if ui.button_b2.get_active() {
s.push_str("1");
} else {
s.push_str("0");
}
}
});
let clipboard = gtk::Clipboard::get(&gdk::SELECTION_CLIPBOARD);
clipboard.set_text(&s);
});
paste_button.connect_clicked(|_| {
let clipboard = gtk::Clipboard::get(&gdk::SELECTION_CLIPBOARD);
clipboard.request_text(|_, t| {
if t.is_some() {
let t = t.unwrap();
if t.len() >= 4 {
GLOBAL.with(|global| {
if let Some(ref ui) = *global.borrow() {
ui.button_a1.set_active(t.chars().nth(0).unwrap() == '1');
ui.button_a2.set_active(t.chars().nth(1).unwrap() == '1');
ui.button_b1.set_active(t.chars().nth(2).unwrap() == '1');
ui.button_b2.set_active(t.chars().nth(3).unwrap() == '1');
}
});
}
}
});
});
}
fn main() {
let application = gtk::Application::new(
Some("org.gtk-rs.example.clipboard_simple"),
gio::ApplicationFlags::NON_UNIQUE,
)
.expect("Initialization failed...");
application.connect_startup(|app| {
build_ui(app);
});
application.connect_activate(|_| {});
application.run(&args().collect::<Vec<_>>());
}
| Ui |
sp6_bitstream_analyzer.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017-2020 The Project X-Ray Authors.
#
# Use of this source code is governed by a ISC-style
# license that can be found in the LICENSE file or at
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
'''
Spartan 6 bitstream analyzer tool.
This script reads a Spartan-6 bitstream and prints out some useful information.
It can also create a frames file with the configuration data words.
The bitstream is analyzed word by word and interpreted according to
the UG380 Configuration User Guide.
The tool can be used to derive the initialization, startup and finalization
sequence as well as the configuration data. The latter is written to a frames
file which can be used by the bitstream tools such as frames2bit to generate
a valid bitstream.
'''
import argparse
from io import StringIO
conf_regs = {
0: "CRC",
1: "FAR_MAJ",
2: "FAR_MIN",
3: "FDRI",
4: "FDRO",
5: "CMD",
6: "CTL",
7: "MASK",
8: "STAT",
9: "LOUT",
10: "COR1",
11: "COR2",
12: "PWRDN_REG",
13: "FLR",
14: "IDCODE",
15: "CWDT",
16: "HC_OPT_REG",
18: "CSBO",
19: "GENERAL1",
20: "GENERAL2",
21: "GENERAL3",
22: "GENERAL4",
23: "GENERAL5",
24: "MODE_REG",
25: "PU_GWE",
26: "PU_GTS",
27: "MFWR",
28: "CCLK_FREQ",
29: "SEU_OPT",
30: "EXP_SIGN",
31: "RDBK_SIGN",
32: "BOOTSTS",
33: "EYE_MASK",
34: "CBC_REG"
}
cmd_reg_codes = {
0: "NULL",
1: "WCFG",
2: "MFW",
3: "LFRM",
4: "RCFG",
5: "START",
7: "RCRC",
8: "AGHIGH",
10: "GRESTORE",
11: "SHUTDOWN",
13: "DESYNC",
14: "IPROG"
}
opcodes = ("NOP", "READ", "WRITE", "UNKNOWN")
def KnuthMorrisPratt(text, pattern):
'''
Yields all starting positions of copies of the pattern in the text.
Calling conventions are similar to string.find, but its arguments can be
lists or iterators, not just strings, it returns all matches, not just
the first one, and it does not need the whole text in memory at once.
Whenever it yields, it will have read the text exactly up to and including
the match that caused the yield.
'''
# allow indexing into pattern and protect against change during yield
pattern = list(pattern)
# build table of shift amounts
shifts = [1] * (len(pattern) + 1)
shift = 1
for pos in range(len(pattern)):
while shift <= pos and pattern[pos] != pattern[pos - shift]:
shift += shifts[pos - shift]
shifts[pos + 1] = shift
# do the actual search
startPos = 0
matchLen = 0
for c in text:
while matchLen == len(pattern) or \
matchLen >= 0 and pattern[matchLen] != c:
startPos += shifts[matchLen]
matchLen -= shifts[matchLen]
matchLen += 1
if matchLen == len(pattern):
yield startPos
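# For example, the sync-word search in Bitstream.get_header below amounts to:
# next(KnuthMorrisPratt(bytes([0x00, 0x01, 0xaa, 0x99, 0x55, 0x66]),
#                       [0xaa, 0x99, 0x55, 0x66])) == 2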
class Bitstream:
def __init__(self, file_name, verbose=False):
self.frame_data = []
self.idcode = 0
self.exp_sign = 0
self.far_min = 0
self.far_maj = 0
self.curr_fdri_write_len = 0
self.curr_crc_check = 0
self.fdri_in_progress = False
with open(file_name, "rb") as f:
self.bytes = f.read()
pos, self.header = self.get_header()
self.body = [
(i << 8) | j
for i, j in zip(self.bytes[pos::2], self.bytes[pos + 1::2])
]
self.parse_bitstream(verbose)
def get_header(self):
pos = next(KnuthMorrisPratt(self.bytes, [0xaa, 0x99, 0x55, 0x66]))
return pos + 4, self.bytes[:pos + 4]
def parse_bitstream(self, verbose):
payload_len = 0
for word in self.body:
if payload_len > 0:
if verbose:
print("\tWord: ", hex(word))
payload_len = self.parse_reg(
reg_addr, word, payload_len, verbose)
continue
else:
packet_header = self.parse_packet_header(word)
opcode = packet_header["opcode"]
reg_addr = packet_header["reg_addr"]
words = packet_header["word_count"]
type = packet_header["type"]
if verbose:
print(
"\tWord: ", hex(word),
'Type: {}, Op: {}, Addr: {}, Words: {}'.format(
type, opcodes[opcode], reg_addr, words))
if opcode and reg_addr in conf_regs:
payload_len = words
continue
def parse_packet_header(self, word):
type = (word >> 13) & 0x7
opcode = (word >> 11) & 0x3
reg_addr = (word >> 5) & 0x3F
if type == 1:
word_count = word & 0x1F
elif type == 2:
word_count = 2
else:
word_count = 0
return {
"type": type,
"opcode": opcode,
"reg_addr": reg_addr,
"word_count": word_count
}
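    # Illustrative decode: word 0x30A1 has type (0x30A1 >> 13) & 0x7 = 1,
    # opcode (0x30A1 >> 11) & 0x3 = 2 (WRITE), reg_addr (0x30A1 >> 5) & 0x3F = 5
    # (CMD) and word_count 0x30A1 & 0x1F = 1, i.e. "write one word to CMD".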
def parse_command(self, word):
return cmd_reg_codes[word]
def parse_cor1(self, word):
return word
def parse_cor2(self, word):
return word
def parse_ctl(self, word):
#decryption
dec = (word >> 6) & 1
#security bits
sb = (word >> 4) & 3
#persist
p = (word >> 3) & 1
#use efuse
efuse = (word >> 2) & 1
#crc extstat disable
crc = (word >> 1) & 1
return {
"decryption": dec,
"security bits": sb,
"pesist": p,
"use efuse": efuse,
"crc extstat disable": crc
}
def parse_cclk_freq(self, word):
ext_mclk = (word >> 14) & 1
mclk_freq = word & 0x3FF
return (ext_mclk, mclk_freq)
def parse_pwrdn(self, word):
en_eyes = (word >> 14) & 1
filter_b = (word >> 5) & 1
en_pgsr = (word >> 4) & 1
en_pwrdn = (word >> 2) & 1
keep_sclk = word & 1
return {
"en_eyes": en_eyes,
"filter_b": filter_b,
"en_pgsr": en_pgsr,
"en_pwrdn": en_pwrdn,
"keep_sclk": keep_sclk
}
def parse_eye_mask(self, word):
return word & 0xFF
def parse_hc_opt(self, word):
return (word >> 6) & 1
def parse_cwdt(self, word):
return word
def parse_pu_gwe(self, word):
return word & 0x3FF
def parse_pu_gts(self, word):
return word & 0x3FF
def parse_mode(self, word):
new_mode = (word >> 13) & 0x1
buswidth = (word >> 11) & 0x3
bootmode = (word >> 8) & 0x7
bootvsel = word & 0xFF
return {
"new_mode": new_mode,
"buswidth": buswidth,
"bootmode": bootmode,
"bootvsel": bootvsel
}
def parse_seu(self, word):
seu_freq = (word >> 4) & 0x3FF
seu_run_on_err = (word >> 3) & 0x1
glut_mask = (word >> 1) & 0x1
seu_enable = word & 0x1
return {
"seu_freq": seu_freq,
"seu_run_on_err": seu_run_on_err,
"glut_mask": glut_mask,
"seu_enable": seu_enable
}
def parse_reg(self, reg_addr, word, payload_len, verbose):
reg = conf_regs[reg_addr]
if reg == "CMD":
command = self.parse_command(word)
if verbose:
print("Command: {}\n".format(command))
elif reg == "FLR":
frame_length = word
if verbose:
print("Frame length: {}\n".format(frame_length))
elif reg == "COR1":
conf_options = self.parse_cor1(word)
if verbose:
print("COR1 options: {}\n".format(conf_options))
elif reg == "COR2":
conf_options = self.parse_cor2(word)
if verbose:
print("COR2 options: {}\n".format(conf_options))
elif reg == "IDCODE":
assert payload_len < 3
if payload_len == 2:
self.idcode = word << 16
elif payload_len == 1:
self.idcode |= word
if verbose:
print("IDCODE: {}\n".format(hex(self.idcode)))
elif reg == "MASK":
mask = word
if verbose:
print("Mask value: {}\n".format(mask))
elif reg == "CTL":
ctl_options = self.parse_ctl(word)
if verbose:
print("CTL options: {}\n".format(ctl_options))
elif reg == "CCLK_FREQ":
cclk_freq_options = self.parse_cclk_freq(word)
if verbose:
print("CCLK_FREQ options: {}\n".format(cclk_freq_options))
elif reg == "PWRDN_REG":
suspend_reg_options = self.parse_pwrdn(word)
if verbose:
print("{} options: {}\n".format(reg, suspend_reg_options))
elif reg == "EYE_MASK":
eye_mask = self.parse_eye_mask(word)
if verbose:
print("{} options: {}\n".format(reg, eye_mask))
elif reg == "HC_OPT_REG":
hc_options = self.parse_hc_opt(word)
if verbose:
print("{} options: {}\n".format(reg, hc_options))
elif reg == "CWDT":
cwdt_options = self.parse_cwdt(word)
if verbose:
print("{} options: {}\n".format(reg, cwdt_options))
elif reg == "PU_GWE":
pu_gwe_sequence = self.parse_pu_gwe(word)
if verbose:
print("{} options: {}\n".format(reg, pu_gwe_sequence))
elif reg == "PU_GTS":
pu_gts_sequence = self.parse_pu_gts(word)
if verbose:
print("{} options: {}\n".format(reg, pu_gts_sequence))
elif reg == "MODE_REG":
mode_options = self.parse_mode(word)
if verbose:
print("{} options: {}\n".format(reg, mode_options))
elif reg == "GENERAL1" or reg == "GENERAL2" \
or reg == "GENERAL3" or reg == "GENERAL4" \
or reg == "GENERAL5":
general_options = word
if verbose:
print("{} options: {}\n".format(reg, general_options))
elif reg == "SEU_OPT":
seu_options = self.parse_seu(word)
if verbose:
print("{} options: {}\n".format(reg, seu_options))
elif reg == "EXP_SIGN":
if payload_len == 2:
self.exp_sign = word << 16
elif payload_len == 1:
self.exp_sign |= word
if verbose:
print("{}: {}\n".format(reg, self.exp_sign))
elif reg == "FAR_MAJ":
if payload_len == 2:
                self.far_maj = word
elif payload_len == 1:
                self.far_min = word
if verbose:
print(
"{}: {} FAR_MIN: {}\n".format(
reg, self.far_maj, self.far_min))
elif reg == "FDRI":
if self.fdri_in_progress:
self.frame_data.append(word)
if payload_len == 1:
self.fdri_in_progress = False
return 0
elif payload_len == 2:
self.curr_fdri_write_len = (word & 0xFFF) << 16
elif payload_len == 1:
self.curr_fdri_write_len |= word
self.fdri_in_progress = True
# Check if 0 words actually means read something
payload_len = self.curr_fdri_write_len + 2
if verbose:
print("{}: {}\n".format(reg, self.curr_fdri_write_len))
return payload_len
elif reg == "CRC":
if payload_len == 2:
self.curr_crc_check = (word & 0xFFF) << 16
elif payload_len == 1:
self.curr_crc_check |= word
if verbose:
print("{}: {}\n".format(reg, self.curr_crc_check))
payload_len -= 1
return payload_len
def write_frames_txt(self, file_name):
'''Write frame data in a more readable format'''
frame_stream = StringIO()
for i in range(len(self.frame_data)):
if i % 65 == 0:
frame_stream.write("\nFrame {:4}\n".format(i // 65))
#IOB word
if i % 65 == 32:
frame_stream.write(
"\n#{:3}:{:6}\n".format(i % 65, hex(self.frame_data[i])))
else:
frame_stream.write(
"#{:3}:{:6},".format(i % 65, hex(self.frame_data[i])))
with open(file_name, "w") as f:
print(frame_stream.getvalue(), file=f)
    def write_frames(self, file_name):
        '''Write configuration data to frames file'''
        frame_stream = StringIO()
        for i in range(len(self.frame_data)):
            if i % 65 == 0:
                frame_stream.write("0x{:08x} ".format(i // 65))
            frame_stream.write("0x{:04x}".format(self.frame_data[i]))
            if i % 65 == 64:
                frame_stream.write("\n")
            elif i < len(self.frame_data) - 1:
                frame_stream.write(",")
        with open(file_name, "w") as f:
            print(frame_stream.getvalue(), file=f)


def main(args):
    verbose = not args.silent
    bitstream = Bitstream(args.bitstream, verbose)
    print("Frame data length: ", len(bitstream.frame_data))
    if args.frames_out:
        bitstream.write_frames(args.frames_out)
        if verbose:
            bitstream.write_frames_txt(args.frames_out + ".txt")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--bitstream', help='Input bitstream')
    parser.add_argument('--frames_out', help='Output frames file')
    parser.add_argument(
        '--silent', help="Don't print analysis details", action='store_true')
    args = parser.parse_args()
    main(args)
unwind-resource2.rs
// xfail-win32
use std;
import task;
import comm;
class complainer {
let c: @int;
new(c: @int) { self.c = c; }
drop {}
}
fn f() {
    let c <- complainer(@0);
    fail;
}
fn main() {
    task::spawn_unlinked(f);
}
test_dict.rs
// Copyright (C) 2020 - 2022, J2 Innovations
//! Test Dict
#[cfg(test)]
use libhaystack::dict;
use libhaystack::encoding::zinc::decode::*;
use libhaystack::encoding::zinc::encode::*;
use libhaystack::val::*;
use std::iter::FromIterator;
use std::str::FromStr;
#[test]
fn test_zinc_dict_encode() {
let rec = dict! {
"site" => Value::make_marker(),
"name" => Value::make_str("Foo"),
"dict" => Value::make_dict(dict! {"foo" => Value::make_bool(true)})
};
let zinc = rec.to_zinc_string();
assert_eq!(zinc.unwrap(), r#"{dict:{foo:T},name:"Foo",site}"#);
assert_eq!(
dict![
"a" => "a".into(),
"b" => 100.into(),
"c" => true.into(),
"d" => Remove.into(),
"e" => List::from_iter([Value::make_str("str")]).into(),
"f" => dict!{"na" => Na.into()}.into(),
"g" => Ref::from("ref").into(),
"h" => Value::make_symbol("symbol"),
"i" => Value::make_uri("uri"),
"j" => Value::make_xstr_from("xstr", "val"),
"k" => Value::Na,
"l" => Value::Marker,
"m" => Time::from_str("20:00:00").expect("Time").into(),
"n" => Date::from_str("2021-06-19").expect("Date").into(),
"o" => DateTime::from_str("2021-06-19T19:48:23-00:00").expect("DateTime").into(),
"p" => Coord::make(34.0522, 118.2437).into(),
"q" => Grid::make_empty().into()
]
.to_zinc_string()
.unwrap(),
"{a:\"a\",b:100,c:T,d:R,e:[\"str\"],f:{na:NA},g:@ref,h:^symbol,i:`uri`,j:Xstr(\"val\"),k:NA,l,m:20:00:00,n:2021-06-19,o:2021-06-19T19:48:23Z,p:C(34.0522,118.2437),q:<<\nver:\"3.0\"\nempty\n>>}",
)
}
#[test]
fn test_zinc_dict_decode() {
let value: Value = from_str(r#"{dict:{foo:T, x:M} name:"Foo" site}"#).unwrap();
let dict = dict! {
"site" => Value::make_marker(),
"name" => Value::make_str("Foo"),
"dict" => Value::make_dict(dict! {"foo" => Value::make_bool(true), "x" => Value::Marker})
};
assert_eq!(value, Value::make_dict(dict));
let zinc_dict = concat!(
"{",
r#"a:"a" "#,
"b:100 ",
"c:T ",
"d:R ",
r#"e:["str"] "#,
"f:{na:NA} ",
"g:@ref ",
"h:^symbol ",
"i:`uri` ",
"j:Xstr(\"val\") ",
"k:NA ",
"l ",
"m:20:00:00 ",
"n:2021-06-19 ",
"o:2021-06-19T19:48:23Z ",
"p:C(34.0522,118.2437) ",
"q:<<\nver:\"3.0\"\nempty\n>>",
"r:42",
"}"
);
let value = from_str(zinc_dict).unwrap();
let dict = match value {
Value::Dict(dict) => dict,
_ => panic!("Must be a Dict"),
};
assert_eq!(dict.len(), 18);
assert!(('a'..='r').into_iter().all(|n| dict.has(&n.to_string())));
assert!(dict.missing("x"));
assert_eq!(dict.get_str("a"), Some(&Str::from("a")));
assert_eq!(dict.get_num("b"), Some(&Number::from(100)));
assert_eq!(dict.get_bool("c"), Some(&Bool::from(true)));
assert_eq!(dict.get_list("e"), Some(&vec!["str".into()]));
assert_eq!(dict.get_dict("f"), Some(&dict! {"na" => Na.into()}));
assert_eq!(dict.get_ref("g"), Some(&Ref::from("ref")));
assert_eq!(dict.get_symbol("h"), Some(&Symbol::from("symbol")));
assert_eq!(dict.get_uri("i"), Some(&Uri::from("uri")));
assert_eq!(dict.get_xstr("j"), Some(&XStr::make("Xstr", "val")));
assert_eq!(
dict.get_time("m"),
Some(&Time::from_str("20:00:00").expect("Time"))
);
assert_eq!(
dict.get_date("n"),
Some(&Date::from_str("2021-06-19").expect("Date"))
);
assert_eq!(
dict.get_date_time("o"),
Some(&DateTime::from_str("2021-06-19T19:48:23-00:00").expect("DateTime"))
);
assert_eq!(dict.get_coord("p"), Some(&Coord::make(34.0522, 118.2437)));
assert_eq!(dict.get_grid("q"), Some(&Grid::make_empty()));
assert_eq!(dict.get_num("r"), Some(&Number::make(42.0)));
assert!(dict.has_remove("d"));
assert!(dict.has_na("k"));
assert!(dict.has_marker("l"));
assert_eq!(dict["a"], Value::make_str("a"));
}
#[test]
fn test_zinc_dict_comma_decode() {
let value: Value = from_str(r#"{a,b,c}"#).unwrap();
assert_eq!(
value,
Value::make_dict(dict! {"a"=> Value::Marker, "b"=> Value::Marker,"c"=> Value::Marker})
);
}
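// A hypothetical extra check, not part of the original suite: encoding a dict
// to zinc and decoding the result should reproduce the same value. Only APIs
// already exercised above (dict!, to_zinc_string, from_str) are used.
#[test]
fn test_zinc_dict_round_trip() {
    let rec = dict! {
        "site" => Value::make_marker(),
        "name" => Value::make_str("Foo")
    };
    let zinc = rec.to_zinc_string().unwrap();
    let decoded: Value = from_str(&zinc).unwrap();
    let expected = dict! {
        "site" => Value::make_marker(),
        "name" => Value::make_str("Foo")
    };
    assert_eq!(decoded, Value::make_dict(expected));
}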