Dataset schema (30 columns):

| column | dtype | values |
|---|---|---|
| hexsha | string | lengths 40 to 40 |
| size | int64 | 140 to 1.03M |
| ext | string | 94 classes |
| lang | string | 21 classes |
| max_stars_repo_path | string | lengths 3 to 663 |
| max_stars_repo_name | string | lengths 4 to 120 |
| max_stars_repo_head_hexsha | string | lengths 40 to 78 |
| max_stars_repo_licenses | sequence | lengths 1 to 10 |
| max_stars_count | int64 | 1 to 368k, nullable |
| max_stars_repo_stars_event_min_datetime | string | lengths 24 to 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | lengths 24 to 24, nullable |
| max_issues_repo_path | string | lengths 3 to 663 |
| max_issues_repo_name | string | lengths 4 to 120 |
| max_issues_repo_head_hexsha | string | lengths 40 to 78 |
| max_issues_repo_licenses | sequence | lengths 1 to 10 |
| max_issues_count | int64 | 1 to 116k, nullable |
| max_issues_repo_issues_event_min_datetime | string | lengths 24 to 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | lengths 24 to 24, nullable |
| max_forks_repo_path | string | lengths 3 to 663 |
| max_forks_repo_name | string | lengths 4 to 135 |
| max_forks_repo_head_hexsha | string | lengths 40 to 78 |
| max_forks_repo_licenses | sequence | lengths 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | lengths 24 to 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | lengths 24 to 24, nullable |
| content | string | lengths 140 to 1.03M |
| avg_line_length | float64 | 2.32 to 23.1k |
| max_line_length | int64 | 11 to 938k |
| alphanum_fraction | float64 | 0.01 to 1 |
| score | float32 | 3 to 4.25 |

Each record below is shown as a compact metadata block (hexsha, size, ext, lang; repo path, name, head_hexsha and licenses, which are identical across the stars/issues/forks groups in these records; then the star/issue/fork counts with their event date ranges), followed by the file content and a trailing ` | avg_line_length | max_line_length | alphanum_fraction | score |` line.
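For orientation, a minimal sketch of how records with this schema could be loaded and inspected with the Hugging Face `datasets` library. The dataset identifier below is a placeholder (the real name of this corpus is not given here), and only columns from the table above are accessed.

```python
# Placeholder identifier; substitute the actual dataset path.
from datasets import load_dataset

ds = load_dataset("org/source-code-corpus", split="train", streaming=True)
for record in ds.take(2):
    print(record["hexsha"], record["lang"], record["size"], record["score"])
    print(record["content"][:120])  # first 120 characters of the stored file
```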

hexsha: 0cc92881b3783140afbb04ec688ee09d279aa156 | size: 2,794 | ext: py | lang: Python
path: distla/distla_core/distla_core/linalg/qr/test_qr_ooc.py | repo: google/distla_core | head_hexsha: 7f0d8ab7b847a75e0fc713627488643a8984712a | licenses: ["Apache-2.0"]
stars: 2 (2021-12-19T21:17:06.000Z to 2021-12-25T09:19:47.000Z) | issues: null | forks: 1 (2021-12-25T09:19:56.000Z to 2021-12-25T09:19:56.000Z)

"""Tests for qr.py."""
from jax import lax
import jax.numpy as jnp
import numpy as np
import pytest
import tempfile
from distla_core.linalg.utils import testutils
from distla_core.linalg.qr import qr_ooc
from distla_core.utils import pops
DTYPE = jnp.float32
seeds = [0, 1]
flags = [True, False]
def _dephase_qr(R, Q=None):
""" Maps the Q and R factor from an arbitrary QR decomposition to the unique
with non-negative diagonal entries.
"""
phases_data = np.sign(np.diagonal(R))
m, n = R.shape
if m > n:
phases = np.ones(m)
phases[:n] = phases_data
else:
phases = phases_data
R = phases.conj()[:, None] * R
if Q is not None:
Q = Q * phases
return Q, R
@pytest.mark.parametrize("N", [8, 32, 128])
@pytest.mark.parametrize("aspect_ratio", [1, 2, 10])
@pytest.mark.parametrize("panel_size", [1, 2])
@pytest.mark.parametrize("seed", [0, 1])
def test_qr_ooc(N, aspect_ratio, panel_size, seed):
dtype = np.float32
M = N * aspect_ratio
np.random.seed(seed)
A = np.random.randn(M, N).astype(dtype)
_, expected = np.linalg.qr(A)
_, expected = _dephase_qr(expected)
with tempfile.NamedTemporaryFile(delete=False) as f:
np.save(f, A)
f.close() # Explicit close needed to open again as a memmap.
# The file is still deleted when the context goes out of scope.
result = qr_ooc.qr_ooc(f.name, caqr_panel_size=panel_size)
result = pops.undistribute(result)
_, result = _dephase_qr(result)
atol = testutils.eps(lax.Precision.HIGHEST, dtype=dtype)
atol *= np.linalg.norm(A) ** 2
testutils.assert_allclose(result, expected, atol=atol)
@pytest.mark.parametrize("N", [8, 32, 128])
@pytest.mark.parametrize("aspect_ratio", [1, 2, 10])
@pytest.mark.parametrize("panel_size", [1, 2])
@pytest.mark.parametrize("seed", [0, 1])
def test_fake_cholesky(N, aspect_ratio, panel_size, seed):
fname = "fake_cholesky_test_matrix"
dtype = np.float32
M = N * aspect_ratio
np.random.seed(seed)
A = np.random.randn(M, N).astype(dtype)
cond = np.linalg.cond(A)
expected_gram = np.dot(A.T, A)
expected_chol = np.linalg.cholesky(expected_gram).T
_, expected_chol = _dephase_qr(expected_chol)
np.save(fname, A)
fread = fname + ".npy"
chol_fname = "cholesky_transpose"
gram_fname = "gram_matrix"
qr_ooc.fake_cholesky(fread, caqr_panel_size=panel_size,
chol_fname=chol_fname, gram_fname=gram_fname)
result_gram = np.load(gram_fname + ".npy")
result_chol = np.load(chol_fname + ".npy")
_, result_chol = _dephase_qr(result_chol)
atol = testutils.eps(lax.Precision.HIGHEST, dtype=dtype)
atol *= cond * np.linalg.norm(expected_gram) ** 2
testutils.assert_allclose(result_chol, expected_chol, atol=10 * atol)
testutils.assert_allclose(result_gram, expected_gram, atol=atol)
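The `_dephase_qr` helper above fixes the sign ambiguity of a QR decomposition; a minimal real-valued NumPy sketch of the same idea, separate from the test file:

```python
import numpy as np

A = np.random.randn(6, 4)
Q, R = np.linalg.qr(A)
phases = np.sign(np.diagonal(R))  # one +/-1 per diagonal entry (assumes no exact zeros)
R_fixed = phases[:, None] * R     # flip row signs of R
Q_fixed = Q * phases              # flip the matching column signs of Q
assert np.allclose(Q_fixed @ R_fixed, A)    # still a valid factorization
assert (np.diagonal(R_fixed) >= 0).all()    # now with a non-negative diagonal
```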
| 30.703297 | 78 | 0.700787 | 3.34375 |

hexsha: 03b8fb6d145f0cf59894925a4f20e93f384ab9f2 | size: 1,604 | ext: kt | lang: Kotlin
path: testing/src/main/kotlin/Combinations.kt | repo: AtricoSoftware/atrico.kotlib | head_hexsha: 273fd90cb345818d0333be205ba46b0950aeb340 | licenses: ["MIT"]
stars: null | issues: null | forks: null

package atrico.kotlib.testing
import java.util.*
// Calculate the combinations of a set
fun <T> calculateCombinations(items: Collection<T>, count: Int): Sequence<Iterable<T>> {
fun itemIterator(items: Collection<T>): Sequence<Iteration<T>> {
return sequence {
val queue = ArrayDeque(items)
while (!queue.isEmpty()) {
yield(Iteration(queue.pop(), queue))
}
}
}
return calculateImpl(items, count) { col -> itemIterator(col) }
}
data class Iteration<T>(val item: T, val remaining: Collection<T>)
private fun <T> calculateImpl(
items: Collection<T>,
count: Int,
itemIterator: (Collection<T>) -> Sequence<Iteration<T>>
): Sequence<Iterable<T>> {
fun calculate(items: Collection<T>, count: Int): Sequence<Iterable<T>> {
return sequence {
for (iteration in itemIterator(items)) {
if (count == 1) yield(listOf(iteration.item))
else {
for (tail in calculate(iteration.remaining, count - 1)) {
yield(listOf(iteration.item) + tail)
}
}
}
}
}
// Validate the params
if (items.count() == 0) throw IllegalArgumentException("Elements cannot be empty")
if (count < 1) throw IllegalArgumentException("Count must be greater than 0")
if (count > items.count()) throw IllegalArgumentException("Count must be less than or equal to elements count")
// Calculate value
return calculate(items, count)
}
| 35.644444 | 116 | 0.585411 | 3.125 |

hexsha: d9bfe622c46c0537a41526218e7145494b376979 | size: 3,043 | ext: rs | lang: Rust
path: hello-world/src/main.rs | repo: Becavalier/rust-by-example-cases | head_hexsha: 8e2d52986aa5bd27b06cbc26dd04ea20a46872c3 | licenses: ["MIT"]
stars: 2 (2021-04-20T08:43:35.000Z to 2021-04-20T10:33:06.000Z) | issues: null | forks: null

fn main() {
let x = String::from("world!");
let s = format!("Hello, {}", &x);
print!("{}", s);
println!("{}", s); // with newline, to io::stdout.
eprint!("{}", s);
eprintln!("{0}", s); // use the first value.
println!("{:b}", 100); // with formatting flags (binary).
println!("{}", 100u32);
// use named arguments.
println!(
"{subject} {verb} {object}.",
object = "the lazy dog",
subject = "the quick brown fox",
verb = "jumps over"
);
// align to right, padding with zero numerical extension.
println!("{number:>width$} {number:<0width$}", number = 1, width = 6);
// print compound types with Debug trait.
#[derive(Debug)]
struct SA(i32);
println!("{:#?}", &SA(10));
// print compound types with custom Display trait.
use std::fmt;
#[derive(Debug)]
struct Person<'a> {
name: &'a str,
age: u8,
}
impl<'a> Person<'a> {
fn new(name: &'a str, age: u8) -> Self {
Person { name, age }
}
}
impl<'a> fmt::Display for Person<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} {}", &self.name, &self.age)
}
}
println!("{}", &Person::new("Jason", 19));
println!("{:#?}", &Person::new("Alice", 21));
// implementing Display trait for Vec.
struct List(Vec<i32>);
impl fmt::Display for List {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let vec = &self.0;
write!(f, "[")?; // propagate the error to the outer scope if any.
for (index, &v) in vec.iter().enumerate() {
if index == 0 {
write!(f, "{}", v)?;
} else {
write!(f, ",{}", v)?;
}
}
write!(f, "]")
}
}
println!("{}", &List(vec![1, 2, 3]));
// formatting with macro format!.
let foo = 3735928559i64;
println!("{}", format!("{}", foo));
println!("{}", format!("0x{:X}", foo));
println!("{}", format!("b'{:b}", foo));
println!("{}", format!("0o{:o}", foo));
struct City {
name: &'static str,
lat: f32,
lon: f32,
}
impl fmt::Display for City {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let lat_c = if self.lat >= 0.0 { 'N' } else { 'S' };
let lon_c = if self.lon >= 0.0 { 'E' } else { 'W' };
write!(
f,
"{}: {:.3}°{} {:.3}°{}",
self.name,
self.lat.abs(),
lat_c,
self.lon.abs(),
lon_c
)
}
}
for city in [
// pattern matching.
City {
name: "Dublin",
lat: 53.347778,
lon: -6.259722,
},
City {
name: "Oslo",
lat: 59.95,
lon: 10.75,
},
]
.iter()
{
println!("{}", city);
}
}
| 27.663636 | 78 | 0.425895 | 3.421875 |

hexsha: 0b9851847b18a4b7b38e82d6bd87af07dc1c57a9 | size: 1,531 | ext: py | lang: Python
path: examples/imu.py | repo: dan-stone/canal | head_hexsha: 8a6b03a46102f7e5ca457538eb03ab9526eec095 | licenses: ["MIT"]
stars: 2 (2017-02-08T20:27:39.000Z to 2019-07-15T00:34:05.000Z) | issues: null | forks: 1 (2018-05-31T14:09:19.000Z to 2018-05-31T14:09:19.000Z)

import datetime
import canal
from influxdb import InfluxDBClient
class IMU(canal.Measurement):
accelerometer_x = canal.IntegerField()
accelerometer_y = canal.IntegerField()
accelerometer_z = canal.IntegerField()
gyroscope_x = canal.IntegerField()
gyroscope_y = canal.IntegerField()
gyroscope_z = canal.IntegerField()
user_id = canal.Tag()
if __name__ == "__main__":
start_date = datetime.datetime.now(datetime.timezone.utc)
duration = datetime.timedelta(seconds=60)
user_id = 12345678
client = InfluxDBClient(
host="localhost",
port=8086,
database="canal"
)
# Write some dummy IMU data, sampled once per second
num_imu_samples = int(duration.total_seconds())
imu = IMU(
time=[start_date + datetime.timedelta(seconds=d) for d in
range(num_imu_samples)],
        accelerometer_x=range(0, 1 * num_imu_samples, 1),
        accelerometer_y=range(0, 2 * num_imu_samples, 2),
        accelerometer_z=range(0, 3 * num_imu_samples, 3),
        gyroscope_x=range(0, 4 * num_imu_samples, 4),
        gyroscope_y=range(0, 5 * num_imu_samples, 5),
        gyroscope_z=range(0, 6 * num_imu_samples, 6),
user_id=user_id
)
client.write(
data=imu.to_line_protocol(),
params=dict(
db="canal"
)
)
# Read back the IMU data
imu_resp = client.query(IMU.make_query_string(
time__gte=start_date,
time__lte=start_date + duration,
user_id=user_id
))
assert imu == IMU.from_json(imu_resp.raw)
| 27.836364 | 65 | 0.640758 | 3.203125 |

hexsha: 501f5480e25a1561f518bc7d75ac0beb747defbd | size: 7,180 | ext: go | lang: Go
path: pkg/c8y/vendor/github.com/obeattie/ohmyglob/glob.go | repo: ricardnacher/go-c8y | head_hexsha: 362a3775d9ffde5f14cff366044423347bacf064 | licenses: ["MIT"]
stars: 8 (2015-02-21T20:21:49.000Z to 2021-03-14T15:45:41.000Z) | issues: 4 (2019-05-25T07:07:46.000Z to 2021-09-20T08:53:31.000Z) | forks: 2 (2016-01-25T14:30:37.000Z to 2018-10-24T18:48:49.000Z)

// Package ohmyglob provides a minimal glob matching utility.
package ohmyglob
import (
"bytes"
"fmt"
"io"
"os"
"regexp"
"strings"
log "github.com/cihub/seelog"
)
var (
// Logger is used to log trace-level info; logging is completely disabled by default but can be changed by replacing
// this with a configured logger
Logger log.LoggerInterface
// Escaper is the character used to escape a meaningful character
Escaper = '\\'
// Runes that, in addition to the separator, mean something when they appear in the glob (includes Escaper)
expanders = []rune{'?', '*', '!', Escaper}
)
func init() {
if Logger == nil {
var err error
Logger, err = log.LoggerFromWriterWithMinLevel(os.Stderr, log.CriticalLvl) // seelog bug means we can't use log.Off
if err != nil {
panic(err)
}
}
}
type processedToken struct {
contents *bytes.Buffer
tokenType tc
}
type parserState struct {
options *Options
// The regex-escaped separator character
escapedSeparator string
processedTokens []processedToken
}
// GlobMatcher is the basic interface of a Glob or GlobSet. It provides a Regexp-style interface for checking matches.
type GlobMatcher interface {
// Match reports whether the Glob matches the byte slice b
Match(b []byte) bool
// MatchReader reports whether the Glob matches the text read by the RuneReader
MatchReader(r io.RuneReader) bool
// MatchString reports whether the Glob matches the string s
MatchString(s string) bool
}
// Glob is a single glob pattern; implements GlobMatcher. A Glob is immutable.
type Glob interface {
GlobMatcher
// String returns the pattern that was used to create the Glob
String() string
// IsNegative returns whether the pattern was negated (prefixed with !)
IsNegative() bool
}
// Glob is a glob pattern that has been compiled into a regular expression.
type globImpl struct {
*regexp.Regexp
// The separator from options, escaped for appending to the regexBuffer (only available during parsing)
// The input pattern
globPattern string
// State only available during parsing
parserState *parserState
// Set to true if the pattern was negated
negated bool
}
// Options modify the behaviour of Glob parsing
type Options struct {
// The character used to split path components
Separator rune
// Set to false to allow any prefix before the glob match
MatchAtStart bool
// Set to false to allow any suffix after the glob match
MatchAtEnd bool
}
// DefaultOptions are a default set of Options that uses a forward slash as a separator, and require a full match
var DefaultOptions = &Options{
Separator: '/',
MatchAtStart: true,
MatchAtEnd: true,
}
func (g *globImpl) String() string {
return g.globPattern
}
func (g *globImpl) IsNegative() bool {
return g.negated
}
func popLastToken(state *parserState) *processedToken {
state.processedTokens = state.processedTokens[:len(state.processedTokens)-1]
if len(state.processedTokens) > 0 {
return &state.processedTokens[len(state.processedTokens)-1]
}
return &processedToken{}
}
// Compile parses the given glob pattern and converts it to a Glob. If no options are given, the DefaultOptions are
// used.
func Compile(pattern string, options *Options) (Glob, error) {
pattern = strings.TrimSpace(pattern)
reader := strings.NewReader(pattern)
if options == nil {
options = DefaultOptions
} else {
// Check that the separator is not an expander
for _, expander := range expanders {
if options.Separator == expander {
return nil, fmt.Errorf("'%s' is not allowed as a separator", string(options.Separator))
}
}
}
state := &parserState{
options: options,
escapedSeparator: escapeRegexComponent(string(options.Separator)),
processedTokens: make([]processedToken, 0, 10),
}
glob := &globImpl{
Regexp: nil,
globPattern: pattern,
negated: false,
parserState: state,
}
regexBuf := new(bytes.Buffer)
if options.MatchAtStart {
regexBuf.WriteRune('^')
}
var err error
// Transform into a regular expression pattern
// 1. Parse negation prefixes
err = parseNegation(reader, glob)
if err != nil {
return nil, err
}
// 2. Tokenise and convert!
tokeniser := newGlobTokeniser(reader, options)
lastProcessedToken := &processedToken{}
for tokeniser.Scan() {
if err = tokeniser.Err(); err != nil {
return nil, err
}
token, tokenType := tokeniser.Token()
t := processedToken{
contents: nil,
tokenType: tokenType,
}
// Special cases
if tokenType == tcGlobStar && tokeniser.Peek() {
// If this is a globstar and the next token is a separator, consume it (the globstar pattern itself includes
// a separator)
if err = tokeniser.PeekErr(); err != nil {
return nil, err
}
_, peekedType := tokeniser.PeekToken()
if peekedType == tcSeparator {
tokeniser.Scan()
}
}
if tokenType == tcGlobStar && lastProcessedToken.tokenType == tcGlobStar {
// If the last token was a globstar and this is too, remove the last. We don't remove this globstar because
// it may now be the last in the pattern, which is special
lastProcessedToken = popLastToken(state)
}
if tokenType == tcGlobStar && lastProcessedToken.tokenType == tcSeparator && !tokeniser.Peek() {
			// If this is the last token, and it's a globstar, remove a preceding separator
lastProcessedToken = popLastToken(state)
}
t.contents, err = processToken(token, tokenType, glob, tokeniser)
if err != nil {
return nil, err
}
lastProcessedToken = &t
state.processedTokens = append(state.processedTokens, t)
}
for _, t := range state.processedTokens {
t.contents.WriteTo(regexBuf)
}
if options.MatchAtEnd {
regexBuf.WriteRune('$')
}
regexString := regexBuf.String()
Logger.Tracef("[ohmyglob:Glob] Compiled \"%s\" to regex `%s` (negated: %v)", pattern, regexString, glob.negated)
re, err := regexp.Compile(regexString)
if err != nil {
return nil, err
}
glob.parserState = nil
glob.Regexp = re
return glob, nil
}
func parseNegation(r io.RuneScanner, glob *globImpl) error {
for {
char, _, err := r.ReadRune()
if err != nil {
return err
} else if char == '!' {
glob.negated = !glob.negated
} else {
r.UnreadRune()
return nil
}
}
}
func processToken(token string, tokenType tc, glob *globImpl, tokeniser *globTokeniser) (*bytes.Buffer, error) {
state := glob.parserState
buf := new(bytes.Buffer)
switch tokenType {
case tcGlobStar:
// Globstars also take care of surrounding separators; separator components before and after a globstar are
// suppressed
isLast := !tokeniser.Peek()
buf.WriteString("(?:")
if isLast && len(glob.parserState.processedTokens) > 0 {
buf.WriteString(state.escapedSeparator)
}
buf.WriteString(".+")
if !isLast {
buf.WriteString(state.escapedSeparator)
}
buf.WriteString(")?")
case tcStar:
buf.WriteString("[^")
buf.WriteString(state.escapedSeparator)
buf.WriteString("]*")
case tcAny:
buf.WriteString("[^")
buf.WriteString(state.escapedSeparator)
buf.WriteString("]")
case tcSeparator:
buf.WriteString(state.escapedSeparator)
case tcLiteral:
buf.WriteString(escapeRegexComponent(token))
}
return buf, nil
}
| 26.791045 | 118 | 0.713788 | 3.328125 |

hexsha: 287401f8cb0c098e08ebef1b45b9c6df311fe8d7 | size: 8,459 | ext: rb | lang: Ruby
path: vgvm.rb | repo: sonota88/vm2gol-v2 | head_hexsha: 9323532d6e6ad63a42ce9a6bcc7a38207df8f2de | licenses: ["MIT"]
stars: 9 (2021-01-18T22:50:09.000Z to 2022-01-11T10:25:31.000Z) | issues: null | forks: null

require "json"
require_relative "./common"
module TermColor
RESET = "\e[m"
RED = "\e[0;31m"
BLUE = "\e[0;34m"
end
class Memory
attr_accessor :main, :stack, :vram
MAIN_DUMP_WIDTH = 10
def initialize(stack_size)
@main = []
    # stack area
@stack = Array.new(stack_size, 0)
@vram = Array.new(50, 0)
end
def dump_main(pc)
work_insns = []
@main.each_with_index do |insn, i|
work_insns << { addr: i, insn: insn }
end
work_insns
.select do |work_insn|
pc - MAIN_DUMP_WIDTH <= work_insn[:addr] &&
work_insn[:addr] <= pc + MAIN_DUMP_WIDTH
end
.map do |work_insn|
head =
if work_insn[:addr] == pc
"pc =>"
else
" "
end
opcode = work_insn[:insn][0]
color =
case opcode
when "exit", "call", "ret", "jump", "jump_eq"
TermColor::RED
when "_cmt", "_debug"
TermColor::BLUE
else
""
end
indent =
if opcode == "label"
""
else
" "
end
format(
"%s %02d #{color}%s%s#{TermColor::RESET}",
head,
work_insn[:addr],
indent,
work_insn[:insn].inspect
)
end
.join("\n")
end
def dump_stack(sp, bp)
lines = []
@stack.each_with_index do |x, i|
addr = i
next if addr < sp - 8
next if addr > sp + 8
head =
case addr
when sp
if sp == bp
"sp bp => "
else
"sp => "
end
when bp
" bp => "
else
" "
end
lines << head + "#{addr} #{x.inspect}"
end
lines.join("\n")
end
def format_cols(cols)
cols.map { |col| col == 1 ? "@" : "." }.join("")
end
def dump_vram
rows = @vram.each_slice(5).to_a
main = rows[0..4]
buf = rows[5..9]
(0..4)
.map do |li| # line index
format_cols(main[li]) + " " + format_cols(buf[li])
end
.join("\n")
end
end
class Vm
FLAG_TRUE = 1
FLAG_FALSE = 0
def initialize(mem, stack_size)
@pc = 0 # program counter
# registers
@reg_a = 0
@reg_b = 0
@zf = FLAG_FALSE # zero flag
@mem = mem
@sp = stack_size - 1 # stack pointer
@bp = stack_size - 1 # base pointer
@step = 0
@debug = false
end
def test?
ENV.key?("TEST")
end
def set_sp(addr)
raise "Stack overflow" if addr < 0
@sp = addr
end
def load_program_file(path)
insns = File.open(path).each_line.map { |line| JSON.parse(line) }
load_program(insns)
end
def load_program(insns)
@mem.main = insns
end
def execute
insn = @mem.main[@pc]
opcode = insn[0]
case opcode
when "exit" then return true
when "cp" then cp() ; @pc += 1
when "add_ab" then add_ab() ; @pc += 1
when "mult_ab" then mult_ab() ; @pc += 1
when "add_sp" then add_sp() ; @pc += 1
when "sub_sp" then sub_sp() ; @pc += 1
when "compare" then compare() ; @pc += 1
when "label" then @pc += 1
when "jump" then jump()
when "jump_eq" then jump_eq()
when "call" then call()
when "ret" then ret()
when "push" then push() ; @pc += 1
when "pop" then pop() ; @pc += 1
when "set_vram" then set_vram() ; @pc += 1
when "get_vram" then get_vram() ; @pc += 1
when "_cmt" then @pc += 1
when "_debug" then _debug() ; @pc += 1
else
raise "Unknown opcode (#{opcode})"
end
false
end
def start
unless test?
      dump() # initial state
puts "Press enter key to start"
$stdin.gets
end
loop do
@step += 1
do_exit = execute()
return if do_exit
unless test?
if ENV.key?("STEP") || @debug
dump()
$stdin.gets
else
dump() if @step % 10 == 0
end
end
end
end
def dump_reg
[
"reg_a(#{ @reg_a.inspect })",
"reg_b(#{ @reg_b.inspect })"
].join(" ")
end
def dump
puts <<~DUMP
================================
#{ @step }: #{ dump_reg() } zf(#{ @zf })
---- memory (main) ----
#{ @mem.dump_main(@pc) }
---- memory (stack) ----
#{ @mem.dump_stack(@sp, @bp) }
---- memory (vram) ----
#{ @mem.dump_vram() }
DUMP
end
def calc_indirect_addr(str)
_, base_str, disp_str = str.split(":")
base =
case base_str
when "bp"
@bp
else
raise not_yet_impl("base_str", base_str)
end
base + disp_str.to_i
end
def add_ab
@reg_a = @reg_a + @reg_b
end
def mult_ab
@reg_a = @reg_a * @reg_b
end
def cp
arg_src = @mem.main[@pc][1]
arg_dest = @mem.main[@pc][2]
src_val =
case arg_src
when Integer
arg_src
when "reg_a"
@reg_a
when "sp"
@sp
when "bp"
@bp
when /^ind:/
@mem.stack[calc_indirect_addr(arg_src)]
else
raise not_yet_impl("copy src", arg_src)
end
case arg_dest
when "reg_a"
@reg_a = src_val
when "reg_b"
@reg_b = src_val
when "bp"
@bp = src_val
when "sp"
set_sp(src_val)
when /^ind:/
@mem.stack[calc_indirect_addr(arg_dest)] = src_val
else
raise not_yet_impl("copy dest", arg_dest)
end
end
def add_sp
set_sp(@sp + @mem.main[@pc][1])
end
def sub_sp
set_sp(@sp - @mem.main[@pc][1])
end
def compare
@zf = (@reg_a == @reg_b) ? FLAG_TRUE : FLAG_FALSE
end
def jump
jump_dest = @mem.main[@pc][1]
@pc = jump_dest
end
def jump_eq
if @zf == FLAG_TRUE
jump_dest = @mem.main[@pc][1]
@pc = jump_dest
else
@pc += 1
end
end
def call
    set_sp(@sp - 1) # decrement the stack pointer by one
    @mem.stack[@sp] = @pc + 1 # remember the return address
    next_addr = @mem.main[@pc][1] # jump destination
@pc = next_addr
end
def ret
    ret_addr = @mem.stack[@sp] # fetch the return address
    @pc = ret_addr # jump back to the caller
    set_sp(@sp + 1) # restore the stack pointer
end
def push
arg = @mem.main[@pc][1]
val_to_push =
case arg
when Integer
arg
when String
case arg
when "reg_a"
@reg_a
when "bp"
@bp
when /^ind:/
stack_addr = calc_indirect_addr(arg)
@mem.stack[stack_addr]
else
raise not_yet_impl("push", arg)
end
else
raise not_yet_impl("push", arg)
end
set_sp(@sp - 1)
@mem.stack[@sp] = val_to_push
end
def pop
arg = @mem.main[@pc][1]
val = @mem.stack[@sp]
case arg
when "reg_a"
@reg_a = val
when "reg_b"
@reg_b = val
when "bp"
@bp = val
else
raise not_yet_impl("pop", arg)
end
set_sp(@sp + 1)
end
def set_vram
arg_vram = @mem.main[@pc][1]
arg_val = @mem.main[@pc][2]
src_val =
case arg_val
when Integer
arg_val
when "reg_a"
@reg_a
when /^ind:/
stack_addr = calc_indirect_addr(arg_val)
@mem.stack[stack_addr]
else
raise not_yet_impl("arg_val", arg_val)
end
case arg_vram
when Integer
@mem.vram[arg_vram] = src_val
when /^ind:/
stack_addr = calc_indirect_addr(arg_vram)
vram_addr = @mem.stack[stack_addr]
@mem.vram[vram_addr] = src_val
else
raise not_yet_impl("arg_vram", arg_vram)
end
end
def get_vram
arg_vram = @mem.main[@pc][1]
arg_dest = @mem.main[@pc][2]
vram_addr =
case arg_vram
when Integer
arg_vram
when String
case arg_vram
when /^ind:/
stack_addr = calc_indirect_addr(arg_vram)
@mem.stack[stack_addr]
else
raise not_yet_impl("arg_vram", arg_vram)
end
else
raise not_yet_impl("arg_vram", arg_vram)
end
val = @mem.vram[vram_addr]
case arg_dest
when "reg_a"
@reg_a = val
else
raise not_yet_impl("arg_dest", arg_dest)
end
end
def _debug
@debug = true
end
end
if $PROGRAM_NAME == __FILE__
exe_file = ARGV[0]
stack_size = 50
mem = Memory.new(stack_size)
vm = Vm.new(mem, stack_size)
vm.load_program_file(exe_file)
vm.start
vm.dump()
$stderr.puts "exit"
end
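The call/ret convention implemented above (push the return address, jump, then pop it again on ret) can be illustrated outside the VM; a minimal Python sketch using a plain list rather than the VM's downward-growing stack:

```python
stack, pc = [], 10            # pretend the "call" instruction sits at address 10

def call(dest):
    global pc
    stack.append(pc + 1)      # remember the instruction after the call
    pc = dest                 # jump into the subroutine

def ret():
    global pc
    pc = stack.pop()          # jump back to the remembered address

call(42)
assert pc == 42
ret()
assert pc == 11
```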
| 18.881696 | 69 | 0.506916 | 3.140625 |

hexsha: 6b52d484f72d299ffa0f80dbe2a5dad34864b091 | size: 15,812 | ext: rs | lang: Rust
path: bril-rs/src/lib.rs | repo: femtomc/bril | head_hexsha: 51bb0c5a4d38787b12204882d3502c94e963103a | licenses: ["MIT"]
stars: 1 (2021-02-15T04:24:08.000Z to 2021-02-15T04:24:08.000Z) | issues: null | forks: null

use std::fmt::{self, Display, Formatter};
use std::io::{self, Write};
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Program {
pub functions: Vec<Function>,
}
impl Display for Program {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
for func in self.functions.iter() {
writeln!(f, "{}", func)?;
}
Ok(())
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Function {
pub name: String,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub args: Vec<Argument>,
#[serde(rename = "type")]
#[serde(skip_serializing_if = "Option::is_none")]
pub return_type: Option<Type>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub instrs: Vec<Code>,
}
impl Display for Function {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "@{}(", self.name)?;
for (i, arg) in self.args.iter().enumerate() {
if i != 0 {
write!(f, ", ")?;
}
write!(f, "{}", arg)?;
}
write!(f, ")")?;
if let Some(tpe) = self.return_type.as_ref() {
write!(f, ": {}", tpe)?;
}
writeln!(f, " {{")?;
for instr in self.instrs.iter() {
writeln!(f, "{}", instr)?;
}
write!(f, "}}")?;
Ok(())
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Argument {
pub name: String,
#[serde(rename = "type")]
pub arg_type: Type,
}
impl Display for Argument {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "{}: {}", self.name, self.arg_type)
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[serde(untagged)]
pub enum Code {
Label { label: String },
Instruction(Instruction),
}
impl Display for Code {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
Code::Label { label } => write!(f, ".{}:", label),
Code::Instruction(instr) => write!(f, " {}", instr),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[serde(untagged)]
pub enum Instruction {
Constant {
op: ConstOps,
dest: String,
#[serde(rename = "type")]
const_type: Type,
value: Literal,
},
Value {
op: ValueOps,
dest: String,
#[serde(rename = "type")]
op_type: Type,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
args: Vec<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
funcs: Vec<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
labels: Vec<String>,
},
Effect {
op: EffectOps,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
args: Vec<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
funcs: Vec<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
labels: Vec<String>,
},
}
impl Display for Instruction {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
Instruction::Constant {
op,
dest,
const_type,
value,
} => {
write!(f, "{}: {} = {} {};", dest, const_type, op, value)
}
Instruction::Value {
op,
dest,
op_type,
args,
funcs,
labels,
} => {
write!(f, "{}: {} = {}", dest, op_type, op)?;
for func in funcs {
write!(f, " @{}", func)?;
}
for arg in args {
write!(f, " {}", arg)?;
}
for label in labels {
write!(f, " .{}", label)?;
}
write!(f, ";")
}
Instruction::Effect {
op,
args,
funcs,
labels,
} => {
write!(f, "{}", op)?;
for func in funcs {
write!(f, " @{}", func)?;
}
for arg in args {
write!(f, " {}", arg)?;
}
for label in labels {
write!(f, " .{}", label)?;
}
write!(f, ";")
}
}
}
}
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ConstOps {
#[serde(rename = "const")]
Const,
}
impl Display for ConstOps {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
ConstOps::Const => write!(f, "const"),
}
}
}
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum EffectOps {
#[serde(rename = "jmp")]
Jump,
#[serde(rename = "br")]
Branch,
Call,
#[serde(rename = "ret")]
Return,
Print,
Nop,
#[cfg(feature = "memory")]
Store,
#[cfg(feature = "memory")]
Free,
#[cfg(feature = "speculate")]
Speculate,
#[cfg(feature = "speculate")]
Commit,
#[cfg(feature = "speculate")]
Guard,
}
impl Display for EffectOps {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
EffectOps::Jump => write!(f, "jmp"),
EffectOps::Branch => write!(f, "br"),
EffectOps::Call => write!(f, "call"),
EffectOps::Return => write!(f, "ret"),
EffectOps::Print => write!(f, "print"),
EffectOps::Nop => write!(f, "nop"),
#[cfg(feature = "memory")]
EffectOps::Store => write!(f, "store"),
#[cfg(feature = "memory")]
EffectOps::Free => write!(f, "free"),
#[cfg(feature = "speculate")]
EffectOps::Speculate => write!(f, "speculate"),
#[cfg(feature = "speculate")]
EffectOps::Commit => write!(f, "commit"),
#[cfg(feature = "speculate")]
EffectOps::Guard => write!(f, "guard"),
}
}
}
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum ValueOps {
Add,
Sub,
Mul,
Div,
Eq,
Lt,
Gt,
Le,
Ge,
Not,
And,
Or,
Call,
Id,
#[cfg(feature = "ssa")]
Phi,
#[cfg(feature = "float")]
Fadd,
#[cfg(feature = "float")]
Fsub,
#[cfg(feature = "float")]
Fmul,
#[cfg(feature = "float")]
Fdiv,
#[cfg(feature = "float")]
Feq,
#[cfg(feature = "float")]
Flt,
#[cfg(feature = "float")]
Fgt,
#[cfg(feature = "float")]
Fle,
#[cfg(feature = "float")]
Fge,
#[cfg(feature = "memory")]
Alloc,
#[cfg(feature = "memory")]
Load,
#[cfg(feature = "memory")]
PtrAdd,
}
impl Display for ValueOps {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
ValueOps::Add => write!(f, "add"),
ValueOps::Sub => write!(f, "sub"),
ValueOps::Mul => write!(f, "mul"),
ValueOps::Div => write!(f, "div"),
ValueOps::Eq => write!(f, "eq"),
ValueOps::Lt => write!(f, "lt"),
ValueOps::Gt => write!(f, "gt"),
ValueOps::Le => write!(f, "le"),
ValueOps::Ge => write!(f, "ge"),
ValueOps::Not => write!(f, "not"),
ValueOps::And => write!(f, "and"),
ValueOps::Or => write!(f, "or"),
ValueOps::Call => write!(f, "call"),
ValueOps::Id => write!(f, "id"),
#[cfg(feature = "ssa")]
ValueOps::Phi => write!(f, "phi"),
#[cfg(feature = "float")]
ValueOps::Fadd => write!(f, "fadd"),
#[cfg(feature = "float")]
ValueOps::Fsub => write!(f, "fsub"),
#[cfg(feature = "float")]
ValueOps::Fmul => write!(f, "fmul"),
#[cfg(feature = "float")]
ValueOps::Fdiv => write!(f, "fdiv"),
#[cfg(feature = "float")]
ValueOps::Feq => write!(f, "feq"),
#[cfg(feature = "float")]
ValueOps::Flt => write!(f, "flt"),
#[cfg(feature = "float")]
ValueOps::Fgt => write!(f, "fgt"),
#[cfg(feature = "float")]
ValueOps::Fle => write!(f, "fle"),
#[cfg(feature = "float")]
ValueOps::Fge => write!(f, "fge"),
#[cfg(feature = "memory")]
ValueOps::Alloc => write!(f, "alloc"),
#[cfg(feature = "memory")]
ValueOps::Load => write!(f, "load"),
#[cfg(feature = "memory")]
ValueOps::PtrAdd => write!(f, "ptrAdd"),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash)]
#[serde(rename_all = "lowercase")]
pub enum Type {
Int,
Bool,
#[cfg(feature = "float")]
Float,
#[cfg(feature = "memory")]
#[serde(rename = "ptr")]
Pointer(Box<Type>),
}
impl Display for Type {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
Type::Int => write!(f, "int"),
Type::Bool => write!(f, "bool"),
#[cfg(feature = "float")]
Type::Float => write!(f, "float"),
#[cfg(feature = "memory")]
Type::Pointer(tpe) => write!(f, "ptr<{}>", tpe),
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
#[serde(untagged)]
pub enum Literal {
Int(i64),
Bool(bool),
#[cfg(feature = "float")]
Float(f64),
}
impl Display for Literal {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match self {
Literal::Int(i) => write!(f, "{}", i),
Literal::Bool(b) => write!(f, "{}", b),
#[cfg(feature = "float")]
Literal::Float(x) => write!(f, "{}", x),
}
}
}
impl Literal {
pub fn get_type(&self) -> Type {
match self {
Literal::Int(_) => Type::Int,
Literal::Bool(_) => Type::Bool,
#[cfg(feature = "float")]
Literal::Float(_) => Type::Float,
}
}
}
pub fn load_program_from_read<R: std::io::Read>(mut input: R) -> Program {
let mut buffer = String::new();
input.read_to_string(&mut buffer).unwrap();
serde_json::from_str(&buffer).unwrap()
}
pub fn load_program() -> Program {
load_program_from_read(std::io::stdin())
}
pub fn output_program(p: &Program) {
io::stdout()
.write_all(serde_json::to_string(p).unwrap().as_bytes())
.unwrap();
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn full_program() {
let expected = "@main(cond: bool) {
a: bool = const true;
br cond .left .right;
.left:
c: int = add a b;
jmp .end;
.right:
jmp .end;
.end:
print d;
}
";
let program = Program {
functions: vec![Function {
name: "main".to_owned(),
args: vec![Argument {
name: "cond".to_owned(),
arg_type: Type::Bool,
}],
return_type: None,
instrs: vec![
Code::Instruction(Instruction::Constant {
op: ConstOps::Const,
dest: "a".to_owned(),
const_type: Type::Bool,
value: Literal::Bool(true),
}),
Code::Instruction(Instruction::Effect {
op: EffectOps::Branch,
args: vec!["cond".to_owned()],
funcs: vec![],
labels: vec!["left".to_owned(), "right".to_owned()],
}),
Code::Label {
label: "left".to_owned(),
},
Code::Instruction(Instruction::Value {
op: ValueOps::Add,
dest: "c".to_owned(),
op_type: Type::Int,
args: vec!["a".to_owned(), "b".to_owned()],
funcs: vec![],
labels: vec![],
}),
Code::Instruction(Instruction::Effect {
op: EffectOps::Jump,
args: vec![],
funcs: vec![],
labels: vec!["end".to_owned()],
}),
Code::Label {
label: "right".to_owned(),
},
Code::Instruction(Instruction::Effect {
op: EffectOps::Jump,
args: vec![],
funcs: vec![],
labels: vec!["end".to_owned()],
}),
Code::Label {
label: "end".to_owned(),
},
Code::Instruction(Instruction::Effect {
op: EffectOps::Print,
args: vec!["d".to_owned()],
funcs: vec![],
labels: vec![],
}),
],
}],
};
assert_eq!(expected, format!("{}", program));
}
#[test]
fn value_call() {
assert_eq!(
"mod: int = call @mod a b;",
format!(
"{}",
Instruction::Value {
op: ValueOps::Call,
dest: "mod".to_owned(),
op_type: Type::Int,
args: vec!["a".to_owned(), "b".to_owned()],
funcs: vec!["mod".to_owned()],
labels: vec![],
}
)
)
}
#[test]
fn effect_call() {
assert_eq!(
"call @callPrint v1;",
format!(
"{}",
Instruction::Effect {
op: EffectOps::Call,
args: vec!["v1".to_owned()],
funcs: vec!["callPrint".to_owned()],
labels: vec![],
}
)
)
}
#[test]
fn pointer() {
assert_eq!(
"myptr: ptr<int> = alloc ten;",
format!(
"{}",
Instruction::Value {
op: ValueOps::Alloc,
dest: "myptr".to_owned(),
op_type: Type::Pointer(Box::new(Type::Int)),
args: vec!["ten".to_owned()],
funcs: vec![],
labels: vec![],
}
)
)
}
#[test]
fn phi() {
assert_eq!(
"x: int = phi a b .here .there;",
format!(
"{}",
Instruction::Value {
op: ValueOps::Phi,
dest: "x".to_owned(),
op_type: Type::Int,
args: vec!["a".to_owned(), "b".to_owned()],
funcs: vec![],
labels: vec!["here".to_owned(), "there".to_owned()],
}
)
)
}
#[test]
fn speculation() {
assert_eq!(
"speculate;",
format!(
"{}",
Instruction::Effect {
op: EffectOps::Speculate,
args: vec![],
funcs: vec![],
labels: vec![],
}
)
)
}
}
| 28.235714 | 76 | 0.426195 | 3.09375 |

hexsha: 3285c7bcf8a45e226a8861a3ccbc819526f84ad0 | size: 993 | ext: lua | lang: Lua
path: test/tests/colonrange.lua | repo: jugglerchris/textadept-vi | head_hexsha: 46eb2c6502df9a0166a4de2a8c14fca66b4a0220 | licenses: ["MIT"]
stars: 35 (2015-02-06T01:48:41.000Z to 2022-01-01T17:31:10.000Z) | issues: 31 (2015-02-24T22:23:30.000Z to 2020-12-11T22:50:45.000Z) | forks: 9 (2015-06-21T11:51:17.000Z to 2020-05-27T17:13:19.000Z)

-- Check that running :commands works.
-- Add a dummy command
local assertEq = test.assertEq
local myvar = nil
local vi_mode = require 'textadept-vi.vi_mode'
local cmd_errors = {}
local function save_errors(f)
return function(...)
ok, err = pcall(f, ...)
if ok then return err end
cmd_errors[#cmd_errors+1] = err
end
end
vi_mode.ex_mode.add_ex_command('tester', save_errors(function(args, range)
assertEq(args, {'tester', 'arg1', 'arg2'})
assertEq(range, {1, 4})
myvar = range
end), nil) -- no completer
-- test.key doesn't currently work from the command entry, so we instead
-- need to use physkey, with one final key at the end (which will wait for
-- the keypress to have been handled).
test.keys(':1,4tester arg1 arg2')
test.key('enter')
assertEq(cmd_errors, {})
assertEq(myvar,{1,4})
-- remove the test command
assert(vi_mode.ex_mode.ex_commands['tester'])
vi_mode.ex_mode.ex_commands['tester'] = nil
| 29.205882 | 74 | 0.668681 | 3.3125 |

hexsha: e9147cf4bcc0292e88382d6f3ee6df51daa6215d | size: 1,947 | ext: rb | lang: Ruby
path: spec/flight_radar_spec.rb | repo: kupolak/flight_radar | head_hexsha: 673bc47d18f0b8b47b010ecc605ac922d94c8580 | licenses: ["MIT"]
stars: null | issues: null | forks: null

# frozen_string_literal: true
require "spec_helper"
RSpec.describe FlightRadar do
it "gets airlines" do
result = FlightRadar.airlines
expect(result.count).to be >= 100
end
it "gets airports" do
airports = FlightRadar.airports
expect(airports.count).to be > 4000
end
it "gets airport" do
airports = %w[ATL LAX DXB DFW]
count = 0
airports.each do |airport|
count += 1 if FlightRadar.airport(airport) != {}
end
expect(count).to be >= 3
end
it "gets zones" do
result = FlightRadar.zones
check = %w[europe northamerica southamerica oceania asia africa atlantic maldives northatlantic].all? do |s|
result.key? s
end
expect(check).to be_truthy
end
it "gets flights" do
result = FlightRadar.flights
expect(result.count).to be >= 100
end
it "gets flight details" do
flight = FlightRadar.flights
flight = flight.sample
flight_details = FlightRadar.flight_details(flight.id)
expect(flight_details["identification"]["id"]).to be == flight.id
end
it "gets flights by airline" do
airlines = %w[SWA GLO AZU UAL THY]
count = 0
airlines.each do |airline|
count += 1 if FlightRadar.flights(airline: airline)
end
expect(count).to be >= 3
end
it "gets flights by bounds" do
zone = FlightRadar.zones["europe"]
bounds = FlightRadar.bounds(zone)
result = FlightRadar.flights(bounds: bounds)
expect(result.count).to be >= 30
end
it "gets airline logo" do
airline = [%w[WN SWA], %w[G3 GLO], %w[AD AZU], %w[AA AAL], %w[TK THY]].sample
logo = FlightRadar.airline_logo(airline[0], airline[1])
expect(logo[0]).to_not be nil
expect(logo[1]).to_not be nil
end
it "gets country flag" do
country = "United States"
flag_url = FlightRadar.country_flag(country)
expect(flag_url).to eq "https://www.flightradar24.com/static/images/data/flags-small/united-states.gif"
end
end
| 26.310811 | 112 | 0.671803 | 3.34375 |

hexsha: b2fdd34a89c4f597f4f4706f3635728cd6c36c6a | size: 2,827 | ext: py | lang: Python
path: train_utils.py | repo: BatyrM/QL-Net | head_hexsha: b245aadeb106810d075064137f26d773b2dbd679 | licenses: ["MIT"]
stars: null | issues: null | forks: null

import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.optim as optim
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).sum(0)
res.append(100*correct_k/batch_size)
return res
def adjust_learning_rate(lr, optimizer, epoch):
"""Sets the learning rate to the initial LR decayed by 10 after 3 and 6 epochs"""
lr = lr * (0.1 ** (epoch // 6))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def train(net,trainloader, testloader, num_epoch, lr, device):
criterion = nn.CrossEntropyLoss() #
optimizer = optim.SGD(net.parameters(), lr=lr, momentum=0.9, weight_decay=5e-4)
best = 0.0
for epoch in range(num_epoch): # loop over the dataset multiple times
net.train()
adjust_learning_rate(lr, optimizer, epoch)
running_loss = 0.0
for i, (inputs, labels) in enumerate(trainloader, 0):
inputs = inputs.to(device)
labels = labels.to(device)
# zero the parameter gradients
optimizer.zero_grad()
# forward + backward + optimize
outputs = net(inputs)
loss = criterion(outputs, labels)
loss.backward()
optimizer.step()
prec1 = accuracy(outputs.data, labels, topk=(1,))[0]
            if i % 30 == 0:    # print every 30 mini-batches
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}\tAccuracy: {:.2f}'.format(
epoch, i * len(inputs), len(trainloader.dataset),
100. * i / len(trainloader), loss.item(), prec1.item()))
print("----- Validation ----------")
score = test(net, testloader, device)
if score > best:
print("Saving model")
torch.save(net.state_dict(), 'mnist_baseline.pth')
best = score
print("---------------------------")
print('Finished Training')
return net
def test(net, testloader, device):
net.eval()
correct = 0.0
total = 0.0
i = 0.0
for (images, labels) in testloader:
images, labels = images.to(device), labels.to(device)
with torch.no_grad():
outputs = net(images)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum()
i=i+1.0
accuracy = 100.0 * correct.item() / total
print('Accuracy of the network on the 10000 test images: %.2f %%' % (accuracy))
return accuracy
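A quick sanity check of the `accuracy` helper above on a dummy batch (assuming the function is in scope): top-1 only accepts the argmax, while top-2 also accepts the second-best logit.

```python
import torch

logits = torch.tensor([[0.1, 0.7, 0.2],
                       [0.8, 0.1, 0.1],
                       [0.2, 0.3, 0.5]])
labels = torch.tensor([1, 0, 1])
top1, top2 = accuracy(logits, labels, topk=(1, 2))
print(top1.item(), top2.item())  # roughly 66.67 and 100.0 for this batch
```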
| 36.714286 | 97 | 0.573753 | 3.40625 |

hexsha: cb6fd862b26f5701a798f84fb660860482ce746c | size: 3,921 | ext: go | lang: Go
path: server/udp/session.go | repo: millken/tcpwder | head_hexsha: b6e34bad5919a930bd73e9e90b460984436273ed | licenses: ["MIT"]
stars: 3 (2019-03-08T14:14:08.000Z to 2022-02-09T09:23:39.000Z) | issues: null | forks: 6 (2017-08-23T13:49:39.000Z to 2022-02-09T09:23:42.000Z)

/**
* session.go - udp "session"
*
* @author Illarion Kovalchuk <[email protected]>
* @author Yaroslav Pogrebnyak <[email protected]>
*/
package udp
import (
"log"
"net"
"sync/atomic"
"time"
"github.com/millken/tcpwder/core"
"github.com/millken/tcpwder/server/scheduler"
)
/**
* Emulates UDP "session"
*/
type session struct {
/* timeout for new data from client */
clientIdleTimeout time.Duration
/* timeout for new data from backend */
backendIdleTimeout time.Duration
/* max number of client requests */
maxRequests uint64
/* actually sent client requests */
_sentRequests uint64
/* max number of backend responses */
maxResponses uint64
/* scheduler */
scheduler scheduler.Scheduler
/* connection to send responses to client with */
serverConn *net.UDPConn
/* client address */
clientAddr net.UDPAddr
/* Session backend */
backend *core.Backend
/* connection to previously elected backend */
backendConn *net.UDPConn
/* activity channel */
clientActivityC chan bool
clientLastActivity time.Time
/* stop channel */
stopC chan bool
/* function to call to notify server that session is closed and should be removed */
notifyClosed func()
}
/**
* Start session
*/
func (s *session) start() error {
s.stopC = make(chan bool)
s.clientActivityC = make(chan bool)
s.clientLastActivity = time.Now()
backendAddr, err := net.ResolveUDPAddr("udp", s.backend.Target.String())
if err != nil {
log.Printf("[ERROR] ResolveUDPAddr: %s", err)
return err
}
backendConn, err := net.DialUDP("udp", nil, backendAddr)
if err != nil {
log.Printf("[DEBUG] Error connecting to backend: %s", err)
return err
}
s.backendConn = backendConn
/**
* Update time and wait for stop
*/
var t *time.Ticker
var tC <-chan time.Time
if s.clientIdleTimeout > 0 {
log.Printf("[DEBUG] Starting new ticker for client %s%s%s", s.clientAddr, " ", s.clientIdleTimeout)
t = time.NewTicker(s.clientIdleTimeout)
tC = t.C
}
stopped := false
go func() {
for {
select {
case now := <-tC:
if s.clientLastActivity.Add(s.clientIdleTimeout).Before(now) {
log.Printf("[DEBUG] Client %s%s%s", s.clientAddr, " was idle for more than ", s.clientIdleTimeout)
go func() {
s.stopC <- true
}()
}
case <-s.stopC:
stopped = true
log.Printf("[DEBUG] Closing client session: %s", s.clientAddr.String())
s.backendConn.Close()
s.notifyClosed()
if t != nil {
t.Stop()
}
return
case <-s.clientActivityC:
s.clientLastActivity = time.Now()
}
}
}()
/**
* Proxy data from backend to client
*/
go func() {
buf := make([]byte, UDP_PACKET_SIZE)
var responses uint64
for {
if s.backendIdleTimeout > 0 {
err := s.backendConn.SetReadDeadline(time.Now().Add(s.backendIdleTimeout))
if err != nil {
log.Printf("[ERROR] Unable to set timeout for backend connection, closing. Error: %s", err)
s.stop()
return
}
}
n, _, err := s.backendConn.ReadFromUDP(buf)
if err != nil {
if !err.(*net.OpError).Timeout() && !stopped {
log.Printf("[ERROR] reading from backend %s", err)
}
s.stop()
return
}
s.scheduler.IncrementRx(*s.backend, uint(n))
s.serverConn.WriteToUDP(buf[0:n], &s.clientAddr)
if s.maxResponses > 0 {
responses++
if responses >= s.maxResponses {
s.stop()
return
}
}
}
}()
return nil
}
/**
* Writes data to session backend
*/
func (s *session) send(buf []byte) error {
select {
case s.clientActivityC <- true:
default:
}
_, err := s.backendConn.Write(buf)
if err != nil {
return err
}
s.scheduler.IncrementTx(*s.backend, uint(len(buf)))
if s.maxRequests > 0 {
if atomic.AddUint64(&s._sentRequests, 1) >= s.maxRequests {
s.stop()
}
}
return nil
}
/**
* Stops session
*/
func (c *session) stop() {
select {
case c.stopC <- true:
default:
}
}
| 19.033981 | 103 | 0.646009 | 3.09375 |

hexsha: e8e29b7b8972a06149daa7c111affc519cbc1ff4 | size: 629 | ext: py | lang: Python
path: FormulaAccordingPrint.py | repo: FreeBirdsCrew/Brainstorming_Codes | head_hexsha: 9d06216cd0772ce56586acff2c240a210b94ba1f | licenses: ["Apache-2.0"]
stars: 1 (2020-12-11T10:24:08.000Z to 2020-12-11T10:24:08.000Z) | issues: null | forks: null

"""
Write a program that calculates and prints the value according to the given formula:
Q = Square root of [(2 * C * D)/H]
Following are the fixed values of C and H:
C is 50. H is 30.
D is the variable whose values should be input to your program in a comma-separated sequence.
Example
Let us assume the following comma separated input sequence is given to the program:
100,150,180
The output of the program should be:
18,22,24
"""
import math
c=50
h=30
value = []
items=[x for x in raw_input().split(',')]
for d in items:
value.append(str(int(round(math.sqrt(2*c*float(d)/h)))))
print ','.join(value)
 | 28.590909 | 94 | 0.694754 | 3.125 |

hexsha: d85d1fa41bdefa15ec09d62c6bf10684f6d1aa95 | size: 2,229 | ext: swift | lang: Swift
path: Fokus/Focus/FocusController.swift | repo: dnlggr/Fokus | head_hexsha: 7efba4c63fa963e8e3d3fd0d78638285e1b6c207 | licenses: ["MIT"]
stars: 3 (2018-11-12T14:47:08.000Z to 2019-08-06T18:21:53.000Z) | issues: null | forks: null

//
// FocusController.swift
// Fokus
//
// Created by Daniel on 01.09.18.
// Copyright © 2018 Daniel Egger. All rights reserved.
//
import Cocoa
import WindowLayout
class FocusController {
// MARK: Initialization
init() { }
// MARK: API
func focusLeft() {
moveFocus(toward: .west)
}
func focusDown() {
moveFocus(toward: .south)
}
func focusUp() {
moveFocus(toward: .north)
}
func focusRight() {
moveFocus(toward: .east)
}
// MARK: Private Functions
private func moveFocus(toward direction: Direction) {
if let neighborWindow = neighborOfCurrentWindow(toward: direction) {
focus(window: neighborWindow)
}
}
private func neighborOfCurrentWindow(toward direction: Direction) -> Window? {
// Get all windows
let screen = Screen(windows: WindowInfo.all.map { Window(bounds: $0.bounds, title: $0.title) })
// Get current window
guard let currentWindowInfo = WindowInfo.current else { return nil }
let currentWindow = Window(bounds: currentWindowInfo.bounds, title: currentWindowInfo.title)
// Get neighbor window
return screen.neighbor(of: currentWindow, toward: direction)
}
private func focus(window: Window) {
// Get window's accessibility application
guard let pid = WindowInfo.foremost(with: window.bounds)?.ownerPID else { return }
let accessibilityApplication = Accessibility.application(for: pid)
// Get first accessibility window with same bounds and title as window as there seems
// to be no other way of matching windows with CGWindowInfo to Accessibility windows
let accessibilityWindow = accessibilityApplication.windows.first {
return $0.bounds == window.bounds && $0.title == window.title
}
if let accessibilityWindow = accessibilityWindow {
            // Activate application
NSRunningApplication(processIdentifier: pid)?.activate(options: .activateIgnoringOtherApps)
// Raise correct window of application
AXUIElementPerformAction(accessibilityWindow, kAXRaiseAction as CFString)
}
}
}
| 29.328947 | 103 | 0.6559 | 3.015625 |

hexsha: 9a542c3d53d6149d857f830bee1e2251b6bfceef | size: 1,799 | ext: swift | lang: Swift
path: Example/YahooWather/YahooWather/ViewController.swift | repo: beekpr/EasyTheme | head_hexsha: 0ea47fd730517058fd0cf0bbe9acdeaf5c3c65fa | licenses: ["MIT"]
stars: 217 (2017-06-06T17:20:49.000Z to 2020-01-23T04:05:05.000Z) | issues: 6 (2017-06-04T16:14:40.000Z to 2019-07-22T16:34:04.000Z) | forks: 21 (2017-06-06T19:30:11.000Z to 2020-01-01T04:20:50.000Z)

import UIKit
import Themes
import DayNightSwitch
struct MyTheme: Theme {
let image: UIImage
let textColor: UIColor
init(image: UIImage, textColor: UIColor) {
self.image = image
self.textColor = textColor
}
}
struct ThemeList {
static let day = MyTheme(image: UIImage(named: "day")!, textColor: UIColor.darkGray)
static let night = MyTheme(image: UIImage(named: "night")!, textColor: UIColor.white)
}
class ViewController: UIViewController {
@IBOutlet weak var imageView: UIImageView!
@IBOutlet weak var cityLabel: UILabel!
@IBOutlet weak var timeLabel: UILabel!
@IBOutlet weak var degreeLabel: UILabel!
@IBOutlet weak var weatherLabel: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
ThemeManager.currentTheme = ThemeList.day
let dayNightSwitch = DayNightSwitch(frame: CGRect(x: view.bounds.size.width - 120,
y: view.bounds.size.height - 70,
width: 100, height: 50))
view.addSubview(dayNightSwitch)
dayNightSwitch.on = true
use(MyTheme.self) { controller, theme in
UIView.transition(with: controller.imageView,
duration: 0.25,
options: .transitionCrossDissolve,
animations: {
controller.imageView.image = theme.image
}, completion: nil)
controller.cityLabel.textColor = theme.textColor
controller.timeLabel.textColor = theme.textColor
}
dayNightSwitch.changeAction = { on in
ThemeManager.currentTheme = on ? ThemeList.day : ThemeList.night
}
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
}
| 28.109375 | 87 | 0.648694 | 3.046875 |

hexsha: 739c4d8c0ff0142ea91a26701a2294cb09da0abb | size: 526 | ext: sql | lang: SQL
path: queries/sherlock/setup/feed_quality_score.sql | repo: stefmolin/watson-api | head_hexsha: 49ca922f66646f3134b500c4ec9e6cd23fb31f2c | licenses: ["MIT"]
stars: null | issues: 1 (2018-09-20T01:04:57.000Z to 2018-10-06T21:39:44.000Z) | forks: null

SELECT
day
, CASE WHEN COUNT(DISTINCT day) = 0 THEN 0
ELSE (SUM(catalog_quality)/COUNT(DISTINCT day))/10000 END AS feed_quality
, MAX(last_date_import) AS feed_import
FROM
schema.fact_table AS fc
JOIN
(SELECT
partner_id
, client_id
FROM
schema.dim_table
WHERE
client_id = {client_id}
GROUP BY
partner_id
, client_id) c
ON
c.partner_id = fc.partner_id
WHERE
client_id = {client_id}
AND day BETWEEN '{start_date}' AND '{end_date}'
GROUP BY
client_id
, day
ORDER BY
day
| 18.137931 | 82 | 0.692015 | 3.125 |

hexsha: c829c2d4ff87ef172ea1732f1adad21590a72250 | size: 1,586 | ext: lua | lang: Lua
path: server/main.lua | repo: vwxyzzett/td-atmrob | head_hexsha: d0259d1c841e5f5d2f577db1de98a6c9425c2880 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: 1 (2022-02-15T12:53:47.000Z to 2022-02-15T12:53:47.000Z)

ESX = nil
TriggerEvent('esx:getSharedObject', function(obj) ESX = obj end)
ESX.RegisterServerCallback('hasan:copCount', function(source, cb)
local xPlayers = ESX.GetPlayers()
copConnected = 0
for i=1, #xPlayers, 1 do
local xPlayer = ESX.GetPlayerFromId(xPlayers[i])
if xPlayer.job.name == 'police' then
copConnected = copConnected + 1
end
end
cb(copConnected)
end)
RegisterNetEvent('td-atmrob:givemoney')
AddEventHandler('td-atmrob:givemoney', function()
local xPlayer = ESX.GetPlayerFromId(source)
xPlayer.addMoney(Config.Money)
end)
ESX.RegisterUsableItem('usefulusb', function(source)
local xPlayer = ESX.GetPlayerFromId(source)
TriggerClientEvent('td-atmrob:client:start', source)
end)
RegisterNetEvent('td-atmrob:giveitem')
AddEventHandler('td-atmrob:giveitem', function()
local xPlayer = ESX.GetPlayerFromId(source)
local Item = 'documents'
xPlayer.addInventoryItem(Item,1)
end)
ESX.RegisterServerCallback("hasan:itemkontrol", function(source, cb, itemname)
local xPlayer = ESX.GetPlayerFromId(source)
local item = xPlayer.getInventoryItem(itemname)["count"]
if item >= 1 then
cb(true)
else
cb(false)
end
end)
RegisterServerEvent("hasan:itemsil2")
AddEventHandler("hasan:itemsil2", function(itemname)
local xPlayer = ESX.GetPlayerFromId(source)
xPlayer.removeInventoryItem(itemname, count)
end)
RegisterServerEvent('hasan:itemver2')
AddEventHandler('hasan:itemver2', function(item, count)
local player = ESX.GetPlayerFromId(source)
player.addInventoryItem(item, count)
end)
| 24.030303 | 78 | 0.742749 | 3.078125 |

hexsha: dd707193dd1fd3d7843445fb6338dfaa0d6482ba | size: 3,079 | ext: go | lang: Go
path: examples/restore.go | repo: IrennaLumbuun/nimble-golang-sdk | head_hexsha: 2af65b6ef8137fbc07c24cc78c2118206ca04149 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null

// Copyright 2020 Hewlett Packard Enterprise Development LP
package main
import (
"fmt"
"github.com/hpe-storage/nimble-golang-sdk/pkg/client/v1/nimbleos"
"github.com/hpe-storage/nimble-golang-sdk/pkg/param"
"github.com/hpe-storage/nimble-golang-sdk/pkg/service"
)
func main() {
// login to Array, get groupService instance
groupService, err := service.NewNimbleGroupService(
service.WithHost("1.1.1.1"),
service.WithUser("xxx"),
service.WithPassword("xxx"),
service.WithoutWaitForAsyncJobs())
if err != nil {
fmt.Printf("NewGroupService(): Unable to connect to group, err: %v", err.Error())
return
}
defer groupService.LogoutService()
// set debug
groupService.SetDebug()
// get volume service instance
volSvc := groupService.GetVolumeService()
// Initialize volume attributes
var sizeField int64 = 5120
descriptionField := "This volume was created as part of a unit test"
var limitIopsField int64 = 256
var limitMbpsField int64 = 1
// set volume attributes
newVolume := &nimbleos.Volume{
Name: param.NewString("RestoreVolume"),
Size: &sizeField,
Description: &descriptionField,
ThinlyProvisioned: param.NewBool(true),
Online: param.NewBool(true),
LimitIops: &limitIopsField,
LimitMbps: &limitMbpsField,
MultiInitiator: param.NewBool(true),
AgentType: nimbleos.NsAgentTypeNone,
}
// create volume
	volume, err := volSvc.CreateVolume(newVolume)
	if err != nil {
		fmt.Printf("CreateVolume(): Failed to create volume, err: %v", err.Error())
		return
	}
	if volume != nil {
volcollName := &nimbleos.VolumeCollection{
Name: param.NewString("TestVolcoll"),
}
volcoll, err := groupService.GetVolumeCollectionService().CreateVolumeCollection(volcollName)
if err != nil {
fmt.Println("Failed to create volume collection")
return
}
// add volume to volume collection
err = volSvc.AssociateVolume(*volume.ID, *volcoll.ID)
if err != nil {
fmt.Println("Failed to associate RestoreVolume to volcoll ")
return
}
// create a snapshot collection
snapColl, _ := groupService.GetSnapshotCollectionService().CreateSnapshotCollection(&nimbleos.SnapshotCollection{
Name: param.NewString("RestoreSnapColl"),
VolcollId: volcoll.ID,
})
// Get snapshot collection by name
snapColl, err = groupService.GetSnapshotCollectionService().GetSnapshotCollectionByName("RestoreSnapColl")
if err != nil {
fmt.Printf("Failed to get snapshot collection by name, err: %v\n", err)
}
fmt.Println(snapColl)
// set the volume offline before restore
volSvc.OfflineVolume(*volume.ID, true)
//restore volume to snapcoll
err = volSvc.RestoreVolume(*volume.ID, *snapColl.SnapshotsList[0].SnapId)
if err != nil {
fmt.Println("Failed to restore volume")
}
// cleanup
// disassociate volume from volume collection
err = volSvc.DisassociateVolume(*volume.ID)
if err != nil {
fmt.Printf("Failed to remove %s volume from volume collection\n", *volume.ID)
return
}
// delete volcoll
groupService.GetVolumeCollectionService().DeleteVolumeCollection(*volcoll.ID)
volSvc.DeleteVolume(*volume.ID)
}
}
| 28.509259 | 115 | 0.715167 | 3.03125 |
9c39c03462738217dee5cad2afd2fb9bd45ef7ac | 2,378 | js | JavaScript | CMS/CMSScripts/CMSModules/CMS.Forms/Directives/CMSInputAttributesDirective.js | mzhokh/FilterByCategoryWidget | c6b83215aea4f4df607d070fe4c772bd96ee6a95 | [
"MIT"
] | 26 | 2019-02-26T19:44:44.000Z | 2021-07-19T01:45:37.000Z | CMS/CMSScripts/CMSModules/CMS.Forms/Directives/CMSInputAttributesDirective.js | mzhokh/FilterByCategoryWidget | c6b83215aea4f4df607d070fe4c772bd96ee6a95 | [
"MIT"
] | 34 | 2018-12-10T09:30:13.000Z | 2020-09-02T11:14:12.000Z | CMS/CMSScripts/CMSModules/CMS.Forms/Directives/CMSInputAttributesDirective.js | mzhokh/FilterByCategoryWidget | c6b83215aea4f4df607d070fe4c772bd96ee6a95 | [
"MIT"
] | 50 | 2018-12-06T17:32:43.000Z | 2021-11-04T09:48:07.000Z | cmsdefine([], function () {
/**
* This directive looks for supported attributes in the parent scope. If any of the supported
	 * values is found, it is passed to the context element.
	 * If the scope specifies an id attribute, its value will be used for the name attribute as well.
*
* This directive is valid only for input, textarea or select element.
*
* @example
* ...
* $scope = {
* required: true,
* maxlength: 50,
* id: "someId"
* };
*
* ...
*
* <input type="text" data-ng-model="model" cms-input-attributes="" />
*
* will transform the input to
*
* <input type="text" data-ng-model="model" name="someId" id="someId" required maxlength="50" cms-input-attributes="" />
*
* @throws If the directive is used for not supported element.
*/
// Array of supported HTML attributes containing values
var supportedAttributes = ["maxlength", "required", "cols", "rows", "id"],
// Array of supported HTML attributes without the value
supportedProperties = ["required"];
return [function () {
return {
restrict: "A",
link: function ($scope, $element) {
var elementTagName = $element.prop("tagName").toLowerCase();
if ((elementTagName !== "input") && (elementTagName !== "textarea") && (elementTagName !== "select")) {
throw "This directive can be used only for input, textarea or select element, but was used for the '" + elementTagName + "'.";
}
supportedAttributes.forEach(function(attribute) {
if ($scope[attribute]) {
$element.attr(attribute, $scope[attribute]);
}
});
supportedProperties.forEach(function (property) {
if ($scope[property]) {
$element.prop(property, true);
}
});
// Name is special attribute, since is usually matches the id, so it will be added even if it is not explicitly specified in the scope
if ($scope.id) {
$element.attr("name", $scope.id);
}
}
};
}];
}); | 37.746032 | 150 | 0.519765 | 3.171875 |
2b59cdc909c3f77b9904fd75a847089e1d0e82e9 | 3,908 | sql | SQL | resources/migrations/2017091817300000-update-completed_benchmark-metrics.up.sql | nucleotides/event-api | aea8f9ce8e76fbff646d1535228c6c09be4a7158 | [
"BSD-3-Clause-LBNL"
] | null | null | null | resources/migrations/2017091817300000-update-completed_benchmark-metrics.up.sql | nucleotides/event-api | aea8f9ce8e76fbff646d1535228c6c09be4a7158 | [
"BSD-3-Clause-LBNL"
] | null | null | null | resources/migrations/2017091817300000-update-completed_benchmark-metrics.up.sql | nucleotides/event-api | aea8f9ce8e76fbff646d1535228c6c09be4a7158 | [
"BSD-3-Clause-LBNL"
] | null | null | null | -- Drop this view first to then drop dependent views
DROP VIEW completed_benchmark_metrics;
-- No longer going to base the completed_benchmark_metrics table off of this view
DROP VIEW image_task_benchmarking_state;
--
-- View of numbers of tasks per benchmark instance by benchmark type
--
-- The numbers of tasks is always going to be 1 (the produce task) plus however
-- many evaluation tasks there are.
--
CREATE MATERIALIZED VIEW tasks_per_benchmark_instance_by_benchmark_type AS
SELECT benchmark_type_id,
(COUNT(image_task_id) + 1) AS n_tasks
FROM benchmark_type
JOIN image_expanded_fields ON benchmark_type.evaluation_image_type_id = image_expanded_fields.image_type_id
GROUP BY benchmark_type_id;
--
CREATE UNIQUE INDEX ON tasks_per_benchmark_instance_by_benchmark_type (benchmark_type_id);
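-- Illustrative example (numbers are assumptions): a benchmark type whose evaluation
-- image defines 3 image tasks yields n_tasks = 4 (3 evaluation tasks + 1 produce task).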
--
-- View of the state of each benchmark instance
--
CREATE VIEW benchmark_instance_state AS
SELECT benchmark_instance_id,
COALESCE(bool_and(event.success), FALSE) AS instance_successful,
COUNT(event_id) = n_tasks AS instance_finished
FROM benchmark_instance
JOIN task USING (benchmark_instance_id)
JOIN tasks_per_benchmark_instance_by_benchmark_type USING (benchmark_type_id)
LEFT JOIN events_prioritised_by_successful AS event USING (task_id)
GROUP BY benchmark_instance_id, n_tasks;
--
-- Simpler view of all benchmarking metrics, only requiring each individual
-- benchmark instance has been completed, rather than all tasks for a given image
-- task be completed.
--
CREATE VIEW completed_benchmark_metrics AS
SELECT external_id AS benchmark_id,
benchmark_type.name AS benchmark_type_name,
input_file.sha256 AS input_file_id,
image_type_name AS image_type,
image_instance_name AS image_name,
image_version_name AS image_version,
image_task_name AS image_task,
input_file.platform,
input_file.protocol,
input_file.material,
input_file.extraction_method,
input_file.run_mode,
input_file.biological_source_type,
input_file.biological_source_name,
input_file.input_file_set_name,
metric_type.name AS variable,
metric_instance.value
FROM benchmark_instance_state AS state
JOIN benchmark_instance USING (benchmark_instance_id)
JOIN benchmark_type USING (benchmark_type_id)
JOIN input_data_file_expanded_fields AS input_file USING (input_data_file_id)
JOIN image_expanded_fields AS image ON benchmark_instance.product_image_task_id = image.image_task_id
JOIN task USING (benchmark_instance_id)
JOIN events_prioritised_by_successful USING (task_id)
JOIN metric_instance USING (event_id)
JOIN metric_type USING (metric_type_id)
WHERE state.instance_successful = true
AND state.instance_finished = true;
--;;
--;; Function rebuild all benchmark instances and tasks
--;;
CREATE OR REPLACE FUNCTION rebuild_benchmarks () RETURNS void AS $$
BEGIN
REFRESH MATERIALIZED VIEW input_data_file_expanded_fields;
REFRESH MATERIALIZED VIEW image_expanded_fields;
PERFORM populate_benchmark_instance();
PERFORM populate_task();
REFRESH MATERIALIZED VIEW tasks_per_image_by_benchmark_type;
REFRESH MATERIALIZED VIEW tasks_per_benchmark_instance_by_benchmark_type;
REINDEX TABLE benchmark_instance;
REINDEX TABLE task;
REINDEX TABLE tasks_per_image_by_benchmark_type;
END; $$
LANGUAGE PLPGSQL;
| 44.409091 | 124 | 0.691146 | 3.21875 |
de48dd21f4714f08feaab97eed742b56a30fe022 | 1,016 | kt | Kotlin | alphonse-validator-lib/src/main/java/com/atthapon/alphonsevalidator/rules/GreaterThanRule.kt | atthapon-k/validrator | 84bd5c99e21dfb002b2994fec4b7278ed5f231f0 | [
"Apache-2.0"
] | null | null | null | alphonse-validator-lib/src/main/java/com/atthapon/alphonsevalidator/rules/GreaterThanRule.kt | atthapon-k/validrator | 84bd5c99e21dfb002b2994fec4b7278ed5f231f0 | [
"Apache-2.0"
] | null | null | null | alphonse-validator-lib/src/main/java/com/atthapon/alphonsevalidator/rules/GreaterThanRule.kt | atthapon-k/validrator | 84bd5c99e21dfb002b2994fec4b7278ed5f231f0 | [
"Apache-2.0"
] | null | null | null | package com.atthapon.alphonsevalidator.rules
import com.atthapon.alphonsevalidator.validNumber
import java.text.NumberFormat
class GreaterThanRule(val target: Number = 0, var errorMsg: String? = null): BaseRule {
override fun validate(text: String): Boolean {
if(text.isEmpty()) return false
if(text.startsWith("-")) {
val txtNum = text.substringAfter("-")
if(txtNum.validNumber()) {
var number = NumberFormat.getNumberInstance().parse(txtNum)
number = number.toFloat() * -1
return (number.toFloat() > target.toFloat())
}
return false
} else {
if(text.validNumber()) {
val number = NumberFormat.getNumberInstance().parse(text)
return (number.toFloat() > target.toFloat())
}
return false
}
}
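    // Hypothetical usage sketch (how the rule is wired into a validator lives elsewhere
    // in this library; the values below are illustrative):
    //   val rule = GreaterThanRule(target = 10, errorMsg = "Must be greater than 10")
    //   rule.validate("42")   // true
    //   rule.validate("-3")   // false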
override fun getErrorMessage() = errorMsg
override fun setError(msg: String) {
errorMsg = msg
}
} | 32.774194 | 87 | 0.584646 | 3.0625 |
0b3963c63ed1877c12683ef9458a7f962df91e0e | 3,243 | py | Python | finder.py | giuseppebrb/Pynder | a47defc08ff497096a1fe507ab5d7b01997b69ef | [
"MIT"
] | 3 | 2017-11-11T01:19:57.000Z | 2021-07-07T15:44:32.000Z | finder.py | giuseppebrb/Pynder | a47defc08ff497096a1fe507ab5d7b01997b69ef | [
"MIT"
] | null | null | null | finder.py | giuseppebrb/Pynder | a47defc08ff497096a1fe507ab5d7b01997b69ef | [
"MIT"
] | null | null | null | import os
import fnmatch
import smtplib
import email.mime.application
import sys
import subprocess
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from pathlib import Path
home = str(Path.home()) # Return a string representing the user’s home directory
fileFound = 0 # Number of files found while discovering
fileScanned = 0 # Number of the already processed files
maxSize = 23068672 # Attachments bytes limit for the mail host (22MB in byte, but it can be changed)
actualSizeCounter = 0 # Bytes count for files already attached to the email
paths = [] # List of files directories, matching the pattern, that will be print into the email body
# Following values need to be changed
email_user = "SENDER-ADDRESS-HERE"
email_pwd = "SENDER-PASSWORD-HERE"
recipient = "RECIPIENT-ADDRESS-HERE"
"""
This function will return a list of strings which represents the files path with the specified extension inside the
specified path
"""
def find(pattern, path):
result = []
for root, dirs, files in os.walk(path):
for name in files:
if fnmatch.fnmatch(name, pattern):
result.append(os.path.join(root, name))
return result
"""
__________ START - It may NOT work on MacOS __________
| |
"""
injecting_folder = home+'\\script' # 'Injecting' folder
if not os.path.exists(injecting_folder):
os.system("mkdir %s" % injecting_folder)
executableLocation = find('EXECUTABLE-NAME-HERE.exe', os.path.dirname(os.path.abspath(__file__)))
# Create a new 'injecting' folder where software will copy itself
if not os.path.isfile(injecting_folder + "\\EXECUTABLE-NAME-HERE.exe"):
os.system("xcopy {!s} {!s} /R".format(executableLocation[0], injecting_folder))
# If current working directory is not the 'injecting' folder opens a new instance from there and close this one.
if os.getcwd() != injecting_folder:
os.chdir(injecting_folder)
subprocess.Popen([injecting_folder+'\\EXECUTABLE-NAME-HERE.exe'], stdin=None, stdout=None, stderr=None)
sys.exit()
"""
|__________ END - It may NOT work on MacOS __________|
"""
filesFound = find("*.pdf", home) # List of every pdf file found in every folder starting from the user's home directory
# Building the email structure
msg = MIMEMultipart()
msg['From'] = email_user
msg['To'] = recipient
msg['Subject'] = "Files Found"
for f in filesFound:
fp = open(r'%s' % f, 'rb')
att = email.mime.application.MIMEApplication(fp.read())
fp.close()
paths.append("Directory: " + f)
att.add_header('Content-Disposition', 'attachment; filename="%s"' % f)
msg.attach(att)
for p in paths:
msg.attach(MIMEText(p, 'plain'))
# Open the connection with mail host with specified credentials
server = smtplib.SMTP('smtp.gmail.com', 587) # These values are just an example working with Gmail, you need to change
# them with your own host's SMTP address and port
server.ehlo()
server.starttls() # Starts a secure tls connection
server.login(email_user, email_pwd)
email_body = msg.as_string()
server.sendmail(email_user, recipient, email_body) # Send the email
server.quit() # Close the connection with host
sys.exit() # Quit program
| 35.25 | 120 | 0.716929 | 3.171875 |
0b9e17c3c6711c5899263cca3e86df88aba125ad | 13,497 | py | Python | src/warp/yul/AstTools.py | sambarnes/warp | f841afa22e665d5554587eaa866c4790698bfc22 | [
"Apache-2.0"
] | 414 | 2021-07-17T13:06:55.000Z | 2022-03-31T14:57:10.000Z | src/warp/yul/AstTools.py | sambarnes/warp | f841afa22e665d5554587eaa866c4790698bfc22 | [
"Apache-2.0"
] | 78 | 2021-07-19T12:33:56.000Z | 2022-03-29T17:16:27.000Z | src/warp/yul/AstTools.py | sambarnes/warp | f841afa22e665d5554587eaa866c4790698bfc22 | [
"Apache-2.0"
] | 19 | 2021-08-18T03:55:54.000Z | 2022-03-29T15:29:48.000Z | from __future__ import annotations
import re
from typing import Union
import warp.yul.ast as ast
from warp.yul.AstVisitor import AstVisitor
from warp.yul.WarpException import WarpException
class AstParser:
def __init__(self, text: str):
self.lines = text.splitlines()
if len(self.lines) == 0:
raise WarpException("Text should not be empty")
self.pos = 0
def parse_typed_name(self) -> ast.TypedName:
tabs = self.get_tabs()
node_type_name = self.get_word(tabs)
assert node_type_name == "TypedName:", "This node should be of type TypedNode"
self.pos += 1
assert self.get_tabs() == tabs + 1, "Wrong indentation"
node_name, node_type = self.get_word(tabs + 1).split(":")
self.pos += 1
return ast.TypedName(name=node_name, type=node_type)
def parse_literal(self) -> ast.Literal:
tabs = self.get_tabs()
assert self.get_word(tabs).startswith(
"Literal:"
), "This node should be of type Literal"
value = self.get_word(tabs + 8)
self.pos += 1
try:
value = int(value)
except ValueError:
pass
return ast.Literal(value=value)
def parse_identifier(self) -> ast.Identifier:
tabs = self.get_tabs()
assert self.get_word(tabs).startswith(
"Identifier:"
), "This node should be of type Identifier"
name = self.get_word(tabs + 11)
self.pos += 1
return ast.Identifier(name=name)
def parse_assignment(self) -> ast.Assignment:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "Assignment:"
), "This node should be of type Assignment"
self.pos += 1
assert self.get_word(tabs + 1) == "Variables:"
self.pos += 1
variables_list = self.parse_list(tabs + 1, self.parse_identifier)
assert self.get_word(tabs + 1) == "Value:"
self.pos += 1
return ast.Assignment(
variable_names=variables_list, value=self.parse_expression()
)
def parse_function_call(self) -> ast.FunctionCall:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "FunctionCall:"
), "This node should be of type FunctionCall"
self.pos += 1
return ast.FunctionCall(
function_name=self.parse_identifier(),
arguments=self.parse_list(tabs, self.parse_expression),
)
def parse_expression_statement(self) -> ast.Statement:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "ExpressionStatement:"
), "This node should be of type ExpressionStatement"
self.pos += 1
return ast.ExpressionStatement(expression=self.parse_expression())
def parse_variable_declaration(self) -> ast.VariableDeclaration:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "VariableDeclaration:"
), "This node should be of type VariableDeclaration"
self.pos += 1
assert self.get_tabs() == tabs + 1
assert self.get_word(tabs + 1) == "Variables:"
self.pos += 1
variables = self.parse_list(tabs + 1, self.parse_typed_name)
assert self.get_tabs() == tabs + 1
word = self.get_word(tabs + 1)
self.pos += 1
assert word.startswith("Value")
if word.endswith("None"):
value = None
else:
value = self.parse_expression()
return ast.VariableDeclaration(variables=variables, value=value)
def parse_block(self) -> ast.Block:
tabs = self.get_tabs()
assert self.get_word(tabs) == "Block:", "This node should be of type Block"
self.pos += 1
return ast.Block(statements=tuple(self.parse_list(tabs, self.parse_statement)))
def parse_function_definition(self) -> ast.FunctionDefinition:
tabs = self.get_tabs()
assert (
self.get_word(tabs) == "FunctionDefinition:"
), "This node should be of type FunctionDefinition"
self.pos += 1
assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1).startswith(
"Name:"
)
fun_name = self.get_word(tabs + 7)
self.pos += 1
assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Parameters:"
self.pos += 1
params = self.parse_list(tabs + 1, self.parse_typed_name)
assert (
self.get_tabs() == tabs + 1
and self.get_word(tabs + 1) == "Return Variables:"
)
self.pos += 1
returns = self.parse_list(tabs + 1, self.parse_typed_name)
assert self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Body:"
self.pos += 1
body = self.parse_block()
return ast.FunctionDefinition(
name=fun_name, parameters=params, return_variables=returns, body=body
)
def parse_if(self) -> ast.If:
tabs = self.get_tabs()
assert self.get_word(tabs) == "If:", "This node should be of type If"
self.pos += 1
condition = self.parse_expression()
body = self.parse_block()
else_body = None
if self.get_tabs() > tabs:
else_body = self.parse_block()
return ast.If(condition=condition, body=body, else_body=else_body)
def parse_case(self) -> ast.Case:
tabs = self.get_tabs()
assert self.get_word(tabs) == "Case:", "This node should be of type Case"
self.pos += 1
try:
value = self.parse_literal()
except AssertionError:
assert (
self.get_tabs() == tabs + 1 and self.get_word(tabs + 1) == "Default"
), "The value must be a literal or None (when it's the default case)"
value = None
self.pos += 1
return ast.Case(value=value, body=self.parse_block())
def parse_switch(self) -> ast.Switch:
tabs = self.get_tabs()
assert self.get_word(tabs) == "Switch:", "This node should be of type Switch"
self.pos += 1
return ast.Switch(
expression=self.parse_expression(),
cases=self.parse_list(tabs, self.parse_case),
)
def parse_for_loop(self) -> ast.ForLoop:
tabs = self.get_tabs()
assert self.get_word(tabs) == "ForLoop:", "This node should be of type ForLoop"
self.pos += 1
return ast.ForLoop(
pre=self.parse_block(),
condition=self.parse_expression(),
post=self.parse_block(),
body=self.parse_block(),
)
def parse_break(self) -> ast.Break:
tabs = self.get_tabs()
assert self.get_word(tabs) == "Break", "This node should be of type Break"
self.pos += 1
return ast.Break()
def parse_continue(self) -> ast.Continue:
tabs = self.get_tabs()
assert self.get_word(tabs) == "Continue", "This node should be of type Continue"
self.pos += 1
return ast.Continue()
def parse_leave(self) -> ast.Leave:
tabs = self.get_tabs()
assert self.get_word(tabs) == "Leave", "This node should be of type Leave"
self.pos += 1
return ast.LEAVE
def parse_node(self) -> ast.Node:
tabs = self.get_tabs()
node_type_name = self.get_word(tabs).split(":")[0]
parser_name = f"parse_{self.get_name(node_type_name)}"
parser = getattr(self, parser_name, None)
if parser is None:
raise WarpException("Wrong node type name!")
return parser()
def parse_statement(self) -> ast.Statement:
statements = [
"ExpressionStatement",
"Assignment",
"VariableDeclaration",
"FunctionDefinition",
"If",
"Switch",
"ForLoop",
"Break",
"Continue",
"Leave",
"Block",
]
tabs = self.get_tabs()
node_type_name = self.get_word(tabs).split(":")[0]
assert node_type_name in statements, "Not a valid statement"
return ast.assert_statement(self.parse_node())
def parse_expression(self) -> ast.Expression:
tabs = self.get_tabs()
node_type_name = self.get_word(tabs).split(":")[0]
assert node_type_name in [
"Literal",
"Identifier",
"FunctionCall",
], "Node type must be an expression"
return ast.assert_expression(self.parse_node())
def parse_list(self, tabs, parser):
items = []
while self.pos < len(self.lines) and self.get_tabs() > tabs:
item = parser()
items.append(item)
return items
def get_tabs(self):
tabs = 0
if self.pos < len(self.lines):
for c in self.lines[self.pos]:
if not c == "\t":
break
tabs += 1
else:
raise WarpException(
"Lines are not supposed to be filled only with tabs"
)
return tabs
def get_word(self, start: int) -> str:
return self.lines[self.pos][start:]
def get_name(self, name):
name = "_".join(re.findall("[A-Z][^A-Z]*", name))
return name.lower()
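    # Illustrative example: get_name("FunctionDefinition") returns "function_definition",
    # which is how parse_node maps a node type name onto its parse_* method.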
class YulPrinter(AstVisitor):
def format(self, node: ast.Node, tabs: int = 0) -> str:
return self.visit(node, tabs)
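    # Illustrative example (assuming the usual AstVisitor dispatch):
    # YulPrinter().format(ast.Literal(value=5)) returns "5", and nested Blocks are
    # indented one extra tab per level via the `tabs` argument.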
def visit_typed_name(self, node: ast.TypedName, tabs: int = 0) -> str:
return f"{node.name}"
def visit_literal(self, node: ast.Literal, tabs: int = 0) -> str:
return f"{node.value}"
def visit_identifier(self, node: ast.Identifier, tabs: int = 0) -> str:
return f"{node.name}"
def visit_assignment(self, node: ast.Assignment, tabs: int = 0) -> str:
variables = ", ".join(self.visit_list(node.variable_names))
value = self.visit(node.value, 0)
return f"{variables} := {value}"
def visit_function_call(self, node: ast.FunctionCall, tabs: int = 0) -> str:
name = self.visit(node.function_name)
args = ", ".join(self.visit_list(node.arguments))
return f"{name}({args})"
def visit_expression_statement(
self, node: ast.ExpressionStatement, tabs: int = 0
) -> str:
return self.visit(node.expression, tabs)
def visit_variable_declaration(
self, node: ast.VariableDeclaration, tabs: int = 0
) -> str:
variables = ", ".join(self.visit_list(node.variables))
value = ""
if node.value is not None:
value = f" := {self.visit(node.value)}"
return f"let {variables}{value}"
def visit_block(self, node: ast.Block, tabs: int = 0) -> str:
open_block = "{"
close_block = "}"
if self.is_short(node.statements):
statements = "".join(self.visit_list(node.statements))
return " ".join([open_block, statements, close_block])
statements = self.visit_list(node.statements, tabs + 1)
statements = ["\t" * (tabs + 1) + stmt for stmt in statements]
statements = "\n".join(statements)
close_block = "\t" * tabs + close_block
res = "\n".join([open_block, statements, close_block])
return res
def visit_function_definition(
self, node: ast.FunctionDefinition, tabs: int = 0
) -> str:
parameters = ", ".join(self.visit_list(node.parameters, 0))
ret_vars = ", ".join(self.visit_list(node.return_variables, 0))
body = self.visit(node.body, tabs)
res = f"function {node.name}({parameters})"
if len(node.return_variables) > 0:
res += f" -> {ret_vars}"
res += f" {body}"
return res
def visit_if(self, node: ast.If, tabs: int = 0) -> str:
res = f"if {self.visit(node.condition)} "
res += self.visit(node.body, tabs)
if node.else_body is not None:
res += "\n" + "\t" * tabs + "else "
res += self.visit(node.else_body, tabs)
return res
def visit_case(self, node: ast.Case, tabs: int = 0) -> str:
res = "\t" * tabs
if node.value is not None:
res += f"case {self.visit(node.value)} "
else:
res += "default "
res += self.visit(node.body, tabs)
return res
def visit_switch(self, node: ast.Switch, tabs: int = 0) -> str:
res = f"switch {self.visit(node.expression)}\n"
res += "\n".join(self.visit_list(node.cases, tabs))
return res
def visit_for_loop(self, node: ast.ForLoop, tabs: int = 0) -> str:
res = "for "
res += self.visit(node.pre, tabs)
res += f" {self.visit(node.condition)} "
res += self.visit(node.post, tabs)
res += f"\n{self.visit(node.body, tabs)}"
return res
def visit_break(self, node: ast.Break, tabs: int = 0) -> str:
return "break"
def visit_continue(self, node: ast.Continue, tabs: int = 0) -> str:
return "continue"
def visit_leave(self, node: ast.Leave, tabs: int = 0) -> str:
return "leave"
def is_short(self, stmts: tuple) -> bool:
if len(stmts) == 0:
return True
return len(stmts) == 1 and type(stmts[0]).__name__ not in [
"Block",
"FunctionDefinition",
"If",
"Switch",
"ForLoop",
]
| 32.601449 | 88 | 0.572127 | 3.140625 |
9be22d98ace4356e919d72b1f3a27850307e90d0 | 3,044 | js | JavaScript | src/pages/project/components/List.js | xxdfly/kirin-devops-front | 690af7f7db5c3eafccbd9ae8ce06de42528951aa | [
"MIT"
] | null | null | null | src/pages/project/components/List.js | xxdfly/kirin-devops-front | 690af7f7db5c3eafccbd9ae8ce06de42528951aa | [
"MIT"
] | null | null | null | src/pages/project/components/List.js | xxdfly/kirin-devops-front | 690af7f7db5c3eafccbd9ae8ce06de42528951aa | [
"MIT"
] | null | null | null | import React, { PureComponent } from 'react'
import PropTypes from 'prop-types'
import { Table, Modal, Divider } from 'antd'
import { Trans, withI18n } from '@lingui/react'
import Link from 'umi/link'
import styles from './List.less'
const { confirm } = Modal
@withI18n()
class List extends PureComponent {
handleMenuClick = (record, e) => {
const { onDeleteItem, onEditItem, i18n } = this.props
if (e.key === '1') {
onEditItem(record)
} else if (e.key === '2') {
confirm({
title: i18n.t`Are you sure delete this record?`,
onOk() {
onDeleteItem(record.id)
},
})
}
}
handleUpdateClick = (record) => {
const { onEditItem } = this.props
onEditItem(record)
}
handleDeleteClick = (record) => {
const { onDeleteItem, i18n } = this.props
confirm({
title: i18n.t`Are you sure delete this record?`,
onOk() {
onDeleteItem(record.id)
},
})
}
render() {
const { onDeleteItem, onEditItem, i18n, ...tableProps } = this.props
const columns = [
{
title: <Trans>ProjectID</Trans>,
dataIndex:'id',
key:'id',
},
{
title: <Trans>Project Name</Trans>,
dataIndex: 'projectName',
key: 'projectName',
render: (text, record) => <Link to={`project/${record.id}`}>{text}</Link>,
},
{
title: <Trans>Project Description</Trans>,
dataIndex: 'projectDesc',
key: 'projectDesc',
},
{
title: <Trans>Project Type</Trans>,
dataIndex: 'projectType',
key: 'projectType',
},
{
title: <Trans>Create Time</Trans>,
dataIndex: 'gmtCreate',
key: 'gmtCreate',
},
{
title: <Trans>Plan Deploy Time</Trans>,
dataIndex: 'devType',
key: 'devType',
},
{
title: <Trans>Creator</Trans>,
dataIndex: 'creator',
key: 'creator',
},
{
title: <Trans>Project Status</Trans>,
dataIndex: 'projectStatus',
key: 'projectStatus',
},
{
title: <Trans>Operation</Trans>,
key: 'operation',
// fixed: 'right',
render: (text, record) => (
<span>
<a onClick={() => this.handleUpdateClick(record)}><Trans>Modify</Trans></a>
<Divider type="vertical" />
<a onClick={() => this.handleDeleteClick(record)}><Trans>Delete</Trans></a>
</span>
)
},
]
return (
<Table
{...tableProps}
// showHeader={false}
pagination={{
...tableProps.pagination,
showTotal: total => i18n.t`Total ${total} Items`,
}}
className={styles.table}
bordered
scroll={{ x: 1200 }}
columns={columns}
simple
rowKey={record => record.id}
/>
)
}
}
List.propTypes = {
onDeleteItem: PropTypes.func,
onEditItem: PropTypes.func,
location: PropTypes.object,
}
export default List
| 23.968504 | 87 | 0.528252 | 3.1875 |
8e05d5d48dea580fee0f329061f450d842fb4740 | 2,107 | lua | Lua | HW3/src/checkModel.lua | SeeTheC/Computer-Vision-CS763 | d333c90e5aa939135b66a588424b2ba494543ac5 | [
"MIT"
] | null | null | null | HW3/src/checkModel.lua | SeeTheC/Computer-Vision-CS763 | d333c90e5aa939135b66a588424b2ba494543ac5 | [
"MIT"
] | null | null | null | HW3/src/checkModel.lua | SeeTheC/Computer-Vision-CS763 | d333c90e5aa939135b66a588424b2ba494543ac5 | [
"MIT"
] | null | null | null | require "xlua"
require "Logger"
require "Linear"
require "ReLU"
require "BatchNormalization"
-- require "SpatialConvolution"
require "Model"
require "Criterion"
require "GradientDescent"
local cmd = torch.CmdLine();
if not opt then
cmd:text()
cmd:text('Options:')
cmd:option("-config" ,"modelConfig_1.txt" ,"/path/to/modelConfig.txt")
cmd:option("-i" ,"input.bin" ,"/path/to/input.bin")
cmd:option("-ig" ,"gradInput.bin" ,"/path/to/gradInput.bin")
cmd:option("-o" ,"output.bin" ,"/path/to/output.bin")
cmd:option("-ow" ,"gradWeight.bin" ,"/path/to/gradWeight.bin")
cmd:option("-ob" ,"gradB.bin" ,"/path/to/gradB.bin")
cmd:option("-og" ,"gradOutput.bin" ,"/path/to/gradOutput.bin")
cmd:text()
opt = cmd:parse(arg or {})
end
--linear x y
--relu
--batchnorm
--convolution x y z a b c
strsplit=function (str)
words = {}
for word in str:gmatch("%w+") do table.insert(words, word) end
return words;
end
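-- Illustrative example: strsplit("linear 192 10") returns {"linear", "192", "10"},
-- which is how each layer line of the config file is tokenised below.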
print(opt["config"]);
-- Creating Model
local mlp = Model();
local weightPath="";
local biasPath="";
local lno=1;
for line in io.lines(opt["config"]) do
-- linear 192 10
-- relu
-- ba
-- mlp:addLayer(Liner(192, 10))
if lno==1 then
noOfLayers=tonumber(line);
elseif lno <= 1 + noOfLayers then
tbl=strsplit(line);
if tbl[1] == 'linear' then
mlp:addLayer(Linear(tonumber(tbl[2]),tonumber(tbl[3])));
elseif tbl[1] == 'relu' then
mlp:addLayer(ReLU());
elseif tbl[1] == 'batchnorm' then
mlp:addLayer(BatchNormalization())
end
elseif lno <= 1 + noOfLayers + 1 then
weightPath=line;
elseif lno <= 1 + noOfLayers + 2 then
biasPath=line;
end
lno=lno+1;
end
-- print(weightPath);
-- print(biasPath);
local input = torch.read(opt["i"])
local gradInput = torch.read(opt["ig"])
local output = mlp:forward(input)
local gradOutput = mlp:backward(input, gradInput)
local gradW = mlp.Layers[1].gradW
local gradB = mlp.Layers[1].gradB
torch.save(opt["o"], output)
torch.save(opt["og"], gradOutput)
torch.save(opt["ow"], gradW)
torch.save(opt["ob"], gradB)
| 24.788235 | 73 | 0.639298 | 3.25 |
f0768556a1b71d15812033a9cc69a7cc2128c930 | 1,122 | js | JavaScript | 2-avgWordLength.js | NJBOOT/ten_simple_coding_tests | 60c7e03b66c3592c0b0c94ae1d5690d0b7bac501 | [
"MIT"
] | null | null | null | 2-avgWordLength.js | NJBOOT/ten_simple_coding_tests | 60c7e03b66c3592c0b0c94ae1d5690d0b7bac501 | [
"MIT"
] | null | null | null | 2-avgWordLength.js | NJBOOT/ten_simple_coding_tests | 60c7e03b66c3592c0b0c94ae1d5690d0b7bac501 | [
"MIT"
] | null | null | null | // 2 - Average Sentence Length
// For a given sentence, return the average word length.
// Note: Remember to remove punctuation first.
const avgWordLength = s => {
// first, weird edge case where the punctuation represents a space
// then strip the rest of the punctuation using regex
const stripped = s
.replace("...", " ")
.replace(/[^\w\s]|_/g, "")
.replace(/\s+/g, " ");
// create an array of words. Transform each word to the length of that word using map.
// Then, add them up using reduce.
const arr = stripped
.split(" ")
.map(el => el.length)
.reduce(
(a, charCount) => {
a.chars += charCount;
a.words++;
return a;
},
{ words: 0, chars: 0 }
);
//console.log(arr) -> { words: 11, chars: 42 }. Divide one by the other for average
return (arr.chars / arr.words).toFixed(2);
};
let sentence1 = "Hi all, my name is Tom...I am originally from Australia.";
let sentence2 =
"I need to work very. hard to learn more about ????algo?,rithms i/n Python!";
console.log(avgWordLength(sentence1));
console.log(avgWordLength(sentence2));
| 32.057143 | 88 | 0.628342 | 3.421875 |
0b9897a43237e684b6c66f4d6a3b18dc5aaad9da | 1,217 | py | Python | onetouch.py | kakoni/insulaudit | 18fe0802bafe5764882ac4e65e472fdc840baa45 | [
"MIT"
] | 1 | 2020-11-28T13:23:58.000Z | 2020-11-28T13:23:58.000Z | onetouch.py | kakoni/insulaudit | 18fe0802bafe5764882ac4e65e472fdc840baa45 | [
"MIT"
] | null | null | null | onetouch.py | kakoni/insulaudit | 18fe0802bafe5764882ac4e65e472fdc840baa45 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import user
import serial
from pprint import pprint, pformat
import insulaudit
from insulaudit.data import glucose
from insulaudit.log import io
from insulaudit.devices import onetouch2
import sys
PORT = '/dev/ttyUSB0'
def get_serial( port, timeout=2 ):
return serial.Serial( port, timeout=timeout )
def init( ):
mini = onetouch2.OneTouchUltra2( PORT, 5 )
print "is open? %s\n timeout: %s" % ( mini.serial.isOpen( ), mini.serial.getTimeout() )
print ""
print "read serial number"
serial = mini.execute( onetouch2.ReadSerial( ) )
print "serial number: %s" % serial
print ""
if serial == "":
print "could not connect"
sys.exit(1)
print ""
print "read firmware number"
firmware = mini.execute( onetouch2.ReadFirmware( ) )
print "firmware: %s" % firmware
print ""
print "RFID"
print mini.execute( onetouch2.ReadRFID( ) )
print "GLUCOSE"
data = mini.read_glucose( )
print data
print "len glucose: %s" % len( data )
head, body = data
output = open( 'sugars-debug.txt', 'w' )
output.write( glucose.format_records( body ) )
output.write( '\n' )
output.close( )
return mini
if __name__ == '__main__':
port = init()
io.info( port )
| 22.537037 | 89 | 0.67461 | 3.15625 |
899eb308936063e65f789bdec1df704f986b8824 | 1,779 | lua | Lua | Overworld/model/entities/playerStates/StillUpState.lua | tomyahu/Tomimi-Game-Editor | 8d14dc0d3ead8d90a9affd783e11048d7260c471 | [
"MIT"
] | 1 | 2019-01-18T21:50:11.000Z | 2019-01-18T21:50:11.000Z | Overworld/model/entities/playerStates/StillUpState.lua | tomyahu/LotRM | 8d14dc0d3ead8d90a9affd783e11048d7260c471 | [
"MIT"
] | 1 | 2019-12-04T15:39:45.000Z | 2019-12-04T15:39:45.000Z | Overworld/model/entities/playerStates/StillUpState.lua | tomyahu/Tomimi-Game-Editor | 8d14dc0d3ead8d90a9affd783e11048d7260c471 | [
"MIT"
] | null | null | null | require "lib.classes.class"
local NormalPlayerState = require "Overworld.model.entities.playerStates.NormalPlayerState"
--------------------------------------------------------------------------------------------------------
-- class: StillUpState
-- param: player:Player -> the player object of the overworld
-- The state of the player in which it is standing still and looking up
local StillUpState = extend(NormalPlayerState, function(self, player) end)
-- moveUp: None -> None
-- Action to perform when the player moves up
-- Changes the state to the WalkingUpState
function StillUpState.moveUp(self)
NormalPlayerState.moveUp(self)
self.player:setState("WalkingUpState")
end
-- moveDown: None -> None
-- Action to perform when the player moves down
-- Changes the state to the WalkingDownState
function StillUpState.moveDown(self)
NormalPlayerState.moveDown(self)
self.player:setState("WalkingDownState")
end
-- moveLeft: None -> None
-- Action to perform when the player moves left
-- Changes the state to the WalkingLeftState
function StillUpState.moveLeft(self)
NormalPlayerState.moveLeft(self)
self.player:setState("WalkingLeftState")
end
-- moveRight: None -> None
-- Action to perform when the player moves right
-- Changes the state to the WalkingRightState
function StillUpState.moveRight(self)
NormalPlayerState.moveRight(self)
self.player:setState("WalkingRightState")
end
-- getInteractuableHitbox: None -> None
-- Gets the interactuable hitbox of the state
-- In this case a hitbox in the upper part of the player
function StillUpState.getInteractuableHitbox(self)
return self.player.interactuable_up
end
-- to string function
function StillUpState.toString(self)
return "StillUpState"
end
return StillUpState | 32.944444 | 104 | 0.733558 | 3.34375 |
05b53598e7a861c91f8832bbb4ac226d402b2d7c | 2,136 | rb | Ruby | lib/user_engage/client.rb | CompanyMood/user_engage-ruby | 6e804d463c0d5d451febc2f0ab97e5d80b1c7ee6 | [
"MIT"
] | 3 | 2018-02-03T21:53:36.000Z | 2020-10-16T02:56:09.000Z | lib/user_engage/client.rb | CompanyMood/user_engage-ruby | 6e804d463c0d5d451febc2f0ab97e5d80b1c7ee6 | [
"MIT"
] | 6 | 2018-08-10T09:00:24.000Z | 2018-10-26T10:31:17.000Z | lib/user_engage/client.rb | CompanyMood/user_engage-ruby | 6e804d463c0d5d451febc2f0ab97e5d80b1c7ee6 | [
"MIT"
] | 1 | 2018-07-25T19:33:35.000Z | 2018-07-25T19:33:35.000Z | # frozen_string_literal: true
require 'faraday'
module UserEngage
class Client
######################
## Instance methods ##
######################
def initialize(configuration)
@configuration = configuration
end
# Public: Calls the base_url with the given path and parameters
#
def get(path, parameters = {})
request(:get, path, parameters)
end
# Public: Calls the base_url with the given path and parameters
#
def delete(path)
request(:delete, path)
end
# Public: Calls the base_url with the given path and parameters
#
def post(path, parameters = {})
request(:post, path, parameters)
end
# Public: Calls the base_url with the given path and parameters
#
def put(path, parameters = {})
request(:put, path, parameters)
end
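    # Hypothetical usage sketch (the configuration object and endpoint below are
    # assumptions, not part of this file):
    #   client = UserEngage::Client.new(configuration)
    #   client.get('/users/', email: 'someone@example.com')
    #   client.post('/users/', name: 'Jane')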
#####################
## Private methods ##
#####################
private
def connection
Faraday.new(url: host)
end
def request(method, action_path, parameters = nil)
path = action_path.match?(/^https?/) ?
action_path :
"api/public#{action_path}"
%i[post put patch].include?(method) ?
json_body_call(method, path, parameters) :
path_params_call(method, path, parameters)
end
def path_params_call(method, path, parameters)
connection.public_send(method, path, parameters) do |request|
request.headers['Authorization'] = "Token #{@configuration.token}"
request.headers['Content-Type'] = 'application/json'
request.headers['User-Agent'] = "UserEngage-Ruby/#{UserEngage::VERSION}"
end
end
def json_body_call(method, path, parameters)
connection.public_send(method, path) do |request|
request.headers['Authorization'] = "Token #{@configuration.token}"
request.headers['Content-Type'] = 'application/json'
request.headers['User-Agent'] = "UserEngage-Ruby/#{UserEngage::VERSION}"
request.body = parameters.to_json
end
end
def host
@configuration.host || 'https://app.userengage.com/'
end
end
end
| 27.037975 | 80 | 0.6147 | 3.125 |
6226ebe4eb3e97ea82df528e03c6e361d0b237a2 | 3,869 | rs | Rust | tests/integration_tests.rs | victorjoh/console-runner | 1b9ad65a60abc6c38172d07a66738263cbc04891 | [
"MIT"
] | null | null | null | tests/integration_tests.rs | victorjoh/console-runner | 1b9ad65a60abc6c38172d07a66738263cbc04891 | [
"MIT"
] | null | null | null | tests/integration_tests.rs | victorjoh/console-runner | 1b9ad65a60abc6c38172d07a66738263cbc04891 | [
"MIT"
] | null | null | null | use console_runner::{common::*, tasks::*};
use spectral::prelude::*;
const TASK_RUNNER: TaskRunner = TaskRunner {
thread_count: 1,
view_update_period: 0,
};
#[test]
fn the_result_of_a_task_is_passed_to_the_view() {
let mut view = StoreToMemory::new();
let task = SimpleTask {
name: "my name",
run_task: || Ok(Some(String::from("5"))),
};
TASK_RUNNER.run(vec![Box::from(task)], &mut view);
assert_that(&view.task_updates).is_equal_to(vec![
a_status("my name", Status::Running),
a_status("my name", Status::Finished(Some(String::from("5")))),
]);
}
#[test]
fn when_a_task_prints_something_it_is_passed_to_the_view() {
let mut view = StoreToMemory::new();
let task = SimpleTask {
name: "my name",
run_task: || {
print!("Hello!");
Ok(None)
},
};
TASK_RUNNER.run(vec![Box::from(task)], &mut view);
assert_that(&view.task_updates).is_equal_to(vec![
a_status("my name", Status::Running),
a_message("my name", "Hello!"),
a_status("my name", Status::Finished(None)),
]);
}
#[test]
fn when_a_task_panics_it_is_passed_to_the_view() {
let mut view = StoreToMemory::new();
let task = SimpleTask {
name: "my name",
run_task: || panic!("Aargh!"),
};
TASK_RUNNER.run(vec![Box::from(task)], &mut view);
assert_that(&view.task_updates).has_length(3);
let panic_message = extract_message(&view.task_updates[1]);
assert_that(&panic_message).starts_with("thread '<unnamed>' panicked at 'Aargh!'");
assert_that(&view.task_updates[2]).is_equal_to(a_status(
"my name",
Status::Failed(String::from("Aborting task since thread panicked")),
))
}
fn extract_message(update: &TaskUpdate) -> &str {
match &update.change {
TaskChange::TaskMessage(message) => return &message,
_ => panic!("The update was not a log message: <{:?}>", update),
}
}
#[test]
fn many_tasks_are_run_in_order() {
let mut view = StoreToMemory::new();
let first_task = SimpleTask {
name: "first task",
run_task: || Err(String::from("failure")),
};
let second_task = SimpleTask {
name: "second task",
run_task: || Ok(None),
};
TASK_RUNNER.run(
vec![Box::from(first_task), Box::from(second_task)],
&mut view,
);
assert_that(&view.task_updates).is_equal_to(vec![
a_status("first task", Status::Running),
a_status("first task", Status::Failed(String::from("failure"))),
a_status("second task", Status::Running),
a_status("second task", Status::Finished(None)),
]);
}
fn a_status(name: &str, status: Status) -> TaskUpdate {
TaskUpdate {
task_name: String::from(name),
change: TaskChange::TaskStatus(status),
}
}
fn a_message(name: &str, message: &str) -> TaskUpdate {
TaskUpdate {
task_name: String::from(name),
change: TaskChange::TaskMessage(String::from(message)),
}
}
struct SimpleTask<'a> {
name: &'a str,
run_task: fn() -> TaskResult,
}
impl<'a> Task for SimpleTask<'a> {
fn run(&self, _: &dyn Logger) -> TaskResult {
let run_task = self.run_task;
run_task()
}
fn name(&self) -> TaskName {
String::from(self.name)
}
}
enum ViewMethod {
Initialize,
Update,
}
struct StoreToMemory {
tasks: Vec<TaskName>,
task_updates: Vec<TaskUpdate>,
}
impl StoreToMemory {
fn new() -> StoreToMemory {
StoreToMemory {
tasks: Vec::new(),
task_updates: Vec::new(),
}
}
}
impl View for StoreToMemory {
fn initialize(&mut self, tasks: Vec<TaskName>) {
self.tasks = tasks;
}
fn update(&mut self, task_update: TaskUpdate) {
self.task_updates.push(task_update);
}
}
| 25.123377 | 87 | 0.601447 | 3.078125 |
dfe9d4a003e46d50b50cc01b194e674b4a5e9262 | 2,318 | lua | Lua | extensions/sound/init.lua | Luke100000/3DreamEngine | 77dd382b6bb890e174f9175870db636415b835fa | [
"MIT"
] | null | null | null | extensions/sound/init.lua | Luke100000/3DreamEngine | 77dd382b6bb890e174f9175870db636415b835fa | [
"MIT"
] | null | null | null | extensions/sound/init.lua | Luke100000/3DreamEngine | 77dd382b6bb890e174f9175870db636415b835fa | [
"MIT"
] | null | null | null | local soundManager = {
paths = { },
sounds = { },
maxSounds = 16,
}
--reverb effects
for i = 1, 10 do
love.audio.setEffect("reverb_" .. i, {
type = "reverb",
decaytime = i / 2,
density = 0.5,
})
end
--add a directory to the sound library
local supportedFileTypes = table.toSet({"wav", "mp3", "ogg", "oga", "ogv", "flac"})
function soundManager:addLibrary(path, into)
for d,s in ipairs(love.filesystem.getDirectoryItems(path)) do
if love.filesystem.getInfo(path .. "/" .. s, "directory") then
self:addLibrary(path .. "/" .. s, (into and (into .. "/") or "") .. s)
else
local ext = (s:match("^.+(%..+)$") or ""):sub(2)
if supportedFileTypes[ext] then
soundManager.paths[(into and (into .. "/") or "") .. s:sub(1, #s-#ext-1)] = path .. "/" .. s
end
end
end
end
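-- Hypothetical usage sketch (directory and file names are assumptions): calling
--   soundManager:addLibrary("sounds")
-- indexes e.g. "sounds/steps/grass.ogg" under the key "steps/grass", which can then be
-- played with soundManager:play("steps/grass", position, 0.8, 1.0, 0.2, 0.0).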
--TODO remove sorting
local sort = function(n1, n2)
return n1:tell() < n2:tell()
end
function soundManager:play(name, position, volume, pitch, echo, muffle)
assert(self.paths[name], "sound not in library")
if not self.sounds[name] then
local path = self.paths[name]
local s = love.audio.newSource(path, "static")
self.sounds[name] = {s}
assert(s:getChannelCount() == 1, path .. " is not a mono source!")
end
--sort sounds
table.sort(self.sounds[name], sort)
--take the best sound
local sound
if self.sounds[name][1] and not self.sounds[name][1]:isPlaying() then
sound = self.sounds[name][1]
elseif #self.sounds[name] < self.maxSounds then
sound = self.sounds[name][1]:clone()
self.sounds[name][#self.sounds[name]+1] = sound
else
sound = self.sounds[name][#self.sounds[name]]
end
--muffle filter
local filter = muffle and muffle > 0 and {
type = "lowpass",
volume = 1.0,
highgain = 1.0 - muffle * 0.999,
} or nil
--deactivate effetcs
for _,e in ipairs(sound:getActiveEffects()) do
sound:setEffect(e, false)
end
--echo
if echo and echo > 0 then
local i = math.min(10, math.max(1, math.ceil(echo * 10)))
sound:setEffect("reverb_" .. i, filter)
else
sound:setFilter(filter)
end
--launch the sound!
sound:setVolume(volume or 1)
sound:setPitch(pitch or 1)
sound:seek(0)
if position then
sound:setRelative(false)
sound:setPosition(position:unpack())
else
sound:setRelative(true)
sound:setPosition(0, 0, 0)
end
sound:play()
end
return soundManager | 24.924731 | 96 | 0.655306 | 3.34375 |
46df2e70f4263254dc9c292ff44a4cf88ddcab7a | 1,563 | kt | Kotlin | app/src/main/java/com/example/multitypeviewadapter/MultiTypeViewAdapter.kt | marcus-martins/MultiTypeViewAdapter | 4b8adcad6994a7cc0bba0e75021aed9c9162c2a7 | [
"MIT"
] | null | null | null | app/src/main/java/com/example/multitypeviewadapter/MultiTypeViewAdapter.kt | marcus-martins/MultiTypeViewAdapter | 4b8adcad6994a7cc0bba0e75021aed9c9162c2a7 | [
"MIT"
] | null | null | null | app/src/main/java/com/example/multitypeviewadapter/MultiTypeViewAdapter.kt | marcus-martins/MultiTypeViewAdapter | 4b8adcad6994a7cc0bba0e75021aed9c9162c2a7 | [
"MIT"
] | null | null | null | package com.example.multitypeviewadapter
import android.view.LayoutInflater
import android.view.ViewGroup
import androidx.recyclerview.widget.RecyclerView
class MultiTypeViewAdapter(
private val itemViewList : List<ItemView>
) : RecyclerView.Adapter<RecyclerView.ViewHolder>() {
private val HEADER = 1
private val LIST_ITEM = 2
private val FOOTER = 3
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): RecyclerView.ViewHolder {
val inflater = LayoutInflater.from(parent.context)
return when (viewType) {
HEADER -> HeaderViewHolder(inflater, parent)
LIST_ITEM -> ListItemViewHolder(inflater, parent)
FOOTER -> FooterViewHolder(inflater, parent)
else -> throw Exception("Not found view holder")
}
}
override fun getItemCount() = itemViewList.size
override fun onBindViewHolder(holder: RecyclerView.ViewHolder, position: Int) {
val item = itemViewList[position]
when (itemViewList[position]) {
is ItemView.Header -> (holder as HeaderViewHolder).bind(item as ItemView.Header)
is ItemView.ListItem -> (holder as ListItemViewHolder).bind(item as ItemView.ListItem)
is ItemView.Footer -> (holder as FooterViewHolder).bind(item as ItemView.Footer)
}
}
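    // Hypothetical usage sketch (the ItemView subtypes and their constructor arguments
    // are defined elsewhere in this sample project):
    //   recyclerView.adapter = MultiTypeViewAdapter(
    //       listOf(ItemView.Header(...), ItemView.ListItem(...), ItemView.Footer(...))
    //   )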
override fun getItemViewType(position: Int) =
when (itemViewList[position]) {
is ItemView.Header -> HEADER
is ItemView.ListItem -> LIST_ITEM
is ItemView.Footer -> FOOTER
}
} | 38.121951 | 98 | 0.680742 | 3 |
553bba2aab2ec051c477e9ea478141b716cfb797 | 3,404 | kt | Kotlin | common/src/test/kotlin/com/scurab/kuproxy/matcher/DefaultRequestMatcherTest.kt | jbruchanov/kuproxy | 2e10d85e7e318850679be3a21fe4406645f88811 | [
"Apache-2.0"
] | 3 | 2021-11-29T11:44:32.000Z | 2022-01-25T11:36:16.000Z | common/src/test/kotlin/com/scurab/kuproxy/matcher/DefaultRequestMatcherTest.kt | jbruchanov/kuproxy | 2e10d85e7e318850679be3a21fe4406645f88811 | [
"Apache-2.0"
] | null | null | null | common/src/test/kotlin/com/scurab/kuproxy/matcher/DefaultRequestMatcherTest.kt | jbruchanov/kuproxy | 2e10d85e7e318850679be3a21fe4406645f88811 | [
"Apache-2.0"
] | null | null | null | package com.scurab.kuproxy.matcher
import com.scurab.kuproxy.comm.IRequest
import com.scurab.kuproxy.comm.Url
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.extension.ExtensionContext
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.Arguments
import org.junit.jupiter.params.provider.ArgumentsProvider
import org.junit.jupiter.params.provider.ArgumentsSource
import test.request
internal class DefaultRequestMatcherTest {
private val matcher = DefaultRequestMatcher()
@ParameterizedTest(name = "{0}")
@ArgumentsSource(Args::class)
fun match(@Suppress("UNUSED_PARAMETER") name: String, equals: Boolean, real: IRequest, stored: IRequest) {
Assertions.assertEquals(equals, matcher.isMatching(real, stored))
}
class Args : ArgumentsProvider {
override fun provideArguments(context: ExtensionContext) = listOf(
Arguments.of(
"Same request is matching", true,
request {
url = Url("http://www.test.com/test?a=b&b=c")
method = GET
},
request {
url = Url("http://www.test.com/test?b=c&a=b&")
method = GET
}
),
Arguments.of(
"Different methods is NOT matching", false,
request {
url = Url("http://www.test.com")
method = POST
},
request {
url = Url("http://www.test.com")
method = GET
}
),
Arguments.of(
"Matching headers, more values in real request is fine", true,
request {
url = Url("http://www.test.com")
method = GET
headers = mapOf(ContentType to "a; b", AcceptEncoding to "c")
},
request {
url = Url("http://www.test.com")
method = GET
headers = mapOf(ContentType to "a; b")
}
),
Arguments.of(
"Matching headers, no stored headers, matches anything", true,
request {
url = Url("http://www.test.com")
method = GET
headers = mapOf(ContentType to "a; b", AcceptEncoding to "c")
},
request {
url = Url("http://www.test.com")
method = GET
}
),
Arguments.of(
"Matching headers, different values are NOT matching", false,
request {
url = Url("http://www.test.com")
method = GET
headers = mapOf(ContentType to "a; b", AcceptEncoding to "c")
},
request {
url = Url("http://www.test.com")
method = GET
headers = mapOf(ContentType to "a; c", AcceptEncoding to "b")
}
)
).stream()
}
companion object {
const val GET = "GET"
const val POST = "POST"
const val ContentType = "Content-Type"
const val AcceptEncoding = "Accept-Encoding"
}
}
| 35.831579 | 110 | 0.493537 | 3.234375 |
a1c2d145290c136fb1d59d070f88426694752ce5 | 2,380 | go | Go | server.go | UKHomeOffice/dockerfile-validator | 056b995261861667aaab083c2c1f65d661112883 | [
"MIT"
] | 1 | 2017-09-03T12:08:04.000Z | 2017-09-03T12:08:04.000Z | server.go | UKHomeOffice/dockerfile-validator | 056b995261861667aaab083c2c1f65d661112883 | [
"MIT"
] | 2 | 2016-02-04T15:06:24.000Z | 2016-02-25T14:19:23.000Z | server.go | UKHomeOffice/dockerfile-validator | 056b995261861667aaab083c2c1f65d661112883 | [
"MIT"
] | 1 | 2021-04-11T09:41:02.000Z | 2021-04-11T09:41:02.000Z | package main
import (
"fmt"
"io/ioutil"
"net/http"
)
func uploadRulesHandler(w http.ResponseWriter, r *http.Request) {
switch r.Method {
//GET displays the upload form.
case "GET":
message := "Rules currently defined: \n\n" + rules.String()
w.Write([]byte(message))
	//POST loads the uploaded rules file and replaces the in-memory rules.
case "POST":
rulesfile, _, _ := r.FormFile("rules")
defer rulesfile.Close()
rulesdata, _ := ioutil.ReadAll(rulesfile)
rules, _ = loadRules(rulesdata)
fmt.Fprintf(w, "Rules file uploaded")
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
}
func validateHandler(w http.ResponseWriter, r *http.Request) {
switch r.Method {
//GET displays the upload form.
case "GET":
message := "Post your Dockerfile to validate it against the rules: \n\n" + rules.String()
w.Write([]byte(message))
	//POST validates the posted Dockerfile against the currently loaded rules.
case "POST":
dockerfile, _, _ := r.FormFile("dockerfile")
dfile, _ := DockerfileRead(dockerfile)
defer dockerfile.Close()
v := Validation{rules, dfile}
valid, msg := v.validate()
if valid {
w.WriteHeader(http.StatusOK)
return
} else {
w.WriteHeader(http.StatusConflict)
fmt.Fprintf(w, msg)
}
// w.WriteHeader(http.StatusConflict)
// fmt.Fprintf(w, "No Dockerfile found in request")
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
}
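// Hypothetical client call (the route path and port are assumptions; the handler reads
// the multipart form field named "dockerfile"):
//   curl -F dockerfile=@Dockerfile http://localhost:8080/validate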
func uploadHandler(w http.ResponseWriter, r *http.Request) {
switch r.Method {
//GET displays the upload form.
case "GET":
message := "Post your Dockerfile and Rules to check if it's valid"
w.Write([]byte(message))
	//POST validates the posted Dockerfile against the rules uploaded in the same request.
case "POST":
dockerfile, _, _ := r.FormFile("dockerfile")
dfile, _ := DockerfileRead(dockerfile)
defer dockerfile.Close()
rulesfile, _, _ := r.FormFile("rules")
defer rulesfile.Close()
rulesdata, _ := ioutil.ReadAll(rulesfile)
rules, _ := loadRules(rulesdata)
v := Validation{rules, dfile}
valid, msg := v.validate()
if valid {
w.WriteHeader(http.StatusOK)
} else {
w.WriteHeader(http.StatusConflict)
fmt.Fprintf(w, msg)
}
default:
w.WriteHeader(http.StatusMethodNotAllowed)
}
}
func defaultHandler(w http.ResponseWriter, r *http.Request) {
message := "Dockerfile Validator. Upload your Dockerfile to test if it's complaiant with the rules"
w.Write([]byte(message))
}
| 23.106796 | 100 | 0.692857 | 3.078125 |
7244081bae4a13a2235923b10100dc424d141fc8 | 5,691 | rs | Rust | control-plane/rest/service/src/authentication.rs | Abhinandan-Purkait-Bolt/mayastor-control-plane | ae4b39b42cc54bf8520f0256568e5d8635eb4f39 | [
"Apache-2.0"
] | 13 | 2021-03-29T15:10:15.000Z | 2022-03-23T04:01:59.000Z | control-plane/rest/service/src/authentication.rs | Abhinandan-Purkait-Bolt/mayastor-control-plane | ae4b39b42cc54bf8520f0256568e5d8635eb4f39 | [
"Apache-2.0"
] | 146 | 2021-03-30T13:14:56.000Z | 2022-03-31T15:54:38.000Z | control-plane/rest/service/src/authentication.rs | Abhinandan-Purkait-Bolt/mayastor-control-plane | ae4b39b42cc54bf8520f0256568e5d8635eb4f39 | [
"Apache-2.0"
] | 12 | 2021-06-22T13:53:01.000Z | 2022-03-31T12:07:36.000Z | use actix_web::HttpRequest;
use jsonwebtoken::{crypto, Algorithm, DecodingKey};
use http::HeaderValue;
use std::fs::File;
use snafu::{ResultExt, Snafu};
/// Authorization Errors
#[derive(Debug, Snafu)]
pub enum AuthError {
#[snafu(display("Internal error: {}", details))]
InternalError { details: String },
#[snafu(display("No Bearer Token was provided in the HTTP Header"))]
NoBearerToken {},
#[snafu(display("Invalid token, cannot be parsed into a string: {}", source.to_string()))]
InvalidTokenStr { source: http::header::ToStrError },
#[snafu(display("Unauthorized token({}) for uri({})", token, uri))]
Unauthorized { token: String, uri: String },
#[snafu(display(
"Verification process failed, {}. Please check your json web token.",
source
))]
Verification { source: jsonwebtoken::errors::Error },
#[snafu(display("Invalid Bearer Token: {}", details))]
InvalidToken { details: String },
}
/// Initialise JWK with the contents of the file at 'jwk_path'.
/// If jwk_path is 'None', authentication is disabled.
pub fn init(jwk_path: Option<String>) -> JsonWebKey {
match jwk_path {
Some(path) => {
let jwk_file = File::open(path).expect("Failed to open JWK file");
JsonWebKey::from(Some(jwk_file))
}
None => JsonWebKey::from(None),
}
}
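// Hypothetical wiring sketch (the actual actix-web setup lives elsewhere in this crate;
// the file name is an assumption):
//   let jwk = authentication::init(Some("jwk.json".to_string()));
//   // `jwk` is then registered with `App::new().app_data(jwk)` so that
//   // `authenticate()` can fetch it via `req.app_data::<JsonWebKey>()`.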
#[derive(serde::Deserialize, Default, Debug)]
pub struct JsonWebKey {
#[serde(skip_deserializing)]
enabled: bool,
#[serde(alias = "alg")]
algorithm: Algorithm,
#[serde(alias = "n")]
modulus: String,
#[serde(alias = "e")]
exponent: String,
}
impl JsonWebKey {
/// Validates and returns new JsonWebKey
pub(crate) fn from(jwk_file: Option<File>) -> Self {
match jwk_file {
Some(jwk_file) => {
let mut jwk: Self = match serde_json::from_reader(jwk_file) {
Ok(jwk) => jwk,
Err(e) => panic!("Failed to deserialize the jwk: {}", e),
};
jwk.enabled = true;
jwk
}
None => Self::default(),
}
}
/// Validate a bearer token
pub(crate) fn validate(&self, token: &str, uri: &str) -> Result<(), AuthError> {
let (message, signature) = split_token(token)?;
match crypto::verify(&signature, &message, &self.decoding_key(), self.algorithm()) {
Ok(true) => Ok(()),
Ok(false) => Err(AuthError::Unauthorized {
token: token.to_string(),
uri: uri.to_string(),
}),
Err(source) => Err(AuthError::Verification { source }),
}
}
// Returns true if REST calls should be authenticated.
fn auth_enabled(&self) -> bool {
self.enabled
}
// Return the algorithm.
fn algorithm(&self) -> Algorithm {
self.algorithm
}
// Return the modulus.
fn modulus(&self) -> &str {
&self.modulus
}
// Return the exponent.
fn exponent(&self) -> &str {
&self.exponent
}
// Return the decoding key
fn decoding_key(&self) -> DecodingKey {
DecodingKey::from_rsa_components(self.modulus(), self.exponent())
}
}
/// Authenticate the HTTP request by checking the authorisation token to ensure
/// the sender is who they claim to be.
pub fn authenticate(req: &HttpRequest) -> Result<(), AuthError> {
let jwk: &JsonWebKey = match req.app_data() {
Some(jwk) => Ok(jwk),
None => Err(AuthError::InternalError {
details: "Json Web Token not configured in the REST server".to_string(),
}),
}?;
// If authentication is disabled there is nothing to do.
if !jwk.auth_enabled() {
return Ok(());
}
match req.headers().get(http::header::AUTHORIZATION) {
Some(token) => jwk.validate(&format_token(token)?, &req.uri().to_string()),
None => Err(AuthError::NoBearerToken {}),
}
}
// Ensure the token is formatted correctly by removing the "Bearer " prefix if
// present.
fn format_token(token: &HeaderValue) -> Result<String, AuthError> {
let token = token
.to_str()
.context(InvalidTokenStr)?
.trim_start_matches("Bearer ");
Ok(token.trim().into())
}
// Split the JSON Web Token (JWT) into 2 parts, message and signature.
// The message comprises the header and payload.
//
// JWT format:
// <header>.<payload>.<signature>
// \______ ________/
// \/
// message
fn split_token(token: &str) -> Result<(String, String), AuthError> {
let elems = token.split('.').collect::<Vec<&str>>();
if elems.len() == 3 {
let message = format!("{}.{}", elems[0], elems[1]);
let signature = elems[2];
Ok((message, signature.into()))
} else {
Err(AuthError::InvalidToken {
details: "Should be formatted as: header.payload.signature".to_string(),
})
}
}
#[test]
fn validate_test() {
let token_file = std::env::current_dir()
.expect("Failed to get current directory")
.join("authentication")
.join("token");
let mut token = std::fs::read_to_string(token_file).expect("Failed to get bearer token");
let jwk_file = std::env::current_dir()
.expect("Failed to get current directory")
.join("authentication")
.join("jwk");
let jwk = init(Some(jwk_file.to_str().unwrap().into()));
jwk.validate(&token, "uri").expect("Validation should pass");
// create invalid token
token.push_str("invalid");
jwk.validate(&token, "uri")
.expect_err("Validation should fail with an invalid token");
}
| 31.793296 | 94 | 0.593217 | 3.171875 |
de10e1509f6dfa140de63e5a9db392063e95dd58 | 17,775 | rs | Rust | 0xx_core_mix/src/crypto/backend/ristretto_b.rs | sololouve/MaaraaDAO | d53c244599182472789a2c71e6ea56bcc9f78569 | [
"CC0-1.0"
] | 2 | 2021-06-13T21:55:16.000Z | 2021-07-30T08:52:58.000Z | 0xx_core_mix/src/crypto/backend/ristretto_b.rs | sololouve/MaaraaDAO | d53c244599182472789a2c71e6ea56bcc9f78569 | [
"CC0-1.0"
] | null | null | null | 0xx_core_mix/src/crypto/backend/ristretto_b.rs | sololouve/MaaraaDAO | d53c244599182472789a2c71e6ea56bcc9f78569 | [
"CC0-1.0"
] | 2 | 2021-06-13T07:39:16.000Z | 2021-09-04T20:02:29.000Z | use rand::rngs::OsRng;
use rand::rngs::StdRng;
use rand::RngCore;
use rand::SeedableRng;
use serde::{Deserialize, Serialize};
use curve25519_dalek::constants::RISTRETTO_BASEPOINT_POINT;
use curve25519_dalek::ristretto::{CompressedRistretto, RistrettoPoint};
use curve25519_dalek::scalar::Scalar;
use curve25519_dalek::traits::Identity;
use crate::crypto::base::*;
use crate::crypto::elgamal::*;
use crate::crypto::hashing::{hash_bytes_256, HashTo, RistrettoHasher};
use crate::util;
impl Element for RistrettoPoint {
type Exp = Scalar;
type Plaintext = [u8; 30];
fn mul(&self, other: &Self) -> Self {
self + other
}
fn div(&self, other: &Self, _modulus: &Self) -> Self {
self - other
}
fn mod_pow(&self, other: &Self::Exp, _modulus: &Self) -> Self {
self * other
}
fn modulo(&self, _modulus: &Self) -> Self {
*self
}
fn mul_identity() -> RistrettoPoint {
RistrettoPoint::identity()
}
}
impl Exponent for Scalar {
fn add(&self, other: &Scalar) -> Scalar {
self + other
}
fn sub(&self, other: &Scalar) -> Scalar {
self - other
}
fn neg(&self) -> Scalar {
-self
}
fn mul(&self, other: &Scalar) -> Scalar {
self * other
}
fn modulo(&self, _modulus: &Scalar) -> Scalar {
*self
}
fn add_identity() -> Scalar {
Scalar::zero()
}
fn mul_identity() -> Scalar {
Scalar::one()
}
fn to_string(&self) -> String {
hex::encode(self.to_bytes())
}
}
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RistrettoGroup;
impl RistrettoGroup {
fn encode_test(&self, data: [u8; 30]) -> (RistrettoPoint, usize) {
let mut bytes = [0u8; 32];
bytes[1..1 + data.len()].copy_from_slice(&data);
for j in 0..64 {
bytes[31] = j as u8;
for i in 0..128 {
bytes[0] = 2 * i as u8;
if let Some(point) = CompressedRistretto(bytes).decompress() {
return (point, i + j * 128);
}
}
}
panic!("a very unlikely event occurred");
}
}
impl Group<RistrettoPoint> for RistrettoGroup {
fn generator(&self) -> RistrettoPoint {
RISTRETTO_BASEPOINT_POINT
}
fn rnd(&self) -> RistrettoPoint {
let mut rng = OsRng;
RistrettoPoint::random(&mut rng)
}
fn modulus(&self) -> RistrettoPoint {
RistrettoPoint::default()
}
fn rnd_exp(&self) -> Scalar {
let mut rng = OsRng;
Scalar::random(&mut rng)
}
fn rnd_plaintext(&self) -> [u8; 30] {
let mut csprng = OsRng;
let mut value = [0u8; 30];
csprng.fill_bytes(&mut value);
value
}
fn exp_modulus(&self) -> Scalar {
Scalar::default()
}
// see https://github.com/ruescasd/braid-mg/issues/4
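    // The 30 data bytes are written into bytes 1..=30 of a 32-byte candidate
    // encoding; bytes 0 and 31 are then searched until the candidate decompresses
    // to a valid Ristretto point (overwhelmingly likely to succeed, see issue above).
    // `decode` below recovers the plaintext by reading bytes 1..=30 back out.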
fn encode(&self, data: &[u8; 30]) -> RistrettoPoint {
let mut bytes = [0u8; 32];
bytes[1..1 + data.len()].copy_from_slice(data);
for j in 0..64 {
bytes[31] = j as u8;
for i in 0..128 {
bytes[0] = 2 * i as u8;
if let Some(point) = CompressedRistretto(bytes).decompress() {
return point;
}
}
}
panic!("Failed to encode into ristretto point");
}
fn decode(&self, element: &RistrettoPoint) -> [u8; 30] {
let compressed = element.compress();
let slice = &compressed.as_bytes()[1..31];
util::to_u8_30(&slice.to_vec())
}
fn gen_key(&self) -> PrivateKey<RistrettoPoint, Self> {
let secret = self.rnd_exp();
PrivateKey::from(&secret, self)
}
fn pk_from_value(&self, value: &RistrettoPoint) -> PublicKey<RistrettoPoint, Self> {
PublicKey::from(&value, &self.clone())
}
fn exp_hasher(&self) -> Box<dyn HashTo<Scalar>> {
Box::new(RistrettoHasher)
}
fn elem_hasher(&self) -> Box<dyn HashTo<RistrettoPoint>> {
Box::new(RistrettoHasher)
}
// FIXME not kosher
fn generators(&self, size: usize, contest: u32, seed: Vec<u8>) -> Vec<RistrettoPoint> {
let mut seed_ = seed.to_vec();
seed_.extend(&contest.to_le_bytes());
let hashed = hash_bytes_256(seed_);
let mut csprng: StdRng = SeedableRng::from_seed(hashed);
let mut ret: Vec<RistrettoPoint> = Vec::with_capacity(size);
for _ in 0..size {
let g = RistrettoPoint::random(&mut csprng);
ret.push(g);
}
ret
}
}
#[cfg(test)]
mod tests {
extern crate textplots;
use textplots::{utils, Chart, Plot, Shape};
use rand::rngs::OsRng;
use rand::RngCore;
use curve25519_dalek::ristretto::RistrettoPoint;
use curve25519_dalek::traits::Identity;
use crate::crypto::backend::ristretto_b::*;
use crate::crypto::keymaker::*;
use crate::crypto::shuffler::*;
use crate::crypto::symmetric;
use crate::data::artifact::*;
use crate::data::bytes::*;
use crate::util;
#[test]
fn test_ristretto_elgamal() {
let mut csprng = OsRng;
let group = RistrettoGroup;
let sk = group.gen_key();
let pk = PublicKey::from(&sk.public_value, &group);
let mut fill = [0u8; 30];
csprng.fill_bytes(&mut fill);
let plaintext = group.encode(&util::to_u8_30(&fill.to_vec()));
let c = pk.encrypt(&plaintext);
let d = sk.decrypt(&c);
let recovered = group.decode(&d).to_vec();
assert_eq!(fill.to_vec(), recovered);
}
#[test]
fn test_ristretto_js_encoding() {
let rg = RistrettoGroup;
// since we are not encoding ristretto, this string cannot be changed
let text = "this has to be exactly 32 bytes!";
// data generated by ristretto255.js
let skb: [u8; 32] = [
157, 127, 250, 139, 158, 32, 121, 69, 255, 102, 151, 206, 199, 225, 118, 203, 168, 220,
193, 198, 226, 74, 167, 77, 209, 52, 70, 173, 180, 176, 153, 9,
];
let a: [u8; 32] = [
72, 60, 143, 64, 93, 212, 68, 113, 253, 8, 206, 72, 111, 39, 75, 156, 189, 63, 176,
223, 97, 221, 58, 132, 11, 209, 70, 149, 90, 73, 141, 70,
];
let b: [u8; 32] = [
182, 67, 141, 0, 95, 109, 54, 179, 179, 226, 25, 148, 80, 160, 171, 82, 173, 129, 68,
24, 64, 236, 36, 144, 183, 193, 36, 180, 82, 206, 98, 41,
];
let sk_ = PrivateKey::from(&Scalar::from_bytes_mod_order(skb), &rg);
let c_ = Ciphertext {
a: CompressedRistretto(a).decompress().unwrap(),
b: CompressedRistretto(b).decompress().unwrap(),
};
let d_: RistrettoPoint = sk_.decrypt(&c_);
let recovered_ = String::from_utf8(d_.compress().as_bytes().to_vec());
assert_eq!(text, recovered_.unwrap());
}
#[test]
fn test_ristretto_prob_encoding() {
let mut csprng = OsRng;
let mut bytes = [00u8; 30];
let group = RistrettoGroup;
let iterations = 10000;
println!(
"test_r_encoding: running {} encode iterations..",
iterations
);
let v: Vec<(f32, f32)> = (0..iterations)
.map(|i| {
csprng.fill_bytes(&mut bytes);
let fixed = util::to_u8_30(&bytes.to_vec());
(i as f32, group.encode_test(fixed).1 as f32)
})
.collect();
let size: f32 = v.len() as f32;
let values: Vec<u32> = v.iter().map(|x| x.1 as u32).collect();
let sum: f32 = v.iter().map(|x| x.1).fold(0f32, |a, b| a + b);
let sum_f = sum as f32;
println!("test_r_encoding: average {}", sum_f / size);
println!("test_r_encoding: max is {}", values.iter().max().unwrap());
let hist = utils::histogram(&v, 0.0, 30.0, 30);
Chart::new(380, 100, 0.0, 30.0)
.lineplot(&Shape::Bars(&hist))
.nice();
}
#[test]
fn test_ristretto_schnorr() {
let group = RistrettoGroup;
let g = group.generator();
let secret = group.rnd_exp();
let public = g.mod_pow(&secret, &group.modulus());
let schnorr = group.schnorr_prove(&secret, &public, &g, &vec![]);
let verified = group.schnorr_verify(&public, &g, &schnorr, &vec![]);
assert!(verified == true);
let public_false = group
.generator()
.mod_pow(&group.rnd_exp(), &group.modulus());
let verified_false = group.schnorr_verify(&public_false, &g, &schnorr, &vec![]);
assert!(verified_false == false);
}
#[test]
fn test_ristretto_chaumpedersen() {
let group = RistrettoGroup;
let g1 = group.generator();
let g2 = group.rnd();
let secret = group.rnd_exp();
let public1 = g1.mod_pow(&secret, &group.modulus());
let public2 = g2.mod_pow(&secret, &group.modulus());
let proof = group.cp_prove(&secret, &public1, &public2, &g1, &g2, &vec![]);
let verified = group.cp_verify(&public1, &public2, &g1, &g2, &proof, &vec![]);
assert!(verified == true);
let public_false = group
.generator()
.mod_pow(&group.rnd_exp(), &group.modulus());
let verified_false = group.cp_verify(&public1, &public_false, &g1, &g2, &proof, &vec![]);
assert!(verified_false == false);
}
#[test]
fn test_ristretto_vdecryption() {
let mut csprng = OsRng;
let group = RistrettoGroup;
let sk = group.gen_key();
let pk = PublicKey::from(&sk.public_value, &group);
let mut fill = [0u8; 30];
csprng.fill_bytes(&mut fill);
let plaintext = group.encode(&util::to_u8_30(&fill.to_vec()));
let c = pk.encrypt(&plaintext);
let (d, proof) = sk.decrypt_and_prove(&c, &vec![]);
let dec_factor = c.a.div(&d, &group.modulus()).modulo(&group.modulus());
let verified = group.cp_verify(
&pk.value,
&dec_factor,
&group.generator(),
&c.b,
&proof,
&vec![],
);
let recovered = group.decode(&d).to_vec();
assert!(verified == true);
assert_eq!(fill.to_vec(), recovered);
}
#[test]
fn test_ristretto_distributed() {
let mut csprng = OsRng;
let group = RistrettoGroup;
let km1 = Keymaker::gen(&group);
let km2 = Keymaker::gen(&group);
let (pk1, proof1) = km1.share(&vec![]);
let (pk2, proof2) = km2.share(&vec![]);
let verified1 = group.schnorr_verify(&pk1.value, &group.generator(), &proof1, &vec![]);
let verified2 = group.schnorr_verify(&pk2.value, &group.generator(), &proof2, &vec![]);
assert!(verified1 == true);
assert!(verified2 == true);
let mut fill = [0u8; 30];
csprng.fill_bytes(&mut fill);
let plaintext = group.encode(&util::to_u8_30(&fill.to_vec()));
let pk1_value = &pk1.value.clone();
let pk2_value = &pk2.value.clone();
let pks = vec![pk1, pk2];
let pk_combined = Keymaker::combine_pks(&group, pks);
let c = pk_combined.encrypt(&plaintext);
let (dec_f1, proof1) = km1.decryption_factor(&c, &vec![]);
let (dec_f2, proof2) = km2.decryption_factor(&c, &vec![]);
let verified1 = group.cp_verify(
pk1_value,
&dec_f1,
&group.generator(),
&c.b,
&proof1,
&vec![],
);
let verified2 = group.cp_verify(
pk2_value,
&dec_f2,
&group.generator(),
&c.b,
&proof2,
&vec![],
);
assert!(verified1 == true);
assert!(verified2 == true);
let decs = vec![dec_f1, dec_f2];
let d = Keymaker::joint_dec(&group, decs, &c);
let recovered = group.decode(&d).to_vec();
assert_eq!(fill.to_vec(), recovered);
}
#[test]
fn test_ristretto_distributed_serde() {
let mut csprng = OsRng;
let group = RistrettoGroup;
let km1 = Keymaker::gen(&group);
let km2 = Keymaker::gen(&group);
let (pk1, proof1) = km1.share(&vec![]);
let (pk2, proof2) = km2.share(&vec![]);
let sym1 = symmetric::gen_key();
let sym2 = symmetric::gen_key();
let esk1 = km1.get_encrypted_sk(sym1);
let esk2 = km2.get_encrypted_sk(sym2);
let share1 = Keyshare {
share: pk1,
proof: proof1,
encrypted_sk: esk1,
};
let share2 = Keyshare {
share: pk2,
proof: proof2,
encrypted_sk: esk2,
};
let share1_b = share1.ser();
let share2_b = share2.ser();
let share1_d = Keyshare::<RistrettoPoint, RistrettoGroup>::deser(&share1_b).unwrap();
let share2_d = Keyshare::<RistrettoPoint, RistrettoGroup>::deser(&share2_b).unwrap();
let verified1 = Keymaker::verify_share(&group, &share1_d.share, &share1_d.proof, &vec![]);
let verified2 = Keymaker::verify_share(&group, &share2_d.share, &share2_d.proof, &vec![]);
assert!(verified1 == true);
assert!(verified2 == true);
let pk1_value = &share1_d.share.value.clone();
let pk2_value = &share2_d.share.value.clone();
let pks = vec![share1_d.share, share2_d.share];
let pk_combined = Keymaker::combine_pks(&group, pks);
let mut cs = Vec::with_capacity(10);
let mut bs = Vec::with_capacity(10);
for _ in 0..10 {
let mut fill = [0u8; 30];
csprng.fill_bytes(&mut fill);
let encoded = group.encode(&util::to_u8_30(&fill.to_vec()));
let c = pk_combined.encrypt(&encoded);
bs.push(fill.to_vec());
cs.push(c);
}
let (decs1, proofs1) = km1.decryption_factor_many(&cs, &vec![]);
let (decs2, proofs2) = km2.decryption_factor_many(&cs, &vec![]);
let pd1 = PartialDecryption {
pd_ballots: decs1,
proofs: proofs1,
};
let pd2 = PartialDecryption {
pd_ballots: decs2,
proofs: proofs2,
};
let pd1_b = pd1.ser();
let pd2_b = pd2.ser();
let pd1_d = PartialDecryption::<RistrettoPoint>::deser(&pd1_b).unwrap();
let pd2_d = PartialDecryption::<RistrettoPoint>::deser(&pd2_b).unwrap();
let verified1 = Keymaker::verify_decryption_factors(
&group,
pk1_value,
&cs,
&pd1_d.pd_ballots,
&pd1_d.proofs,
&vec![],
);
let verified2 = Keymaker::verify_decryption_factors(
&group,
pk2_value,
&cs,
&pd2_d.pd_ballots,
&pd2_d.proofs,
&vec![],
);
assert!(verified1 == true);
assert!(verified2 == true);
let decs = vec![pd1_d.pd_ballots, pd2_d.pd_ballots];
let ds = Keymaker::joint_dec_many(&group, &decs, &cs);
let recovered: Vec<Vec<u8>> = ds.into_iter().map(|d| group.decode(&d).to_vec()).collect();
assert_eq!(bs, recovered);
}
#[test]
fn test_identity() {
let mut csprng = OsRng;
let x = RistrettoPoint::random(&mut csprng);
assert_eq!(x + RistrettoPoint::identity(), x);
}
#[test]
fn test_ristretto_shuffle_serde() {
let group = RistrettoGroup;
let exp_hasher = &*group.exp_hasher();
let sk = group.gen_key();
let pk = PublicKey::from(&sk.public_value, &group);
let es = util::random_ristretto_ballots(10, &group).ciphertexts;
let seed = vec![];
let hs = generators(es.len() + 1, &group, 0, seed);
let shuffler = Shuffler {
pk: &pk,
generators: &hs,
hasher: exp_hasher,
};
let (e_primes, rs, perm) = shuffler.gen_shuffle(&es);
let proof = shuffler.gen_proof(&es, &e_primes, &rs, &perm, &vec![]);
let ok = shuffler.check_proof(&proof, &es, &e_primes, &vec![]);
let mix = Mix {
mixed_ballots: e_primes,
proof: proof,
};
let pk_b = pk.ser();
let es_b = es.ser();
let mix_b = mix.ser();
assert!(ok == true);
let pk_d = PublicKey::<RistrettoPoint, RistrettoGroup>::deser(&pk_b).unwrap();
let es_d = Vec::<Ciphertext<RistrettoPoint>>::deser(&es_b).unwrap();
let mix_d = Mix::<RistrettoPoint>::deser(&mix_b).unwrap();
let shuffler_d = Shuffler {
pk: &pk_d,
generators: &hs,
hasher: exp_hasher,
};
let ok_d = shuffler_d.check_proof(&mix_d.proof, &es_d, &mix_d.mixed_ballots, &vec![]);
assert!(ok_d == true);
}
#[test]
fn test_ristretto_encrypted_pk() {
let mut csprng = OsRng;
let group = RistrettoGroup;
let sk = group.gen_key();
let pk = PublicKey::from(&sk.public_value, &group);
let mut fill = [0u8; 30];
csprng.fill_bytes(&mut fill);
let plaintext = group.encode(&util::to_u8_30(&fill.to_vec()));
let c = pk.encrypt(&plaintext);
let sym_key = symmetric::gen_key();
let enc_sk = sk.to_encrypted(sym_key);
let enc_sk_b = enc_sk.ser();
let enc_sk_d = EncryptedPrivateKey::deser(&enc_sk_b).unwrap();
let sk_d = PrivateKey::from_encrypted(sym_key, enc_sk_d, &group);
let d = sk_d.decrypt(&c);
let recovered = group.decode(&d).to_vec();
assert_eq!(fill.to_vec(), recovered);
}
}
| 31.571936 | 99 | 0.547961 | 3.046875 |
f05ff75311109e9783190938edb768d0c1aeae71 | 2,772 | js | JavaScript | src/pages/authority/resource-mgnt/index.js | watertao/teemo | 13b0ebafb853d0f110c6ed7817667229efaa22e0 | [
"DOC",
"MIT"
] | 11 | 2019-07-11T04:16:32.000Z | 2021-05-16T09:26:51.000Z | src/pages/authority/resource-mgnt/index.js | watertao/teemo | 13b0ebafb853d0f110c6ed7817667229efaa22e0 | [
"DOC",
"MIT"
] | 5 | 2019-04-27T01:27:46.000Z | 2019-11-15T00:56:28.000Z | src/pages/authority/resource-mgnt/index.js | watertao/teemo | 13b0ebafb853d0f110c6ed7817667229efaa22e0 | [
"DOC",
"MIT"
] | 2 | 2019-02-20T00:58:33.000Z | 2019-08-08T03:03:50.000Z | import React, { Component } from 'react';
import PageHeaderWrapper from '@/components/PageHeaderWrapper';
import { formatMessage as fm, FormattedMessage } from 'umi/locale';
import mm from '@/utils/message-util';
import {Button, Icon, Table, Tag} from 'antd';
import TMDataList from '@/components/TMDataList';
import { connect } from 'dva';
import styles from './style.less';
@connect(({ resource, loading }) => ({ resource, loading }))
export default class ResourceManagement extends Component {
constructor(props) {
super(props);
this.tmDataListRef = React.createRef();
this._onDataFetchExpect = this._onDataFetchExpect.bind(this);
}
componentDidMount() {
this.tmDataListRef.current.query();
}
render = () => {
const { resource: { list }, loading } = this.props;
const columns = [
{
title: '#',
dataIndex: 'seq',
width: 50,
render: (text, record, index) => {
return <span style={{ fontWeight: '500' }}>{index + 1}</span>;
}
},
{
title: mm('resource.field.method'),
dataIndex: 'verb',
key: 'verb',
width: 170,
render: (text, record, index) => {
switch(record.verb) {
case 'DELETE':
return <Tag color='#F50'>{ record.verb }</Tag>;
case 'GET':
return <Tag color='#87D068'>{ record.verb }</Tag>;
case 'POST':
return <Tag color='#108EE9'>{ record.verb }</Tag>;
case 'PUT':
return <Tag color='#2DB7F5'>{ record.verb }</Tag>;
default:
return <Tag color='#BFBFBF'>{ record.verb }</Tag>
}
},
sorter: true,
},
{
title: mm('resource.field.pattern'),
dataIndex: 'uri_pattern',
key: 'uri_pattern',
sorter: true,
render: (text) => {
return <span className={styles['uri_pattern']}>{text}</span>
}
},
{
title: mm('resource.field.name'),
dataIndex: 'name',
key: 'name'
},
{
title: mm('resource.field.remark'),
dataIndex: 'remark',
key: 'remark'
},
];
return (
<PageHeaderWrapper
title={mm('moduleName')}
>
<TMDataList
ref={this.tmDataListRef}
onDataFetchExpect={this._onDataFetchExpect}
dataSource={list}
columns={columns}
loading={loading.effects['resource/fetchList']}
/>
</PageHeaderWrapper>
);
}
_onDataFetchExpect = (filterParameters) => {
const { dispatch } = this.props;
return dispatch({
type: 'resource/fetchList',
payload: {
filterParameters,
}
});
}
}
| 25.2 | 72 | 0.535714 | 3.0625 |
c3b0a8a2e995a2396bd816e1e98bc83cb2f40a2f | 2,684 | go | Go | engine/worker/cmd_update.go | mvdan/cds | fcf66ba206a2cbf45540b8b3f578436ec3da5678 | [
"BSD-3-Clause"
] | 1 | 2019-04-29T12:34:50.000Z | 2019-04-29T12:34:50.000Z | engine/worker/cmd_update.go | mvdan/cds | fcf66ba206a2cbf45540b8b3f578436ec3da5678 | [
"BSD-3-Clause"
] | null | null | null | engine/worker/cmd_update.go | mvdan/cds | fcf66ba206a2cbf45540b8b3f578436ec3da5678 | [
"BSD-3-Clause"
] | null | null | null | package main
import (
"fmt"
"net/http"
"time"
"github.com/inconshreveable/go-update"
"github.com/spf13/cobra"
"github.com/ovh/cds/sdk"
"github.com/ovh/cds/sdk/cdsclient"
)
func cmdUpdate(w *currentWorker) *cobra.Command {
c := &cobra.Command{
Use: "update",
Short: "worker update [flags]",
Long: `Update worker from CDS API or from CDS Release
Update from Github:
worker update --from-github
Update from your CDS API:
worker update --api https://your-cds-api.localhost
`,
Run: updateCmd(w),
}
c.Flags().Bool(flagFromGithub, false, "Update binary from latest github release")
c.Flags().String(flagAPI, "", "URL of CDS API")
c.Flags().Bool(flagInsecure, false, `(SSL) This option explicitly allows curl to perform "insecure" SSL connections and transfers.`)
return c
}
func updateCmd(w *currentWorker) func(cmd *cobra.Command, args []string) {
return func(cmd *cobra.Command, args []string) {
fmt.Println(sdk.VersionString())
var urlBinary string
if !FlagBool(cmd, "from-github") {
w.apiEndpoint = FlagString(cmd, flagAPI)
if w.apiEndpoint == "" {
sdk.Exit("--api not provided, aborting update.")
}
w.client = cdsclient.NewWorker(w.apiEndpoint, "download", cdsclient.NewHTTPClient(time.Second*360, FlagBool(cmd, flagInsecure)))
urlBinary = w.client.DownloadURLFromAPI("worker", sdk.GOOS, sdk.GOARCH)
fmt.Printf("Updating worker binary from CDS API on %s...\n", urlBinary)
} else {
// no need to have apiEndpoint here
w.client = cdsclient.NewWorker("", "download", nil)
var errGH error
urlBinary, errGH = w.client.DownloadURLFromGithub(sdk.GetArtifactFilename("worker", sdk.GOOS, sdk.GOARCH))
if errGH != nil {
sdk.Exit("Error while getting URL from Github: %s", errGH)
}
fmt.Printf("Updating worker binary from Github on %s...\n", urlBinary)
}
resp, err := http.Get(urlBinary)
if err != nil {
sdk.Exit("Error while getting binary from CDS API: %s\n", err)
}
defer resp.Body.Close()
if contentType := getContentType(resp); contentType != "application/octet-stream" {
sdk.Exit("Invalid Binary (Content-Type: %s). Please try again or download it manually from %s\n", contentType, sdk.URLGithubReleases)
}
if resp.StatusCode != 200 {
sdk.Exit("Error http code: %d, url called: %s\n", resp.StatusCode, urlBinary)
}
if err := update.Apply(resp.Body, update.Options{}); err != nil {
sdk.Exit("Error while getting updating worker from CDS API: %s\n", err)
}
fmt.Println("Update worker done.")
}
}
func getContentType(resp *http.Response) string {
for k, v := range resp.Header {
if k == "Content-Type" && len(v) >= 1 {
return v[0]
}
}
return ""
}
| 29.822222 | 136 | 0.684426 | 3.109375 |
17ebe374d409c0dc5b747caf9d0e1c4c93a76da6 | 2,940 | sql | SQL | junior/pack2_junior/p3_sql_jdbc/ch1_crud/create_v1.sql | multiscripter/job4j | 2aaaf59c2b4e1d9866395f1247897d5f09f9ef84 | [
"Apache-2.0"
] | 1 | 2018-01-16T11:27:23.000Z | 2018-01-16T11:27:23.000Z | junior/pack2_junior/p3_sql_jdbc/ch1_crud/create_v1.sql | multiscripter/job4j | 2aaaf59c2b4e1d9866395f1247897d5f09f9ef84 | [
"Apache-2.0"
] | 11 | 2020-07-01T18:39:12.000Z | 2022-02-16T01:11:31.000Z | junior/pack2_junior/p3_sql_jdbc/ch1_crud/create_v1.sql | multiscripter/job4j | 2aaaf59c2b4e1d9866395f1247897d5f09f9ef84 | [
"Apache-2.0"
] | null | null | null | -- Database: job4j
create database job4j
WITH
OWNER = postgres
ENCODING = 'UTF8'
LC_COLLATE = 'Russian_Russia.1251'
LC_CTYPE = 'Russian_Russia.1251'
TABLESPACE = pg_default
CONNECTION LIMIT = -1;
create table if not exists statuses (
id serial not null primary key,
status varchar (16) not null
);
insert into statuses (status) values ('new');
insert into statuses (status) values ('in work');
insert into statuses (status) values ('closed');
create table if not exists categories (
id serial not null primary key,
category varchar (16) not null
);
insert into categories (category) values ('normal');
insert into categories (category) values ('urgent');
create table if not exists rights (
id serial not null primary key,
canAdd bool not null,
canEdit bool not null,
canDel bool not null
);
insert into rights (canAdd, canEdit, canDel) values (true, false, false);
insert into rights (canAdd, canEdit, canDel) values (true, true, false);
insert into rights (canAdd, canEdit, canDel) values (true, true, true);
create table if not exists roles (
id serial not null primary key,
rightsId int not null references rights(id),
role varchar (16) not null
);
insert into roles (rightsId, role) values (1, 'user');
insert into roles (rightsId, role) values (2, 'moder');
insert into roles (rightsId, role) values (3, 'admin');
create table if not exists users (
id serial not null primary key,
name varchar (64) not null,
regdate date not null default CURRENT_DATE,
roleId int not null references roles(id)
);
insert into users (name, roleId) values ('Ivanov', 1);
insert into users (name, roleId) values ('Petrov', 2);
insert into users (name, roleId) values ('Sidorov', 3);
insert into users (name, roleId) values ('Smirnov', 1);
create table if not exists orders (
id serial not null primary key,
userId int not null references users(id),
catId int not null references categories(id),
message text not null,
regdate timestamp not null default now(),
statusId int not null references statuses(id) default 1
);
insert into orders (userId, catId, message) values (1, 1, 'Text content of first order.');
insert into orders (userId, catId, message) values (1, 1, 'Second order description.');
insert into orders (userId, catId, message) values (4, 1, 'Third order description.');
insert into orders (userId, catId, message) values (4, 2, 'four order description.');
create table if not exists comments (
id serial not null primary key,
orderId int not null references orders(id),
userId int not null references users(id),
comment text not null
);
insert into comments (orderId, userId, comment) values (1, 1, 'This is comment for order 1 from user Ivanov');
create table if not exists files (
id serial not null primary key,
orderId int not null references orders(id),
file varchar (255) not null
); | 35.853659 | 110 | 0.709524 | 3 |
39d7027b00af6f69984e54c088d2259050baa863 | 2,435 | js | JavaScript | .core/.cli/commands/electron/builder/index.js | Atomic-Reactor/Reactium-SSR | ce5906db22fa726cdd25d9670343b45b9ed33e61 | [
"MIT"
] | 18 | 2019-01-01T20:40:21.000Z | 2021-05-24T02:00:08.000Z | .core/.cli/commands/electron/builder/index.js | Atomic-Reactor/Reactium-SSR | ce5906db22fa726cdd25d9670343b45b9ed33e61 | [
"MIT"
] | 21 | 2018-07-12T12:11:19.000Z | 2022-02-20T23:26:18.000Z | .core/.cli/commands/electron/builder/index.js | Atomic-Reactor/Reactium-SSR | ce5906db22fa726cdd25d9670343b45b9ed33e61 | [
"MIT"
] | 6 | 2018-12-14T19:22:21.000Z | 2022-03-07T16:22:23.000Z | /**
* -----------------------------------------------------------------------------
* Imports
* -----------------------------------------------------------------------------
*/
const generator = require('./generator');
const path = require('path');
const op = require('object-path');
const mod = path.dirname(require.main.filename);
const { error, message } = require(`${mod}/lib/messenger`);
/**
* NAME String
* @description Constant defined as the command name. Value passed to the commander.command() function.
* @example $ arcli electron-build
* @see https://www.npmjs.com/package/commander#command-specific-options
* @since 2.0.0
*/
const NAME = 'electron-build';
/**
* DESC String
* @description Constant defined as the command description. Value passed to
* the commander.desc() function. This string is also used in the --help flag output.
* @see https://www.npmjs.com/package/commander#automated---help
* @since 2.0.0
*/
const DESC = 'Command for building Reactium into an Electron app.';
/**
* CANCELED String
* @description Message sent when the command is canceled
* @since 2.0.0
*/
const CANCELED = 'electron-build canceled!';
/**
* HELP Function
* @description Function called in the commander.on('--help', callback) callback.
* @see https://www.npmjs.com/package/commander#automated---help
* @since 2.0.0
*/
const HELP = () =>
console.log(`
Example:
$ arcli electron-build
`);
/**
* ACTION Function
* @description Function used as the commander.action() callback.
* @see https://www.npmjs.com/package/commander
* @param opt Object The commander options passed into the function.
* @param props Object The CLI props passed from the calling class `orcli.js`.
* @since 2.0.0
*/
const ACTION = ({ opt, props }) =>
generator({ props })
.then(() => console.log(''))
.catch(err => console.log(err));
/**
* COMMAND Function
* @description Function that executes program.command()
*/
const COMMAND = ({ program, props }) =>
program
.command(NAME)
.description(DESC)
.action(opt => ACTION({ opt, props }))
.on('--help', HELP);
/**
* Module Constructor
* @description Internal constructor of the module that is being exported.
* @param program Class Commander.program reference.
* @param props Object The CLI props passed from the calling class `arcli.js`.
* @since 2.0.0
*/
module.exports = {
COMMAND,
NAME,
};
| 28.647059 | 103 | 0.627926 | 3.125 |
89e111c50828992611f3715b9ae88f7d3b37d526 | 6,591 | swift | Swift | LazySwifter/StringExtension.swift | quocnb/LazySwifter | fa22f21b25803d8bf763a28dc27712ee573eb482 | [
"MIT"
] | null | null | null | LazySwifter/StringExtension.swift | quocnb/LazySwifter | fa22f21b25803d8bf763a28dc27712ee573eb482 | [
"MIT"
] | null | null | null | LazySwifter/StringExtension.swift | quocnb/LazySwifter | fa22f21b25803d8bf763a28dc27712ee573eb482 | [
"MIT"
] | null | null | null | //
// StringExtension.swift
// LazySwifter
//
// Created by Quoc Nguyen on 2/14/18.
//
import UIKit
public extension String {
public func index(by distance: Int) -> String.Index? {
if distance >= 0 {
return index(startIndex, offsetBy: distance, limitedBy: endIndex)
} else {
return index(endIndex, offsetBy: distance, limitedBy: startIndex)
}
}
public func index(from: Int, to: Int) -> String.Index? {
guard let left = index(by: from) else {
return nil
}
return index(left, offsetBy: to - from, limitedBy: endIndex)
}
// String[1]
public subscript(index: Int) -> String {
guard let idx = self.index(by: index) else {
return ""
}
return String(self[idx])
}
// String[1..<2]
public subscript(range: Range<Int>) -> String {
guard let left = index(by: range.lowerBound) else {
return ""
}
guard let right = index(from: range.lowerBound, to: range.upperBound) else {
return ""
}
return String(self[left..<right])
}
// String[1...2]
public subscript(range: ClosedRange<Int>) -> String {
guard let left = index(by: range.lowerBound) else {
return ""
}
guard let right = index(from: range.lowerBound, to: range.upperBound) else {
return ""
}
return String(self[left...right])
}
// String[..<1]
public subscript(range: PartialRangeUpTo<Int>) -> String {
guard let right = index(from: 0, to: range.upperBound) else {
return ""
}
return String(self[..<right])
}
// String[...1]
public subscript(range: PartialRangeThrough<Int>) -> String {
guard let right = index(from: 0, to: range.upperBound) else {
return ""
}
return String(self[...right])
}
// String[1...]
public subscript(range: PartialRangeFrom<Int>) -> String {
guard let left = index(by: range.lowerBound) else {
return ""
}
return String(self[left...])
}
// String["substring"]
public subscript(string: String) -> [Range<String.Index>] {
var occurences = [Range<String.Index>]()
var initialLeftBound = startIndex
while initialLeftBound < endIndex {
guard let range = self.range(
of: string, options: [],
range: initialLeftBound..<endIndex, locale: nil
) else {
break
}
occurences.append(range)
initialLeftBound = range.upperBound
}
return occurences
}
// String["begin"..."end"]
public subscript(range: ClosedRange<String>) -> [ClosedRange<String.Index>] {
var occurences = [ClosedRange<String.Index>]()
var initialLeftBound = startIndex
while initialLeftBound < endIndex {
guard let beginRange = self.range(
of: range.lowerBound, options: [],
range: initialLeftBound..<endIndex, locale: nil
) else {
break
}
guard let endRange = self.range(
of: range.upperBound, options: [],
range: beginRange.upperBound..<endIndex, locale: nil
) else {
break
}
occurences.append(beginRange.lowerBound...endRange.upperBound)
initialLeftBound = endRange.upperBound
}
return occurences
}
// String["begin"..<"end"]
public subscript(range: Range<String>) -> [Range<String.Index>] {
var occurences = [Range<String.Index>]()
var initialLeftBound = startIndex
while initialLeftBound < endIndex {
guard let beginRange = self.range(
of: range.lowerBound, options: [],
range: initialLeftBound..<endIndex, locale: nil
) else {
break
}
guard let endRange = self.range(
of: range.upperBound, options: [],
range: beginRange.upperBound..<endIndex, locale: nil
) else {
break
}
occurences.append(beginRange.upperBound..<endRange.lowerBound)
initialLeftBound = endRange.upperBound
}
return occurences
}
// String[Character("a")]
public subscript(character: Character) -> [String.Index] {
var occurences = [String.Index]()
var initialLeftBound = startIndex
while initialLeftBound < endIndex {
guard let beginRange = self.range(
of: String(character), options: [],
range: initialLeftBound..<endIndex, locale: nil
) else {
break
}
occurences.append(beginRange.lowerBound)
initialLeftBound = beginRange.upperBound
}
return occurences
}
// String["begin"...]
public subscript(range: PartialRangeFrom<String>) -> PartialRangeFrom<String.Index>? {
guard self.index(by: range.lowerBound.count) != nil else {
return nil
}
guard let beginRange = self.range(
of: range.lowerBound, options: [],
range: startIndex..<endIndex, locale: nil
) else {
return nil
}
return beginRange.upperBound...
}
// String[..."end"]
public subscript(range: PartialRangeThrough<String>) -> PartialRangeThrough<String.Index>? {
guard self.index(by: range.upperBound.count) != nil else {
return nil
}
guard let endRange = self.range(
of: range.upperBound, options: [],
range: startIndex..<endIndex, locale: nil
) else {
return nil
}
return ...endRange.lowerBound
}
}
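// Illustrative results for the subscripts above (hypothetical sample strings):
//   "Swifter"[1]        // "w"
//   "Swifter"[1..<4]    // "wif"
//   "Swifter"[4...]     // "ter"
//   "Swifter"["if"]     // one Range covering the "if" at offsets 2..<4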
public extension String {
public var localizedString: String {
return NSLocalizedString(self, comment: "")
}
public static func * (text: String, times: Int) -> String {
return String(repeating: text, count: times)
}
public func trimmed() -> String {
let trim = self.trimmingCharacters(in: .whitespacesAndNewlines)
guard let regex = try? NSRegularExpression(pattern: "\\s+", options: .caseInsensitive) else {
return trim
}
        let range = NSRange(trim.startIndex..., in: trim)
return regex.stringByReplacingMatches(in: trim, options: [], range: range, withTemplate: " ")
}
}
| 31.84058 | 101 | 0.553634 | 3.359375 |
f1cf83410b8a5d4a579eebaf312a5d015ed272e8 | 15,621 | rb | Ruby | server/WAPI.rb | tl-its-umich-edu/StudentDashboard | eaaef3a834dd0fce164feea7ebe7bb7706c129ea | [
"Apache-2.0"
] | 1 | 2018-11-02T19:53:42.000Z | 2018-11-02T19:53:42.000Z | server/WAPI.rb | tl-its-umich-edu/StudentDashboard | eaaef3a834dd0fce164feea7ebe7bb7706c129ea | [
"Apache-2.0"
] | 99 | 2015-01-29T15:22:52.000Z | 2018-04-09T17:36:04.000Z | server/WAPI.rb | tl-its-umich-edu/StudentDashboard | eaaef3a834dd0fce164feea7ebe7bb7706c129ea | [
"Apache-2.0"
] | 4 | 2015-04-30T18:13:03.000Z | 2016-06-15T20:37:35.000Z | # Utility class to run requests against a WSO2 API.
# This has two entry points of interest:
# Constructor: new(hash)
# The hash should contain the following values:
# 'token_server','api_prefix','key','secret'
# It may contain the current token value but does not need to
# since WAPI will renew tokens as necessary.
# get_request(string): This string will be appended to the api_prefix and
# executed as a GET
# See the WAPI_result_wrapper class code for the output format. The data for a request
# is expected to be returned as a JSON string.
# Only GET is explicitly supported at the moment. (Internally it also uses a POST to renew tokens.)
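# A minimal usage sketch (all values below are hypothetical placeholders):
#
#   wapi = WAPI.new(
#     'token_server' => 'https://api.example.edu/token',
#     'api_prefix'   => 'https://api.example.edu/StudentDashboard/v1',
#     'key'          => 'oauth-key',
#     'secret'       => 'oauth-secret',
#     'scope'        => 'PRODUCTION',
#     'grant_type'   => 'client_credentials'
#   )
#   wrapped = wapi.get_request('/Terms')
#   data = JSON.parse(wrapped.result) if wrapped.meta_status == WAPIStatus::SUCCESS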
require 'base64'
require 'rest-client'
require "link_header"
require_relative './Logging'
require_relative './WAPI_result_wrapper'
require_relative './WAPI_status'
require_relative './stopwatch'
## For detailed tracing set this to anything but FalseClass
TRACE=FalseClass
include Logging
class WAPI
# The application provides the values required to make a connection
# to the WSO2 ESB. The key and secret are oauth key and secret for generating tokens.
# The token_server is full url for request to generate / renew tokens.
#
# The api_prefix is string that will be prefixed to every request made
# through this instance. It will contain host and anything else
# that will appear in front of every request. E.g. It might contain
# https://woodpigeon.dsc.umich.edu:8243/StudentDashboard/v1 or just
# https://woodpigeon.dsc.umich.edu:8243 depending on how you choose
# to use it.
def initialize(application)
if application.nil?
msg = "No ESB Application values provided to WAPI initialize"
logger.warn msg
raise StandardError, msg
end
logger.debug("application: #{application}")
@token_server = application['token_server']
@api_prefix = application['api_prefix']
@key = application['key']
@secret = application['secret']
@token = application['token']
# # added for IBM Api manager
# # client_id and client_secret will be key and secret
@scope = application['scope']
@grant_type = application['grant_type']
#
if (@scope.nil?)
logger.error("missing value: scope")
raise "WAPI: missing value: scope"
end
if (@grant_type.nil?)
logger.error("missing value: grant_type") if (@grant_type.nil?)
raise "WAPI: missing value: grant_type"
end
#
# ## special uniqname may be supplied for testing
@uniqname = application['uniqname']
#
@renewal = WAPI.build_renewal(@key, @secret)
logger.info("#{self.class.to_s}:#{__method__}:#{__LINE__}: initialized WAPI with #{@api_prefix}")
end
def self.build_renewal(key, secret)
b64 = base64_key_secret(key, secret)
"Basic #{b64}"
end
def self.base64_key_secret(key, secret)
ks = "#{key}:#{secret}"
Base64.strict_encode64(ks)
end
######### utilities for URL formatting
# use instance specific configuration information to generate the full url.
def format_url(request)
"#{@api_prefix}#{request}"
end
# Responses may contain partial results. In that case information about how to get the remaining data is returned
# in the 'Link' header. Link headers come back with explicit URLs pointing to Canvas servers.
# Remove that server added by the external service since we need to send queries back through the ESB rather than
# straight to the external service.
# This code assumes that the main portion of the next link (more_url) looks like the original request except that
# the beginning may be different (e.g. direct to canvas host vs through the ESB proxy) and the query parameters
# may be different (e.g. maybe add info about where to restart retrieval).
# A trivial more_url should become an empty string.
# Query parameters on the more_url should be passed through.
# Query parameters on the request_string should be ignored.
# The more_url is a complete URL that was returned from the external service. The
  # request_string is the partial url generated internally that doesn't have any explicit server / api
# version information.
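  # For illustration (hypothetical values): given
  #   more_url       = "https://canvas.example.com/api/v1/courses/1/users?page=2&per_page=100"
  #   request_string = "/courses/1/users"
  # the reduced link would be "/courses/1/users?page=2&per_page=100", i.e. the original
  # resource path with the pagination query parameters from the 'next' link kept.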
def reduce_url(more_url, request_string)
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: more_url: #{more_url} request_string: #{request_string}"
return "" if more_url.nil? || more_url.length == 0
# Get the main part of the original request without any query parameters.
main_request = request_string.index('?') ? request_string[/(^.+)\?/] : request_string
# Pull out the part of more_url that matches the original query and also pull along query parameters that were
# supplied in the more_url.
more_url = more_url[/#{main_request}.*/]
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: reduced more_url: #{more_url}"
more_url
end
# Make a request to an external service and handle error conditions and headers.
def do_request(request_string)
RestClient.log = logger if (logger.debug? and TRACE != FalseClass)
# make the request specific to the separately configured API.
url=format_url(request_string)
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: url: #{url}"
r = Stopwatch.new(Thread.current.to_s+": "+url)
r.start
begin
response = RestClient.get url, {:Authorization => "Bearer #{@token}",
'x-ibm-client-id' => @key,
:accept => :json,
:verify_ssl => true}
# If the request has more data pull out the external url to get it.
more_url = process_link_header(response)
# fix it up to go through our proxy.
more_url = reduce_url(more_url, request_string)
## try to parse as json. If can't do that generate an error.
json_response = JSON.parse(response)
## json_response is a JSON object. Only print part of it.
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: esb response as json"+JSON.generate(json_response)[0..30]
# fix up the json a bit.
json_response = standardize_json(json_response, response)
####### Now we have a parsed json object
# figure out the overall response code for the request. That may come from the esb call or data returned
# from the request url
rc = compute_response_code_to_return(json_response, response)
## We have parsed JSON, now make it a json string so it can be returned
json_response = JSON.generate(json_response)
wrapped_response = WAPIResultWrapper.new(rc, "COMPLETED", json_response, more_url)
### handle some error conditions explicitly.
rescue URI::InvalidURIError => exp
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: invalid URI: "+exp.to_s
wrapped_response = WAPIResultWrapper.new(WAPIStatus::BAD_REQUEST, "INVALID URL", exp.to_s)
rescue StandardError => exp
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: exception: "+exp.inspect
if exp.response.code == WAPIStatus::HTTP_NOT_FOUND
wrapped_response = WAPIResultWrapper.new(WAPIStatus::HTTP_NOT_FOUND, "NOT FOUND", exp)
else
wrapped_response = WAPIResultWrapper.new(WAPIStatus::UNKNOWN_ERROR, "EXCEPTION", exp)
end
end
r.stop
logger.info "#{self.class.to_s}:#{__method__}:#{__LINE__}:do_request: stopwatch: "+ r.pretty_summary
wrapped_response
end
# A response may provide link headers that indicate the data returned is partial and more is available if you
# use the 'next' url provided. Get that link and log some information so we can track this.
def process_link_header(response)
linkheader = LinkHeader.parse(response.headers[:link]).to_a
next_link, last_link = nil, nil
#### extract the interesting header links
linkheader.each { |link|
next_link ||= header_link_for_rel(link, 'next')
last_link ||= header_link_for_rel(link, 'last')
}
# If there is more data on another page log that.
if !next_link.nil?
page_estimate = ""
# Log last_page and per_page values from the 'last' url so can get rough estimate of total number of
# entries for query. Note: We use the page/per_page information because it turns out that Canvas puts that
# in the URL. However that isn't a standard and we shouldn't rely on it for processing.
if !last_link.nil?
p = Regexp.new(/page=(\d+)&per_page=(\d+)/)
p.match(last_link)
last_page, per_page = $1, $2
page_estimate = "last_page: #{last_page} page_size: #{per_page} "
end
logger.warn "#{self.class.to_s}:#{__method__}:#{__LINE__}: pagination: #{page_estimate} next_link: #{next_link}"
end
# return the raw next link (or an empty string)
next_link.nil? ? "" : next_link
end
# Utility to extract URL for the desired link type from the full link header.
def header_link_for_rel(link, desired)
link[1][0][1] == desired ? link[0] : nil
end
## detailed dump of response object
def dump_json_object(json_response, response)
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: after initial parse"
logger.info "#{self.class.to_s}:#{__method__}:#{__LINE__}: response.code: "+json_response[response.code].to_s
end
## Figure out the response status code to return. It might be from the response body or from the RestClient response.
def compute_response_code_to_return(j, response)
# default to the restClient value
rc = response.code
if Hash.try_convert(j)
# if the whole thing in an error response then pull out the contents
# of the error response.
if j.has_key?('ErrorResponse')
j=j['ErrorResponse']
end
# if there is a nested response code then use that.
if j.has_key?('responseCode')
rc = j['responseCode']
end
end
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: use response code: [#{rc}]"
rc
end
## Fix up the json a bit.
def standardize_json(j, response)
# if there is a nested response code then make sure it is an integer.
begin
if (!j.kind_of?(Array) && j.has_key?('responseCode'))
# returned value may have a response code element that needs to be converted to an integer
j['responseCode'] = j['responseCode'].to_i
end
rescue => err
logger.info "#{self.class.to_s}:#{__method__}:#{__LINE__}: conversion error "+j.inspect
# because of the error reset j back to the original json response.
j = JSON.parse(response)
end
j
end
# Entry point to make the URL request. It may end up making multiple calls to do_request since
# it may need to deal with authorization / token renewal and with big requests that make
# many calls in order to get a complete data set.
# In any case will return a WAPI wrapper result.
def get_request(request)
wrapped_response = do_request(request)
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: initial request: "+request.to_s
## If required try to renew the token and redo the request.
if wrapped_response.meta_status == WAPIStatus::UNKNOWN_ERROR &&
wrapped_response.result.respond_to?('http_code') &&
wrapped_response.result.http_code == WAPIStatus::HTTP_UNAUTHORIZED
wrapped_response = renew_token()
## if the token renewed ok then try the request again.
if wrapped_response.meta_status == WAPIStatus::SUCCESS
wrapped_response = do_request(request)
end
end
# If it didn't work just return that information.
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: wrapped_response: meta_status: #{wrapped_response.meta_status}"
if wrapped_response.meta_status != WAPIStatus::SUCCESS
return wrapped_response
end
## Ran a query successfully. See if got partial data and need to keep going.
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: wrapped_response: data length: #{wrapped_response.result.length}"
# See if there is a link header, if so get the rest of the data.
if wrapped_response.meta_more.length > 0
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: found link header: >>#{wrapped_response.meta_more}<<"
more_data = get_request(wrapped_response.meta_more)
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: more_data status: #{more_data.meta}"
if more_data.meta_status == WAPIStatus::SUCCESS
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: will merge data: initial wrapped_response: #{wrapped_response.result.length} more_data: #{more_data.result.length}"
wrapped_response = wrapped_response.append_json_results(more_data)
else
logger.error "#{self.class.to_s}:#{__method__}:#{__LINE__}: can not merge more_data: #{more_data.inspect}"
end
end
logger.debug "#{self.class.to_s}:#{__method__}:#{__LINE__}: final wrapped_response: result length: #{wrapped_response.result.length}"
wrapped_response
end
# Renew the current token. Will set the current @token value in the object
def renew_token
begin
logger.info("#{self.class.to_s}:#{__method__}:#{__LINE__}: token_server: #{@token_server}")
response = runTokenRenewalPost
## If it worked then parse the result as json. This is here to capture any JSON parsing exceptions.
if response.code == WAPIStatus::HTTP_SUCCESS
## will need to get the access_token below. If it is not JSON that is an error.
s = JSON.parse(response)
@token = s['access_token']
end
rescue Exception => exp
# If got an exception for the renewal then wrap that up to be returned.
logger.warn("#{self.class.to_s}:#{__method__}:#{__LINE__}: renewal post exception: "+exp.to_json)
logger.warn("#{self.class.to_s}:#{__method__}:#{__LINE__}: renewal post exception: "+exp.to_json+":"+exp.http_code.to_s)
return WAPIResultWrapper.new(exp.http_code, "EXCEPTION DURING TOKEN RENEWAL", exp)
end
## got no response so say that.
if response.nil?
logger.warn("#{self.class.to_s}:#{__method__}:#{__LINE__}: error renewing token: nil response ")
return WAPIResultWrapper.new(WAPIStatus::UNKNOWN_ERROR, "error renewing token: nil response", response)
end
# if got an error so say that.
if response.code != WAPIStatus::HTTP_SUCCESS
logger.warn("#{self.class.to_s}:#{__method__}:#{__LINE__}: error renewing token: response code: "+response.code)
return WAPIResultWrapper.new(WAPIStatus::UNKNOWN_ERROR, "error renewing token: response code", response)
end
# all ok
print_token = sprintf "%5s", @token
logger.debug("#{self.class.to_s}:#{__method__}:#{__LINE__}: renewed token: #{print_token}")
return WAPIResultWrapper.new(WAPIStatus::SUCCESS, "token renewed", response)
end
## Uses global class instance variables for these values for now
def runTokenRenewalPost
msg = Thread.current.to_s
renew = Stopwatch.new(msg)
renew.start
payload = "grant_type=#{@grant_type}&scope=#{@scope}&client_id=#{@key}&client_secret=#{@secret}"
response = RestClient.post @token_server,
payload,
{
:content_type => "application/x-www-form-urlencoded"
}
ensure
# make sure to print the elapsed time for the renewal.
renew.stop
logger.info("WAPI: renew token post: stopwatch: "+renew.pretty_summary)
end
end
| 42.44837 | 184 | 0.690417 | 3.21875 |
8e851f54c36f67d2c1da7349d63d10d1507fd4b5 | 1,620 | kt | Kotlin | src/test/kotlin/xyz/poeschl/kixelflut/DrawUtilsTest.kt | Poeschl/Kixelflut | a4a423c1ab6157db9fdf30e5400f0b192e73c955 | [
"Apache-2.0"
] | null | null | null | src/test/kotlin/xyz/poeschl/kixelflut/DrawUtilsTest.kt | Poeschl/Kixelflut | a4a423c1ab6157db9fdf30e5400f0b192e73c955 | [
"Apache-2.0"
] | null | null | null | src/test/kotlin/xyz/poeschl/kixelflut/DrawUtilsTest.kt | Poeschl/Kixelflut | a4a423c1ab6157db9fdf30e5400f0b192e73c955 | [
"Apache-2.0"
] | null | null | null | package xyz.poeschl.kixelflut
import org.assertj.core.api.Assertions.assertThat
import org.junit.jupiter.api.Test
import java.awt.Color
internal class DrawUtilsTest {
@Test
fun createHorizontalPixels() {
//WHEN
val startingPoint = Point(1, 1)
//THEN
val pixels = createHorizontalPixels(startingPoint, 5, Color.BLUE)
//VERIFY
assertThat(pixels).containsExactlyInAnyOrder(
Pixel(Point(1, 1), Color.BLUE),
Pixel(Point(2, 1), Color.BLUE),
Pixel(Point(3, 1), Color.BLUE),
Pixel(Point(4, 1), Color.BLUE),
Pixel(Point(5, 1), Color.BLUE)
)
}
@Test
fun createVerticalPixels() {
//WHEN
val startingPoint = Point(1, 1)
//THEN
val pixels = createVerticalPixels(startingPoint, 5, Color.BLUE)
//VERIFY
assertThat(pixels).containsExactlyInAnyOrder(
Pixel(Point(1, 1), Color.BLUE),
Pixel(Point(1, 2), Color.BLUE),
Pixel(Point(1, 3), Color.BLUE),
Pixel(Point(1, 4), Color.BLUE),
Pixel(Point(1, 5), Color.BLUE)
)
}
@Test
fun createRectPixels() {
//WHEN
val startingPoint = Point(1, 1)
//THEN
val pixels = createRectPixels(startingPoint, Pair(2, 2), Color.GREEN)
//VERIFY
assertThat(pixels).containsExactlyInAnyOrder(
Pixel(Point(1, 1), Color.GREEN),
Pixel(Point(1, 2), Color.GREEN),
Pixel(Point(2, 1), Color.GREEN),
Pixel(Point(2, 2), Color.GREEN)
)
}
}
| 25.714286 | 77 | 0.562963 | 3.234375 |
b1f0da1864c6d0cb454639bbc614a6072fc389f1 | 1,463 | kt | Kotlin | server/src/main/kotlin/org/cueglow/server/gdtf/RenderGdtfStateToDmx.kt | cueglow/glowdtf | b670ca9768cdd49551fa32af126498fe2ff261ba | [
"MIT"
] | null | null | null | server/src/main/kotlin/org/cueglow/server/gdtf/RenderGdtfStateToDmx.kt | cueglow/glowdtf | b670ca9768cdd49551fa32af126498fe2ff261ba | [
"MIT"
] | null | null | null | server/src/main/kotlin/org/cueglow/server/gdtf/RenderGdtfStateToDmx.kt | cueglow/glowdtf | b670ca9768cdd49551fa32af126498fe2ff261ba | [
"MIT"
] | null | null | null | package org.cueglow.server.gdtf
fun renderGdtfStateToDmx(chValues: List<Long>, dmxMode: GlowDmxMode): ByteArray {
val output = mutableListOf<Byte>()
dmxMode.multiByteChannels
.groupBy { it.dmxBreak }
.toSortedMap()
.forEach { dmxBreak ->
val startInd = output.size
//logger.info("doing dmxBreak ${dmxBreak.key}")
dmxBreak.value.forEach { ch ->
val chInd = dmxMode.multiByteChannels.indexOf(ch)
val chValue = chValues[chInd]
ch.offsets
.reversed() // from least to most significant
.forEachIndexed { offsetIndex, offset ->
//logger.info("doing offset $offset with offsetIndex $offsetIndex")
val byteValue = (chValue shr 8 * offsetIndex).toByte()
// grow to right size
val dmxChannelInd = startInd + offset - 1
//logger.info("#wanting to insert at $dmxChannelInd")
while (output.size <= dmxChannelInd) {
//logger.info("growing one element for channel ${ch.name}")
output.add(0)
}
//logger.info("grown to ${output.size}")
output[dmxChannelInd] = byteValue
}
}
}
return output.toByteArray()
} | 43.029412 | 91 | 0.505126 | 3.015625 |
b2fd53048e194cb59b5b4420a7e50d932868c531 | 1,878 | py | Python | glance/rpc/common.py | Quinton/glance | 7674bc8963a3bec21f719c48f40e8a3fc0846e6f | [
"Apache-2.0"
] | 5 | 2017-04-23T05:50:36.000Z | 2019-03-12T09:45:20.000Z | glance/rpc/common.py | Quinton/glance | 7674bc8963a3bec21f719c48f40e8a3fc0846e6f | [
"Apache-2.0"
] | null | null | null | glance/rpc/common.py | Quinton/glance | 7674bc8963a3bec21f719c48f40e8a3fc0846e6f | [
"Apache-2.0"
] | 2 | 2018-08-16T11:41:18.000Z | 2018-10-21T06:56:50.000Z | #!/usr/bin/env python
#encode=utf-8
#vim: tabstop=4 shiftwidth=4 softtabstop=4
#Created on 2013-8-17
#Copyright 2013 nuoqingyun xuqifeng
import copy
import logging
import traceback
class RPCException(Exception):
message = "An Unknown RPC related exception occurred"
def __init__(self, message = None, **kwargs):
self.kwargs = kwargs
if not message:
try:
message = self.message % kwargs
except Exception as e:
message = self.message
super(RPCException, self).__init__(message)
class RemoteError(RPCException):
message = "Remote error: %(exc_type)s %(value)s\n%(traceback)s"
def __init__(self, exc_type = None, value = None, traceback = None):
self.exc_type = exc_type
self.value = value
self.traceback = traceback
super(RemoteError, self).__init__(exc_type = exc_type,
value = value,
traceback = traceback)
class Timeout(RPCException):
"""
"""
message = "Timeout while waiting on RPC response"
class InvalidRPCConnectionReuse(RPCException):
message = "Invalid reuse of an RPC Connection"
class Connection(object):
def close(self):
raise NotImplementedError()
def create_consumer(self, topic, proxy, fanout = False):
raise NotImplementedError()
def create_worker(self, topic, proxy, pool_name):
raise NotImplementedError()
def consumer_in_thread(self):
raise NotImplementedError()
def _safe_log(log_func, msg, msg_data):
"""
"""
pass
def serialize_remote_exception(failure_info):
"""
"""
pass
def deserialize_remote_exception(conf, data):
"""
"""
pass
| 22.357143 | 73 | 0.596912 | 3.046875 |
3ba4c8cd331637d82b35af577e3a36bb72ed495d | 1,539 | swift | Swift | TestGitHub/Sources/Common/DateFormat.swift | olgavorona/TestGitHub | a17c64569a279409d1be001007f53ba951fa42b5 | [
"MIT"
] | null | null | null | TestGitHub/Sources/Common/DateFormat.swift | olgavorona/TestGitHub | a17c64569a279409d1be001007f53ba951fa42b5 | [
"MIT"
] | null | null | null | TestGitHub/Sources/Common/DateFormat.swift | olgavorona/TestGitHub | a17c64569a279409d1be001007f53ba951fa42b5 | [
"MIT"
] | null | null | null | //
// DateFormat.swift
// TestGitHub
//
// Created by Olga Vorona on 23.12.2020.
//
import Foundation
@objc public enum DateFormat: Int, CaseIterable, CustomStringConvertible {
case standard
case serverFull
var value: String {
switch self {
case .serverFull: return "yyyy-MM-dd'T'HH:mm:ssZ"
case .standard: return "dd.MM.yyyy"
}
}
public var description: String {
return value
}
}
class CachedDateFormatter {
private static let dispatchQueue = DispatchQueue(label: String(describing: CachedDateFormatter.self))
private static var formatters: [String: DateFormatter] = [:]
static func formatDate(_ date: Date, with format: DateFormat) -> String {
return fetchFormatter(format).string(from: date)
}
static func date(from stringDate: String, of format: DateFormat) -> Date? {
return fetchFormatter(format).date(from: stringDate)
}
static func fetchFormatter(_ format: DateFormat) -> DateFormatter {
return dispatchQueue.sync {
let key = format.description
if let formatter = formatters[key] {
return formatter
} else {
let formatter = DateFormatter()
formatter.timeZone = .current
formatter.locale = .current
formatter.dateFormat = format.value
formatters[key] = formatter
return formatter
}
}
}
}
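// Illustrative usage (hypothetical values):
//   CachedDateFormatter.formatDate(Date(), with: .standard)                     // e.g. "23.12.2020"
//   CachedDateFormatter.date(from: "2020-12-23T10:15:00+0000", of: .serverFull) // Date?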
| 26.084746 | 105 | 0.594542 | 3.3125 |
fa2875eb370191626b9bf75d3da59bf056e9ab65 | 5,925 | swift | Swift | SwiftyBase/Classes/Control/PartiallyEditField.swift | mspvirajpatel/SwiftyBase | f6ddc8094512b11c7656fb61a02525e59fb1d6e1 | [
"MIT"
] | 31 | 2017-08-30T12:43:41.000Z | 2022-02-08T14:37:31.000Z | SwiftyBase/Classes/Control/PartiallyEditField.swift | mspvirajpatel/SwiftyBase | f6ddc8094512b11c7656fb61a02525e59fb1d6e1 | [
"MIT"
] | 1 | 2017-09-07T04:16:01.000Z | 2017-09-07T04:56:01.000Z | SwiftyBase/Classes/Control/PartiallyEditField.swift | mspvirajpatel/SwiftyBase | f6ddc8094512b11c7656fb61a02525e59fb1d6e1 | [
"MIT"
] | 21 | 2017-08-30T12:43:42.000Z | 2021-04-02T06:42:54.000Z | //
// PartiallyEditField.swift
// SwiftyBase
//
// Created by Viraj Patel on 13/11/17.
//
import Foundation
public enum PreTextSide: Int {
case kNone
case kLeft
case kRight
}
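// Illustrative usage (hypothetical field and values):
//   let email = PartiallyEditField()
//   email.preTextSide = .kRight
//   email.setup(withPreText: "@example.edu", color: .lightGray)
// The suffix "@example.edu" then stays in place while the user edits the local part.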
/**
* The PartiallyEditField will generate pretext for the textfield.
*/
open class PartiallyEditField: UITextField {
open var prevText = ""
open var prevTextRange: UITextRange?
open var originalColor: UIColor?
open var preTextFont: UIFont?
open var preTextColor: UIColor?
open var preText = ""
open var atributedPlaceHolderString: NSMutableAttributedString?
/**
* The side that you want append for pretext.
*/
open var preTextSide: PreTextSide?
open var placeHolderColor: UIColor?
    /**
     * Sets up the field so that the given pre text is appended to the user's input
     * and kept in place while editing.
     *
     * @param preText The pre text that you want to append.
     */
open func setup(withPreText preText: String) {
self.preText = preText
prevText = text!
originalColor = textColor
if preTextSide == .kNone {
preTextSide = .kRight
}
addTarget(self, action: #selector(self.textChanged), for: .editingChanged)
createAtributedPlaceHolder()
}
    /**
     * Sets up the field with the given pre text and the color to use for it.
     *
     * @param preText The pre text that you want to append.
     * @param color The text color that you want for the pre text.
     */
open func setup(withPreText preText: String, color: UIColor) {
preTextColor = color
setup(withPreText: preText)
}
@objc open func textChanged(_ sender: PartiallyEditField) {
if !(text == "") {
if (text == preText) {
text = ""
prevText = ""
return
}
if (prevText == "") {
                if preTextSide == .kLeft {
                    text = "\(preText)\(text!)"
                }
                else {
                    text = "\(text!)\(preText)"
                }
prevText = text!
}
let range: NSRange? = (text! as NSString).range(of: preText)
if preTextSide == .kLeft {
if (text?.contains(preText))! && range?.location == 0 {
prevText = text!
}
else {
text = prevText
}
}
else {
if (self.text?.contains(self.preText))! {
if (Int(range!.location) + Int((range?.length)!)) >= (text?.lengthOfString)!
{
prevText = text!
}
else {
text = prevText
}
}
else {
text = prevText
}
}
createAtributedText()
changeRangeToBegin()
}
}
open func changeRangeToBegin() {
let range: NSRange? = (text! as NSString).range(of: preText)
let idx: Int = offset(from: beginningOfDocument, to: prevTextRange!.start)
if preTextSide == .kLeft {
if idx > ((range?.location)! + (range?.length)!) {
selectedTextRange = prevTextRange
}
else {
let end: UITextPosition? = position(from: beginningOfDocument, offset: (range?.location)!)
selectedTextRange = textRange(from: end ?? UITextPosition(), to: end ?? UITextPosition())
}
}
else {
if idx >= ((range?.location)! + (range?.length)!) {
let end: UITextPosition? = position(from: beginningOfDocument, offset: (range?.location)!)
selectedTextRange = textRange(from: end ?? UITextPosition(), to: end ?? UITextPosition())
}
else {
selectedTextRange = prevTextRange
}
}
}
open func createAtributedPlaceHolder() {
        // Use an empty string when there is no placeholder yet, so the attributed
        // placeholder does not end up containing the literal "Optional(...)" text.
        let basePlaceholder = (placeholder ?? "").replacingOccurrences(of: preText, with: "")
        var placeHolderText = "\(basePlaceholder)\(preText)"
        if preTextSide == .kLeft {
            placeHolderText = "\(preText)\(basePlaceholder)"
        }
attributedPlaceholder = atrributeText(fromText: placeHolderText, isText: false)
}
open func atrributeText(fromText str: String, isText: Bool) -> NSMutableAttributedString {
let range: NSRange? = (str as NSString).range(of: preText)
let string = NSMutableAttributedString(string: str)
if isText {
string.addAttribute(NSAttributedString.Key.foregroundColor, value: originalColor as Any, range: NSRange(location: 0, length: (str.count)))
}
else {
if (placeHolderColor != nil) {
string.addAttribute(NSAttributedString.Key.foregroundColor, value: placeHolderColor as Any, range: NSRange(location: 0, length: (str.count)))
}
}
if (preTextColor != nil) {
string.addAttribute(NSAttributedString.Key.foregroundColor, value: preTextColor as Any, range: range ?? NSRange())
}
return string
}
open func createAtributedText() {
prevTextRange = selectedTextRange
attributedText = atrributeText(fromText: text!, isText: true)
}
open func setPreTextSide(_ preTextSide: PreTextSide) {
self.preTextSide = preTextSide
createAtributedPlaceHolder()
}
open func setPlaceHolderColor(_ placeHolderColor: UIColor) {
self.placeHolderColor = placeHolderColor
createAtributedPlaceHolder()
}
}
| 32.554945 | 157 | 0.561013 | 3.078125 |
93716c5c56edfb87347d5de01fa12ab34bfa9756 | 3,034 | rs | Rust | zargo/src/command/download.rs | vikkkko/zincM | dd1fabf6d736ca2ac4884cca7a3929aadb9e7e28 | [
"Apache-2.0"
] | 291 | 2020-04-08T23:04:37.000Z | 2022-03-27T06:53:46.000Z | zargo/src/command/download.rs | vikkkko/zincM | dd1fabf6d736ca2ac4884cca7a3929aadb9e7e28 | [
"Apache-2.0"
] | 14 | 2020-07-12T23:14:11.000Z | 2021-10-04T15:51:06.000Z | zargo/src/command/download.rs | litezk/zinc | f28b41f172cfc14e6793ae5d6e37e59f1e406a7d | [
"Apache-2.0"
] | 34 | 2020-04-30T02:15:55.000Z | 2021-12-13T14:52:33.000Z | //!
//! The Zargo package manager `download` subcommand.
//!
use std::path::PathBuf;
use std::str::FromStr;
use structopt::StructOpt;
use crate::error::Error;
use crate::http::downloader::Downloader;
use crate::http::Client as HttpClient;
use crate::network::Network;
///
/// The Zargo package manager `download` subcommand.
///
#[derive(Debug, StructOpt)]
#[structopt(about = "Downloads a project from the specified network")]
pub struct Command {
/// Prints more logs, if passed several times.
#[structopt(short = "v", long = "verbose", parse(from_occurrences))]
pub verbosity: usize,
/// Suppresses output, if set.
#[structopt(short = "q", long = "quiet")]
pub quiet: bool,
/// If set, shows the list of uploaded projects and exits.
#[structopt(long = "list")]
pub list: bool,
/// Sets the project name to download.
#[structopt(long = "name")]
pub name: Option<String>,
/// Sets the project version to download.
#[structopt(long = "version")]
pub version: Option<semver::Version>,
/// Sets the network name, where the project must be downloaded from.
#[structopt(long = "network", default_value = "localhost")]
pub network: String,
    /// The path to the directory to download the project into.
#[structopt(parse(from_os_str))]
pub path: Option<PathBuf>,
}
impl Command {
///
/// A shortcut constructor.
///
pub fn new(
verbosity: usize,
quiet: bool,
list: bool,
name: Option<String>,
version: Option<semver::Version>,
network: Option<String>,
path: Option<PathBuf>,
) -> Self {
Self {
verbosity,
quiet,
list,
name,
version,
network: network
.unwrap_or_else(|| Network::from(zksync::Network::Localhost).to_string()),
path,
}
}
///
/// Executes the command.
///
pub async fn execute(self) -> anyhow::Result<()> {
let network = zksync::Network::from_str(self.network.as_str())
.map(Network::from)
.map_err(Error::NetworkInvalid)?;
let url = network
.try_into_url()
.map_err(Error::NetworkUnimplemented)?;
let http_client = HttpClient::new(url);
if self.list {
for project in http_client.metadata().await?.projects.into_iter() {
if !self.quiet {
println!("{}-{}", project.name, project.version);
}
}
return Ok(());
}
let name = self.name.ok_or(Error::ProjectNameMissing)?;
let version = self.version.ok_or(Error::ProjectVersionMissing)?;
let project_path = match self.path {
Some(path) => path,
None => PathBuf::from(name.as_str()),
};
let mut downloader = Downloader::new(&http_client, &project_path);
downloader.download_project(name, version).await?;
Ok(())
}
}
| 27.581818 | 90 | 0.577456 | 3.25 |
a18118a6ca2fde0b3e43c9d426c35bb3e334ba52 | 1,617 | go | Go | pkg/highlight/unicode.go | 5paceToast/micro | 9484e083244f6249df59b364f3584ce0e95cac79 | [
"Apache-2.0",
"MIT"
] | 21,446 | 2016-03-23T19:56:03.000Z | 2022-03-31T22:08:38.000Z | pkg/highlight/unicode.go | 5paceToast/micro | 9484e083244f6249df59b364f3584ce0e95cac79 | [
"Apache-2.0",
"MIT"
] | 1,958 | 2016-04-17T21:51:01.000Z | 2022-03-31T22:26:16.000Z | pkg/highlight/unicode.go | 5paceToast/micro | 9484e083244f6249df59b364f3584ce0e95cac79 | [
"Apache-2.0",
"MIT"
] | 1,577 | 2016-04-18T02:33:59.000Z | 2022-03-30T15:49:20.000Z | package highlight
import (
"unicode"
"unicode/utf8"
)
var minMark = rune(unicode.Mark.R16[0].Lo)
func isMark(r rune) bool {
// Fast path
if r < minMark {
return false
}
return unicode.In(r, unicode.Mark)
}
// DecodeCharacter returns the next character from an array of bytes
// A character is a rune along with any accompanying combining runes
func DecodeCharacter(b []byte) (rune, []rune, int) {
r, size := utf8.DecodeRune(b)
b = b[size:]
c, s := utf8.DecodeRune(b)
var combc []rune
for isMark(c) {
combc = append(combc, c)
size += s
b = b[s:]
c, s = utf8.DecodeRune(b)
}
return r, combc, size
}
// DecodeCharacterInString returns the next character from a string
// A character is a rune along with any accompanying combining runes
func DecodeCharacterInString(str string) (rune, []rune, int) {
r, size := utf8.DecodeRuneInString(str)
str = str[size:]
c, s := utf8.DecodeRuneInString(str)
var combc []rune
for isMark(c) {
combc = append(combc, c)
size += s
str = str[s:]
c, s = utf8.DecodeRuneInString(str)
}
return r, combc, size
}
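// Usage sketch (illustrative only, not from the original package):
//
//	r, combc, size := DecodeCharacterInString("e\u0301x")
//	// r == 'e', combc == []rune{0x0301}, size == 3: the base rune plus its
//	// combining accent were consumed, and the next character starts at str[size:].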
// CharacterCount returns the number of characters in a byte array
// Similar to utf8.RuneCount but for unicode characters
func CharacterCount(b []byte) int {
s := 0
for len(b) > 0 {
r, size := utf8.DecodeRune(b)
if !isMark(r) {
s++
}
b = b[size:]
}
return s
}
// CharacterCountInString returns the number of characters in a string
// Similar to utf8.RuneCountInString but for unicode characters
func CharacterCountInString(str string) int {
s := 0
for _, r := range str {
if !isMark(r) {
s++
}
}
return s
}
| 18.802326 | 68 | 0.672233 | 3.453125 |
c1ce13a49143e12b5f963e336148e6bc08da7f4b | 5,243 | lua | Lua | lua/gamemodes/survival/commands.lua | igor725/LuaClassic | 5a1642e907cba6d26ad2ed7660bc1e22289cb019 | [
"MIT"
] | 9 | 2019-06-09T16:17:44.000Z | 2021-11-23T02:46:38.000Z | lua/gamemodes/survival/commands.lua | codingwatching/LuaClassic | 92beacaf8505fdff82fd25215bb15443601f8b68 | [
"MIT"
] | 1 | 2020-12-03T23:38:44.000Z | 2021-08-08T04:52:30.000Z | lua/gamemodes/survival/commands.lua | codingwatching/LuaClassic | 92beacaf8505fdff82fd25215bb15443601f8b68 | [
"MIT"
] | 4 | 2019-06-13T22:09:54.000Z | 2022-03-04T14:20:20.000Z | --[[
Copyright (c) 2019 igor725, scaledteam
released under The MIT license http://opensource.org/licenses/MIT
]]
addCommand('give', function(isConsole, player, args)
if #args < 1 then return false end
local id, count, target
if isConsole then
if #args < 2 then return false end
target = getPlayerByName(args[1])
id = args[2]
count = args[3]
else
if #args == 2 then
id = args[1]
count = args[2]
elseif #args > 2 then
target = getPlayerByName(args[1])
id = args[2]
count = args[3]
elseif #args == 1 then
id = args[1]
count = SURV_STACK_SIZE
end
target = target or player
end
if not target then return MESG_PLAYERNF end
if player and target ~= player and not player:checkPermission('commands.give-others')then
return
end
id = tonumber(id)or 0
count = tonumber(count)or SURV_STACK_SIZE
count = math.min(math.max(count, 1), SURV_STACK_SIZE)
local given = survInvAddBlock(target, id, count)
if given > 0 then
target:holdThis(id)
return (CMD_GIVE):format(given, survGetBlockName(id), target)
end
end)
addCommand('heal', function(isConsole, player, args)
if isConsole and #args < 1 then return false end
local target = (#args > 0 and getPlayerByName(args[1]))or player
if not target then return MESG_PLAYERNF end
if player and target ~= player and not player:checkPermission('commands.heal-others')then
return
end
if target.health ~= SURV_MAX_HEALTH then
target.health = SURV_MAX_HEALTH
survUpdateHealth(target)
end
if target.oxygen ~= SURV_MAX_OXYGEN then
target.oxygen = SURV_MAX_OXYGEN
survUpdateOxygen(target)
end
return (CMD_HEAL):format(target)
end)
addCommand('drop', function(isConsole, player, args)
if isConsole then return CON_INGAMECMD end
local bId = player:getHeldBlock()
if bId < 1 then
return
end
if #args > 1 then
local target = getPlayerByName(args[1])or getPlayerByName(args[2])
local quantity = tonumber(args[2])or tonumber(args[1])or 1
local x, y, z = player:getPos()
if not target then
return MESG_PLAYERNF
end
if target == player then
return false
end
if distance(x, y, z, target:getPos()) > 6 then
return CMD_DROPTOOFAR
end
local inv1 = player.inventory
local inv2 = target.inventory
quantity = math.min(quantity, SURV_STACK_SIZE - inv2[bId])
if quantity < 1 then
return false
end
local name = survGetBlockName(bId)
if inv1[bId] >= quantity then
inv1[bId] = inv1[bId] - quantity
inv2[bId] = inv2[bId] + quantity
survUpdateBlockInfo(player)
survUpdateBlockInfo(target)
survUpdateInventory(player, bId)
survUpdateInventory(target, bId)
target:sendMessage((MESG_DROP):format(quantity, name, player))
return (CMD_DROPSUCCP):format(quantity, name, target)
else
return (CMD_DROPNE):format(quantity, name)
end
end
return false
end)
addCommand('kill', function(isConsole, player, args)
if isConsole and #args < 1 then return false end
local target = (#args > 0 and getPlayerByName(args[1]))or player
if not target then return MESG_PLAYERNF end
if player and target ~= player and not player:checkPermission('commands.kill-others')then
return
end
if not survDamage(nil, target, SURV_MAX_HEALTH, 0)then
return MESG_NODMG
end
end)
addCommand('god', function(isConsole, player, args)
if isConsole and #args < 1 then return false end
local target = getPlayerByName(args[1])or player
if not target then return MESG_PLAYERNF end
if player and target ~= player and not player:checkPermission('commands.god-others')then
return
end
target.isInGodmode = not target.isInGodmode
local state = (target.isInGodmode and ST_ON)or ST_OFF
local h = target.isInGodmode and 1 or 0
target:hackControl(h, h, h, 0, 1, -1)
for i = 1, SURV_INV_SIZE do
if isValidBlockID(i)then
survUpdatePermission(target, i)
end
end
if target.isInGodmode then
survPauseTimers(target)
target.inCraftMenu = false
target.health = SURV_MAX_HEALTH
target.oxygen = SURV_MAX_OXYGEN
else
survResumeTimers(target)
end
survUpdateHealth(target)
survUpdateOxygen(target)
survUpdateInventory(target)
survUpdateBlockInfo(target)
return (CMD_GOD):format(state, target)
end)
addCommand('home', function(isConsole, player, args)
if isConsole then return CON_INGAMECMD end
local hp = player.homepos
local ha = player.homeang
local hw = player.homeworld
if hp and ha and hw then
local wld = getWorld(hw)
if not wld then
return WORLD_NF
end
if player:isInWorld(wld)then
player:teleportTo(hp.x, hp.y, hp.z, ha.yaw, ha.pitch)
else
player:changeWorld(wld, true, hp.x, hp.y, hp.z, ha.yaw, ha.pitch)
end
else
return CMD_HOMENF
end
end)
addCommand('sethome', function(isConsole, player, args)
if isConsole then return CON_INGAMECMD end
local hp = player.homepos
local ha = player.homeang
if hp and ha then
hp.x, hp.y, hp.z = player:getPos()
ha.yaw, ha.pitch = player:getEyePos()
else
hp = newVector(player:getPos())
ha = newAngle(player:getEyePos())
end
player.homepos = hp
player.homeang = ha
player.homeworld = player.worldName
return CMD_HOMESET
end)
addCommand('pvp', function(isConsole, player, args)
player.pvpmode = not player.pvpmode
return (CMD_PVP):format((player.pvpmode and ST_ON)or ST_OFF)
end)
| 24.966667 | 90 | 0.730879 | 3.03125 |
7f0eb80d8aa6256c4e622a142b8edadd7657c4c8 | 10,592 | rs | Rust | base/src/error.rs | Snarpix/gluon | 25d949b8836d0cd28adc027f8d0e4e20a102aca1 | [
"MIT"
] | 2,261 | 2016-10-07T19:32:31.000Z | 2022-03-31T18:35:43.000Z | base/src/error.rs | Snarpix/gluon | 25d949b8836d0cd28adc027f8d0e4e20a102aca1 | [
"MIT"
] | 636 | 2016-10-08T01:43:31.000Z | 2022-02-04T18:12:38.000Z | base/src/error.rs | Snarpix/gluon | 25d949b8836d0cd28adc027f8d0e4e20a102aca1 | [
"MIT"
] | 156 | 2016-10-08T19:17:17.000Z | 2022-02-16T07:53:21.000Z | //! Module containing a few common error wrappers which allows more information to be saved for
//! later display to the user
use std::any::Any;
use std::error::Error as StdError;
use std::fmt;
use std::iter::{Extend, FromIterator};
use std::ops::{Index, IndexMut};
use std::slice;
use std::str;
use std::vec;
use codespan_reporting::diagnostic::{Diagnostic, Label};
use crate::{
pos::{BytePos, Spanned},
source::FileId,
};
/// An error type which can represent multiple errors.
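/// A minimal usage sketch (illustrative, not taken from the crate's docs):
///
/// ```ignore
/// let mut errors = Errors::new();
/// errors.push("first problem");
/// errors.push("second problem");
/// assert!(errors.has_errors());
/// println!("{}", errors); // the errors are printed on separate lines
/// ```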
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct Errors<T> {
errors: Vec<T>,
}
impl<T> Default for Errors<T> {
fn default() -> Self {
Errors::new()
}
}
impl<T> Errors<T> {
/// Creates a new, empty `Errors` instance.
pub fn new() -> Errors<T> {
Errors::from(Vec::new())
}
/// Returns true if `self` contains any errors
pub fn has_errors(&self) -> bool {
!self.is_empty()
}
/// The number of errors in the error list
pub fn len(&self) -> usize {
self.errors.len()
}
pub fn is_empty(&self) -> bool {
self.errors.is_empty()
}
/// Adds an error to `self`
pub fn push(&mut self, t: T) {
self.errors.push(t);
}
/// Pops and error off the error list
pub fn pop(&mut self) -> Option<T> {
self.errors.pop()
}
pub fn iter(&self) -> slice::Iter<T> {
self.errors.iter()
}
pub fn drain(
&mut self,
range: impl std::ops::RangeBounds<usize>,
) -> impl Iterator<Item = T> + '_ {
self.errors.drain(range)
}
}
impl<T> Index<usize> for Errors<T> {
type Output = T;
fn index(&self, index: usize) -> &T {
&self.errors[index]
}
}
impl<T> IndexMut<usize> for Errors<T> {
fn index_mut(&mut self, index: usize) -> &mut T {
&mut self.errors[index]
}
}
impl<T> Extend<T> for Errors<T> {
fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
self.errors.extend(iter);
}
}
impl<T> From<Vec<T>> for Errors<T> {
fn from(errors: Vec<T>) -> Errors<T> {
Errors { errors }
}
}
impl<T> FromIterator<T> for Errors<T> {
fn from_iter<Iter: IntoIterator<Item = T>>(iter: Iter) -> Errors<T> {
Errors {
errors: iter.into_iter().collect(),
}
}
}
impl<T> Into<Vec<T>> for Errors<T> {
fn into(self) -> Vec<T> {
self.errors
}
}
impl<T> IntoIterator for Errors<T> {
type Item = T;
type IntoIter = vec::IntoIter<T>;
fn into_iter(self) -> vec::IntoIter<T> {
self.errors.into_iter()
}
}
impl<'a, T> IntoIterator for &'a Errors<T> {
type Item = &'a T;
type IntoIter = slice::Iter<'a, T>;
fn into_iter(self) -> slice::Iter<'a, T> {
self.errors.iter()
}
}
impl<'a, T> IntoIterator for &'a mut Errors<T> {
type Item = &'a mut T;
type IntoIter = slice::IterMut<'a, T>;
fn into_iter(self) -> slice::IterMut<'a, T> {
self.errors.iter_mut()
}
}
impl<T: fmt::Display> fmt::Display for Errors<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for (i, error) in self.errors.iter().enumerate() {
write!(f, "{}", error)?;
// Errors are assumed to not have a newline at the end so we add one to keep errors on
// separate lines and one to space them out
if i + 1 != self.errors.len() {
writeln!(f)?;
writeln!(f)?;
}
}
Ok(())
}
}
impl<T: fmt::Display + fmt::Debug + Any> StdError for Errors<T> {
fn description(&self) -> &str {
"Errors"
}
}
/// Error type which contains information of which file and where in the file the error occurred
#[derive(Clone, Debug)]
pub struct InFile<E> {
source: crate::source::CodeMap,
error: Errors<Spanned<E, BytePos>>,
}
impl<E> Eq for InFile<E> where E: Eq {}
impl<E> PartialEq for InFile<E>
where
E: PartialEq,
{
fn eq(&self, other: &Self) -> bool {
self.error == other.error
}
}
impl<E> std::hash::Hash for InFile<E>
where
E: std::hash::Hash,
{
#[inline(always)]
fn hash<H>(&self, state: &mut H)
where
H: std::hash::Hasher,
{
self.error.hash(state)
}
}
impl<E: fmt::Display> InFile<E> {
/// Creates a new `InFile` error which states that the error occurred in `file` using the file
/// contents in `source` to provide a context to the span.
pub fn new(source: crate::source::CodeMap, error: Errors<Spanned<E, BytePos>>) -> InFile<E> {
let err = InFile { source, error };
// Verify that the source name can be accessed
debug_assert!({
err.source_name();
true
});
err
}
pub fn source_name(&self) -> &str {
self.source
.get(self.error[0].span.start())
.unwrap_or_else(|| {
panic!(
"Source file does not exist in associated code map. Error: {}",
self.error
)
})
.name()
}
pub fn source(&self) -> &crate::source::CodeMap {
&self.source
}
pub fn errors(&self) -> &Errors<Spanned<E, BytePos>> {
&self.error
}
pub fn into_errors(self) -> Errors<Spanned<E, BytePos>> {
self.error
}
pub fn emit_string(&self) -> crate::source::Result<String>
where
E: AsDiagnostic,
{
let mut output = Vec::new();
self.emit(&mut ::codespan_reporting::term::termcolor::NoColor::new(
&mut output,
))?;
Ok(String::from_utf8(output).unwrap())
}
pub fn emit(
&self,
writer: &mut dyn ::codespan_reporting::term::termcolor::WriteColor,
) -> crate::source::Result<()>
where
E: AsDiagnostic,
{
let iter = self
.error
.iter()
.map(|error| error.as_diagnostic(&self.source))
.enumerate();
for (i, diagnostic) in iter {
if i != 0 {
writeln!(writer)?;
}
::codespan_reporting::term::emit(
&mut *writer,
&Default::default(),
&self.source,
&diagnostic,
)?;
}
Ok(())
}
}
impl<E: fmt::Display + AsDiagnostic> fmt::Display for InFile<E> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut buffer = Vec::new();
{
let mut writer = ::codespan_reporting::term::termcolor::NoColor::new(&mut buffer);
self.emit(&mut writer).map_err(|_| fmt::Error)?;
}
write!(f, "{}", str::from_utf8(&buffer).unwrap())
}
}
impl<E: fmt::Display + fmt::Debug + Any + AsDiagnostic> StdError for InFile<E> {
fn description(&self) -> &str {
"Error in file"
}
}
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub struct Help<E, H> {
pub error: E,
pub help: Option<H>,
}
impl<E, H> fmt::Display for Help<E, H>
where
E: fmt::Display,
H: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.error)?;
if let Some(ref help) = self.help {
writeln!(f)?;
write!(f, "help: {}", help)?;
}
Ok(())
}
}
impl<E, H> From<E> for Help<E, H> {
fn from(error: E) -> Help<E, H> {
Help { error, help: None }
}
}
pub trait AsDiagnostic {
fn as_diagnostic(&self, map: &crate::source::CodeMap) -> Diagnostic<FileId>;
}
impl<E> AsDiagnostic for Spanned<E, BytePos>
where
E: AsDiagnostic,
{
fn as_diagnostic(&self, map: &crate::source::CodeMap) -> Diagnostic<FileId> {
let mut diagnostic = self.value.as_diagnostic(map);
for label in &mut diagnostic.labels {
if label.file_id == FileId::default() {
label.file_id = self.span.start();
}
if label.range == (0..0) {
if let Some(range) = self.span.to_range(map) {
label.range = range;
}
}
}
if diagnostic.labels.is_empty() {
if let Some(range) = self.span.to_range(map) {
diagnostic
.labels
.push(Label::primary(self.span.start(), range));
}
}
diagnostic
}
}
impl<E, H> AsDiagnostic for Help<E, H>
where
E: AsDiagnostic,
H: fmt::Display,
{
fn as_diagnostic(&self, map: &crate::source::CodeMap) -> Diagnostic<FileId> {
let mut diagnostic = self.error.as_diagnostic(map);
if let Some(ref help) = self.help {
diagnostic.labels.push(
Label::secondary(
diagnostic
.labels
.last()
.map(|label| label.file_id)
.unwrap_or_default(),
0..0,
)
.with_message(help.to_string()),
);
}
diagnostic
}
}
impl AsDiagnostic for Box<dyn ::std::error::Error + Send + Sync> {
fn as_diagnostic(&self, _map: &crate::source::CodeMap) -> Diagnostic<FileId> {
Diagnostic::error().with_message(self.to_string())
}
}
pub type SalvageResult<T, E> = std::result::Result<T, Salvage<T, E>>;
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Salvage<T, E> {
pub value: Option<T>,
pub error: E,
}
impl<T, E> fmt::Display for Salvage<T, E>
where
E: fmt::Display,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.error)
}
}
impl<T, E> Salvage<T, E> {
pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Salvage<U, E> {
Salvage {
value: self.value.map(f),
error: self.error,
}
}
pub fn map_err<U>(self, f: impl FnOnce(E) -> U) -> Salvage<T, U> {
Salvage {
value: self.value,
error: f(self.error),
}
}
pub fn get_value(self) -> std::result::Result<T, E> {
self.value.ok_or(self.error)
}
pub fn err_into<F>(self) -> Salvage<T, F>
where
F: From<E>,
{
let Salvage { value, error } = self;
Salvage {
value,
error: error.into(),
}
}
}
impl<T, E> From<E> for Salvage<T, E> {
fn from(error: E) -> Self {
Salvage { value: None, error }
}
}
impl<T, E> From<Salvage<T, InFile<E>>> for InFile<E> {
fn from(s: Salvage<T, InFile<E>>) -> Self {
s.error
}
}
| 24.293578 | 98 | 0.525491 | 3.140625 |
8ac88f7873431cf53dfcb46d44cf51970fa164a5 | 6,279 | kt | Kotlin | @old/korge-spriter/src/commonMain/kotlin/com/soywiz/korge/ext/spriter/com/brashmonkey/spriter/Calculator.kt | jfbilodeau/korge | fb2be4c25a32fa6a4a200badcf9b610f115f763c | [
"Apache-2.0"
] | 1,212 | 2017-08-28T04:02:19.000Z | 2022-03-30T04:32:23.000Z | @old/korge-spriter/src/commonMain/kotlin/com/soywiz/korge/ext/spriter/com/brashmonkey/spriter/Calculator.kt | jfbilodeau/korge | fb2be4c25a32fa6a4a200badcf9b610f115f763c | [
"Apache-2.0"
] | 353 | 2017-10-26T10:39:30.000Z | 2022-03-22T14:08:55.000Z | @old/korge-spriter/src/commonMain/kotlin/com/soywiz/korge/ext/spriter/com/brashmonkey/spriter/Calculator.kt | jfbilodeau/korge | fb2be4c25a32fa6a4a200badcf9b610f115f763c | [
"Apache-2.0"
] | 77 | 2017-10-19T23:10:06.000Z | 2022-02-22T22:42:40.000Z | package com.soywiz.korge.ext.spriter.com.brashmonkey.spriter
import com.soywiz.korio.*
import com.soywiz.korma.geom.*
import kotlin.math.*
/**
* A utility class which provides methods to calculate Spriter specific issues,
* like linear interpolation and rotation around a parent object.
* Other interpolation types are coming with the next releases of Spriter.
* @author Trixt0r
*/
object Calculator {
const val PI = kotlin.math.PI.toFloat()
const val NO_SOLUTION = -1f
/**
* Calculates the smallest difference between angle a and b.
* @param a first angle (in degrees)
* *
* @param b second angle (in degrees)
* *
	 * @return Smallest difference between a and b (between 180° and -180°).
*/
fun angleDifference(a: Float, b: Float): Float {
return ((a - b) % 360 + 540) % 360 - 180
}
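	// Worked example (illustrative, not from the original source):
	// angleDifference(350f, 10f) == -20f, i.e. the shortest signed rotation
	// between 350° and 10° is 20° in the negative direction, never more than ±180°.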
/**
* @param x1 x coordinate of first point.
* *
* @param y1 y coordinate of first point.
* *
* @param x2 x coordinate of second point.
* *
* @param y2 y coordinate of second point.
* *
* @return Angle between the two given points.
*/
fun angleBetween(x1: Float, y1: Float, x2: Float, y2: Float): Float {
return Angle.degreesToRadians(atan2((y2 - y1).toDouble(), (x2 - x1).toDouble())).toFloat()
}
/**
* @param x1 x coordinate of first point.
* *
* @param y1 y coordinate of first point.
* *
* @param x2 x coordinate of second point.
* *
* @param y2 y coordinate of second point.
* *
* @return Distance between the two given points.
*/
fun distanceBetween(x1: Float, y1: Float, x2: Float, y2: Float): Float {
val xDiff = x2 - x1
val yDiff = y2 - y1
return sqrt(xDiff * xDiff + yDiff * yDiff)
}
/**
* Solves the equation a*x^3 + b*x^2 + c*x +d = 0.
* @param a
* *
* @param b
* *
* @param c
* *
* @param d
* *
	 * @return the solution of the cubic function if it belongs to [0, 1], [.NO_SOLUTION] otherwise.
*/
fun solveCubic(a: Float, b: Float, c: Float, d: Float): Float {
var b = b
var c = c
var d = d
if (a == 0f) return solveQuadratic(b, c, d)
if (d == 0f) return 0f
b /= a
c /= a
d /= a
val squaredB = squared(b)
var q = (3f * c - squaredB) / 9f
val r = (-27f * d + b * (9f * c - 2f * squaredB)) / 54f
val disc = cubed(q) + squared(r)
val term1 = b / 3f
if (disc > 0) {
val sqrtDisc = sqrt(disc)
var s = r + sqrtDisc
s = if (s < 0) -cubicRoot(-s) else cubicRoot(s)
var t = r - sqrtDisc
t = if (t < 0) -cubicRoot(-t) else cubicRoot(t)
val result = -term1 + s + t
if (result in 0.0..1.0) return result
} else if (disc == 0f) {
val r13 = if (r < 0) -cubicRoot(-r) else cubicRoot(r)
var result = -term1 + 2f * r13
if (result in 0.0..1.0) return result
result = -(r13 + term1)
if (result in 0.0..1.0) return result
} else {
q = -q
var dum1 = q * q * q
dum1 = acos(r / sqrt(dum1))
val r13 = 2f * sqrt(q)
var result = -term1 + r13 * cos(dum1 / 3f)
if (result in 0.0..1.0) return result
result = -term1 + r13 * cos((dum1 + 2f * PI) / 3f)
if (result in 0.0..1.0) return result
result = -term1 + r13 * cos((dum1 + 4f * PI) / 3f)
if (result in 0.0..1.0) return result
}
return NO_SOLUTION
}
/**
* Solves the equation a*x^2 + b*x + c = 0
* @param a
* *
* @param b
* *
* @param c
* *
	 * @return the solution for the quadratic function if it belongs to [0, 1], [.NO_SOLUTION] otherwise.
*/
fun solveQuadratic(a: Float, b: Float, c: Float): Float {
val squaredB = squared(b)
val twoA = 2 * a
val fourAC = 4f * a * c
val sqrt = sqrt(squaredB - fourAC)
var result = (-b + sqrt) / twoA
if (result >= 0 && result <= 1) return result
result = (-b - sqrt) / twoA
if (result >= 0 && result <= 1) return result
return NO_SOLUTION
}
/**
* Returns the square of the given value.
* @param f the value
* *
* @return the square of the value
*/
fun squared(f: Float): Float {
return f * f
}
/**
* Returns the cubed value of the given one.
* @param f the value
* *
* @return the cubed value
*/
fun cubed(f: Float): Float {
return f * f * f
}
/**
* Returns the cubic root of the given value.
* @param f the value
* *
* @return the cubic root
*/
fun cubicRoot(f: Float): Float {
return f.toDouble().pow((1f / 3f).toDouble()).toFloat()
}
/**
* Returns the square root of the given value.
* @param x the value
* *
* @return the square root
*/
fun sqrt(x: Float): Float {
return kotlin.math.sqrt(x.toDouble()).toFloat()
}
/**
* Returns the arc cosine at the given value.
* @param x the value
* *
* @return the arc cosine
*/
fun acos(x: Float): Float {
return kotlin.math.acos(x.toDouble()).toFloat()
}
private val SIN_BITS = 14 // 16KB. Adjust for accuracy.
private val SIN_MASK = (-1 shl SIN_BITS).inv()
private val SIN_COUNT = SIN_MASK + 1
private val radFull = PI * 2
private val degFull = 360f
private val radToIndex = SIN_COUNT / radFull
private val degToIndex = SIN_COUNT / degFull
/** multiply by this to convert from radians to degrees */
val radiansToDegrees = 180f / PI
val radDeg = radiansToDegrees
/** multiply by this to convert from degrees to radians */
val degreesToRadians = PI / 180
val degRad = degreesToRadians
private object Sin {
internal val table = FloatArray(SIN_COUNT).apply {
for (i in 0 until SIN_COUNT)
this[i] = kotlin.math.sin(((i + 0.5f) / SIN_COUNT * radFull).toDouble()).toFloat()
var i = 0
while (i < 360) {
this[(i * degToIndex).toInt() and SIN_MASK] =
kotlin.math.sin((i * degreesToRadians).toDouble()).toFloat()
i += 90
}
}
}
/** Returns the sine in radians from a lookup table. */
fun sin(radians: Float): Float {
return Sin.table[(radians * radToIndex).toInt() and SIN_MASK]
}
/** Returns the cosine in radians from a lookup table. */
fun cos(radians: Float): Float {
return Sin.table[((radians + PI / 2) * radToIndex).toInt() and SIN_MASK]
}
/** Returns the sine in radians from a lookup table. */
fun sinDeg(degrees: Float): Float {
return Sin.table[(degrees * degToIndex).toInt() and SIN_MASK]
}
/** Returns the cosine in radians from a lookup table. */
fun cosDeg(degrees: Float): Float {
return Sin.table[((degrees + 90) * degToIndex).toInt() and SIN_MASK]
}
}
| 24.916667 | 99 | 0.619525 | 3.328125 |
e0ccf24b6e7f58f48d1befa403f02482ef446a04 | 12,686 | lua | Lua | ESX/server.lua | 0resmon/0R-admin | 25c6e0a1c0b9befdee372c50c7cb33cb28363ed1 | [
"MIT"
] | 8 | 2021-09-27T19:24:56.000Z | 2022-03-19T00:10:46.000Z | ESX/server.lua | 0resmon/0R-admin | 25c6e0a1c0b9befdee372c50c7cb33cb28363ed1 | [
"MIT"
] | 1 | 2021-10-03T18:41:12.000Z | 2021-10-03T18:41:12.000Z | ESX/server.lua | 0resmon/0R-admin | 25c6e0a1c0b9befdee372c50c7cb33cb28363ed1 | [
"MIT"
] | 9 | 2021-09-27T19:38:32.000Z | 2022-03-13T19:38:54.000Z | ESX = nil
TriggerEvent('esx:getSharedObject', function(obj) ESX = obj end)
AddEventHandler('playerConnecting', function(name, setCallback, defer)
defer.defer()
local banned = false
local src = source
defer.update(L("CHECKING_BANLIST"))
local bans = exports.ghmattimysql:executeSync("SELECT identifier,reason FROM `0r-bans` ", { })
if bans and bans[1] then
for k, v in pairs(bans) do
if GetPlayerIdentifier(src, 0) == v.identifier then
banned = true
defer.done(L('URE_BANNED')..v.reason..' ')
return
end
end
end
if banned == false then defer.done() end
end)
ESX.RegisterServerCallback('0R-admin:cp', function(source, cb)
if PermCheck(source, 'auth') then
cb({ action = "auth", auth = "acsess", lang = Lang.Languages[Lang.Language] })
else
cb(nil)
end
end)
ESX.RegisterServerCallback('0R-admin:GET.PLAYERS', function(source, cb)
if PermCheck(source, 'playerlist') then
local Players = {}
for k, v in pairs(ESX.GetPlayers()) do
local xPlayer = ESX.GetPlayerFromId(v)
Players[#Players+1] = {
id = v,
name = xPlayer.getName()
}
end
cb(Players)
end
end)
ESX.RegisterServerCallback('0R-admin:GET.BANS', function(source, cb)
if PermCheck(source, 'bansshow') then
local Players = {}
local bans = exports.ghmattimysql:executeSync("SELECT id,identifier FROM `0r-bans` ", { })
if bans and bans[1] then
for k, v in pairs(bans) do
Players[#Players+1] = {
identifier = v.identifier,
id = v.id
}
end
end
cb(Players)
end
end)
RegisterServerEvent('0R.ADMIN.DELETEBAN')
AddEventHandler('0R.ADMIN.DELETEBAN', function(id)
if PermCheck(source, 'bansdelete') then
exports.ghmattimysql:executeSync("DELETE FROM `0r-bans` WHERE id='"..id.."' ", { })
end
end)
RegisterServerEvent('0R.ADMIN.REVIVE')
AddEventHandler('0R.ADMIN.REVIVE', function(id)
if PermCheck(source, 'revive') then
if id == "self" then
id = tonumber(source)
end
TriggerClientEvent('esx_ambulancejob:revive', id)
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('PROCESS') })
end
end)
RegisterServerEvent('0R.ADMIN.BRING')
AddEventHandler('0R.ADMIN.BRING', function(id)
if PermCheck(source, 'bring') then
SetEntityCoords(GetPlayerPed(source), GetEntityCoords(GetPlayerPed(id)))
end
end)
RegisterServerEvent('0R.ADMIN.GOTO')
AddEventHandler('0R.ADMIN.GOTO', function(id)
if PermCheck(source, 'goto') then
SetEntityCoords(GetPlayerPed(id), GetEntityCoords(GetPlayerPed(source)))
end
end)
RegisterServerEvent('0R.ADMIN.HEAL')
AddEventHandler('0R.ADMIN.HEAL', function(id)
if PermCheck(source, 'heal') then
if id == "self" then
id = tonumber(source)
end
TriggerClientEvent('0R.ADMIN.HEAL', id)
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('PROCESS') })
end
end)
RegisterServerEvent('0R.ADMIN.KILL')
AddEventHandler('0R.ADMIN.KILL', function(id)
if PermCheck(source, 'kill') then
TriggerClientEvent('0R.CL.ADMIN.KILL', id)
end
end)
RegisterServerEvent('0R.ADMIN.JUDGE')
AddEventHandler('0R.ADMIN.JUDGE', function(id)
if PermCheck(source, 'judge') then
for k, v in pairs(ESX.GetPlayers()) do
TriggerEvent('esx_communityservice:endCommunityServiceCommand', v)
end
end
end)
RegisterServerEvent('0R.ADMIN.KICK')
AddEventHandler('0R.ADMIN.KICK', function(id, reason)
if PermCheck(source, 'kick') then
DropPlayer(id, "[0R-ADMIN] | "..L('KICKED_1').." "..GetPlayerName(source).." \n "..L('KICKED_2')..": "..reason);
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('PROCESS') })
end
end)
RegisterServerEvent('0R.ADMIN.BAN')
AddEventHandler('0R.ADMIN.BAN', function(id, reason)
if PermCheck(source, 'kick') then
local xPlayer = ESX.GetPlayerFromId(id)
DropPlayer(id, "[0R-ADMIN] | "..L('KICKED_1').." "..GetPlayerName(source).." \n "..L('KICKED_2')..": "..reason);
exports.ghmattimysql:execute("INSERT INTO `0r-bans` (identifier, reason) VALUES ('"..xPlayer.identifier.."', '"..reason.."')", { })
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('PROCESS') })
end
end)
local lastfreeze = false
RegisterServerEvent('0R.ADMIN.FREEZE')
AddEventHandler('0R.ADMIN.FREEZE', function(id)
if PermCheck(source, 'freeze') then
lastfreeze = not lastfreeze
FreezeEntityPosition(GetPlayerPed(id),lastfreeze)
local freeze = "unfreezed"
if lastfreeze then
freeze = "freezed"
end
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('PLAYER').." "..freeze })
end
end)
RegisterServerEvent('0R.ADMIN.OPENINV')
AddEventHandler('0R.ADMIN.OPENINV', function(id)
if PermCheck(source, 'openinv') then
if Config.InventoryType == "qb-inventory" then
TriggerEvent("inventory:server:OpenInventory", "otherplayer", id)
elseif Config.InventoryType == "m3_inventoryhud" then
TriggerClientEvent("m3:inventoryhud:client:openPlayerInventory", source, id, 'admin')
end
end
end)
RegisterServerEvent('0R.ADMIN.GIVEITEM')
AddEventHandler('0R.ADMIN.GIVEITEM', function(id, item, count)
if PermCheck(source, 'giveitem') then
local xPlayer = ESX.GetPlayerFromId(id)
xPlayer.addInventoryItem(item, tonumber(count))
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('PROCESS') })
end
end)
RegisterServerEvent('0R.ADMIN.SETJOB')
AddEventHandler('0R.ADMIN.SETJOB', function(id, job, grade)
if PermCheck(source, 'setjob') then
local xPlayer = ESX.GetPlayerFromId(id)
if ESX.DoesJobExist(job, tonumber(grade)) then
xPlayer.setJob(job, tonumber(grade))
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('JOB') })
end
end
end)
RegisterServerEvent('0R.ADMIN.TAKESS')
AddEventHandler('0R.ADMIN.TAKESS', function(id)
if PermCheck(source, 'takess') then
TriggerClientEvent("0R.ADMIN.REQUESTSS", id, source)
end
end)
RegisterServerEvent('0R.ADMIN.OFFERSS')
AddEventHandler('0R.ADMIN.OFFERSS', function(id, url)
if PermCheck(source, 'takess') then
TriggerClientEvent("0R.ADMIN.SENDNUI", id, { action = "request_conf", url = url })
end
end)
RegisterServerEvent('0R.ADMIN.CLEARINV')
AddEventHandler('0R.ADMIN.CLEARINV', function(id)
if PermCheck(source, 'clearinv') then
local xPlayer = ESX.GetPlayerFromId(source)
for k, v in pairs(xPlayer.inventory) do
if (v.count > 0) then
xPlayer.removeInventoryItem(v.name, v.count)
end
end
end
end)
RegisterServerEvent('0R.ADMIN.GIVECAR')
AddEventHandler('0R.ADMIN.GIVECAR', function(id, carmodel, own)
if PermCheck(source, 'givecar') then
local vehicle = CreateVehicle(carmodel, GetEntityCoords(GetPlayerPed(id)), GetEntityHeading(GetPlayerPed(id)), true, false)
SetPedIntoVehicle(GetPlayerPed(id), vehicle, -1)
if own then
if PermCheck(source, 'giveownedcar') then
Wait(1000)
TriggerClientEvent('0R.CL.GET.VEHICLEPROPS', id, vehicle)
end
end
end
end)
RegisterServerEvent('0R.SET.VEHICLEPROPS')
AddEventHandler('0R.SET.VEHICLEPROPS', function(vehicle, props)
if PermCheck(source, 'giveownedcar') then
local xPlayer = ESX.GetPlayerFromId(source)
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('PROCESS') })
exports.ghmattimysql:execute("INSERT INTO owned_vehicles (owner, plate, vehicle) VALUES ('"..xPlayer.identifier.."', '"..props.plate.."', '"..json.encode(props).."')", { })
end
end)
RegisterServerEvent('0R.ADMIN.WIPE')
AddEventHandler('0R.ADMIN.WIPE', function(id)
if PermCheck(source, 'wipe') then
local xPlayer = ESX.GetPlayerFromId(id)
local wipetables = {
{ table = "owned_vehicles", column = "owner"},
{ table = "users", column = "identifier"},
{ table = "datastore_data", column = "owner"},
{ table = "user_licenses", column = "owner"},
}
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('WIPE1') })
for k, v in pairs(wipetables) do
exports.ghmattimysql:executeSync("DELETE FROM "..v.table.." WHERE "..v.column.." like '%"..xPlayer.identifier.."%' ", { })
Wait(5000)
if k == #wipetables then
DropPlayer(id, "[0R-ADMIN] | "..L('WIPE2'));
break;
end
end
end
end)
RegisterServerEvent('0R.ADMIN.WIPEOFFLINE')
AddEventHandler('0R.ADMIN.WIPEOFFLINE', function(identifier)
if PermCheck(source, 'wipeoffline') then
local xPlayer = ESX.GetPlayerFromId(id)
local wipetables = {
{ table = "owned_vehicles", column = "owner"},
{ table = "users", column = "identifier"},
{ table = "datastore_data", column = "owner"},
{ table = "user_licenses", column = "owner"},
}
TriggerClientEvent("0R.ADMIN.SENDNUI", source, { action = "notify", value = L('WIPE1') })
for k, v in pairs(wipetables) do
exports.ghmattimysql:executeSync("DELETE FROM "..v.table.." WHERE "..v.column.." like '%"..identifier.."%' ", { })
Wait(5000)
if k == #wipetables then
DropPlayer(id, "[0R-ADMIN] | "..L('WIPE2'));
break;
end
end
end
end)
RegisterServerEvent('0R.ADMIN.BANPLAYEROFFLINE')
AddEventHandler('0R.ADMIN.BANPLAYEROFFLINE', function(identifier, reason)
if PermCheck(source, 'banoffline') then
exports.ghmattimysql:execute("INSERT INTO `0r-bans` (identifier, reason) VALUES ('"..identifier.."', '"..reason.."')", { })
end
end)
RegisterServerEvent('0R.ADMIN.SETTIME')
AddEventHandler('0R.ADMIN.SETTIME', function(clock, min, type)
if PermCheck(source, 'time') then
if type == "set" then
ExecuteCommand("time "..clock.." "..min)
elseif type == "freeze" then
ExecuteCommand("freezetime")
end
end
end)
RegisterServerEvent('0R.ADMIN.SETWEATHER')
AddEventHandler('0R.ADMIN.SETWEATHER', function(weather, type)
if PermCheck(source, 'weather') then
if type == "set" then
ExecuteCommand("weather "..weather)
elseif type == "freeze" then
ExecuteCommand("freezeweather")
end
end
end)
RegisterServerEvent('0R.ADMIN.PLAYERBLIPS')
AddEventHandler('0R.ADMIN.PLAYERBLIPS', function(bool)
if PermCheck(source, 'playerblips') then
TriggerClientEvent("0R.ADMIN.SHOWBLIPS", source, ESX.GetPlayers())
end
end)
RegisterServerEvent('0R.ADMIN.ANNOUNCEMENT')
AddEventHandler('0R.ADMIN.ANNOUNCEMENT', function(text)
if PermCheck(source, 'announcement') then
TriggerClientEvent("0R.ADMIN.SENDNUI", -1, { action = "Announcement", value = text, perm = PermCheck(source, 'announcement') })
end
end)
RegisterServerEvent('0R.ADMIN.REVIVEALL')
AddEventHandler('0R.ADMIN.REVIVEALL', function()
if PermCheck(source, 'reviveall') then
for k, v in pairs(ESX.GetPlayers()) do
TriggerClientEvent('esx_ambulancejob:revive', v)
end
end
end)
RegisterServerEvent('0R.ADMIN.BRINGALL')
AddEventHandler('0R.ADMIN.BRINGALL', function()
if PermCheck(source, 'bringall') then
for k, v in pairs(ESX.GetPlayers()) do
SetEntityCoords(GetPlayerPed(source), GetEntityCoords(GetPlayerPed(v)))
end
end
end)
RegisterServerEvent('0R.ADMIN.KICKALL')
AddEventHandler('0R.ADMIN.KICKALL', function(reason)
if PermCheck(source, 'kickall') then
for k, v in pairs(ESX.GetPlayers()) do
DropPlayer(v, "[0R-ADMIN] | "..L('KICKED_1').." "..GetPlayerName(source).." \n "..L('KICKED_2')..": "..reason);
end
end
end)
RegisterServerEvent('0R.ADMIN.DELETEALLCARS')
AddEventHandler('0R.ADMIN.DELETEALLCARS', function()
if PermCheck(source, 'deleteallcars') then
TriggerClientEvent("0R.CL.ADMIN.DELETEALLCARS", -1)
end
end)
ESX.RegisterServerCallback('0R.ADMIN.GETPLAYER', function(source, cb, id)
if PermCheck(source, 'playerblips') then
cb({ name = GetPlayerName(id), ped = GetPlayerPed(id), coords = GetEntityCoords(GetPlayerPed(id)) })
end
end)
ESX.RegisterServerCallback('0R.ADMIN.PERMCHECK', function(source, cb, perm)
cb(PermCheck(source, perm))
end)
function PermCheck(source, perm)
local xPlayer = ESX.GetPlayerFromId(source)
local auth = false
local acsess = false
for k, v in pairs(Config.Auth) do
if v.identifier == xPlayer.identifier then
auth = v.perm
break;
end
end
if auth then
for k, v in pairs(Config.Perms[auth]) do
acsess = false
if v == perm then
acsess = true
break;
end
end
else
acsess = false
end
return acsess
end
| 31.093137 | 178 | 0.674365 | 3.1875 |
ccffe8b77e927a8a8f1a62cf12062db15ffb09c5 | 1,841 | lua | Lua | 3DDemo1/assets/scenes/menu.lua | hgy29/GIDEROS_LUAU_SAMPLES | abedadd4542967042b99b0383aa60e9e3d15907b | [
"MIT"
] | null | null | null | 3DDemo1/assets/scenes/menu.lua | hgy29/GIDEROS_LUAU_SAMPLES | abedadd4542967042b99b0383aa60e9e3d15907b | [
"MIT"
] | null | null | null | 3DDemo1/assets/scenes/menu.lua | hgy29/GIDEROS_LUAU_SAMPLES | abedadd4542967042b99b0383aa60e9e3d15907b | [
"MIT"
] | 1 | 2022-03-11T07:54:57.000Z | 2022-03-11T07:54:57.000Z | Menu = Core.class(Sprite)
function Menu:init()
-- BG
application:setBackgroundColor(0x1234AA)
-- a button
local btn01 = ButtonMonster.new({
pixelcolorup=0x00ff00, pixelcolordown=0x0000ff,
text="let's go!", textscalexup=6,
}, 1)
-- position
btn01:setPosition(myappwidth/2, 3*myappheight/10)
-- order
self:addChild(btn01)
-- btns listeners
btn01:addEventListener("clicked", function() self:gotoScene("levelX") end)
-- listeners
self:addEventListener("enterBegin", self.onTransitionInBegin, self)
self:addEventListener("enterEnd", self.onTransitionInEnd, self)
self:addEventListener("exitBegin", self.onTransitionOutBegin, self)
self:addEventListener("exitEnd", self.onTransitionOutEnd, self)
end
-- GAME LOOP
function Menu:onEnterFrame(e)
end
-- EVENT LISTENERS
function Menu:onTransitionInBegin() self:addEventListener(Event.ENTER_FRAME, self.onEnterFrame, self) end
function Menu:onTransitionInEnd() self:myKeysPressed() end
function Menu:onTransitionOutBegin() self:removeEventListener(Event.ENTER_FRAME, self.onEnterFrame, self) end
function Menu:onTransitionOutEnd() end
-- KEYS HANDLER
function Menu:myKeysPressed()
self:addEventListener(Event.KEY_DOWN, function(e)
-- for mobiles and desktops
if e.keyCode == KeyCode.BACK or e.keyCode == KeyCode.ESC then
-- scenemanager:changeScene("menu", 1, transitions[2], easing.outBack)
end
-- fullscreen
local modifier = application:getKeyboardModifiers()
local alt = (modifier & KeyCode.MODIFIER_ALT) > 0
if alt and e.keyCode == KeyCode.ENTER then
isfullscreen = not isfullscreen
fullScreen(isfullscreen)
end
end)
end
-- change scene
function Menu:gotoScene(xscene)
scenemanager:changeScene( xscene, 1,
transitions[math.random(1, #transitions)], easings[math.random(1, #easings)] )
end
| 32.875 | 110 | 0.745247 | 3.203125 |
2e4c829fa6c657780dce45008da5142e943be37b | 1,358 | lua | Lua | src/main/lua/art/storage/cluster/space.lua | art-community/art-tarantool | 4a1a0ecdfbeff958ca231500126a8bae463e9dbd | [
"Apache-2.0"
] | null | null | null | src/main/lua/art/storage/cluster/space.lua | art-community/art-tarantool | 4a1a0ecdfbeff958ca231500126a8bae463e9dbd | [
"Apache-2.0"
] | null | null | null | src/main/lua/art/storage/cluster/space.lua | art-community/art-tarantool | 4a1a0ecdfbeff958ca231500126a8bae463e9dbd | [
"Apache-2.0"
] | null | null | null | local space = {
cancellerFiber = nil,
checkOperationAvailability = function(operation, args)
art.box.space.waitForClusterOperation()
art.box.space.activeClusterOperation = true
art.cluster.space.cancellerFiber = art.core.fiber.create(art.cluster.space.autoCanceller)
local result = {}
box.begin()
result[1], result[2] = pcall(art.box.space[operation], unpack(args))
box.rollback()
return result
end,
autoCanceller = function()
art.core.fiber.sleep(art.config.space.autoCancelTimeout)
art.box.space.activeClusterOperation = false
art.cluster.space.cancellerFiber = nil
end,
cancelOperation = function()
art.box.space.activeClusterOperation = false
art.cluster.space.cancellerFiber:cancel()
art.cluster.space.cancellerFiber = nil
end,
executeOperation = function(operation, args)
local result = {}
box.begin()
result[1], result[2] = pcall(art.box.space[operation], unpack(args))
if (result[1])
then
box.commit()
else
box.rollback()
end
art.box.space.activeClusterOperation = false
art.cluster.space.cancellerFiber:cancel()
art.cluster.space.cancellerFiber = nil
return result
end,
}
return space | 30.863636 | 97 | 0.635493 | 3.03125 |
e2c450f08b2de7c75b8add925102f1f5cb976967 | 2,288 | lua | Lua | mock/sqscript/SQNodeIf.lua | tommo/mock | 276f0b5acf1ee41e84d5a80a5e12bf81ebaf8815 | [
"MIT"
] | 8 | 2015-12-14T16:09:13.000Z | 2019-12-12T17:22:56.000Z | mock/sqscript/SQNodeIf.lua | tommo/mock | 276f0b5acf1ee41e84d5a80a5e12bf81ebaf8815 | [
"MIT"
] | null | null | null | mock/sqscript/SQNodeIf.lua | tommo/mock | 276f0b5acf1ee41e84d5a80a5e12bf81ebaf8815 | [
"MIT"
] | 5 | 2015-02-16T08:47:31.000Z | 2019-12-12T17:22:59.000Z | module 'mock'
--------------------------------------------------------------------
CLASS: SQNodeIf ( SQNodeGroup )
:MODEL{}
function SQNodeIf:__init()
self.expr = false
end
function SQNodeIf:load( data )
self.expr = data.args[ 1 ]
local valueFunc, err = loadEvalScriptWithEnv( self.expr )
if not valueFunc then
self:_warn( 'failed compiling condition expr:', err )
self.valueFunc = false
else
self.valueFunc = valueFunc
end
end
local setfenv = setfenv
function SQNodeIf:checkCondition( state, env )
local func = self.valueFunc
if not func then return false end
local ok, result = func( state:getEvalEnv() )
if ok then return result end
return false
end
function SQNodeIf:enter( state, env )
local result = self:checkCondition( state, env )
env[ 'result' ] = result
if not result then return false end
end
function SQNodeIf:getIcon()
return 'sq_node_if'
end
--------------------------------------------------------------------
CLASS: SQNodeElseIf ( SQNodeIf )
function SQNodeElseIf:__init()
self.parentIfNode = false
end
function SQNodeElseIf:build()
local prev = self:getPrevSibling()
if prev:isInstance( SQNodeElseIf ) then
self.parentIfNode = prev.parentIfNode
elseif prev:isInstance( SQNodeIf ) then
self.parentIfNode = prev
end
end
function SQNodeElseIf:enter( state, env )
local parentEnv = state:getNodeEnvTable( self.parentIfNode )
if parentEnv[ 'result' ] then
return false
end
local result = self:checkCondition( state, env )
parentEnv[ 'result' ] = result
if not result then return false end
end
--------------------------------------------------------------------
CLASS: SQNodeElse ( SQNodeGroup )
function SQNodeElse:__init()
self.parentIfNode = false
end
function SQNodeElse:build()
local prev = self:getPrevSibling()
if prev:isInstance( SQNodeElseIf ) then
self.parentIfNode = prev.parentIfNode
elseif prev:isInstance( SQNodeIf ) then
self.parentIfNode = prev
end
end
function SQNodeElse:enter( state, env )
local parentEnv = state:getNodeEnvTable( self.parentIfNode )
if parentEnv[ 'result' ] then
return false
end
end
-------------------------------------------------------------------
registerSQNode( 'if', SQNodeIf )
registerSQNode( 'elseif', SQNodeElseIf )
registerSQNode( 'else', SQNodeElse )
| 23.346939 | 68 | 0.659091 | 3.171875 |
b14d3262a54a8a0dbe965c24d5a7a90600966352 | 1,651 | c | C | chapter_01/1-22.c | ryo-utsunomiya/answers_to_c_programming_language | b5a4ee2e2ef4c8273af3a2276a3a5f7a66012331 | [
"MIT"
] | null | null | null | chapter_01/1-22.c | ryo-utsunomiya/answers_to_c_programming_language | b5a4ee2e2ef4c8273af3a2276a3a5f7a66012331 | [
"MIT"
] | null | null | null | chapter_01/1-22.c | ryo-utsunomiya/answers_to_c_programming_language | b5a4ee2e2ef4c8273af3a2276a3a5f7a66012331 | [
"MIT"
] | null | null | null | /**
* Exercise 1-22
*
* Write a program to “fold” long input lines into two or more shorter lines after the last non-blank character that
* occurs before the n-th column of input. Make sure your program does something intelligent with very long lines, and
* if there are no blanks or tabs before the specified column.
*/
#include <stdio.h>
#define TAB_WIDTH 4
#define LINE_LENGTH 40
int main(void) {
int pos = 0, last = 0, c, i, col = 0;
char line[LINE_LENGTH];
while ((c = getchar()) != EOF) {
line[pos] = c;
if (c == '\t' && col <= LINE_LENGTH - TAB_WIDTH) {
for (i = col % TAB_WIDTH; i < TAB_WIDTH; ++i) {
line[pos] = ' ';
++pos;
}
col += (TAB_WIDTH - col % TAB_WIDTH);
}
if (c != '\t') {
++pos;
++col;
}
if (c == ' ' || c == '\t') {
last = pos - 1;
}
if (c == '\n') {
line[pos] = '\0';
printf("%s", line);
pos = last = 0;
col = 0;
}
if (pos == LINE_LENGTH) {
for (i = 0; i < last; ++i) {
printf("%c", line[i]);
}
putchar('\n');
for (i = 0; i < LINE_LENGTH - last; ++i) {
if (line[last + i + 1] != ' ' || line[last + i + 1] != '\t') {
line[i] = line[last + i + 1];
}
}
pos = LINE_LENGTH - last - 1;
col = pos;
}
}
for (i = 0; i < pos; ++i) {
printf("%c", line[i]);
}
putchar('\n');
return 0;
}
| 22.930556 | 118 | 0.411266 | 3.3125 |
8688af2d119bbeb956d3989cf4bfe400516e7e0c | 1,993 | rs | Rust | examples/receive.rs | gitter-badger/coremidi | 60d5bf18ecf3e4abb66f4830e6ece94558129fc1 | [
"MIT"
] | null | null | null | examples/receive.rs | gitter-badger/coremidi | 60d5bf18ecf3e4abb66f4830e6ece94558129fc1 | [
"MIT"
] | null | null | null | examples/receive.rs | gitter-badger/coremidi | 60d5bf18ecf3e4abb66f4830e6ece94558129fc1 | [
"MIT"
] | null | null | null | extern crate coremidi;
use std::env;
fn main() {
let source_index = get_source_index();
println!("Source index: {}", source_index);
let source = coremidi::Source::from_index(source_index);
println!("Source display name: {}", source.display_name().unwrap());
let client = coremidi::Client::new("example-client").unwrap();
let callback = |packet_list: coremidi::PacketList| {
println!("{}", packet_list);
};
let input_port = client.input_port("example-port", callback).unwrap();
input_port.connect_source(&source).unwrap();
let mut input_line = String::new();
println!("Press [Intro] to finish ...");
    std::io::stdin().read_line(&mut input_line).expect("Failed to read line");
input_port.disconnect_source(&source).unwrap();
}
fn get_source_index() -> usize {
let mut args_iter = env::args();
let tool_name = args_iter.next()
.and_then(|path| path.split(std::path::MAIN_SEPARATOR).last().map(|v| v.to_string()))
.unwrap_or("receive".to_string());
match args_iter.next() {
Some(arg) => match arg.parse::<usize>() {
Ok(index) => {
if index >= coremidi::Sources::count() {
println!("Source index out of range: {}", index);
std::process::exit(-1);
}
index
},
Err(_) => {
println!("Wrong source index: {}", arg);
std::process::exit(-1);
}
},
None => {
println!("Usage: {} <source-index>", tool_name);
println!("");
println!("Available Sources:");
print_sources();
std::process::exit(-1);
}
}
}
fn print_sources() {
for (i, source) in coremidi::Sources.into_iter().enumerate() {
match source.display_name() {
Some(display_name) => println!("[{}] {}", i, display_name),
None => ()
}
}
}
| 30.19697 | 93 | 0.538384 | 3.28125 |
d4da4511b7d7a97d494cce68974850c235770652 | 1,683 | rs | Rust | src/main.rs | jddf/jddf-codegen | 5320efebbc407f76793129d008a3ab218c6fa57c | [
"MIT"
] | null | null | null | src/main.rs | jddf/jddf-codegen | 5320efebbc407f76793129d008a3ab218c6fa57c | [
"MIT"
] | null | null | null | src/main.rs | jddf/jddf-codegen | 5320efebbc407f76793129d008a3ab218c6fa57c | [
"MIT"
] | null | null | null | mod target;
mod targets;
use clap::{App, AppSettings, Arg};
use failure::Error;
use jddf::{Schema, SerdeSchema};
use std::fs::File;
use target::Target;
fn main() -> Result<(), Error> {
let app = App::new("jddf-codegen")
.version("0.1")
.about("Generates data structures from JDDF schemas")
.setting(AppSettings::ColoredHelp)
.arg(
Arg::with_name("INPUT")
.help("Input JDDF schema file")
.last(true)
.required(true),
);
let app = targets::typescript::Target::args(app);
let app = targets::golang::Target::args(app);
let matches = app.get_matches();
let target_ts = targets::typescript::Target::from_args(&matches)?;
let target_go = targets::golang::Target::from_args(&matches)?;
// Parse out the input schema, and ensure it is valid.
let input = matches.value_of("INPUT").unwrap();
let file = File::open(input)?;
let serde_schema: SerdeSchema = serde_json::from_reader(file)?;
let schema = Schema::from_serde(serde_schema)?;
// Run each of the target transformation routines. If any fail, do not
// generate code.
let ast_ts = if let Some(ref t) = target_ts {
Some(t.transform(&schema)?)
} else {
None
};
let ast_go = if let Some(ref t) = target_go {
Some(t.transform(&schema)?)
} else {
None
};
// Serialize each of the ASTs. At this point, only IO errors can cause
// issues.
if let Some(ref t) = target_ts {
t.serialize(&ast_ts.unwrap())?;
}
if let Some(ref t) = target_go {
t.serialize(&ast_go.unwrap())?;
}
Ok(())
}
| 27.145161 | 74 | 0.592989 | 3.015625 |
ebba17527a3b26dc119432db6fdce779c0d1bc78 | 3,914 | lua | Lua | CommunityContent/NPC AI Kit/Scripts/NPCWaypoint.lua | evilotaku/MGD-Core-Tutorial | 6ca2f592c55f42f9bc0194152acfdc0c7b0ff669 | [
"MIT"
] | 3 | 2021-03-27T00:20:13.000Z | 2021-08-17T08:53:59.000Z | Data/Subfolders/NPC AI Kit/Scripts/NPCWaypoint.lua | Core-Team-META/CC-Quest-System | 477ce5ad6e297642f9bae95596cf6569c604c8bb | [
"Apache-2.0"
] | null | null | null | Data/Subfolders/NPC AI Kit/Scripts/NPCWaypoint.lua | Core-Team-META/CC-Quest-System | 477ce5ad6e297642f9bae95596cf6569c604c8bb | [
"Apache-2.0"
] | 1 | 2021-06-15T03:59:23.000Z | 2021-06-15T03:59:23.000Z | --[[
NPC Waypoint
by: standardcombo
v1.0.0
The waypoint detects NPCs entering its trigger and directs them to move to the next objective.
Can be used for patrolling behaviors, random folks in town, traffic lanes, etc.
See the "Waypoint Example" template that is part of this package.
Any object that enters the waypoint is evaluated as a possible NPC. It works if the object conforms
to these rules:
1. The object is part of a template.
2. The template contains a script.
3. The script has a function called SetObjective(Vector3).
4a. Both the template and the waypoint's trigger are on the same team
4b. or, one of the two has team = 0 (neutral).
Waypoint setups can be as simple as two pointing at each other, or a complex network of travel points.
To setup a network of waypoints:
1. Add waypoints to the map and resize their triggers to fit the level design.
(Don't move the triggers around. Keep them at local position 0,0,0. Move the whole waypoint instead)
2. Deinstance the waypoints
3. In the hierarchy, select a waypoint that is supposed to send NPCs to the next one
4. Drag another waypoint from the hierarchy onto the Properties, to create a destination custom property.
5. You can add multiple custom property destinations to have it select one at random.
6. Any Core Objects can be used as destinations, not just other waypoints.
7. If you don't add any destinations in this way the waypoint will behave as a "dead end".
8. A waypoint can have itself as a custom property. NPCs will patrol the internal space of the waypoint.
--]]
local ROOT = script.parent
local TRIGGER = ROOT:FindChildByType("Trigger")
local TRIGGER_SCALE = TRIGGER:GetWorldScale()
destinationRadius = math.min(TRIGGER_SCALE.x, TRIGGER_SCALE.y) * 50
function FindAiScript(obj)
if not obj.FindTemplateRoot then return end
local templateRoot = obj:FindTemplateRoot()
if (templateRoot == nil) then
templateRoot = obj.parent
end
if templateRoot then
-- Team mismatch exit condition
local team = templateRoot:GetCustomProperty("Team") or 0
if (team ~= 0 and TRIGGER.team ~= 0 and team ~= TRIGGER.team) then
return nil
end
-- Search for AI script
local scripts = templateRoot:FindDescendantsByType("Script")
for _,s in ipairs(scripts) do
if s.context.SetObjective then
return s
end
end
end
return nil
end
function GetDestination()
local key,value = GetRandomCustomProperty(ROOT)
if value and value:IsA("CoreObjectReference") then
local obj = value:GetObject()
local targetPos = obj:GetWorldPosition()
local radius = destinationRadius
local scripts = obj:FindDescendantsByType("Script")
for _,s in ipairs(scripts) do
if s.context.destinationRadius then
radius = s.context.destinationRadius
break
end
end
local rngAngle = math.random() * 2 * math.pi
local rngRadius = radius * math.sqrt(math.random())
targetPos.x = targetPos.x + rngRadius * math.cos(rngAngle)
targetPos.y = targetPos.y + rngRadius * math.sin(rngAngle)
return targetPos
else
UI.PrintToScreen("ERROR: NPCWaypoint has no destinations. Add CoreObject references as custom properties.")
end
return nil
end
function GetRandomCustomProperty(obj)
local allProperties = obj:GetCustomProperties()
local propertyCount = 0
for _,_ in pairs(allProperties) do
propertyCount = propertyCount + 1
end
if (propertyCount > 0) then
local selectedIndex = math.random(1, propertyCount)
local i = 0
for key,value in pairs(allProperties) do
i = i + 1
if selectedIndex == i then
return key,value
end
end
end
return nil,nil
end
function OnBeginOverlap(theTrigger, obj)
--print("OnBeginOverlap obj = " .. tostring(obj))
local aiScript = FindAiScript(obj)
if aiScript then
local destination = GetDestination()
if destination then
aiScript.context.SetObjective(destination)
end
end
end
TRIGGER.beginOverlapEvent:Connect(OnBeginOverlap)
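-- Editor's note: a minimal sketch of the NPC-side script this waypoint expects (rules 2-3
-- in the header comment): the NPC template carries a script exposing a global
-- SetObjective(Vector3) function, which the waypoint reaches via s.context.SetObjective.
-- The body below is an assumption for illustration only, not code from this package.
--
--   function SetObjective(targetPos)
--       -- start moving this NPC toward targetPos using the template's own movement logic
--   end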
| 30.818898 | 109 | 0.753449 | 3.09375 |
efb7a6c2089d4ecc1b1947737290313d332ec7bb | 1,148 | kt | Kotlin | app/src/main/kotlin/com/vsouhrada/kotlin/android/anko/fibo/core/session/ISession.kt | Squibblet/206CDE_Attendance_System | 8c72986ebecb845d71fb0bacdcfd5a05c53fff9b | [
"Apache-2.0"
] | null | null | null | app/src/main/kotlin/com/vsouhrada/kotlin/android/anko/fibo/core/session/ISession.kt | Squibblet/206CDE_Attendance_System | 8c72986ebecb845d71fb0bacdcfd5a05c53fff9b | [
"Apache-2.0"
] | null | null | null | app/src/main/kotlin/com/vsouhrada/kotlin/android/anko/fibo/core/session/ISession.kt | Squibblet/206CDE_Attendance_System | 8c72986ebecb845d71fb0bacdcfd5a05c53fff9b | [
"Apache-2.0"
] | null | null | null | package com.vsouhrada.kotlin.android.anko.fibo.core.session
/**
* @author vsouhrada
* @since 0.1.0
*/
interface ISession {
/**
   * Called when the session should be opened, e.g. when *refresh* is called on [ISessionManager].
* @since 0.1.0
*/
fun openSession()
/**
   * Called when the session should be closed.
* @since 0.1.0
*/
fun closeSession()
/**
   * Called when validation of the session is required.
   * @return true if the session is valid, false otherwise
* @since 1.0.0
*/
fun isValid(): Boolean
/**
   * Returns the Object stored in the session under the *key* value.
* @param key the key value
* @return data contained under the *key* value
* @since 0.1.0
*/
fun getData(key: Int): Any
/**
* Store Object in session under the value *key*.
* @param key the key value
* @param value the Object which you want to store
* @since 0.1.0
*/
fun putData(key: Int, value: Any)
/**
* Delete the Object stored in session under the value *key*.
* @param key the key value
* @since 0.1.0
*/
fun removeData(key: Int)
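  // Editor's note: an illustrative usage sketch, not part of the original interface.
  // `session` is an assumed ISession implementation (e.g. obtained from an ISessionManager),
  // and USER_KEY is a hypothetical Int constant:
  //
  //   session.openSession()
  //   session.putData(USER_KEY, user)
  //   val cached = session.getData(USER_KEY)
  //   if (!session.isValid()) session.closeSession()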
} | 22.076923 | 116 | 0.63676 | 3.09375 |
0c85c0c3fc3e88d3d1512b447e6a7c16569279b2 | 1,376 | py | Python | nginx_rtmp_wizard/models.py | Gerhut/nginx-rtmp-wizard | c821c3bb262503ee26408b8b3bf4a252b49a29d6 | [
"Unlicense"
] | null | null | null | nginx_rtmp_wizard/models.py | Gerhut/nginx-rtmp-wizard | c821c3bb262503ee26408b8b3bf4a252b49a29d6 | [
"Unlicense"
] | 1 | 2021-06-10T20:32:59.000Z | 2021-06-10T20:32:59.000Z | nginx_rtmp_wizard/models.py | Gerhut/nginx-rtmp-wizard | c821c3bb262503ee26408b8b3bf4a252b49a29d6 | [
"Unlicense"
] | null | null | null | from django.conf import settings
from django.core import validators
from django.db import models
DEFAULT_RTMP_PORT = 1935
class Server(models.Model):
listen = models.PositiveIntegerField(
default=DEFAULT_RTMP_PORT,
unique=True,
validators=[
validators.MinValueValidator(1024),
validators.MaxValueValidator(65535)
])
def __str__(self):
if self.listen == DEFAULT_RTMP_PORT:
return 'rtmp://{}'.format(settings.RTMP_HOSTNAME)
else:
return 'rtmp://{}:{}'.format(settings.RTMP_HOSTNAME, self.listen)
class Application(models.Model):
server = models.ForeignKey(Server, on_delete=models.CASCADE)
name = models.SlugField(default='live')
live = models.BooleanField(default=False)
def __str__(self):
return '{}/{}'.format(self.server, self.name)
class Meta:
constraints = [
models.UniqueConstraint(
fields=['server', 'name'],
name='unique_server_application_name')]
class Push(models.Model):
application = models.ForeignKey(Application, on_delete=models.CASCADE)
url = models.CharField(
max_length=255,
unique=True,
validators=[
validators.URLValidator(schemes=['rtmp'])
])
def __str__(self):
return 'push {};'.format(self.url)
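# Editor's note: an illustrative usage sketch, not part of the original module. It assumes
# a configured Django project with RTMP_HOSTNAME defined in settings:
#
#   server = Server.objects.create(listen=1935)
#   app = Application.objects.create(server=server, name="live", live=True)
#   Push.objects.create(application=app, url="rtmp://upstream.example.com/live")
#   str(app)   # -> "rtmp://<RTMP_HOSTNAME>/live"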
| 27.52 | 77 | 0.634448 | 3.296875 |
e74da2d78d304199dfccbeca07631b9f42b9d6aa | 935 | js | JavaScript | reactPrimerosPasos/basicos/src/components/Producto.js | maxicosia/maxicosia.github.io | 36bd9d5dcb199b4cab890128624d45bb3450f34b | [
"MIT"
] | null | null | null | reactPrimerosPasos/basicos/src/components/Producto.js | maxicosia/maxicosia.github.io | 36bd9d5dcb199b4cab890128624d45bb3450f34b | [
"MIT"
] | 10 | 2020-05-08T22:33:38.000Z | 2022-03-02T10:14:24.000Z | reactPrimerosPasos/basicos/src/components/Producto.js | maxicosia/maxicosia.github.io | 36bd9d5dcb199b4cab890128624d45bb3450f34b | [
"MIT"
] | 1 | 2020-05-08T18:54:14.000Z | 2020-05-08T18:54:14.000Z | import React from "react";
const Producto = ({ producto, carrito, agregarProducto, productos }) => {
const { nombre, precio, id } = producto;
  // Add the product to the cart
const seleccionarProducto = (id) => {
const producto = productos.filter((producto) => producto.id === id)[0];
agregarProducto([...carrito, producto]);
};
  // Remove the product from the cart
const eliminarProducto = (id) => {
const productos = carrito.filter((producto) => producto.id !== id);
    // Put the remaining products back into state
agregarProducto(productos);
};
return (
<div>
<h2>{nombre}</h2>
<p>${precio}</p>
{productos ? (
<button type="button" onClick={() => seleccionarProducto(id)}>
Comprar
</button>
) : (
<button type="button" onClick={() => eliminarProducto(id)}>
Eliminar
</button>
)}
</div>
);
};
export default Producto;
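// Editor's note: an illustrative usage sketch from a hypothetical parent component, not
// part of the original file; `guardarCarrito` is an assumed state setter for the cart:
//
//   <Producto
//     key={producto.id}
//     producto={producto}
//     productos={productos}        // pass only in the catalog view; omit in the cart view
//     carrito={carrito}
//     agregarProducto={guardarCarrito}
//   />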
| 23.974359 | 75 | 0.586096 | 3.03125 |
a07384c93e6f601fd0987241c409b99976c3d998 | 1,931 | lua | Lua | kong/db/dao/vaults.lua | liyangau/kong | 57f9ac4b92017ae34922773118e9d4caa9b19f08 | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2022-03-12T22:13:37.000Z | 2022-03-12T22:13:37.000Z | kong/db/dao/vaults.lua | johnfishbein/kong | 6c0154e77985fc7b3bd5bdf6dfe924bf6dfd7ae3 | [
"ECL-2.0",
"Apache-2.0"
] | 5 | 2022-02-16T15:57:27.000Z | 2022-03-29T00:06:06.000Z | kong/db/dao/vaults.lua | johnfishbein/kong | 6c0154e77985fc7b3bd5bdf6dfe924bf6dfd7ae3 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | local constants = require "kong.constants"
local utils = require "kong.tools.utils"
local vault_loader = require "kong.db.schema.vault_loader"
local Vaults = {}
local type = type
local pairs = pairs
local concat = table.concat
local insert = table.insert
local tostring = tostring
local log = ngx.log
local WARN = ngx.WARN
local DEBUG = ngx.DEBUG
local function load_vault_strategy(vault)
local ok, strategy = utils.load_module_if_exists("kong.vaults." .. vault)
if not ok then
return nil, vault .. " vault is enabled but not installed;\n" .. strategy
end
return strategy
end
local function load_vault(self, vault)
local db = self.db
if constants.DEPRECATED_VAULTS[vault] then
log(WARN, "vault '", vault, "' has been deprecated")
end
local strategy, err = load_vault_strategy(vault)
if not strategy then
return nil, err
end
if type(strategy.init) == "function" then
strategy.init()
end
local _, err = vault_loader.load_subschema(self.schema, vault, db.errors)
if err then
return nil, err
end
log(DEBUG, "Loading vault: ", vault)
return strategy
end
--- Load subschemas for enabled vaults into the Vaults entity. It has two side effects:
-- * It makes the Vault sub-schemas available for the rest of the application
-- * It initializes the Vault.
-- @param vault_set a set of vault names.
-- @return true if success, or nil and an error message.
function Vaults:load_vault_schemas(vault_set)
local strategies = {}
local errors
for vault in pairs(vault_set) do
local strategy, err = load_vault(self, vault)
if strategy then
strategies[vault] = strategy
else
errors = errors or {}
insert(errors, "on vault '" .. vault .. "': " .. tostring(err))
end
end
if errors then
return nil, "error loading vault schemas: " .. concat(errors, "; ")
end
self.strategies = strategies
return true
end
return Vaults
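-- Editor's note: an illustrative call sketch, not part of the original module. The argument
-- is a set of vault names keyed by name, as documented above; the DAO instance and the vault
-- names themselves are assumptions:
--
--   local ok, err = vaults_dao:load_vault_schemas({ env = true, ["my-vault"] = true })
--   if not ok then
--     ngx.log(ngx.ERR, err)
--   end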
| 21.943182 | 87 | 0.701191 | 3.34375 |
d45383d66c3a021c9c9768a3b0d28a1c7b9fdec4 | 3,933 | lua | Lua | scripts/states/creeperEffect.lua | nanderv/LD41 | 1ccfa25fa76dfcf86e0dc314c0e13f3116904f72 | [
"BSD-3-Clause"
] | null | null | null | scripts/states/creeperEffect.lua | nanderv/LD41 | 1ccfa25fa76dfcf86e0dc314c0e13f3116904f72 | [
"BSD-3-Clause"
] | null | null | null | scripts/states/creeperEffect.lua | nanderv/LD41 | 1ccfa25fa76dfcf86e0dc314c0e13f3116904f72 | [
"BSD-3-Clause"
] | null | null | null | --
-- Created by IntelliJ IDEA.
-- User: nander
-- Date: 21/04/2018
-- Time: 18:03
-- To change this template use File | Settings | File Templates.
--
local menu = {} -- previously: Gamestate.new()
menu.name = "runCard"
function menu:enter(prev, state, cardIndex, card)
menu.prev = prev
menu.state = state
menu.card = cardIndex
menu.cardData = card or STATE.hand[cardIndex]
menu.showing = "costs"
menu.item = 1
menu.time = 0
menu.fromHand = not card
menu.cardDone = false
menu.cardEnding = false
end
local effects = {}
effects.add_cost = {
exec = function(card, index)
local c = scripts.gameobjects.cards[menu.cardData]
if c.costs and c.costs.type then
STATE.properties[c.costs.type] = STATE.properties[c.costs.type] - c.costs.value
end
end,
draw = function(card, index, time)
end,
duration = 1,
small = false,
}
effects.add_card = {
exec = function(card, index)
local c = scripts.gameobjects.cards[menu.cardData]
local effect = c.effects[index]
STATE.discardPile[#STATE.discardPile + 1] = effect.card
end,
draw = function(c, index, time)
scripts.rendering.renderCard.renderCard(scripts.gameobjects.cards[c.effects[index].card], 1210 - 1200 * (0.5 - time), 568 - 800 * (0.5 - time), 0.5)
end,
duration = 0.5,
small = false,
}
effects.place_building = {
exec = function(card, index)
local c = scripts.gameobjects.cards[menu.cardData]
local effect = c.effects[index]
scripts.gameobjects.buildings[effect.building]:build(STATE)
menu.cardEnding = true
end,
draw = function(card, index, time)
end,
duration = 0,
small = true,
}
effects.next_turn = {
exec = function(card, index)
local c = scripts.gameobjects.cards[STATE.hand[card]]
local effect = c.effects[index]
table.insert(STATE.currentTurnEffects, table.clone(effect))
end,
draw = function(card, index, time) end,
duration = 0,
small = true,
}
effects.resource = {
exec = function(card, index)
local c = scripts.gameobjects.cards[menu.cardData]
local effect = c.effects[index]
if effect.resource and STATE.properties[effect.resource] then
STATE.properties[effect.resource] = STATE.properties[effect.resource] + effect.value
end
end,
draw = function(card, index, time)
end,
duration = 0.5,
small = false,
}
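-- Editor's note: an illustrative sketch of how a further handler would slot into this table
-- (the effect type name is hypothetical, not part of the original file); every entry supplies
-- exec/draw plus a duration and a "small" flag:
--
--   effects.draw_card = {
--     exec = function(card, index) --[[ mutate STATE here ]] end,
--     draw = function(card, index, time) end,
--     duration = 0,
--     small = true,
--   }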
menu.effects = effects
function menu:update(dt, wait)
menu.prev:update(dt, true)
end
function menu:draw()
menu.prev:draw(true)
love.graphics.push()
love.graphics.scale(GLOBSCALE())
scripts.rendering.renderUI.drawCard(menu.state, menu.cardData, false, true)
if scripts.gameobjects.cards[menu.cardData].is_creeper then
        scripts.rendering.renderUI.drawMessage("Drew creeper .. " .. scripts.gameobjects.cards[menu.cardData].name .. "; a disaster occurred.")
end
if menu.showing == "effects" then
effects[scripts.gameobjects.cards[menu.cardData].effects[menu.item].type].draw(scripts.gameobjects.cards[menu.cardData], menu.item, menu.time)
end
love.graphics.pop()
end
function menu:mousepressed(x, y, click)
if click == 1 then
Gamestate.pop()
for item = 1, #scripts.gameobjects.cards[menu.cardData].effects do
effects[scripts.gameobjects.cards[menu.cardData].effects[item].type].exec(menu.cardData, item)
end
else
scripts.rendering.renderUI.mousePressed(x, y, click)
end
end
function menu:mousereleased(x, y, mouse_btn)
scripts.rendering.renderUI.mouseReleased(x, y, mouse_btn)
end
function menu:wheelmoved(x, y)
scripts.rendering.renderUI.wheelmoved(x, y)
end
return menu
| 28.708029 | 156 | 0.660564 | 3.140625 |
9dd71fac75b849d02211ccd58050b9f05f9b57f2 | 3,086 | swift | Swift | Sources/SwiftUIToolbox/Extensions/Publishers/Publishers+CombineLatestCollection.swift | devQQ/SwiftUIToolbox | c4286ea7fd19c24b92e7bb24cbe418a38b331dae | [
"MIT"
] | null | null | null | Sources/SwiftUIToolbox/Extensions/Publishers/Publishers+CombineLatestCollection.swift | devQQ/SwiftUIToolbox | c4286ea7fd19c24b92e7bb24cbe418a38b331dae | [
"MIT"
] | null | null | null | Sources/SwiftUIToolbox/Extensions/Publishers/Publishers+CombineLatestCollection.swift | devQQ/SwiftUIToolbox | c4286ea7fd19c24b92e7bb24cbe418a38b331dae | [
"MIT"
] | null | null | null | //
// Publishers+CombineLatestCollection.swift
//
//
// Created by Q Trang on 8/1/20.
//
import Foundation
import Combine
extension Collection where Element: Publisher {
public func combineLatest(waitForAllPublishers wait: Bool = true) -> CombineLatestCollection<Self> {
CombineLatestCollection(self, wait: wait)
}
}
public struct CombineLatestCollection<Publishers>: Publisher where Publishers: Collection, Publishers.Element: Publisher {
public typealias Output = [Publishers.Element.Output]
public typealias Failure = Publishers.Element.Failure
private let publishers: Publishers
private let wait: Bool
public init(_ publishers: Publishers, wait: Bool = true) {
self.publishers = publishers
self.wait = wait
}
public func receive<S>(subscriber: S) where S : Subscriber, Self.Failure == S.Failure, Self.Output == S.Input {
let subscription = Subscription(subscriber: subscriber, publishers: publishers, wait: wait)
subscriber.receive(subscription: subscription)
}
}
extension CombineLatestCollection {
fileprivate final class Subscription<S: Subscriber>: Combine.Subscription where S.Failure == Failure, S.Input == Output {
private let subscribers: [AnyCancellable]
init(subscriber: S, publishers: Publishers, wait: Bool = true) {
let count = publishers.count
var outputs: [Publishers.Element.Output?] = Array(repeating: nil, count: count)
var completions = 0
var hasCompleted = false
let lock = NSLock()
subscribers = publishers.enumerated().map { index, publisher in
publisher.sink(receiveCompletion: { completion in
lock.lock()
defer { lock.unlock() }
guard case .finished = completion else {
subscriber.receive(completion: completion)
hasCompleted = true
return
}
completions += 1
guard completions == count else { return }
subscriber.receive(completion: completion)
hasCompleted = true
}) { value in
lock.lock()
defer { lock.unlock() }
guard !hasCompleted else { return }
outputs[index] = value
let values = outputs.compactMap { $0 }
guard values.count == count ||
!wait else { return }
_ = subscriber.receive(values)
}
}
}
func request(_ demand: Subscribers.Demand) {}
func cancel() {
subscribers.forEach { $0.cancel() }
}
}
}
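// Editor's note: an illustrative usage sketch, not part of the original file. With the
// default `waitForAllPublishers`, a combined array is emitted only once every publisher
// in the collection has produced at least one value:
//
//   let subjects = [PassthroughSubject<Int, Never>(), PassthroughSubject<Int, Never>()]
//   let cancellable = subjects.combineLatest()
//       .sink { print($0) }   // prints "[1, 2]" after both sends below
//   subjects[0].send(1)
//   subjects[1].send(2)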
| 33.912088 | 125 | 0.529488 | 3.109375 |
7ffe8d53372253ef7c79103b3298346a603047c0 | 1,328 | go | Go | pkg/tile/proxy_test.go | fafeitsch/Open-Traffic-Sandbox | 5682995e1956bcec07736007070d295f2a15007b | [
"MIT"
] | null | null | null | pkg/tile/proxy_test.go | fafeitsch/Open-Traffic-Sandbox | 5682995e1956bcec07736007070d295f2a15007b | [
"MIT"
] | null | null | null | pkg/tile/proxy_test.go | fafeitsch/Open-Traffic-Sandbox | 5682995e1956bcec07736007070d295f2a15007b | [
"MIT"
] | null | null | null | package tile
import (
"fmt"
"github.com/stretchr/testify/assert"
"net/http"
"net/http/httptest"
"net/url"
"testing"
)
func TestNewProxy(t *testing.T) {
t.Run("no redirect", func(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) {
writer.WriteHeader(http.StatusOK)
_, _ = fmt.Fprintf(writer, "I am an osm tile (path: %s)", request.URL.Path)
}))
tileServer, _ := url.Parse(server.URL + "/osm-api/tiles/{z}/{x}/${y}.png")
defer server.Close()
proxy := NewProxy(tileServer, false)
recorder := httptest.NewRecorder()
request := httptest.NewRequest("GET", "/tile/8/652/332", nil)
proxy.ServeHTTP(recorder, request)
assert.Equal(t, "I am an osm tile (path: /osm-api/tiles/8/652/332.png)", recorder.Body.String())
})
t.Run("redirect", func(t *testing.T) {
tileServer, _ := url.Parse("https://example.com/osm-api/tiles/{z}/{x}/${y}.png")
proxy := NewProxy(tileServer, true)
recorder := httptest.NewRecorder()
request := httptest.NewRequest("GET", "/tile/8/652/332", nil)
proxy.ServeHTTP(recorder, request)
assert.Equal(t, http.StatusMovedPermanently, recorder.Code, "status code wrong")
assert.Equal(t, "https://example.com/osm-api/tiles/8/652/332.png", recorder.Header().Get("Location"), "location header wrong")
})
}
| 36.888889 | 128 | 0.685994 | 3.015625 |
54720744bdc96f85b7b00ede8f55486c542f3dec | 3,387 | go | Go | lib/quotes/yahoo/yahoo.go | sboehler/knut | f5b754b08c9c0f97db3bb237c0fbac7deb33baa4 | [
"Apache-2.0"
] | 42 | 2020-11-28T22:35:53.000Z | 2022-03-05T19:15:44.000Z | lib/quotes/yahoo/yahoo.go | sboehler/knut | f5b754b08c9c0f97db3bb237c0fbac7deb33baa4 | [
"Apache-2.0"
] | 5 | 2021-01-03T21:51:47.000Z | 2021-11-22T13:53:51.000Z | lib/quotes/yahoo/yahoo.go | sboehler/knut | f5b754b08c9c0f97db3bb237c0fbac7deb33baa4 | [
"Apache-2.0"
] | 7 | 2020-11-23T23:15:22.000Z | 2022-02-20T12:08:31.000Z | // Copyright 2021 Silvio Böhler
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package yahoo
import (
"encoding/csv"
"fmt"
"io"
"net/http"
"net/url"
"path"
"strconv"
"time"
)
const yahooURL string = "https://query1.finance.yahoo.com/v7/finance/download"
// Quote represents a quote on a given day.
type Quote struct {
Date time.Time
Open float64
High float64
Low float64
Close float64
AdjClose float64
Volume int
}
// Client is a client for Yahoo! quotes.
type Client struct {
url string
}
// New creates a new client with the default URL.
func New() Client {
return Client{yahooURL}
}
// Fetch fetches the quotes for the given symbol between t0 and t1.
func (c *Client) Fetch(sym string, t0, t1 time.Time) ([]Quote, error) {
u, err := createURL(c.url, sym, t0, t1)
if err != nil {
return nil, err
}
resp, err := http.Get(u.String())
if err != nil {
return nil, err
}
defer resp.Body.Close()
return decodeResponse(resp.Body)
}
// createURL creates a URL for the given root URL and parameters.
func createURL(rootURL, sym string, t0, t1 time.Time) (*url.URL, error) {
u, err := url.Parse(rootURL)
if err != nil {
return u, err
}
u.Path = path.Join(u.Path, url.PathEscape(sym))
u.RawQuery = url.Values{
"events": {"history"},
"interval": {"1d"},
"period1": {fmt.Sprint(t0.Unix())},
"period2": {fmt.Sprint(t1.Unix())},
}.Encode()
return u, nil
}
// decodeResponse takes a reader for the response and returns
// the parsed quotes.
func decodeResponse(r io.ReadCloser) ([]Quote, error) {
var csvReader = csv.NewReader(r)
csvReader.FieldsPerRecord = 7
// skip header
if _, err := csvReader.Read(); err != nil {
return nil, err
}
// read lines
var res []Quote
for {
r, err := csvReader.Read()
if err == io.EOF {
return res, nil
}
if err != nil {
return nil, err
}
quote, ok, err := recordToQuote(r)
if err != nil {
return nil, err
}
if ok {
res = append(res, quote)
}
}
}
// recordToQuote decodes one line of the response CSV.
func recordToQuote(r []string) (Quote, bool, error) {
var (
quote Quote
err error
)
for _, item := range r {
if item == "null" {
return quote, false, nil
}
}
quote.Date, err = time.Parse("2006-01-02", r[0])
if err != nil {
return quote, false, err
}
quote.Open, err = strconv.ParseFloat(r[1], 64)
if err != nil {
return quote, false, err
}
quote.High, err = strconv.ParseFloat(r[2], 64)
if err != nil {
return quote, false, err
}
quote.Low, err = strconv.ParseFloat(r[3], 64)
if err != nil {
return quote, false, err
}
quote.Close, err = strconv.ParseFloat(r[4], 64)
if err != nil {
return quote, false, err
}
quote.AdjClose, err = strconv.ParseFloat(r[5], 64)
if err != nil {
return quote, false, err
}
quote.Volume, err = strconv.Atoi(r[6])
if err != nil {
return quote, false, err
}
return quote, true, nil
}
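// Editor's note: an illustrative usage sketch from a calling package, not part of the
// original file (the symbol and date range are arbitrary):
//
//	c := yahoo.New()
//	quotes, err := c.Fetch("SPY", time.Now().AddDate(0, -1, 0), time.Now())
//	if err != nil {
//		// handle the download/parse error
//	}
//	for _, q := range quotes {
//		fmt.Println(q.Date, q.Close)
//	}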
| 22.430464 | 78 | 0.656038 | 3.015625 |
86502ec9713367b4f1cb452d57f0f2214dee1027 | 18,390 | rs | Rust | gui/src/rust/ide/src/double_representation/graph.rs | vitvakatu/enso | 99053decd8790a7c533d56065d92548fcb512503 | [
"Apache-2.0"
] | null | null | null | gui/src/rust/ide/src/double_representation/graph.rs | vitvakatu/enso | 99053decd8790a7c533d56065d92548fcb512503 | [
"Apache-2.0"
] | null | null | null | gui/src/rust/ide/src/double_representation/graph.rs | vitvakatu/enso | 99053decd8790a7c533d56065d92548fcb512503 | [
"Apache-2.0"
] | null | null | null | //! Code for retrieving graph description from AST.
use crate::prelude::*;
use crate::double_representation::definition::DefinitionInfo;
use crate::double_representation::definition::DefinitionProvider;
use crate::double_representation::node;
use crate::double_representation::node::LocatedNode;
use crate::double_representation::node::NodeInfo;
use ast::Ast;
use ast::BlockLine;
use ast::known;
use utils::fail::FallibleResult;
use crate::double_representation::connection::Connection;
/// Graph uses the same `Id` as the definition which introduces the graph.
pub type Id = double_representation::definition::Id;
// ====================
// === LocationHint ===
// ====================
/// Describes the desired position of the node's line in the graph's code block.
#[derive(Clone,Copy,Debug)]
pub enum LocationHint {
/// Try placing this node's line before the line described by id.
Before(ast::Id),
/// Try placing this node's line after the line described by id.
After(ast::Id),
/// Try placing this node's line at the start of the graph's code block.
Start,
/// Try placing this node's line at the end of the graph's code block.
End,
}
// =================
// === GraphInfo ===
// =================
/// Description of the graph, based on information available in AST.
#[derive(Clone,Debug,Shrinkwrap)]
pub struct GraphInfo {
/// The definition providing this graph.
pub source:DefinitionInfo,
}
impl GraphInfo {
/// Look for a node with given id in the graph.
pub fn locate_node(&self, id:double_representation::node::Id) -> FallibleResult<LocatedNode> {
let lines = self.source.block_lines();
double_representation::node::locate(&lines, self.source.context_indent, id)
}
/// Describe graph of the given definition.
pub fn from_definition(source:DefinitionInfo) -> GraphInfo {
GraphInfo {source}
}
/// Gets the AST of this graph definition.
pub fn ast(&self) -> Ast {
self.source.ast.clone().into()
}
/// Gets all known nodes in this graph (does not include special pseudo-nodes like graph
/// inputs and outputs).
pub fn nodes(&self) -> Vec<NodeInfo> {
let ast = &self.source.ast;
let body = &ast.rarg;
if let Ok(body_block) = known::Block::try_new(body.clone()) {
let context_indent = self.source.indent();
let lines_iter = body_block.enumerate_non_empty_lines();
let nodes_iter = node::NodeIterator {lines_iter,context_indent};
nodes_iter.map(|n| n.node).collect()
} else if let Some(node) = node::NodeInfo::from_main_line_ast(body) {
// There's no way to attach a documentation comment to an inline node, it consists only
// of the main line.
vec![node]
} else {
// It should not be possible to have empty definition without any nodes but it is
// possible to represent such thing in AST. Anyway, it has no nodes.
vec![]
}
}
/// Gets the list of connections between the nodes in this graph.
pub fn connections(&self) -> Vec<Connection> {
double_representation::connection::list(&self.source.ast.rarg)
}
/// Adds a new node to this graph.
pub fn add_node
(&mut self, node:&NodeInfo, location_hint:LocationHint) -> FallibleResult {
let mut lines = self.source.block_lines();
let last_non_empty = || lines.iter().rposition(|line| line.elem.is_some());
let index = match location_hint {
LocationHint::Start => 0,
LocationHint::End => last_non_empty().map_or(lines.len(),|ix| ix + 1),
LocationHint::After(id) => self.locate_node(id)?.index.last() + 1,
LocationHint::Before(id) => self.locate_node(id)?.index.first(),
};
let elem = Some(node.ast().clone_ref());
let off = 0;
lines.insert(index,BlockLine{elem,off});
if let Some(documentation) = &node.documentation {
let elem = Some(documentation.ast().into());
let line = BlockLine {elem,off};
lines.insert(index,line);
}
self.source.set_block_lines(lines)
}
/// Locates a node with the given id.
pub fn find_node(&self,id:ast::Id) -> Option<NodeInfo> {
self.nodes().iter().find(|node| node.id() == id).cloned()
}
/// After removing last node, we want to insert a placeholder value for definition value.
/// This defines its AST. Currently it is just `Nothing`.
pub fn empty_graph_body() -> Ast {
Ast::cons(constants::keywords::NOTHING).with_new_id()
}
/// Removes the node from graph.
pub fn remove_node(&mut self, node_id:ast::Id) -> FallibleResult {
self.update_node(node_id, |_| None)
}
/// Sets a new state for the node. The id of the described node must denote already existing
/// node.
pub fn set_node(&mut self, node:&NodeInfo) -> FallibleResult {
self.update_node(node.id(), |_| Some(node.clone()))
}
/// Sets a new state for the node. The id of the described node must denote already existing
/// node.
pub fn update_node(&mut self, id:ast::Id, f:impl FnOnce(NodeInfo) -> Option<NodeInfo>) -> FallibleResult {
let LocatedNode{index,node} = self.locate_node(id)?;
let mut lines = self.source.block_lines();
if let Some(updated_node) = f(node) {
lines[index.main_line].elem = Some(updated_node.main_line.ast().clone_ref());
match (index.documentation_line, updated_node.documentation) {
(Some(old_comment_index),None) => {
lines.remove(old_comment_index);
}
(Some(old_comment_index),Some(new_comment)) =>
lines[old_comment_index] = new_comment.block_line(),
(None,Some(new_comment)) =>
lines.insert(index.main_line, new_comment.block_line()),
(None,None) => {},
}
} else {
lines.remove(index.main_line);
if let Some(doc_index) = index.documentation_line {
lines.remove(doc_index);
}
}
if lines.is_empty() {
self.source.set_body_ast(Self::empty_graph_body());
Ok(())
} else {
self.source.set_block_lines(lines)
}
// TODO tests for cases with comments involved
}
/// Sets expression of the given node.
pub fn edit_node(&mut self, node_id:ast::Id, new_expression:Ast) -> FallibleResult {
self.update_node(node_id, |mut node| {
node.set_expression(new_expression);
Some(node)
})
}
#[cfg(test)]
pub fn expect_code(&self, expected_code:impl Str) {
let code = self.source.ast.repr();
assert_eq!(code,expected_code.as_ref());
}
}
// =============
// === Tests ===
// =============
#[cfg(test)]
mod tests {
use super::*;
use crate::double_representation::definition::DefinitionName;
use crate::double_representation::definition::DefinitionProvider;
use crate::double_representation::module::get_definition;
use ast::HasRepr;
use ast::macros::DocumentationCommentInfo;
use ast::test_utils::expect_single_line;
use utils::test::ExpectTuple;
use wasm_bindgen_test::wasm_bindgen_test;
wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);
/// Takes a program with main definition in root and returns main's graph.
fn main_graph(parser:&parser::Parser, program:impl Str) -> GraphInfo {
let module = parser.parse_module(program.into(), default()).unwrap();
let name = DefinitionName::new_plain("main");
let main = module.def_iter().find_by_name(&name).unwrap();
GraphInfo::from_definition(main.item)
}
fn find_graph(parser:&parser::Parser, program:impl Str, name:impl Str) -> GraphInfo {
let module = parser.parse_module(program.into(), default()).unwrap();
let crumbs = name.into().split(".").map(|name| {
DefinitionName::new_plain(name)
}).collect();
let id = Id{crumbs};
let definition = get_definition(&module, &id).unwrap();
GraphInfo::from_definition(definition)
}
#[wasm_bindgen_test]
fn detect_a_node() {
let mut parser = parser::Parser::new_or_panic();
// Each of these programs should have a `main` definition with a single `2+2` node.
let programs = vec![
"main = 2+2",
"main = \n 2+2",
"main = \n foo = 2+2",
"main = \n foo = 2+2\n bar b = 2+2", // `bar` is a definition, not a node
];
for program in programs {
let graph = main_graph(&mut parser, program);
let nodes = graph.nodes();
assert_eq!(nodes.len(), 1);
let node = &nodes[0];
assert_eq!(node.expression().repr(), "2+2");
let _ = node.id(); // just to make sure it is available
}
}
fn new_expression_node(parser:&parser::Parser, expression:&str) -> NodeInfo {
let node_ast = parser.parse(expression.to_string(), default()).unwrap();
let line_ast = expect_single_line(&node_ast).clone();
NodeInfo::from_main_line_ast(&line_ast).unwrap()
}
fn assert_all(nodes:&[NodeInfo], expected:&[NodeInfo]) {
assert_eq!(nodes.len(), expected.len());
for (left,right) in nodes.iter().zip(expected) {
assert_same(left,right)
}
}
fn assert_same(left:&NodeInfo, right:&NodeInfo) {
assert_eq!(left.id(), right.id());
assert_eq!( left.documentation.as_ref().map(DocumentationCommentInfo::to_string)
, right.documentation.as_ref().map(DocumentationCommentInfo::to_string));
assert_eq!(left.main_line.repr(), right.main_line.repr());
}
#[wasm_bindgen_test]
fn add_node_to_graph_with_single_line() {
let program = "main = print \"hello\"";
let parser = parser::Parser::new_or_panic();
let mut graph = main_graph(&parser, program);
let nodes = graph.nodes();
assert_eq!(nodes.len(), 1);
let initial_node = nodes[0].clone();
assert_eq!(initial_node.expression().repr(), "print \"hello\"");
let expr0 = "a + 2";
let expr1 = "b + 3";
let node_to_add0 = new_expression_node(&parser, expr0);
let node_to_add1 = new_expression_node(&parser, expr1);
graph.add_node(&node_to_add0,LocationHint::Start).unwrap();
assert_eq!(graph.nodes().len(), 2);
graph.add_node(&node_to_add1,LocationHint::Before(graph.nodes()[0].id())).unwrap();
let nodes = graph.nodes();
assert_all(nodes.as_slice(), &[node_to_add1, node_to_add0, initial_node]);
}
#[wasm_bindgen_test]
fn add_node_to_graph_with_multiple_lines() {
// TODO [dg] Also add test for binding node when it's possible to update its id.
let program = r#"main =
foo = node
foo a = not_node
print "hello""#;
let mut parser = parser::Parser::new_or_panic();
let mut graph = main_graph(&mut parser, program);
let node_to_add0 = new_expression_node(&mut parser, "4 + 4");
let node_to_add1 = new_expression_node(&mut parser, "a + b");
let node_to_add2 = new_expression_node(&mut parser, "x * x");
let node_to_add3 = new_expression_node(&mut parser, "x / x");
let node_to_add4 = new_expression_node(&mut parser, "2 - 2");
graph.add_node(&node_to_add0, LocationHint::Start).unwrap();
graph.add_node(&node_to_add1, LocationHint::Before(graph.nodes()[0].id())).unwrap();
graph.add_node(&node_to_add2, LocationHint::After(graph.nodes()[1].id())).unwrap();
graph.add_node(&node_to_add3, LocationHint::End).unwrap();
// Node 4 will be added later.
let nodes = graph.nodes();
assert_eq!(nodes.len(), 6);
assert_eq!(nodes[0].expression().repr(), "a + b");
assert_eq!(nodes[0].id(), node_to_add1.id());
// Sic: `node_to_add1` was added at index `0`.
assert_eq!(nodes[1].expression().repr(), "4 + 4");
assert_eq!(nodes[1].id(), node_to_add0.id());
assert_eq!(nodes[2].expression().repr(), "x * x");
assert_eq!(nodes[2].id(), node_to_add2.id());
assert_eq!(nodes[3].expression().repr(), "node");
assert_eq!(nodes[4].expression().repr(), "print \"hello\"");
assert_eq!(nodes[5].expression().repr(), "x / x");
assert_eq!(nodes[5].id(), node_to_add3.id());
let expected_code = r#"main =
a + b
4 + 4
x * x
foo = node
foo a = not_node
print "hello"
x / x"#;
graph.expect_code(expected_code);
let mut graph = find_graph(&mut parser, program, "main.foo");
assert_eq!(graph.nodes().len(), 1);
graph.add_node(&node_to_add4, LocationHint::Start).unwrap();
assert_eq!(graph.nodes().len(), 2);
assert_eq!(graph.nodes()[0].expression().repr(), "2 - 2");
assert_eq!(graph.nodes()[0].id(), node_to_add4.id());
assert_eq!(graph.nodes()[1].expression().repr(), "not_node");
}
#[wasm_bindgen_test]
fn add_node_to_graph_with_blank_line() {
// The trailing `foo` definition is necessary for the blank line after "node2" to be
// included in the `main` block. Otherwise, the block would end on "node2" and the blank
// line would be parented to the module.
let program = r"main =
node2
foo = 5";
let mut parser = parser::Parser::new_or_panic();
let mut graph = main_graph(&mut parser, program);
let id2 = graph.nodes()[0].id();
let node_to_add0 = new_expression_node(&mut parser, "node0");
let node_to_add1 = new_expression_node(&mut parser, "node1");
let node_to_add3 = new_expression_node(&mut parser, "node3");
let node_to_add4 = new_expression_node(&mut parser, "node4");
graph.add_node(&node_to_add0, LocationHint::Start).unwrap();
graph.add_node(&node_to_add1, LocationHint::Before(id2)).unwrap();
graph.add_node(&node_to_add3, LocationHint::After(id2)).unwrap();
graph.add_node(&node_to_add4, LocationHint::End).unwrap();
let expected_code = r"main =
node0
node1
node2
node3
node4
";
// `foo` is not part of expected code, as it belongs to module, not `main` graph.
graph.expect_code(expected_code);
}
#[wasm_bindgen_test]
fn multiple_node_graph() {
let mut parser = parser::Parser::new_or_panic();
let program = r"
main =
## Faux docstring
## Docstring 0
foo = node0
## Docstring 1
# disabled node1
foo a = not_node
## Docstring 2
node2
node3
";
// TODO [mwu]
// Add case like `Int.+ a = not_node` once https://github.com/enso-org/enso/issues/565 is fixed
let graph = main_graph(&mut parser, program);
let nodes = graph.nodes();
assert_eq!(nodes[0].documentation_text(), Some(" Docstring 0".into()));
assert_eq!(nodes[0].ast().repr(), "foo = node0");
assert_eq!(nodes[1].documentation_text(), Some(" Docstring 1".into()));
assert_eq!(nodes[1].ast().repr(), "# disabled node1");
assert_eq!(nodes[2].documentation_text(), Some(" Docstring 2".into()));
assert_eq!(nodes[2].ast().repr(), "node2");
assert_eq!(nodes[3].documentation_text(), None);
assert_eq!(nodes[3].ast().repr(), "node3");
assert_eq!(nodes.len(), 4);
}
#[wasm_bindgen_test]
fn removing_node_from_graph() {
let mut parser = parser::Parser::new_or_panic();
let program = r"
main =
foo = 2 + 2
bar = 3 + 17";
let mut graph = main_graph(&mut parser, program);
let nodes = graph.nodes();
assert_eq!(nodes.len(), 2);
assert_eq!(nodes[0].expression().repr(), "2 + 2");
assert_eq!(nodes[1].expression().repr(), "3 + 17");
graph.remove_node(nodes[0].id()).unwrap();
let nodes = graph.nodes();
assert_eq!(nodes.len(), 1);
assert_eq!(nodes[0].expression().repr(), "3 + 17");
let expected_code = "main =\n bar = 3 + 17";
graph.expect_code(expected_code);
assert!(graph.remove_node(uuid::Uuid::new_v4()).is_err());
graph.expect_code(expected_code);
}
#[wasm_bindgen_test]
fn removing_last_node_from_graph() {
let mut parser = parser::Parser::new_or_panic();
let program = r"
main =
foo = 2 + 2";
let mut graph = main_graph(&mut parser, program);
DEBUG!("aa");
let (node,) = graph.nodes().expect_tuple();
assert_eq!(node.expression().repr(), "2 + 2");
DEBUG!("vv");
graph.remove_node(node.id()).unwrap();
DEBUG!("zz");
let (node,) = graph.nodes().expect_tuple();
assert_eq!(node.expression().repr(), constants::keywords::NOTHING);
graph.expect_code("main = Nothing");
}
#[wasm_bindgen_test]
fn editing_nodes_expression_in_graph() {
let mut parser = parser::Parser::new_or_panic();
let program = r"
main =
foo = 2 + 2
bar = 3 + 17";
let new_expression = parser.parse("print \"HELLO\"".to_string(), default()).unwrap();
let new_expression = expect_single_line(&new_expression).clone();
let mut graph = main_graph(&mut parser, program);
let nodes = graph.nodes();
assert_eq!(nodes.len(), 2);
assert_eq!(nodes[0].expression().repr(), "2 + 2");
assert_eq!(nodes[1].expression().repr(), "3 + 17");
graph.edit_node(nodes[0].id(),new_expression).unwrap();
let nodes = graph.nodes();
assert_eq!(nodes.len(), 2);
assert_eq!(nodes[0].expression().repr(), "print \"HELLO\"");
assert_eq!(nodes[1].expression().repr(), "3 + 17");
let expected_code = r#"main =
foo = print "HELLO"
bar = 3 + 17"#;
graph.expect_code(expected_code);
assert!(graph.edit_node(uuid::Uuid::new_v4(), Ast::var("foo")).is_err());
graph.expect_code(expected_code);
}
}
| 37.302231 | 110 | 0.601958 | 3.125 |
5efd0672d9053fa4e6bd2901e58a4d2d802a8d92 | 6,895 | asm | Assembly | Tools/System/Minnow5DataManipulation.asm | jaredwhitney/os3 | 05e0cda4670da093cc720d0dccbfeb29e788fa0f | [
"MIT"
] | 5 | 2015-02-25T01:28:09.000Z | 2021-05-22T09:03:04.000Z | Tools/System/Minnow5DataManipulation.asm | jaredwhitney/os3 | 05e0cda4670da093cc720d0dccbfeb29e788fa0f | [
"MIT"
] | 38 | 2015-02-10T18:37:11.000Z | 2017-10-03T03:08:50.000Z | Tools/System/Minnow5DataManipulation.asm | jaredwhitney/os3 | 05e0cda4670da093cc720d0dccbfeb29e788fa0f | [
"MIT"
] | 2 | 2016-05-06T22:48:46.000Z | 2017-01-12T19:28:49.000Z | Minnow5.appendDataBlock : ; qword buffer (file) in eax, returns eax=blockptr
methodTraceEnter
pusha
push eax ; interface setting
mov eax, [eax+0x0]
call Minnow5.setInterfaceSmart
pop eax
mov eax, [eax+0x4]
mov dword [.base], eax
call Minnow5.findFreeBlock ; find a free block
mov dword [.block], ebx
mov ebx, [Minnow5._dat] ; figure out what should point to it; make it do so
mov ecx, [.block]
call Minnow5.readBlock
mov edx, [ebx+Minnow5.Block_innerPointer]
cmp edx, null
jne .noInner
mov [ebx+Minnow5.Block_innerPointer], ecx
call Minnow5.writeBlock
jmp .doPlacementEnd
.noInner :
mov eax, edx
.loopGoOn :
call Minnow5.readBlock
cmp dword [ebx+Minnow5.Block_nextPointer], null
je .loopDone
mov eax, [ebx+Minnow5.Block_nextPointer]
jmp .loopGoOn
.loopDone :
mov [ebx+Minnow5.Block_nextPointer], ecx
call Minnow5.writeBlock
.doPlacementEnd :
mov eax, [Minnow5._dat] ; clear a buffer
mov ebx, 0x200
call Buffer.clear
mov dword [eax], "MINF" ; fill it with the proper headers
mov dword [eax+Minnow5.Block_signatureHigh], Minnow5.SIGNATURE_HIGH
mov dword [eax+Minnow5.Block_nextPointer], null
mov ecx, [.base]
mov dword [eax+Minnow5.Block_upperPointer], ecx
mov dword [eax+Minnow5.Block_type], Minnow5.BLOCK_TYPE_DATA
mov ebx, eax ; write it out to the disk
mov eax, [.block]
call Minnow5.writeBlock
popa
mov eax, [.block]
methodTraceLeave
ret
.base :
dd 0x0
.block :
dd 0x0
Minnow5.writeBuffer : ; eax = qword buffer (file), ebx = buffer (data), ecx = buffer size, edx = position in file
methodTraceEnter
pusha
mov [.file], eax
mov [.dataBuffer], ebx
mov [.buffersize], ecx
mov [.writePos], edx
push eax ; interface setting
mov eax, [eax+0x0]
call Minnow5.setInterfaceSmart
pop eax
mov eax, [eax+0x4]
; mov dword [.base], eax
mov ebx, [Minnow5._dat]
call Minnow5.readBlock
mov ecx, [ebx+Minnow5.Block_byteSize]
; mov [.oldsize], ecx
mov edx, [.buffersize]
add edx, [.writePos]
cmp edx, ecx
jbe .noSizeUp
mov dword [ebx+Minnow5.Block_byteSize], edx
.noSizeUp :
call Minnow5.writeBlock
cmp dword [ebx+Minnow5.Block_innerPointer], null
jne .notEmptyFile
mov eax, [.file]
call Minnow5.appendDataBlock
mov eax, [.file]
mov eax, [eax+0x4]
call Minnow5.readBlock
.notEmptyFile :
mov eax, [ebx+Minnow5.Block_innerPointer]
mov ecx, [.writePos]
shr ecx, 9 ; div by 0x200 (ecx is now the start sector)
push ecx
.goToPosLoop :
call Minnow5.readBlock
cmp ecx, 0
je .gotoPosDone
dec ecx
mov edx, [ebx+Minnow5.Block_nextPointer]
cmp edx, null
jne .noMake
mov eax, [.file]
call Minnow5.appendDataBlock
jmp .gtpmdone
.noMake :
mov eax, edx
.gtpmdone :
jmp .goToPosLoop
.gotoPosDone :
mov [.currentBlock], eax ; also already read into [ebx]
mov ecx, [.writePos]
pop edx
shl edx, 9
sub ecx, edx
mov [.startOffs], ecx
; ;
; Now that we know where to start, do the actual copying
; ;
mov ecx, 0x200-Minnow5.Block_data
sub ecx, [.startOffs]
add ebx, [.startOffs]
.innerLoop :
mov eax, [.currentBlock]
mov ebx, [Minnow5._dat]
call Minnow5.readBlock
cmp [.buffersize], ecx
jae .nosmod
mov ecx, [.buffersize]
.nosmod :
sub [.buffersize], ecx
mov eax, [.dataBuffer]
add dword [.dataBuffer], ecx
add ebx, Minnow5.Block_data
; copy from [eax] to [ebx]... size = ecx
.copyLoop :
mov dl, [eax]
mov [ebx], dl
add ebx, 1
add eax, 1
sub ecx, 1
cmp ecx, 0
jg .copyLoop
mov eax, [.currentBlock]
mov ebx, [Minnow5._dat]
call Minnow5.writeBlock
mov eax, [ebx+Minnow5.Block_nextPointer]
cmp eax, null
jne .noMake2
mov eax, [.file]
call Minnow5.appendDataBlock
.noMake2 :
mov [.currentBlock], eax
mov ecx, 0x200-Minnow5.Block_data
cmp dword [.buffersize], 0
jg .innerLoop
popa
methodTraceLeave
ret
.file :
dd 0x0
.startOffs :
dd 0x0
.dataBuffer :
dd 0x0
.buffersize :
dd 0x0
.writePos :
dd 0x0
.currentBlock :
dd 0x0
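; Editor's note: an illustrative caller-side sketch for Minnow5.writeBuffer above, following
; the register convention stated in its header comment (the labels are hypothetical, not part
; of this file):
;	mov eax, fileQwordBuffer	; pointer to the qword buffer describing the target file
;	mov ebx, someDataBuffer		; bytes to write
;	mov ecx, 0x80			; number of bytes
;	mov edx, 0x0			; byte offset within the file
;	call Minnow5.writeBuffer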
Minnow5.readBuffer : ; eax = qword buffer (file), ebx = buffer (data), ecx = buffer size, edx = position in file, ecx out = bytes read
methodTraceEnter
pusha
mov [.dataBuffer], ebx
mov [.buffersize], ecx
mov [.readPos], edx
push eax
mov eax, [eax+0x0]
call Minnow5.setInterfaceSmart
pop eax
mov eax, [eax+0x4]
mov ebx, [Minnow5._dat]
call Minnow5.readBlock
mov ecx, [ebx+Minnow5.Block_byteSize]
; mov [.filesize], ecx
; Check / correct actual read size... return if the offset is at or after the EOF
mov edx, [.buffersize]
add edx, [.readPos]
cmp ecx, edx
jae .nosizelimit
cmp ecx, [.readPos]
ja .readOffsetExistsInFile
mov dword [.bytesread], 0
jmp .readLoopDone
.readOffsetExistsInFile :
sub ecx, [.readPos]
mov [.buffersize], ecx
mov [.bytesread], ecx
.nosizelimit :
; Navigate to the read offset
mov eax, [ebx+Minnow5.Block_innerPointer]
mov ecx, [.readPos]
shr ecx, 9 ; div by 0x200 (ecx is now the start sector)
push ecx
.goToPosLoop :
call Minnow5.readBlock
cmp ecx, 0
je .gotoPosDone
dec ecx
mov eax, [ebx+Minnow5.Block_nextPointer]
jmp .goToPosLoop
.gotoPosDone : ; the first block has already been read into [ebx]
; Calculate the starting offset
mov ecx, [.readPos]
pop edx
shl edx, 9
sub ecx, edx
mov [.offs], ecx
; Figure out values for the first read (the offset one) then jump into the loop
mov ecx, [ebx+Minnow5.Block_nextPointer] ; ready next sector
mov [.next], ecx
mov ecx, 0x200-Minnow5.Block_data ; get read size
sub ecx, [.offs]
cmp [.buffersize], ecx ; fix read size if it should be an incomplete read
jge .startNoReadLimit
mov ecx, [.buffersize]
.startNoReadLimit :
sub [.buffersize], ecx
add ebx, [.offs]
jmp .readLoopEntryPoint
; Read / copy the actual data
.readLoop :
cmp dword [.buffersize], 0
jle .readLoopDone
mov ebx, [Minnow5._dat]
call Minnow5.readBlock
mov ecx, [ebx+Minnow5.Block_nextPointer]
mov [.next], ecx
mov ecx, 0x200-Minnow5.Block_data
cmp [.buffersize], ecx
jge .noReadLimit
mov ecx, [.buffersize]
.noReadLimit :
sub [.buffersize], ecx
.readLoopEntryPoint :
mov eax, [.dataBuffer]
add [.dataBuffer], ecx
add ebx, Minnow5.Block_data
; copy from [ebx] to [eax]... size = ecx
.copyLoop :
mov dl, [ebx]
mov [eax], dl
inc eax
inc ebx
dec ecx
cmp ecx, 0
jg .copyLoop
mov eax, [.next]
jmp .readLoop
.readLoopDone :
popa
mov ecx, [.bytesread]
methodTraceLeave
ret
.dataBuffer :
dd 0x0
.buffersize :
dd 0x0
.readPos :
dd 0x0
.bytesread :
dd 0x0
.offs :
dd 0x0
.next :
dd 0x0 | 20.893939 | 134 | 0.655112 | 3.078125 |
0e3a1fa6911fb97236da1ed645fbe345fb0b5239 | 5,205 | lua | Lua | lua/starfall/preprocessor.lua | Mijyuoon/starfall | f5faa0750c9b9f9848a9e68d0d7b1753210e9db0 | [
"BSD-3-Clause"
] | null | null | null | lua/starfall/preprocessor.lua | Mijyuoon/starfall | f5faa0750c9b9f9848a9e68d0d7b1753210e9db0 | [
"BSD-3-Clause"
] | null | null | null | lua/starfall/preprocessor.lua | Mijyuoon/starfall | f5faa0750c9b9f9848a9e68d0d7b1753210e9db0 | [
"BSD-3-Clause"
] | 2 | 2021-04-20T14:50:10.000Z | 2021-07-07T18:27:41.000Z | -------------------------------------------------------------------------------
-- SF Preprocessor.
-- Processes code for compile time directives.
-------------------------------------------------------------------------------
-- TODO: Make an @include-only parser
SF.Preprocessor = {}
SF.Preprocessor.directives = {}
--- Sets a global preprocessor directive.
-- @param directive The directive to set.
-- @param func The callback. Takes the directive arguments, the file name, and instance.data
function SF.Preprocessor.SetGlobalDirective(directive, func)
SF.Preprocessor.directives[directive] = func
end
local function FindComments( line )
local ret, count, pos, found = {}, 0, 1
repeat
found = line:find( '["%-%[%]]', pos )
if (found) then -- We found something
local oldpos = pos
local char = line:sub(found,found)
if char == "-" then
if line:sub(found,found+1) == "--" then
-- Comment beginning
if line:sub(found,found+3) == "--[[" then
-- Block Comment beginning
count = count + 1
ret[count] = {type = "start", pos = found}
pos = found + 4
else
-- Line comment beginning
count = count + 1
ret[count] = {type = "line", pos = found}
pos = found + 2
end
else
pos = found + 1
end
elseif char == "[" then
local level = line:sub(found+1):match("^(=*)")
if level then level = string.len(level) else level = 0 end
if line:sub(found+level+1, found+level+1) == "[" then
-- Block string start
count = count + 1
ret[count] = {type = "stringblock", pos = found, level = level}
pos = found + level + 2
else
pos = found + 1
end
elseif char == "]" then
local level = line:sub(found+1):match("^(=*)")
if level then level = string.len(level) else level = 0 end
if line:sub(found+level+1,found+level+1) == "]" then
-- Ending
count = count + 1
ret[count] = {type = "end", pos = found, level = level}
pos = found + level + 2
else
pos = found + 1
end
elseif char == "\"" then
if line:sub(found-1,found-1) == "\\" and line:sub(found-2,found-1) ~= "\\\\" then
-- Escaped character
pos = found+1
else
-- String
count = count + 1
ret[count] = {type = "string", pos = found}
pos = found + 1
end
end
if oldpos == pos then error("Regex found something, but nothing handled it") end
end
until not found
return ret, count
end
--- Parses a source file for directives.
-- @param filename The file name of the source code
-- @param source The source code to parse.
-- @param directives A table of additional directives to use.
-- @param data The data table passed to the directives.
function SF.Preprocessor.ParseDirectives(filename, source, directives, data)
local ending = nil
local endingLevel = nil
local str = source
while str ~= "" do
local line
line, str = string.match(str,"^([^\n]*)\n?(.*)$")
for _,comment in ipairs(FindComments(line)) do
if ending then
if comment.type == ending then
if endingLevel then
if comment.level and comment.level == endingLevel then
ending = nil
endingLevel = nil
end
else
ending = nil
end
end
elseif comment.type == "start" then
ending = "end"
elseif comment.type == "string" then
ending = "string"
elseif comment.type == "stringblock" then
ending = "end"
endingLevel = comment.level
elseif comment.type == "line" then
local directive, args = string.match(line,"--@([^ ]+)%s*(.*)$")
local func = directives[directive] or SF.Preprocessor.directives[directive]
if func then
func(args, filename, data)
end
end
end
if ending == "newline" then ending = nil end
end
end
local function directive_include(args, filename, data)
if not data.includes then data.includes = {} end
if not data.includes[filename] then data.includes[filename] = {} end
if args:sub(-4,-1) ~= ".txt" then
args = args .. ".txt"
end
local incl = data.includes[filename]
incl[#incl+1] = args
end
SF.Preprocessor.SetGlobalDirective("include", directive_include)
local function directive_exclude(args, filename, data)
if not data.excludes then data.excludes = {} end
if args:sub(-4,-1) ~= ".txt" then
args = args .. ".txt"
end
data.excludes[args] = true
end
SF.Preprocessor.SetGlobalDirective("exclude", directive_exclude)
local function directive_name(args, filename, data)
if not data.scriptnames then data.scriptnames = {} end
data.scriptnames[filename] = args
end
SF.Preprocessor.SetGlobalDirective("name", directive_name)
local function directive_sharedscreen(args, filename, data)
if not data.sharedscreen then data.sharedscreen = true end
end
SF.Preprocessor.SetGlobalDirective("sharedscreen", directive_sharedscreen)
local function directive_moonscript(args, filename, data)
if not data.moonscript then data.moonscript = true end
end
SF.Preprocessor.SetGlobalDirective("moonscript", directive_moonscript)
local function directive_nosandbox(args, filename, data)
if not data.nosandbox then data.nosandbox = true end
end
SF.Preprocessor.SetGlobalDirective("nosandbox", directive_nosandbox)
| 29.913793 | 92 | 0.640538 | 3.234375 |
95bb40c2fdede4768fb90c9d98aab2c147822ec5 | 43,938 | sql | SQL | app/database/scripts/wellsearch/wells_replication_stored_functions.sql | cedar-technologies/gwells | 9023034698a9c25e5a49193242678c1aee3c6f4d | [
"Apache-2.0"
] | 1 | 2020-01-29T22:42:40.000Z | 2020-01-29T22:42:40.000Z | app/database/scripts/wellsearch/wells_replication_stored_functions.sql | cedar-technologies/gwells | 9023034698a9c25e5a49193242678c1aee3c6f4d | [
"Apache-2.0"
] | 1 | 2018-05-02T05:28:33.000Z | 2018-05-09T15:58:07.000Z | app/database/scripts/wellsearch/wells_replication_stored_functions.sql | cedar-technologies/gwells | 9023034698a9c25e5a49193242678c1aee3c6f4d | [
"Apache-2.0"
] | 1 | 2018-05-02T23:56:48.000Z | 2018-05-02T23:56:48.000Z | /*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
--
-- PostgreSQL script to create stored functions that replicate legacy
-- WELLS data from production Oracle Database, to the GWELLS application
-- database.
--
-- These stored functions will not be needed once we cut over
-- from legacy WELLS to the new GWELLS application.
--
-- DESCRIPTION
-- Define the SQL INSERT command that copies from the legacy WELLS table to a temporary
-- ETL or 'transformation' (i.e. *_xform) table using dynamic SQL to support the optional
-- SQL subset clause.
--
-- PARAMETERS
-- _subset_ind Boolean indicator flag: 'true' to append additional WHERE clause, limiting
-- the copy to a smaller subset
-- 'false' to copy ALL data
-- RETURNS
-- None as this is a stored procedure
--
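-- Example invocation once the function exists (an editor's sketch, not part of the
-- original script):
--   SELECT populate_xform(true);   -- replicate only the well_tag_number subset
--   SELECT populate_xform(false);  -- replicate all legacy WELLS rows
--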
CREATE OR REPLACE FUNCTION populate_xform(
_subset_ind boolean DEFAULT true) RETURNS void AS $$
DECLARE
xform_rows integer;
sql_stmt text;
subset_clause text := 'AND wells.well_tag_number between 100001 and 113567';
insert_sql text := 'INSERT INTO xform_well (
well_tag_number ,
well_id ,
well_guid ,
acceptance_status_code ,
owner_full_name ,
owner_mailing_address ,
owner_city ,
owner_postal_code ,
street_address ,
city ,
legal_lot ,
legal_plan ,
legal_district_lot ,
legal_block ,
legal_section ,
legal_township ,
legal_range ,
legal_pid ,
well_location_description ,
identification_plate_number ,
diameter ,
total_depth_drilled ,
finished_well_depth ,
static_water_level ,
well_cap_type ,
well_disinfected ,
well_yield ,
intended_water_use_code ,
land_district_code ,
province_state_code ,
well_class_code ,
well_subclass_guid ,
well_yield_unit_code ,
latitude ,
longitude ,
ground_elevation ,
well_orientation ,
other_drilling_method ,
drilling_method_code ,
ground_elevation_method_code ,
well_status_code ,
observation_well_number ,
obs_well_status_code ,
licenced_status_code ,
alternative_specifications_ind ,
construction_start_date ,
construction_end_date ,
alteration_start_date ,
alteration_end_date ,
decommission_start_date ,
decommission_end_date ,
drilling_company_guid ,
final_casing_stick_up ,
artesian_flow ,
artesian_pressure ,
bedrock_depth ,
water_supply_system_name ,
water_supply_system_well_name ,
well_identification_plate_attached,
ems ,
screen_intake_method_code ,
screen_type_code ,
screen_material_code ,
screen_opening_code ,
screen_bottom_code ,
utm_zone_code ,
utm_northing ,
utm_easting ,
utm_accuracy_code ,
bcgs_id ,
development_method_code ,
development_duration ,
surface_seal_method_code ,
surface_seal_material_code,
surface_seal_length ,
surface_seal_thickness ,
backfill_type ,
backfill_depth ,
liner_material_code ,
decommission_reason ,
decommission_method_code ,
sealant_material ,
backfill_material ,
decommission_details ,
comments ,
create_date ,
update_date ,
create_user ,
update_user)
SELECT
wells.well_tag_number ,
wells.well_id ,
gen_random_uuid() ,
wells.acceptance_status_code AS acceptance_status_code ,
concat_ws('' '', owner.giVEN_NAME,OWNER.SURNAME) AS owner_full_name ,
concat_ws('' '',OWNER.STREET_NUMBER,STREET_NAME) AS owner_mailing_address,
owner.city AS owner_city ,
owner.postal_code AS owner_postal_code ,
wells.site_street AS street_address ,
wells.site_area AS city ,
wells.lot_number AS legal_lot ,
wells.legal_plan AS legal_plan ,
wells.legal_district_lot AS legal_district_lot ,
wells.legal_block AS legal_block ,
wells.legal_section AS legal_section ,
wells.legal_township AS legal_township ,
wells.legal_range AS legal_range ,
to_char(wells.pid,''fm000000000'') AS legal_pid ,
wells.well_location AS well_location_description ,
wells.well_identification_plate_no AS identification_plate_number ,
wells.diameter AS diameter ,
wells.total_depth_drilled AS total_depth_drilled ,
wells.depth_well_drilled AS finished_well_depth ,
wells.water_depth ,
wells.type_of_well_cap ,
CASE wells.well_disinfected_ind
WHEN ''Y'' THEN TRUE
WHEN ''N'' THEN FALSE
ELSE FALSE
END AS well_disinfected ,
wells.yield_value AS well_yield ,
CASE wells.well_use_code
WHEN ''OTH'' THEN ''OTHER''
ELSE wells.well_use_code
END AS intended_water_use_code ,
wells.legal_land_district_code as land_district_code ,
CASE owner.province_state_code
WHEN ''WASH_STATE'' THEN ''WA''
ELSE owner.province_state_code
END AS province_state_code ,
wells.class_of_well_codclassified_by AS well_class_code ,
subclass.well_subclass_guid ,
CASE wells.yield_unit_code
WHEN ''USGM'' THEN ''USGPM''
ELSE wells.yield_unit_code
END AS well_yield_unit_code ,
wells.latitude ,
CASE
WHEN wells.longitude > 0 THEN wells.longitude * -1
ELSE wells.longitude
END AS longitude ,
wells.elevation AS ground_elevation ,
CASE wells.orientation_of_well_code
WHEN ''HORIZ'' THEN false
ELSE true
END AS well_orientation ,
null AS other_drilling_method, -- placeholder as it is brand new content
wells.drilling_method_code AS drilling_method_code, -- supersedes CONSTRUCTION_METHOD_CODE
wells.ground_elevation_method_code AS ground_elevation_method_code,
CASE wells.status_of_well_code
WHEN ''UNK'' THEN null -- ''OTHER''
ELSE wells.status_of_well_code
END AS well_status_code ,
to_char(wells.observation_well_number,''fm000'') AS observation_well_number,
CASE wells.ministry_observation_well_stat
WHEN ''Abandoned'' THEN ''Inactive''
ELSE wells.ministry_observation_well_stat
END AS obs_well_status_code,
wells.well_licence_general_status AS licenced_status_code ,
CASE wells.alternative_specifications_ind
WHEN ''N'' THEN false
WHEN ''Y'' THEN true
ELSE null
END AS alternative_specifications_ind ,
wells.construction_start_date AT TIME ZONE ''GMT'' ,
wells.construction_end_date AT TIME ZONE ''GMT'' ,
wells.alteration_start_date AT TIME ZONE ''GMT'' ,
wells.alteration_end_date AT TIME ZONE ''GMT'' ,
wells.closure_start_date AT TIME ZONE ''GMT'' ,
wells.closure_end_date AT TIME ZONE ''GMT'' ,
drilling_company.drilling_company_guid ,
wells.final_casing_stick_up ,
wells.artesian_flow_value ,
wells.artesian_pressure ,
wells.bedrock_depth ,
wells.water_supply_system_name ,
wells.water_supply_well_name ,
wells.where_plate_attached ,
wells.chemistry_site_id ,
wells.screen_intake_code ,
CASE wells.screen_type_code
WHEN ''UNK'' THEN null
ELSE wells.screen_type_code
END AS screen_type_code ,
CASE wells.screen_material_code
WHEN ''UNK'' THEN ''OTHER''
ELSE wells.screen_material_code
END AS screen_material_code ,
wells.screen_opening_code ,
wells.screen_bottom_code ,
wells.utm_zone_code ,
wells.utm_north ,
wells.utm_east ,
wells.utm_accuracy_code ,
wells.bcgs_id ,
wells.development_method_code ,
wells.development_hours ,
CASE wells.surface_seal_method_code
WHEN ''UNK'' THEN null
ELSE wells.surface_seal_method_code
END AS surface_seal_method_code ,
CASE wells.surface_seal_material_code
WHEN ''UNK'' THEN ''OTHER''
ELSE wells.surface_seal_material_code
END AS surface_seal_material_code ,
wells.surface_seal_depth ,
wells.surface_seal_thickness ,
wells.backfill_type ,
wells.backfill_depth ,
wells.liner_material_code AS liner_material_code ,
wells.closure_reason ,
wells.closure_method_code ,
wells.sealant_material ,
wells.backfill_material ,
wells.closure_details ,
wells.general_remarks ,
wells.when_created ,
COALESCE(wells.when_updated,wells.when_created) ,
wells.who_created ,
COALESCE(wells.who_updated,wells.who_created)
FROM wells.wells_wells wells LEFT OUTER JOIN wells.wells_owners owner ON owner.owner_id=wells.owner_id
LEFT OUTER JOIN drilling_company drilling_company ON UPPER(wells.driller_company_code)=UPPER(drilling_company.drilling_company_code)
LEFT OUTER JOIN well_subclass_code subclass ON UPPER(wells.subclass_of_well_classified_by)=UPPER(subclass.well_subclass_code)
AND subclass.well_class_code = wells.class_of_well_codclassified_by
WHERE wells.acceptance_status_code NOT IN (''PENDING'', ''REJECTED'', ''NEW'') ';
BEGIN
raise notice 'Starting populate_xform() procedure...';
DROP TABLE IF EXISTS xform_well;
CREATE unlogged TABLE IF NOT EXISTS xform_well (
well_tag_number integer,
well_id bigint,
well_guid uuid,
acceptance_status_code character varying(10),
owner_full_name character varying(200),
owner_mailing_address character varying(100),
owner_city character varying(100),
owner_postal_code character varying(10),
street_address character varying(100),
city character varying(50),
legal_lot character varying(10),
legal_plan character varying(20),
legal_district_lot character varying(20),
legal_block character varying(10),
legal_section character varying(10),
legal_township character varying(20),
legal_range character varying(10),
legal_pid character varying(9),
well_location_description character varying(500),
identification_plate_number integer,
diameter character varying(9),
total_depth_drilled numeric(7,2),
finished_well_depth numeric(7,2),
static_water_level numeric(7,2),
well_cap_type character varying(40),
well_disinfected boolean,
well_yield numeric(8,3),
intended_water_use_code character varying(10),
land_district_code character varying(10),
province_state_code character varying(10),
well_class_code character varying(10),
well_subclass_guid uuid,
well_yield_unit_code character varying(10),
latitude numeric(8,6),
longitude numeric(9,6),
ground_elevation numeric(10,2),
well_orientation boolean,
other_drilling_method character varying(50),
drilling_method_code character varying(10),
ground_elevation_method_code character varying(10),
well_status_code character varying(10),
observation_well_number character varying(3),
obs_well_status_code character varying(10),
licenced_status_code character varying(10),
alternative_specifications_ind boolean,
construction_start_date timestamp with time zone,
construction_end_date timestamp with time zone,
alteration_start_date timestamp with time zone,
alteration_end_date timestamp with time zone,
decommission_start_date timestamp with time zone,
decommission_end_date timestamp with time zone,
drilling_company_guid uuid,
final_casing_stick_up integer,
artesian_flow numeric(7,2),
artesian_pressure numeric(5,2),
bedrock_depth numeric(7,2),
well_identification_plate_attached character varying(500),
water_supply_system_name character varying(80),
water_supply_system_well_name character varying(80),
ems character varying(10),
screen_intake_method_code character varying(10),
screen_type_code character varying(10),
screen_material_code character varying(10),
screen_opening_code character varying(10),
screen_bottom_code character varying(10),
utm_zone_code character varying(10),
utm_northing integer,
utm_easting integer,
utm_accuracy_code character varying(10),
bcgs_id bigint,
development_method_code character varying(10),
development_duration integer,
yield_estimation_method_code character varying(10),
surface_seal_method_code character varying(10),
surface_seal_material_code character varying(10),
surface_seal_length numeric(5,2),
surface_seal_thickness numeric(7,2),
backfill_type character varying(250),
backfill_depth numeric(7,2),
liner_material_code character varying(10),
decommission_reason character varying(250),
decommission_method_code character varying(10),
sealant_material character varying(100),
backfill_material character varying(100),
decommission_details character varying(250),
comments character varying(255),
create_date timestamp with time zone,
update_date timestamp with time zone,
create_user character varying(30),
update_user character varying(30)
);
raise notice 'Created xform_well ETL table';
IF _subset_ind THEN
sql_stmt := insert_sql || ' ' || subset_clause;
ELSE
sql_stmt := insert_sql;
END IF;
raise notice '... transforming wells data (= ACCEPTED) via xform_well ETL table...';
EXECUTE sql_stmt;
SELECT count(*) from xform_well into xform_rows;
raise notice '... % rows loaded into the xform_well table', xform_rows;
raise notice 'Finished populate_xform() procedure.';
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION populate_xform (boolean) IS 'Load ETL Transform Table from legacy Oracle Database using Foreign Data Wrapper.';
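-- Example (illustrative only, not part of the original script): run the transform step
-- on its own against the legacy subset and spot-check the ETL rows before loading the
-- main tables.
--
--   SELECT populate_xform(true);
--   SELECT well_tag_number, well_class_code, well_status_code FROM xform_well LIMIT 10;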
-- DESCRIPTION
-- Define the SQL INSERT command that copies from the legacy WELLS_BCGS_NUMBERS table to
-- the analogous GWELLS lookup table (bcgs_number). Note that the BCGS table isn't strictly
-- a static lookup table as it will be updated with new BCGS mapsheets as they become
-- referenced from new wells in the system. This happens several times a year.
--
-- PARAMETERS
-- None
--
-- RETURNS
-- None as this is a stored procedure
--
CREATE OR REPLACE FUNCTION migrate_bcgs() RETURNS void AS $$
DECLARE
row_count integer;
BEGIN
  raise notice '...importing wells_bcgs_numbers data';
INSERT INTO bcgs_number (
create_user, create_date, update_user, update_date, bcgs_id, bcgs_number
)
SELECT
who_created ,when_created ,who_updated ,when_updated, bcgs_id, bcgs_number
FROM WELLS.WELLS_BCGS_NUMBERS
ORDER BY BCGS_NUMBER;
raise notice '...BCGS data imported into the bcgs_number table';
SELECT count(*) from bcgs_number into row_count;
raise notice '% rows loaded into the bcgs_number table', row_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION migrate_bcgs () IS 'Load BCGS numbers from legacy Oracle Database using Foreign Data Wrapper.';
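-- Example (illustrative only, not part of the original script): the mapsheet numbers can
-- be refreshed on demand, mirroring the TRUNCATE/reload sequence used by
-- db_replicate_step1() further below.
--
--   TRUNCATE TABLE bcgs_number CASCADE;
--   SELECT migrate_bcgs();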
-- DESCRIPTION
-- Define the SQL INSERT command that copies from the temporary ETL or 'transformation'
-- (i.e. *_xform) table to the main GWELLS 'well' table.
--
-- PARAMETERS
-- None
--
-- RETURNS
-- None as this is a stored procedure
--
CREATE OR REPLACE FUNCTION populate_well() RETURNS void AS $$
DECLARE
row_count integer;
BEGIN
raise notice '... importing transformed WELLS data into the GWELLS main ''well'' table';
INSERT INTO well (
well_tag_number ,
well_guid ,
owner_full_name ,
owner_mailing_address ,
owner_city ,
owner_postal_code ,
street_address ,
city ,
legal_lot ,
legal_plan ,
legal_district_lot ,
legal_block ,
legal_section ,
legal_township ,
legal_range ,
land_district_code ,
legal_pid ,
well_location_description ,
identification_plate_number ,
diameter ,
total_depth_drilled ,
finished_well_depth ,
static_water_level ,
well_cap_type ,
well_disinfected ,
well_yield ,
intended_water_use_code ,
province_state_code ,
well_class_code ,
well_subclass_guid ,
well_yield_unit_code ,
latitude ,
longitude ,
ground_elevation ,
well_orientation ,
other_drilling_method ,
drilling_method_code ,
ground_elevation_method_code,
create_date ,
update_date ,
create_user ,
update_user ,
surface_seal_length ,
surface_seal_thickness ,
surface_seal_method_code ,
surface_seal_material_code ,
backfill_type ,
backfill_depth ,
liner_material_code ,
well_status_code ,
observation_well_number ,
obs_well_status_code ,
licenced_status_code ,
other_screen_bottom ,
other_screen_material ,
development_notes ,
water_quality_colour ,
water_quality_odour ,
alternative_specs_submitted ,
construction_start_date ,
construction_end_date ,
alteration_start_date ,
alteration_end_date ,
decommission_start_date ,
decommission_end_date ,
drilling_company_guid ,
final_casing_stick_up ,
artesian_flow ,
artesian_pressure ,
bedrock_depth ,
water_supply_system_name ,
water_supply_system_well_name,
well_identification_plate_attached,
ems ,
screen_intake_method_code,
screen_type_code ,
screen_material_code ,
screen_opening_code ,
screen_bottom_code ,
utm_zone_code ,
utm_northing ,
utm_easting ,
utm_accuracy_code ,
bcgs_id ,
development_method_code ,
development_hours ,
decommission_reason ,
decommission_method_code ,
sealant_material ,
backfill_material ,
decommission_details ,
comments
)
SELECT
xform.well_tag_number ,
gen_random_uuid() ,
COALESCE(xform.owner_full_name,' ') ,
COALESCE(xform.owner_mailing_address, ' ') ,
COALESCE(xform.owner_city, ' ') ,
COALESCE(xform.owner_postal_code , ' ') ,
COALESCE(xform.street_address , ' ') ,
COALESCE(xform.city , ' ') ,
COALESCE(xform.legal_lot , ' ') ,
COALESCE(xform.legal_plan , ' ') ,
COALESCE(xform.legal_district_lot, ' ') ,
COALESCE(xform.legal_block , ' ') ,
COALESCE(xform.legal_section , ' ') ,
COALESCE(xform.legal_township , ' ') ,
COALESCE(xform.legal_range , ' ') ,
xform.land_district_code ,
xform.legal_pid ,
COALESCE(xform.well_location_description,' '),
xform.identification_plate_number ,
COALESCE(xform.diameter, ' ') ,
xform.total_depth_drilled ,
xform.finished_well_depth ,
xform.static_water_level ,
xform.well_cap_type ,
xform.well_disinfected ,
xform.well_yield ,
xform.intended_water_use_code ,
COALESCE(xform.province_state_code,'OTHER') ,
xform.well_class_code ,
xform.well_subclass_guid ,
xform.well_yield_unit_code ,
xform.latitude ,
xform.longitude ,
xform.ground_elevation ,
xform.well_orientation ,
NULL ,
xform.drilling_method_code ,
xform.ground_elevation_method_code ,
xform.create_date ,
xform.update_date ,
xform.create_user ,
xform.update_user ,
xform.surface_seal_length ,
xform.surface_seal_thickness ,
xform.surface_seal_method_code ,
xform.surface_seal_material_code ,
xform.backfill_type ,
xform.backfill_depth ,
xform.liner_material_code ,
xform.well_status_code ,
xform.observation_well_number ,
xform.obs_well_status_code ,
xform.licenced_status_code ,
'' ,
'' ,
'' ,
'' ,
'' ,
false ,
xform.construction_start_date ,
xform.construction_end_date ,
xform.alteration_start_date ,
xform.alteration_end_date ,
xform.decommission_start_date ,
xform.decommission_end_date ,
xform.drilling_company_guid ,
xform.final_casing_stick_up ,
xform.artesian_flow ,
xform.artesian_pressure ,
xform.bedrock_depth ,
xform.water_supply_system_name ,
xform.water_supply_system_well_name ,
xform.well_identification_plate_attached,
xform.ems ,
xform.screen_intake_method_code ,
xform.screen_type_code ,
xform.screen_material_code ,
xform.screen_opening_code ,
xform.screen_bottom_code ,
xform.utm_zone_code ,
xform.utm_northing ,
xform.utm_easting ,
xform.utm_accuracy_code ,
xform.bcgs_id ,
xform.development_method_code ,
xform.development_duration ,
xform.decommission_reason ,
xform.decommission_method_code ,
xform.sealant_material ,
xform.backfill_material ,
xform.decommission_details ,
xform.comments
FROM xform_well xform;
raise notice '...xform data imported into the well table';
SELECT count(*) from well into row_count;
raise notice '% rows loaded into the well table', row_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION populate_well () IS 'Transfer from local XFORM ETL table into well.';
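-- Example check (illustrative only, not part of the original script): after
-- populate_xform() and populate_well(), the ETL table and the target table should hold
-- the same number of rows.
--
--   SELECT (SELECT count(*) FROM xform_well) AS xform_rows,
--          (SELECT count(*) FROM well)       AS well_rows;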
-- DESCRIPTION
-- Define the SQL INSERT command that copies the screen details from the legacy
-- database to the analogous GWELLS table (screen).
--
-- PARAMETERS
-- None
--
-- RETURNS
-- None as this is a stored procedure
--
CREATE OR REPLACE FUNCTION migrate_screens() RETURNS void AS $$
DECLARE
row_count integer;
BEGIN
raise notice '...importing wells_screens data';
INSERT INTO screen(
screen_guid ,
filing_number ,
well_tag_number ,
screen_from ,
screen_to ,
internal_diameter ,
screen_assembly_type_code ,
slot_size ,
create_date ,
update_date ,
create_user ,
update_user)
SELECT
gen_random_uuid() ,
null ,
xform.well_tag_number ,
screens.screen_from ,
screens.screen_to ,
screens.screen_internal_diameter,
CASE screens.screen_assembly_type_code
WHEN 'L' THEN 'LEAD'
WHEN 'K & Riser' THEN 'K_RISER'
ELSE screens.screen_assembly_type_code
END AS screen_assembly_type_code,
screens.screen_slot_size ,
screens.when_created ,
screens.when_updated ,
screens.who_created ,
screens.who_updated
FROM wells.wells_screens screens
INNER JOIN xform_well xform ON xform.well_id=screens.well_id;
raise notice '...wells_screens data imported';
SELECT count(*) from screen into row_count;
raise notice '% rows loaded into the screen table', row_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION migrate_screens () IS 'Load Screen details, only for the wells that have been replicated.';
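-- Example ordering (illustrative only, not part of the original script): the INNER JOIN
-- to xform_well means this load only picks up screens for wells present in the ETL table,
-- so it must run after populate_xform().
--
--   SELECT populate_xform(true);
--   SELECT migrate_screens();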
-- DESCRIPTION
-- Define the SQL INSERT command that copies the production data from the legacy
-- database to the analogous GWELLS table (production_data).
--
-- PARAMETERS
-- None
--
-- RETURNS
-- None as this is a stored procedure
--
CREATE OR REPLACE FUNCTION migrate_production() RETURNS void AS $$
DECLARE
row_count integer;
BEGIN
raise notice '...importing wells_production_data data';
INSERT INTO production_data(
production_data_guid ,
filing_number ,
well_tag_number ,
yield_estimation_method_code ,
yield_estimation_rate ,
yield_estimation_duration ,
well_yield_unit_code ,
static_level ,
drawdown ,
hydro_fracturing_performed ,
create_user, create_date ,
update_user, update_date
)
SELECT
gen_random_uuid() ,
null ,
xform.well_tag_number ,
CASE production_data.yield_estimated_method_code
WHEN 'UNK' THEN null
ELSE production_data.yield_estimated_method_code
END AS yield_estimation_method_code,
production_data.test_rate ,
production_data.test_duration ,
CASE production_data.test_rate_units_code
WHEN 'USGM' THEN 'USGPM'
ELSE production_data.test_rate_units_code
END AS well_yield_unit_code ,
production_data.static_level ,
production_data.net_drawdown ,
false ,
production_data.who_created, production_data.when_created,
COALESCE(production_data.who_updated,production_data.who_created) ,
COALESCE(production_data.when_updated,production_data.when_created)
FROM wells.wells_production_data production_data
INNER JOIN xform_well xform ON production_data.well_id=xform.well_id;
raise notice '...wells_production_data data imported';
SELECT count(*) from production_data into row_count;
raise notice '% rows loaded into the production_data table', row_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION migrate_production () IS 'Load Production Data, only for the wells that have been replicated.';
-- DESCRIPTION
-- Define the SQL INSERT command that copies the casings data from the legacy
-- database to the analogous GWELLS table (casing), referencing well tag number that
-- continues to be used in the new system (table join to the transformation table).
--
-- PARAMETERS
-- None
--
-- RETURNS
-- None as this is a stored procedure
--
CREATE OR REPLACE FUNCTION migrate_casings() RETURNS void AS $$
DECLARE
row_count integer;
BEGIN
raise notice '...importing wells_casings data';
INSERT INTO casing(
casing_guid ,
filing_number ,
well_tag_number ,
casing_from ,
casing_to ,
diameter ,
casing_material_code,
wall_thickness ,
drive_shoe ,
create_date, update_date, create_user, update_user
)
SELECT
gen_random_uuid() ,
null ,
xform.well_tag_number ,
casings.casing_from ,
casings.casing_to ,
casings.casing_size ,
CASE casings.casing_material_code
WHEN 'UNK' THEN null
ELSE casings.casing_material_code
END AS casing_material_code ,
casings.casing_wall ,
CASE casings.casing_drive_shoe_ind
WHEN '' THEN null
WHEN 'Y' THEN TRUE
WHEN 'N' THEN FALSE
END ,
casings.when_created, casings.when_updated, casings.who_created, casings.who_updated
FROM wells.wells_casings casings
INNER JOIN xform_well xform ON xform.well_id=casings.well_id;
raise notice '...wells_casings data imported';
SELECT count(*) from casing into row_count;
raise notice '% rows loaded into the casing table', row_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION migrate_casings () IS 'Load Casing details, only for the wells that have been replicated.';
-- DESCRIPTION
-- Define the SQL INSERT command that copies the perforation data from the legacy
-- database to the analogous GWELLS table (perforation), referencing well tag number that
-- continues to be used in the new system (table join to the transformation table).
--
-- NOTE: The legacy data has thousands of rows with 'empty' columns; these are
-- filtered out via the SQL WHERE clause.
--
-- PARAMETERS
-- None
--
-- RETURNS
-- None as this is a stored procedure
--
CREATE OR REPLACE FUNCTION migrate_perforations() RETURNS void AS $$
DECLARE
row_count integer;
BEGIN
raise notice '...importing wells_perforations data';
INSERT INTO perforation(
perforation_guid ,
well_tag_number ,
liner_thickness ,
liner_diameter ,
liner_from ,
liner_to ,
liner_perforation_from ,
liner_perforation_to ,
create_user, create_date, update_user, update_date
)
SELECT
gen_random_uuid() ,
xform.well_tag_number ,
perforations.liner_thickness ,
perforations.liner_diameter ,
perforations.liner_from ,
perforations.liner_to ,
perforations.liner_perforation_from,
perforations.liner_perforation_to ,
perforations.who_created, perforations.when_created, perforations.who_updated, perforations.when_updated
FROM wells.wells_perforations perforations
INNER JOIN xform_well xform ON perforations.well_id=xform.well_id
WHERE NOT (liner_from is null
AND liner_to IS NULL
AND liner_diameter IS NULL
AND liner_thickness IS NULL
AND liner_perforation_from IS NULL
AND liner_perforation_to IS NULL);
raise notice '...wells_perforations data imported';
SELECT count(*) from perforation into row_count;
raise notice '% rows loaded into the perforation table', row_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION migrate_perforations () IS 'Load Perforation details, only for the wells that have been replicated.';
-- DESCRIPTION
-- Define the SQL INSERT command that copies the aquifer linkages from the legacy
-- database to the analogous GWELLS table (aquifer_well), referencing well tag number that
-- continues to be used in the new system (table join to the transformation table).
--
-- PARAMETERS
-- None
--
-- RETURNS
-- None as this is a stored procedure
--
CREATE OR REPLACE FUNCTION migrate_aquifers() RETURNS void AS $$
DECLARE
row_count integer;
BEGIN
raise notice '...importing gw_aquifer_wells data';
INSERT INTO aquifer_well(
aquifer_well_guid,
aquifer_id,
well_tag_number,
create_user,create_date,update_user,update_date
)
SELECT
gen_random_uuid() ,
aws.aquifer_id ,
xform.well_tag_number,
aws.who_created ,
aws.when_created ,
coalesce(aws.who_updated, aws.who_created),
coalesce(aws.when_updated,aws.when_created)
FROM wells.gw_aquifer_wells aws INNER JOIN xform_well xform ON aws.well_id = xform.well_id;
raise notice '...gw_aquifer_well data imported';
SELECT count(*) from aquifer_well into row_count;
raise notice '% rows loaded into the aquifer_well table', row_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION migrate_aquifers () IS 'Load Aquifer Wells, only for the wells that have been replicated.';
-- DESCRIPTION
-- Define the SQL INSERT command that copies the lithology from the legacy
-- database to the analogous GWELLS table (lithology_description), referencing well tag
-- number that continues to be used in the new system (table join to the transformation
-- table).
--
-- NOTE: This copy also converts the flow units ('USGM' to 'USGPM').
--
-- PARAMETERS
-- None
--
-- RETURNS
-- None as this is a stored procedure
--
CREATE OR REPLACE FUNCTION migrate_lithology() RETURNS void AS $$
DECLARE
row_count integer;
BEGIN
raise notice '...importing wells_lithology_descriptions data';
INSERT INTO lithology_description(
lithology_description_guid ,
filing_number ,
well_tag_number ,
lithology_from ,
lithology_to ,
lithology_raw_data ,
lithology_description_code ,
lithology_material_code ,
lithology_hardness_code ,
lithology_colour_code ,
water_bearing_estimated_flow,
well_yield_unit_code ,
lithology_observation ,
lithology_sequence_number ,
create_user, create_date, update_user, update_date
)
SELECT
gen_random_uuid() ,
null ,
xform.well_tag_number ,
wld.lithology_from ,
wld.lithology_to ,
wld.lithology_raw_data ,
wld.lithology_code ,
wld.lithology_material_code ,
wld.relative_hardness_code ,
wld.lithology_colour_code ,
wld.water_bearing_estimated_flow ,
CASE wld.water_bearing_est_flw_unt_cd
WHEN 'USGM' THEN 'USGPM'
ELSE wld.water_bearing_est_flw_unt_cd
END AS well_yield_unit_code ,
wld.lithology_observation ,
wld.lithology_sequence_number ,
wld.who_created, wld.when_created, COALESCE(wld.who_updated, wld.who_created), COALESCE(wld.when_updated, wld.when_created)
FROM wells.wells_lithology_descriptions wld
INNER JOIN xform_well xform ON xform.well_id=wld.well_id
INNER JOIN wells.wells_wells wells ON wells.well_id=wld.well_id;
raise notice '...wells_lithology_descriptions data imported';
SELECT count(*) from lithology_description into row_count;
raise notice '% rows loaded into the lithology_description table', row_count;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION migrate_lithology () IS 'Load Lithology, only for the wells that have been replicated.';
-- DESCRIPTION
-- Define two driver stored procedures, grouping the SQL commands that runs the WELLS to
-- GWELLS replication, into two distinct steps. This does NOT include a refresh of the
-- static code tables; that is done only during the pipeline build and deploy.
--
-- This division (into two steps) of the WELLS to GWELLS replication is required to work
-- around an intermittent postgresql bug; for more details see:
-- https://github.com/bcgov/gwells/wiki/Regular-Corruption-of-the-PostgreSQL-DB
--
-- There is the opportunity to run 'VACUUM FULL' to reclaim disk space, in between
-- these two steps.
--
-- These steps will succeed only if the static code tables are already populated.
--
-- NOTE: This procedure is meant to be run from the Database Pod, during scheduled nightly
-- replications or on an ad-hoc basis. It is not part of a pipeline build or deployment.
--
-- USAGE
-- 1. If run from a terminal window on the postgresql pod (the $POSTGRESQL_* environment variables
-- are guaranteed to be set correctly on the pod). For example:
--
-- psql -t -d $POSTGRESQL_DATABASE -U $POSTGRESQL_USER -c 'SELECT db_replicate_step1(_subset_ind=>false);'
--   psql -t -d $POSTGRESQL_DATABASE -U $POSTGRESQL_USER -c 'SELECT db_replicate_step2();'
--
-- 2. If invoked remotely from a developer workstation, on the postgresql pod (the quotes and double-quotes
--    are required, and the pod name will vary with each deployment). For example:
--
-- oc exec postgresql-80-04n7h -- /bin/bash -c 'psql -t -d $POSTGRESQL_DATABASE -U $POSTGRESQL_USER -c "SELECT db_replicate_step1(_subset_ind=>false);"'
--    oc exec postgresql-80-04n7h -- /bin/bash -c 'psql -t -d $POSTGRESQL_DATABASE -U $POSTGRESQL_USER -c "SELECT db_replicate_step2();"'
--
--
-- 3. If run on the local environment of a developer workstation, replace the password and username with the
-- local credentials, or ensure that the $POSTGRESQL_* environment variables are set correctly to point
-- to the local database. For example:
--
-- psql -d $POSTGRESQL_DATABASE -U $POSTGRESQL_USER -c 'SELECT db_replicate_step1(_subset_ind=>true);'
--   psql -d $POSTGRESQL_DATABASE -U $POSTGRESQL_USER -c 'SELECT db_replicate_step2();'
--
CREATE OR REPLACE FUNCTION db_replicate_step1(_subset_ind boolean default true) RETURNS void AS $$
BEGIN
raise notice 'Replicating WELLS to GWELLS.';
raise notice '.. step 1 (of 2)';
PERFORM populate_xform(_subset_ind);
TRUNCATE TABLE bcgs_number CASCADE;
PERFORM migrate_bcgs();
TRUNCATE TABLE well CASCADE;
PERFORM populate_well();
PERFORM migrate_screens();
PERFORM migrate_production();
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION db_replicate_step1 (boolean) IS 'SQL Driver script to run replication, without code table refresh (step 1).';
CREATE OR REPLACE FUNCTION db_replicate_step2 () RETURNS void AS $$
BEGIN
raise notice 'Replicating WELLS to GWELLS.';
raise notice '.. step 2 (of 2)';
PERFORM migrate_casings();
PERFORM migrate_perforations();
PERFORM migrate_aquifers();
PERFORM migrate_lithology();
DROP TABLE IF EXISTS xform_well;
raise notice 'Finished replicating WELLS to GWELLS.';
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION db_replicate_step2 () IS 'SQL Driver script to run replication, without code table refresh (step 2).';
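-- Example end-to-end run (illustrative only, not part of the original script), with the
-- optional VACUUM FULL between the two steps as noted above:
--
--   SELECT db_replicate_step1(_subset_ind => false);
--   VACUUM FULL;
--   SELECT db_replicate_step2();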
| 42.411197 | 155 | 0.584414 | 3.015625 |
0b39211e58c62524837539f8c02eb738f733141e | 1,037 | py | Python | GraphSAGE/fix.py | attre2vec/attre2vec | f36a2581f3d17887d6201a76624d4ced93d6503f | [
"MIT"
] | null | null | null | GraphSAGE/fix.py | attre2vec/attre2vec | f36a2581f3d17887d6201a76624d4ced93d6503f | [
"MIT"
] | null | null | null | GraphSAGE/fix.py | attre2vec/attre2vec | f36a2581f3d17887d6201a76624d4ced93d6503f | [
"MIT"
] | null | null | null | import pickle
import networkx as nx
import numpy as np
import torch
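# Export GraphSAGE inputs for each dataset: per-node feature vectors derived from the
# edge-feature matrix H, the test graph's edgelist, and the training-node list of every
# train/test split.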
for name in ('cora', 'citeseer', 'pubmed'):
with open(f'data/datasets/{name}.pkl', 'rb') as fin:
dataset = pickle.load(fin)
test_graph = dataset['original_graph']
e2i = dataset['edge2idx']
H = dataset['H']
node_fts = torch.zeros((test_graph.number_of_nodes(), 128))
for u, v in test_graph.edges():
ef = H[e2i[(u, v)]][3:-1]
node_fts[u] = ef[:128]
node_fts[v] = ef[128:]
train_nodes = []
for idx in range(dataset['num_datasets']):
tn = []
for u, v in dataset['Xy'][idx]['train']['X']:
if u not in tn:
tn.append(u)
if v not in tn:
tn.append(v)
train_nodes.append(tn)
nx.write_edgelist(test_graph, f'GraphSAGE/data/{name}.edgelist')
np.save(f'GraphSAGE/data/{name}-node-features', node_fts.numpy())
with open(f'GraphSAGE/data/{name}-train-nodes.pkl', 'wb') as fout:
pickle.dump(train_nodes, fout)
| 25.925 | 70 | 0.580521 | 3.125 |
cb8b3d39476b988461cc3bd81b209d2441d29158 | 2,859 | go | Go | cmd/gopmctl/main.go | brettbuddin/gopm | c1f851755872a27f9f7e2f6a1ff8664b6198506d | [
"MIT"
] | null | null | null | cmd/gopmctl/main.go | brettbuddin/gopm | c1f851755872a27f9f7e2f6a1ff8664b6198506d | [
"MIT"
] | null | null | null | cmd/gopmctl/main.go | brettbuddin/gopm | c1f851755872a27f9f7e2f6a1ff8664b6198506d | [
"MIT"
] | null | null | null | package main
import (
"fmt"
"os"
"strings"
"text/tabwriter"
"github.com/logrusorgru/aurora"
"github.com/spf13/cobra"
"github.com/stuartcarnie/gopm/config"
"github.com/stuartcarnie/gopm/rpc"
"google.golang.org/grpc"
)
type Control struct {
Configuration string
Address string
client rpc.GopmClient
}
var (
control = &Control{}
rootCmd = cobra.Command{
Use: "gopmctl",
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
return control.initializeClient()
},
}
)
func init() {
rootCmd.PersistentFlags().StringVarP(&control.Configuration, "config", "c", "", "Configuration file")
rootCmd.PersistentFlags().StringVar(&control.Address, "addr", "localhost:9002", "gopm server address")
rootCmd.AddCommand(&statusCmd)
rootCmd.AddCommand(&tailLogCmd)
rootCmd.AddCommand(&signalCmd)
rootCmd.AddCommand(&startCmd)
rootCmd.AddCommand(&stopCmd)
rootCmd.AddCommand(&reloadCmd)
rootCmd.AddCommand(&shutdownCmd)
rootCmd.AddCommand(&stopAllCmd)
rootCmd.AddCommand(&startAllCmd)
}
func main() {
if err := rootCmd.Execute(); err != nil {
_, _ = fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}
func (ctl *Control) initializeClient() error {
gc, err := grpc.Dial(ctl.getServerURL(), grpc.WithInsecure())
if err != nil {
return err
}
control.client = rpc.NewGopmClient(gc)
return nil
}
func (ctl *Control) getServerURL() string {
if ctl.Address != "" {
return ctl.Address
} else if _, err := os.Stat(ctl.Configuration); err == nil {
cfg := config.NewConfig()
_, _ = cfg.LoadPath(ctl.Configuration)
if cfg.GrpcServer != nil && cfg.GrpcServer.Address != "" {
return cfg.GrpcServer.Address
}
}
return "localhost:9002"
}
// other commands
func (ctl *Control) printProcessInfo(res *rpc.ProcessInfoResponse, processes map[string]bool) {
tw := tabwriter.NewWriter(os.Stdout, 20, 4, 5, ' ', 0)
state := func(s string) aurora.Value {
switch strings.ToUpper(s) {
case "RUNNING":
return aurora.Green(s)
case "BACKOFF", "FATAL":
return aurora.Red(s)
default:
return aurora.Yellow(s)
}
}
for _, pinfo := range res.Processes {
if ctl.inProcessMap(pinfo, processes) {
processName := pinfo.GetFullName()
_, _ = fmt.Fprintln(tw, strings.Join([]string{processName, state(pinfo.StateName).String(), pinfo.Description}, "\t"))
}
}
tw.Flush()
}
func (ctl *Control) inProcessMap(procInfo *rpc.ProcessInfo, processesMap map[string]bool) bool {
if len(processesMap) <= 0 {
return true
}
for procName := range processesMap {
if procName == procInfo.Name || procName == procInfo.GetFullName() {
return true
}
// check the wildcard '*'
pos := strings.Index(procName, ":")
if pos != -1 {
groupName := procName[0:pos]
programName := procName[pos+1:]
if programName == "*" && groupName == procInfo.Group {
return true
}
}
}
return false
}
| 23.056452 | 121 | 0.684855 | 3 |
0bcfb65f371793f7e6f494b825fee38b4329d334 | 7,098 | js | JavaScript | src/parser.js | dragonhailstone/m42kup | d5561945a31c995e8b19d9c75ae2591ec3206bb9 | [
"MIT"
] | null | null | null | src/parser.js | dragonhailstone/m42kup | d5561945a31c995e8b19d9c75ae2591ec3206bb9 | [
"MIT"
] | null | null | null | src/parser.js | dragonhailstone/m42kup | d5561945a31c995e8b19d9c75ae2591ec3206bb9 | [
"MIT"
] | null | null | null | function input2pt(input) {
var levels = [],
stack = [];
function push(fragment) {
// normalize text
if (fragment.type == 'text'
&& stack.length
&& stack[stack.length - 1].type == 'text') {
var prepend = stack.pop();
fragment = {
type: 'text',
start: prepend.start,
end: fragment.end,
data: prepend.data + fragment.data
};
}
if (fragment.type == 'right boundary marker') {
var buf = [fragment], tmp;
while (true) {
tmp = stack.pop();
if (!tmp) throw new Error('No lbm found');
buf.unshift(tmp);
if (tmp.type == 'left boundary marker' && tmp.level == fragment.level) break;
}
var elementStart = buf[0].start,
elementEnd = buf[buf.length - 1].end;
fragment = {
type: 'element',
start: elementStart,
end: elementEnd,
data: input.substring(elementStart, elementEnd),
children: buf
};
}
if (fragment.type == 'right verbatim marker') {
var buf = [fragment], tmp;
while (true) {
tmp = stack.pop();
if (!tmp) throw new Error('No lvm found');
buf.unshift(tmp);
if (tmp.type == 'left verbatim marker' && tmp.level == fragment.level) break;
}
var verbatimStart = buf[0].start,
verbatimEnd = buf[buf.length - 1].end;
fragment = {
type: 'verbatim',
start: verbatimStart,
end: verbatimEnd,
data: input.substring(verbatimStart, verbatimEnd),
children: buf
};
}
stack.push(fragment);
}
// main loop
for (var cur = 0; cur < input.length;) {
if (input[cur] == '`') {
var lvmStart = cur;
for (cur++; cur < input.length; cur++) {
if (input[cur] != '<') break;
}
var lvmLevel = cur - lvmStart;
if (cur < input.length - 1
&& input[cur] == '.'
&& input[cur + 1] == '<') {
cur++;
}
var lvmEnd = cur;
push({
type: 'left verbatim marker',
start: lvmStart,
end: lvmEnd,
data: input.substring(lvmStart, lvmEnd),
level: lvmLevel
});
levels.push(-lvmLevel);
var rvmString = '>'.repeat(lvmLevel - 1) + '`';
var rvmIndex = input.indexOf(rvmString, cur);
var rvmFound = rvmIndex >= 0, rvmStart, rvmEnd;
if (rvmFound)
[rvmStart, rvmEnd] = [rvmIndex, rvmIndex + rvmString.length];
cur = rvmFound ? rvmEnd : input.length;
var textStart = lvmEnd,
textEnd = rvmFound ? rvmStart : cur;
push({
type: 'text',
start: textStart,
end: textEnd,
data: input.substring(textStart, textEnd)
});
if (rvmFound) {
push({
type: 'right verbatim marker',
start: rvmStart,
end: rvmEnd,
data: input.substring(rvmStart, rvmEnd),
level: rvmEnd - rvmStart
});
levels.pop();
}
} else if (input[cur] == '[') {
var lbmStart = cur;
for (cur++; cur < input.length; cur++) {
if (input[cur] != '<') break;
}
var lbmEnd = cur;
var currentLevel = levels[levels.length - 1] || 0;
if (lbmEnd - lbmStart < currentLevel) {
push({
type: 'text',
start: lbmStart,
end: lbmEnd,
data: input.substring(lbmStart, lbmEnd)
});
continue;
}
levels.push(lbmEnd - lbmStart);
push({
type: 'left boundary marker',
start: lbmStart,
end: lbmEnd,
data: input.substring(lbmStart, lbmEnd),
level: lbmEnd - lbmStart
});
// excludes: '(', '.', ':', '[', ']', '<', '`'
// this regex always matches something
var tagNameRegex = /^(?:(?:\*{1,3}|={1,6}|\${1,2}|;{1,3}|[!"#$%&')*+,\-\/;=>?@\\^_{|}~]|[a-z][a-z0-9]*)|)/i,
tagNameStart = cur,
tagNameEnd = tagNameStart + input.substring(tagNameStart)
.match(tagNameRegex)[0].length;
push({
type: 'tag name',
start: tagNameStart,
end: tagNameEnd,
data: input.substring(tagNameStart, tagNameEnd)
});
cur = tagNameEnd;
var separatorRegex = /^(?:[.]|)/i,
separatorStart = cur,
separatorEnd = separatorStart
+ input.substring(separatorStart)
.match(separatorRegex)[0].length;
push({
type: 'separator',
start: separatorStart,
end: separatorEnd,
data: input.substring(separatorStart, separatorEnd)
});
cur = separatorEnd;
} else if (input[cur] == '>' || input[cur] == ']') {
var currentLevel = levels[levels.length - 1] || 0;
var gtStart = cur;
for (; cur < input.length; cur++) {
if (input[cur] != '>') break;
}
var gtEnd = cur;
// >>... does not end with a ]
if (gtEnd == input.length || input[gtEnd] != ']') {
push({
type: 'text',
start: gtStart,
end: gtEnd,
data: input.substring(gtStart, gtEnd)
});
continue;
}
// invalid ]
if (currentLevel == 0) {
if (gtStart < gtEnd)
push({
type: 'text',
start: gtStart,
end: gtEnd,
data: input.substring(gtStart, gtEnd)
});
var mrbmStart = cur;
var mrbmEnd = ++cur;
push({
type: 'mismatched right boundary marker',
start: mrbmStart,
end: mrbmEnd,
data: input.substring(mrbmStart, mrbmEnd)
});
continue;
}
cur++;
// not enough level
if (cur - gtStart < currentLevel) {
push({
type: 'text',
start: gtStart,
end: cur,
data: input.substring(gtStart, cur)
});
continue;
}
// too much >
if (cur - gtStart > currentLevel) {
push({
type: 'text',
start: gtStart,
end: cur - currentLevel,
data: input.substring(gtStart, cur - currentLevel)
});
}
var rbmStart = cur - currentLevel,
rbmEnd = cur;
push({
type: 'right boundary marker',
start: rbmStart,
end: rbmEnd,
data: input.substring(rbmStart, rbmEnd),
level: rbmEnd - rbmStart
});
levels.pop();
} else /* none of '[', ']', '`', '>' */ {
// reduce text normalization overhead
var textStart = cur;
for (cur++; cur < input.length; cur++) {
if (['[', ']', '`', '>'].includes(input[cur])) break;
}
var textEnd = cur;
push({
type: 'text',
start: textStart,
end: textEnd,
data: input.substring(textStart, textEnd)
});
}
}
// close the unclosed
for (var i = levels.length - 1; i >= 0; i--) {
var type = levels[i] > 0
? 'right boundary marker'
: 'right verbatim marker';
var absLevel = levels[i] > 0
? levels[i] : -levels[i];
push({
type: type,
start: input.length,
end: input.length,
data: '',
level: absLevel
});
}
return stack;
}
function pt2ast(pt) {
function recurse(pt) {
var ast = pt.map(e => {
switch (e.type) {
case 'text':
return {
type: 'text',
text: e.data
};
case 'verbatim':
return {
type: 'text',
text: e.children[1].data
};
case 'element':
return {
type: 'element',
name: e.children[1].data,
code: e.data,
children: recurse(e.children.slice(3, -1))
};
case 'mismatched right boundary marker':
return {
type: 'error',
text: e.data
};
default:
throw new TypeError(`Unknown type: ${e.type}`);
}
});
return ast;
};
return recurse(pt);
}
module.exports = {
input2pt,
pt2ast
};
| 21.251497 | 111 | 0.554945 | 3.046875 |
c34e8d465fd7724ea767d7527c6612a040dd6fe2 | 1,299 | go | Go | parse.go | mkeeler/pointerstructure | f252a8fd71c835fa8c8ead2ce2cc3a91e214cf83 | [
"MIT"
] | null | null | null | parse.go | mkeeler/pointerstructure | f252a8fd71c835fa8c8ead2ce2cc3a91e214cf83 | [
"MIT"
] | null | null | null | parse.go | mkeeler/pointerstructure | f252a8fd71c835fa8c8ead2ce2cc3a91e214cf83 | [
"MIT"
] | 1 | 2021-02-02T00:01:17.000Z | 2021-02-02T00:01:17.000Z | package pointerstructure
import (
"fmt"
"strings"
)
// Parse parses a pointer from the input string. The input string
// is expected to follow the format specified by RFC 6901: '/'-separated
// parts. Each part can contain escape codes to contain '/' or '~'.
func Parse(input string) (*Pointer, error) {
// Special case the empty case
if input == "" {
return &Pointer{}, nil
}
// We expect the first character to be "/"
if input[0] != '/' {
return nil, fmt.Errorf(
"parse Go pointer %q: first char must be '/'", input)
}
// Trim out the first slash so we don't have to +1 every index
input = input[1:]
// Parse out all the parts
var parts []string
lastSlash := -1
for i, r := range input {
if r == '/' {
parts = append(parts, input[lastSlash+1:i])
lastSlash = i
}
}
// Add last part
parts = append(parts, input[lastSlash+1:])
// Process each part for string replacement
for i, p := range parts {
// Replace ~1 followed by ~0 as specified by the RFC
parts[i] = strings.Replace(
strings.Replace(p, "~1", "/", -1), "~0", "~", -1)
}
return &Pointer{Parts: parts}, nil
}
// MustParse is like Parse but panics if the input cannot be parsed.
func MustParse(input string) *Pointer {
p, err := Parse(input)
if err != nil {
panic(err)
}
return p
}
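// exampleParse is a small usage sketch added for illustration; it is not part of the
// original file. It shows how the RFC 6901 escape codes are decoded: "~1" becomes "/"
// and "~0" becomes "~".
func exampleParse() {
	p := MustParse("/foo/a~1b/c~0d")
	// p.Parts is []string{"foo", "a/b", "c~d"}
	fmt.Println(p.Parts)
}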
| 22.396552 | 72 | 0.640493 | 3.359375 |
9e9dd5978796a3c1d073a35cdc2594060765692b | 2,386 | rs | Rust | src/tooling/reader.rs | deltapi/aoc2020-rs | 770fc31e7ce9101a9073321d7c2d0aa016d12266 | [
"MIT"
] | 1 | 2020-12-04T21:23:40.000Z | 2020-12-04T21:23:40.000Z | src/tooling/reader.rs | deltapi/aoc2020-rs | 770fc31e7ce9101a9073321d7c2d0aa016d12266 | [
"MIT"
] | null | null | null | src/tooling/reader.rs | deltapi/aoc2020-rs | 770fc31e7ce9101a9073321d7c2d0aa016d12266 | [
"MIT"
] | null | null | null | use std::fs::File;
use std::io::{self, BufRead};
use std::path::Path;
pub fn read_file_to_vec(path: &str) -> Vec<usize> {
let mut entries = vec![];
if let Ok(lines) = read_lines(path) {
for line in lines {
if let Ok(ip) = line {
entries.push(ip.parse::<usize>().unwrap());
}
}
}
entries
}
pub fn read_file_to_vec_u64(path: &str) -> Vec<u64> {
let mut entries = vec![];
if let Ok(lines) = read_lines(path) {
for line in lines {
if let Ok(ip) = line {
entries.push(ip.parse::<u64>().unwrap());
}
}
}
entries
}
pub fn read_file_to_vec_of_string(path: &str) -> Vec<String> {
let mut entries = vec![];
if let Ok(lines) = read_lines(path) {
for line in lines {
if let Ok(ip) = line {
entries.push(ip);
}
}
}
entries
}
pub fn read_block_file_to_vec_of_string(path: &str) -> Vec<String> {
let mut entries = vec![];
if let Ok(lines) = read_lines(path) {
let mut entry = "".to_string();
for line in lines {
if let Ok(ip) = line {
match ip.as_str() {
"" => {
entries.push(entry);
entry = "".to_string();
}
_ => {
entry.push_str(" ");
entry.push_str(&ip);
}
}
}
}
entries.push(entry);
}
entries
}
pub fn read_block_file_to_vec_vec_of_string(path: &str) -> Vec<Vec<String>> {
let mut entries: Vec<Vec<String>> = vec![];
if let Ok(lines) = read_lines(path) {
let mut entry = vec![];
for line in lines {
if let Ok(ip) = line {
match ip.as_str() {
"" => {
entries.push(entry);
entry = vec![];
}
_ => {
entry.push(ip);
}
}
}
}
entries.push(entry);
}
entries
}
fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where
P: AsRef<Path>,
{
let file = File::open(filename)?;
Ok(io::BufReader::new(file).lines())
}
| 25.655914 | 77 | 0.436714 | 3.078125 |
856ce1c3c2a2dcde8a5682cbfe0aa954e45042d7 | 3,256 | js | JavaScript | crawler/fetchStandards.js | gnehs/ntut-course-crawler-node | 2084e374e6930135ffcfaaa22604b357c47b43ec | [
"MIT"
] | 2 | 2021-02-26T08:37:37.000Z | 2021-02-28T18:43:10.000Z | crawler/fetchStandards.js | gnehs/ntut-course-crawler-node | 2084e374e6930135ffcfaaa22604b357c47b43ec | [
"MIT"
] | null | null | null | crawler/fetchStandards.js | gnehs/ntut-course-crawler-node | 2084e374e6930135ffcfaaa22604b357c47b43ec | [
"MIT"
] | null | null | null | const { fetchSinglePage } = require('./fetchSinglePage')
const jsonfile = require('jsonfile');
const fs = require('fs');
const pangu = require('./tools/pangu').spacing;
async function main() {
let $ = await fetchSinglePage('https://aps.ntut.edu.tw/course/tw/Cprog.jsp?format=-1')
let years = []
for (let yr of $('a')) {
years.push($(yr).attr('href').match(/\&year=(.+)$/)[1])
}
  // Save the course standards for each year
for (let yr of years) {
await parseYear(yr)
}
jsonfile.writeFileSync(`./dist/standards.json`, years, { spaces: 2, EOL: '\r\n' })
}
async function parseYear(year) {
fs.mkdirSync(`./dist/${year}/`, { recursive: true });
let $ = await fetchSinglePage(`https://aps.ntut.edu.tw/course/tw/Cprog.jsp?format=-2&year=${year}`)
let martics = $('a')
let result = {}
for (let martic of martics) {
let title = $(martic).text()
let url = $(martic).attr('href').replace('.', '')
url = `https://aps.ntut.edu.tw/course/tw/${url}`
console.log('[fetch]', year, title)
result[title] = await parseSystem(url)
}
jsonfile.writeFileSync(`./dist/${year}/standard.json`, result, { spaces: 2, EOL: '\r\n' })
}
function getChildText($, tr, i) {
return $($(tr).children('td')[i]).text().replace(/\n| /g, '')
}
async function parseSystem(url = 'https://aps.ntut.edu.tw/course/tw/Cprog.jsp?format=-3&year=109&matric=7') {
let $ = await fetchSinglePage(url)
let result = {}
//parse table title
let tableTitle = []
for (let th of $('table tr th')) {
tableTitle.push($(th).text().replace(/\n| /g, ''))
}
$('tr:first-child').remove()
//parse table body
let trs = $('table tr')
for (let tr of trs) {
//parseCredit
let credits = {}
for (let i = 1; i < 9; i++) {
credits[tableTitle[i]] = getChildText($, tr, i)
}
// data
// body > table > tbody > tr:nth-child(2) > td:nth-child(1) > p > a
let departmentUrl = $(tr).find('a').attr('href').replace('.', '')
departmentUrl = `https://aps.ntut.edu.tw/course/tw${departmentUrl}`
let departmentTitle = getChildText($, tr, 0)
result[departmentTitle] = {
credits,
...(await parseDeaprtment(departmentUrl))
}
}
return result
}
async function parseDeaprtment(url = 'https://aps.ntut.edu.tw/course/tw/Cprog.jsp?format=-4&year=109&matric=7&division=340') {
let $ = await fetchSinglePage(url)
let result = {
courses: [], rules: []
}
$('body > table:nth-child(5) tr:first-child').remove()
let trs = $('body > table:nth-child(5) tr')
for (let tr of trs) {
result.courses.push({
year: getChildText($, tr, 0),
sem: getChildText($, tr, 1),
type: getChildText($, tr, 2),
name: getChildText($, tr, 4),
credit: getChildText($, tr, 5),
hours: getChildText($, tr, 6),
stage: getChildText($, tr, 7),
})
}
result.rules = pangu($('body > table:nth-child(9) > tbody > tr > td > font').html())
result.rules = result.rules ? result.rules.split('<br>').map(x => x.replace(/(.+)\.|\n/g, '')) : null
return result
}
module.exports = main; | 38.305882 | 126 | 0.558354 | 3.078125 |
1e42ea2deb67ec282bdca0b60a66ee53ccd30f42 | 3,828 | lua | Lua | Start.lua | Venika/LuaPrograms | 4c0cb662b0fd171192d0f661ce3b074de470764f | [
"MIT"
] | null | null | null | Start.lua | Venika/LuaPrograms | 4c0cb662b0fd171192d0f661ce3b074de470764f | [
"MIT"
] | 1 | 2018-01-29T07:53:29.000Z | 2018-01-29T08:42:22.000Z | Start.lua | Venika/LuaRockPaperScissors | 4c0cb662b0fd171192d0f661ce3b074de470764f | [
"MIT"
] | null | null | null | local composer = require( "composer" )
local scene = composer.newScene()
local options ={
effect = "fade",
time=200
}
---------------------------------------------------------------------------------
-- All code outside of the listener functions will only be executed ONCE
-- unless "composer.removeScene()" is called.
---------------------------------------------------------------------------------
-- local forward references should go here
---------------------------------------------------------------------------------
-- "scene:create()"
function scene:create( event )
local sceneGroup = self.view
-- Initialize the scene here.
-- Example: add display objects to "sceneGroup", add touch listeners, etc.
-- Create the widget
----------------------------background--------------------------------
local bg=display.newImage("tittle.png")
bg.x= display.contentWidth/2
bg.y= display.contentHeight/2
bg:scale(.50,.60)
local myText = display.newText("Rock-Paper-Scissor", display.contentCenterX, display.contentCenterY-200, native.systemFont, 25)
myText:setFillColor(1,1,0)
local myText1 = display.newText("Venika Gaur", display.contentCenterX, display.contentCenterY-150, native.systemFont, 25)
myText:setFillColor(1,1,0)
local startBtn= display.newImage("Startbutton.png")
startBtn.x=display.contentCenterX-70
startBtn.y=display.contentCenterY+150
startBtn:scale(.15,.15)
local function button1(event)
-- composer.removeScene( "Start" )
audio.pause(startsound);
composer.gotoScene( "Enemy1" , options )
end
startBtn:addEventListener("tap", button1 )
local setBtn= display.newImage("Setbutton.png")
setBtn.x=display.contentCenterX+62
setBtn.y=display.contentCenterY+150
setBtn:scale(.14,.13)
local function button2(event)
composer.gotoScene( "Settings" , options )
end
setBtn:addEventListener("tap", button2 )
sceneGroup:insert(bg)
sceneGroup:insert(myText1)
sceneGroup:insert(myText)
sceneGroup:insert(startBtn)
sceneGroup:insert(setBtn)
end
-- "scene:show()"
function scene:show( event )
local sceneGroup = self.view
local phase = event.phase
if ( phase == "will" ) then
-- Called when the scene is still off screen (but is about to come on screen).
elseif ( phase == "did" ) then
-- Called when the scene is now on screen.
-- Insert code here to make the scene come alive.
-- Example: start timers, begin animation, play audio, etc.
local startsound = audio.loadSound("Startmusic.mp3")
audio.play(startsound);
end
end
-- "scene:hide()"
function scene:hide( event )
local sceneGroup = self.view
local phase = event.phase
if ( phase == "will" ) then
-- Called when the scene is on screen (but is about to go off screen).
-- Insert code here to "pause" the scene.
-- Example: stop timers, stop animation, stop audio, etc.
elseif ( phase == "did" ) then
-- Called immediately after scene goes off screen.
end
end
-- "scene:destroy()"
function scene:destroy( event )
local sceneGroup = self.view
-- Called prior to the removal of scene's view ("sceneGroup").
-- Insert code here to clean up the scene.
-- Example: remove display objects, save state, etc.
end
---------------------------------------------------------------------------------
-- Listener setup
scene:addEventListener( "create", scene )
scene:addEventListener( "show", scene )
scene:addEventListener( "hide", scene )
scene:addEventListener( "destroy", scene )
---------------------------------------------------------------------------------
return scene
| 28.355556 | 131 | 0.581766 | 3.078125 |
ae5568f97858db34d551a260f52a507d47adafe3 | 964 | rs | Rust | src/utils.rs | EngineOrion/container | 03bdd229cdf4ac25238314c4b60586d28c33c38e | [
"MIT"
] | null | null | null | src/utils.rs | EngineOrion/container | 03bdd229cdf4ac25238314c4b60586d28c33c38e | [
"MIT"
] | null | null | null | src/utils.rs | EngineOrion/container | 03bdd229cdf4ac25238314c4b60586d28c33c38e | [
"MIT"
] | null | null | null | pub fn print_startup() {
println!(
"
+-------------+
| Container |
| v{} |
+-------------+
",
env!("CARGO_PKG_VERSION")
);
}
pub fn print_help() {
println!(
r#"Usage: ./container [options]
start start the application
help print this help text
repl start the REPL to configure the application"#
);
}
pub fn print_repl_help() {
println!(
r#"
REPL HELP:
start Start the main application.
get [option] Get status of something printed. Supported is:
config '-> Get status of config printed.
fetch [option] Import a file. Supported is:
config '-> Import the config.
inform [variable] Get information about a variable from the config.
eval (defun) [code] Eval a function with defun parameter. Else eval a
built in expression.
exit Exit the application.
"#
);
}
| 25.368421 | 72 | 0.54668 | 3.125 |
4c9147e4b94c987c89cc00075bf420933d637454 | 2,969 | swift | Swift | Tests/Rocc/AsyncWhileTests.swift | gswirski/rocc | 57f7934c096c27efc12981b691a81e85359bcfb9 | [
"MIT"
] | 71 | 2019-02-26T20:11:59.000Z | 2022-03-29T18:12:56.000Z | Tests/Rocc/AsyncWhileTests.swift | simonmitchell/rocc | b8b29207e808c1a2e5b397f4d4c0950149e8b76d | [
"MIT"
] | 40 | 2019-04-24T08:03:17.000Z | 2021-08-10T07:48:59.000Z | Tests/Rocc/AsyncWhileTests.swift | gswirski/rocc | 57f7934c096c27efc12981b691a81e85359bcfb9 | [
"MIT"
] | 6 | 2019-08-11T12:44:45.000Z | 2022-02-04T00:38:09.000Z | //
// AsyncWhileTests.swift
// RoccTests
//
// Created by Simon Mitchell on 19/11/2019.
// Copyright © 2019 Simon Mitchell. All rights reserved.
//
import XCTest
@testable import Rocc
class AsyncWhileTests: XCTestCase {
func testTimeoutCalledBeforeBreakingTwice() {
var continueCalls: Int = 0
let expectation = XCTestExpectation(description: "timeout called")
DispatchQueue.global().asyncWhile({ (continueClosure) in
DispatchQueue.global().asyncAfter(deadline: .now() + 1.2) {
continueCalls += 1
continueClosure(true)
}
}, timeout: 1.0) {
expectation.fulfill()
}
wait(for: [expectation], timeout: 2.0)
XCTAssertEqual(continueCalls, 1)
}
func testWhileClosureCalledAppropriateNumberOfTimes() {
let expectation = XCTestExpectation(description: "timeout called")
var calls: Int = 0
DispatchQueue.global().asyncWhile({ (continueClosure) in
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
calls += 1
continueClosure(false)
}
}, timeout: 2.1) {
expectation.fulfill()
}
wait(for: [expectation], timeout: 3.0)
XCTAssertEqual(calls, 11)
}
func testCallingContinueWithTrueBreaksWhile() {
let expectation = XCTestExpectation(description: "timeout called")
var calls: Int = 0
DispatchQueue.global().asyncWhile({ (continueClosure) in
DispatchQueue.global().asyncAfter(deadline: .now() + 0.2) {
calls += 1
continueClosure(true)
}
}, timeout: 2.1) {
expectation.fulfill()
}
wait(for: [expectation], timeout: 3.0)
XCTAssertEqual(calls, 1)
}
func testWhileClosureCalledOnMainThread() {
let expectation = XCTestExpectation(description: "timeout called")
DispatchQueue.global().asyncWhile({ (continueClosure) in
XCTAssertEqual(Thread.current.isMainThread, true)
expectation.fulfill()
}, timeout: 0.2) {
}
wait(for: [expectation], timeout: 0.3)
}
func testDoneClosureCalledOnMainThread() {
let expectation = XCTestExpectation(description: "timeout called")
DispatchQueue.global().asyncWhile({ (continueClosure) in
continueClosure(true)
}, timeout: 0.2) {
XCTAssertEqual(Thread.current.isMainThread, true)
expectation.fulfill()
}
wait(for: [expectation], timeout: 0.3)
}
}
| 26.990909 | 74 | 0.529471 | 3.109375 |
85bf0fed252edb69e5181ccd21ee2a61d4d1f97d | 1,453 | rs | Rust | backend/src/types/ws.rs | jay3332/KeyMaster | 927b453c70d240bdfdc4c82c92de4ffc1756f966 | [
"MIT"
] | 2 | 2021-12-27T01:28:23.000Z | 2021-12-28T00:46:34.000Z | backend/src/types/ws.rs | jay3332/KeyMaster | 927b453c70d240bdfdc4c82c92de4ffc1756f966 | [
"MIT"
] | null | null | null | backend/src/types/ws.rs | jay3332/KeyMaster | 927b453c70d240bdfdc4c82c92de4ffc1756f966 | [
"MIT"
] | null | null | null | use crate::types::Quote;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(tag = "op", content = "data")]
pub enum WebSocketInboundEvent {
/// Sends information about who is connecting to the websocket and for what intent.
/// This operation is required before any other events can be received.
Identify {
/// The session token which is to be used to identify who is connecting to the socket.
token: String,
/// What this connection will be used for.
intent: WebSocketIntent,
},
/// Start the quote after the countdown and start timing.
/// This signals the server to start receiving `KeyPress` events.
Start,
/// Gracefully quit this typing session.
Quit,
/// Signify that a key was pressed.
KeyPress {
/// The key which was pressed.
key: String,
},
}
#[derive(Serialize, Deserialize, Clone, Copy, Debug)]
pub enum WebSocketIntent {
/// Typing practice with quotes.
PracticeQuote = 0,
}
#[derive(Serialize, Deserialize, Clone, Debug)]
#[serde(tag = "op", content = "data")]
pub enum WebSocketOutboundEvent {
/// Send quote data to the client.
PracticeQuoteReady { quote: Quote },
/// The user has finished typing the quote - give statistics.
PracticeQuoteFinish {
wpm: f32,
wpm_raw: f32,
accuracy: f32,
replay: String,
errors: u16,
},
}
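// Wire-format sketch added for illustration; not part of the original file. It assumes
// `serde_json` is available (e.g. as a dev-dependency) and shows how the adjacently
// tagged enums above serialize: the variant name goes under "op" and its fields under
// "data", e.g. {"op":"Identify","data":{"token":"...","intent":"PracticeQuote"}}.
#[cfg(test)]
mod wire_format_sketch {
    use super::*;

    #[test]
    fn identify_serializes_with_op_and_data() {
        let event = WebSocketInboundEvent::Identify {
            token: "session-token".to_string(),
            intent: WebSocketIntent::PracticeQuote,
        };
        let json = serde_json::to_string(&event).expect("event should serialize");
        assert!(json.contains(r#""op":"Identify""#));
        assert!(json.contains(r#""token":"session-token""#));
        assert!(json.contains(r#""intent":"PracticeQuote""#));
    }
}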
| 27.942308 | 94 | 0.645561 | 3.359375 |
31f1e638594b126919fb5b25385de1d366ea1adc | 5,932 | lua | Lua | tank.lua | 71460-4-F/Tank_WTISC_UFC | 0b88988b1774c3441a8478c037a93c722423816a | [
"MIT"
] | null | null | null | tank.lua | 71460-4-F/Tank_WTISC_UFC | 0b88988b1774c3441a8478c037a93c722423816a | [
"MIT"
] | 1 | 2019-02-05T00:04:02.000Z | 2019-02-05T00:04:02.000Z | tank.lua | 71460-4-F/Tank_WTISC_UFC | 0b88988b1774c3441a8478c037a93c722423816a | [
"MIT"
] | null | null | null | bullet = class:new()
bullet.x = 0
bullet.y = 0
bullet.speed = 1000
bullet.angle = 0
bullet.raio = 10
function bullet:draw()
love.graphics.setColor(0, 0, 0)
love.graphics.circle('fill', self.x, self.y, self.raio)
end
function bullet:update(dt)
self.x = self.x + dt*self.speed*math.cos(self.angle)
self.y = self.y + dt*self.speed*math.sin(self.angle)
end
tank = {}
tank.x = width/2
tank.y = height/2
tank.vx = 0
tank.vy = 0
tank.speed = 300
tank.raio = 40
tank.color = {0, 0, 255}
tank.angle = 0
tank.size = 60
tank.bullets = {}
tank.health = 100
tank.pontuacao = 0
function tank:draw()
love.graphics.setColor(self.color)
--love.graphics.circle("fill", self.x, self.y, self.raio)
love.graphics.draw(ufo, self.x, self.y, self.angle*2, self.raio*2/32, self.raio*2/32, 16, 16)
local x_cano = self.x + self.size*math.cos(self.angle)
local y_cano = self.y + self.size*math.sin(self.angle)
love.graphics.setLineWidth(5)
love.graphics.setColor(0, 0, 0)
love.graphics.line(self.x, self.y, x_cano, y_cano)
for i = 1, #self.bullets do
self.bullets[i]:draw()
end
love.graphics.setColor(0, 0, 0)
local pont = string.format("%d", self.pontuacao)
love.graphics.print(pont, width - 0.5*myfont:getWidth(pont), 0, 0, 0.5, 0.5)
if self.health >= 0 then
if self.health > 50 then
love.graphics.setColor(0, 200, 0)
elseif self.health > 20 then
love.graphics.setColor(200, 200, 0)
else
love.graphics.setColor(255, 0, 0)
end
love.graphics.rectangle("fill", 0, 0, 2*self.health, 20, 10)
end
love.graphics.setColor(0, 0, 0)
love.graphics.rectangle("line", 0, 0, 200, 20, 10)
end
function tank:update(dt)
if self.health <= 0 then
game_over = true
go_song:play()
musica:stop()
end
self.x = self.x + self.speed*self.vx*dt
self.y = self.y + self.speed*self.vy*dt
self.angle = math.atan2(love.mouse.getY() - self.y,
love.mouse.getX() - self.x)
if self.x < self.raio then
self.x = self.raio
end
if self.x > width - self.raio then
self.x = width - self.raio
end
if self.y < self.raio then
self.y = self.raio
end
if self.y > height - self.raio then
self.y = height - self.raio
end
for i = 1, #self.bullets do
self.bullets[i]:update(dt)
end
for i = 1, #self.bullets do
if fora_da_tela(self.bullets[i].x,
self.bullets[i].y) then
table.remove(self.bullets, i)
break
end
end
for i = 1, #controle_meteoro.bullets do
local xm = controle_meteoro.bullets[i].x
local ym = controle_meteoro.bullets[i].y
local rm = controle_meteoro.bullets[i].raio
local sm = controle_meteoro.bullets[i].speed
if distance(tank.x, tank.y, xm, ym) <= self.raio + rm then
local explosion = exp:clone()
explosion:play()
self.health = self.health - rm/10
controle_meteoro.bullets[i].death = true
end
for j = 1, #self.bullets do
if distance(self.bullets[j].x,
self.bullets[j].y, xm, ym) <= self.bullets[j].raio + rm then
self.pontuacao = self.pontuacao + 1
controle_meteoro.bullets[i].death = true
controle_meteoro:explodir(i)
table.remove(self.bullets, j)
break
end
end
end
end
function tank:shot()
local p = pew:clone()
p:play()
table.insert(self.bullets, bullet:new({x = self.x,
y = self.y, angle = self.angle}))
end
function fora_da_tela(x, y)
if x < 0 or x > width or y < 0 or y > height then
return true
end
end
meteoro = class:new()
meteoro.x = 0
meteoro.y = 0
meteoro.speed = 1000
meteoro.angle = 0
meteoro.raio = 20
meteoro.death = false
meteoro.color = {107,66,38}
function meteoro:draw()
love.graphics.setColor(self.color)
love.graphics.circle('fill', self.x, self.y, self.raio)
end
function meteoro:update(dt)
self.x = self.x + dt*self.speed*math.cos(self.angle)
self.y = self.y + dt*self.speed*math.sin(self.angle)
end
controle_meteoro = {}
controle_meteoro.bullets = {}
controle_meteoro.freq = 0.3
controle_meteoro.temp = 0
controle_meteoro.last_m = 0
function controle_meteoro:draw()
for i = 1, #self.bullets do
self.bullets[i]:draw()
end
end
function controle_meteoro:explodir(i, max)
local explosion = exp:clone()
explosion:play()
if not max then
max = 7
end
local xm = self.bullets[i].x
local ym = self.bullets[i].y
local rm = self.bullets[i].raio
local limit = love.math.random(1+max)
local d_angle = math.pi/limit
for k = 3, limit do
table.insert(controle_meteoro.bullets,
meteoro:new({x = xm + rm*math.cos(k*d_angle), y = ym + rm*math.sin(k*d_angle), speed = 300 + k*50, angle = k*d_angle, raio = rm/2}))
end
end
function controle_meteoro:update(dt)
for i = 1, #self.bullets do
self.bullets[i]:update(dt)
end
for i = 1, #self.bullets do
if self.bullets[i].death then
table.remove(self.bullets, i)
break
end
end
self.temp = self.temp + dt
if self.temp - self.last_m >= self.freq then
self:shot()
self.last_m = self.temp
end
end
function controle_meteoro:shot()
local x_rand = love.math.random(width)
local speed_rand = 400 + love.math.random(300)
local angle_rand = math.atan2(tank.y + 20, tank.x - x_rand)
angle_rand = angle_rand + love.math.random() - 0.5
table.insert(self.bullets,
meteoro:new({x = x_rand, y = -20, speed = speed_rand, angle = angle_rand}))
end
function distance(x1, y1, x2, y2)
return math.sqrt((x1 - x2)^2 + (y1 - y2)^2)
end
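
-- Usage sketch (editor's addition, not part of the original tank.lua): the file
-- relies on globals defined elsewhere in the project (class, width, height, ufo,
-- myfont, pew, exp, go_song, musica, game_over), so the wiring below only
-- illustrates how these tables plug into the LÖVE callbacks.
function love.update(dt)
    if not game_over then
        tank:update(dt)
        controle_meteoro:update(dt)
    end
end

function love.draw()
    tank:draw()
    controle_meteoro:draw()
end

function love.mousepressed()
    tank:shot()
end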

584baa75f023df3c6bd0fa4f8206cbac617d965f | 2,910 | kt | Kotlin | app/src/main/java/com/tardivon/quentin/hackoeur/Event.kt | quentin-tardivon/hackoeur-android | MIT

package com.tardivon.quentin.hackoeur
import android.os.Parcel
import android.os.Parcelable
/**
* Created by quentin on 11/13/17.
*/
class Event() : Parcelable {
var name: String? =null
internal set
var description: String?=null
internal set
var location: String? =null
internal set
var date: String? = null
internal set
var time: String? = null
internal set
var locationGPS: LatLng? = null
internal set
var registeredUsers: MutableList<String>? =null
internal set
var imgId: String? =null
internal set
constructor(parcel: Parcel) : this() {
name = parcel.readString()
description = parcel.readString()
location = parcel.readString()
date = parcel.readString()
time = parcel.readString()
imgId = parcel.readString()
}
constructor(name: String, description: String, location: String, date: String, time: String) : this() {
this.name = name
this.description = description
this.location = location
this.date = date
this.time = time
}
constructor(name: String, description: String, location: String, date: String, time: String, locationGPS: LatLng) : this() {
this.name = name
this.description = description
this.location = location
this.date = date
this.time = time
this.locationGPS = locationGPS
}
constructor(name: String, description: String, location: String, date: String, time: String, locationGPS: LatLng, UsersId: MutableList<String> ) : this() {
this.name = name
this.description = description
this.location = location
this.date = date
this.time = time
this.locationGPS = locationGPS
this.registeredUsers = UsersId
}
constructor(name: String, description: String, location: String, date: String, time: String, locationGPS: LatLng, UsersId: MutableList<String>, imgId: String) : this() {
this.name = name
this.description = description
this.location = location
this.date = date
this.time = time
this.locationGPS = locationGPS
this.registeredUsers = UsersId
this.imgId = imgId
}
override fun writeToParcel(parcel: Parcel, flags: Int) {
parcel.writeString(name)
parcel.writeString(description)
parcel.writeString(location)
parcel.writeString(date)
parcel.writeString(time)
parcel.writeString(imgId)
}
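
    // Editor's note (not part of the original file): writeToParcel() and the
    // Parcel constructor only round-trip the six String fields above, so
    // locationGPS and registeredUsers are not parceled and come back as null
    // after the Event crosses an Intent/process boundary.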
override fun describeContents(): Int {
return 0
}
companion object CREATOR : Parcelable.Creator<Event> {
override fun createFromParcel(parcel: Parcel): Event {
return Event(parcel)
}
override fun newArray(size: Int): Array<Event?> {
return arrayOfNulls(size)
}
}
}

b7197765f83447df87de9671e0d83b600483e63e | 2,568 | kt | Kotlin | core/src/main/java/com/mobilejazz/harmony/kotlin/core/repository/datasource/file/FileStreamValueDataStorage.kt | mobilejazz/harmony-kotlin | Apache-2.0

package com.mobilejazz.harmony.kotlin.core.repository.datasource.file
import com.mobilejazz.harmony.kotlin.core.repository.datasource.DeleteDataSource
import com.mobilejazz.harmony.kotlin.core.repository.datasource.GetDataSource
import com.mobilejazz.harmony.kotlin.core.repository.datasource.PutDataSource
import com.mobilejazz.harmony.kotlin.core.repository.query.Query
import com.mobilejazz.harmony.kotlin.core.threading.extensions.Future
import java.io.EOFException
import java.io.File
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
class FileStreamValueDataStorage<T>(val file: File) : GetDataSource<T>, PutDataSource<T>, DeleteDataSource {
override fun get(query: Query): Future<T> {
TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
}
override fun getAll(query: Query): Future<List<T>> {
return Future {
val values = mutableListOf<T>()
val fin = file.inputStream()
var ois: ObjectInputStream? = null
try {
ois = ObjectInputStream(fin)
while (true) {
val value = ois.readObject() as T
values.add(value)
}
} catch (e: EOFException) {
ois?.close()
fin.close()
}
return@Future values
}
}
override fun put(query: Query, value: T?): Future<T> {
return Future {
return@Future value?.let {
putAll(query, listOf(value)).get()[0]
} ?: throw IllegalArgumentException("FileStreamValueDataStorage: value must be not null")
}
}
override fun putAll(query: Query, value: List<T>?): Future<List<T>> {
return Future {
value?.let {
val allCurrentValues = getAll(query).get()
val fos = file.outputStream()
val oos = ObjectOutputStream(fos)
val allValues = value.toMutableList()
allValues.addAll(allCurrentValues)
for (obj in allValues) {
oos.writeObject(obj)
}
oos.flush()
oos.close()
fos.close()
return@Future value ?: notSupportedQuery()
} ?: throw IllegalArgumentException("FileStreamValueDataStorage: value must be not null")
}
}
override fun delete(query: Query): Future<Unit> {
TODO("not implemented") //To change body of created functions use File | Settings | File Templates.
}
override fun deleteAll(query: Query): Future<Unit> {
return Future {
val outputStream = file.outputStream()
val objectOutputStream = ObjectOutputStream(outputStream)
objectOutputStream.reset()
}
}
}
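
// Usage sketch (editor's addition, not part of the original file). It assumes
// Query can be subclassed here with its no-arg constructor; in the real code
// base one of the library's concrete query types would be used instead, and
// "values.bin" is just a placeholder path.
private class AnyValueQuery : Query()

private fun fileStreamStorageExample() {
  val storage = FileStreamValueDataStorage<String>(File("values.bin"))
  storage.putAll(AnyValueQuery(), listOf("alpha", "beta")).get()
  // getAll() replays every object that was ever appended to the file.
  println(storage.getAll(AnyValueQuery()).get())
}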

de9bf10781608cc332e58d91b799e26a6f9d34a5 | 1,067 | kt | Kotlin | App/app/src/test/java/at/tugraz/vaccinationpassport/UserTest.kt | sw21-tug/Team_20 | MIT

package at.tugraz.vaccinationpassport
import org.junit.Test
import org.junit.Assert.*
class UserTest {
@Test
fun addVaccinesTest() {
val user = User("John Doe", 27)
user.addVaccination(Vaccination("Covid", "12-02-2018"))
assertEquals(1, user.vaccines.size)
assertEquals("Covid", user.vaccines[0].name)
assertEquals("12-02-2018", user.vaccines[0].date)
var vaccines = mutableListOf<Vaccination>()
vaccines.add(Vaccination("Malaria", "12-02-2018"))
vaccines.add(Vaccination("Hepatitis A", "12-02-2018"))
vaccines.add(Vaccination("Hepatitis B", "12-02-2018"))
user.addVaccination(vaccines)
var matches = 0
for(vaccine in vaccines)
{
for(user_vaccine in user.vaccines)
{
if(user_vaccine.name == vaccine.name &&
user_vaccine.date == vaccine.date)
{
matches += 1
}
}
}
assertEquals(vaccines.size, matches)
}
}

dedef6f66c5decdf2a036615dd479ab1c9b6e034 | 6,741 | rs | Rust | src/commands/dev/socket.rs | jashandeep-sohi/wrangler | Apache-2.0, MIT

use std::time::Duration;
use chrome_devtools as protocol;
use futures_util::future::TryFutureExt;
use futures_util::sink::SinkExt;
use futures_util::stream::{SplitStream, StreamExt};
use tokio_stream::wrappers::UnboundedReceiverStream;
use crate::terminal::colored_json_string;
use crate::terminal::message::{Message, StdErr, StdOut};
use protocol::domain::runtime::event::Event::ExceptionThrown;
use tokio::net::TcpStream;
use tokio::sync::mpsc;
use tokio::time::sleep;
use tokio_tungstenite::{connect_async, tungstenite, MaybeTlsStream, WebSocketStream};
use anyhow::{anyhow, Result};
use url::Url;
const KEEP_ALIVE_INTERVAL: u64 = 10;
/// connect to a Workers runtime WebSocket emitting the Chrome Devtools Protocol
/// parse all console messages, and print them to stdout
pub async fn listen(socket_url: Url) -> Result<()> {
// we loop here so we can issue a reconnect when something
// goes wrong with the websocket connection
loop {
let ws_stream = connect_retry(&socket_url).await;
let (mut write, read) = ws_stream.split();
// console.log messages are in the Runtime domain
// we must signal that we want to receive messages from the Runtime domain
// before they will be sent
let enable_runtime = protocol::runtime::SendMethod::Enable(1.into());
let enable_runtime = serde_json::to_string(&enable_runtime)?;
let enable_runtime = tungstenite::protocol::Message::Text(enable_runtime);
write.send(enable_runtime).await?;
// if left unattended, the preview service will kill the socket
// that emits console messages
// send a keep alive message every so often in the background
let (keep_alive_tx, keep_alive_rx) = mpsc::unbounded_channel();
// every 10 seconds, send a keep alive message on the channel
let heartbeat = keep_alive(keep_alive_tx);
// when the keep alive channel receives a message from the
// heartbeat future, write it to the websocket
let keep_alive_to_ws = UnboundedReceiverStream::new(keep_alive_rx)
.map(Ok)
.forward(write)
.map_err(Into::into);
// parse all incoming messages and print them to stdout
let printer = print_ws_messages(read);
// run the heartbeat and message printer in parallel
if tokio::try_join!(heartbeat, keep_alive_to_ws, printer).is_ok() {
break Ok(());
        }
}
}
// Endlessly retry connecting to the chrome devtools instance with exponential backoff.
// The backoff maxes out at 60 seconds.
async fn connect_retry(socket_url: &Url) -> WebSocketStream<MaybeTlsStream<TcpStream>> {
let mut wait_seconds = 2;
let maximum_wait_seconds = 60;
let mut failed = false;
loop {
match connect_async(socket_url).await {
Ok((ws_stream, _)) => {
if failed {
// only report success if there was a failure, otherwise be quiet about it
StdErr::success("Connected!");
}
return ws_stream;
}
Err(e) => {
failed = true;
StdErr::warn(&format!("Failed to connect to devtools instance: {}", e));
StdErr::warn(&format!(
"Will retry connection in {} seconds",
wait_seconds
));
sleep(Duration::from_secs(wait_seconds)).await;
wait_seconds = wait_seconds.pow(2);
if wait_seconds > maximum_wait_seconds {
// max out at 60 seconds
wait_seconds = maximum_wait_seconds;
}
StdErr::working("Retrying...");
}
}
}
}
fn print_json(value: Result<serde_json::Value, serde_json::Error>, fallback: String) {
if let Ok(json) = value {
if let Ok(json_str) = colored_json_string(&json) {
println!("{}", json_str);
} else {
StdOut::message(fallback.as_str());
}
} else {
println!("{}", fallback);
}
}
async fn print_ws_messages(
mut read: SplitStream<WebSocketStream<MaybeTlsStream<TcpStream>>>,
) -> Result<()> {
while let Some(message) = read.next().await {
let message = message?;
let message_text = message.into_text().unwrap();
log::info!("{}", &message_text);
let parsed_message: Result<protocol::Runtime> = serde_json::from_str(&message_text)
.map_err(|e| anyhow!("Failed to parse event:\n{}", e));
match parsed_message {
Ok(protocol::Runtime::Event(ExceptionThrown(params))) => {
let default_description = "N/A".to_string();
let description = params
.exception_details
.exception
.description
.as_ref()
.unwrap_or(&default_description);
StdOut::message(&format!(
"{} at line {:?}, col {:?}",
description,
params.exception_details.line_number,
params.exception_details.column_number,
));
let json_parse = serde_json::to_value(params.clone());
print_json(json_parse, format!("{:?}", params));
}
Ok(protocol::Runtime::Event(event)) => {
// Try to parse json to pretty print, otherwise just print string
let json_parse: Result<serde_json::Value, serde_json::Error> =
serde_json::from_str(&*event.to_string());
print_json(json_parse, event.to_string());
}
Ok(protocol::Runtime::Method(_)) => {}
Err(err) => log::debug!("{}", err),
}
}
Ok(())
}
async fn keep_alive(tx: mpsc::UnboundedSender<tungstenite::protocol::Message>) -> Result<()> {
let duration = Duration::from_millis(1000 * KEEP_ALIVE_INTERVAL);
let mut delay = sleep(duration);
// this is set to 2 because we have already sent an id of 1 to enable the runtime
// eventually this logic should be moved to the chrome-devtools-rs library
let mut id = 2;
loop {
delay.await;
let keep_alive_message = protocol::runtime::SendMethod::GetIsolateId(id.into());
let keep_alive_message = serde_json::to_string(&keep_alive_message)
.expect("Could not convert keep alive message to JSON");
let keep_alive_message = tungstenite::protocol::Message::Text(keep_alive_message);
tx.send(keep_alive_message).unwrap();
id += 1;
delay = sleep(duration);
}
}
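
// Usage sketch (editor's addition, not part of the original file): `listen`
// only returns when the devtools session ends, so callers typically spawn it
// alongside the rest of `wrangler dev`. The URL below is a placeholder, not a
// real inspector endpoint.
#[allow(dead_code)]
async fn tail_console_example() -> Result<()> {
    let socket_url = Url::parse("ws://127.0.0.1:9230/ws")?;
    listen(socket_url).await
}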

d1b93711e70ce3c6de6ab7cd387a7081a626b0a4 | 12,530 | rs | Rust | simple-dns/src/dns/mod.rs | Disasm/simple-dns | MIT

//! Provides parsing and manipulation for DNS packets
mod character_string;
mod name;
mod packet;
mod packet_header;
mod question;
pub mod rdata;
mod resource_record;
use std::{collections::HashMap, convert::TryFrom};
pub use character_string::CharacterString;
pub use name::Name;
pub use packet::{Packet, PacketBuf, QuestionsIter};
pub use packet_header::PacketHeader;
pub use question::Question;
pub use resource_record::ResourceRecord;
const MAX_LABEL_LENGTH: usize = 63;
const MAX_NAME_LENGTH: usize = 255;
const MAX_CHARACTER_STRING_LENGTH: usize = 255;
const MAX_NULL_LENGTH: usize = 65535;
/// The maximum DNS packet size is 9000 bytes less the maximum
/// sizes of the IP (60) and UDP (8) headers.
// const MAX_PACKET_SIZE: usize = 9000 - 68;
/// Represents anything that can be part of a dns packet (Question, Resource Record, RData)
pub(crate) trait DnsPacketContent<'a> {
/// Parse the contents of the data buffer begining in the given position
/// It is necessary to pass the full buffer to this function, to be able to correctly implement name compression
fn parse(data: &'a [u8], position: usize) -> crate::Result<Self>
where
Self: Sized;
/// Append the bytes of this content to a given vector
fn append_to_vec(&self, out: &mut Vec<u8>) -> crate::Result<()>;
/// Append the bytes of this content to a given vector, compress Names before appending
fn compress_append_to_vec(
&self,
out: &mut Vec<u8>,
_name_refs: &mut HashMap<u64, usize>,
) -> crate::Result<()> {
self.append_to_vec(out)
}
/// Returns the length in bytes of this content
fn len(&self) -> usize;
}
/// Possible TYPE values in DNS Resource Records
/// Each value is described according to its own RFC
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TYPE {
/// Host address, [RFC 1035](https://tools.ietf.org/html/rfc1035)
A,
/// Host address (IPv6) [rfc3596](https://tools.ietf.org/html/rfc3596)
AAAA,
/// Authoritative name server, [RFC 1035](https://tools.ietf.org/html/rfc1035)
NS,
/// Mail destination (Obsolete - use MX), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MD,
/// Mail forwarder (Obsolete - use MX), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MF,
/// Canonical name for an alias, [RFC 1035](https://tools.ietf.org/html/rfc1035)
CNAME,
/// Marks the start of a zone of authority, [RFC 1035](https://tools.ietf.org/html/rfc1035)
SOA,
/// Mailbox domain name (EXPERIMENTAL), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MB,
/// Mail group member (EXPERIMENTAL), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MG,
/// Mail rename domain name (EXPERIMENTAL), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MR,
/// Null RR (EXPERIMENTAL), [RFC 1035](https://tools.ietf.org/html/rfc1035)
NULL,
/// Well known service description, [RFC 1035](https://tools.ietf.org/html/rfc1035)
WKS,
/// Domain name pointer, [RFC 1035](https://tools.ietf.org/html/rfc1035)
PTR,
/// Host information, [RFC 1035](https://tools.ietf.org/html/rfc1035)
HINFO,
/// Mailbox or mail list information, [RFC 1035](https://tools.ietf.org/html/rfc1035)
MINFO,
/// Mail exchange, [RFC 1035](https://tools.ietf.org/html/rfc1035)
MX,
/// Text strings, [RFC 1035](https://tools.ietf.org/html/rfc1035)
TXT,
/// SRV specifies the location of the server(s) for a specific protocol and domain. [RFC 2780](https://tools.ietf.org/html/rfc2782)
SRV,
/// Unknown value, for future (or unimplemented RFC) compatibility
Unknown(u16),
}
impl From<TYPE> for u16 {
fn from(value: TYPE) -> Self {
match value {
TYPE::A => 1,
TYPE::AAAA => 28,
TYPE::NS => 2,
TYPE::MD => 3,
TYPE::MF => 4,
TYPE::CNAME => 5,
TYPE::SOA => 6,
TYPE::MB => 7,
TYPE::MG => 8,
TYPE::MR => 9,
TYPE::NULL => 10,
TYPE::WKS => 11,
TYPE::PTR => 12,
TYPE::HINFO => 13,
TYPE::MINFO => 14,
TYPE::MX => 15,
TYPE::TXT => 16,
TYPE::SRV => 33,
TYPE::Unknown(x) => x,
}
}
}
impl From<u16> for TYPE {
fn from(value: u16) -> Self {
use self::TYPE::*;
match value {
1 => A,
2 => NS,
3 => MD,
4 => MF,
5 => CNAME,
6 => SOA,
7 => MB,
8 => MG,
9 => MR,
10 => NULL,
11 => WKS,
12 => PTR,
13 => HINFO,
14 => MINFO,
15 => MX,
16 => TXT,
28 => AAAA,
33 => SRV,
v => TYPE::Unknown(v),
}
}
}
/// Possible QTYPE values for a Question in a DNS packet
/// Each value is described according to its own RFC
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum QTYPE {
/// Host address, [RFC 1035](https://tools.ietf.org/html/rfc1035)
A = 1,
/// Host address (IPv6) [rfc3596](https://tools.ietf.org/html/rfc3596)
AAAA = 28,
/// Authoritative name server, [RFC 1035](https://tools.ietf.org/html/rfc1035)
NS = 2,
/// Mail destination (Obsolete - use MX), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MD = 3,
/// Mail forwarder (Obsolete - use MX), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MF = 4,
/// Canonical name for an alias, [RFC 1035](https://tools.ietf.org/html/rfc1035)
CNAME = 5,
/// Marks the start of a zone of authority, [RFC 1035](https://tools.ietf.org/html/rfc1035)
SOA = 6,
/// Mailbox domain name (EXPERIMENTAL), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MB = 7,
/// Mail group member (EXPERIMENTAL), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MG = 8,
/// Mail rename domain name (EXPERIMENTAL), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MR = 9,
/// Null RR (EXPERIMENTAL), [RFC 1035](https://tools.ietf.org/html/rfc1035)
NULL = 10,
/// Well known service description, [RFC 1035](https://tools.ietf.org/html/rfc1035)
WKS = 11,
/// Domain name pointer, [RFC 1035](https://tools.ietf.org/html/rfc1035)
PTR = 12,
/// Host Information, [RFC 1035](https://tools.ietf.org/html/rfc1035)
HINFO = 13,
/// Mailbox or mail list information, [RFC 1035](https://tools.ietf.org/html/rfc1035)
MINFO = 14,
/// Mail exchange, [RFC 1035](https://tools.ietf.org/html/rfc1035)
MX = 15,
/// Text strings, [RFC 1035](https://tools.ietf.org/html/rfc1035)
TXT = 16,
/// SRV specifies the location of the server(s) for a specific protocol and domain. [RFC 2780](https://tools.ietf.org/html/rfc2782)
SRV = 33,
/// A request for a transfer of an entire zone, [RFC 1035](https://tools.ietf.org/html/rfc1035)
AXFR = 252,
/// A request for mailbox-related records (MB, MG or MR), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MAILB = 253,
/// A request for mail agent RRs (Obsolete - see MX), [RFC 1035](https://tools.ietf.org/html/rfc1035)
MAILA = 254,
/// A request for all records, [RFC 1035](https://tools.ietf.org/html/rfc1035)
ANY = 255,
}
impl TryFrom<u16> for QTYPE {
type Error = crate::SimpleDnsError;
fn try_from(value: u16) -> Result<Self, Self::Error> {
use self::QTYPE::*;
match value {
1 => Ok(A),
2 => Ok(NS),
3 => Ok(MD),
4 => Ok(MF),
5 => Ok(CNAME),
6 => Ok(SOA),
7 => Ok(MB),
8 => Ok(MG),
9 => Ok(MR),
10 => Ok(NULL),
11 => Ok(WKS),
12 => Ok(PTR),
13 => Ok(HINFO),
14 => Ok(MINFO),
15 => Ok(MX),
16 => Ok(TXT),
28 => Ok(AAAA),
33 => Ok(SRV),
252 => Ok(AXFR),
253 => Ok(MAILB),
254 => Ok(MAILA),
255 => Ok(ANY),
v => Err(Self::Error::InvalidQType(v)),
}
}
}
/// Possible CLASS values for a Resource in a DNS packet
/// Each value is described according to its own RFC
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum CLASS {
/// The Internet, [RFC 1035](https://tools.ietf.org/html/rfc1035)
IN = 1,
/// The CSNET class (Obsolete - used only for examples in some obsolete RFCs), [RFC 1035](https://tools.ietf.org/html/rfc1035)
CS = 2,
/// The CHAOS class, [RFC 1035](https://tools.ietf.org/html/rfc1035)
CH = 3,
/// Hesiod [Dyer 87], [RFC 1035](https://tools.ietf.org/html/rfc1035)
HS = 4,
}
impl TryFrom<u16> for CLASS {
type Error = crate::SimpleDnsError;
fn try_from(value: u16) -> Result<Self, Self::Error> {
use self::CLASS::*;
match value {
1 => Ok(IN),
2 => Ok(CS),
3 => Ok(CH),
4 => Ok(HS),
v => Err(Self::Error::InvalidClass(v)),
}
}
}
/// Possible QCLASS values for a Question in a DNS packet
/// Each value is described according to its own RFC
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum QCLASS {
/// The Internet, [RFC 1035](https://tools.ietf.org/html/rfc1035)
IN = 1,
/// The CSNET class (Obsolete - used only for examples in some obsolete RFCs), [RFC 1035](https://tools.ietf.org/html/rfc1035)
CS = 2,
/// The CHAOS class, [RFC 1035](https://tools.ietf.org/html/rfc1035)
CH = 3,
/// Hesiod [Dyer 87], [RFC 1035](https://tools.ietf.org/html/rfc1035)
HS = 4,
/// [RFC 1035](https://tools.ietf.org/html/rfc1035)
ANY = 255,
}
impl TryFrom<u16> for QCLASS {
type Error = crate::SimpleDnsError;
fn try_from(value: u16) -> Result<Self, Self::Error> {
use self::QCLASS::*;
match value {
1 => Ok(IN),
2 => Ok(CS),
3 => Ok(CH),
4 => Ok(HS),
255 => Ok(ANY),
v => Err(Self::Error::InvalidQClass(v)),
}
}
}
/// Possible OPCODE values for a DNS packet, use to specify the type of operation.
/// [RFC 1035](https://tools.ietf.org/html/rfc1035): A four bit field that specifies kind of query in this message.
/// This value is set by the originator of a query and copied into the response.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum OPCODE {
/// Normal query
StandardQuery = 0,
/// Inverse query (query a name by IP)
InverseQuery = 1,
/// Server status request
ServerStatusRequest = 2,
/// Reserved opcode for future use
Reserved,
}
impl From<u16> for OPCODE {
fn from(code: u16) -> Self {
match code {
0 => OPCODE::StandardQuery,
1 => OPCODE::InverseQuery,
2 => OPCODE::ServerStatusRequest,
_ => OPCODE::Reserved,
}
}
}
/// Possible RCODE values for a DNS packet
/// [RFC 1035](https://tools.ietf.org/html/rfc1035) Response code - this 4 bit field is set as part of responses.
/// The values have the following interpretation
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum RCODE {
/// No error condition
NoError = 0,
/// Format error - The name server was unable to interpret the query.
FormatError = 1,
/// Server failure - The name server was unable to process this query due to a problem with the name server.
ServerFailure = 2,
/// Name Error - Meaningful only for responses from an authoritative name server,
/// this code signifies that the domain name referenced in the query does not exist.
NameError = 3,
/// Not Implemented - The name server does not support the requested kind of query.
NotImplemented = 4,
/// Refused - The name server refuses to perform the specified operation for policy reasons.
/// For example, a name server may not wish to provide the information to the particular requester,
/// or a name server may not wish to perform a particular operation (e.g., zone transfer) for particular data.
Refused = 5,
/// Reserved for future use.
Reserved,
}
impl From<u16> for RCODE {
fn from(code: u16) -> Self {
match code {
0 => RCODE::NoError,
1 => RCODE::FormatError,
2 => RCODE::ServerFailure,
3 => RCODE::NameError,
4 => RCODE::NotImplemented,
5 => RCODE::Refused,
_ => RCODE::Reserved,
}
}
}
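
// Usage sketch (editor's addition, not part of the original file): the enums
// round-trip to their on-the-wire u16 codes; QTYPE, QCLASS and CLASS use
// TryFrom because not every u16 value is valid.
#[allow(dead_code)]
fn example_code_conversions() -> crate::Result<()> {
    assert_eq!(u16::from(TYPE::TXT), 16);
    assert_eq!(TYPE::from(33u16), TYPE::SRV);
    assert_eq!(QTYPE::try_from(255)?, QTYPE::ANY);
    assert!(CLASS::try_from(9).is_err());
    Ok(())
}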

52f4b4809d5b9283d35a309f086892764afc5863 | 845 | kt | Kotlin | src/main/kotlin/com/flaidzeres/hid4kotlin/Example.kt | DmitriyGovorukhin/project-H | MIT

package com.flaidzeres.hid4kotlin
import java.nio.ByteBuffer
import java.util.*
fun main(args: Array<String>) {
val lsnr = object : HidDeviceListener {
override fun onConnect(d: HidDevice) {
println("Connected: $d")
}
override fun onDisconnect(d: HidDevice) {
println("Disconnected: $d")
}
}
val mgr = HidDeviceManager(mod = HIDRAW)
mgr += lsnr
mgr.open {
val d = device("/dev/hidraw1")
d.open {
val buf = ByteBuffer.allocate(64)
val arr = buf.array()
var cnt = 1000
while (true) {
if (cnt <= 0)
break
read(arr)
println(Arrays.toString(arr))
buf.clear()
cnt--
}
}
}
}

0be05d4a2cf8a6a79eafd93aa3b055ae54cbe534 | 5,435 | js | JavaScript | lib/nontree.js | calvinmetcalf/nontree | MIT

'use strict';
function getMid(box) {
var dif = [(box[2]-box[0])/3,(box[3]-box[1])/3];
return [[box[0]+dif[0],box[1]+dif[1]],[box[2]-dif[0],box[3]-dif[1]]];
}
var tiles = [
['a','b','c'],
['f','e','d'],
['g','h','i']
];
function whichNon(coord, mid, prev) {
var x,y;
if (coord[0] < mid[0][0]) {
x = 0;
} else if (coord[0] > mid[1][0]) {
x = 2;
} else{
x = 1;
}
if (coord[1] < mid[0][1]) {
y = 2;
} else if (coord[1] > mid[1][1]) {
y = 0;
} else{
y = 1;
}
return normilize(prev, tiles[y][x]);
}
function normilize(prev, current) {
var flipx = 0;
var flipy = 0;
var i = 0;
var len = prev.length;
while(i<len) {
if (~['b','e','h'].indexOf(prev[i])) {
flipy+=1;
}
if (~['f','e','d'].indexOf(prev[i])) {
flipx+=1;
}
i++;
}
return normalization[Boolean(flipx % 2)][Boolean(flipy % 2)][current];
}
function newBox(non, oldBox, mid, prev) {
var out = [0,0,0,0];
var normilizedNon = normilize(prev, non);
if (~['a','b','c'].indexOf(normilizedNon)) {
out[1]=mid[1][1];
out[3]=oldBox[3];
} else if (~['f','e','d'].indexOf(normilizedNon)) {
out[1]=mid[0][1];
out[3]=mid[1][1];
} else{
out[1]=oldBox[1];
out[3]=mid[0][1];
}
if (~['a','f','g'].indexOf(normilizedNon)) {
out[0]=oldBox[0];
out[2]=mid[0][0];
} else if (~['b','e','h'].indexOf(normilizedNon)) {
out[0] = mid[0][0];
out[2] = mid[1][0];
} else{
out[0] = mid[1][0];
out[2] = oldBox[2];
}
return out;
}
exports.toNon = toNon;
function toNon(coords, maxDepth, box) {
var depth = -1;
var out = '';
var current, mid, prev;
while (++depth < maxDepth) {
mid = getMid(box);
prev = current;
current = whichNon(coords, mid, out);
box = newBox(current, box, mid, out);
out += current;
}
return out;
}
exports.fromNon = fromNon;
function fromNon(non, range) {
var depth = non.length;
var i = -1;
var letter,prev,mid;
while (++i < depth) {
prev = letter;
letter = non[i];
mid = getMid(range);
range = newBox(letter, range, mid, non.slice(0, i));
}
return range;
}
exports.next = next;
function next(non) {
if (!non.length) {
return non;
}
var i = non.length;
while (i--) {
if (non.charCodeAt(i) < 105) {
return non.slice(0,i ) + String.fromCharCode(non.charCodeAt(i) + 1) + non.slice(i + 1);
} else if (non.charCodeAt(i)===105) {
non = non.slice(0, i);
} else{
throw new Error('invalid tile name');
}
}
}
exports.whichChildren = whichChildren;
function whichChildren(non, bbox, searchDepth, range) {
var children = [
non + 'a',
non + 'b',
non + 'c',
non + 'd',
non + 'e',
non + 'f',
non + 'g',
non + 'h',
non + 'i'
];
var full = [];
var partial = [];
children.forEach(function(child) {
var childBox = fromNon(child, range);
if (contains(bbox, childBox)) {
full.push(child);
} else if (intersects(childBox, bbox)) {
if (child.length === searchDepth) {
full.push(child);
} else {
partial.push(child);
}
}
});
return {
full: full,
partial: partial,
id: non
};
}
var normalization = {
true:{
true:{
c:'g',
b:'h',
a:'i',
f:'d',
e:'e',
d:'f',
i:'a',
h:'b',
g:'c'
},
false:{
c:'a',
b:'b',
a:'c',
f:'d',
e:'e',
d:'f',
i:'g',
h:'h',
g:'i'
}
},
false:{
true:{
a:'g',
b:'h',
c:'i',
d:'d',
e:'e',
f:'f',
g:'a',
h:'b',
i:'c'
},
false:{
a:'a',
b:'b',
c:'c',
d:'d',
e:'e',
f:'f',
g:'g',
h:'h',
i:'i'
}
}
};
function sort(a, b){
return a.num - b.num;
}
exports.extent = extent;
function extent(bbox, range, maxPieces, precision){
var todo = [''];
var done = [];
var current, newTodo, tempTodo, len, i, item;
while(todo.length){
len = todo.length;
i = -1;
newTodo = new Array(len);
while (++i < len) {
newTodo[i] = whichChildren(todo[i], bbox, precision, range);
}
todo = [];
tempTodo = [];
len = newTodo.length;
i = -1;
while (++i < len) {
item = newTodo[i];
if(item.full.length ===1 && !item.partial.length){
done.push(item.full[0]);
}else if(!item.full.length && item.partial.length ===1){
todo.push(item.partial[0]);
}else{
item.num = item.full.length + item.partial.length;
tempTodo.push(item);
}
}
if(tempTodo.length){
tempTodo.sort(sort);
i = -1;
len = tempTodo.length;
while(++i < len){
current = tempTodo[i];
if((current.num + todo.length + done.length) > maxPieces){
done.push(current.id);
}else{
done = done.concat(current.full);
todo = todo.concat(current.partial);
}
}
}
}
return done;
}
exports.deA = deA;
function deA(a) {
while (a.length) {
if (a[a.length - 1] === 'a') {
a = a.slice(0, -1);
} else {
return a;
}
}
return a;
}
//from rbush
function contains(a, b) {
return a[0] <= b[0] &&
a[1] <= b[1] &&
b[2] <= a[2] &&
b[3] <= a[3];
}
function intersects(a, b) {
return b[0] < a[2] &&
b[1] < a[3] &&
b[2] > a[0] &&
b[3] > a[1];
}
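
// Usage sketch (editor's addition, not part of the original file). `world` is
// the root bounding box ([minX, minY, maxX, maxY]) that every tile address is
// relative to; the coordinates below are arbitrary example values.
function exampleUsage() {
  var world = [-180, -90, 180, 90];
  var tile = toNon([-71.06, 42.36], 5, world);         // 5-letter tile address
  var box = fromNon(tile, world);                      // back to a bounding box
  var cover = extent([-72, 41, -70, 43], world, 9, 6); // <= 9 tiles covering the bbox
  return { tile: tile, box: box, cover: cover };
}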

4100fcac1b71200cd34bda2d68966cace9e69413 | 1,210 | c | C | Tute04.c | IT1050-2022-Feb/tutorial-01-IT21285738 | MIT

/*Exercise 4 - Functions
Implement the three functions minimum(), maximum() and multiply() below the main() function.
Do not change the code given in the main() function when you are implementing your solution.*/
#include <stdio.h>
//declaring Functions
int minimum(int num1 , int num2);
int maximum(int num1 , int num2);
int multiply(int num1 , int num2);
//function main begins program execution
int main()
{
int no1, no2;
printf("Enter a value for no 1 : ");
scanf("%d", &no1);
printf("Enter a value for no 2 : ");
scanf("%d", &no2);
printf("%d ", minimum(no1, no2));
printf("%d ", maximum(no1, no2));
printf("%d ", multiply(no1, no2));
return 0;
}
//function implementation
int minimum(int num1 , int num2)
{
int min;
if(num1 < num2)
{
min = num1;
}
else if (num1 > num2)
{
min = num2;
}
else
min = num1; //if num1 and num2 are the same
return min;
}
int maximum(int num1 , int num2)
{
int max;
if(num1 > num2)
{
max = num1;
}
else if (num1 < num2)
{
max = num2;
}
else
max = num1; //if num1 and num2 are the same
return max;
}
int multiply(int num1 , int num2)
{
int mul;
return mul = num1 * num2;
}

49ff51d9d6190bc4654b92afbaeeea16863d3004 | 19,761 | lua | Lua | spec/domain/trait/player/storage_spec.lua | czenker/lively-epsilon | MIT

insulate("Player:withStorage()", function()
require "init"
require "spec.mocks"
require "spec.asserts"
local product1 = productMock()
local product2 = productMock()
local product3 = productMock()
it("creates a valid storage", function()
local player = PlayerSpaceship()
Player:withStorage(player)
assert.is_true(Player:hasStorage(player))
assert.is_number(player:getMaxStorageSpace())
end)
it("allows to configure the maxStorage", function()
local player = PlayerSpaceship()
Player:withStorage(player, {maxStorage = 1000})
assert.is_true(Player:hasStorage(player))
assert.is_same(1000, player:getMaxStorageSpace())
end)
it("fails if first argument is a number", function()
assert.has_error(function() Player:withStorage(42) end)
end)
it("fails if second argument is a number", function()
assert.has_error(function() Player:withStorage(PlayerSpaceship(), 42) end)
end)
describe(":getStoredProducts()", function()
it("returns all the products that are currently stored", function()
local player = PlayerSpaceship()
Player:withStorage(player)
assert.is_same({}, player:getStoredProducts())
player:modifyProductStorage(product1, 1)
assert.is_same(1, Util.size(player:getStoredProducts()))
assert.contains_value(product1, player:getStoredProducts())
player:modifyProductStorage(product1, 1)
assert.is_same(1, Util.size(player:getStoredProducts()))
assert.contains_value(product1, player:getStoredProducts())
player:modifyProductStorage(product2, 1)
assert.is_same(2, Util.size(player:getStoredProducts()))
assert.contains_value(product2, player:getStoredProducts())
player:modifyProductStorage(product2, -1)
assert.is_same(1, Util.size(player:getStoredProducts()))
assert.contains_value(product1, player:getStoredProducts())
assert.not_contains_value(product2, player:getStoredProducts())
end)
it("does not return rockets and probes, because we might not know the correct object to return", function()
local player = PlayerSpaceship()
Player:withStorage(player)
player:setWeaponStorageMax("hvli", 5)
player:setWeaponStorage("hvli", 5)
player:setWeaponStorageMax("homing", 4)
player:setWeaponStorage("homing", 4)
player:setWeaponStorageMax("mine", 3)
player:setWeaponStorage("mine", 3)
player:setWeaponStorageMax("emp", 2)
player:setWeaponStorage("emp", 2)
player:setWeaponStorageMax("nuke", 1)
player:setWeaponStorage("nuke", 1)
player:setMaxScanProbeCount(4)
player:setScanProbeCount(4)
assert.is_same({}, player:getStoredProducts())
end)
end)
describe(":getProductStorage(), :getEmptyProductStorage(), :getMaxProductStorage()", function()
it("returns the correct value", function()
local player = PlayerSpaceship()
Player:withStorage(player, {maxStorage = 100})
assert.is_same(0, player:getProductStorage(product1))
assert.is_same(100, player:getMaxProductStorage(product1))
assert.is_same(100, player:getEmptyProductStorage(product1))
assert.is_same(0, player:getProductStorage(product2))
assert.is_same(100, player:getMaxProductStorage(product2))
assert.is_same(100, player:getEmptyProductStorage(product2))
assert.is_same(0, player:getProductStorage(product3))
assert.is_same(100, player:getMaxProductStorage(product3))
assert.is_same(100, player:getEmptyProductStorage(product3))
player:modifyProductStorage(product1, 10)
assert.is_same(10, player:getProductStorage(product1))
assert.is_same(100, player:getMaxProductStorage(product1))
assert.is_same(90, player:getEmptyProductStorage(product1))
assert.is_same(0, player:getProductStorage(product2))
assert.is_same(90, player:getMaxProductStorage(product2))
assert.is_same(90, player:getEmptyProductStorage(product2))
assert.is_same(0, player:getProductStorage(product3))
assert.is_same(90, player:getMaxProductStorage(product3))
assert.is_same(90, player:getEmptyProductStorage(product3))
player:modifyProductStorage(product2, 10)
assert.is_same(10, player:getProductStorage(product1))
assert.is_same(90, player:getMaxProductStorage(product1))
assert.is_same(80, player:getEmptyProductStorage(product1))
assert.is_same(10, player:getProductStorage(product2))
assert.is_same(90, player:getMaxProductStorage(product2))
assert.is_same(80, player:getEmptyProductStorage(product2))
assert.is_same(0, player:getProductStorage(product3))
assert.is_same(80, player:getMaxProductStorage(product3))
assert.is_same(80, player:getEmptyProductStorage(product3))
player:modifyProductStorage(product2, -5)
assert.is_same(10, player:getProductStorage(product1))
assert.is_same(95, player:getMaxProductStorage(product1))
assert.is_same(85, player:getEmptyProductStorage(product1))
assert.is_same(5, player:getProductStorage(product2))
assert.is_same(90, player:getMaxProductStorage(product2))
assert.is_same(85, player:getEmptyProductStorage(product2))
assert.is_same(0, player:getProductStorage(product3))
assert.is_same(85, player:getMaxProductStorage(product3))
assert.is_same(85, player:getEmptyProductStorage(product3))
end)
it("works correctly with sized products", function()
local product1 = productMock()
local product2 = productMock()
local product3 = productMock()
product1.getSize = function() return 1 end
product2.getSize = function() return 2 end
product3.getSize = function() return 4 end
local player = PlayerSpaceship()
Player:withStorage(player, {maxStorage = 100})
assert.is_same(0, player:getProductStorage(product1))
assert.is_same(100, player:getMaxProductStorage(product1))
assert.is_same(100, player:getEmptyProductStorage(product1))
assert.is_same(0, player:getProductStorage(product2))
assert.is_same(50, player:getMaxProductStorage(product2))
assert.is_same(50, player:getEmptyProductStorage(product2))
assert.is_same(0, player:getProductStorage(product3))
assert.is_same(25, player:getMaxProductStorage(product3))
assert.is_same(25, player:getEmptyProductStorage(product3))
player:modifyProductStorage(product1, 10)
assert.is_same(10, player:getProductStorage(product1))
assert.is_same(100, player:getMaxProductStorage(product1))
assert.is_same(90, player:getEmptyProductStorage(product1))
assert.is_same(0, player:getProductStorage(product2))
assert.is_same(45, player:getMaxProductStorage(product2))
assert.is_same(45, player:getEmptyProductStorage(product2))
assert.is_same(0, player:getProductStorage(product3))
assert.is_same(22, player:getMaxProductStorage(product3))
assert.is_same(22, player:getEmptyProductStorage(product3))
player:modifyProductStorage(product2, 10)
assert.is_same(10, player:getProductStorage(product1))
assert.is_same(80, player:getMaxProductStorage(product1))
assert.is_same(70, player:getEmptyProductStorage(product1))
assert.is_same(10, player:getProductStorage(product2))
assert.is_same(45, player:getMaxProductStorage(product2))
assert.is_same(35, player:getEmptyProductStorage(product2))
assert.is_same(0, player:getProductStorage(product3))
assert.is_same(17, player:getMaxProductStorage(product3))
assert.is_same(17, player:getEmptyProductStorage(product3))
player:modifyProductStorage(product2, -5)
assert.is_same(10, player:getProductStorage(product1))
assert.is_same(90, player:getMaxProductStorage(product1))
assert.is_same(80, player:getEmptyProductStorage(product1))
assert.is_same(5, player:getProductStorage(product2))
assert.is_same(45, player:getMaxProductStorage(product2))
assert.is_same(40, player:getEmptyProductStorage(product2))
assert.is_same(0, player:getProductStorage(product3))
assert.is_same(20, player:getMaxProductStorage(product3))
assert.is_same(20, player:getEmptyProductStorage(product3))
player:modifyProductStorage(product3, 5)
assert.is_same(10, player:getProductStorage(product1))
assert.is_same(70, player:getMaxProductStorage(product1))
assert.is_same(60, player:getEmptyProductStorage(product1))
assert.is_same(5, player:getProductStorage(product2))
assert.is_same(35, player:getMaxProductStorage(product2))
assert.is_same(30, player:getEmptyProductStorage(product2))
assert.is_same(5, player:getProductStorage(product3))
assert.is_same(20, player:getMaxProductStorage(product3))
assert.is_same(15, player:getEmptyProductStorage(product3))
end)
it("they fail when called without argument", function()
local player = PlayerSpaceship()
Player:withStorage(player)
assert.has_error(function() player:getProductStorage() end)
assert.has_error(function() player:getMaxProductStorage() end)
assert.has_error(function() player:getEmptyProductStorage() end)
end)
it("works with rockets", function()
local hvli = Product:new("HVLI", {id = "hvli"})
local player = PlayerSpaceship()
Player:withStorage(player)
player:setWeaponStorageMax("hvli", 8)
player:setWeaponStorage("hvli", 6)
assert.is_same(6, player:getProductStorage(hvli))
assert.is_same(8, player:getMaxProductStorage(hvli))
assert.is_same(2, player:getEmptyProductStorage(hvli))
for _, weapon in pairs({"hvli", "homing", "mine", "nuke", "emp"}) do
local rocket = Product:new(weapon, {id = weapon})
player:setWeaponStorageMax(weapon, 0)
player:setWeaponStorage(weapon, 0)
assert.is_same(0, player:getProductStorage(rocket))
assert.is_same(0, player:getMaxProductStorage(rocket))
assert.is_same(0, player:getEmptyProductStorage(rocket))
end
end)
it("works with scan probes", function()
local probe = Product:new("Scan Probe", {id = "scanProbe"})
local player = PlayerSpaceship()
Player:withStorage(player)
player:setMaxScanProbeCount(8)
player:setScanProbeCount(6)
assert.is_same(6, player:getProductStorage(probe))
assert.is_same(8, player:getMaxProductStorage(probe))
assert.is_same(2, player:getEmptyProductStorage(probe))
player:setMaxScanProbeCount(0)
player:setScanProbeCount(0)
assert.is_same(0, player:getProductStorage(probe))
assert.is_same(0, player:getMaxProductStorage(probe))
assert.is_same(0, player:getEmptyProductStorage(probe))
end)
end)
describe("getStorageSpace(), getEmptyStorageSpace(), getMaxStorageSpace()", function()
it("returns the correct value", function()
local player = PlayerSpaceship()
Player:withStorage(player, {maxStorage = 100})
assert.is_same(0, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(100, player:getEmptyStorageSpace())
player:modifyProductStorage(product1, 10)
assert.is_same(10, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(90, player:getEmptyStorageSpace())
player:modifyProductStorage(product2, 10)
assert.is_same(20, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(80, player:getEmptyStorageSpace())
player:modifyProductStorage(product2, -5)
assert.is_same(15, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(85, player:getEmptyStorageSpace())
end)
it("returns the correct values for sized products", function()
local product1 = productMock()
local product2 = productMock()
local product3 = productMock()
product1.getSize = function() return 1 end
product2.getSize = function() return 2 end
product3.getSize = function() return 4 end
local player = PlayerSpaceship()
Player:withStorage(player, {maxStorage = 100})
assert.is_same(0, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(100, player:getEmptyStorageSpace())
player:modifyProductStorage(product1, 10)
assert.is_same(10, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(90, player:getEmptyStorageSpace())
player:modifyProductStorage(product2, 10)
assert.is_same(30, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(70, player:getEmptyStorageSpace())
player:modifyProductStorage(product2, -5)
assert.is_same(20, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(80, player:getEmptyStorageSpace())
player:modifyProductStorage(product3, 5)
assert.is_same(40, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(60, player:getEmptyStorageSpace())
end)
end)
describe(":modifyProductStorage()", function()
it("it allows to overload the storage so that important mission items are not lost", function()
local player = PlayerSpaceship()
Player:withStorage(player, {maxStorage = 100})
assert.is_same(0, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(100, player:getEmptyStorageSpace())
assert.is_same(0, player:getProductStorage(product1))
assert.is_same(100, player:getMaxProductStorage(product1))
assert.is_same(100, player:getEmptyProductStorage(product1))
assert.is_same(0, player:getProductStorage(product2))
assert.is_same(100, player:getMaxProductStorage(product2))
assert.is_same(100, player:getEmptyProductStorage(product2))
player:modifyProductStorage(product1, 999)
assert.is_same(999, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(0, player:getEmptyStorageSpace())
assert.is_same(999, player:getProductStorage(product1))
assert.is_same(100, player:getMaxProductStorage(product1))
assert.is_same(0, player:getEmptyProductStorage(product1))
assert.is_same(0, player:getProductStorage(product2))
assert.is_same(0, player:getMaxProductStorage(product2))
assert.is_same(0, player:getEmptyProductStorage(product2))
end)
it("it keeps sure the storage level will not be negative", function()
local player = PlayerSpaceship()
Player:withStorage(player, {maxStorage = 100})
assert.is_same(0, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(100, player:getEmptyStorageSpace())
assert.is_same(0, player:getProductStorage(product1))
assert.is_same(100, player:getMaxProductStorage(product1))
assert.is_same(100, player:getEmptyProductStorage(product1))
assert.is_same(0, player:getProductStorage(product2))
assert.is_same(100, player:getMaxProductStorage(product2))
assert.is_same(100, player:getEmptyProductStorage(product2))
player:modifyProductStorage(product1, -10)
assert.is_same(0, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(100, player:getEmptyStorageSpace())
assert.is_same(0, player:getProductStorage(product1))
assert.is_same(100, player:getMaxProductStorage(product1))
assert.is_same(100, player:getEmptyProductStorage(product1))
assert.is_same(0, player:getProductStorage(product2))
assert.is_same(100, player:getMaxProductStorage(product2))
assert.is_same(100, player:getEmptyProductStorage(product2))
end)
it("fails if no product is given", function()
local player = PlayerSpaceship()
Player:withStorage(player)
assert.has_error(function() player:modifyProductStorage(nil, 10) end)
end)
it("fails if no amount is given", function()
local player = PlayerSpaceship()
Player:withStorage(player)
assert.has_error(function() player:modifyProductStorage(product1, nil) end)
end)
it("allows to handle rockets", function()
local player = PlayerSpaceship()
Player:withStorage(player)
for _, weapon in pairs({"hvli", "homing", "mine", "nuke", "emp"}) do
local rocket = Product:new(weapon, {id = weapon})
player:setWeaponStorageMax(weapon, 4)
player:setWeaponStorage(weapon, 0)
player:modifyProductStorage(rocket, 2)
assert.is_same(2, player:getWeaponStorage(weapon))
end
end)
it("allows to handle scan probes", function()
local probe = Product:new("Scan Probe", {id = "scanProbe"})
local player = PlayerSpaceship()
Player:withStorage(player)
player:setMaxScanProbeCount(4)
player:setScanProbeCount(0)
player:modifyProductStorage(probe, 2)
assert.is_same(2, player:getScanProbeCount())
end)
end)
describe(":setMaxStorageSpace()", function()
it("allows to set the maximum storage space", function()
local player = PlayerSpaceship()
Player:withStorage(player, {maxStorage = 100})
assert.is_same(0, player:getStorageSpace())
assert.is_same(100, player:getMaxStorageSpace())
assert.is_same(100, player:getEmptyStorageSpace())
player:setMaxStorageSpace(120)
assert.is_same(0, player:getStorageSpace())
assert.is_same(120, player:getMaxStorageSpace())
assert.is_same(120, player:getEmptyStorageSpace())
end)
end)
end)

dbe35854154ff85524a983c24c0fea5447ab2392 | 1,851 | lua | Lua | src/util/mbg/Event/DataOperateAction.lua | Karl5766/LuaSTG-x_BHElib | MIT

---
--- DataOperateAction.lua
---
--- Copyright (C) 2018 Xrysnow. All rights reserved.
---
local mbg = require('util.mbg.main')
local String = require('util.mbg.String')
---@class mbg.DataOperateAction:mbg.IAction
local DataOperateAction = {}
mbg.DataOperateAction = DataOperateAction
local function _DataOperateAction()
---@type mbg.DataOperateAction
local ret = {}
ret.LValue = ''
ret.TweenTime = 0
ret.Times = 0
ret.RValue = ''
ret.TweenFunction = 0
ret.Operator = 0
return ret
end
local mt = {
__call = function()
return _DataOperateAction()
end
}
setmetatable(DataOperateAction, mt)
DataOperateAction.TweenFunctionType = {
Proportional = 0,
Fixed = 1,
Sin = 2,
}
local TweenFunctionType = DataOperateAction.TweenFunctionType
DataOperateAction.OperatorType = {
ChangeTo = 0,
Add = 1,
Subtraction = 2,
}
---ParseFrom
---@param c mbg.String
---@return mbg.DataOperateAction
function DataOperateAction.ParseFrom(c)
local sents = c:split(',')
---@type mbg.DataOperateAction
local d = mbg.DataOperateAction()
mbg.ActionHelper.ParseFirstSentence(String(sents[1]), d)
local str = sents[2]
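    -- Editor's note (not part of the original file): the curve names compared
    -- below come straight from the Chinese data files: 固定 = fixed,
    -- 正比 = proportional, 正弦 = sine.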
if str == '固定' then
d.TweenFunction = TweenFunctionType.Fixed
elseif str == '正比' then
d.TweenFunction = TweenFunctionType.Proportional
elseif str == '正弦' then
d.TweenFunction = TweenFunctionType.Sin
else
error("无法解析变化曲线名称: " .. sents[2])
end
local s3 = String(sents[3])
local tweenTimeEnd = s3:find('帧')
d.TweenTime = s3:sub(1, tweenTimeEnd - 1):toint()
d.Times = nil
local timesL = s3:findlast('%(')
local timesR = s3:findlast('%)')
if timesL ~= -1 and timesR ~= -1 then
d.Times = s3:sub(timesL + 1, timesR - 1):toint()
end
return d
end

6b35382527aab887493e92af2c484b2a8c66c413 | 1,920 | c | C | cli.c | mtratsiuk/b2c | MIT

#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "cli.h"
#include "darray.h"
Cli cli_create(void *state) {
assert(state);
Darray entries = darray_create(sizeof(CliEntry), 8);
Cli cli = {.menu_entries = entries, .state = state};
return cli;
}
CliEntry cli_create_entry(KeyCode code, char *prompt,
void (*action)(void *state)) {
assert(action);
assert(prompt);
CliEntry entry = {.code = code, .prompt = prompt, .action = action};
return entry;
}
void cli_add_entry(Cli *cli, CliEntry entry) {
assert(cli);
darray_push(&cli->menu_entries, &entry);
}
void print_entry_callback(void *entry, size_t index) {
CliEntry *cli_entry = entry;
printf("%d) %s\n", cli_entry->code - '0', cli_entry->prompt);
}
int find_entry_callback(void *key, void *element) {
return *(KeyCode *)key - ((CliEntry *)element)->code;
}
void cli_start(Cli *cli) {
assert(cli);
int key_code = KeyCode_Enter;
while (1) {
if (key_code == KeyCode_Enter) {
darray_foreach(&cli->menu_entries, &print_entry_callback);
}
key_code = getchar();
if (key_code == KeyCode_Esc) {
break;
}
CliEntry *entry =
darray_find(&cli->menu_entries, &key_code, &find_entry_callback);
if (entry) {
entry->action(cli->state);
}
puts("\n");
}
}
char *cli_read_str(char *prompt) {
puts(prompt);
size_t entry_size = sizeof(char);
size_t capacity = 16;
char *str = malloc(entry_size * capacity);
size_t size = 0;
char ch = getchar();
if (ch == KeyCode_Enter) {
ch = getchar();
}
while (1) {
if (ch == EOF || ch == KeyCode_Enter) {
break;
}
if (size == capacity) {
capacity *= 2;
str = realloc(str, entry_size * capacity);
}
str[size] = ch;
size += 1;
ch = getchar();
}
  // Null-terminate before shrinking: the loop never writes a '\0', so calling
  // strlen() on the buffer was undefined behaviour. Grow by one byte if full.
  if (size == capacity) {
    str = realloc(str, entry_size * (capacity + 1));
  }
  str[size] = '\0';

  return realloc(str, entry_size * (size + 1));
}
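
/*
 * Usage sketch (editor's addition, not part of the original file): wire a menu
 * entry to a callback and hand control to cli_start(). Key codes are plain
 * ASCII characters, so '1' selects the first entry and Esc leaves the loop.
 * AppState, greet and example_main are illustrative names, not part of cli.h.
 */
typedef struct {
  int greetings;
} AppState;

static void greet(void *state) {
  ((AppState *)state)->greetings += 1;
  char *name = cli_read_str("Your name?");
  printf("Hello, %s!\n", name);
  free(name);
}

int example_main(void) {
  AppState state = {0};
  Cli cli = cli_create(&state);
  cli_add_entry(&cli, cli_create_entry('1', "Say hello", &greet));
  cli_start(&cli);
  return 0;
}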

19cc48afe8a6bcf2bffbab4b2a313a112ec7033f | 2,885 | sql | SQL | sql/_13_issues/_12_2h/cases/bug_bts_7856_4.sql | Zhaojia2019/cubrid-testcases | BSD-3-Clause

--TEST: [Merge Statement] The tuples that are not updated by the update_clause can be deleted by the delete_clause when the update_clause is with where conditions.
drop table if exists t1, t2;
create table t1(a int, b char(10));
insert into t1 values(1, 'aaa');
insert into t1 values(2, 'bbb');
insert into t1 values(3, 'ccc');
create table t2(a int, b char(10));
insert into t2 values(1, 'AAA');
insert into t2 values(2, 'BBB');
insert into t2 values(4, 'DDD');
merge into t1 using (select * from t2 where t2.a > 1) as d on (t1.a = d.a)
when matched then
update set t1.b = d.b where t1.a > 1
delete where t1.b='bbb';
select * from t1 order by 1;
drop table t1, t2;
create table t1(a int, b char(10));
insert into t1 values(1, 'aaa');
insert into t1 values(2, 'bbb');
insert into t1 values(3, 'ccc');
create table t2(a int, b char(10));
insert into t2 values(1, 'AAA');
insert into t2 values(2, 'BBB');
insert into t2 values(4, 'DDD');
create trigger trg before update on t1 execute insert into t2 values (NULL, NULL);
merge into t1 using (select * from t2 where t2.a > 1) as d on (t1.a = d.a)
when matched then
update set t1.b = d.b where t1.a > 3
delete where t1.b='bbb';
select * from t1 order by 1;
drop table t1, t2;
--- default
create table t1(a int , b char(10) default 'zzz');
insert into t1 values(1, 'aaa');
insert into t1 values(2, 'bbb');
insert into t1 values(3, 'ccc');
create table t2(a int, b char(10) default 'www');
insert into t2 values(1, 'AAA');
insert into t2 values(2, 'BBB');
insert into t2 values(4, 'DDD');
merge into t1 using (select * from t2 where t2.a > 1) as d on (t1.a = d.a)
when matched then
update set t1.b = default where t1.a > 1
delete where t1.b='zzz';
select * from t1 order by 1;
drop table t1, t2;
create table t1(a int , b char(10));
insert into t1 values(1, 'aaa');
insert into t1 values(2, 'bbb');
insert into t1 values(3, 'ccc');
create table t2(a int, b char(10));
insert into t2 values(1, 'AAA');
insert into t2 values(2, 'BBB');
insert into t2 values(4, 'DDD');
merge into t1 using (select * from t2 where t2.a > 1) as d on (t1.a = d.a)
when matched then
update set t1.b = left(d.b,1) where t1.a > 1
delete where left(t1.b,1)='B';
select * from t1 order by 1;
drop table t1, t2;
create table t1(a int , b char(10));
insert into t1 values(1, 'aaa');
insert into t1 values(2, 'bbb');
insert into t1 values(3, 'ccc');
create table t2(a int, b char(10));
insert into t2 values(1, 'AAA');
insert into t2 values(2, 'BBB');
insert into t2 values(4, 'DDD');
create trigger trg before update on t1 execute insert into t2 values (NULL, NULL);
merge into t1 using (select * from t2 where t2.a > 1) as d on (t1.a = d.a)
when matched then
update set t1.b = left(d.b, 1) where t1.a > 1
delete where left(t1.b,1) > 'B';
select * from t1 order by 1;
select if(count(*)=4, 'ok', 'nok') from t2;
drop table t1, t2;
| 26.46789 | 163 | 0.669671 | 3.203125 |
0cdb9744480da6f8e1b4899b7fcf04b7238e340b | 1,551 | py | Python | MachineLearning.BayesianNetwork/python-imp/bayes_core.py | JillyMan/decision-tree | 8e2efc914aaade9cc97a2c94052bc909e50fdb48 | ["MIT"] | null | null | null | MachineLearning.BayesianNetwork/python-imp/bayes_core.py | JillyMan/decision-tree | 8e2efc914aaade9cc97a2c94052bc909e50fdb48 | ["MIT"] | 1 | 2019-12-29T13:49:52.000Z | 2019-12-29T13:49:52.000Z | MachineLearning.BayesianNetwork/python-imp/bayes_core.py | JillyMan/MachineLerningFramework | 8e2efc914aaade9cc97a2c94052bc909e50fdb48 | ["MIT"] | null | null | null |
import math
RangeType = 'Range'
BinaryType = 'Binary'
class Hipothesis:
def __init__(self, id, name, p):
self.id = id
self.name = name
self.p = p
class Attribute:
def __init__(self, id, name, question, _type):
self.id = id
self.name = name
self.question = question
self.type = _type
class Tag:
def __init__(self, hipothesis, attribute, pp, pm):
self.pp = pp
self.pm = pm
self.attribute = attribute
self.hipothesis = hipothesis
class InputType:
def __init__(self, _type, value):
self.type = _type
self.value = int(value)
class Binary(InputType):
def __init__(self, value):
InputType.__init__(self, BinaryType, value)
class Range(InputType):
def __init__(self, start, end, value):
InputType.__init__(self, RangeType, value)
self.start = int(start)
self.end = int(end)
def normalize(self):
l = self.end - self.start
v = self.value - self.start
return v / l
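# P(H|E) via Bayes' rule, where p is the prior P(H), pp = P(E|H) and pm = P(E|not H).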
def phe_func(p, pp, pm):
return (p * pp) / (p * pp + (1-p) * pm)
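# Posterior probabilities for the evidence being present (phe) and absent (phne).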
def calc_probs(pp, pm, p):
phe = phe_func(p, pp, pm)
phne = phe_func(p, 1 - pp, 1 - pm)
return (phe, phne)
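# Linear interpolation from start to end by factor t.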
def lerp(start, end, t):
return start + (end - start) * t
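# Interpolate towards phe or phne depending on which side of 0.5 the answer r falls.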
def interpolate_result_clamp01(phne, ph, phe, r):
if r > 0.5:
return lerp(ph, phe, r)
elif r < 0.5:
return lerp(phne, ph, r)
return ph
def interpolate_result_binary(phne, phe, r):
return phne if r == 0 else phe
| 23.149254 | 54 | 0.588008 | 3.328125 |
da2e3e68e442e1e9d9e4bc5a3b172b942b936234 | 4,113 | rs | Rust | src/geometry.rs | White-Green/ray-tracing | da7c357bf88bb67429fef8f05d7eeb0c25bd1955 | ["MIT"] | null | null | null | src/geometry.rs | White-Green/ray-tracing | da7c357bf88bb67429fef8f05d7eeb0c25bd1955 | ["MIT"] | null | null | null | src/geometry.rs | White-Green/ray-tracing | da7c357bf88bb67429fef8f05d7eeb0c25bd1955 | ["MIT"] | null | null | null |
use std::ops::{Add, Div, Mul, Neg, Sub};
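/// A generic 3-component vector with component-wise arithmetic, dot and cross products.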
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct Vec3<V> {
x: V,
y: V,
z: V,
}
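/// A vector that has been scaled to unit length (produced by `Vec3::normalize`).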
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct NormalizedVec3<V> {
x: V,
y: V,
z: V,
}
impl<V> Vec3<V> {
pub fn new(x: V, y: V, z: V) -> Self {
Self { x, y, z }
}
pub fn inner_product<Rhs, P: Add<Output=P>>(self, rhs: Vec3<Rhs>) -> P
where V: Mul<Rhs, Output=P> {
self.x * rhs.x + self.y * rhs.y + self.z * rhs.z
}
pub fn outer_product<Rhs: Clone, P: Sub<Output=P>>(self, rhs: Vec3<Rhs>) -> Vec3<P>
where V: Mul<Rhs, Output=P> + Clone {
Vec3::new(
self.y.clone() * rhs.z.clone() - self.z.clone() * rhs.y.clone(),
self.z * rhs.x.clone() - self.x.clone() * rhs.z,
self.x * rhs.y - self.y * rhs.x,
)
}
pub fn squared_len<P: Add<Output=P>>(self) -> P
where V: Mul<Output=P>, Self: Clone {
self.clone().inner_product(self)
}
pub fn x(&self) -> &V {
&self.x
}
pub fn y(&self) -> &V {
&self.y
}
pub fn z(&self) -> &V {
&self.z
}
}
impl Vec3<f64> {
pub fn normalize(self) -> NormalizedVec3<f64> {
let len = self.squared_len().sqrt();
NormalizedVec3 {
x: self.x / len,
y: self.y / len,
z: self.z / len,
}
}
}
impl<V> From<NormalizedVec3<V>> for Vec3<V> {
fn from(value: NormalizedVec3<V>) -> Self {
Self {
x: value.x,
y: value.y,
z: value.z,
}
}
}
impl<V> NormalizedVec3<V> {
pub fn vec(self) -> Vec3<V> {
Vec3::new(self.x, self.y, self.z)
}
}
impl<V: Add> Add for Vec3<V> {
type Output = Vec3<V::Output>;
fn add(self, rhs: Self) -> Self::Output {
Vec3::new(self.x + rhs.x, self.y + rhs.y, self.z + rhs.z)
}
}
impl<V: Sub> Sub for Vec3<V> {
type Output = Vec3<V::Output>;
fn sub(self, rhs: Self) -> Self::Output {
Vec3::new(self.x - rhs.x, self.y - rhs.y, self.z - rhs.z)
}
}
impl<V: Neg> Neg for Vec3<V> {
type Output = Vec3<V::Output>;
fn neg(self) -> Self::Output {
Vec3::new(-self.x, -self.y, -self.z)
}
}
impl<Rhs: Clone, V: Mul<Rhs>> Mul<Rhs> for Vec3<V> {
type Output = Vec3<V::Output>;
fn mul(self, rhs: Rhs) -> Self::Output {
Vec3::new(self.x * rhs.clone(), self.y * rhs.clone(), self.z * rhs)
}
}
impl<Rhs: Clone, V: Div<Rhs>> Div<Rhs> for Vec3<V> {
type Output = Vec3<V::Output>;
fn div(self, rhs: Rhs) -> Self::Output {
Vec3::new(self.x / rhs.clone(), self.y / rhs.clone(), self.z / rhs)
}
}
#[cfg(test)]
mod tests {
use crate::geometry::Vec3;
#[test]
fn vec3_add_test() {
assert_eq!(Vec3::new(1, 2, 3) + Vec3::new(5, 7, 9), Vec3::new(6, 9, 12));
}
#[test]
fn vec3_sub_test() {
assert_eq!(Vec3::new(1, 2, 3) - Vec3::new(5, 7, 9), Vec3::new(-4, -5, -6));
}
#[test]
fn vec3_mul_test() {
assert_eq!(Vec3::new(1, 2, 3) * 3, Vec3::new(3, 6, 9));
}
#[test]
fn vec3_div_test() {
assert_eq!(Vec3::new(3, 7, 11) / 3, Vec3::new(1, 2, 3));
}
#[test]
fn vec3_inner_product_test() {
assert_eq!(Vec3::new(1, 2, 3).inner_product(Vec3::new(5, 4, 6)), 5 + 8 + 18);
}
#[test]
fn vec3_outer_product_test() {
assert_eq!(Vec3::new(1, 2, 3).outer_product(Vec3::new(5, 4, 7)), Vec3::new(2, 8, -6));
}
#[test]
fn vec3_normalize_test() {
let normalized: Vec3<_> = Vec3::new(1f64, 2f64, 3f64).normalize().into();
let len_expect = 3.7416573867739413855837487323165;
let diff = normalized - Vec3::new(1f64 / len_expect, 2f64 / len_expect, 3f64 / len_expect);
assert!(diff.x().abs() < 1e-3);
assert!(diff.y().abs() < 1e-3);
assert!(diff.z().abs() < 1e-3);
}
#[test]
fn vec3_util_test() {
assert_eq!(Vec3::new(1, 2, 3).x(), &1);
assert_eq!(Vec3::new(1, 2, 3).y(), &2);
assert_eq!(Vec3::new(1, 2, 3).z(), &3);
}
}
| 23.912791 | 99 | 0.502796 | 3.203125 |
8ec21d06dd5d4c7578cf5d935a09e467c81e71a7 | 1,783 | rb | Ruby | lib/office_quote_controller.rb | emhopkins/office-quote-cli-app | dd64ffa680aabb210ebe98c2b6e2a5387ba0ee0c | ["MIT"] | null | null | null | lib/office_quote_controller.rb | emhopkins/office-quote-cli-app | dd64ffa680aabb210ebe98c2b6e2a5387ba0ee0c | ["MIT"] | null | null | null | lib/office_quote_controller.rb | emhopkins/office-quote-cli-app | dd64ffa680aabb210ebe98c2b6e2a5387ba0ee0c | ["MIT"] | null | null | null |
class OfficeQuoteController
def initialize
quotes = Scraper.new
quotes.get_quote_pages
start_cli
end
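  # Main menu loop: prints the options and dispatches on the user's choice until "exit" is entered.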
def start_cli
input = ""
puts "Welcome to The Office Quote Generator"
while input != "exit" do
puts "If you'd like to exit, enter \"exit\""
puts "Please select from the following options: "
puts "1. Choose a quote from a specific character"
puts "2. Hear a dialogue quote (between multiple characters)"
puts "3. Hear a random quote"
input = gets.chomp
if input == "1"
get_quotes_by_character
elsif input == "2"
get_dialogue_quote
elsif input == "3"
get_random_quote
elsif input != "exit"
puts "Please enter 1, 2 or 3"
end
end
end
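  # Lists the available characters and prints one or all quotes for the chosen character.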
def get_quotes_by_character
input = ""
while input != "exit" && input != "n" && input != "N" do
puts "You may choose from these characters: "
Character.list_all_characters
puts "Please select from one of the following characters by entering their name: "
input = gets.chomp
puts "Would you like to hear 1 or all of their quotes?"
puts "Enter \"1\" for one quote and anything else to hear all."
option = gets.chomp
if option != "1"
Character.list_all_quotes_for_character(input)
else
Character.random_quote_for_character(input)
end
puts "Would you like to choose another character? (y/n)"
input = gets.chomp
end
end
def get_dialogue_quote
input = ""
while input != "exit" && input != "n" && input != "N" do
Quote.get_dialogue
puts "Would you like to hear another dialogue? (y/n)"
input = gets.chomp
end
end
def get_random_quote
input = ""
while input != "exit" && input != "n" && input != "N" do
Quote.get_random
puts "Would you like to hear another quote? (y/n)"
input = gets.chomp
end
end
end | 25.84058 | 85 | 0.67134 | 3.234375 |
3d58729365cab6672b0e7b134b1b2395dcdc48f0 | 6,948 | lua | Lua | Code/['World']['Global']['Weapon_Plugin_Package']['Module']['C_Module']['PlayerBehaviorModule'].ModuleScript.lua | lilith-avatar/rapid-duel | e91ce60e5dbd490763964f3449d5f907d7216742 | ["MIT"] | 8 | 2021-04-20T09:39:22.000Z | 2021-10-19T06:08:29.000Z | Code/['World']['Global']['Weapon_Plugin_Package']['Module']['C_Module']['PlayerBehaviorModule'].ModuleScript.lua | lilith-avatar/rapid-duel | e91ce60e5dbd490763964f3449d5f907d7216742 | ["MIT"] | null | null | null | Code/['World']['Global']['Weapon_Plugin_Package']['Module']['C_Module']['PlayerBehaviorModule'].ModuleScript.lua | lilith-avatar/rapid-duel | e91ce60e5dbd490763964f3449d5f907d7216742 | ["MIT"] | 2 | 2021-05-08T11:36:09.000Z | 2021-05-12T09:59:50.000Z |
--- @module PlayerBehavior Gun module: player behavior tree
--- @copyright Lilith Games, Avatar Team
--- @author RopzTao
local PlayerBehavior, this =
{
        playerActionStateFunc = {} --- state trigger table
},
nil
local firParam = nil
local function OnKeyDown()
    --- Press C to crouch
if Input.GetPressKeyData(Enum.KeyCode.C) ~= Enum.KeyState.KeyStateNone then
PlayerBehavior:PlayerCrouch()
end
    --- Hold Shift to sprint
    if Input.GetPressKeyData(Enum.KeyCode.LeftShift) ~= Enum.KeyState.KeyStateNone then
        --- Speed change
        PlayerBehavior:PlayerBehaviorChanged('isQuickly')
        --- Hide the crosshair
if PlayerGunMgr.gun then
---PlayerBehavior.gun.m_gui.crosshair:SetActive(false)
end
end
    --- Press Space to jump
if Input.GetPressKeyData(Enum.KeyCode.Space) ~= Enum.KeyState.KeyStateNone then
PlayerBehavior:PlayerJump()
end
end
local function OnKeyUp()
    --- Release Shift to stop sprinting
if Input.GetPressKeyData(Enum.KeyCode.LeftShift) == Enum.KeyState.KeyStateRelease then
PlayerBehavior:PlayerBehaviorChanged('isQuickly')
end
end
--- Initialization function
function PlayerBehavior:Init()
this = self
self:InitListener()
    ---@type PlayerInstance the current player object
self.player = localPlayer
self.state = PlayerActionModeEnum.Run
    --- Speed scale for the player's class
    self.SpeedStdCoeft = self.player.SpeedScale.Value
    --- Character movement state coefficient
    self.coefInertia = 1
    --- Character acceleration coefficient
self.InerPara = GunConfig.GlobalConfig.InertialParam
self.GunWeight = 1
self:InitialDataRead()
self:InitPlayerAttributes()
self:PlayerBehaviorChanged('isRun')
Input.OnKeyDown:Connect(OnKeyDown)
Input.OnKeyUp:Connect(OnKeyUp)
self.player.OnDead:Connect(
function()
self.state = PlayerActionModeEnum.Run
self.BehJudgeTab.isAim = false
end
)
end
--- Listener setup function
function PlayerBehavior:InitListener()
LinkConnects(localPlayer.C_Event, PlayerBehavior, this)
end
--- Initial data
function PlayerBehavior:InitialDataRead()
    --- Parameters used to judge player behavior
self.BehJudgeTab = {
isRun = false,
isCrouch = false,
isQuickly = false,
isAim = false
}
self.keyDownTab = {}
end
--- Different classes use different base speed coefficients
---@param _occ the class (occupation) type
function PlayerBehavior:ChangeOccEventHandler(_occ)
    --- Update the parameter
self.SpeedStdCoeft = localPlayer.SpeedScale.Value
end
--- Initial player settings
function PlayerBehavior:InitPlayerAttributes()
self.player.JumpUpVelocity = GunConfig.GlobalConfig.JumpSpeed
end
--- Update jump speed when a gun is equipped
function PlayerBehavior:OnEquipWeaponEventHandler()
if PlayerGunMgr.curGun == nil then
return
end
self.player.JumpUpVelocity = GunConfig.GlobalConfig.JumpSpeed * self.SpeedStdCoeft * self.GunWeight
end
--- Determine player behavior
function PlayerBehavior:PlayerBehaviorChanged(_behavior)
if self.BehJudgeTab[_behavior] then
self.BehJudgeTab[_behavior] = false
else
self.BehJudgeTab[_behavior] = true
end
for k, v in pairs(self.BehJudgeTab) do
if v then
table.insert(self.keyDownTab, k)
end
end
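    --- Map the set of active flags to a composite mode name (e.g. 'QuicklyRun', 'QuicklyCrouchRun')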
if #self.keyDownTab == 1 then
firParam = string.gsub(tostring(self.keyDownTab[1]), 'is', '')
self:PlayerModeChanged(firParam)
elseif #self.keyDownTab == 2 then
for i, j in pairs(self.keyDownTab) do
firParam = string.gsub(tostring(j), 'is', '')
if firParam ~= 'Run' then
self:PlayerModeChanged(firParam .. 'Run')
end
end
elseif #self.keyDownTab == 3 then
for m, n in pairs(self.keyDownTab) do
firParam = string.gsub(tostring(n), 'is', '')
if firParam ~= 'Run' and firParam ~= 'Crouch' then
self:PlayerModeChanged(firParam .. 'CrouchRun')
end
end
end
self.keyDownTab = {}
end
--- Determine the player state
---@param _modeName String
function PlayerBehavior:PlayerModeChanged(_modeName)
self.playerActionStateFunc[PlayerActionModeEnum[_modeName]] = function()
self.state = PlayerActionModeEnum[_modeName]
end
self.playerActionStateFunc[PlayerActionModeEnum[_modeName]]()
end
---@param direParam Float parameter mapping the joystick direction to different speeds
local direRes, direParam, tt = 0, 1, 0
local directionFactor = Vector2.Zero
--- Forward, backward and sideways movement use different speeds
function PlayerBehavior:DiffDireMovement(dt)
    --- If the joystick position was directionFactor on the previous frame and is at the origin on this frame
if
Vector2(BattleGUI.horizontal, BattleGUI.vertical) == directionFactor and
Vector2(BattleGUI.horizontal, BattleGUI.vertical) == Vector2.Zero
then
tt = 0
self.coefInertia = 1
else
tt = tt + dt
self.coefInertia = Asymptote(self.InerPara * tt)
end
directionFactor = Vector2(BattleGUI.horizontal, BattleGUI.vertical)
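    --- direRes is the cosine between the stick direction and forward (0, 1); forward movement is faster, backward slower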
if directionFactor ~= Vector2.Zero then
direRes = Vector2.Dot(directionFactor, Vector2(0, 1)) / directionFactor.Magnitude
if direRes >= 0.5 then
direParam = 1.35 * directionFactor.Magnitude
elseif direRes <= -0.5 then
direParam = (1 / 1.2) * directionFactor.Magnitude
else
direParam = (1 / 1.05) * directionFactor.Magnitude
end
end
end
--- Update function
function PlayerBehavior:Update(dt)
localPlayer.ActionState.Value = self.state
self:DiffDireMovement(dt)
self:CharacterStartInertia()
    --- Update the speed
for k, v in pairs(PlayerActionModeEnum) do
if v == self.state then
self.player.WalkSpeed =
GunConfig.GlobalConfig[tostring(k) .. 'Speed'] * self.SpeedStdCoeft * self.coefInertia * direParam *
self.GunWeight
end
end
end
--- Character start-up acceleration inertia
--- Matches the weight of the corresponding gun
function PlayerBehavior:CharacterStartInertia()
    --- Mass coefficient for different guns
if PlayerGunMgr.curGun then
self.GunWeight = 1 / PlayerGunMgr.curGun.config_weight
end
end
--- Player jump
function PlayerBehavior:PlayerJump()
if (self.player.IsOnGround or self.player.State == Enum.CharacterState.Seated) and not isDead then
if self.player:IsCrouch() then
self.player:EndCrouch()
self:PlayerBehaviorChanged('isCrouch')
CameraControl:Crouch()
return
else
if (PlayerGunMgr.curGun and PlayerGunMgr.curGun.m_isZoomIn) then
PlayerGunMgr.curGun:MechanicalAimStop()
end
self.player:Jump()
return
end
end
end
--- Player crouch
function PlayerBehavior:PlayerCrouch()
self:PlayerBehaviorChanged('isCrouch')
if not self.player:IsCrouch() then
self.player:StartCrouch()
else
self.player:EndCrouch()
end
CameraControl:Crouch()
end
--- Reset player crouch
function PlayerBehavior:CrouchReset()
if self.player:IsCrouch() then
BattleGUI:PlayerCrouchClick()
end
end
--- State initialization or reset
function PlayerBehavior:InitsetOrReset()
    --- Reset presentation
    self:CrouchReset()
    BattleGUI:GunInterReset()
    --- Reset data
self:InitialDataRead()
self:InitPlayerAttributes()
self:PlayerBehaviorChanged('isRun')
self.player.Health = 100
end
return PlayerBehavior
| 27.035019 | 116 | 0.665803 | 3.125 |
016d368f13289f909c62b804612e30451d898657 | 3,083 | sql | SQL | sql/cohort_sepsis.sql | benpopoff/hyperoxemia-mimic | 6cb32fddd2bead6a2a904ff2bee81c163a73e3c4 | ["MIT"] | null | null | null | sql/cohort_sepsis.sql | benpopoff/hyperoxemia-mimic | 6cb32fddd2bead6a2a904ff2bee81c163a73e3c4 | ["MIT"] | null | null | null | sql/cohort_sepsis.sql | benpopoff/hyperoxemia-mimic | 6cb32fddd2bead6a2a904ff2bee81c163a73e3c4 | ["MIT"] | null | null | null |
-- ------------------------------------------------------------------
--
-- Cohort sepsis selection
--
-- Collect data for population with relevant data and flags
--
-- ------------------------------------------------------------------
DROP MATERIALIZED VIEW IF EXISTS cohort_sepsis CASCADE;
CREATE MATERIALIZED VIEW cohort_sepsis AS
with abg_order as (
select icustay_id, charttime, po2,
dense_rank() over (partition by icustay_id order by charttime) AS abg_nb
from abg_sepsis
where po2 is not NULL
),
first as (
select icustay_id,
po2 as first_po2
from abg_order
where abg_nb = 1
),
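-- PO2 summary (average, min, max) over the first 24 hours of the ICU stay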
abg_24 as (
select ie.icustay_id,
round(cast(avg(po2) as numeric), 2) as avg_24_po2,
min(po2) as min_24_po2,
max(po2) as max_24_po2
from icustays as ie
inner join abg_sepsis as ab using (icustay_id)
where charttime <= ie.intime + interval '1' day
group by ie.icustay_id
),
abg_whole as (
select ie.icustay_id,
round(cast(avg(po2) as numeric), 2) as avg_whole_po2,
min(po2) as min_whole_po2,
max(po2) as max_whole_po2
from icustays as ie
inner join abg_sepsis as ab using (icustay_id)
group by ie.icustay_id
),
sources as (
select icustay_id,
source_pulm,
source_urogyn,
source_neuro,
source_dig,
source_ost,
source_ent,
source_card,
source_skin,
source_other
from infectious_sources
)
select subject_id,
icustay_id,
hadm_id,
first_po2,
avg_24_po2,
min_24_po2,
max_24_po2,
--avg_48_po2,
--min_48_po2,
--max_48_po2,
--avg_72_po2,
--min_72_po2,
--max_72_po2,
--avg_whole_po2,
--min_whole_po2,
--max_whole_po2,
first_careunit,
first_wardid,
admission_type,
intime,
outtime,
los_icu,
los_hospital,
vent_first_duration,
vaso_firstday,
norepinephrine_duration,
norepinephrine_max_dose,
admission_age,
gender,
height_firstday,
weight_firstday,
bmi_firstday,
abg_count,
lactate_max_firstday,
pco2_max_firstday,
ph_min_firstday,
albumin_min_firstday,
bicarbonate_min_firstday,
bilirubin_max_firstday,
creatinine_max_firstday,
chloride_max_firstday,
sodium_max_firstday,
potassium_max_firstday,
hematocrit_min_firstday,
hemoglobin_min_firstday,
platelet_min_firstday,
ptt_min_firstday,
rrt,
comorbidity_cardio,
comorbidity_vascular,
comorbidity_neuro,
comorbidity_pneumo,
comorbidity_metabolic,
comorbidity_renal,
comorbidity_liver,
comorbidity_cancer,
sofa,
sapsii,
apsiii,
noso_vap,
noso_urinary,
noso_cvc,
noso_cvc_other,
source_pulm,
source_urogyn,
source_neuro,
source_dig,
source_ost,
source_ent,
source_card,
source_skin,
source_other,
death_thirtydays,
death_icu,
death_hosp,
flag_shock
from cohort_all
left join first using (icustay_id)
left join abg_24 using (icustay_id)
left join abg_whole using (icustay_id)
left join sources using (icustay_id)
where flag_include = 1 and flag_sepsis = 1;
| 21.711268 | 75 | 0.674992 | 3.34375 |
da38b0e69516ed55dca4677ec6fda5a9f9130e89 | 4,719 | rs | Rust | src/models/timestamp.rs | jay3332/rs-cord | 0096062a8cccc4721d0fac3ac8b3db73eae4c34b | ["MIT"] | 5 | 2022-01-05T16:17:55.000Z | 2022-01-11T01:24:10.000Z | src/models/timestamp.rs | jay3332/rs-cord | 0096062a8cccc4721d0fac3ac8b3db73eae4c34b | ["MIT"] | null | null | null | src/models/timestamp.rs | jay3332/rs-cord | 0096062a8cccc4721d0fac3ac8b3db73eae4c34b | ["MIT"] | null | null | null |
use std::time::Duration;
#[cfg(feature = "chrono")]
extern crate chrono;
#[cfg(feature = "chrono")]
use chrono::{DateTime, NaiveDateTime, Utc};
/// An enum to represent relative time.
///
/// # See
/// [`Timestamp::as_relative_time`]
#[derive(Clone, Debug)]
pub enum RelativeTime {
/// This timestamp is in the past.
Past(Duration),
/// This timestamp is in the future.
Future(Duration),
}
/// Represents a timestamp which contains date and time.
#[derive(Clone, Debug)]
pub struct Timestamp {
/// Unix timestamp in milliseconds
timestamp: u64,
}
impl Timestamp {
/// Creates a new timestamp from a Unix timestamp in milliseconds.
#[must_use]
pub fn from_unix(timestamp: u64) -> Self {
Self { timestamp }
}
/// Creates a new timestamp from a Discord snowflake.
#[must_use]
pub fn from_snowflake(snowflake: u64) -> Self {
Self::from_unix(snowflake >> 22)
}
/// Creates a new timestamp from a [`chrono::DateTime`].
///
/// # Panics
/// - Timestamp is before the Unix epoch
#[cfg(feature = "chrono")]
#[must_use]
pub fn from_datetime(dt: DateTime<Utc>) -> Self {
Self::from_unix(
dt.timestamp_millis()
.try_into()
.expect("Timestamp is before the Unix epoch."),
)
}
/// Creates a new timestamp from an ISO 8601 timestamp.
///
/// # Panics
/// - Timestamp is not a valid timestamp.
#[cfg(feature = "chrono")]
#[must_use]
pub fn from_iso(iso: String) -> Self {
Self::from_datetime(
DateTime::parse_from_rfc3339(&iso)
.expect("Failed to parse ISO 8601 timestamp. (Note: this is confined to the RFC 3339 standard.)")
.with_timezone(&Utc)
)
}
/// The amount of milliseconds since the Unix epoch.
#[must_use]
pub fn timestamp_millis(&self) -> u64 {
self.timestamp
}
/// The amount of seconds since the Unix epoch, as a whole number.
#[must_use]
pub fn timestamp_secs(&self) -> u64 {
self.timestamp / 1000_u64
}
/// Converts this timestamp into a tuple (seconds, nonoseconds).
#[must_use]
pub fn to_secs_nanos(&self) -> (u64, u32) {
let secs = self.timestamp_secs();
let nanos = (self.timestamp - secs * 1000_u64) * 1_000_000_u64;
#[allow(clippy::cast_possible_truncation)]
(secs, nanos as u32)
}
/// The timestamp as a [`chrono::DateTime`].
#[cfg(feature = "chrono")]
#[must_use]
pub fn as_datetime(&self) -> DateTime<Utc> {
let (secs, nanos) = self.to_secs_nanos();
#[allow(clippy::cast_possible_wrap)]
DateTime::from_utc(NaiveDateTime::from_timestamp(secs as i64, nanos), Utc)
}
/// Returns a [`Duration`][`std::time::Duration`] that represents how much time has elapsed from this timestamp.
///
/// Note that this method will panic if this timestamp is in the future.
/// If you would like to convert this into relative time, use [`as_relative_time()`][`Timestamp::as_relative_time`].
///
/// # Panics
/// - Timestamp is in the future
#[cfg(feature = "chrono")]
#[must_use]
pub fn elapsed(&self) -> Duration {
Duration::from_millis(
(chrono::Utc::now().timestamp_millis() - self.timestamp_millis() as i64)
.try_into()
.expect("Timestamp is in the future."),
)
}
/// Returns a [`RelativeTime`] that represents how much time has elapsed from this timestamp.
///
/// If the timestamp is exactly now, the returned value is [`RelativeTime::Future`].
///
/// # Example
/// ```rust
/// use rs_cord::{Timestamp, RelativeTime};
/// use RelativeTime::{Past, Future};
///
/// match Timestamp::from_unix(1234).as_relative_time() {
/// Past(dur) => println!("{} seconds ago", dur.as_secs()),
/// Future(dur) => println!("{} seconds from now", dur.as_secs()),
/// }
/// ```
#[cfg(feature = "chrono")]
#[must_use]
pub fn as_relative_time(&self) -> RelativeTime {
let delta = chrono::Utc::now().timestamp_millis() - self.timestamp_millis() as i64;
if delta <= 0 {
RelativeTime::Future(Duration::from_millis(delta.abs() as u64))
} else {
RelativeTime::Past(Duration::from_millis(delta as u64))
}
}
}
#[cfg(feature = "chrono")]
impl From<Timestamp> for DateTime<Utc> {
fn from(ts: Timestamp) -> Self {
ts.as_datetime()
}
}
#[cfg(feature = "chrono")]
impl From<DateTime<Utc>> for Timestamp {
fn from(dt: DateTime<Utc>) -> Self {
Self::from_datetime(dt)
}
}
| 29.867089 | 120 | 0.594194 | 3.359375 |