Dataset schema (column | dtype | value range; ⌀ marks columns that contain nulls):

| column | dtype | value range |
|---|---|---|
| hexsha | stringlengths | 40–40 |
| size | int64 | 140–1.03M |
| ext | stringclasses | 94 values |
| lang | stringclasses | 21 values |
| max_stars_repo_path | stringlengths | 3–663 |
| max_stars_repo_name | stringlengths | 4–120 |
| max_stars_repo_head_hexsha | stringlengths | 40–78 |
| max_stars_repo_licenses | listlengths | 1–10 |
| max_stars_count | int64 | 1–368k ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24–24 ⌀ |
| max_issues_repo_path | stringlengths | 3–663 |
| max_issues_repo_name | stringlengths | 4–120 |
| max_issues_repo_head_hexsha | stringlengths | 40–78 |
| max_issues_repo_licenses | listlengths | 1–10 |
| max_issues_count | int64 | 1–116k ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24–24 ⌀ |
| max_forks_repo_path | stringlengths | 3–663 |
| max_forks_repo_name | stringlengths | 4–135 |
| max_forks_repo_head_hexsha | stringlengths | 40–78 |
| max_forks_repo_licenses | listlengths | 1–10 |
| max_forks_count | int64 | 1–105k ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24–24 ⌀ |
| content | stringlengths | 140–1.03M |
| avg_line_length | float64 | 2.32–23.1k |
| max_line_length | int64 | 11–938k |
| alphanum_fraction | float64 | 0.01–1 |
| score | float32 | 3–4.25 |
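
Each record that follows holds one source file: the metadata columns above (repeated for the star, issue, and fork variants), the raw file `content`, and the derived statistics `avg_line_length`, `max_line_length`, `alphanum_fraction`, and `score`. As a rough sketch of how records with this schema might be consumed, the Python snippet below filters and prints a few rows; the file name `records.parquet`, the pandas-based loading, and the filter thresholds are assumptions for illustration only, since the header does not say where or how the data is hosted.

```python
import pandas as pd

# Hypothetical local export of this dataset; the actual source location is not given above.
df = pd.read_parquet("records.parquet")

# Keep reasonably small files with a decent quality score (thresholds are arbitrary examples).
subset = df[(df["size"] < 10_000) & (df["score"] >= 3.0)]

for _, row in subset.head(3).iterrows():
    # Repository, path and language of the sample, then a preview of its content.
    print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["lang"])
    print(row["content"][:200])
```
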
fe09189f423ef1ebd5418cdddb923a88150ff419 | 776 | c | C | ITP/Atividades/ITP - Ponteiros 1 - Lista/4.c | danielhenrif/C | 0280e65b9c21d5ae7cb697a8df562373c42c3a3f | ["MIT"] | null | null | null | ITP/Atividades/ITP - Ponteiros 1 - Lista/4.c | danielhenrif/C | 0280e65b9c21d5ae7cb697a8df562373c42c3a3f | ["MIT"] | null | null | null | ITP/Atividades/ITP - Ponteiros 1 - Lista/4.c | danielhenrif/C | 0280e65b9c21d5ae7cb697a8df562373c42c3a3f | ["MIT"] | 1 | 2021-01-23T18:39:46.000Z | 2021-01-23T18:39:46.000Z |
#include <stdio.h>
void MAXMIN(int n, int *m){
int maior = *m ,menor = *m;
int i_max = 0 ,j_max = 0 ,i_min = 0 ,j_min = 0;
for (int i = 0; i < n ; i++)
{
for (int j = 0; j < n; j++)
{
if(*m > maior){
maior = *m;
i_max = i; j_max = j;
}
if(*m < menor){
menor = *m;
i_min = i; j_min = j;
}
m++;
}
}
printf("%d %d %d %d %d %d \n",maior,i_max, j_max, menor, i_min, j_min);
}
int main(){
int n;
scanf("%d",&n);
int matriz[n*n];
//matrix input
for (int i = 0; i < n*n; i++)
{
scanf("%d",&matriz[i]);
}
MAXMIN(n,matriz);
return 0;
}
| 19.4 | 75 | 0.360825 | 3.09375 |
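
The four numbers closing each record are the derived columns from the schema: `avg_line_length`, `max_line_length`, `alphanum_fraction`, and `score` (19.4, 75, 0.360825 and 3.09375 for the C file above). The header does not spell out the formulas; the sketch below is one plausible reconstruction of the first three from a record's `content` string, while `score` comes from an external quality model and is not reproduced here.

```python
def content_stats(content: str):
    """Plausible reconstruction of avg_line_length, max_line_length and alphanum_fraction."""
    lines = content.splitlines()
    avg_line_length = sum(len(line) for line in lines) / max(len(lines), 1)
    max_line_length = max((len(line) for line in lines), default=0)
    alphanum_fraction = sum(ch.isalnum() for ch in content) / max(len(content), 1)
    return avg_line_length, max_line_length, alphanum_fraction
```
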
c08b1795bdfd5ec88aa9f0ec6e675aa8455fb324 | 1,035 | sql | SQL | src/Resources/NumericIdGeneratorStoredProcedure.sql | satano/Kros.Utils | 1f7de934cc68819862c48ea669dd1484d22618f2 | ["MIT"] | 3 | 2019-03-01T11:13:44.000Z | 2022-02-04T08:58:03.000Z | src/Resources/NumericIdGeneratorStoredProcedure.sql | satano/Kros.Utils | 1f7de934cc68819862c48ea669dd1484d22618f2 | ["MIT"] | 9 | 2019-03-22T08:17:34.000Z | 2021-04-14T07:39:30.000Z | src/Resources/NumericIdGeneratorStoredProcedure.sql | satano/Kros.Utils | 1f7de934cc68819862c48ea669dd1484d22618f2 | ["MIT"] | 7 | 2019-04-25T12:39:18.000Z | 2022-03-23T07:07:36.000Z |
CREATE PROCEDURE [{{StoredProcedureName}}]
(
@TableName nvarchar(100) = '',
@NumberOfItems int = 1
)
AS
BEGIN
set nocount on
begin transaction
save transaction transSavePoint
begin try
declare @lastId {{DataType}}
SELECT @lastId = LastId FROM [{{TableName}}] WITH (XLOCK) WHERE (TableName = @TableName)
if (@lastId is null)
begin
INSERT INTO [{{TableName}}] (TableName, LastId) VALUES (@TableName, @NumberOfItems)
set @lastId = 1
end
else
begin
UPDATE [{{TableName}}] SET LastId = @lastId + @NumberOfItems WHERE (TableName = @TableName)
set @lastId = @lastId + 1
end
select @lastId
end try
begin catch
if @@TRANCOUNT > 0
begin
rollback transaction transSavePoint
end
declare @errorMessage nvarchar(4000)
declare @errorSeverity int
declare @errorState int
select @errorMessage = ERROR_MESSAGE()
select @errorSeverity = ERROR_SEVERITY()
select @errorState = ERROR_STATE()
raiserror (@errorMessage, @errorSeverity, @errorState)
end catch
commit transaction
END
| 19.166667 | 94 | 0.708213 | 3.25 |
5c511ccbc8c61be155752f4524fb6f49cafed401 | 5,341 | h | C | mentos/inc/klib/hashmap.h | zfd-progetti-univr-2021-2022/MentOS | fc4a9456520a9a79a2c3875aff0f527de51bc6e3 | ["MIT"] | null | null | null | mentos/inc/klib/hashmap.h | zfd-progetti-univr-2021-2022/MentOS | fc4a9456520a9a79a2c3875aff0f527de51bc6e3 | ["MIT"] | null | null | null | mentos/inc/klib/hashmap.h | zfd-progetti-univr-2021-2022/MentOS | fc4a9456520a9a79a2c3875aff0f527de51bc6e3 | ["MIT"] | null | null | null |
/// MentOS, The Mentoring Operating system project
/// @file hashmap.h
/// @brief Functions for managing a structure that can map keys to values.
/// @copyright (c) 2014-2021 This file is distributed under the MIT License.
/// See LICENSE.md for details.
#pragma once
#include "klib/list.h"
// == OPAQUE TYPES ============================================================
/// @brief Stores information of an entry of the hashmap.
typedef struct hashmap_entry_t hashmap_entry_t;
/// @brief Stores information of a hashmap.
typedef struct hashmap_t hashmap_t;
// == HASHMAP FUNCTIONS =======================================================
/// @brief Hashing function, used to generate hash keys.
typedef unsigned int (*hashmap_hash_t)(const void *key);
/// @brief Comparison function, used to compare hash keys.
typedef int (*hashmap_comp_t)(const void *a, const void *b);
/// @brief Key duplication function, used to duplicate hash keys.
typedef void *(*hashmap_dupe_t)(const void *);
/// @brief Key deallocation function, used to free the memory occupied by hash keys.
typedef void (*hashmap_free_t)(void *);
// == HASHMAP KEY MANAGEMENT FUNCTIONS ========================================
/// @brief Transforms an integer key into a hash key.
/// @param key The integer key.
/// @return The resulting hash key.
unsigned int hashmap_int_hash(const void *key);
/// @brief Compares two integer hash keys.
/// @param a The first hash key.
/// @param b The second hash key.
/// @return Result of the comparison.
int hashmap_int_comp(const void *a, const void *b);
/// @brief Transforms a string key into a hash key.
/// @param key The string key.
/// @return The resulting hash key.
unsigned int hashmap_str_hash(const void *key);
/// @brief Compares two string hash keys.
/// @param a The first hash key.
/// @param b The second hash key.
/// @return Result of the comparison.
int hashmap_str_comp(const void *a, const void *b);
/// @brief This function can be passed as hashmap_dupe_t, it does nothing.
/// @param value The value to duplicate.
/// @return The duplicated value.
void *hashmap_do_not_duplicate(const void *value);
/// @brief This function can be passed as hashmap_free_t, it does nothing.
/// @param value The value to free.
void hashmap_do_not_free(void *value);
// == HASHMAP CREATION AND DESTRUCTION ========================================
/// @brief User-defined hashmap.
/// @param size Dimension of the hashmap.
/// @param hash_fun The hashing function.
/// @param comp_fun The hash compare function.
/// @param dupe_fun The key duplication function.
/// @param key_free_fun The function used to free memory of keys.
/// @return A pointer to the hashmap.
/// @details
/// (key_free_fun) : No free function.
/// (val_free_fun) : Standard `free` function.
hashmap_t *hashmap_create(
unsigned int size,
hashmap_hash_t hash_fun,
hashmap_comp_t comp_fun,
hashmap_dupe_t dupe_fun,
hashmap_free_t key_free_fun);
/// @brief Standard hashmap with keys of type (char *).
/// @param size Dimension of the hashmap.
/// @return A pointer to the hashmap.
/// @details
/// (key_free_fun) : Standard `free` function.
/// (val_free_fun) : Standard `free` function.
hashmap_t *hashmap_create_str(unsigned int size);
/// @brief Standard hashmap with keys of type (char *).
/// @param size Dimension of the hashmap.
/// @return A pointer to the hashmap.
/// @details
/// (key_free_fun) : No free function.
/// (val_free_fun) : Standard `free` function.
hashmap_t *hashmap_create_int(unsigned int size);
/// @brief Frees the memory of the hashmap.
/// @param map A pointer to the hashmap.
void hashmap_free(hashmap_t *map);
// == HASHMAP ACCESS FUNCTIONS ================================================
/// @brief Sets the `value` for the given `key` in the hashmap `map`.
/// @param map The hashmap.
/// @param key The entry key.
/// @param value The entry value.
/// @return NULL on success, a pointer to an already existing entry if fails.
void *hashmap_set(hashmap_t *map, const void *key, void *value);
/// @brief Access the value for the given key.
/// @param map The hashmap.
/// @param key The key of the entry we are searching.
/// @return The value on success, or NULL on failure.
void *hashmap_get(hashmap_t *map, const void *key);
/// @brief Removes the entry with the given key.
/// @param map The hashmap.
/// @param key The key of the entry we are searching.
/// @return The value on success, or NULL on failure.
void *hashmap_remove(hashmap_t *map, const void *key);
/// @brief Checks if the hashmap is empty.
/// @param map The hashmap.
/// @return 1 if empty, 0 otherwise.
int hashmap_is_empty(hashmap_t *map);
/// @brief Checks if the hashmap contains an entry with the given key.
/// @param map The hashmap.
/// @param key The key of the entry we are searching.
/// @return 1 if the entry is present, 0 otherwise.
int hashmap_has(hashmap_t *map, const void *key);
/// @brief Provides access to all the keys.
/// @param map The hashmap.
/// @return A list with all the keys, remember to destroy the list.
list_t *hashmap_keys(hashmap_t *map);
/// @brief Provides access to all the values.
/// @param map The hashmap.
/// @return A list with all the values, remember to destroy the list.
list_t *hashmap_values(hashmap_t *map);
| 38.985401 | 84 | 0.679835 | 3.171875 |
b2b9128938a7476610fbf31df937ff94978048ae | 1,514 | py | Python | tests/TestMetrics.py | gr33ndata/irlib | 4a518fec994b1a89cdc7d09a8170efec3d7e6615 | ["MIT"] | 80 | 2015-02-16T18:33:57.000Z | 2021-05-06T02:03:22.000Z | tests/TestMetrics.py | gr33ndata/irlib | 4a518fec994b1a89cdc7d09a8170efec3d7e6615 | ["MIT"] | 2 | 2016-02-05T06:30:21.000Z | 2017-09-24T17:42:58.000Z | tests/TestMetrics.py | gr33ndata/irlib | 4a518fec994b1a89cdc7d09a8170efec3d7e6615 | ["MIT"] | 25 | 2015-05-13T17:35:41.000Z | 2020-06-04T01:52:11.000Z |
from unittest import TestCase
from irlib.metrics import Metrics
class TestMetrics(TestCase):
def setUp(self):
self.m = Metrics()
def test_jaccard_same_len(self):
with self.assertRaises(ValueError):
self.m.jaccard_vectors(
[0, 1],
[0, 1, 2, 3]
)
def test_jaccard_empty(self):
e = self.m.jaccard_vectors([],[])
self.assertEqual(e,1)
def test_jaccard_int(self):
e = self.m.jaccard_vectors(
[0, 2, 1, 3],
[0, 1, 2, 3]
)
self.assertEqual(e,0.75)
def test_jaccard_bool(self):
e = self.m.jaccard_vectors(
[False, False, True, True, True ],
[False, True , True, True, False]
)
self.assertEqual(e,0.4)
def test_euclid_same_len(self):
with self.assertRaises(ValueError):
self.m.euclid_vectors(
[0, 1, 2, 3],
[0, 1]
)
def test_euclid(self):
e = self.m.euclid_vectors([1,1],[4,5])
self.assertEqual(e,5)
def test_cos_same_len(self):
with self.assertRaises(ValueError):
self.m.cos_vectors(
[0, 1, 2],
[1, 1]
)
def test_cos_0(self):
c = self.m.cos_vectors([1,0,1],[0,1,0])
self.assertEqual(round(c,5),float(0))
def test_cos_1(self):
c = self.m.cos_vectors([1,1,1],[1,1,1])
self.assertEqual(round(c,5),float(1))
| 24.819672 | 47 | 0.515192 | 3.34375 |
0cde5c372756830b141e6816281e99f572d9eff3 | 3,463 | py | Python | tests/required_with_test.py | roypeters/spotlight | f23818cf7b49aa7a31200c1945ebc2d91656156e | ["MIT"] | 9 | 2019-03-26T13:21:16.000Z | 2021-03-21T08:55:49.000Z | tests/required_with_test.py | roypeters/spotlight | f23818cf7b49aa7a31200c1945ebc2d91656156e | ["MIT"] | 7 | 2019-03-28T17:32:03.000Z | 2021-09-24T13:17:32.000Z | tests/required_with_test.py | roypeters/spotlight | f23818cf7b49aa7a31200c1945ebc2d91656156e | ["MIT"] | 4 | 2019-03-30T13:28:22.000Z | 2020-06-15T13:15:44.000Z |
from src.spotlight.errors import REQUIRED_WITH_ERROR
from .validator_test import ValidatorTest
class RequiredWithTest(ValidatorTest):
def setUp(self):
self.other_field = "test1"
self.field = "test2"
self.required_with_error = REQUIRED_WITH_ERROR.format(
field=self.field, other=self.other_field
)
self.rules = {"test2": "required_with:test1"}
def test_required_with_rule_with_missing_field_expect_error(self):
data = {"test1": "hello"}
expected = self.required_with_error
errors = self.validator.validate(data, self.rules)
errs = errors.get(self.field)
self.assertEqual(errs[0], expected)
def test_required_with_rule_with_field_present_expect_no_error(self):
data = {"test1": "hello", "test2": "world"}
expected = None
errors = self.validator.validate(data, self.rules)
errs = errors.get(self.field)
self.assertEqual(errs, expected)
def test_required_with_rule_with_boolean_true_expect_no_error(self):
data = {"test1": True, "test2": "world"}
expected = None
errors = self.validator.validate(data, self.rules)
errs = errors.get(self.field)
self.assertEqual(errs, expected)
def test_required_with_rule_with_boolean_false_expect_no_error(self):
data = {"test1": False, "test2": "world"}
expected = None
errors = self.validator.validate(data, self.rules)
errs = errors.get(self.field)
self.assertEqual(errs, expected)
def test_required_with_rule_with_multi_requirement_and_missing_field_expect_error(
self
):
field = "test5"
rules = {"test5": "required_with:test1,test2,test3,test4"}
data = {"test2": "not.missing", "test4": "not.missing"}
expected = REQUIRED_WITH_ERROR.format(
field=field, other="test1, test2, test3, test4"
)
errors = self.validator.validate(data, rules)
errs = errors.get(field)
self.assertEqual(errs[0], expected)
def test_required_with_rule_with_all_present_expect_no_error(self):
rules = {"test5": "required_with:test1,test2,test3,test4"}
data = {
"test1": "test",
"test2": "test",
"test3": "test",
"test4": "test",
"test5": "test",
}
expected = None
errors = self.validator.validate(data, rules)
errs = errors.get("test5")
self.assertEqual(errs, expected)
def test_required_with_rule_with_other_field_present_but_none_expect_error(self):
field = "test2"
rules = {
"test1": "required_with:test2|string",
"test2": "required_with:test1|string",
}
data = {"test1": "test", "test2": None}
expected = REQUIRED_WITH_ERROR.format(field=field, other="test1")
errors = self.validator.validate(data, rules)
errs = errors.get(field)
self.assertEqual(errs[0], expected)
def test_required_with_rule_with_both_none_expect_no_error(self):
field = "test2"
rules = {
"test1": "required_with:test2|string",
"test2": "required_with:test1|string",
}
data = {"test1": None, "test2": None}
expected = None
errors = self.validator.validate(data, rules)
errs = errors.get(field)
self.assertEqual(errs, expected)
| 32.064815 | 86 | 0.626047 | 3.109375 |
7024732140d5618e7a550fb1a433b2d231dcf87a | 10,867 | go | Go | lars.go | go-experimental/lcars | 4dc376e72ef7f96a295a328f06e83e2b9ab85422 | ["MIT"] | 438 | 2016-01-30T01:10:06.000Z | 2022-03-23T02:09:18.000Z | lars.go | go-experimental/lcars | 4dc376e72ef7f96a295a328f06e83e2b9ab85422 | ["MIT"] | 11 | 2016-04-03T15:19:31.000Z | 2021-08-20T23:50:53.000Z | lars.go | go-experimental/lcars | 4dc376e72ef7f96a295a328f06e83e2b9ab85422 | ["MIT"] | 29 | 2016-03-30T07:02:33.000Z | 2022-01-24T10:00:52.000Z |
package lars
import (
"fmt"
"net/http"
"reflect"
"strings"
"sync"
"github.com/go-playground/form"
)
// HTTP Constant Terms and Variables
const (
// CONNECT HTTP method
CONNECT = http.MethodConnect
// DELETE HTTP method
DELETE = http.MethodDelete
// GET HTTP method
GET = http.MethodGet
// HEAD HTTP method
HEAD = http.MethodHead
// OPTIONS HTTP method
OPTIONS = http.MethodOptions
// PATCH HTTP method
PATCH = http.MethodPatch
// POST HTTP method
POST = http.MethodPost
// PUT HTTP method
PUT = http.MethodPut
// TRACE HTTP method
TRACE = http.MethodTrace
//-------------
// Media types
//-------------
ApplicationJSON = "application/json"
ApplicationJSONCharsetUTF8 = ApplicationJSON + "; " + CharsetUTF8
ApplicationJavaScript = "application/javascript"
ApplicationJavaScriptCharsetUTF8 = ApplicationJavaScript + "; " + CharsetUTF8
ApplicationXML = "application/xml"
ApplicationXMLCharsetUTF8 = ApplicationXML + "; " + CharsetUTF8
ApplicationForm = "application/x-www-form-urlencoded"
ApplicationProtobuf = "application/protobuf"
ApplicationMsgpack = "application/msgpack"
TextHTML = "text/html"
TextHTMLCharsetUTF8 = TextHTML + "; " + CharsetUTF8
TextPlain = "text/plain"
TextPlainCharsetUTF8 = TextPlain + "; " + CharsetUTF8
MultipartForm = "multipart/form-data"
OctetStream = "application/octet-stream"
//---------
// Charset
//---------
CharsetUTF8 = "charset=utf-8"
//---------
// Headers
//---------
AcceptedLanguage = "Accept-Language"
AcceptEncoding = "Accept-Encoding"
Authorization = "Authorization"
ContentDisposition = "Content-Disposition"
ContentEncoding = "Content-Encoding"
ContentLength = "Content-Length"
ContentType = "Content-Type"
Location = "Location"
Upgrade = "Upgrade"
Vary = "Vary"
WWWAuthenticate = "WWW-Authenticate"
XForwardedFor = "X-Forwarded-For"
XRealIP = "X-Real-Ip"
Allow = "Allow"
Origin = "Origin"
Gzip = "gzip"
WildcardParam = "*wildcard"
basePath = "/"
blank = ""
slashByte = '/'
paramByte = ':'
wildByte = '*'
)
// Handler is the type used in registering handlers.
// NOTE: these handlers may get wrapped by the HandlerFunc
// type internally.
type Handler interface{}
// HandlerFunc is the internal handler type used for middleware and handlers
type HandlerFunc func(Context)
// HandlersChain is an array of HandlerFunc handlers to run
type HandlersChain []HandlerFunc
// ContextFunc is the function to run when creating a new context
type ContextFunc func(l *LARS) Context
// CustomHandlerFunc is wrapped by HandlerFunc and called where you can type cast both Context and Handler
// and call Handler
type CustomHandlerFunc func(Context, Handler)
// customHandlers is a map of your registered custom CustomHandlerFunc's
// used in determining how to wrap them.
type customHandlers map[reflect.Type]CustomHandlerFunc
// LARS is the main routing instance
type LARS struct {
routeGroup
trees map[string]*node
// function that gets called to create the context object... is totally overridable using RegisterContext
contextFunc ContextFunc
pool sync.Pool
http404 HandlersChain // 404 Not Found
http405 HandlersChain // 405 Method Not Allowed
automaticOPTIONS HandlersChain
notFound HandlersChain
customHandlersFuncs customHandlers
// mostParams used to keep track of the most amount of
// params in any URL and this will set the default capacity
// of eachContext Params
mostParams uint8
// Enables automatic redirection if the current route can't be matched but a
// handler for the path with (without) the trailing slash exists.
// For example if /foo/ is requested but a route only exists for /foo, the
// client is redirected to /foo with http status code 301 for GET requests
// and 307 for all other request methods.
redirectTrailingSlash bool
// If enabled, the router checks if another method is allowed for the
// current route, if the current request can not be routed.
// If this is the case, the request is answered with 'Method Not Allowed'
// and HTTP status code 405.
// If no other Method is allowed, the request is delegated to the NotFound
// handler.
handleMethodNotAllowed bool
// if enabled automatically handles OPTION requests; manually configured OPTION
// handlers take precedence. default true
automaticallyHandleOPTIONS bool
}
// RouteMap contains a single routes full path
// and other information
type RouteMap struct {
Depth int `json:"depth"`
Path string `json:"path"`
Method string `json:"method"`
Handler string `json:"handler"`
}
var (
default404Handler = func(c Context) {
http.Error(c.Response(), http.StatusText(http.StatusNotFound), http.StatusNotFound)
}
methodNotAllowedHandler = func(c Context) {
c.Response().WriteHeader(http.StatusMethodNotAllowed)
}
automaticOPTIONSHandler = func(c Context) {
c.Response().WriteHeader(http.StatusOK)
}
formDecoder *form.Decoder
formDecoderInit sync.Once
)
// New Creates and returns a new lars instance
func New() *LARS {
l := &LARS{
routeGroup: routeGroup{
middleware: make(HandlersChain, 0),
},
trees: make(map[string]*node),
contextFunc: func(l *LARS) Context {
return NewContext(l)
},
mostParams: 0,
http404: []HandlerFunc{default404Handler},
http405: []HandlerFunc{methodNotAllowedHandler},
redirectTrailingSlash: true,
handleMethodNotAllowed: false,
automaticallyHandleOPTIONS: false,
}
l.routeGroup.lars = l
l.pool.New = func() interface{} {
c := l.contextFunc(l)
b := c.BaseContext()
b.parent = c
return b
}
return l
}
func initFormDecoder() {
formDecoderInit.Do(func() {
formDecoder = form.NewDecoder()
})
}
// BuiltInFormDecoder returns the built in form decoder github.com/go-playground/form
// in order for custom type to be registered.
func (l *LARS) BuiltInFormDecoder() *form.Decoder {
initFormDecoder()
return formDecoder
}
// RegisterCustomHandler registers a custom handler that gets wrapped by HandlerFunc
func (l *LARS) RegisterCustomHandler(customType interface{}, fn CustomHandlerFunc) {
if l.customHandlersFuncs == nil {
l.customHandlersFuncs = make(customHandlers)
}
t := reflect.TypeOf(customType)
if _, ok := l.customHandlersFuncs[t]; ok {
panic(fmt.Sprint("Custom Type + CustomHandlerFunc already declared: ", t))
}
l.customHandlersFuncs[t] = fn
}
// RegisterContext registers a custom Context function for creation
// and resetting of a global object passed per http request
func (l *LARS) RegisterContext(fn ContextFunc) {
l.contextFunc = fn
}
// Register404 allows for overriding of the not found handler function.
// NOTE: this is run after not finding a route even after redirecting with the trailing slash
func (l *LARS) Register404(notFound ...Handler) {
chain := make(HandlersChain, len(notFound))
for i, h := range notFound {
chain[i] = l.wrapHandler(h)
}
l.http404 = chain
}
// SetAutomaticallyHandleOPTIONS tells lars whether to
// automatically handle OPTION requests; manually configured
// OPTION handlers take precedence. default true
func (l *LARS) SetAutomaticallyHandleOPTIONS(set bool) {
l.automaticallyHandleOPTIONS = set
}
// SetRedirectTrailingSlash tells lars whether to try
// and fix a URL by trying to find it
// lowercase -> with or without slash -> 404
func (l *LARS) SetRedirectTrailingSlash(set bool) {
l.redirectTrailingSlash = set
}
// SetHandle405MethodNotAllowed tells lars whether to
// handle the http 405 Method Not Allowed status code
func (l *LARS) SetHandle405MethodNotAllowed(set bool) {
l.handleMethodNotAllowed = set
}
// Serve returns an http.Handler to be used.
func (l *LARS) Serve() http.Handler {
// reserved for any logic that needs to happen before serving starts.
// i.e. although this router does not use priority to determine route order
// could add sorting of tree nodes here....
l.notFound = make(HandlersChain, len(l.middleware)+len(l.http404))
copy(l.notFound, l.middleware)
copy(l.notFound[len(l.middleware):], l.http404)
if l.automaticallyHandleOPTIONS {
l.automaticOPTIONS = make(HandlersChain, len(l.middleware)+1)
copy(l.automaticOPTIONS, l.middleware)
copy(l.automaticOPTIONS[len(l.middleware):], []HandlerFunc{automaticOPTIONSHandler})
}
return http.HandlerFunc(l.serveHTTP)
}
// Conforms to the http.Handler interface.
func (l *LARS) serveHTTP(w http.ResponseWriter, r *http.Request) {
c := l.pool.Get().(*Ctx)
c.parent.RequestStart(w, r)
if root := l.trees[r.Method]; root != nil {
if c.handlers, c.params, c.handlerName = root.find(r.URL.Path, c.params); c.handlers == nil {
c.params = c.params[0:0]
if l.redirectTrailingSlash && len(r.URL.Path) > 1 {
// find again all lowercase
orig := r.URL.Path
lc := strings.ToLower(orig)
if lc != r.URL.Path {
if c.handlers, _, _ = root.find(lc, c.params); c.handlers != nil {
r.URL.Path = lc
c.handlers = l.redirect(r.Method, r.URL.String())
r.URL.Path = orig
goto END
}
}
if lc[len(lc)-1:] == basePath {
lc = lc[:len(lc)-1]
} else {
lc = lc + basePath
}
if c.handlers, _, _ = root.find(lc, c.params); c.handlers != nil {
r.URL.Path = lc
c.handlers = l.redirect(r.Method, r.URL.String())
r.URL.Path = orig
goto END
}
}
} else {
goto END
}
}
if l.automaticallyHandleOPTIONS && r.Method == OPTIONS {
l.getOptions(c)
goto END
}
if l.handleMethodNotAllowed {
if l.checkMethodNotAllowed(c) {
goto END
}
}
// not found
c.handlers = l.notFound
END:
c.parent.Next()
c.parent.RequestEnd()
l.pool.Put(c)
}
func (l *LARS) getOptions(c *Ctx) {
if c.request.URL.Path == "*" { // check server-wide OPTIONS
for m := range l.trees {
if m == OPTIONS {
continue
}
c.response.Header().Add(Allow, m)
}
} else {
for m, tree := range l.trees {
if m == c.request.Method || m == OPTIONS {
continue
}
if c.handlers, _, _ = tree.find(c.request.URL.Path, c.params); c.handlers != nil {
c.response.Header().Add(Allow, m)
}
}
}
c.response.Header().Add(Allow, OPTIONS)
c.handlers = l.automaticOPTIONS
return
}
func (l *LARS) checkMethodNotAllowed(c *Ctx) (found bool) {
for m, tree := range l.trees {
if m != c.request.Method {
if c.handlers, _, _ = tree.find(c.request.URL.Path, c.params); c.handlers != nil {
// add methods
c.response.Header().Add(Allow, m)
found = true
}
}
}
if found {
c.handlers = l.http405
}
return
}
| 25.390187 | 104 | 0.681329 | 3.15625 |
a1bb09e5813486d61aecf91aaea95364e01d651b | 2,818 | go | Go | slices/flatten_test.go | golodash/godash | 59b0c987536624be4521ffc645cde92cee5ada64 | ["MIT"] | null | null | null | slices/flatten_test.go | golodash/godash | 59b0c987536624be4521ffc645cde92cee5ada64 | ["MIT"] | 8 | 2022-01-20T07:54:27.000Z | 2022-03-07T15:55:32.000Z | slices/flatten_test.go | gotorn/godash | 59b0c987536624be4521ffc645cde92cee5ada64 | ["MIT"] | 2 | 2022-01-04T11:42:52.000Z | 2022-01-04T14:47:14.000Z |
package slices
import (
"fmt"
"reflect"
"strconv"
"testing"
)
type TFlatten struct {
name string
arr []interface{}
want []interface{}
}
var tFlattenBenchs = []TFlatten{
{
name: "10",
arr: []interface{}{},
},
{
name: "100",
arr: []interface{}{},
},
{
name: "1000",
arr: []interface{}{},
},
{
name: "10000",
arr: []interface{}{},
},
{
name: "100000",
arr: []interface{}{},
},
{
name: "1000000",
arr: []interface{}{},
},
}
func init() {
for j := 0; j < len(tFlattenBenchs); j++ {
length, _ := strconv.Atoi(tFlattenBenchs[j].name)
for i := 0; i < length/10; i++ {
tFlattenBenchs[j].arr = append(tFlattenBenchs[j].arr, []interface{}{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}...)
}
}
}
func TestFlatten(t *testing.T) {
var tests = []TFlatten{
{
name: "nil",
arr: nil,
want: nil,
},
{
name: "empty",
arr: []interface{}{},
want: []interface{}{},
},
{
name: "none",
arr: []interface{}{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
want: []interface{}{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
},
{
name: "normal",
arr: []interface{}{0, []interface{}{1, 2}, []interface{}{3, 4, 5}, []interface{}{6, 7}, 8, 9},
want: []interface{}{0, 1, 2, 3, 4, 5, 6, 7, 8, 9},
},
{
name: "more layer",
arr: []interface{}{[]interface{}{0, []interface{}{1, 2}}, 3, []interface{}{4, 5, []interface{}{6, 7}, 8}, []interface{}{9}},
want: []interface{}{0, []interface{}{1, 2}, 3, 4, 5, []interface{}{6, 7}, 8, 9},
},
}
for _, subject := range tests {
t.Run(subject.name, func(t *testing.T) {
got, err := Flatten(subject.arr)
if err != nil {
if subject.want != nil {
t.Errorf("Flatten() got = %v, wanted = %v", got, subject.want)
}
return
}
if len(got) != len(subject.want) {
t.Errorf("Flatten() got = %v, wanted = %v", got, subject.want)
return
}
check := func(subgot, want []interface{}, function interface{}) {
for i := 0; i < len(subgot); i++ {
if gotVal, ok := subgot[i].([]interface{}); ok {
if wantVal, ok := want[i].([]interface{}); ok {
for j := 0; j < len(subgot); j++ {
reflect.ValueOf(function).Call([]reflect.Value{reflect.ValueOf(gotVal), reflect.ValueOf(wantVal), reflect.ValueOf(function)})
}
} else {
t.Errorf("Flatten() got = %v, wanted = %v", got, subject.want)
return
}
} else {
if subgot[i] != want[i] {
t.Errorf("Flatten() got = %v, wanted = %v", got, subject.want)
return
}
}
}
}
check(got, subject.want, check)
})
}
}
func BenchmarkFlatten(b *testing.B) {
for j := 0; j < len(tFlattenBenchs); j++ {
b.Run(fmt.Sprintf("slice_size_%s", tFlattenBenchs[j].name), func(b *testing.B) {
for i := 0; i < b.N; i++ {
Flatten(tFlattenBenchs[j].arr)
}
})
}
}
| 21.51145 | 133 | 0.518453 | 3.1875 |
9bfde1ef2987bbe2e1a1f4205370fa7a6db8e5f8 | 2,093 | js | JavaScript | js/main.js | stauffenbits/warp-wars | 91d0ee54022531355bb3e3d4a366533cd72db43e | ["MIT"] | null | null | null | js/main.js | stauffenbits/warp-wars | 91d0ee54022531355bb3e3d4a366533cd72db43e | ["MIT"] | null | null | null | js/main.js | stauffenbits/warp-wars | 91d0ee54022531355bb3e3d4a366533cd72db43e | ["MIT"] | null | null | null |
import * as Game from '/js/Universe.js';
import * as $ from 'jquery';
var universe = new Game.Universe(20, 50);
universe.draw();
var CURRENT_STAR = null;
var CURRENT_PLANET = null;
var CURRENT_BUILDING = null;
Game.$fourd.make_resolve_click(function(vertex){
if(!vertex){
return;
}
if(vertex.game_object instanceof Game.Star){
starClick(vertex);
}else if(vertex.game_object instanceof Game.Planet){
planetClick(vertex);
}
updateHUD();
})
var starClick = function(vertex){
if(CURRENT_STAR !== null){
CURRENT_STAR.collapse();
}
if(vertex.game_object == CURRENT_STAR){
if(CURRENT_PLANET !== null){
CURRENT_PLANET.collapse();
CURRENT_PLANET = null;
}
CURRENT_STAR = null;
return;
}
CURRENT_STAR = vertex.game_object;
Game.$fourd.toggle_controls('orbit', vertex);
CURRENT_STAR.expand();
}
var planetClick = function(vertex){
if(CURRENT_PLANET !== null){
CURRENT_PLANET.collapse();
}
if(vertex.game_object == CURRENT_PLANET){
CURRENT_PLANET = null;
return;
}
CURRENT_PLANET = vertex.game_object;
Game.$fourd.toggle_controls('orbit', vertex);
CURRENT_PLANET.expand();
}
var updateHUD = function(){
document.querySelector('#selected-star').textContent =
CURRENT_STAR ? CURRENT_STAR.universe.stars.indexOf(CURRENT_STAR) : "None";
document.querySelector('#selected-planet').textContent =
CURRENT_PLANET ? CURRENT_PLANET.star.planets.indexOf(CURRENT_PLANET) : "None";
updateBuildMenu();
}
var updateBuildMenu = function(){
CURRENT_PLANET ? showBuildMenu() : hideBuildMenu();
}
var showBuildMenu = function(){
document.querySelector('.build').classList.remove('hidden');
}
var hideBuildMenu = function(){
document.querySelector('.build').classList.add('hidden');
}
window.buildHousing = function(){
var building = new Game.Building(CURRENT_PLANET);
CURRENT_PLANET.buildings.push(building)
building.draw();
}
window.buildGenerator = function(){
var building = new Game.Building(CURRENT_PLANET);
CURRENT_PLANET.buildings.push(building)
building.draw();
}
| 22.505376 | 82 | 0.707597 | 3.125 |
1624310ed265b7f1d8eaeb31fa8dbc01289b419a | 1,318 | c | C | Labs/Lab1/1.03.c | ZamaPower/Algorithms-ProgrammingLabs | c32c6571aa3bfc256d9c1251c93f4a9a589d12be | ["Unlicense"] | null | null | null | Labs/Lab1/1.03.c | ZamaPower/Algorithms-ProgrammingLabs | c32c6571aa3bfc256d9c1251c93f4a9a589d12be | ["Unlicense"] | null | null | null | Labs/Lab1/1.03.c | ZamaPower/Algorithms-ProgrammingLabs | c32c6571aa3bfc256d9c1251c93f4a9a589d12be | ["Unlicense"] | null | null | null |
/* Exercise 03
-----------
Write a C program able to:
- Read a matrix m1 of R rows and C columns of integer values
- Compute a matrix m2 of R rows and C columns such that
- m2[i][j] is equal to zero
if m1[i][j] is equal to zero.
- m2[i][j] is the factorial number of -m1[i][j]
if m1[i][j] is a negative number.
- m2[i][j] is the smallest power of 10 larger than m1[r][j]
if m1[i][j] is a positive number.
*/
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#define R 5
#define C 5
unsigned factorial(int n);
int findclosepower(int n);
int main(int argc, char const *argv[]) {
int m1[R][C],m2[R][C];
for (int i=0; i<R; i++) {
for (int j=0; j<C; j++) {
fscanf(stdin, "%d",&m1[i][j]);
if (m1[i][j]== 0) {
m2[i][j]=0;
}
if (m1[i][j]<0) {
m2[i][j]= factorial(m1[i][j]);
}
if (m1[i][j]>0) {
m2[i][j]= findclosepower(m1[i][j]);
}
}
}
return EXIT_SUCCESS;
}
unsigned factorial(int n)
{
n=abs(n);
if (n == 1)
return 1;
else
return n * factorial(n - 1);
}
int findclosepower(int n){
int power=1;
while (power<n) {
power=power*10;
}
return (power);
}
| 19.671642 | 61 | 0.49393 | 3.140625 |
58df4a8f7249d0de4c7999b0a4f14b4b2049c703 | 2,403 | lua | Lua | user_libraries/cf_chain_flow/appScripts/publishSubscribe.lua | glenn-edgar/lua_frame_work | 417b72fe254a4359f89f77570733aead2552dfaf | ["MIT"] | null | null | null | user_libraries/cf_chain_flow/appScripts/publishSubscribe.lua | glenn-edgar/lua_frame_work | 417b72fe254a4359f89f77570733aead2552dfaf | ["MIT"] | null | null | null | user_libraries/cf_chain_flow/appScripts/publishSubscribe.lua | glenn-edgar/lua_frame_work | 417b72fe254a4359f89f77570733aead2552dfaf | ["MIT"] | null | null | null |
---
--- File: publishSubscribe.lua
--- Lua Publish Subscribe.lua
---
---
---
publishSubscribe = {}
publishSubscribe.queue = {}
function publishSubscribe.initializePublishSubscribe()
-- null function right now
end
function publishSubscribe.flush( event )
publishSubscribe.queue[event] = {}
end
function publishSubscribe.number( event )
local returnValue
if publishSubscribe.queue[event] == nil then
returnValue = 0
else
returnValue = #(publishSubscribe.queue[event])
end
return returnValue
end
function publishSubscribe.attach( event,queueId )
local temp
if publishSubscribe.queue[event] == nil then
publishSubscribe.queue[event] = {}
end
temp = publishSubscribe.queue[event]
temp[queueId] = 1
end
function publishSubscribe.remove( event, queueId )
local temp
if publishSubscribe.queue[event] ~= nil then
temp = publishSubscribe.queue[event]
temp[queueId] = nil
end
end
function publishSubscribe.post( event, data )
if publishSubscribe.queue[ event] ~= nil then
for i,k in pairs( publishSubscribe.queue[event]) do
eventSystem.queue(i,event,data)
end
end
end
function publishSubscribe.dump()
printf("dumping Publish Subscribe Queue")
for i,k in pairs(publishSubscribe.queue) do
print("event",i)
end
end
function publishSubscribe.dumpEventQueue( event )
print("dumping Publish Subscribe Queue for event ", event)
print("printing a list of queues")
if publishSubscribe.queue[event] == nil then
print("empty queue")
else
for i,k in pairs(publishSubscribe.queue[event]) do
print("queue ",i)
end
end
end
function publishSubscribe.description()
return "publish and subscribe functionality")
end
function publishSubscribe.help()
print(".init .. initialize")
print(".flush(event) .. remove subscribers for a specific event")
print(".number(event) .. number of subscribers for an event")
print(".attach(event, queue) .. attach a queue to an event ")
print(".remove(queue) .. removes a queue attached to an event")
print(".post(event,data) .. post an event ")
print(".dump() .. dump register events ")
print(".dumpEventQueue( event ) .. dump queue registered to an event")
print(".help() .. displays commands ")
end
| 21.455357 | 79 | 0.668747 | 3.296875 |
575dba07a9779b464e441d23c96da48c39a1db83 | 1,157 | c | C | Udemy_Course_1/lonie_count_solution.c | pteczar/c_learning_1 | b5f51307ecf7d4a476d216a87a593b2357d6850a | ["MIT"] | 1 | 2022-01-20T17:15:39.000Z | 2022-01-20T17:15:39.000Z | Udemy_Course_1/lonie_count_solution.c | pteczar/c_learning_1 | b5f51307ecf7d4a476d216a87a593b2357d6850a | ["MIT"] | null | null | null | Udemy_Course_1/lonie_count_solution.c | pteczar/c_learning_1 | b5f51307ecf7d4a476d216a87a593b2357d6850a | ["MIT"] | null | null | null |
#include <stdio.h>
#include <string.h> // for strcspn(), used to strip the newline from the filename
int main()
{
FILE *fp;
char filename[100];
int ch; // int, not char, so EOF can be detected reliably
int linecount, wordcount, charcount;
// Initialize counter variables
linecount = 0;
wordcount = 0;
charcount = 0;
// Prompt user to enter filename
printf("Enter a filename :");
fgets(filename, sizeof(filename), stdin);
filename[strcspn(filename, "\n")] = '\0'; // strip the trailing newline so fopen gets a clean name
// Open file in read-only mode
fp = fopen(filename,"r");
// If file opened successfully, then write the string to file
if ( fp )
{
//Repeat until End Of File character is reached.
while ((ch=getc(fp)) != EOF) {
// Increment character count if NOT new line or space
if (ch != ' ' && ch != '\n') { ++charcount; }
// Increment word count if new line or space character
if (ch == ' ' || ch == '\n') { ++wordcount; }
// Increment line count if new line character
if (ch == '\n') { ++linecount; }
}
if (charcount > 0) {
++linecount;
++wordcount;
}
}
else
{
printf("Failed to open the file\n");
}
printf("Lines : %d \n", linecount);
printf("Words : %d \n", wordcount);
printf("Characters : %d \n", charcount);
getchar();
return(0);
}
| 20.660714 | 64 | 0.566984 | 3.46875 |
84fd7ac46548921ae9033f15d124a4e7dd4c92dd | 3,177 | asm | Assembly | src/test/ref/font-hex-show.asm | jbrandwood/kickc | d4b68806f84f8650d51b0e3ef254e40f38b0ffad | ["MIT"] | 2 | 2022-03-01T02:21:14.000Z | 2022-03-01T04:33:35.000Z | src/test/ref/font-hex-show.asm | jbrandwood/kickc | d4b68806f84f8650d51b0e3ef254e40f38b0ffad | ["MIT"] | null | null | null | src/test/ref/font-hex-show.asm | jbrandwood/kickc | d4b68806f84f8650d51b0e3ef254e40f38b0ffad | ["MIT"] | null | null | null |
// Shows a font where each char contains the number of the char (00-ff)
/// @file
/// Commodore 64 Registers and Constants
/// @file
/// The MOS 6526 Complex Interface Adapter (CIA)
///
/// http://archive.6502.org/datasheets/mos_6526_cia_recreated.pdf
// Commodore 64 PRG executable file
.file [name="font-hex-show.prg", type="prg", segments="Program"]
.segmentdef Program [segments="Basic, Code, Data"]
.segmentdef Basic [start=$0801]
.segmentdef Code [start=$80d]
.segmentdef Data [startAfter="Code"]
.segment Basic
:BasicUpstart(main)
/// $D018 VIC-II base addresses
// @see #VICII_MEMORY
.label D018 = $d018
.label SCREEN = $400
.label CHARSET = $2000
.segment Code
main: {
.const toD0181_return = (>(SCREEN&$3fff)*4)|(>CHARSET)/4&$f
// *D018 = toD018(SCREEN, CHARSET)
lda #toD0181_return
sta D018
// init_font_hex(CHARSET)
jsr init_font_hex
ldx #0
// Show all chars on screen
__b1:
// SCREEN[c] = c
txa
sta SCREEN,x
// for (byte c: 0..255)
inx
cpx #0
bne __b1
// }
rts
}
// Make charset from proto chars
// void init_font_hex(__zp(5) char *charset)
init_font_hex: {
.label __0 = 3
.label idx = 2
.label proto_lo = 7
.label charset = 5
.label c1 = 4
.label proto_hi = 9
.label c = $b
lda #0
sta.z c
lda #<FONT_HEX_PROTO
sta.z proto_hi
lda #>FONT_HEX_PROTO
sta.z proto_hi+1
lda #<CHARSET
sta.z charset
lda #>CHARSET
sta.z charset+1
__b1:
lda #0
sta.z c1
lda #<FONT_HEX_PROTO
sta.z proto_lo
lda #>FONT_HEX_PROTO
sta.z proto_lo+1
__b2:
// charset[idx++] = 0
lda #0
tay
sta (charset),y
lda #1
sta.z idx
ldx #0
__b3:
// proto_hi[i]<<4
txa
tay
lda (proto_hi),y
asl
asl
asl
asl
sta.z __0
// proto_lo[i]<<1
txa
tay
lda (proto_lo),y
asl
// proto_hi[i]<<4 | proto_lo[i]<<1
ora.z __0
// charset[idx++] = proto_hi[i]<<4 | proto_lo[i]<<1
ldy.z idx
sta (charset),y
// charset[idx++] = proto_hi[i]<<4 | proto_lo[i]<<1;
inc.z idx
// for( byte i: 0..4)
inx
cpx #5
bne __b3
// charset[idx++] = 0
lda #0
ldy.z idx
sta (charset),y
// charset[idx++] = 0;
iny
// charset[idx++] = 0
sta (charset),y
// proto_lo += 5
lda #5
clc
adc.z proto_lo
sta.z proto_lo
bcc !+
inc.z proto_lo+1
!:
// charset += 8
lda #8
clc
adc.z charset
sta.z charset
bcc !+
inc.z charset+1
!:
// for( byte c: 0..15 )
inc.z c1
lda #$10
cmp.z c1
bne __b2
// proto_hi += 5
lda #5
clc
adc.z proto_hi
sta.z proto_hi
bcc !+
inc.z proto_hi+1
!:
// for( byte c: 0..15 )
inc.z c
lda #$10
cmp.z c
bne __b1
// }
rts
}
.segment Data
// Bit patterns for symbols 0-f (3x5 pixels) used in font hex
FONT_HEX_PROTO: .byte 2, 5, 5, 5, 2, 6, 2, 2, 2, 7, 6, 1, 2, 4, 7, 6, 1, 2, 1, 6, 5, 5, 7, 1, 1, 7, 4, 6, 1, 6, 3, 4, 6, 5, 2, 7, 1, 1, 1, 1, 2, 5, 2, 5, 2, 2, 5, 3, 1, 1, 2, 5, 7, 5, 5, 6, 5, 6, 5, 6, 2, 5, 4, 5, 2, 6, 5, 5, 5, 6, 7, 4, 6, 4, 7, 7, 4, 6, 4, 4
| 21.039735 | 262 | 0.548316 | 3 |
0bc25237116d36d1b3724261d878f108f7fb3326 | 1,103 | py | Python | abc199/d/main.py | KeiNishikawa218/atcoder | 0af5e091f8b1fd64d5ca7b46b06b9356eacfe601 | ["MIT"] | null | null | null | abc199/d/main.py | KeiNishikawa218/atcoder | 0af5e091f8b1fd64d5ca7b46b06b9356eacfe601 | ["MIT"] | null | null | null | abc199/d/main.py | KeiNishikawa218/atcoder | 0af5e091f8b1fd64d5ca7b46b06b9356eacfe601 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
class UnionFind():
def __init__(self, n):
self.parent = [-1 for _ in range(n)]
# positive == child: vertex number of the root / negative == root: number of connected vertices
def find(self, x):
if self.parent[x] < 0:
return x
else:
self.parent[x] = self.find(self.parent[x])
return self.parent[x]
def unite(self, x, y):
x, y = self.find(x), self.find(y)
if x == y:
return False
else:
if self.size(x) < self.size(y):
x, y = y, x
self.parent[x] += self.parent[y]
self.parent[y] = x
def same(self, x, y):
return self.find(x) == self.find(y)
def size(self, x):
x = self.find(x)
return -self.parent[x]
def is_root(self, x):
return self.parent[x] < 0
def main():
n, m = map(int, input().split())
count = 0
pair_list = []
uf = UnionFind(n)
for i in range(m):
array = list(map(int,input().split()))
array[0] -=1 ; array[1] -= 1
pair_list.append(array)
print(uf.unite(n-1,m-1))
main()
| 23.978261 | 54 | 0.481414 | 3.703125 |
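
The `UnionFind` class in the record above is a standard disjoint-set structure: a negative `parent` entry marks a root and stores the component size, `find` applies path compression, and `unite` merges by size. A minimal usage sketch, independent of the AtCoder input handling in `main()`; it assumes the class definition above is in scope.

```python
# Assumes the UnionFind class from the record above has been defined or imported.
uf = UnionFind(5)
uf.unite(0, 1)
uf.unite(3, 4)

print(uf.same(0, 1))           # True:  0 and 1 are in the same component
print(uf.same(1, 3))           # False: {0, 1} and {3, 4} are separate components
print(uf.size(0))              # 2:     size of the component containing 0
print(uf.is_root(uf.find(4)))  # True:  find() always returns a root
```
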
e7e46d31c42a93c03c2df71128dd11ecc6e4322c | 3,289 | py | Python | lib/misc.py | cripplet/langmuir-hash | 5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5 | ["MIT"] | null | null | null | lib/misc.py | cripplet/langmuir-hash | 5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5 | ["MIT"] | null | null | null | lib/misc.py | cripplet/langmuir-hash | 5b4aa8e705b237704dbb99fbaa89af8cc2e7a8b5 | ["MIT"] | null | null | null |
# custom libs
from lib.args import getConf
# Python libs
from re import sub
from os import mkdir
from os.path import exists
from getpass import getuser
from socket import gethostname
def genFrame(file):
from classes.frame import Frame
from lib.array import getGrid
grid = getGrid(file)
return(Frame(len(grid[0]), len(grid), 0, grid))
# given an int (treated as binary list), generate all unique rotational permutations of int (circular shifts)
# http://bit.ly/GLdKmI
def genPermutations(i, width):
permutations = list()
for j in range(width):
permutations.append(i)
# (i & 1) << (width - 1) advances the end bit to the beginning of the binary string
i = (i >> 1) | ((i & 1) << (width - 1))
return(list(set(permutations)))
# given a string representation of a neighbor configuration, return the number of neighbors in the configuration
def getConfigNum(config):
return(len(filter(lambda x: x == "1", list(config))))
# makes a unique directory
def initDir(dir):
i = 0
tmpDir = dir
while(exists(tmpDir)):
i += 1
tmpDir = dir + "." + str(i)
mkdir(tmpDir)
return(tmpDir)
def pad(i, max):
maxLength = len(str(max))
return(str(i).zfill(maxLength))
def resolveBoundary(bound, coord):
if(coord < 0):
return(coord + bound)
if(coord > bound - 1):
return(coord - bound)
return(coord)
# given an array of lines:
# stripping lines that begin with "#"
# stripping the rest of a line with "#" in the middle
# stripping lines that end with ":"
# remove whitespace
def prep(file):
lines = list()
for line in file:
line = sub(r'\s', '', line.split("#")[0])
if((line != "") and (line[-1] != ":")):
lines.append(line)
return(lines)
# bin() format is "0bxxxxxx"
# [2:] strips "0b"
# [-width:] selects last < width > chars
def toBin(i, width):
return(bin(i)[2:][-width:].zfill(width))
# renders the configuration file
# def renderConfig(folder):
# if(folder[-1] != "/"):
# folder += "/"
# fp = open(folder + "config.conf", "r")
# s = "config file for " + folder[:-1] + ":\n\n"
# for line in fp:
# s += line
# return(s)
def renderConfig(name):
fp = open(name, "r")
s = "config file for " + name + ":\n\n"
for line in fp:
s += line
return(s)
# given a config file, output a CSV line
def renderCSV(simulation):
try:
open(simulation + "/conf.conf", "r")
except IOError as err:
return()
params = getConf(simulation + "/config.conf")
s = getuser() + "@" + gethostname() + ":" + simulation + ","
s += str(params["steps"]) + ","
s += str(params["dens"]) + ","
s += str(params["hori"]) + ","
s += str(params["diag"]) + ","
s += str(params["beta"]) + ","
s += str(params["energies"][0]["000000"]) + ","
s += str(params["energies"][1]["000001"]) + ","
s += str(params["energies"][2]["000011"]) + ","
s += str(params["energies"][2]["000101"]) + ","
s += str(params["energies"][2]["001001"]) + ","
s += str(params["energies"][3]["000111"]) + ","
s += str(params["energies"][3]["001011"]) + ","
s += str(params["energies"][3]["010011"]) + ","
s += str(params["energies"][3]["010101"]) + ","
s += str(params["energies"][4]["001111"]) + ","
s += str(params["energies"][4]["010111"]) + ","
s += str(params["energies"][4]["011011"]) + ","
s += str(params["energies"][5]["011111"]) + ","
s += str(params["energies"][6]["111111"])
return(s)
| 28.353448 | 112 | 0.617817 | 3.6875 |
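
In `lib/misc.py` above, `genPermutations` enumerates the distinct circular (rotational) shifts of a `width`-bit pattern and `toBin` renders a pattern as a fixed-width binary string; the six-character keys such as "000101" in `renderCSV` appear to be exactly these neighbour-configuration bit strings. A small standalone illustration follows; the two helpers are copied verbatim from the record so the snippet runs on its own.

```python
# Standalone copies of two helpers from lib/misc.py, kept identical for illustration.
def genPermutations(i, width):
    permutations = list()
    for j in range(width):
        permutations.append(i)
        # Rotate right by one bit: the low bit wraps around to the top position.
        i = (i >> 1) | ((i & 1) << (width - 1))
    return list(set(permutations))

def toBin(i, width):
    return bin(i)[2:][-width:].zfill(width)

# The pattern 000101 has six distinct rotations within a 6-bit neighbourhood.
print(sorted(toBin(p, 6) for p in genPermutations(0b000101, 6)))
# ['000101', '001010', '010001', '010100', '100010', '101000']
```
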
22d691563196c8b157757c1535f94599a8af4454 | 4,333 | c | C | AED2/FinalProject/ABB/ABB.c | matheuscr30/UFU | e947e5a4ccd5c025cb8ef6e00b42ea1160742712 | ["MIT"] | null | null | null | AED2/FinalProject/ABB/ABB.c | matheuscr30/UFU | e947e5a4ccd5c025cb8ef6e00b42ea1160742712 | ["MIT"] | 11 | 2020-01-28T22:59:24.000Z | 2022-03-11T23:59:04.000Z | AED2/FinalProject/ABB/ABB.c | matheuscr30/UFU | e947e5a4ccd5c025cb8ef6e00b42ea1160742712 | ["MIT"] | null | null | null |
#include <stdio.h>
#include <stdlib.h>
#include "ABB.h"
Arv cria_vazia()
{
return NULL;
}
void libera_arvore(Arv *A)
{
    if (A != NULL && *A != NULL)
    {
        libera_arvore(&(*A)->sae);
        libera_arvore(&(*A)->sad);
        free(*A);
        *A = NULL; // clear the caller's pointer, not the local copy
    }
}
void exibe_arvore(Arv A)
{
    if (A == NULL)
    {
        printf("<>");
        return; // stop here, otherwise the NULL pointer is dereferenced below
    }
    printf("<");
    printf("%d", A->info.idade);
    exibe_arvore(A->sae);
    exibe_arvore(A->sad);
    printf(">");
}
void exibe_ordenado(Arv A)
{
if (A != NULL)
{
exibe_ordenado(A->sae);
printf("%d ", A->info.idade);
exibe_ordenado(A->sad);
}
}
int insere_ord(Arv *A, reg elem)
{
if (A == NULL)
return 0;
if (*A == NULL)
{
Arv novo = (Arv)malloc(sizeof(struct no));
if (novo == NULL)
return 0;
novo->info = elem;
novo->sae = NULL;
novo->sad = NULL;
*A = novo;
return 1;
}
if (elem.idade > (*A)->info.idade)
return insere_ord(&(*A)->sad, elem);
else
return insere_ord(&(*A)->sae, elem);
}
int remove_ord(Arv *A, int idade)
{
if (A == NULL || (*A) == NULL)
return 0;
    if (idade > (*A)->info.idade)
        return remove_ord(&(*A)->sad, idade);
    else if (idade < (*A)->info.idade)
        return remove_ord(&(*A)->sae, idade);
else
{
        if ((*A)->sae == NULL && (*A)->sad == NULL) // Leaf node
        {
            free(*A);
            *A = NULL;
            return 1;
        }
        else if ((*A)->sae != NULL && (*A)->sad == NULL) // Node with one child on the left
        {
            Arv aux = (*A);
            *A = aux->sae; // re-link the parent before freeing the removed node
            free(aux);
            return 1;
        }
        else if ((*A)->sae == NULL && (*A)->sad != NULL) // Node with one child on the right
        {
            Arv aux = (*A);
            *A = aux->sad;
            free(aux);
            return 1;
        }
        else // Node with two children
{
Arv aux = (*A)->sae;
while(aux->sad != NULL)
{
aux = aux->sad;
}
reg temp = (*A)->info;
(*A)->info = aux->info;
aux->info = temp;
return remove_ord(&(*A)->sae, idade);
}
}
}
Arv busca_bin(Arv A, int idade)
{
if (A == NULL)
return NULL;
if (A->info.idade == idade)
return A;
else if (idade > A->info.idade)
return busca_bin(A->sad, idade);
else
return busca_bin(A->sae, idade);
}
reg* maior(Arv A){
if (A == NULL) return NULL;
else if (A->sae == NULL && A->sad == NULL) return &A->info;
else if (A->sae != NULL && A->sad == NULL) return &A->info;
else if (A->sae == NULL && A->sad != NULL) return maior(A->sad);
else
{
return maior(A->sad);
}
}
int de_maior(Arv A){
if (A == NULL) return 0;
else if (A->sae == NULL && A->sad == NULL){
if (A->info.idade >= 18) return 1;
else return 0;
}
else if (A->sae != NULL && A->sad == NULL)
{
if (A->info.idade >= 18) return 1 + de_maior(A->sae);
else return de_maior(A->sae);
}
else if (A->sae == NULL && A->sad != NULL)
{
if (A->info.idade >= 18) return 1 + de_maior(A->sad);
else return de_maior(A->sad);
}
else{
if (A->info.idade >= 18) return 1 + de_maior(A->sae) + de_maior(A->sad);
else return de_maior(A->sae) + de_maior(A->sad);
}
}
int qtde_nos(Arv A, int ini, int fim){
if (A == NULL) return 0;
else if (A->sae == NULL && A->sad == NULL){
if (A->info.idade >= ini && A->info.idade <= fim) return 1;
else return 0;
}
else if (A->sae != NULL && A->sad == NULL)
{
if (A->info.idade >= ini && A->info.idade <= fim) return 1 + qtde_nos(A->sae, ini, fim);
else return qtde_nos(A->sae, ini, fim);
}
else if (A->sae == NULL && A->sad != NULL)
{
if (A->info.idade >= ini && A->info.idade <= fim) return 1 + qtde_nos(A->sad, ini, fim);
else return qtde_nos(A->sad, ini, fim);
}
else{
if (A->info.idade >= ini && A->info.idade <= fim) return 1 + qtde_nos(A->sae, ini, fim) + qtde_nos(A->sad, ini, fim);
else return qtde_nos(A->sae, ini, fim) + qtde_nos(A->sad, ini, fim);
}
}
void juntarAux(Arv A, Arv A2){
if (A == NULL) return;
else if (A->sae == NULL && A->sad == NULL) insere_ord(&A2, A->info);
else if (A->sae != NULL && A->sad == NULL){
insere_ord(&A2, A->info);
juntarAux(A->sae, A2);
}
else if (A->sae == NULL && A->sad != NULL){
insere_ord(&A2, A->info);
juntarAux(A->sad, A2);
}
else{
insere_ord(&A2, A->info);
juntarAux(A->sae, A2);
juntarAux(A->sad, A2);
}
}
Arv juntar(Arv A1, Arv A2){
juntarAux(A1, A2);
return A2;
}
| 20.535545 | 121 | 0.519963 | 3.078125 |
9c662e20f4a0664c46bff2cc2da174f189da037b | 5,504 | js | JavaScript | src/pages/driverList/ViewDriver.js | tharshan24/we4us-web | 7c41cc668492e478fd35d19113f3a8756c7ef18f | ["MIT"] | null | null | null | src/pages/driverList/ViewDriver.js | tharshan24/we4us-web | 7c41cc668492e478fd35d19113f3a8756c7ef18f | ["MIT"] | null | null | null | src/pages/driverList/ViewDriver.js | tharshan24/we4us-web | 7c41cc668492e478fd35d19113f3a8756c7ef18f | ["MIT"] | null | null | null |
import { useState, useEffect } from "react";
import {
// CalendarToday,
LocationSearching,
MailOutline,
PermIdentity,
PhoneAndroid,
// Publish,
} from "@material-ui/icons";
import FeaturedPlayListIcon from '@mui/icons-material/FeaturedPlayList';
import { useParams, useHistory } from "react-router-dom";
// import { Link } from "react-router-dom";
import "../notification/user.css";
import http from "../../services/httpService";
import { CircularProgress, Snackbar } from "@material-ui/core";
import { Alert } from "@mui/material";
export default function ViewDriver() {
const history = useHistory();
const [driver, setDriver] = useState();
const [loading, setLoading] = useState(true);
const [isSuccess, setIsSuccess] = useState(false);
const { userId } = useParams();
useEffect(() => {
const fetchDriver = async () => {
setLoading(true);
const { data } = await http.get(`/admin/viewDriverById/${userId}`);
setDriver(data.result.row[0]);
console.log(data.result.row[0]);
setLoading(false);
};
fetchDriver();
}, []);
const handleDriverStatus = async (status) => {
try {
await http.get(`/admin/updateDriverStatus/${userId}/${status}`);
setIsSuccess(true);
} catch (e) {
console.log(e);
}
};
if (loading)
return (
<div
style={{
width: "100%",
display: "flex",
justifyContent: "center",
}}
>
<CircularProgress />
</div>
);
return (
<div className="user">
<Snackbar
open={isSuccess}
anchorOrigin={{ vertical: "top", horizontal: "right" }}
onClose={() => {
setIsSuccess(false);
history.goBack();
}}
>
<Alert severity="success" sx={{ width: "100%" }}>
Successfully updated the status
</Alert>
</Snackbar>
;
<div className="userTitleContainer">
<h1 className="userTitle">DETAILS</h1>
</div>
<div className="userContainer">
<div className="userShow">
<div className="userShowTop">
{driver.profile_picture_path && (
<img src={driver.profile_picture_path.split(" ")[0]} height={50} width={50} />
)}
<div className="userShowTopTitle">
<span className="userShowUsername">{`${driver.first_name} ${driver.last_name}`}</span>
</div>
</div>
<div className="userShowBottom">
<span className="userShowTitle">Account Details</span>
<div className="userShowInfo">
<PermIdentity className="userShowIcon" />
<span className="userShowInfoTitle">{driver.user_name}</span>
</div>
<div className="userShowInfo">
<FeaturedPlayListIcon className="userShowIcon" />
<span className="userShowInfoTitle">{driver.license_no}</span>
</div>
<span className="userShowTitle">Contact Details</span>
<div className="userShowInfo">
<PhoneAndroid className="userShowIcon" />
<span className="userShowInfoTitle">{driver.mobile_number}</span>
</div>
<div className="userShowInfo">
<MailOutline className="userShowIcon" />
<span className="userShowInfoTitle">{driver.email}</span>
</div>
<div className="userShowInfo">
<LocationSearching className="userShowIcon" />
<span className="userShowInfoTitle">{driver.name_en}</span>
</div>
</div>
</div>
<div className="userUpdate">
<span className="userUpdateTitle">Description</span>
<div className="userShowBottom"></div>
<div className="img1">
{driver.license_proof_path && (
<img src={driver.license_proof_path.split(" ")[0]} height={250} width={300} />
)}
<span
style={{
textAlign: "center",
color: "#ffff",
fontSize: 32,
fontWeight: 600,
}}
> ...
</span>
{/* </div>
<div className="img2"> */}
{driver.license_proof_path && (
<img src={driver.vehicle_book_proof.split(" ")[0]} height={250} width={300} />
)}
</div>
<div className="userUpdateRight">
{/* <button
className="userUpdateButton1"
onClick={() => handleDriverStatus(1)}
>
Accept
</button>
</div>
<div className="userUpdateRight">
<button
className="userUpdateButton2"
onClick={() => handleDriverStatus(0)}
>
Reject
</button> */}
          {driver.status === 1 ? (
            <button className="userUpdateButton1"
            onClick={e =>
              window.confirm("Are you sure you wish to accept") &&
              handleDriverStatus(0)
            } >
            Confirm
            </button>
          ) : <button className="userUpdateButton1"
          onClick={e =>
            window.confirm("Are you sure you wish to cancel") &&
            handleDriverStatus(1)
          } >
          Cancel
          </button>
          }
</div>
</div>
</div>
</div>
);
}
| 30.921348 | 100 | 0.525981 | 3.03125 |
a452ee41d69219cf5190d4d0f185c319920f7ff3 | 1,030 | lua | Lua | appuio/redis/node_ready.lua | isantospardo/charts | d96bee5151118159b042268fcb8b163ebc82d4af | ["BSD-3-Clause"] | 11 | 2019-05-15T06:08:13.000Z | 2021-10-16T09:59:25.000Z | appuio/redis/node_ready.lua | isantospardo/charts | d96bee5151118159b042268fcb8b163ebc82d4af | ["BSD-3-Clause"] | 123 | 2018-06-01T14:03:18.000Z | 2022-02-14T10:17:18.000Z | appuio/redis/node_ready.lua | isantospardo/charts | d96bee5151118159b042268fcb8b163ebc82d4af | ["BSD-3-Clause"] | 25 | 2018-06-01T09:05:07.000Z | 2021-10-21T05:37:33.000Z |
local raw_state = redis.call("info", "replication")
local split = function(text, delim)
return text:gmatch("[^"..delim.."]+")
end
local collect = function(iter)
local elements = {}
for s in iter do table.insert(elements, s); end
return elements
end
local has_prefix = function(text, prefix)
return text:find(prefix, 1, true) == 1
end
local replication_state = {}
for s in split(raw_state, "\r\n") do
(function(s)
if has_prefix(s,"#") then
return
end
local kv = collect(split(s, ":"))
replication_state[kv[1]] = kv[2]
end)(s)
end
local isSlave = replication_state["role"] == "slave"
local isMasterLinkDown = replication_state["master_link_status"] == "down"
local isSyncing = replication_state["master_sync_in_progress"] == "1"
if isSlave and isMasterLinkDown then
if isSyncing then
return redis.error_reply("node is syncing")
else
return redis.error_reply("link to master down")
end
end
return redis.status_reply("ready")
| 24.52381 | 74 | 0.662136 | 3.078125 |
70a82abafa7c59946a62da174533c16e51e4e5c3 | 2,929 | h | C | src/point.h | Wicwik/k-means | df99bd1e0d4436426de9dd82c2315d6ae40c4e47 | ["MIT"] | null | null | null | src/point.h | Wicwik/k-means | df99bd1e0d4436426de9dd82c2315d6ae40c4e47 | ["MIT"] | null | null | null | src/point.h | Wicwik/k-means | df99bd1e0d4436426de9dd82c2315d6ae40c4e47 | ["MIT"] | null | null | null |
#pragma once
#include <ostream> // required for the std::ostream used by operator<<
class Point
{
public:
Point()
: m_x{0}
, m_y{0}
, m_cluster{-1}
, m_minimal_distance{__DBL_MAX__}
{
}
Point(double x, double y)
: m_x{x}
, m_y{y}
, m_cluster{-1}
, m_minimal_distance{__DBL_MAX__}
{
}
Point(const Point& other)
: m_x{other.m_x}
, m_y{other.m_y}
, m_cluster{other.m_cluster}
, m_minimal_distance{other.m_minimal_distance}
{
}
Point& operator=(const Point& rhs)
{
Point tmp(rhs);
m_swap(tmp);
return *this;
}
double distance(Point p)
{
return (p.m_x - m_x) * (p.m_x - m_x) + (p.m_y - m_y) * (p.m_y - m_y);
}
double get_minimal_distance()
{
return m_minimal_distance;
}
void set_minimal_distance(double minimal_distance)
{
m_minimal_distance = minimal_distance;
}
int get_cluster()
{
return m_cluster;
}
void set_cluster(int cluster)
{
m_cluster = cluster;
}
double get_x()
{
return m_x;
}
void set_x(double x)
{
m_x = x;
}
double get_y()
{
return m_y;
}
void set_y(double y)
{
m_y = y;
}
private:
double m_x;
double m_y;
int m_cluster;
double m_minimal_distance;
void m_swap(Point tmp)
{
m_x = tmp.m_x;
m_y = tmp.m_y;
m_cluster = tmp.m_cluster;
m_minimal_distance = tmp.m_minimal_distance;
}
friend std::ostream& operator<<(std::ostream& lhs, const Point& rhs);
friend Point operator+(Point lhs, const Point& rhs);
friend Point operator+(Point lhs, const double& rhs);
friend Point operator-(Point lhs, const Point& rhs);
friend Point operator-(Point lhs, const double& rhs);
friend Point operator*(Point lhs, const Point& rhs);
friend Point operator*(Point lhs, const double& rhs);
friend bool operator==(const Point& lhs, const Point& rhs);
friend bool operator!=(const Point& lhs, const Point& rhs);
};
std::ostream& operator<<(std::ostream& lhs, const Point& rhs)
{
return lhs << "[" << rhs.m_x << "," << rhs.m_y << "]";
}
Point operator+(Point lhs, const Point& rhs)
{
return Point{(lhs.m_x + rhs.m_x), (lhs.m_y + rhs.m_y)};
}
Point operator+(Point lhs, const double& rhs)
{
return Point{(lhs.m_x + rhs), (lhs.m_y + rhs)};
}
Point operator-(Point lhs, const Point& rhs)
{
return Point{(lhs.m_x - rhs.m_x), (lhs.m_y - rhs.m_y)};
}
Point operator-(Point lhs, const double& rhs)
{
return Point{(lhs.m_x - rhs), (lhs.m_y - rhs)};
}
Point operator*(Point lhs, const Point& rhs)
{
return Point{(lhs.m_x * rhs.m_x), (lhs.m_y * rhs.m_y)};
}
Point operator*(Point lhs, const double& rhs)
{
return Point{(lhs.m_x * rhs), (lhs.m_y * rhs)};
}
bool operator==(const Point& lhs, const Point& rhs)
{
if (lhs.m_x == rhs.m_x && lhs.m_y == rhs.m_y)
{
return true;
}
else
{
return false;
}
}
bool operator!=(const Point& lhs, const Point& rhs)
{
if (lhs == rhs)
{
return false;
}
return true;
}
| 17.969325 | 74 | 0.613861 | 3.0625 |
e92d185a6bc768d3bbbbe90a8241e3cd5b9a6ddd
| 1,745 |
rb
|
Ruby
|
model/roomba/ai/path_finder.rb
|
WojciechKo/lecture-assignment-wmh
|
6207a34f236966e160757d08936cca0981000ae1
|
[
"Unlicense"
] | 1 |
2018-06-15T11:05:37.000Z
|
2018-06-15T11:05:37.000Z
|
model/roomba/ai/path_finder.rb
|
WojciechKo/self-driving-vacuum-cleaner
|
6207a34f236966e160757d08936cca0981000ae1
|
[
"Unlicense"
] | null | null | null |
model/roomba/ai/path_finder.rb
|
WojciechKo/self-driving-vacuum-cleaner
|
6207a34f236966e160757d08936cca0981000ae1
|
[
"Unlicense"
] | null | null | null |
class PathFinder
MOVES = [:left, :right, :up, :down]
def initialize(mapper)
@mapper = mapper
end
def path_to(field)
(1..1000).each do |n|
path = n_steps_paths(n).find { |vector, path| path.destination_field == field }
return path[1].moves unless path.nil?
end
end
private
def n_steps_paths(n)
if n == 1
@paths = {[0, 0] => Path.new([], @mapper)}
@paths.merge!(MOVES.map { |m| Path.new([m], @mapper) }
.select { |p| p.destination_field != :blocked }
.inject({}) { |result, path| result[path.vector] = path; result })
else
@paths = @paths
.select { |vector, path| path.moves.size == (n -1) }
.values
.map(&:moves)
.product(MOVES)
.map { |p| Path.new(p[0] + [p[1]], @mapper) }
.select { |p| p.destination_field != :blocked }
.inject({}) { |result, path| result[path.vector] = path; result }
.merge(@paths)
@paths.select { |vector, path| path.moves.size == n }
end
end
end
class Path
attr_reader :moves
def initialize(moves, mapper)
@moves = moves
@mapper = mapper
end
def destination_field
coordinates = @mapper.coordinates.zip(vector).map { |pair| pair[0] + pair[1] }
@mapper.map.field(*coordinates)
end
def to_s
moves.to_s
end
def vector
@vector ||= moves.inject([0, 0]) do |vector, move|
case move
when :right
vector[0] += 1
when :left
vector[0] -= 1
when :up
vector[1] += 1
when :down
vector[1] -= 1
end
vector
end
end
end
| 24.577465 | 90 | 0.508883 | 3.09375 |
4c01b78ce28fa867e96753de193e2551cdcb7d18
| 2,624 |
swift
|
Swift
|
Tests/GraphTests/GraphTests.swift
|
pkrll/Graph.swift
|
46148cb857be1470776c866dbbddfb5f0fe1f7de
|
[
"Apache-2.0"
] | 1 |
2019-02-03T22:32:10.000Z
|
2019-02-03T22:32:10.000Z
|
Tests/GraphTests/GraphTests.swift
|
pkrll/Graph.swift
|
46148cb857be1470776c866dbbddfb5f0fe1f7de
|
[
"Apache-2.0"
] | null | null | null |
Tests/GraphTests/GraphTests.swift
|
pkrll/Graph.swift
|
46148cb857be1470776c866dbbddfb5f0fe1f7de
|
[
"Apache-2.0"
] | 1 |
2019-01-13T17:07:04.000Z
|
2019-01-13T17:07:04.000Z
|
import XCTest
@testable import Graph
final class GraphTests: XCTestCase {
func testNodeCreation() {
let label = 0
let node = Node(withLabel: label)
XCTAssertEqual(node.label, label)
}
func testNodeAddEdge() {
let source = Node(withLabel: 0)
let target = Node(withLabel: 1)
XCTAssertEqual(source.numberOfEdges, 0)
XCTAssertEqual(target.numberOfEdges, 0)
source.addEdge(to: target)
XCTAssertEqual(source.numberOfEdges, 1)
XCTAssertEqual(target.numberOfEdges, 0)
}
func testNodeProperties() {
let node = Node(withLabel: 0)
node["color"] = "Blue"
XCTAssertNotNil(node["color"])
XCTAssertEqual(node["color"] as! String, "Blue")
node["color"] = "Red"
XCTAssertEqual(node["color"] as! String, "Red")
node.setProperty("color", to: "Yellow")
XCTAssertEqual(node["color"] as! String, "Yellow")
}
func testGraphCreation() {
let graph = Graph()
XCTAssertEqual(graph.size, 0)
}
func testGraphAddNode() {
let graph = Graph()
graph.addNode(withLabel: 1)
XCTAssertEqual(graph.size, 1)
graph.addNode(withLabel: 2)
XCTAssertEqual(graph.size, 2)
graph.addNode(withLabel: 3)
XCTAssertEqual(graph.size, 3)
}
func testGraphSubscript() {
let graph = Graph()
var node = graph[5]
XCTAssertNil(node)
graph.addNode(withLabel: 3)
node = graph[3]
XCTAssertNotNil(node)
XCTAssertEqual(node!.label, 3)
}
func testGraph() {
let graph = Graph()
graph.addNode(withLabel: 0)
graph.addNode(withLabel: 1)
graph.addNode(withLabel: 2)
let target = Node(withLabel: 3)
graph.addNode(target)
XCTAssertEqual(graph.size, 4)
let weights = [0: [2, 5], 1: [5], 2: [1]]
XCTAssertTrue(graph.addEdge(from: 0, to: 1, withWeight: weights[0]![0]))
XCTAssertTrue(graph.addEdge(from: 0, to: 2, withWeight: weights[0]![1]))
XCTAssertTrue(graph.addEdge(from: 1, to: 3, withWeight: weights[1]![0]))
XCTAssertTrue(graph.addEdge(from: 2, to: 3, withWeight: weights[2]![0]))
XCTAssertFalse(graph.addEdge(from: 5, to: 3, withWeight: 0))
for node in graph.nodes {
XCTAssertNil(node["visited"])
var index = 0
for edge in node.edges {
XCTAssertEqual(edge.weight, weights[node.label]![index])
index += 1
}
node["visited"] = true
XCTAssertNotNil(node["visited"])
XCTAssertTrue(node["visited"] as! Bool)
}
}
static var allTests = [
("testNodeCreation", testNodeCreation),
("testNodeAddEdge", testNodeAddEdge),
("testNodeProperties", testNodeProperties),
("testGraphCreation", testGraphCreation),
("testGraphAddNode", testGraphAddNode),
("testGraphSubscript", testGraphSubscript),
("testGraph", testGraph)
]
}
| 23.428571 | 74 | 0.690549 | 3.109375 |
e714ac708631ee96ad789bb2f3e80388bc73e4c7
| 2,254 |
js
|
JavaScript
|
neuraum/src/MainPage/index.js
|
leo-alexander/code-test
|
244b4e3cbac3242672b667a049686b55012ecb80
|
[
"MIT"
] | null | null | null |
neuraum/src/MainPage/index.js
|
leo-alexander/code-test
|
244b4e3cbac3242672b667a049686b55012ecb80
|
[
"MIT"
] | null | null | null |
neuraum/src/MainPage/index.js
|
leo-alexander/code-test
|
244b4e3cbac3242672b667a049686b55012ecb80
|
[
"MIT"
] | null | null | null |
import React, { Component } from "react";
// import PropTypes from "prop-types";
import VendorTable from "./components/VendorTable"
import styles from "./mainPage.css";
class MainPage extends Component {
state = {
houses: {},
error: false,
success: false,
sortBy: 'internal_id'
}
componentDidMount = () => {
fetch("https://www.fertighaus.de/-/houses.json?vendor__in=28,10")
.then(res => res.json())
.then(
(result) => {
this.setState({
houses: this.formatHouses(result.results),
success: true,
});
},
(error) => {
this.setState({
error
});
}
)
}
formatHouses = (houses) => {
// arrange houses by vendor category
let formattedHouses = {}
houses.forEach((house) => {
let houseId = house.vendor_verbose.id
if (!formattedHouses.hasOwnProperty(houseId)) {
// add the vendor and create a directory for its houses
house.vendor_verbose['houses'] = []
formattedHouses[houseId] = house.vendor_verbose
}
// add the house to the correct vendor
formattedHouses[houseId].houses.push(house)
})
console.log(formattedHouses)
return formattedHouses
}
changeGlobalSort = (e) => {
this.setState({
sortBy: e.target.value
})
}
render() {
const { houses, success, sortBy} = this.state
return (
<div className={styles.container}>
<div className={styles.globalSort}>
<div className={styles.sortText}>Sort By: </div>
<select id="sortSelector" onChange={(e) => this.changeGlobalSort(e)} value={this.state.sortBy}>
<option value="internal_id">House ID</option>
<option value="name">Name</option>
<option value="price">Price</option>
<option value="living_area_total">Size</option>
</select>
</div>
{success ?
<div className={styles.vendorTablesContainer}>
{Object.entries(houses).map(([vendorId, vendor]) => {
return(<VendorTable vendor={vendor} sortBy={sortBy}/>
)})}
</div>
:
<div>Loading</div>
}
</div>
)
}
}
export default MainPage;
| 28.175 | 105 | 0.575421 | 3.1875 |
b098c1279d9893cec2f84cded02ecb4fd11da8cf
| 2,319 |
rs
|
Rust
|
src/validation/error.rs
|
akashgurava/oas3-rs
|
535523c99c44cb71c53100ec5fdaad1a468723ab
|
[
"MIT"
] | 11 |
2020-11-06T12:38:52.000Z
|
2022-02-11T17:37:42.000Z
|
src/validation/error.rs
|
akashgurava/oas3-rs
|
535523c99c44cb71c53100ec5fdaad1a468723ab
|
[
"MIT"
] | null | null | null |
src/validation/error.rs
|
akashgurava/oas3-rs
|
535523c99c44cb71c53100ec5fdaad1a468723ab
|
[
"MIT"
] | 4 |
2021-03-06T13:02:58.000Z
|
2022-02-24T21:52:14.000Z
|
use std::fmt;
use derive_more::{Display, Error};
use http::{Method, StatusCode};
use serde_json::Value as JsonValue;
use super::Path;
use crate::spec::{Error as SchemaError, SchemaType};
#[derive(Debug, Clone, PartialEq)]
pub struct AggregateError {
errors: Vec<Error>,
}
impl AggregateError {
pub fn new(errors: Vec<Error>) -> Self {
Self { errors }
}
pub fn empty() -> Self {
Self { errors: vec![] }
}
pub fn push(&mut self, err: Error) {
self.errors.push(err)
}
}
impl fmt::Display for AggregateError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let errs = self
.errors
.iter()
.map(|err| format!(" => {}", err.to_string()))
.collect::<Vec<_>>()
.join("\n");
f.write_str(&errs)
}
}
/// Validation Errors
#[derive(Clone, PartialEq, Debug, Display, Error)]
pub enum Error {
//
// Wrapped Errors
//
#[display(fmt = "Schema error")]
Schema(SchemaError),
//
// Leaf Errors
//
#[display(fmt = "Not JSON")]
NotJson,
#[display(fmt = "{} is not a {:?}", _0, _1)]
TypeMismatch(Path, SchemaType),
#[display(fmt = "Array item type mismatch: {}", _0)]
ArrayItemTypeMismatch(JsonValue, #[error(source)] Box<Error>),
#[display(fmt = "Undocumented field: {}", _0)]
UndocumentedField(#[error(not(source))] String),
#[display(fmt = "Status mismatch: expected {}; got {}", _0, _1)]
StatusMismatch(StatusCode, StatusCode),
#[display(fmt = "Required field missing: {}", _0)]
RequiredFieldMissing(#[error(not(source))] Path),
#[display(fmt = "Type did not match any `anyOf` variant: {}\n{}", _0, _1)]
OneOfNoMatch(Path, AggregateError),
#[display(fmt = "Non-nullable field was null: {}", _0)]
InvalidNull(#[error(not(source))] Path),
#[display(fmt = "Operation not found: {} {}", _0, _1)]
OperationNotFound(Method, String),
#[display(fmt = "Operation ID not found: {}", _0)]
OperationIdNotFound(#[error(not(source))] String),
#[display(fmt = "Parameter not found: {}", _0)]
ParameterNotFound(#[error(not(source))] String),
#[display(fmt = "Invalid parameter location: {}", _0)]
InvalidParameterLocation(#[error(not(source))] String),
}
| 25.766667 | 78 | 0.589047 | 3.1875 |
80f2bb7a68e563926a60b703c5db40d84324f423
| 1,222 |
swift
|
Swift
|
AlgorithmPractice/LeetCode-Go/50-PowxN.swift
|
YuanmuShi/AlgorithmPractice
|
32d87b34f59999b47212dca87156c43a6aee9a3e
|
[
"MIT"
] | null | null | null |
AlgorithmPractice/LeetCode-Go/50-PowxN.swift
|
YuanmuShi/AlgorithmPractice
|
32d87b34f59999b47212dca87156c43a6aee9a3e
|
[
"MIT"
] | null | null | null |
AlgorithmPractice/LeetCode-Go/50-PowxN.swift
|
YuanmuShi/AlgorithmPractice
|
32d87b34f59999b47212dca87156c43a6aee9a3e
|
[
"MIT"
] | null | null | null |
//
// 50-PowxN.swift
// AlgorithmPractice
//
// Created by Jeffrey on 2021/1/19.
// Copyright © 2021 Jeffrey. All rights reserved.
//
import Foundation
/*
50. Pow(x, n)
 Implement pow(x, n), i.e. compute x raised to the power n (x^n).
 Example 1:
 Input: x = 2.00000, n = 10
 Output: 1024.00000
 Example 2:
 Input: x = 2.10000, n = 3
 Output: 9.26100
 Example 3:
 Input: x = 2.00000, n = -2
 Output: 0.25000
 Explanation: 2^-2 = 1/2^2 = 1/4 = 0.25
 Constraints:
 -100.0 < x < 100.0
 -2^31 <= n <= 2^31 - 1
 -10^4 <= x^n <= 10^4
*/
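// For intuition, a rough worked trace of the iterative fast-exponentiation
// method below (illustrative sketch only; values follow the loop in myPow):
//   x = 2, n = 10: start with result = 1, power = 2
//   n = 10 (even): power -> 4,                    n -> 5
//   n = 5  (odd):  result -> 1 * 4 = 4, power -> 16, n -> 2
//   n = 2  (even): power -> 256,                  n -> 1
//   n = 1  (odd):  result -> 4 * 256 = 1024,      n -> 0
// giving 2^10 = 1024 in O(log n) multiplications.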
extension Solution {
static func test50() {
print(myPow(2, 3))
// print(myPow1(8.84372, -5))
}
    // Iterative method: exponentiation by squaring
private static func myPow(_ x: Double, _ n: Int) -> Double {
var tmpX = x
var tmpN = n
if n < 0 {
tmpX = 1 / x
tmpN = -n
}
var result: Double = 1
var power = tmpX
while tmpN > 0 {
if tmpN % 2 == 1 {
result *= power
}
power *= power
tmpN /= 2
}
return result
}
    // Recursive method
private static func myPow1(_ x: Double, _ n: Int) -> Double {
if n == 0 {
return 1
}
if n < 0 {
return 1 / myPow(x, -n)
}
if n % 2 == 0 {
return myPow(x * x, n / 2)
}
return x * myPow(x * x, n / 2)
}
}
| 14.209302 | 63 | 0.47054 | 3.25 |
4a448ea429e14998449bf77d3471155fb1fc4986
| 2,553 |
js
|
JavaScript
|
tests/integration/slider/slider_core.js
|
Teleburna/jquery-mobile
|
f075f58e80e71014bbeb94dc0d2efd4cd800a0ba
|
[
"CC0-1.0"
] | 2,140 |
2015-01-01T15:29:54.000Z
|
2021-10-01T00:21:19.000Z
|
tests/integration/slider/slider_core.js
|
Teleburna/jquery-mobile
|
f075f58e80e71014bbeb94dc0d2efd4cd800a0ba
|
[
"CC0-1.0"
] | 1,030 |
2015-01-01T12:40:58.000Z
|
2021-09-14T02:06:01.000Z
|
tests/integration/slider/slider_core.js
|
Teleburna/jquery-mobile
|
f075f58e80e71014bbeb94dc0d2efd4cd800a0ba
|
[
"CC0-1.0"
] | 883 |
2015-01-02T16:58:12.000Z
|
2021-10-22T00:35:05.000Z
|
define( [ "qunit", "jquery" ], function( QUnit, $ ) {
function defineTooltipTest( name, slider, hasValue, hasTooltip ) {
QUnit.test( name, function( assert ) {
var widget = slider.slider( "instance" ),
track = slider.siblings( ".ui-slider-track" ),
handle = track.children( ".ui-slider-handle" ),
popup = slider.siblings( ".ui-slider-popup" ),
assertState = function( condition, popupVisible ) {
var expectedHandleText = ( hasValue ? ( widget.options.mini ? "" : ( "" + slider.val() ) ) : "" );
assert.deepEqual( popup.is( ":visible" ), popupVisible,
"Upon " + condition + " popup is " + ( popupVisible ? "" : "not " ) +
"visible" );
if ( popupVisible ) {
assert.deepEqual( popup.text(), ( "" + slider.val() ),
"Upon " + condition +
" the popup reflects the input value (" + slider.val() + ")" );
}
assert.deepEqual( handle.text(), expectedHandleText,
"Upon " + condition + " the handle text is " + expectedHandleText );
};
assertState( "startup", false );
// Make sure the widget updates correctly when dragging by the handle
handle.trigger( "vmousedown" );
assertState( "handle vmousedown", hasTooltip );
// Move to 89% of the length of the slider
handle.trigger( $.extend( $.Event( "vmousemove" ), {
pageX: track.offset().left + track.width() * 0.89
} ) );
assertState( "handle vmousemove", hasTooltip );
handle.trigger( "vmouseup" );
assertState( "handle vmouseup", false );
// Make sure the widget updates correctly when clicking on the track at 47%
track.trigger( $.extend( $.Event( "vmousedown" ), {
pageX: track.offset().left + track.width() * 0.47
} ) );
assertState( "track vmousedown", hasTooltip );
// Move to 53%
track.trigger( $.extend( $.Event( "vmousemove" ), {
pageX: track.offset().left + track.width() * 0.53
} ) );
assertState( "track vmousemove", hasTooltip );
track.trigger( "vmouseup" );
assertState( "track vmouseup", false );
} );
}
function defineTests( moduleNameSuffix, idPrefix ) {
QUnit.module( "Slider tooltip - " + moduleNameSuffix );
defineTooltipTest( "Basic slider", $( "#" + idPrefix + "basic-slider" ), false, false );
defineTooltipTest( "Slider showing value", $( "#" + idPrefix + "show-value" ), true, false );
defineTooltipTest( "Slider showing tooltip", $( "#" + idPrefix + "popup" ), false, true );
defineTooltipTest( "Tooltip and value", $( "#" + idPrefix + "value-and-popup" ), true, true );
}
defineTests( "regular size", "" );
defineTests( "mini size", "mini-" );
} );
| 37 | 102 | 0.631022 | 3 |
7f38ea4c5d991d6e4497e6b8babde7a3b6e3b4e0
| 2,982 |
go
|
Go
|
src/codechef/easy/section10/section11/bit2c/solution.go
|
wangsenyuan/learn-go
|
a2ee4862b006e78cfb993b4cac229d6c58b8c583
|
[
"Apache-2.0"
] | 5 |
2020-06-04T03:44:24.000Z
|
2021-11-14T03:16:25.000Z
|
src/codechef/easy/section10/section11/bit2c/solution.go
|
wangsenyuan/learn-go
|
a2ee4862b006e78cfb993b4cac229d6c58b8c583
|
[
"Apache-2.0"
] | null | null | null |
src/codechef/easy/section10/section11/bit2c/solution.go
|
wangsenyuan/learn-go
|
a2ee4862b006e78cfb993b4cac229d6c58b8c583
|
[
"Apache-2.0"
] | null | null | null |
package main
import (
"bufio"
"fmt"
"os"
)
func readInt(bytes []byte, from int, val *int) int {
i := from
sign := 1
if bytes[i] == '-' {
sign = -1
i++
}
tmp := 0
for i < len(bytes) && bytes[i] != ' ' {
tmp = tmp*10 + int(bytes[i]-'0')
i++
}
*val = tmp * sign
return i
}
func readNum(scanner *bufio.Scanner) (a int) {
scanner.Scan()
readInt(scanner.Bytes(), 0, &a)
return
}
func readTwoNums(scanner *bufio.Scanner) (a int, b int) {
res := readNNums(scanner, 2)
a, b = res[0], res[1]
return
}
func readNNums(scanner *bufio.Scanner, n int) []int {
res := make([]int, n)
x := 0
scanner.Scan()
for i := 0; i < n; i++ {
for x < len(scanner.Bytes()) && scanner.Bytes()[x] == ' ' {
x++
}
x = readInt(scanner.Bytes(), x, &res[i])
}
return res
}
func fillNNums(scanner *bufio.Scanner, n int, res []int) {
x := 0
scanner.Scan()
for i := 0; i < n; i++ {
for x < len(scanner.Bytes()) && scanner.Bytes()[x] == ' ' {
x++
}
x = readInt(scanner.Bytes(), x, &res[i])
}
}
func readUint64(bytes []byte, from int, val *uint64) int {
i := from
var tmp uint64
for i < len(bytes) && bytes[i] != ' ' {
tmp = tmp*10 + uint64(bytes[i]-'0')
i++
}
*val = tmp
return i
}
func readInt64(bytes []byte, from int, val *int64) int {
i := from
var tmp int64
for i < len(bytes) && bytes[i] != ' ' {
tmp = tmp*10 + int64(bytes[i]-'0')
i++
}
*val = tmp
return i
}
func readNInt64Nums(scanner *bufio.Scanner, n int) []int64 {
res := make([]int64, n)
x := -1
scanner.Scan()
for i := 0; i < n; i++ {
x = readInt64(scanner.Bytes(), x+1, &res[i])
}
return res
}
func main() {
scanner := bufio.NewScanner(os.Stdin)
tc := readNum(scanner)
for tc > 0 {
tc--
scanner.Scan()
s := scanner.Text()
fmt.Println(solve(s))
}
}
func solve(s string) int {
nums := make([]int, 0, 11)
ops := make([]byte, 0, 11)
var num int
for i := 0; i <= len(s); i++ {
if i == len(s) || !isDigit(s[i]) {
nums = append(nums, num)
if i < len(s) {
ops = append(ops, s[i])
}
num = 0
continue
}
x := int(s[i] - '0')
num = num*10 + x
}
n := len(nums)
dp := make([][]map[int]bool, n)
for i := 0; i < n; i++ {
dp[i] = make([]map[int]bool, n)
}
var dfs func(i, j int) map[int]bool
dfs = func(i, j int) map[int]bool {
if dp[i][j] != nil {
return dp[i][j]
}
dp[i][j] = make(map[int]bool)
if i == j {
dp[i][j][nums[i]] = true
return dp[i][j]
}
for k := i; k < j; k++ {
a := dfs(i, k)
b := dfs(k+1, j)
for aa := range a {
for bb := range b {
cc := bitOp(aa, bb, ops[k])
dp[i][j][cc] = true
}
}
}
return dp[i][j]
}
res := dfs(0, n-1)
var ans int
for k := range res {
ans = max(ans, k)
}
return ans
}
func isDigit(x byte) bool {
return x >= '0' && x <= '9'
}
func bitOp(a int, b int, c byte) int {
if c == '&' {
return a & b
}
if c == '|' {
return a | b
}
return a ^ b
}
func max(a, b int) int {
if a >= b {
return a
}
return b
}
| 15.777778 | 61 | 0.518779 | 3.40625 |
e756dd326d378dc4baa05a2c0c44a2078959c642
| 2,110 |
js
|
JavaScript
|
src/api/parser/src/utils/__mocks__/supabase.js
|
nguyenhung15913/telescope
|
11268ac446a52cf3337e3ab607fa6c1e950afbd7
|
[
"BSD-2-Clause"
] | null | null | null |
src/api/parser/src/utils/__mocks__/supabase.js
|
nguyenhung15913/telescope
|
11268ac446a52cf3337e3ab607fa6c1e950afbd7
|
[
"BSD-2-Clause"
] | 14 |
2022-01-20T21:30:19.000Z
|
2022-01-27T22:23:01.000Z
|
src/api/parser/src/utils/__mocks__/supabase.js
|
AmasiaNalbandian/telescope
|
182e00e98053db6e6b8184c4b8d25f225d7e3f2e
|
[
"BSD-2-Clause"
] | 1 |
2022-02-21T03:25:06.000Z
|
2022-02-21T03:25:06.000Z
|
const { hash } = require('@senecacdot/satellite');
const normalizeUrl = require('normalize-url');
const urlToId = (url) => hash(normalizeUrl(url));
let feeds = [];
let feedIds = new Set();
module.exports = {
__resetMockFeeds: () => {
feeds = [];
feedIds = new Set();
},
/**
* @param {Array<Feed | { url: string }>} feedObjects
*/
__setMockFeeds: (feedObjects) => {
const mockFeeds = feedObjects.reduce((uniqueFeeds, feed) => {
const id = feed.id || urlToId(feed.url);
if (!feedIds.has(id)) {
feedIds.add(id);
return uniqueFeeds.concat({ id, invalid: false, flagged: false });
}
return uniqueFeeds;
}, []);
feeds = feeds.concat(mockFeeds);
},
// Invalid feed related functions
setInvalidFeed: (id) => {
feeds.forEach((feed) => {
if (feed.id === id) {
feed.invalid = true;
}
});
return Promise.resolve();
},
getInvalidFeeds: () => {
    const invalidFeedIds = feeds.filter((feed) => feed.invalid).map((feed) => ({ id: feed.id }));
return Promise.resolve(invalidFeedIds);
},
isInvalid: (id) => {
const targetFeed = feeds.find((feed) => feed.id === id);
return Promise.resolve(!!targetFeed.invalid);
},
// Flagged feed related functions
getAllFeeds: jest.fn().mockImplementation(() => Promise.resolve(feeds)),
setFlaggedFeed: jest.fn().mockImplementation((id) => {
feeds.forEach((feed) => {
if (feed.id === id) {
feed.flagged = true;
}
});
return Promise.resolve();
}),
unsetFlaggedFeed: jest.fn().mockImplementation((id) => {
feeds.forEach((feed) => {
if (feed.id === id) {
feed.flagged = false;
}
});
return Promise.resolve();
}),
getFlaggedFeeds: jest.fn().mockImplementation(() => {
const flaggedFeedIds = feeds.filter((feed) => feed.flagged).map((feed) => feed.id);
return Promise.resolve(flaggedFeedIds);
}),
isFlagged: jest.fn().mockImplementation((id) => {
const targetFeed = feeds.find((feed) => feed.id === id);
return Promise.resolve(!!targetFeed.flagged);
}),
};
| 28.513514 | 97 | 0.590995 | 3 |
c3830185506bb0407a6bcd3d31caf6557033c575
| 1,131 |
go
|
Go
|
common.go
|
TipsyPixie/advent-of-code-2020
|
9e178f8e5d274483690750c3fe6b6c765bcae235
|
[
"Beerware"
] | null | null | null |
common.go
|
TipsyPixie/advent-of-code-2020
|
9e178f8e5d274483690750c3fe6b6c765bcae235
|
[
"Beerware"
] | null | null | null |
common.go
|
TipsyPixie/advent-of-code-2020
|
9e178f8e5d274483690750c3fe6b6c765bcae235
|
[
"Beerware"
] | null | null | null |
package aoc
import (
"bufio"
"io/ioutil"
"os"
"testing"
)
type Input interface {
ReadLine() (string, bool, error)
ReadAll() (string, error)
Close() error
}
type fileInput struct {
file *os.File
scanner *bufio.Scanner
}
// to make sure fileInput implements Input
var _ Input = (*fileInput)(nil)
func FromFile(path string) (*fileInput, error) {
file, err := os.Open(path)
if err != nil {
return nil, err
}
return &fileInput{
file: file,
scanner: bufio.NewScanner(file),
}, nil
}
func (input *fileInput) ReadLine() (string, bool, error) {
if input.scanner.Scan() {
return input.scanner.Text(), true, nil
}
return "", false, input.scanner.Err()
}
func (input *fileInput) ReadAll() (string, error) {
fileContents, err := ioutil.ReadAll(input.file)
if err != nil {
return "", err
}
return string(fileContents), nil
}
func (input *fileInput) Close() error {
err := input.file.Close()
if err != nil {
return err
}
return nil
}
func CommonTest(t *testing.T, f func(string) (int, error)) {
answer, err := f("./input.txt")
if err != nil {
t.Error(err)
t.FailNow()
}
t.Log(answer)
}
| 17.136364 | 60 | 0.647215 | 3.1875 |
be378eea5163bc414d71195ef66bb4e79b336476
| 2,243 |
rs
|
Rust
|
writing_automated_tests/src/how_to_write_tests.rs
|
learn-frame/learn-rust
|
22c471ccbfc4a3555af0838b5b45b5d82ab0e616
|
[
"MIT"
] | null | null | null |
writing_automated_tests/src/how_to_write_tests.rs
|
learn-frame/learn-rust
|
22c471ccbfc4a3555af0838b5b45b5d82ab0e616
|
[
"MIT"
] | null | null | null |
writing_automated_tests/src/how_to_write_tests.rs
|
learn-frame/learn-rust
|
22c471ccbfc4a3555af0838b5b45b5d82ab0e616
|
[
"MIT"
] | null | null | null |
/// 1. Set up any data or state that is needed
/// 2. Run the code you want to test
/// 3. Assert that the result is what you expect
///
/// Review: an attribute is metadata about a piece of Rust code; common ones include
/// #[derive], #[test], #[allow]
///
#[derive(Debug)]
struct Rectangle {
width: u32,
height: u32,
}
impl Rectangle {
#[allow(unused)]
fn can_hold(&self, other: &Rectangle) -> bool {
self.width > other.width && self.height > other.height
}
}
#[allow(unused)]
pub fn add_two(a: i32) -> i32 {
a + 2
}
#[allow(unused)]
pub fn greeting(name: &str) -> String {
String::from("Hello!")
}
#[allow(unused)]
pub fn make_error() {
panic!("I'm error!");
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn it_works() {
assert_eq!(add_two(2), 4);
assert_ne!(add_two(2), 5);
}
#[test]
fn larger_can_hold_smaller() {
let larger = Rectangle {
width: 8,
height: 7,
};
let smaller = Rectangle {
width: 5,
height: 1,
};
assert!(larger.can_hold(&smaller));
assert!(!smaller.can_hold(&larger));
}
#[test]
fn greeting_contains_name() {
let result = greeting("Carol");
assert!(
result.contains("Carol"),
            // Custom failure message
"Greeting did not contain name, value was `{}`",
result
);
}
    // #[should_panic] is used to verify functions that are expected to panic
#[test]
#[should_panic(expected = "出错就对咯!")]
fn need_error() {
make_error();
}
    // The function below does not panic, so this test will fail
#[test]
#[should_panic]
fn need_error_1() {
add_two(3);
}
    // Strangely, though, if you call multiple functions,
    // the test passes as long as any one of them panics,
    // so it is better to exercise only one function per #[should_panic] test
#[test]
#[should_panic]
fn need_error_2() {
add_two(3);
make_error();
}
    // Tests can also use Result<T, E>
    // The #[should_panic] annotation cannot be used on tests that use Result<T, E>
#[test]
fn use_result() -> Result<(), String> {
if add_two(2) == 4 {
Ok(())
} else {
Err(String::from("two plus two does not equal four"))
}
}
}
// Run cargo test
//
// test how_to_write_tests::tests::it_works ... ok
// running 1 test
// test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured; 0 filtered out; finished in 0.00s
| 19.675439 | 96 | 0.523852 | 3.328125 |
7153b5624b085aa1e0fc2c88e34eea47ceafa586
| 1,570 |
ts
|
TypeScript
|
src/app/auth/training/past-training/past-training.component.ts
|
harshyadav9/maxAangularMaterial
|
c4c89dfbcbcb26d541654b2201de717abaa89182
|
[
"MIT"
] | null | null | null |
src/app/auth/training/past-training/past-training.component.ts
|
harshyadav9/maxAangularMaterial
|
c4c89dfbcbcb26d541654b2201de717abaa89182
|
[
"MIT"
] | null | null | null |
src/app/auth/training/past-training/past-training.component.ts
|
harshyadav9/maxAangularMaterial
|
c4c89dfbcbcb26d541654b2201de717abaa89182
|
[
"MIT"
] | null | null | null |
import { Component, OnInit , ViewChild, AfterViewInit, OnDestroy } from '@angular/core';
import { TrainingService } from '../training.service';
import { Excercise } from '../excercise.model';
import { MatTableDataSource , MatSort , PageEvent, MatPaginator} from '@angular/material';
import { Subscription } from 'rxjs';
@Component({
selector: 'app-past-training',
templateUrl: './past-training.component.html',
styleUrls: ['./past-training.component.css']
})
export class PastTrainingComponent implements OnInit, AfterViewInit , OnDestroy {
excercises:Excercise[] = [];
subsc:Subscription;
pageEvent: PageEvent;
displayedColumns:string[] = ["name","calories","duration","date","state"];
dataSource = new MatTableDataSource<Excercise>();
// MatSort gives access to underlying property of matsort and mat-sort-header
@ViewChild(MatSort) sort:MatSort
@ViewChild(MatPaginator) paginator:MatPaginator
constructor(private trainingService:TrainingService) { }
ngOnInit() {
this.subsc = this.trainingService.completeOrCancelExc.subscribe((data:Excercise[])=>{
this.dataSource.data = data;
console.log("this.dataSource.data",this.dataSource.data);
});
this.trainingService.fetchCancelOrCompleteExcercise();
};
ngAfterViewInit(){
this.dataSource.sort = this.sort;
this.dataSource.paginator = this.paginator;
}
doFilter(filterValue:string){
this.dataSource.filter = filterValue.trim().toLowerCase();
}
ngOnDestroy(){
if(this.subsc)
this.subsc.unsubscribe();
}
}
| 29.622642 | 90 | 0.711465 | 3.109375 |
857621577f099fbe301996f513e60d4f3c9af9ad
| 6,072 |
js
|
JavaScript
|
uiclient/js/services.js
|
JanezSedeljsak/tcp-socket-demo
|
9dbb43c091b181f1a4989a86232e33403b9c6467
|
[
"CC0-1.0"
] | 1 |
2021-07-06T05:00:54.000Z
|
2021-07-06T05:00:54.000Z
|
uiclient/js/services.js
|
JanezSedeljsak/tcp-socket-demo
|
9dbb43c091b181f1a4989a86232e33403b9c6467
|
[
"CC0-1.0"
] | null | null | null |
uiclient/js/services.js
|
JanezSedeljsak/tcp-socket-demo
|
9dbb43c091b181f1a4989a86232e33403b9c6467
|
[
"CC0-1.0"
] | null | null | null |
const pystruct = require('python-struct');
const { ipcRenderer } = require('electron');
app.service('$drag', function () {
this.for = function (elmnt) {
let pos1 = 0, pos2 = 0, pos3 = 0, pos4 = 0;
if (document.getElementById(elmnt.id + "-header")) {
document.getElementById(elmnt.id + "-header").onmousedown = dragMouseDown;
} else elmnt.onmousedown = dragMouseDown;
function dragMouseDown(e) {
var containers = document.getElementsByClassName("chat-container");
for (var i = 0; i < containers.length; i++) containers.item(i).style.zIndex = 0;
e = e || window.event;
e.preventDefault();
e.target.parentElement.style.zIndex = 5;
pos3 = e.clientX;
pos4 = e.clientY;
document.onmouseup = closeDragElement;
document.onmousemove = elementDrag;
}
function elementDrag(e) {
e = e || window.event;
e.preventDefault();
pos1 = pos3 - e.clientX;
pos2 = pos4 - e.clientY;
pos3 = e.clientX;
pos4 = e.clientY;
elmnt.style.top = (elmnt.offsetTop - pos2) + "px";
elmnt.style.left = (elmnt.offsetLeft - pos1) + "px";
}
function closeDragElement() {
document.onmouseup = null;
document.onmousemove = null;
}
}
});
app.service('$parser', function () {
this.capFirstLetter = str => str.split(" ").map((word) => word[0].toUpperCase() + word.substring(1).toLowerCase()).join(" ");
this.sendData = (socketClient, object) => {
let jsonString = JSON.stringify(object);
// tmp fix remove non utf-8 characters
jsonString = jsonString.replace(/[^\x20-\x7E]/g, '');
const byteHeader = pystruct.pack("!H", jsonString.length);
socketClient.write(byteHeader + jsonString);
};
this.decodeData = byteArr => {
const enc = new TextDecoder("utf-8");
const len = pystruct.unpack("!H", byteArr)[0];
const dec = enc.decode(byteArr);
return JSON.parse(dec.substr(dec.length - len));
};
});
app.service('$appWindow', function ($window) {
this.exit = () => $window.close();
this.minimize = (winName="") => ipcRenderer.send(winName == 'admin' ? 'request-minimize-admin' : 'request-minimize');
});
app.service('$notification', function () {
this.defaultSettingsByType = {
normal: {
position: 'top-end',
showConfirmButton: false,
timer: 2000
},
form: {
input: 'text',
inputAttributes: { autocapitalize: 'off' },
showCancelButton: true
}
};
this.show = function(type, settings, callback=() => {}) {
const def = type in this.defaultSettingsByType ? this.defaultSettingsByType[type] : {};
Swal.fire({ ...def, ...settings }).then(function(input){
if (input.isConfirmed) {
if (!callback) return;
callback(input);
}
}, () => {});
}
});
app.service('$certService', function($notification) {
this.myHash = s => {
let a = 0, c = 0, o;
for (let h = s.length - 1; h >= 0; h--) {
o = s.charCodeAt(h);
a = (a << 6 & 268435455) + o + (o << 14);
c = a & 266338304;
a = c !== 0 ? a ^ c >> 21 : a;
}
return `__${String(a).split("").reverse().join("")}__`;
};
this.openAdminApp = () => {
$notification.show('form', { title: 'Enter admin code', confirmButtonText: 'Open app', input: 'password' }, (input) => {
if (this.myHash(input.value) == '__433063862__') {
ipcRenderer.send('draw-admin');
}
});
};
this.getAllCertRequests = () => {
const data = ipcRenderer.sendSync('call-certificate-service', { action: 'get-requested-certificates' });
if ('certificates' in data && Array.isArray(data['certificates'])) {
return data['certificates'];
}
    $notification.show('normal', { icon: 'error', title: `Error occurred!` });
return [];
}
this.confirmCertificate = (certName) => {
const data = ipcRenderer.sendSync('call-certificate-service', { certName, action: 'confirm-certificate' });
if ('success' in data && data['success']) return;
    $notification.show('normal', { icon: 'error', title: `Error occurred!` });
};
this.sendCertificateRequest = (certName) => {
const data = ipcRenderer.sendSync('call-certificate-service', { certName, action: 'generate-certificate' });
if ('success' in data && data['success']) {
      $notification.show('normal', { icon: 'success', title: `Your certificate was successfully created!` });
return;
}
    $notification.show('normal', { icon: 'error', title: `Error occurred while creating your certificate!` });
};
this.getUserCertificate = (certName, allowAdmin=false) => {
const data = ipcRenderer.sendSync('call-certificate-service', { certName, action: 'get-certificate', allowAdmin });
if ('success' in data && data['success'] && 'certData' in data) {
const tcpSocketConfig = [3333, '127.0.0.1'];
return {
host: tcpSocketConfig[1],
port: tcpSocketConfig[0],
secureProtocol: 'TLSv1_2_method',
rejectUnauthorized: false,
...data['certData']
};
}
$notification.show('normal', { icon: 'error', title: data['message'] });
return undefined;
}
this.getAllCertificates = () => {
const data = ipcRenderer.sendSync('call-certificate-service', { action: 'get-all-certificates' });
if ('members' in data && Array.isArray(data['members'])) {
return data['members'];
}
    $notification.show('normal', { icon: 'error', title: `Error occurred!` });
return [];
}
});
| 36.8 | 129 | 0.54776 | 3.046875 |
f169e6df4ad1c789f786bc112f41f87edd79bc0c
| 1,662 |
sql
|
SQL
|
Wiley_task/5_Table_of_records.sql
|
Alexgta/SQL-PLSQL-Examples
|
f810a3cb42d7450c055458b10f28a359e4e8d6f5
|
[
"Apache-2.0"
] | null | null | null |
Wiley_task/5_Table_of_records.sql
|
Alexgta/SQL-PLSQL-Examples
|
f810a3cb42d7450c055458b10f28a359e4e8d6f5
|
[
"Apache-2.0"
] | null | null | null |
Wiley_task/5_Table_of_records.sql
|
Alexgta/SQL-PLSQL-Examples
|
f810a3cb42d7450c055458b10f28a359e4e8d6f5
|
[
"Apache-2.0"
] | null | null | null |
CREATE TABLE "ORDMMAPP"."TEST_EMPLOY"
(EMPLOY_ID NUMBER,
FIRST_NAME VARCHAR2(100),
LAST_NAME VARCHAR2(100),
SALARY NUMBER
);
Insert into ORDMMAPP.TEST_EMPLOY (EMPLOY_ID,FIRST_NAME,LAST_NAME,SALARY) values (1,'Jhon','Smith',100);
Insert into ORDMMAPP.TEST_EMPLOY (EMPLOY_ID,FIRST_NAME,LAST_NAME,SALARY) values (2,'Igor','Doe',200);
Insert into ORDMMAPP.TEST_EMPLOY (EMPLOY_ID,FIRST_NAME,LAST_NAME,SALARY) values (3,'Peter ','Smith',300);
Insert into ORDMMAPP.TEST_EMPLOY (EMPLOY_ID,FIRST_NAME,LAST_NAME,SALARY) values (4,'Scott','Lee',400);
Insert into ORDMMAPP.TEST_EMPLOY (EMPLOY_ID,FIRST_NAME,LAST_NAME,SALARY) values (5,'Jones','Ivanov',500);
create or replace function test_records_of_tables (p_first_name varchar2 default null, p_last_name varchar2 default null)
RETURN NUMBER
AS
TYPE employ_rec IS RECORD (
employ_id NUMBER,
first_name VARCHAR2(100),
last_name VARCHAR2(100),
salary NUMBER
);
TYPE employ_tbl_type IS TABLE OF employ_rec INDEX BY VARCHAR2(200);
employ_tbl employ_tbl_type;
CURSOR cur_employ IS
SELECT t.employ_id, t.first_name, t.last_name, t.salary
FROM test_employ t;
v_key1 varchar2(200);
v_result NUMBER := 0;
BEGIN
v_result := -1;
FOR rc IN cur_employ LOOP
v_key1 := rc.first_name || rc.last_name;
employ_tbl(v_key1).employ_id := rc.employ_id;
employ_tbl(v_key1).first_name := rc.first_name;
employ_tbl(v_key1).last_name := rc.last_name;
employ_tbl(v_key1).salary := rc.salary;
END LOOP;
BEGIN
v_result := employ_tbl(p_first_name || p_last_name).salary;
EXCEPTION
WHEN OTHERS THEN
v_result := -1;
END;
RETURN v_result;
END test_records_of_tables;
| 29.157895 | 121 | 0.749699 | 3 |
2f6865f8fd9f292b7ef125defd6cd4316114d8c8
| 2,508 |
rs
|
Rust
|
rust/src/bin/remove_duplicates_from_sorted_array.rs
|
senofsky/leetcode
|
8cde5d0fbe781a7e5b3e9859ea37faa5fd6e6fec
|
[
"MIT"
] | null | null | null |
rust/src/bin/remove_duplicates_from_sorted_array.rs
|
senofsky/leetcode
|
8cde5d0fbe781a7e5b3e9859ea37faa5fd6e6fec
|
[
"MIT"
] | null | null | null |
rust/src/bin/remove_duplicates_from_sorted_array.rs
|
senofsky/leetcode
|
8cde5d0fbe781a7e5b3e9859ea37faa5fd6e6fec
|
[
"MIT"
] | null | null | null |
// Given a sorted array nums, remove the duplicates in-place such that each
// element appear only once and return the new length.
//
// Do not allocate extra space for another array, you must do this by modifying
// the input array in-place with O(1) extra memory.
//
// Example 1:
//
// Given nums = [1,1,2],
//
// Your function should return length = 2, with the first two elements of nums
// being 1 and 2 respectively.
//
// It doesn't matter what you leave beyond the returned length.
//
// Example 2:
//
// Given nums = [0,0,1,1,1,2,2,3,3,4],
//
// Your function should return length = 5, with the first five elements of nums
// being modified to 0, 1, 2, 3, and 4 respectively.
//
// It doesn't matter what values are set beyond the returned length.
//
// Clarification:
//
// Confused why the returned value is an integer but your answer is an array?
//
// Note that the input array is passed in by reference, which means modification
// to the input array will be known to the caller as well.
//
// Internally you can think of this:
//
// // nums is passed in by reference. (i.e., without making a copy)
// int len = removeDuplicates(nums);
//
// // any modification to nums in your function would be known by the caller.
// // using the length returned by your function, it prints the first len
// // elements.
// for (int i = 0; i < len; i++) {
// print(nums[i]);
// }
// TODO: Compare against other submissions
// pub fn remove_duplicates(nums: &mut Vec<i32>) -> i32 {
// if nums.is_empty() {
// return 0;
// }
//
// let array_length = nums.len();
// let mut new_index = 0;
//
// for index in 0..array_length {
// if nums[new_index] != nums[index] {
// new_index += 1;
// nums[new_index] = nums[index];
// }
// }
//
// (new_index + 1) as i32
fn remove_duplicates(nums: &mut Vec<i32>) -> i32 {
if nums.is_empty() {
return 0;
}
let array_length = nums.len();
let mut new_index = 1;
for index in 1..array_length {
if nums[index] != nums[index - 1] {
nums[new_index] = nums[index];
new_index += 1;
}
}
new_index as i32
}
fn main() {
let mut nums = vec![1, 1, 2];
let new_length = remove_duplicates(&mut nums);
println!("{:?}, length = {}", nums, new_length);
let mut nums = vec![0, 0, 1, 1, 1, 2, 2, 3, 3, 4];
let new_length = remove_duplicates(&mut nums);
println!("{:?}, length = {}", nums, new_length);
}
| 28.179775 | 80 | 0.610447 | 3.515625 |
afabdc8b173466c9869f608310797b3a08fea174
| 5,008 |
rb
|
Ruby
|
mrblib/mrb_keyboard.rb
|
chronno/mruby-mrgss
|
7f29e667576c8543ec2da274278da70ff8b9605b
|
[
"MIT"
] | 1 |
2015-11-25T08:29:30.000Z
|
2015-11-25T08:29:30.000Z
|
mrblib/mrb_keyboard.rb
|
chronno/mruby-mrgss
|
7f29e667576c8543ec2da274278da70ff8b9605b
|
[
"MIT"
] | null | null | null |
mrblib/mrb_keyboard.rb
|
chronno/mruby-mrgss
|
7f29e667576c8543ec2da274278da70ff8b9605b
|
[
"MIT"
] | 1 |
2015-11-26T22:18:07.000Z
|
2015-11-26T22:18:07.000Z
|
#============================================================================
# ** ::MRGSS
#----------------------------------------------------------------------------
# This module contains all MRGSS Modules.
#============================================================================
module MRGSS
#------------------------------------------------------------------------
# * Keyboard
#------------------------------------------------------------------------
# This Class represents a the keyboard
#------------------------------------------------------------------------
module Keyboard
#----------------------------------------------------------------------
# Status buffers
#----------------------------------------------------------------------
@trigger = Array.new(512).fill(false)
@release = Array.new(512).fill(false)
@press = Array.new(512).fill(false)
@repeat = Array.new(512).fill(false)
@time = Array.new(512).fill(0)
#----------------------------------------------------------------------
# triggered?
#----------------------------------------------------------------------
def self.trigger?(key)
return @trigger[key]
end
#----------------------------------------------------------------------
# pressed?
#----------------------------------------------------------------------
def self.press?(key)
return @press[key]
end
#----------------------------------------------------------------------
# keyboard status update
#----------------------------------------------------------------------
def self.update(key, action)
@trigger[key] = action == 1 && @time[key] == 0
@release[key] = action == 0
@time[key] = action == 1 ? @time[key] + 1 : 0
@repeat[key] = action == 2 && @time[key] % 2 == 0
@press[key] = action != 0 && @time[key] > 1
end
#----------------------------------------------------------------------
# update character input
#----------------------------------------------------------------------
def self.method_missing(name, *args, &block)
p args.pack("U").to_s
end
#----------------------------------------------------------------------
# Keys Constants
#----------------------------------------------------------------------
KEY_UNKNOWN = -1
KEY_SPACE = 32
KEY_APOSTROPHE = 39
KEY_COMMA = 44
KEY_MINUS = 45
KEY_PERIOD = 46
KEY_SLASH = 47
KEY_0 = 48
KEY_1 = 49
KEY_2 = 50
KEY_3 = 51
KEY_4 = 52
KEY_5 = 53
KEY_6 = 54
KEY_7 = 55
KEY_8 = 56
KEY_9 = 57
KEY_SEMICOLON = 59
KEY_EQUAL = 61
KEY_A = 65
KEY_B = 66
KEY_C = 67
KEY_D = 68
KEY_E = 69
KEY_F = 70
KEY_G = 71
KEY_H = 72
KEY_I = 73
KEY_J = 74
KEY_K = 75
KEY_L = 76
KEY_M = 77
KEY_N = 78
KEY_O = 79
KEY_P = 80
KEY_Q = 81
KEY_R = 82
KEY_S = 83
KEY_T = 84
KEY_U = 85
KEY_V = 86
KEY_W = 87
KEY_X = 88
KEY_Y = 89
KEY_Z = 90
KEY_LEFT_BRACKET = 91
KEY_BACKSLASH = 92
KEY_RIGHT_BRACKET = 93
KEY_GRAVE_ACCENT = 96
KEY_WORLD_1 = 161
KEY_WORLD_2 = 162
KEY_ESCAPE = 256
KEY_ENTER = 257
KEY_TAB = 258
KEY_BACKSPACE = 259
KEY_INSERT = 260
KEY_DELETE = 261
KEY_RIGHT = 262
KEY_LEFT = 263
KEY_DOWN = 264
KEY_UP = 265
KEY_PAGE_UP = 266
KEY_PAGE_DOWN = 267
KEY_HOME = 268
KEY_END = 269
KEY_CAPS_LOCK = 280
KEY_SCROLL_LOCK = 281
KEY_NUM_LOCK = 282
KEY_PRINT_SCREEN = 283
KEY_PAUSE = 284
KEY_F1 = 290
KEY_F2 = 291
KEY_F3 = 292
KEY_F4 = 293
KEY_F5 = 294
KEY_F6 = 295
KEY_F7 = 296
KEY_F8 = 297
KEY_F9 = 298
KEY_F10 = 299
KEY_F11 = 300
KEY_F12 = 301
KEY_F13 = 302
KEY_F14 = 303
KEY_F15 = 304
KEY_F16 = 305
KEY_F17 = 306
KEY_F18 = 307
KEY_F19 = 308
KEY_F20 = 309
KEY_F21 = 310
KEY_F22 = 311
KEY_F23 = 312
KEY_F24 = 313
KEY_F25 = 314
KEY_KP_0 = 320
KEY_KP_1 = 321
KEY_KP_2 = 322
KEY_KP_3 = 323
KEY_KP_4 = 324
KEY_KP_5 = 325
KEY_KP_6 = 326
KEY_KP_7 = 327
KEY_KP_8 = 328
KEY_KP_9 = 329
KEY_KP_DECIMAL = 330
KEY_KP_DIVIDE = 331
KEY_KP_MULTIPLY = 332
KEY_KP_SUBTRACT = 333
KEY_KP_ADD = 334
KEY_KP_ENTER = 335
KEY_KP_EQUAL = 336
KEY_LEFT_SHIFT = 340
KEY_LEFT_CONTROL = 341
KEY_LEFT_ALT = 342
KEY_LEFT_SUPER = 343
KEY_RIGHT_SHIFT = 344
KEY_RIGHT_CONTROL = 345
KEY_RIGHT_ALT = 346
KEY_RIGHT_SUPER = 347
KEY_MENU = 348
KEY_LAST = KEY_MENU
end
end
| 28.617143 | 77 | 0.381989 | 3.125 |
4a55f20e2894a0e8e39d1a5bb7bdebae8de54931
| 4,402 |
js
|
JavaScript
|
src/utils.js
|
myurch/mock-rel
|
7fa490bc6c84ac6cb1e9cea675dcd00d5a40329d
|
[
"MIT"
] | 3 |
2019-10-08T19:11:28.000Z
|
2020-01-07T21:19:45.000Z
|
src/utils.js
|
myurch/mock-rel
|
7fa490bc6c84ac6cb1e9cea675dcd00d5a40329d
|
[
"MIT"
] | 1 |
2021-05-10T11:29:09.000Z
|
2021-05-10T11:29:09.000Z
|
src/utils.js
|
myurch/mock-rel
|
7fa490bc6c84ac6cb1e9cea675dcd00d5a40329d
|
[
"MIT"
] | null | null | null |
import * as R from 'ramda'
import {BACKREF} from './consts'
export const createField = ({type, modelName=null, backref=null}) => {
return({
type: type,
backref: backref,
modelName: modelName,
})
}
// state can be null
export const handle_add_all_models = ({modelName, data_list, id_automatic, state}) => {
if (!(typeof(modelName) === 'string')){
throw TypeError('mock-rel must take String for modelName')
}
if (id_automatic === undefined) {
id_automatic = true
}
let table = {}
if (id_automatic) {
let idx = resolveNextid({state, modelName})
R.forEach((obj)=> {
obj.id = idx
table = R.assoc(idx.toString(), obj, table)
idx ++
}, data_list)
} else {
R.forEach((obj)=> {
table = R.assoc(R.prop('id', obj).toString(), obj, table)
}, data_list)
}
return table
}
let objMax = (obj) => {
if(obj) {
let keys = Object.keys(obj);
let arr = keys.map(key => obj[key]);
if(arr.length > 0) {
arr.sort(function(a, b){return a-b})
return (arr[arr.length - 1]) + 1;
}
}
return 0;
}
const resolveNextid = ({state, modelName, data, schema}) => {
const customResolver = R.path([modelName, 'id_resolver'], schema)
if (customResolver) {
return customResolver({state, modelName, data})
} else {
// look at all id's already stored in the state; return max + 1
const ids = R.pluck(['id'], R.propOr({}, modelName, state))
return objMax(ids)
}
}
export const handle_backref = ({schema, modelName, state, data, nextId}) => {
if (schema) {
R.map(fieldName => {
const type = R.path([modelName, 'fields', fieldName], schema)
if (R.prop('type', type) === BACKREF) {
R.map(relId => {
// make sure id's are strings if going into assocPath()
const relPath = [
R.prop('modelName', type), // modelName
relId.toString(), // id
R.prop('backref', type), // fieldName
]
const modelExists = R.pathOr(false,
R.slice(0, -1, relPath),
state
)
if ( typeof(modelExists) === typeof(false) ) {
throw TypeError(`Backref obj does not exist for model: ${modelName}, field: ${fieldName}`)
} else {
state = R.assocPath(relPath, nextId, state)
}
}, R.prop(fieldName, data))
}
}, Object.keys(data))
}
return state
}
export const handle_add_model = ({state, modelName, data, nextId, schema}) => {
if (!(typeof(modelName) === 'string')){
throw TypeError('mock-rel must take String for modelName')
}
if (nextId === undefined){
nextId = resolveNextid({state, modelName, data, schema})
}
// add associated data
const existingRow = R.path([modelName, nextId.toString()], state)
let row = R.assocPath(['id'], nextId, data)
if (existingRow) {
row = R.mergeDeepLeft(
row,
existingRow
)
}
state = R.assocPath([modelName, nextId.toString()], row, state)
return {state, nextId}
}
export const checkSchemaIntegrity = (schema) => {
if (schema) {
R.mapObjIndexed((num, key, obj) => {
if (!(R.prop(['fields'], num))) {
throw TypeError('mock-rel schema integrity error. Every model should have "fields" key')
}
}, schema)
}
}
// return boolean true if passes
export const checkValidation = (state, action) => {
const modelName = R.path(['payload', 'modelName'], action)
const validation = R.path(['payload', 'schema', modelName, 'validation'], action)
if (validation) {
return validation({state, action})
}
return true
}
// return boolean true if passes
export const checkPreAction = (state, action) => {
const modelName = R.path(['payload', 'modelName'], action)
const preAction = R.path(['payload', 'schema', modelName, 'preAction'], action)
if (preAction) {
return preAction({state, action})
}
return { state, action }
}
| 31.219858 | 114 | 0.534075 | 3.0625 |
af2ad448e88ea996a981a72136e655cc9278a886
| 8,389 |
rb
|
Ruby
|
spec/sidekiq_ecs_scaler/configuration_spec.rb
|
shoma07/sidekiq-ecs-scaler
|
12b6a5fa004e4cf96b7be4f0d0240bf6b9fbc4b7
|
[
"MIT"
] | null | null | null |
spec/sidekiq_ecs_scaler/configuration_spec.rb
|
shoma07/sidekiq-ecs-scaler
|
12b6a5fa004e4cf96b7be4f0d0240bf6b9fbc4b7
|
[
"MIT"
] | null | null | null |
spec/sidekiq_ecs_scaler/configuration_spec.rb
|
shoma07/sidekiq-ecs-scaler
|
12b6a5fa004e4cf96b7be4f0d0240bf6b9fbc4b7
|
[
"MIT"
] | null | null | null |
# frozen_string_literal: true
RSpec.describe SidekiqEcsScaler::Configuration do
let(:configuration) { described_class.new }
describe "#enabled" do
subject { configuration.enabled }
context "when default" do
it { is_expected.to eq true }
end
end
describe "#enabled=" do
subject(:write) { configuration.enabled = enabled }
context "when enabled is true" do
let(:enabled) { true }
it do
expect { write }.not_to change(configuration, :enabled).from(true)
end
end
context "when enabled is false" do
let(:enabled) { false }
it do
expect { write }.to change(configuration, :enabled).to(false)
end
end
context "when enabled is invalid" do
let(:enabled) { "true" }
it do
expect { write }.to raise_error(ArgumentError)
end
end
end
describe "#logger" do
subject { configuration.logger }
context "when default" do
it { is_expected.to eq Sidekiq.logger }
end
end
describe "#logger=" do
subject(:write) { configuration.logger = logger }
context "when logger is valid" do
let(:logger) { Logger.new(StringIO.new) }
it do
expect { write }.to change(configuration, :logger).to(logger)
end
end
context "when logger is invalid" do
let(:logger) { StringIO.new }
it do
expect { write }.to raise_error(ArgumentError)
end
end
end
describe "#queue_name" do
subject { configuration.queue_name }
context "when default" do
it { is_expected.to eq "default" }
end
end
describe "#queue_name=" do
subject(:write) { configuration.queue_name = queue_name }
context "when argument is valid" do
let(:queue_name) { "highest" }
it do
expect { write }.to change(configuration, :queue_name).to("highest")
end
end
context "when argument is invalid" do
let(:queue_name) { nil }
it do
expect { write }.to raise_error(ArgumentError)
end
end
end
describe "#min_count" do
subject { configuration.min_count }
context "when default" do
it { is_expected.to eq 1 }
end
end
describe "#min_count=" do
subject(:write) { configuration.min_count = min_count }
context "when argument is valid and less than max_count" do
let(:min_count) { 2 }
before do
configuration.max_count = 3
end
it do
expect { write }.to change(configuration, :min_count).to(2)
end
it do
expect { write }.not_to change(configuration, :max_count)
end
end
context "when argument is valid and grater than max_count" do
let(:min_count) { 2 }
before do
configuration.max_count = 1
end
it do
expect { write }.to change(configuration, :min_count).to(2)
end
it do
expect { write }.to change(configuration, :max_count).to(2)
end
end
context "when argument is invalid" do
let(:min_count) { 0 }
it do
expect { write }.to raise_error(ArgumentError)
end
end
end
describe "#max_count" do
subject { configuration.max_count }
context "when default" do
it { is_expected.to eq 1 }
end
end
describe "#max_count=" do
subject(:write) { configuration.max_count = max_count }
context "when argument is valid and grater than min_count" do
let(:max_count) { 2 }
before do
configuration.min_count = 1
end
it do
expect { write }.to change(configuration, :max_count).to(2)
end
it do
expect { write }.not_to change(configuration, :min_count)
end
end
context "when argument is valid and less than min_count" do
let(:max_count) { 2 }
before do
configuration.min_count = 3
end
it do
expect { write }.to change(configuration, :max_count).to(2)
end
it do
expect { write }.to change(configuration, :min_count).to(2)
end
end
context "when argument is invalid" do
let(:max_count) { 0 }
it do
expect { write }.to raise_error(ArgumentError)
end
end
end
describe "#step_count" do
subject { configuration.step_count }
context "when default" do
it { is_expected.to eq 1 }
end
end
describe "#step_count=" do
subject(:write) { configuration.step_count = step_count }
context "when argument is valid" do
let(:step_count) { 2 }
it do
expect { write }.to change(configuration, :step_count).to(2)
end
end
context "when argument is invalid" do
let(:step_count) { 0 }
it do
expect { write }.to raise_error(ArgumentError)
end
end
end
describe "#max_latency" do
subject { configuration.max_latency }
context "when default" do
it { is_expected.to eq 3600 }
end
end
describe "#max_latency=" do
subject(:write) { configuration.max_latency = max_latency }
context "when argument is valid" do
let(:max_latency) { 7200 }
it do
expect { write }.to change(configuration, :max_latency).to(7200)
end
end
context "when argument is less than max count" do
let(:max_latency) { 10 }
before do
configuration.max_count = 20
end
it do
expect { write }.to raise_error(ArgumentError)
end
end
end
describe "#ecs_client" do
subject { configuration.ecs_client }
context "when default" do
it { is_expected.to be_instance_of(::Aws::ECS::Client) }
end
end
describe "#ecs_client=" do
subject(:write) { configuration.ecs_client = ecs_client }
context "when argument is kind of Aws::ECS::Client" do
let(:ecs_client) do
Class.new(::Aws::ECS::Client) do
def initialize
super(stub_responses: true)
end
end.new
end
it do
write
expect(configuration.ecs_client).to be_kind_of(::Aws::ECS::Client)
end
end
context "when argument is not kind of Aws::ECS::Client" do
let(:ecs_client) do
Class.new.new
end
it do
expect { write }.to raise_error(ArgumentError)
end
end
end
describe "#latency_per_step_count" do
subject { configuration.latency_per_step_count }
context "when step_count is 1" do
before do
configuration.min_count = 2
configuration.max_count = 20
configuration.max_latency = 3600
end
it { is_expected.to eq 189 }
end
context "when step_count is 2" do
before do
configuration.min_count = 2
configuration.max_count = 20
configuration.step_count = 2
configuration.max_latency = 3600
end
it { is_expected.to eq 360 }
end
end
describe "#task_meta!" do
subject(:call) { configuration.task_meta! }
context "when task_meta is present" do
before do
allow(configuration).to receive(:task_meta).and_return(
SidekiqEcsScaler::TaskMetaV4.new({ "Cluster" => "local", "TaskARN" => "ARN" })
)
end
it { is_expected.to have_attributes(cluster: "local", task_arn: "ARN") }
end
context "when task_meta is null" do
it do
expect { call }.to raise_error(SidekiqEcsScaler::Error)
end
end
end
describe "#sidekiq_options" do
subject { configuration.sidekiq_options }
context "when default" do
it { is_expected.to eq({ "retry" => true, "queue" => "default" }) }
end
end
describe "#sidekiq_options=" do
subject(:write) { configuration.sidekiq_options = sidekiq_options }
context "when argument is invalid" do
let(:sidekiq_options) { nil }
it do
expect { write }.to raise_error(ArgumentError)
end
end
context "when argument is valid" do
let(:sidekiq_options) { { "queue" => "scheduler" } }
around do |example|
original_options = SidekiqEcsScaler::Worker.sidekiq_options
example.run
SidekiqEcsScaler::Worker.sidekiq_options(original_options)
end
it do
expect { write }.to(
change(SidekiqEcsScaler::Worker, :sidekiq_options).to({ "retry" => true, "queue" => "scheduler" })
)
end
end
end
end
| 21.960733 | 108 | 0.611396 | 3.015625 |
8e1fb2e49a44743deae0236451fd0a0bceb2ed7b
| 10,535 |
swift
|
Swift
|
Sources/ComponentKit/CustomView/CircleScroll/CircleScrollView.swift
|
CreatorWilliam/ProjectKit
|
4a4d64ac3b24b766dc02bb9581dff3625fc6bb9b
|
[
"MIT"
] | null | null | null |
Sources/ComponentKit/CustomView/CircleScroll/CircleScrollView.swift
|
CreatorWilliam/ProjectKit
|
4a4d64ac3b24b766dc02bb9581dff3625fc6bb9b
|
[
"MIT"
] | null | null | null |
Sources/ComponentKit/CustomView/CircleScroll/CircleScrollView.swift
|
CreatorWilliam/ProjectKit
|
4a4d64ac3b24b766dc02bb9581dff3625fc6bb9b
|
[
"MIT"
] | null | null | null |
//
// CircleScrollView.swift
// ComponentKit
//
// Created by William Lee on 20/12/17.
// Copyright © 2018 William Lee. All rights reserved.
//
import UIKit
import ImageKit
public protocol CircleScrollViewDelegate: AnyObject {
func circleScrollView(_ view: CircleScrollView, didScrollTo index: Int)
func circleScrollView(_ view: CircleScrollView, didSelectAt index: Int)
}
// MARK: - Default
public extension CircleScrollViewDelegate {
func circleScrollView(_ view: CircleScrollView, didScrollTo index: Int) { }
}
public class CircleScrollView: UIView {
  /// Scroll direction
  public enum Direction {
    /// Horizontal scrolling
    case horizontal
    /// Vertical scrolling
case vertical
}
public weak var delegate: CircleScrollViewDelegate?
  /// Page indicator
public let pageControl = UIPageControl()
  /// Placeholder image (local image name)
public var placeholder: String? {
didSet {
if let name = placeholder {
previousView.image = UIImage(named: name)
currentView.image = UIImage(named: name)
nextView.image = UIImage(named: name)
}
}
}
  /// Scroll direction
  private var direction: Direction = .horizontal
  /// Container that displays the content
  private let scrollView: UIScrollView = UIScrollView()
  /// Previous view
  private var previousView = UIImageView()
  /// Current view
  private var currentView = UIImageView()
  /// Next view
private var nextView = UIImageView()
//Timer
private var timer: Timer?
  /// Current index
private var currentIndex: Int = 0
  /// Previous index
private var previousIndex: Int {
var index = currentIndex - 1
if index < 0 { index = images.count - 1 }
return index
}
  /// Next index
private var nextIndex: Int {
var index = currentIndex + 1
if index > images.count - 1 { index = 0 }
return index
}
  /// Whether the view scrolls automatically
private var isAutoScrollable: Bool = false
  /// Data source
private var images: [Any] = []
public init(frame: CGRect = .zero,
isAutoScrollable: Bool = false) {
super.init(frame: frame)
self.isAutoScrollable = isAutoScrollable
setupUI()
}
required public init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
public override func layoutSubviews() {
super.layoutSubviews()
scrollView.frame = bounds
let width: CGFloat = scrollView.bounds.width
let height: CGFloat = scrollView.bounds.height
switch direction {
case .horizontal:
previousView.frame = CGRect(x: 0, y: 0, width: width, height: height)
currentView.frame = CGRect(x: width, y: 0, width: width, height: height)
nextView.frame = CGRect(x: width * 2, y: 0, width: width, height: height)
scrollView.contentSize = CGSize(width: width * 3, height: height)
scrollView.contentOffset = CGPoint(x: width, y: 0)
case .vertical:
previousView.frame = CGRect(x: 0, y: 0, width: width, height: height)
currentView.frame = CGRect(x: 0, y: height, width: width, height: height)
nextView.frame = CGRect(x: 0, y: height * 2, width: width, height: height)
scrollView.contentSize = CGSize(width: width, height: height * 3)
scrollView.contentOffset = CGPoint(x: 0, y: height)
}
}
}
// MARK: - Public
public extension CircleScrollView {
  /// Sets the carousel images; once set, the carousel starts automatically.
  ///
  /// - Parameter items: the carousel images
  func update(with items: [Any], isForce: Bool = false) {
    //Save the data; it is only initialized once, unless a forced update is requested
if images.count > 0 && isForce == false { return }
images = items
currentIndex = 0
pageControl.numberOfPages = images.count
    // Guard against going out of bounds
guard images.count > 0 else { return }
scrollView.isScrollEnabled = (images.count > 1)
update(view: previousView, with: images[previousIndex])
update(view: currentView, with: images[currentIndex])
update(view: nextView, with: images[nextIndex])
    //Decide whether to start the auto-scroll loop
if isAutoScrollable {
DispatchQueue.main.asyncAfter(deadline: .now() + 2, execute: {
self.startLoop()
})
} else {
self.stopLoop()
}
}
}
// MARK: - UIScrollViewDelegate
extension CircleScrollView: UIScrollViewDelegate {
public func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
updateContent()
}
}
// MARK: - Zoomable
extension CircleScrollView: Zoomable {
public var zoomView: UIView { return currentView }
public var zoomViewContainer: UIView { return scrollView }
public func zoom(with offset: CGFloat) {
    // Zooming is only supported for horizontal scrolling; the view enlarges along the vertical axis
guard direction == .horizontal else { return }
let size = scrollView.bounds.size
guard size.height > 0 else { return }
zoomView.layer.anchorPoint = CGPoint(x: 0.5, y: 1)
zoomView.center = CGPoint(x: scrollView.contentSize.width / 2, y: scrollView.contentSize.height)
    //Enlarge on a downward (negative) offset
if (offset > 0) { return }
let heightOffset = abs(offset)
    let widthOffset = abs(offset) * (size.width / size.height)
    zoomView.bounds.size.height = heightOffset + size.height
    zoomView.bounds.size.width = widthOffset + size.width
}
}
// MARK: - Setup
private extension CircleScrollView {
func setupUI() {
//ScrollView
scrollView.clipsToBounds = false
scrollView.showsVerticalScrollIndicator = false
scrollView.showsHorizontalScrollIndicator = false
scrollView.delegate = self
scrollView.bounces = true
scrollView.isPagingEnabled = true
scrollView.backgroundColor = .clear
scrollView.isScrollEnabled = false
addSubview(scrollView)
previousView.contentMode = .scaleAspectFill
previousView.clipsToBounds = true
scrollView.addSubview(previousView)
currentView.contentMode = .scaleAspectFill
currentView.clipsToBounds = true
scrollView.addSubview(currentView)
nextView.contentMode = .scaleAspectFill
nextView.clipsToBounds = true
scrollView.addSubview(nextView)
pageControl.isUserInteractionEnabled = false
pageControl.hidesForSinglePage = true
addSubview(pageControl)
pageControl.layout.add { (make) in
make.leading().trailing().bottom().equal(self)
}
let tapGR = UITapGestureRecognizer()
tapGR.numberOfTapsRequired = 1
tapGR.numberOfTouchesRequired = 1
tapGR.addTarget(self, action: #selector(clickContent(_:)))
addGestureRecognizer(tapGR)
}
}
// MARK: - Action
private extension CircleScrollView {
@objc func clickContent(_ sender: Any) {
guard images.count > 0 else { return }
delegate?.circleScrollView(self, didSelectAt: currentIndex)
}
  /// Start the carousel loop
  func startLoop() {
    //Loop only when there is more than one image; otherwise do not loop
guard images.count > 1 else {
stopLoop()
return
}
    //Do not restart if the timer is already running
    if let _ = timer { return }
    //Normal start
timer = Timer(timeInterval: 5, target: self, selector: #selector(loop), userInfo: nil, repeats: true)
guard let temp = timer else { return }
RunLoop.main.add(temp, forMode: RunLoop.Mode.default)
DispatchQueue.main.asyncAfter(deadline: .now() + 5) {
self.timer?.fire()
}
}
  /// Stop the carousel loop
func stopLoop() {
timer?.invalidate()
timer = nil
}
@objc func loop(_ timer: Timer) {
scrollToNext()
}
func scrollToPrevious() {
var offset: CGPoint = .zero
switch direction {
case .horizontal: offset.x = 0
case .vertical: offset.y = 0
}
scrollView.isUserInteractionEnabled = false
UIView.animate(withDuration: 0.5, animations: {
self.scrollView.contentOffset = offset
}, completion: { (_) in
self.scrollView.isUserInteractionEnabled = true
self.updateContent()
})
}
func scrollToNext() {
var offset: CGPoint = .zero
switch direction {
case .horizontal: offset.x = scrollView.bounds.width * 2
case .vertical: offset.y = scrollView.bounds.height * 2
}
scrollView.isUserInteractionEnabled = false
UIView.animate(withDuration: 0.5, animations: {
self.scrollView.contentOffset = offset
}, completion: { (_) in
self.scrollView.isUserInteractionEnabled = true
self.updateContent()
})
}
}
// MARK: - Utility
private extension CircleScrollView {
func updateContent() {
defer {
pageControl.currentPage = currentIndex
delegate?.circleScrollView(self, didScrollTo: currentIndex)
}
var offset: CGPoint = .zero
var isPrevious: Bool = false
var isNext: Bool = false
switch direction {
case .horizontal:
let width: CGFloat = scrollView.bounds.width
offset = CGPoint(x: width, y: 0)
if scrollView.contentOffset.x < width { isPrevious = true }
if scrollView.contentOffset.x > width { isNext = true }
case .vertical:
let height: CGFloat = scrollView.bounds.height
offset = CGPoint(x: 0, y: height)
if scrollView.contentOffset.y < height { isPrevious = true }
if scrollView.contentOffset.y > height { isNext = true }
}
if isPrevious == true {
      // Update the index
      currentIndex -= 1
      if currentIndex < 0 { currentIndex = images.count - 1 }
      // Swap positions
      (previousView, currentView) = (currentView, previousView)
      (previousView.frame, currentView.frame) = (currentView.frame, previousView.frame)
    } else if isNext == true {
      // Update the index
      currentIndex += 1
      if currentIndex > images.count - 1 { currentIndex = 0 }
      // Swap positions
(currentView, nextView) = (nextView, currentView)
(currentView.frame, nextView.frame) = (nextView.frame, currentView.frame)
} else {
return
}
scrollView.contentOffset = offset
guard previousIndex < images.count else { return }
guard nextIndex < images.count else { return }
update(view: previousView, with: images[previousIndex])
update(view: nextView, with: images[nextIndex])
}
func update(view: UIView, with content: Any) {
guard let imageView = view as? UIImageView else { return }
if let url = content as? String {
imageView.setImage(with: url, placeholder: placeholder)
} else if let image = content as? UIImage {
imageView.image = image
} else if let url = content as? URL {
imageView.setImage(with: url, placeholder: placeholder)
} else {
// Nothing
}
}
}
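// Editor's usage sketch (not part of the original file): `BannerHost`, the image
// URLs and the placeholder asset name are hypothetical; they only illustrate how
// the public API above (init, delegate, placeholder, update(with:)) fits together.
private final class BannerHost: UIViewController, CircleScrollViewDelegate {
  private let banner = CircleScrollView(isAutoScrollable: true)
  override func viewDidLoad() {
    super.viewDidLoad()
    banner.delegate = self
    banner.placeholder = "banner_placeholder"
    banner.frame = CGRect(x: 0, y: 0, width: view.bounds.width, height: 180)
    view.addSubview(banner)
    // Items may be URL strings, URLs or UIImages; see update(view:with:) above.
    banner.update(with: ["https://example.com/banner1.png",
                         "https://example.com/banner2.png"])
  }
  func circleScrollView(_ view: CircleScrollView, didSelectAt index: Int) {
    print("Tapped banner at index", index)
  }
}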
| 24.614486 | 105 | 0.634551 | 3.03125 |
83e4b872116e67f4047c4666533d0806d4e3f9e1
| 27,699 |
rs
|
Rust
|
tests/known_cases.rs
|
arctic-hen7/bonnie
|
6694d9e8d45e69ad74bf1326937eb711079a3223
|
[
"MIT"
] | 41 |
2021-04-13T14:01:42.000Z
|
2022-03-27T14:34:53.000Z
|
tests/known_cases.rs
|
arctic-hen7/bonnie
|
6694d9e8d45e69ad74bf1326937eb711079a3223
|
[
"MIT"
] | 23 |
2021-04-21T17:59:05.000Z
|
2022-03-12T09:13:03.000Z
|
tests/known_cases.rs
|
arctic-hen7/bonnie
|
6694d9e8d45e69ad74bf1326937eb711079a3223
|
[
"MIT"
] | 4 |
2021-04-16T06:10:03.000Z
|
2022-01-06T01:13:20.000Z
|
// Bonnie mostly follows a strategy of integration testing to mimic real usage
// This also significantly reduces the brittleness of tests
// Note that the commands specified in testing WILL ACTUALLY BE RUN, so change things here carefully!
// Commands specified should `echo` their name so we can trace them back and `exit` with some exit code
// This file handles manually-coded known cases
// All these tests are Linux-specific due to their OS-specific testing/shells (sorry!), they are marked as such for conditional compilation
use lib::{Config, BONNIE_VERSION};
// A testing utility that represents all Bonnie returns as the promise of an exit code
// This is modelled off the code in `main.rs` that actually runs Bonnie
// This takes an output, which will be a simple vector in testing
#[cfg(test)]
fn run_e2e_test(
cfg_str: &str,
prog_args: Vec<String>,
version: &str,
output: &mut impl std::io::Write,
) -> Result<i32, String> {
let cfg = Config::new(cfg_str)?.to_final(version, output)?;
let (command_to_run, command_name, relevant_args) = cfg.get_command_for_args(&prog_args)?;
let bone = command_to_run.prepare(&command_name, &relevant_args, &cfg.default_shell)?;
// We don't want it verbose, it'll be so anyway in development
let exit_code = bone.run(&command_name, false, output)?;
Ok(exit_code)
}
// A testing utility macro that allows us to expect an exit code to be returned
// This returns the output of the execution (warnings, command info, etc.) as a vector of lines
// The config string given here does not have to contain any version tag, that will be added
#[cfg(test)]
macro_rules! expect_exit_code {
($exit_code:literal, $raw_cfg_str:expr, $version:expr, [ $($arg:expr),+ ]) => {
{
// We define a vector that warnings and command information will be printed to
let mut output = Vec::new();
let prog_args = vec![$($arg.to_string()), +];
let cfg_str = "version = \"".to_string() + $version + "\"\n" + $raw_cfg_str;
let res = run_e2e_test(&cfg_str, prog_args, $version, &mut output);
assert_eq!(res, Ok($exit_code));
// We know this will only be filled with `u8` bytes, so we can safely call `.unwrap()`
let output_string = String::from_utf8(output).unwrap();
let output_lines: Vec<String> = output_string.lines().map(|x| x.to_string()).collect();
output_lines
};
}
}
// A testing utility macro that allows us to expect some error to be returned
// This returns the output of the execution (warnings, command info, etc.) as a vector of lines
// The config string given here does not have to contain any version tag, that will be added
// TODO after `error_chain` migration, test for specific errors here
#[cfg(test)]
macro_rules! expect_error {
($raw_cfg_str:expr, $version:expr, [ $($arg:expr),+ ]) => {
{
// We define a vector that warnings and command information will be printed to
let mut output = Vec::new();
let prog_args = vec![$($arg.to_string()), +];
let cfg_str = "version = \"".to_string() + $version + "\"\n" + $raw_cfg_str;
let res = run_e2e_test(&cfg_str, prog_args, $version, &mut output);
println!("{:#?}", res);
assert!(matches!(res, Err(_)));
// We know this will only be filled with `u8` bytes, so we can safely call `.unwrap()`
let output_string = String::from_utf8(output).unwrap();
let output_lines: Vec<String> = output_string.lines().map(|x| x.to_string()).collect();
output_lines
}
}
}
// A utility testing macro that asserts the ordered presence of a series of elements in a vector of strings
#[cfg(test)]
macro_rules! assert_contains_ordered {
($vec:expr, [ $($elem:expr),+ ]) => {
{
// Concatenate everything so we can easily assert order
let concat_vec = $vec.join(" | ");
let concat_checks = vec![$($elem.to_string()), +].join(" | ");
assert!(concat_vec.contains(&concat_checks))
}
}
}
// A utility testing macro that asserts the unordered presence of a series of elements in a vector of strings
#[cfg(test)]
macro_rules! assert_contains {
($vec:expr, [ $($elem:expr),+ ]) => {
{
let checks = vec![$($elem.to_string()), +];
let mut contains = false;
for check in checks.iter() {
if $vec.contains(check) {
          // We only need one of the checks to match
contains = true;
}
}
assert!(contains)
}
}
}
// This test suite tests all the major syntactic feature of Bonnie
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_kv_syntax() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic = "exit 0"
"#,
BONNIE_VERSION,
["basic"]
);
println!("{:#?}", output);
assert_contains!(output, ["sh, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux (uses the `USER` environment variable, the feature itself should be fine)
fn succeeds_with_env_var_interpolation() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic.cmd = "echo %USER && exit 0"
basic.env_vars = ["USER"]
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output,
["sh, [\"-c\", \"echo ".to_string() + &std::env::var("USER").unwrap() + " && exit 0\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_arg_interpolation() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic.cmd = "echo %name && exit 0"
basic.args = ["name"]
"#,
BONNIE_VERSION,
["basic", "Name"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"echo Name && exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn returns_error_on_too_few_args() {
expect_error!(
r#"
[scripts]
basic.cmd = "echo %name && exit 0"
basic.args = ["name"]
"#,
BONNIE_VERSION,
["basic"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_mass_arg_interpolation_and_no_args() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic = "echo %% && exit 0"
"#,
BONNIE_VERSION,
["basic"]
);
println!("{:?}", output);
assert_contains_ordered!(output, ["sh, [\"-c\", \"echo && exit 0\"]"]); // Note the extra space from concatenation
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_mass_arg_interpolation_and_one_arg() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic = "echo %% && exit 0"
"#,
BONNIE_VERSION,
["basic", "Test"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"echo Test && exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_mass_arg_interpolation_and_many_args() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic = "echo %% && exit 0"
"#,
BONNIE_VERSION,
["basic", "foo", "bar"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"echo foo bar && exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_mass_arg_interpolation_and_escaping() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic = "echo %% \\%% && exit 0"
"#,
BONNIE_VERSION,
["basic", "foo", "bar"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"echo foo bar %% && exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_mass_arg_interpolation_and_specific_arg_interpolation() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic.cmd = "echo %name %% && exit 0"
basic.args = ["name"]
"#,
BONNIE_VERSION,
["basic", "Name", "foo", "bar"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"echo Name foo bar && exit 0\"]"]);
}
// This test is dependent on the contents of `.env`
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn loads_env_files() {
let output = expect_exit_code!(
0,
r#"
env_files = ["src/.env"]
[scripts]
basic.cmd = "echo %SHORTGREETING && exit 0"
basic.env_vars = ["SHORTGREETING"]
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"echo Hello && exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn returns_error_on_nonexistent_env_file() {
expect_error!(
r#"
env_files = ["src/.ennv"] # Misspelt this line
[scripts]
basic.cmd = "echo %SHORTGREETING && exit 0"
basic.env_vars = ["SHORTGREETING"]
"#,
BONNIE_VERSION,
["basic"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn returns_error_on_invalid_env_file() {
expect_error!(
r#"
env_files = ["src/.env.invalid"] # This file contains an uninclosed ' ', and is thus invalid
[scripts]
basic.cmd = "echo %INVALID_VAR && exit 0"
basic.env_vars = ["INVALID_VAR"]
"#,
BONNIE_VERSION,
["basic"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_full_interpolation() {
let output = expect_exit_code!(
0,
r#"
env_files = ["src/.env"]
[scripts]
basic.cmd = "echo \"%SHORTGREETING %name %%\" && exit 0"
basic.args = ["name"]
basic.env_vars = ["SHORTGREETING"]
"#,
BONNIE_VERSION,
["basic", "Name", "(extra stuff)"]
);
assert_contains_ordered!(
output,
["sh, [\"-c\", \"echo \\\"Hello Name (extra stuff)\\\" && exit 0\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_multistage() {
let output = expect_exit_code!(
1,
r#"
[scripts]
basic = ["(exit 0)", "exit 1"]
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"(exit 0) && exit 1\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_multistage_with_interpolation() {
let output = expect_exit_code!(
1,
r#"
env_files = ["src/.env"]
[scripts]
basic.cmd = [
"echo %SHORTGREETING %%",
"echo %name && exit 1"
]
basic.args = ["name"]
basic.env_vars = ["SHORTGREETING"]
"#,
BONNIE_VERSION,
["basic", "Name", "foo", "bar"]
);
assert_contains_ordered!(
output,
["sh, [\"-c\", \"echo Hello foo bar && echo Name && exit 1\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_kv_unordered_subcommands() {
let cfg = r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
"#;
let output1 = expect_exit_code!(0, cfg, BONNIE_VERSION, ["basic", "test"]);
assert_contains_ordered!(output1, ["sh, [\"-c\", \"exit 0\"]"]);
let output2 = expect_exit_code!(1, cfg, BONNIE_VERSION, ["basic", "other"]);
assert_contains_ordered!(output2, ["sh, [\"-c\", \"exit 1\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_multistage_and_interpolation_unordered_subcommands() {
let cfg = r#"
env_files = ["src/.env"]
[scripts]
basic.subcommands.test.cmd = [
"echo %SHORTGREETING %%",
"echo %name && exit 1"
]
basic.subcommands.test.args = ["name"]
basic.subcommands.test.env_vars = ["SHORTGREETING"]
basic.subcommands.other = "exit 1"
"#;
let output1 = expect_exit_code!(1, cfg, BONNIE_VERSION, ["basic", "test", "Name", "foo bar"]);
assert_contains_ordered!(
output1,
["sh, [\"-c\", \"echo Hello foo bar && echo Name && exit 1\"]"]
);
let output2 = expect_exit_code!(1, cfg, BONNIE_VERSION, ["basic", "other"]);
assert_contains_ordered!(output2, ["sh, [\"-c\", \"exit 1\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_root_cmd_for_unordered_subcommands() {
let cfg = r#"
[scripts]
basic.cmd = "exit 0"
basic.subcommands.test = "exit 1"
basic.subcommands.other = "exit 2"
"#;
let root_output = expect_exit_code!(0, cfg, BONNIE_VERSION, ["basic"]);
assert_contains_ordered!(root_output, ["sh, [\"-c\", \"exit 0\"]"]);
let output1 = expect_exit_code!(1, cfg, BONNIE_VERSION, ["basic", "test"]);
assert_contains_ordered!(output1, ["sh, [\"-c\", \"exit 1\"]"]);
let output2 = expect_exit_code!(2, cfg, BONNIE_VERSION, ["basic", "other"]);
assert_contains_ordered!(output2, ["sh, [\"-c\", \"exit 2\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn returns_error_on_missing_cmd() {
expect_error!(
r#"
[scripts]
basic.args = ["name"]
"#,
BONNIE_VERSION,
["basic", "Name"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_os_specific_kv_cmd() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic.cmd.generic = "exit 1"
basic.cmd.targets.linux = "exit 0"
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_os_specific_multistage_and_interpolation_cmd() {
let output = expect_exit_code!(
1,
r#"
env_files = ["src/.env"]
[scripts]
basic.cmd.generic = "exit 2"
basic.cmd.targets.linux = [
"echo %SHORTGREETING %%",
"echo %name && exit 1"
]
basic.args = ["name"]
basic.env_vars = ["SHORTGREETING"]
"#,
BONNIE_VERSION,
["basic", "Name", "foo", "bar"]
);
println!("{:?}", output);
assert_contains_ordered!(
output,
["sh, [\"-c\", \"echo Hello foo bar && echo Name && exit 1\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_custom_shell() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic.cmd.exec = "exit 0"
basic.cmd.shell = ["bash", "-c", "{COMMAND}"]
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["bash, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_custom_shell_with_delimiter() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic.cmd.exec = "exit 0"
basic.cmd.shell = { parts = ["bash", "-c", "{COMMAND}"], delimiter = " && " }
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["bash, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_custom_shell_and_os_specificity_and_multistage_and_interpolation() {
let output = expect_exit_code!(
1,
r#"
env_files = ["src/.env"]
[scripts]
basic.cmd.generic = "exit 2"
basic.cmd.targets.linux.exec = [
"echo %SHORTGREETING %%",
"echo %name && exit 1"
]
basic.cmd.targets.linux.shell = ["bash", "-c", "{COMMAND}"]
basic.args = ["name"]
basic.env_vars = ["SHORTGREETING"]
"#,
BONNIE_VERSION,
["basic", "Name", "foo", "bar"]
);
println!("{:?}", output);
assert_contains_ordered!(
output,
["bash, [\"-c\", \"echo Hello foo bar && echo Name && exit 1\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn returns_error_if_generic_os_specifier_not_given() {
expect_error!(
r#"
[scripts]
basic.cmd.targets.linux = "exit 0"
"#,
BONNIE_VERSION,
["basic"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn uses_simple_default_shell() {
let output = expect_exit_code!(
0,
r#"
default_shell = ["bash", "-c", "{COMMAND}"]
[scripts]
basic = "exit 0"
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["bash, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn uses_generic_default_shell() {
let output = expect_exit_code!(
0,
r#"
default_shell.generic = ["bash", "-c", "{COMMAND}"]
[scripts]
basic = "exit 0"
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["bash, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn uses_generic_default_shell_with_delimiter() {
let output = expect_exit_code!(
0,
r#"
default_shell.generic = { parts = ["bash", "-c", "{COMMAND}"], delimiter = " && " }
[scripts]
basic = "exit 0"
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["bash, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn uses_os_specific_default_shell() {
let output = expect_exit_code!(
0,
r#"
default_shell.generic = ["sh", "-c", "{COMMAND}"]
default_shell.targets.linux = ["bash", "-c", "{COMMAND}"]
[scripts]
basic = "exit 0"
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["bash, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_kv_simple_ordered_subcommands() {
let output = expect_exit_code!(
0,
r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = "test"
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output, ["sh, [\"-c\", \"exit 0\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_kv_complex_ordered_subcommands() {
let output = expect_exit_code!(
1,
r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = """
test {
Any => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output,
["sh, [\"-c\", \"exit 0\"]", "sh, [\"-c\", \"exit 1\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn returns_error_on_non_global_args_for_ordered_subcommands() {
expect_error!(
r#"
[scripts]
basic.subcommands.test = "echo %name && exit 0"
basic.subcommands.test.args = ["name"] # This has to be `basic.args` instead
basic.subcommands.other = "exit 1"
basic.order = """
test {
Any => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn returns_error_on_unordered_nesting_in_order() {
expect_error!(
r#"
[scripts]
basic.subcommands.test = "echo %name && exit 0"
basic.subcommands.test.args = ["name"] # This has to be `basic.args` instead
basic.subcommands.other = "exit 1"
basic.subcommands.nested.subcommands.test = "exit 0"
basic.subcommands.nested.subcommands.other = "exit 1"
basic.order = """
test {
Any => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn returns_error_on_cmd_and_ordered_subcommands() {
expect_error!(
r#"
[scripts]
basic.cmd = "exit 0"
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = """
test {
Any => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
}
// This test should basically represent the most complex use-case of Bonnie in terms of syntax
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_everything() {
let output = expect_exit_code!(
1,
r#"
env_files = ["src/.env"]
default_env.generic = ["sh", "-c", "{COMMAND}"]
default_env.targets.linux = ["bash", "-c", "{COMMAND}"]
[scripts]
basic.subcommands.test.cmd.generic = "exit 5"
basic.subcommands.test.cmd.targets.linux.exec = [
"echo %SHORTGREETING %%",
"echo %name && exit 1"
]
basic.subcommands.test.env_vars = ["SHORTGREETING"]
basic.subcommands.test.cmd.targets.linux.shell = ["sh", "-c", "{COMMAND}"]
basic.subcommands.nested.subcommands.test = "exit 2"
basic.subcommands.nested.subcommands.other = "exit 3"
basic.subcommands.nested.order = """
test {
Any => other
}
"""
basic.args = ["name"]
basic.order = """
test {
Any => nested {
Any => test
}
}
"""
"#,
BONNIE_VERSION,
["basic", "Name", "foo", "bar"]
);
println!("{:?}", output);
assert_contains_ordered!(
output,
[
"sh, [\"-c\", \"echo Hello foo bar && echo Name && exit 1\"]",
"sh, [\"-c\", \"exit 2\"]",
"sh, [\"-c\", \"exit 3\"]",
"sh, [\"-c\", \"echo Hello foo bar && echo Name && exit 1\"]"
]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_success_failure_order_control() {
let output1 = expect_exit_code!(
1,
r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = """
test {
Success => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output1,
["sh, [\"-c\", \"exit 0\"]", "sh, [\"-c\", \"exit 1\"]"]
);
let output2 = expect_exit_code!(
0,
r#"
[scripts]
basic.subcommands.test = "exit 1"
basic.subcommands.other = "exit 0"
basic.order = """
test {
Failure => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output2,
["sh, [\"-c\", \"exit 1\"]", "sh, [\"-c\", \"exit 0\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_exit_code_order_control() {
let output1 = expect_exit_code!(
1,
r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = """
test {
0 => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output1,
["sh, [\"-c\", \"exit 0\"]", "sh, [\"-c\", \"exit 1\"]"]
);
let output2 = expect_exit_code!(
0,
r#"
[scripts]
basic.subcommands.test = "exit 1"
basic.subcommands.other = "exit 0"
basic.order = """
test {
1 => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output2,
["sh, [\"-c\", \"exit 1\"]", "sh, [\"-c\", \"exit 0\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_not_exit_code_order_control() {
let output1 = expect_exit_code!(
1,
r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = """
test {
!1 => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output1,
["sh, [\"-c\", \"exit 0\"]", "sh, [\"-c\", \"exit 1\"]"]
);
let output2 = expect_exit_code!(
0,
r#"
[scripts]
basic.subcommands.test = "exit 1"
basic.subcommands.other = "exit 0"
basic.order = """
test {
!0 => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output2,
["sh, [\"-c\", \"exit 1\"]", "sh, [\"-c\", \"exit 0\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_any_none_order_control() {
let output1 = expect_exit_code!(
1,
r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = """
test {
Any => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output1,
["sh, [\"-c\", \"exit 0\"]", "sh, [\"-c\", \"exit 1\"]"]
);
let output2 = expect_exit_code!(
1,
r#"
[scripts]
basic.subcommands.test = "exit 1"
basic.subcommands.other = "exit 0"
basic.order = """
test {
None => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(output2, ["sh, [\"-c\", \"exit 1\"]"]);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_union_order_control() {
let output = expect_exit_code!(
1,
r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = """
test {
0|Success|2 => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output,
["sh, [\"-c\", \"exit 0\"]", "sh, [\"-c\", \"exit 1\"]"]
);
}
#[test]
#[cfg(target_os = "linux")] // This test will only work on Linux
fn succeeds_with_intersection_order_control() {
let output = expect_exit_code!(
1,
r#"
[scripts]
basic.subcommands.test = "exit 0"
basic.subcommands.other = "exit 1"
basic.order = """
test {
0+Success => other
}
"""
"#,
BONNIE_VERSION,
["basic"]
);
assert_contains_ordered!(
output,
["sh, [\"-c\", \"exit 0\"]", "sh, [\"-c\", \"exit 1\"]"]
);
}
| 29.719957 | 139 | 0.538034 | 3.0625 |
56d05ef03c12938cc7381edb5785cc3b3cd90ba0
| 2,044 |
ts
|
TypeScript
|
packages/compiler-dom/src/transforms/Transition.ts
|
btea/vue-next
|
0cf9ae62be21a6180f909e03091f087254ae3e52
|
[
"MIT"
] | 2,198 |
2022-01-17T10:25:48.000Z
|
2022-03-31T16:41:11.000Z
|
packages/compiler-dom/src/transforms/Transition.ts
|
btea/vue-next
|
0cf9ae62be21a6180f909e03091f087254ae3e52
|
[
"MIT"
] | 486 |
2022-01-17T10:59:41.000Z
|
2022-03-31T10:28:18.000Z
|
packages/compiler-dom/src/transforms/Transition.ts
|
btea/vue-next
|
0cf9ae62be21a6180f909e03091f087254ae3e52
|
[
"MIT"
] | 555 |
2022-01-17T12:55:31.000Z
|
2022-03-31T14:13:40.000Z
|
import {
NodeTransform,
NodeTypes,
ElementTypes,
ComponentNode,
IfBranchNode
} from '@vue/compiler-core'
import { TRANSITION } from '../runtimeHelpers'
import { createDOMCompilerError, DOMErrorCodes } from '../errors'
export const transformTransition: NodeTransform = (node, context) => {
if (
node.type === NodeTypes.ELEMENT &&
node.tagType === ElementTypes.COMPONENT
) {
const component = context.isBuiltInComponent(node.tag)
if (component === TRANSITION) {
return () => {
if (!node.children.length) {
return
}
// warn multiple transition children
if (hasMultipleChildren(node)) {
context.onError(
createDOMCompilerError(
DOMErrorCodes.X_TRANSITION_INVALID_CHILDREN,
{
start: node.children[0].loc.start,
end: node.children[node.children.length - 1].loc.end,
source: ''
}
)
)
}
        // check if it's a single child w/ v-show
// if yes, inject "persisted: true" to the transition props
const child = node.children[0]
if (child.type === NodeTypes.ELEMENT) {
for (const p of child.props) {
if (p.type === NodeTypes.DIRECTIVE && p.name === 'show') {
node.props.push({
type: NodeTypes.ATTRIBUTE,
name: 'persisted',
value: undefined,
loc: node.loc
})
}
}
}
}
}
}
}
function hasMultipleChildren(node: ComponentNode | IfBranchNode): boolean {
// #1352 filter out potential comment nodes.
const children = (node.children = node.children.filter(
c =>
c.type !== NodeTypes.COMMENT &&
!(c.type === NodeTypes.TEXT && !c.content.trim())
))
const child = children[0]
return (
children.length !== 1 ||
child.type === NodeTypes.FOR ||
(child.type === NodeTypes.IF && child.branches.some(hasMultipleChildren))
)
}
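// Editor's note (illustrative, not part of the original module): for a template
// like `<transition><div v-show="ok"/></transition>` this transform leaves the
// child untouched but appends a synthetic `persisted` attribute to the
// <transition> props, whereas `<transition><div/><span/></transition>` (or a
// v-for / multi-branch child) reports X_TRANSITION_INVALID_CHILDREN via
// context.onError.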
| 28.788732 | 77 | 0.559198 | 3.046875 |
b5f81c343b0e24261b3b783196948064518402de
| 6,974 |
rs
|
Rust
|
backend/server/src/state.rs
|
hgzimmerman/SWEN344-web-project
|
39c7f51d43646c1cf7d8ba4686195ef2c23a2a43
|
[
"MIT"
] | 1 |
2020-12-28T01:44:40.000Z
|
2020-12-28T01:44:40.000Z
|
backend/server/src/state.rs
|
hgzimmerman/SWEN344-web-project
|
39c7f51d43646c1cf7d8ba4686195ef2c23a2a43
|
[
"MIT"
] | 72 |
2019-01-26T14:34:11.000Z
|
2019-04-30T00:27:21.000Z
|
backend/server/src/state.rs
|
hgzimmerman/SWEN344-web-project
|
39c7f51d43646c1cf7d8ba4686195ef2c23a2a43
|
[
"MIT"
] | null | null | null |
//! Represents the shared server resources that all requests may utilize.
use crate::{error::Error, server_auth::secret_filter};
use apply::Apply;
use authorization::Secret;
use egg_mode::KeyPair;
use hyper::{
client::{connect::dns::GaiResolver, HttpConnector},
Body, Client,
};
use hyper_tls::HttpsConnector;
use pool::{init_pool, Pool, PoolConfig, PooledConn, DATABASE_URL};
use rand::{distributions::Alphanumeric, thread_rng, Rng};
use std::path::PathBuf;
use warp::{Filter, Rejection};
/// Simplified type for representing a HttpClient.
pub type HttpsClient = Client<HttpsConnector<HttpConnector<GaiResolver>>, Body>;
/// State that is passed around to all of the api handlers.
/// It can be used to acquire connections to the database,
/// or to reference the key that signs the access tokens.
///
/// These entities are acquired by running a filter function that brings them
/// into the scope of the relevant api.
pub struct State {
/// A pool of database connections.
database_connection_pool: Pool,
/// The secret key.
secret: Secret,
/// Https client
https: HttpsClient,
/// Twitter consumer token
twitter_consumer_token: KeyPair,
/// The path to the server directory.
/// This allows file resources to have a common reference point when determining from where to serve assets.
server_lib_root: PathBuf,
/// Is the server running in a production environment
is_production: bool,
}
/// Configuration object for creating the state.
///
/// If unspecified, it will default to a sane default.
#[derive(Debug, Default)]
pub struct StateConfig {
pub secret: Option<Secret>,
pub max_pool_size: Option<u32>,
pub server_lib_root: Option<PathBuf>,
pub is_production: bool,
}
impl State {
/// Creates a new state.
pub fn new(conf: StateConfig) -> Self {
const RANDOM_KEY_LENGTH: usize = 200;
let secret = conf.secret.unwrap_or_else(|| {
// Generate a new random key if none is provided.
thread_rng()
.sample_iter(&Alphanumeric)
.take(RANDOM_KEY_LENGTH)
.collect::<String>()
.apply(|s| Secret::new(&s))
});
let pool_conf = PoolConfig {
max_connections: conf.max_pool_size,
..Default::default()
};
let pool = init_pool(DATABASE_URL, pool_conf);
let https = HttpsConnector::new(4).unwrap();
let client = Client::builder().build::<_, _>(https);
let twitter_con_token = get_twitter_con_token();
let root = conf.server_lib_root.unwrap_or_else(|| PathBuf::from("./"));
State {
database_connection_pool: pool, //db_filter(pool),
secret,
https: client,
twitter_consumer_token: twitter_con_token.clone(),
server_lib_root: root,
is_production: conf.is_production,
}
}
/// Gets a pooled connection to the database.
pub fn db(&self) -> impl Filter<Extract = (PooledConn,), Error = Rejection> + Clone {
/// Filter that exposes connections to the database to individual filter requests
fn db_filter(pool: Pool) -> impl Filter<Extract = (PooledConn,), Error = Rejection> + Clone {
fn get_conn_from_pool(pool: &Pool) -> Result<PooledConn, Rejection> {
pool.clone()
.get() // Will get the connection from the pool, or wait a specified time until one becomes available.
.map_err(|_| {
log::error!("Pool exhausted: could not get database connection.");
Error::DatabaseUnavailable.reject()
})
}
warp::any().and_then(move || -> Result<PooledConn, Rejection> { get_conn_from_pool(&pool) })
}
db_filter(self.database_connection_pool.clone())
}
/// Gets the secret used for authoring JWTs
pub fn secret(&self) -> impl Filter<Extract = (Secret,), Error = Rejection> + Clone {
secret_filter(self.secret.clone())
}
/// Gets the https client used for making dependent api calls.
pub fn https_client(&self) -> impl Filter<Extract = (HttpsClient,), Error = Rejection> + Clone {
/// Function that creates the HttpClient filter.
fn http_filter(
client: HttpsClient,
) -> impl Filter<Extract = (HttpsClient,), Error = Rejection> + Clone {
// This needs to be able to return a Result w/a Rejection, because there is no way to specify the type of
// warp::never::Never because it is private, precluding the possibility of using map instead of and_then().
// This adds space overhead, but not nearly as much as using a boxed filter.
warp::any().and_then(move || -> Result<HttpsClient, Rejection> { Ok(client.clone()) })
}
http_filter(self.https.clone())
}
/// Access the twitter consumer token.
pub fn twitter_consumer_token(&self) -> impl Filter<Extract = (KeyPair,), Error = Rejection> + Clone {
fn twitter_consumer_token_filter(twitter_consumer_token: KeyPair) -> impl Filter<Extract = (KeyPair,), Error = Rejection> + Clone {
warp::any().and_then(move || -> Result<KeyPair, Rejection> { Ok(twitter_consumer_token.clone()) })
}
twitter_consumer_token_filter(self.twitter_consumer_token.clone())
}
pub fn server_lib_root(&self) -> PathBuf {
self.server_lib_root.clone()
}
pub fn is_production(&self) -> bool {
self.is_production
}
/// Creates a new state object from an existing object pool.
/// This is useful if using fixtures.
#[cfg(test)]
pub fn testing_init(pool: Pool, secret: Secret) -> Self {
use std::time::Duration;
let https = HttpsConnector::new(1).unwrap();
let client = Client::builder()
.keep_alive_timeout(Some(Duration::new(12, 0)))
.build::<_, Body>(https);
let twitter_con_token = get_twitter_con_token();
State {
database_connection_pool: pool,
secret,
https: client,
twitter_consumer_token: twitter_con_token,
server_lib_root: PathBuf::from("./"), // THIS makes the assumption that the tests are run from the backend/server dir.
is_production: false,
}
}
}
/// Gets the Twitter consumer key pair for the server.
/// This represents the authenticity of the application
fn get_twitter_con_token() -> KeyPair {
// TODO move getting these into a config object, or get them directly from the filesystem.
// These definitely shouldn't be in source code, but I don't care,
// I just want this to work right now. Also, this is a school project.
const KEY: &str = "Pq2sA4Lfbovd4SLQhSQ6UPEVg";
const SECRET: &str = "uK6U7Xqj2QThlm6H3y8dKSH3itZgpo9AVhR5or80X9umZc62ln";
egg_mode::KeyPair::new(KEY, SECRET)
}
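// Editor's sketch (hypothetical route, not from the original server): the filters
// above are meant to be composed per request handler, along these lines (using the
// same warp combinator style as the filters in this file):
//
// let state = State::new(StateConfig::default());
// let health = warp::path("health")
//     .and(state.db())
//     .and(state.secret())
//     .and_then(|_conn: PooledConn, _secret: Secret| -> Result<String, Rejection> {
//         Ok("ok".to_string())
//     });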
| 38.530387 | 139 | 0.640379 | 3.203125 |
cb2eb9c379ba8f1b243b56d2570d91967aa512c5
| 1,458 |
go
|
Go
|
go-snippets/go-routines-example.go
|
ferralucho/mercado-libre-accelerator
|
fee70eadd708b73383f2c9314ff3d0d8fd359f6a
|
[
"MIT"
] | null | null | null |
go-snippets/go-routines-example.go
|
ferralucho/mercado-libre-accelerator
|
fee70eadd708b73383f2c9314ff3d0d8fd359f6a
|
[
"MIT"
] | null | null | null |
go-snippets/go-routines-example.go
|
ferralucho/mercado-libre-accelerator
|
fee70eadd708b73383f2c9314ff3d0d8fd359f6a
|
[
"MIT"
] | null | null | null |
package main
import (
"fmt"
"net/http"
"sync"
)
func miFuncion(wg *sync.WaitGroup) {
fmt.Println("Dentro de la goroutine")
wg.Done()
}
/*
func main() {
fmt.Println("Inicio del programa")
var wg sync.WaitGroup
wg.Add(1)
//cuenta cuantos hilos de ejecucion espera que todos los procesos terminen
go miFuncion(&wg)
wg.Wait()
fmt.Printf("Fin del programa")
}
*/
/*
func main() {
fmt.Println("Hello World")
var waitgroup sync.WaitGroup
waitgroup.Add(1)
go func() {
fmt.Println("Inside my goroutine")
waitgroup.Done()
}()
waitgroup.Wait()
fmt.Println("Finished Execution")
}
*/
/*
go func(url string) {
fmt.Println(url)
}(url)
*/
var urls = []string {
"https://www.google.com",
"https://www.lavoz.com.ar",
"https://www.mercadolibre.com",
}
func recuperar(url string, wg *sync.WaitGroup) {
fmt.Println(url)
res, err := http.Get(url)
if err != nil {
fmt.Println(err)
}
wg.Done()
fmt.Println(res.Status)
}
func enviarRequest(w http.ResponseWriter, r *http.Request){
fmt.Println("Enviamos request al endpoint")
var waitgroup sync.WaitGroup
for _, url := range urls {
waitgroup.Add(1)
go recuperar(url, &waitgroup)
}
waitgroup.Wait()
fmt.Println("Devuelve una respuesta")
fmt.Println("Proceso terminado")
fmt.Fprint(w, "Proceso terminado")
}
func handleRequest() {
http.HandleFunc("/", enviarRequest)
http.ListenAndServe(":8080", nil)
}
func main() {
handleRequest()
}
| 16.953488 | 75 | 0.663923 | 3.078125 |
50e82d6dc007ad89757899ad96fb9adafaaa685e
| 16,063 |
go
|
Go
|
xslice/xslice_test.go
|
HappyFacade/gokit
|
0448a17eb37a301e70ceeac039be84503750f791
|
[
"Apache-2.0"
] | null | null | null |
xslice/xslice_test.go
|
HappyFacade/gokit
|
0448a17eb37a301e70ceeac039be84503750f791
|
[
"Apache-2.0"
] | null | null | null |
xslice/xslice_test.go
|
HappyFacade/gokit
|
0448a17eb37a301e70ceeac039be84503750f791
|
[
"Apache-2.0"
] | null | null | null |
/*
* Copyright 2012-2020 Li Kexian
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* A toolkit for Golang development
* https://www.likexian.com/
*/
package xslice
import (
"strings"
"testing"
"github.com/likexian/gokit/assert"
)
type a struct {
x, y int
}
type b struct {
x, y int
}
func TestVersion(t *testing.T) {
assert.Contains(t, Version(), ".")
assert.Contains(t, Author(), "likexian")
assert.Contains(t, License(), "Apache License")
}
func TestIsSlice(t *testing.T) {
assert.False(t, IsSlice(0))
assert.False(t, IsSlice("0"))
assert.True(t, IsSlice([]int{0, 1, 2}))
assert.True(t, IsSlice([]string{"0", "1", "2"}))
}
func TestUnique(t *testing.T) {
// Not a slice
tests := []struct {
in interface{}
out interface{}
}{
{1, 1},
{1.0, 1.0},
{true, true},
}
for _, v := range tests {
assert.Panic(t, func() { Unique(v.in) })
}
// Is a slice
tests = []struct {
in interface{}
out interface{}
}{
{[]int{0, 0, 1, 1, 1, 2, 2, 3}, []int{0, 1, 2, 3}},
{[]int8{0, 0, 1, 1, 1, 2, 2, 3}, []int8{0, 1, 2, 3}},
{[]int16{0, 0, 1, 1, 1, 2, 2, 3}, []int16{0, 1, 2, 3}},
{[]int32{0, 0, 1, 1, 1, 2, 2, 3}, []int32{0, 1, 2, 3}},
{[]int64{0, 0, 1, 1, 1, 2, 2, 3}, []int64{0, 1, 2, 3}},
{[]uint{0, 0, 1, 1, 1, 2, 2, 3}, []uint{0, 1, 2, 3}},
{[]uint8{0, 0, 1, 1, 1, 2, 2, 3}, []uint8{0, 1, 2, 3}},
{[]uint16{0, 0, 1, 1, 1, 2, 2, 3}, []uint16{0, 1, 2, 3}},
{[]uint32{0, 0, 1, 1, 1, 2, 2, 3}, []uint32{0, 1, 2, 3}},
{[]uint64{0, 0, 1, 1, 1, 2, 2, 3}, []uint64{0, 1, 2, 3}},
{[]float32{0, 0, 1, 1, 1, 2, 2, 3}, []float32{0, 1, 2, 3}},
{[]float64{0, 0, 1, 1, 1, 2, 2, 3}, []float64{0, 1, 2, 3}},
{[]string{"a", "a", "b", "b", "b", "c"}, []string{"a", "b", "c"}},
{[]bool{true, true, true, false}, []bool{true, false}},
{[]interface{}{0, 1, 1, "1", 2}, []interface{}{0, 1, "1", 2}},
{[]interface{}{[]int{0, 1}, []int{0, 1}, []int{1, 2}}, []interface{}{[]int{0, 1}, []int{1, 2}}},
{[]interface{}{a{0, 1}, a{1, 2}, a{0, 1}, b{0, 1}}, []interface{}{a{0, 1}, a{1, 2}, b{0, 1}}},
}
for _, v := range tests {
assert.Equal(t, Unique(v.in), v.out)
}
}
func TestIsUnique(t *testing.T) {
// Not a slice
tests := []struct {
in interface{}
}{
{1},
{1.0},
{true},
}
for _, v := range tests {
assert.Panic(t, func() { IsUnique(v.in) })
}
// Is a slice
tests = []struct {
in interface{}
}{
{[]int{0, 0, 1, 1, 1, 2, 2, 3}},
{[]int8{0, 0, 1, 1, 1, 2, 2, 3}},
{[]int16{0, 0, 1, 1, 1, 2, 2, 3}},
{[]int32{0, 0, 1, 1, 1, 2, 2, 3}},
{[]int64{0, 0, 1, 1, 1, 2, 2, 3}},
{[]uint{0, 0, 1, 1, 1, 2, 2, 3}},
{[]uint8{0, 0, 1, 1, 1, 2, 2, 3}},
{[]uint16{0, 0, 1, 1, 1, 2, 2, 3}},
{[]uint32{0, 0, 1, 1, 1, 2, 2, 3}},
{[]uint64{0, 0, 1, 1, 1, 2, 2, 3}},
{[]float32{0, 0, 1, 1, 1, 2, 2, 3}},
{[]float64{0, 0, 1, 1, 1, 2, 2, 3}},
{[]string{"a", "a", "b", "b", "b", "c"}},
{[]bool{true, true, true, false}},
{[]interface{}{0, 1, 1, "1", 2}},
{[]interface{}{[]int{0, 1}, []int{0, 1}, []int{1, 2}}},
{[]interface{}{a{0, 1}, a{1, 2}, a{0, 1}, b{0, 1}}},
}
for _, v := range tests {
assert.False(t, IsUnique(v.in))
}
// Is a slice
tests = []struct {
in interface{}
}{
{[]int{1}},
{[]int{0, 1, 2, 3}},
{[]int8{0, 1, 2, 3}},
{[]int16{0, 1, 2, 3}},
{[]int32{0, 1, 2, 3}},
{[]int64{0, 1, 2, 3}},
{[]uint{0, 1, 2, 3}},
{[]uint8{0, 1, 2, 3}},
{[]uint16{0, 1, 2, 3}},
{[]uint32{0, 1, 2, 3}},
{[]uint64{0, 1, 2, 3}},
{[]float32{0, 1, 2, 3}},
{[]float64{0, 1, 2, 3}},
{[]string{"a", "b", "c"}},
{[]bool{true, false}},
{[]interface{}{0, 1, "1", 2}},
{[]interface{}{[]int{0, 1}, []int{1, 2}}},
{[]interface{}{a{0, 1}, a{1, 2}, b{0, 1}}},
}
for _, v := range tests {
assert.True(t, IsUnique(v.in))
}
}
func TestIntersect(t *testing.T) {
// Not a slice
tests := []struct {
x interface{}
y interface{}
out interface{}
}{
{1, 1, nil},
{1.0, 1.0, nil},
{true, true, nil},
{[]int{1}, 1, nil},
{[]float64{1.0}, 1, nil},
{[]bool{true}, true, nil},
}
for _, v := range tests {
assert.Panic(t, func() { Intersect(v.x, v.y) })
}
// Is a slice
tests = []struct {
x interface{}
y interface{}
out interface{}
}{
{[]int{0, 1, 2}, []int{1, 2, 3}, []int{1, 2}},
{[]int8{0, 1, 2}, []int8{1, 2, 3}, []int8{1, 2}},
{[]int16{0, 1, 2}, []int16{1, 2, 3}, []int16{1, 2}},
{[]int32{0, 1, 2}, []int32{1, 2, 3}, []int32{1, 2}},
{[]int64{0, 1, 2}, []int64{1, 2, 3}, []int64{1, 2}},
{[]float32{0, 1, 2}, []float32{1, 2, 3}, []float32{1, 2}},
{[]float64{0, 1, 2}, []float64{1, 2, 3}, []float64{1, 2}},
{[]string{"0", "1", "2"}, []string{"1", "2", "3"}, []string{"1", "2"}},
{[]bool{true, false}, []bool{true}, []bool{true}},
{[]interface{}{0, 1, "1", 2}, []interface{}{1, "1", 2, 3}, []interface{}{1, "1", 2}},
{[]interface{}{[]int{0, 1}, []int{1, 2}}, []interface{}{[]int{1, 2}, []int{2, 3}}, []interface{}{[]int{1, 2}}},
{[]interface{}{a{0, 1}, a{1, 2}, b{0, 1}}, []interface{}{a{1, 2}, b{2, 3}}, []interface{}{a{1, 2}}},
}
for _, v := range tests {
assert.Equal(t, Intersect(v.x, v.y), v.out)
}
}
func TestDifferent(t *testing.T) {
// Not a slice
tests := []struct {
x interface{}
y interface{}
out interface{}
}{
{1, 1, nil},
{1.0, 1.0, nil},
{true, true, nil},
{[]int{1}, 1, nil},
{[]float64{1.0}, 1, nil},
{[]bool{true}, true, nil},
}
for _, v := range tests {
assert.Panic(t, func() { Different(v.x, v.y) })
}
// Is a slice
tests = []struct {
x interface{}
y interface{}
out interface{}
}{
{[]int{0, 1, 2}, []int{1, 2, 3}, []int{0}},
{[]int8{0, 1, 2}, []int8{1, 2, 3}, []int8{0}},
{[]int16{0, 1, 2}, []int16{1, 2, 3}, []int16{0}},
{[]int32{0, 1, 2}, []int32{1, 2, 3}, []int32{0}},
{[]int64{0, 1, 2}, []int64{1, 2, 3}, []int64{0}},
{[]float32{0, 1, 2}, []float32{1, 2, 3}, []float32{0}},
{[]float64{0, 1, 2}, []float64{1, 2, 3}, []float64{0}},
{[]string{"0", "1", "2"}, []string{"1", "2", "3"}, []string{"0"}},
{[]bool{true, false}, []bool{true}, []bool{false}},
{[]interface{}{0, 1, "1", 2}, []interface{}{1, "1", 2, 3}, []interface{}{0}},
{[]interface{}{[]int{0, 1}, []int{1, 2}}, []interface{}{[]int{1, 2}, []int{2, 3}}, []interface{}{[]int{0, 1}}},
{[]interface{}{a{0, 1}, a{1, 2}, b{0, 1}}, []interface{}{a{1, 2}, b{2, 3}}, []interface{}{a{0, 1}, b{0, 1}}},
}
for _, v := range tests {
assert.Equal(t, Different(v.x, v.y), v.out)
}
}
func TestMerge(t *testing.T) {
// Not a slice
tests := []struct {
x interface{}
y interface{}
out interface{}
}{
{1, 1, 1},
{1.0, 1.0, 1.0},
{true, true, true},
{[]int{1}, 1, []int{1}},
{[]float64{1.0}, 1, []float64{1.0}},
{[]bool{true}, true, []bool{true}},
}
for _, v := range tests {
assert.Panic(t, func() { Merge(v.x, v.y) })
}
// Is a slice
tests = []struct {
x interface{}
y interface{}
out interface{}
}{
{[]int{0, 1, 2}, []int{1, 2, 3}, []int{0, 1, 2, 3}},
{[]int8{0, 1, 2}, []int8{1, 2, 3}, []int8{0, 1, 2, 3}},
{[]int16{0, 1, 2}, []int16{1, 2, 3}, []int16{0, 1, 2, 3}},
{[]int32{0, 1, 2}, []int32{1, 2, 3}, []int32{0, 1, 2, 3}},
{[]int64{0, 1, 2}, []int64{1, 2, 3}, []int64{0, 1, 2, 3}},
{[]float32{0, 1, 2}, []float32{1, 2, 3}, []float32{0, 1, 2, 3}},
{[]float64{0, 1, 2}, []float64{1, 2, 3}, []float64{0, 1, 2, 3}},
{[]string{"0", "1", "2"}, []string{"1", "2", "3"}, []string{"0", "1", "2", "3"}},
{[]bool{true, false}, []bool{true}, []bool{true, false}},
{[]interface{}{0, 1, "1", 2}, []interface{}{1, "1", 2, 3}, []interface{}{0, 1, "1", 2, 3}},
{[]interface{}{[]int{0, 1}, []int{1, 2}}, []interface{}{[]int{1, 2}, []int{2, 3}}, []interface{}{[]int{0, 1}, []int{1, 2}, []int{2, 3}}},
{[]interface{}{a{0, 1}, a{1, 2}, b{0, 1}}, []interface{}{a{1, 2}, b{2, 3}}, []interface{}{a{0, 1}, a{1, 2}, b{0, 1}, b{2, 3}}},
}
for _, v := range tests {
assert.Equal(t, Merge(v.x, v.y), v.out)
}
}
func TestReverse(t *testing.T) {
// Not a slice
tests := []struct {
in interface{}
out interface{}
}{
{1, 1},
{1.0, 1.0},
{true, true},
}
for _, v := range tests {
assert.Panic(t, func() { Reverse(v.in) })
}
// Is a slice
tests = []struct {
in interface{}
out interface{}
}{
{[]int{0, 1, 2, 3, 4}, []int{4, 3, 2, 1, 0}},
{[]int8{0, 1, 2, 3, 4}, []int8{4, 3, 2, 1, 0}},
{[]int16{0, 1, 2, 3, 4}, []int16{4, 3, 2, 1, 0}},
{[]int32{0, 1, 2, 3, 4}, []int32{4, 3, 2, 1, 0}},
{[]int64{0, 1, 2, 3, 4}, []int64{4, 3, 2, 1, 0}},
{[]float32{0, 1, 2, 3, 4}, []float32{4, 3, 2, 1, 0}},
{[]float64{0, 1, 2, 3, 4}, []float64{4, 3, 2, 1, 0}},
{[]string{"a", "b", "c", "d", "e"}, []string{"e", "d", "c", "b", "a"}},
{[]bool{true, false, true, false}, []bool{false, true, false, true}},
{[]interface{}{0, 1, 2, "3", 3}, []interface{}{3, "3", 2, 1, 0}},
{[]interface{}{[]int{0, 1}, []int{1, 2}}, []interface{}{[]int{1, 2}, []int{0, 1}}},
{[]interface{}{a{0, 1}, a{1, 2}, b{0, 1}}, []interface{}{b{0, 1}, a{1, 2}, a{0, 1}}},
}
for _, v := range tests {
Reverse(v.in)
assert.Equal(t, v.in, v.out)
}
}
func TestShuffle(t *testing.T) {
// Not a slice
tests := []struct {
in interface{}
out interface{}
}{
{1, 1},
{1.0, 1.0},
{true, true},
}
for _, v := range tests {
assert.Panic(t, func() { Shuffle(v.in) })
}
// Is a slice
tests = []struct {
in interface{}
out interface{}
}{
{[]int{0, 1, 2, 3, 4}, []int{0, 1, 2, 3, 4}},
{[]int8{0, 1, 2, 3, 4}, []int8{0, 1, 2, 3, 4}},
{[]int16{0, 1, 2, 3, 4}, []int16{0, 1, 2, 3, 4}},
{[]int32{0, 1, 2, 3, 4}, []int32{0, 1, 2, 3, 4}},
{[]int64{0, 1, 2, 3, 4}, []int64{0, 1, 2, 3, 4}},
{[]float32{0, 1, 2, 3, 4}, []float32{0, 1, 2, 3, 4}},
{[]float64{0, 1, 2, 3, 4}, []float64{0, 1, 2, 3, 4}},
{[]string{"a", "b", "c", "d", "e"}, []string{"a", "b", "c", "d", "e"}},
{[]bool{true, false, false, true, true}, []bool{true, false, false, true, true}},
{[]interface{}{0, 1, 2, "3", 3}, []interface{}{0, 1, 2, "3", 3}},
{[]interface{}{[]int{0, 1}, []int{1, 2}, []int{1, 2}}, []interface{}{[]int{0, 1}, []int{1, 2}, []int{1, 2}}},
{[]interface{}{a{0, 1}, a{1, 2}, b{0, 1}}, []interface{}{a{0, 1}, a{1, 2}, b{0, 1}}},
}
for _, v := range tests {
Shuffle(v.in)
assert.NotEqual(t, v.in, v.out)
}
}
func TestFill(t *testing.T) {
tests := []struct {
v interface{}
n int
out interface{}
}{
{1, -1, nil},
{1, 0, nil},
}
for _, v := range tests {
assert.Panic(t, func() { Fill(v.v, v.n) })
}
tests = []struct {
v interface{}
n int
out interface{}
}{
{1, 1, []int{1}},
{1, 3, []int{1, 1, 1}},
{int(1), 3, []int{1, 1, 1}},
{int8(1), 3, []int8{1, 1, 1}},
{int16(1), 3, []int16{1, 1, 1}},
{int32(1), 3, []int32{1, 1, 1}},
{int64(1), 3, []int64{1, 1, 1}},
{float32(1), 3, []float32{1, 1, 1}},
{float64(1), 3, []float64{1, 1, 1}},
{"a", 3, []string{"a", "a", "a"}},
{true, 3, []bool{true, true, true}},
{[]int{1, 2}, 3, [][]int{{1, 2}, {1, 2}, {1, 2}}},
{a{1, 2}, 3, []a{{1, 2}, {1, 2}, {1, 2}}},
{[]interface{}{0, "1"}, 3, [][]interface{}{{0, "1"}, {0, "1"}, {0, "1"}}},
{[]interface{}{[]int{0, 1}}, 3, [][]interface{}{{[]int{0, 1}}, {[]int{0, 1}}, {[]int{0, 1}}}},
{[]interface{}{a{0, 1}}, 3, [][]interface{}{{a{x: 0, y: 1}}, {a{x: 0, y: 1}}, {a{x: 0, y: 1}}}},
}
for _, v := range tests {
assert.Equal(t, Fill(v.v, v.n), v.out)
}
}
func TestChunk(t *testing.T) {
tests := []struct {
v interface{}
n int
out interface{}
}{
{1, 1, 1},
{[]int{1}, 0, nil},
}
for _, v := range tests {
assert.Panic(t, func() { Chunk(v.v, v.n) })
}
tests = []struct {
v interface{}
n int
out interface{}
}{
{[]int{0, 1, 2}, 1, [][]int{{0}, {1}, {2}}},
{[]int{0, 1, 2, 3, 4}, 2, [][]int{{0, 1}, {2, 3}, {4}}},
{[]int{0, 1, 2, 3, 4, 5}, 2, [][]int{{0, 1}, {2, 3}, {4, 5}}},
{[]string{"a", "b", "c", "d", "e"}, 3, [][]string{{"a", "b", "c"}, {"d", "e"}}},
{[]interface{}{a{0, 1}, b{2, 3}, a{4, 5}}, 2, [][]interface{}{{a{0, 1}, b{2, 3}}, {a{4, 5}}}},
}
for _, v := range tests {
assert.Equal(t, Chunk(v.v, v.n), v.out)
}
}
func TestConcat(t *testing.T) {
tests := []struct {
in interface{}
out interface{}
}{
{1, 1},
}
for _, v := range tests {
assert.Panic(t, func() { Concat(v.in) })
}
tests = []struct {
in interface{}
out interface{}
}{
{[]int{}, []int{}},
{[]int{0, 1, 2, 3, 4}, []int{0, 1, 2, 3, 4}},
{[][]int{{0, 1}, {2, 3}, {4}}, []int{0, 1, 2, 3, 4}},
{[][]string{{"a", "b"}, {"c"}, {"d", "e"}}, []string{"a", "b", "c", "d", "e"}},
{[][]interface{}{{a{0, 1}, b{0, 1}}, {a{1, 2}}}, []interface{}{a{0, 1}, b{0, 1}, a{1, 2}}},
}
for _, v := range tests {
assert.Equal(t, Concat(v.in), v.out)
}
}
func TestFilter(t *testing.T) {
// Panic tests
tests := []struct {
v interface{}
f interface{}
out interface{}
}{
{1, nil, 1},
{[]int{1}, nil, nil},
{[]int{1}, 1, nil},
{[]int{1}, func() {}, nil},
{[]int{1}, func(v int) {}, nil},
{[]int{1}, func(v int) int { return v }, nil},
}
for _, v := range tests {
assert.Panic(t, func() { Filter(v.v, v.f) })
}
// General tests
tests = []struct {
v interface{}
f interface{}
out interface{}
}{
{[]interface{}{0, 1, nil, 2}, func(v interface{}) bool { return v != nil }, []interface{}{0, 1, 2}},
{[]int{-2, -1, 0, 1, 2}, func(v int) bool { return v >= 0 }, []int{0, 1, 2}},
{[]string{"a_0", "b_1", "a_1"}, func(v string) bool { return strings.HasPrefix(v, "a_") }, []string{"a_0", "a_1"}},
{[]bool{true, false, false}, func(v bool) bool { return !v }, []bool{false, false}},
}
for _, v := range tests {
assert.Equal(t, Filter(v.v, v.f), v.out)
}
}
func TestMap(t *testing.T) {
// Panic tests
tests := []struct {
v interface{}
f interface{}
out interface{}
}{
{1, nil, 1},
{[]int{1}, nil, nil},
{[]int{1}, 1, nil},
{[]int{1}, func() {}, nil},
{[]int{1}, func(v int) {}, nil},
}
for _, v := range tests {
assert.Panic(t, func() { Map(v.v, v.f) })
}
// General tests
tests = []struct {
v interface{}
f interface{}
out interface{}
}{
{[]int{1, 2, 3, 4, 5}, func(v int) int { return v * v * v }, []int{1, 8, 27, 64, 125}},
{[]int{-2, -1, 0, 1, 2}, func(v int) bool { return v > 0 }, []bool{false, false, false, true, true}},
{[]string{"a", "b", "c"}, func(v string) string { return "x_" + v }, []string{"x_a", "x_b", "x_c"}},
{[]bool{true, false, false}, func(v bool) bool { return !v }, []bool{false, true, true}},
{[]interface{}{1, nil}, func(v interface{}) interface{} { return assert.If(v == nil, -1, v) }, []interface{}{1, -1}},
}
for _, v := range tests {
assert.Equal(t, Map(v.v, v.f), v.out)
}
}
func TestReduce(t *testing.T) {
// Panic tests
tests := []struct {
v interface{}
f interface{}
out interface{}
}{
{1, nil, 1},
{[]int{}, nil, nil},
{[]int{0, 1}, nil, nil},
{[]int{0, 1}, 1, nil},
{[]int{0, 1}, func() {}, nil},
{[]int{0, 1}, func(x int) {}, nil},
{[]int{0, 1}, func(x, y int) {}, nil},
{[]int{0, 1}, func(x bool, y int) int { return y }, nil},
{[]int{0, 1}, func(x int, y bool) int { return x }, nil},
{[]int{0, 1}, func(x int, y int) bool { return true }, nil},
}
for _, v := range tests {
assert.Panic(t, func() { Reduce(v.v, v.f) })
}
// General tests
tests = []struct {
v interface{}
f interface{}
out interface{}
}{
{[]int{1}, func(x, y int) int { return x + y }, 1},
{[]int{1, 2}, func(x, y int) int { return x + y }, 3},
{[]int{1, 2, 3, 4}, func(x, y int) int { return x * y }, 24},
}
for _, v := range tests {
assert.Equal(t, Reduce(v.v, v.f).(int), v.out)
}
}
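// Editor's addition (hypothetical test, not in the original suite): a short
// end-to-end sketch chaining the helpers exercised above; the expected values
// follow directly from the behaviour asserted by the tests in this file.
func TestChainedUsageSketch(t *testing.T) {
	merged := Merge([]int{0, 1, 2}, []int{2, 3}) // -> []int{0, 1, 2, 3}
	doubled := Map(merged, func(v int) int { return v * 2 })
	assert.Equal(t, doubled, []int{0, 2, 4, 6})
}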
| 27.364566 | 139 | 0.474195 | 3.09375 |
0764590f7d911ba9df9f4374e406422b4e2367f2
| 2,253 |
rs
|
Rust
|
rs/game.rs
|
canufeel/kitty-wars-yew-front
|
5cea75a73d9b65c24838b606a3b434e7490fdf88
|
[
"Apache-2.0",
"MIT"
] | 2 |
2020-02-21T22:39:53.000Z
|
2020-04-14T22:12:53.000Z
|
rs/game.rs
|
canufeel/kitty-wars-yew-front
|
5cea75a73d9b65c24838b606a3b434e7490fdf88
|
[
"Apache-2.0",
"MIT"
] | 6 |
2021-05-11T01:33:26.000Z
|
2022-02-26T22:52:17.000Z
|
rs/game.rs
|
canufeel/kitty-wars-yew-front
|
5cea75a73d9b65c24838b606a3b434e7490fdf88
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
use std::collections::HashMap;
use yew::{Html, html, Callback};
use stdweb::web::event::ClickEvent;
pub enum ItemType {
Weapon,
Armor
}
pub struct Item {
item_type: ItemType,
item_power: String
}
impl Item {
pub fn new(
item_type: ItemType,
item_power: String
) -> Self {
Item {
item_power,
item_type
}
}
}
pub struct Player {
weapon_id: String,
armor_id: String,
kitty_id: String,
is_battling: bool
}
impl Player {
pub fn new(
weapon_id: String,
armor_id: String,
kitty_id: String,
) -> Self {
Player {
weapon_id,
armor_id,
kitty_id,
is_battling: false
}
}
pub fn set_battling(&mut self, is_battling: bool) {
self.is_battling = is_battling;
}
}
pub struct PlayerState {
pub account: String,
players: HashMap<String, Player>,
items: HashMap<String, Item>
}
impl PlayerState {
pub fn new(
account: String,
players: HashMap<String, Player>,
items: HashMap<String, Item>
) -> Self {
PlayerState {
account,
players,
items
}
}
pub fn has_player_for_account(&self) -> bool {
self.players.contains_key(&self.account)
}
fn get_items_for_current_player(&self) -> Option<(&Item, &Item)> {
match self.players.get(&self.account) {
Some(items) => match (self.items.get(&items.weapon_id), self.items.get(&items.armor_id)) {
(Some(weapon), Some(armor)) => Some((weapon, armor)),
_ => None,
},
_ => None
}
}
pub fn get_player_details(&self, on_join: Callback<ClickEvent>) -> Html {
let load_finished_data = match self.get_items_for_current_player() {
None => html! {
<div class="join">
<button onclick=on_join>{ "Join" }</button>
</div>
},
Some((weapon, armor)) => html!{
<div class="player-details">
<div class="weapon">
{ format!("Weapon: {}", weapon.item_power) }
</div>
<div class="armor">
{ format!("Armor: {}", armor.item_power) }
</div>
</div>
}
};
html! {
<div class="finished">
<p>{ format!("Hello, {}", self.account) }</p>
{ load_finished_data }
</div>
}
}
}
| 20.297297 | 96 | 0.573014 | 3.203125 |
f06ae02416b8f8f9bb909dbd1c4d484476e5b8f7
| 4,498 |
py
|
Python
|
examples/pykey60/code-1.py
|
lesley-byte/pykey
|
ce21b5b6c0da938bf24891e5acb196d6779c433a
|
[
"MIT"
] | null | null | null |
examples/pykey60/code-1.py
|
lesley-byte/pykey
|
ce21b5b6c0da938bf24891e5acb196d6779c433a
|
[
"MIT"
] | null | null | null |
examples/pykey60/code-1.py
|
lesley-byte/pykey
|
ce21b5b6c0da938bf24891e5acb196d6779c433a
|
[
"MIT"
] | null | null | null |
#pylint: disable = line-too-long
import os
import time
import board
import neopixel
import keypad
import usb_hid
import pwmio
import rainbowio
from adafruit_hid.keyboard import Keyboard
from pykey.keycode import KB_Keycode as KC
from adafruit_hid.keyboard_layout_us import KeyboardLayoutUS
# Hardware definition: GPIO where RGB LED is connected.
pixel_pin = board.NEOPIXEL
num_pixels = 61
pixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=1, auto_write=False)
cyclecount = 0
def rainbow_cycle(wait):
for i in range(num_pixels):
rc_index = (i * 256 // num_pixels) + wait
pixels[i] = rainbowio.colorwheel(rc_index & 255)
pixels.show()
buzzer = pwmio.PWMOut(board.SPEAKER, variable_frequency=True)
OFF = 0
ON = 2**15
# Hardware definition: Switch Matrix Setup.
keys = keypad.KeyMatrix(
row_pins=(board.ROW1, board.ROW2, board.ROW3, board.ROW4, board.ROW5),
column_pins=(board.COL1, board.COL2, board.COL3, board.COL4, board.COL5, board.COL6, board.COL7,
board.COL8, board.COL9, board.COL10, board.COL11, board.COL12, board.COL13, board.COL14),
columns_to_anodes=True,
)
# CONFIGURABLES ------------------------
MACRO_FOLDER = '/layers'
# CLASSES AND FUNCTIONS ----------------
class Layer:
""" Class representing a layer, for which we have a set
of macro sequences or keycodes"""
def __init__(self, layerdata):
self.name = layerdata['name']
self.macros = layerdata['macros']
# Neopixel update function
def update_pixels(color):
for i in range(num_pixels):
pixels[i] = color
pixels.show()
# INITIALIZATION -----------------------
# Load all the macro key setups from .py files in MACRO_FOLDER
layers = []
files = os.listdir(MACRO_FOLDER)
files.sort()
for filename in files:
print(filename)
if filename.endswith('.py'):
try:
module = __import__(MACRO_FOLDER + '/' + filename[:-3])
layers.append(Layer(module.layer))
except (SyntaxError, ImportError, AttributeError, KeyError, NameError,
IndexError, TypeError) as err:
print(err)
pass
if not layers:
print('NO MACRO FILES FOUND')
while True:
pass
layer_count = len(layers)
# print(layer_count)
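# Resolve the active layer: the highest-numbered layer key currently held,
# clamped to the number of loaded layers (layer 0 when no layer key is held).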
def get_active_layer(layer_keys_pressed, layer_count):
tmp = 0
if len(layer_keys_pressed)>0:
for layer_id in layer_keys_pressed:
if layer_id > tmp: # use highest layer number
tmp = layer_id
if tmp >= layer_count:
tmp = layer_count-1
return tmp
# setup variables
keyboard = Keyboard(usb_hid.devices)
keyboard_layout = KeyboardLayoutUS(keyboard)
active_keys = []
not_sleeping = True
layer_index = 0
buzzer.duty_cycle = ON
buzzer.frequency = 440 #
time.sleep(0.05)
buzzer.frequency = 880 #
time.sleep(0.05)
buzzer.frequency = 440 #
time.sleep(0.05)
buzzer.duty_cycle = OFF
while not_sleeping:
key_event = keys.events.get()
if key_event:
key_number = key_event.key_number
cyclecount = cyclecount +1
rainbow_cycle(cyclecount)
# keep track of keys being pressed for layer determination
if key_event.pressed:
active_keys.append(key_number)
else:
active_keys.remove(key_number)
# reset the layers and identify which layer key is pressed.
layer_keys_pressed = []
for active_key in active_keys:
group = layers[0].macros[active_key][2]
for item in group:
if isinstance(item, int):
if (item >= KC.LAYER_0) and (item <= KC.LAYER_F) :
layer_keys_pressed.append(item - KC.LAYER_0)
layer_index = get_active_layer(layer_keys_pressed, layer_count)
# print(layer_index)
# print(layers[layer_index].macros[key_number][1])
group = layers[layer_index].macros[key_number][2]
color = layers[layer_index].macros[key_number][0]
if key_event.pressed:
update_pixels(color)
for item in group:
if isinstance(item, int):
keyboard.press(item)
else:
keyboard_layout.write(item)
else:
for item in group:
if isinstance(item, int):
if item >= 0:
keyboard.release(item)
#update_pixels(0x000000)
time.sleep(0.002)
| 28.289308 | 106 | 0.631392 | 3 |
be7a64f77320fd681c829169bf63f1da8ca71b48
| 5,042 |
rs
|
Rust
|
src/helpers.rs
|
fewensa/microkv
|
89d983721e588dfad1460dcdf1c48c97c1ef7a14
|
[
"MIT"
] | null | null | null |
src/helpers.rs
|
fewensa/microkv
|
89d983721e588dfad1460dcdf1c48c97c1ef7a14
|
[
"MIT"
] | null | null | null |
src/helpers.rs
|
fewensa/microkv
|
89d983721e588dfad1460dcdf1c48c97c1ef7a14
|
[
"MIT"
] | null | null | null |
use std::fs::{File, OpenOptions};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use secstr::{SecStr, SecVec};
use serde::de::DeserializeOwned;
use serde::Serialize;
use sodiumoxide::crypto::secretbox::Nonce;
use sodiumoxide::crypto::secretbox::{self, Key};
use crate::errors::{ErrorType, KVError, Result};
/// Defines the directory path where a key-value store
/// (or multiple) can be interacted with.
pub(crate) const DEFAULT_WORKSPACE_PATH: &str = ".microkv/";
/// Helper that retrieves the home directory by resolving $HOME
#[inline]
pub fn get_home_dir() -> PathBuf {
dirs::home_dir().unwrap()
}
/// Helper that forms an absolute path from a given database name and the default workspace path.
#[inline]
pub fn get_db_path<S: AsRef<str>>(name: S) -> PathBuf {
let mut path = get_home_dir();
path.push(DEFAULT_WORKSPACE_PATH);
get_db_path_with_base_path(name, path)
}
/// with base path
#[inline]
pub fn get_db_path_with_base_path<S: AsRef<str>>(name: S, mut base_path: PathBuf) -> PathBuf {
base_path.push(name.as_ref());
base_path.set_extension("kv");
base_path
}
/// read a file and deserialize it using bincode
#[inline]
pub fn read_file_and_deserialize_bincode<V>(path: &PathBuf) -> Result<V>
where
V: DeserializeOwned + 'static,
{
// read kv raw serialized structure to kv_raw
let mut kv_raw: Vec<u8> = Vec::new();
File::open(path)?.read_to_end(&mut kv_raw)?;
bincode::deserialize(&kv_raw).map_err(|_e| KVError {
error: ErrorType::FileError,
msg: Some(format!(
"Failed read file {:?} an deserialize use bincode",
path
)),
})
}
/// gen nonce
pub fn gen_nonce() -> Nonce {
secretbox::gen_nonce()
}
/// encode value
pub fn encode_value<V>(value: &V, pwd: &Option<SecStr>, nonce: &Nonce) -> Result<SecVec<u8>>
where
V: Serialize,
{
// serialize the object for committing to db
let ser_val: Vec<u8> = bincode::serialize(&value).unwrap();
// encrypt and secure value if password is available
let value: SecVec<u8> = match pwd {
// encrypt using AEAD and secure memory
Some(pwd) => {
let key: Key = Key::from_slice(pwd.unsecure()).unwrap();
SecVec::new(secretbox::seal(&ser_val, nonce, &key))
}
// otherwise initialize secure serialized object to insert to BTreeMap
None => SecVec::new(ser_val),
};
Ok(value)
}
/// decode value
pub fn decode_value<V>(value: &SecVec<u8>, pwd: &Option<SecStr>, nonce: &Nonce) -> Result<V>
where
V: DeserializeOwned + 'static,
{
// get value to deserialize. If password is set, retrieve the value, and decrypt it
// using AEAD. Otherwise just get the value and return
let deser_val = match pwd {
Some(pwd) => {
// initialize key from pwd slice
let key = match Key::from_slice(pwd.unsecure()) {
Some(k) => k,
None => {
return Err(KVError {
error: ErrorType::CryptoError,
msg: Some("cannot derive key from password hash".to_string()),
});
}
};
// borrow secured value by reference, and decrypt before deserializing
match secretbox::open(value.unsecure(), nonce, &key) {
Ok(r) => r,
Err(_) => {
return Err(KVError {
error: ErrorType::CryptoError,
msg: Some("cannot validate value being decrypted".to_string()),
});
}
}
}
// if no password, return value as-is
None => value.unsecure().to_vec(),
};
// finally deserialize into deserializable object to return as
let value = bincode::deserialize(&deser_val).map_err(|e| KVError {
error: ErrorType::KVError,
msg: Some(format!(
"cannot deserialize into specified object type: {:?}",
e
)),
})?;
Ok(value)
}
/// Writes the IndexMap to persistent storage after encrypting with secure crypto construction.
pub(crate) fn persist_serialize<S>(path: &PathBuf, object: &S) -> Result<()>
where
S: Serialize,
{
// initialize workspace directory if not exists
match path.parent() {
Some(path) => {
if !path.is_dir() {
std::fs::create_dir_all(path)?;
}
}
None => {
return Err(KVError {
error: ErrorType::FileError,
msg: Some("The store file parent path isn't sound".to_string()),
});
}
}
// check if path to db exists, if not create it
let path = Path::new(path);
let mut file: File = OpenOptions::new().write(true).create(true).open(path)?;
// acquire a file lock that unlocks at the end of scope
// let _file_lock = Arc::new(Mutex::new(0));
let ser = bincode::serialize(object).unwrap();
file.write_all(&ser)?;
Ok(())
}
| 31.5125 | 97 | 0.592622 | 3.046875 |
0698e2c5537ade652b75252bd4bbe52b4c2ac826
| 2,114 |
kt
|
Kotlin
|
app/src/main/java/com/moneytree/app/common/NSViewModel.kt
|
Dishantraiyani/moneytree
|
093356b039ebef395523a496a45b93a11cc74009
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/moneytree/app/common/NSViewModel.kt
|
Dishantraiyani/moneytree
|
093356b039ebef395523a496a45b93a11cc74009
|
[
"Apache-2.0"
] | null | null | null |
app/src/main/java/com/moneytree/app/common/NSViewModel.kt
|
Dishantraiyani/moneytree
|
093356b039ebef395523a496a45b93a11cc74009
|
[
"Apache-2.0"
] | null | null | null |
package com.moneytree.app.common
import android.app.Application
import android.content.Intent
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.MutableLiveData
import com.moneytree.app.ui.login.NSLoginActivity
/**
* The base class for all view models which holds methods and members common to all view models
*/
open class NSViewModel(mApplication: Application) : AndroidViewModel(mApplication) {
var isProgressShowing = MutableLiveData<Boolean>()
var isBottomProgressShowing = MutableLiveData<Boolean>()
val validationErrorId by lazy { NSSingleLiveEvent<Int>() }
val failureErrorMessage: NSSingleLiveEvent<String?> = NSSingleLiveEvent()
val apiErrors: NSSingleLiveEvent<List<Any>> = NSSingleLiveEvent()
val noNetworkAlert: NSSingleLiveEvent<Boolean> = NSSingleLiveEvent()
var isRefreshComplete = MutableLiveData<Boolean>()
/**
* To handle the API failure error and communicate back to UI
*
* @param errorMessage The error message to show
*/
protected fun handleFailure(errorMessage: String?) {
isProgressShowing.value = false
isBottomProgressShowing.value = false
failureErrorMessage.value = errorMessage
}
/**
* To handle api error message
*
* @param apiErrorList The errorList contains string resource id and string
*/
protected fun handleError(apiErrorList: List<Any>) {
isProgressShowing.value = false
isBottomProgressShowing.value = false
if (apiErrorList.contains("Session TimeOut!!\n")) {
NSApplication.getInstance().getPrefs().clearPrefData()
NSApplication.getInstance().startActivity(Intent(NSApplication.getInstance(), NSLoginActivity::class.java).setFlags(Intent.FLAG_ACTIVITY_NEW_TASK or Intent.FLAG_ACTIVITY_CLEAR_TASK))
} else {
apiErrors.value = apiErrorList
}
}
/**
* To handle no network
*/
protected open fun handleNoNetwork() {
isProgressShowing.value = false
isBottomProgressShowing.value = false
noNetworkAlert.value = true
}
}
| 37.087719 | 194 | 0.715705 | 3.21875 |
3b3a4999ecd6ac88ea2540589cd7ca93157f0fd1
| 1,696 |
c
|
C
|
src/helpf.c
|
just6chill/hdir
|
94fabcb109cb6c29888c93b48eb1b0d5219736e3
|
[
"Apache-2.0"
] | 9 |
2021-01-01T16:18:27.000Z
|
2022-03-20T09:59:00.000Z
|
src/helpf.c
|
just6chill/hdir
|
94fabcb109cb6c29888c93b48eb1b0d5219736e3
|
[
"Apache-2.0"
] | 2 |
2021-01-01T16:20:24.000Z
|
2021-01-09T14:44:45.000Z
|
src/helpf.c
|
just6chill/hdir
|
94fabcb109cb6c29888c93b48eb1b0d5219736e3
|
[
"Apache-2.0"
] | 3 |
2021-01-02T16:44:49.000Z
|
2021-03-10T07:23:11.000Z
|
#include "helpf.h"
#include "color.h"
#include <stdio.h>
#include <windows.h>
#define WHITESPACE printf("\n");
int helpf(char *args[]) {
/* start help output */
green("syntax"); printf(": '"); green("hdir "); printf("<"); green("suffix"); printf("> <"); green("parameter1"); printf("> <"); green("parameter2"); printf(">' \n");
printf("'"); green("r"); printf("' - rename a file or a folder \n Example: 'hdir r example.txt newname.txt' \n ");
WHITESPACE
printf("'"); green("c"); printf("' - copy a file \n Example: 'hdir c example.txt newfile.txt' \n ");
WHITESPACE
printf("'"); green("d"); printf("' - delete a file \n Example: 'hdir d example.txt' \n ");
WHITESPACE
printf("'"); green("f"); printf("' - create a folder \n Example: 'hdir f folder1 folder2 foldern' \n ");
WHITESPACE
printf("'"); green("n"); printf("' - create a file with given extension \n Example: 'hdir n example.txt example1.txt examplen.txt' \n ");
WHITESPACE
printf("'"); green("k"); printf("' - delete a folder \n Example: 'hdir k examplefolder' \n ");
WHITESPACE
printf("'"); green("l"); printf("' - show all files and subfolders of the named folder/directory \n Example: 'hdir l examplefolder' \n ");
WHITESPACE
printf("'"); green("s"); printf("' - show stats of a file (size, perms, last change, user-id, drive) \n Example: 'hdir s example.txt' \n");
WHITESPACE
printf("'"); green("h"); printf("' - type 'hdir h' for help \n");
printf("several parameters only allowed with create a folder or file as explained in the examples \n");
WHITESPACE
printf("hdir made by "); green("just6chill"); printf("(github)");
return 0;
}
| 44.631579 | 168 | 0.614976 | 3.171875 |
fe28b50106f7ee4fe230e1fe94ef9f9e210bc2d0
| 1,009 |
c
|
C
|
jisuanke.com/C2991/A/a.c
|
jyi2ya/artifacts
|
c227d170592d1fec94a09b20e2f1f75a46dfdf1f
|
[
"MIT"
] | null | null | null |
jisuanke.com/C2991/A/a.c
|
jyi2ya/artifacts
|
c227d170592d1fec94a09b20e2f1f75a46dfdf1f
|
[
"MIT"
] | null | null | null |
jisuanke.com/C2991/A/a.c
|
jyi2ya/artifacts
|
c227d170592d1fec94a09b20e2f1f75a46dfdf1f
|
[
"MIT"
] | null | null | null |
#include <stdio.h>
long long gans;
long long cnt;
int stk1[5000009], stk2[5000009];
int tp1, tp2;
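/* stk1 holds the pushed values; stk2 is a monotonic (non-decreasing) stack of
 * running maxima, so stk2[tp2 - 1] is always the maximum of the current stack. */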
void init(void)
{
cnt = 1;
gans = 0;
tp1 = tp2 = 0;
}
void PUSH(int x)
{
stk1[tp1++] = x;
if (tp2 == 0 || x >= stk2[tp2 - 1])
stk2[tp2++] = x;
gans ^= cnt * stk2[tp2 - 1];
++cnt;
}
void POP(void)
{
if (tp1 > 0) {
--tp1;
if (tp1 > 0) {
if (stk2[tp2 - 1] == stk1[tp1])
--tp2;
gans ^= cnt * stk2[tp2 - 1];
} else {
tp2 = 0;
}
}
++cnt;
}
int n, p, q, m;
unsigned int SA, SB, SC;
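/* xorshift-style pseudo-random generator used by gen() to produce the operation sequence */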
unsigned int rng61(void)
{
SA ^= SA << 16;
SA ^= SA >> 5;
SA ^= SA << 1;
unsigned int t = SA; SA = SB;
SB = SC;
SC ^= t ^ SA;
return SC;
}
void gen(void)
{
scanf("%d%d%d%d%u%u%u", &n, &p, &q, &m, &SA, &SB, &SC);
for(int i = 1; i <= n; i++) {
if(rng61() % (p + q) < (unsigned int)p)
PUSH(rng61() % m + 1);
else
POP();
}
}
int main(void)
{
int T;
int i;
scanf("%d", &T);
for (i = 1; i <= T; ++i) {
init();
gen();
printf("Case #%d: %lld\n", i, gans);
}
return 0;
}
| 13.103896 | 56 | 0.472745 | 3.203125 |
f00d8a2ff37a2b007fa4edfda74f6d8657793532
| 3,684 |
py
|
Python
|
piton/lib/inquirer/questions.py
|
piton-package-manager/PPM
|
19015b76184befe1e2daa63189a13b039787868d
|
[
"MIT"
] | 19 |
2016-04-08T04:00:07.000Z
|
2021-11-12T19:36:56.000Z
|
piton/lib/inquirer/questions.py
|
LookLikeAPro/PPM
|
19015b76184befe1e2daa63189a13b039787868d
|
[
"MIT"
] | 9 |
2017-01-03T13:39:47.000Z
|
2022-01-15T20:38:20.000Z
|
piton/lib/inquirer/questions.py
|
LookLikeAPro/PPM
|
19015b76184befe1e2daa63189a13b039787868d
|
[
"MIT"
] | 6 |
2017-04-01T03:38:45.000Z
|
2021-05-06T11:25:31.000Z
|
# -*- coding: utf-8 -*-
"""
Module that implements the questions types
"""
import json
from . import errors
def question_factory(kind, *args, **kwargs):
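    """Instantiate the question class whose `kind` matches, or raise UnknownQuestionTypeError."""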
for clazz in (Text, Password, Confirm, List, Checkbox):
if clazz.kind == kind:
return clazz(*args, **kwargs)
raise errors.UnknownQuestionTypeError()
def load_from_dict(question_dict):
"""
Load one question from a dict.
It requires the keys 'name' and 'kind'.
:return: The Question object with associated data.
:return type: Question
"""
return question_factory(**question_dict)
def load_from_list(question_list):
"""
Load a list of questions from a list of dicts.
It requires the keys 'name' and 'kind' for each dict.
:return: A list of Question objects with associated data.
:return type: List
"""
return [load_from_dict(q) for q in question_list]
def load_from_json(question_json):
"""
Load Questions from a JSON string.
:return: A list of Question objects with associated data if the JSON
contains a list or a Question if the JSON contains a dict.
:return type: List or Dict
"""
data = json.loads(question_json)
if isinstance(data, list):
return load_from_list(data)
if isinstance(data, dict):
return load_from_dict(data)
raise TypeError(
'Json contained a %s variable when a dict or list was expected',
type(data))
class TaggedValue(object):
def __init__(self, label, value):
self.label = label
self.value = value
def __str__(self):
return self.label
def __repr__(self):
return self.value
def __cmp__(self, other):
if isinstance(other, TaggedValue):
return self.value != other.value
return self.value != other
class Question(object):
kind = 'base question'
def __init__(self,
name,
message='',
choices=None,
default=None,
ignore=False,
validate=True):
self.name = name
self._message = message
self._choices = choices or []
self._default = default
self._ignore = ignore
self._validate = validate
self.answers = {}
@property
def ignore(self):
return bool(self._solve(self._ignore))
@property
def message(self):
return self._solve(self._message)
@property
def default(self):
return self._solve(self._default)
@property
def choices_generator(self):
for choice in self._solve(self._choices):
yield (
TaggedValue(*choice)
if isinstance(choice, tuple) and len(choice) == 2
else choice
)
@property
def choices(self):
return list(self.choices_generator)
def validate(self, current):
try:
if self._solve(self._validate, current):
return
except Exception:
pass
raise errors.ValidationError(current)
def _solve(self, prop, *args, **kwargs):
if callable(prop):
return prop(self.answers, *args, **kwargs)
if isinstance(prop, str):
return prop.format(**self.answers)
return prop
class Text(Question):
kind = 'text'
class Password(Question):
kind = 'password'
class Confirm(Question):
kind = 'confirm'
def __init__(self, name, default=False, **kwargs):
super(Confirm, self).__init__(name, default=default, **kwargs)
class List(Question):
kind = 'list'
class Checkbox(Question):
kind = 'checkbox'
| 24.236842 | 72 | 0.604777 | 3.53125 |
4a40f17d330460d3c3b290eb627c7d22b14fa222
| 911 |
js
|
JavaScript
|
src/reducer/chatroomModel.js
|
chumakovvchuma/bridge-game
|
38edee8ed005ba792627c0a142ed9e7ab4acb63b
|
[
"MIT"
] | 1 |
2018-11-05T17:19:15.000Z
|
2018-11-05T17:19:15.000Z
|
src/reducer/chatroomModel.js
|
chumakovvchuma/bridge-game
|
38edee8ed005ba792627c0a142ed9e7ab4acb63b
|
[
"MIT"
] | 6 |
2020-07-12T02:29:57.000Z
|
2020-07-12T02:29:58.000Z
|
src/reducer/chatroomModel.js
|
chumakovvchuma/bridge-game
|
38edee8ed005ba792627c0a142ed9e7ab4acb63b
|
[
"MIT"
] | 8 |
2018-11-05T01:08:33.000Z
|
2021-12-17T21:56:08.000Z
|
import Database from "../firebase";
import {dispatch} from "../reducer";
/*
 * A chatroom class to handle the communication with the database
 * @param linkId, string, a unique path name for a table, is generated from a timestamp
 * @param id, string, a unique key of a table, is generated by firebase when pushing a new node
*/
export default class ChatroomModel {
constructor(linkId, id) {
this.linkId = linkId;
this.id = id;
this.get();
}
// get data
get() {
Database.getChatRoomById(this.id).then(chatroom => {
this.update(chatroom, this.id);
this.listenChanged();
});
}
// update data
update(chatroom, id) {
dispatch("UPDATE_CHAT_ROOM", {
chatroom: chatroom,
id: id
});
}
// register data change event
listenChanged() {
Database.getNodeByPath(`chatroom/${this.id}/`, snapshot =>
this.update(snapshot.val(), this.id)
);
}
}
| 23.973684 | 91 | 0.637761 | 3.109375 |
547b92ccf00ad69100276753c26a67c062f86746
| 2,634 |
swift
|
Swift
|
SwiftBluetooth/Classes/Operations/Write/WriteOperation.swift
|
CatchZeng/SwiftBluetooth
|
6b5ae0032ceb8306d6712d89acddd9aee79b672e
|
[
"MIT"
] | 3 |
2018-02-27T08:59:02.000Z
|
2018-07-18T07:21:55.000Z
|
SwiftBluetooth/Classes/Operations/Write/WriteOperation.swift
|
CatchZeng/SwiftBluetooth
|
6b5ae0032ceb8306d6712d89acddd9aee79b672e
|
[
"MIT"
] | null | null | null |
SwiftBluetooth/Classes/Operations/Write/WriteOperation.swift
|
CatchZeng/SwiftBluetooth
|
6b5ae0032ceb8306d6712d89acddd9aee79b672e
|
[
"MIT"
] | null | null | null |
//
// WriteOperation.swift
// SwiftBluetooth
//
// Created by CatchZeng on 2017/9/5.
// Copyright © 2017年 CatchZeng. All rights reserved.
//
import UIKit
import CoreBluetooth
open class WriteOperation: BLEOperation {
    // Maximum 20 bytes in a single BLE packet
private static let notifyMTU = 20
private var peripheral: BLEPeripheral
private var data: Data
private var characteristic: CBCharacteristic
private var type: CBCharacteristicWriteType
private var callback: ((Result<(Data)>) -> Void)?
public init(peripheral: BLEPeripheral, data: Data, characteristic: CBCharacteristic, type: CBCharacteristicWriteType, callback: ((Result<(Data)>) -> Void)?) {
self.peripheral = peripheral
self.data = data
self.characteristic = characteristic
self.type = type
self.callback = callback
}
// MARK: BLEOperation
public override func start() {
if peripheral.peripheral.state != .connected {
printLog("bluetooth is disconnected.")
return
}
super.start()
writeValue()
if type == .withoutResponse {
success()
}
}
@discardableResult
public override func process(event: Event) -> Any? {
if type == .withoutResponse {
return nil
}
if case .didWriteCharacteristic(let characteristic) = event {
if characteristic.uuid == self.characteristic.uuid {
success()
}
}
return nil
}
public override func cancel() {
super.cancel()
callback?(.cancelled)
callback = nil
}
public override func fail(_ error: Error?) {
super.fail(error)
callback?(.failure(error: error))
callback = nil
}
public override func success() {
super.success()
callback?(.success(data))
callback = nil
}
// MARK: Private Methods
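    /// Splits the payload into chunks of at most `notifyMTU` (20) bytes and writes them sequentially to the characteristic.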
private func writeValue() {
var sendIndex = 0
while true {
var amountToSend = data.count - sendIndex
if amountToSend > WriteOperation.notifyMTU {
amountToSend = WriteOperation.notifyMTU
}
if amountToSend <= 0 {
return
}
let dataChunk = data.subdata(in: sendIndex..<sendIndex+amountToSend)
printLog("didSend: \(dataChunk.hexString)")
peripheral.peripheral.writeValue(dataChunk, for: characteristic, type: type)
sendIndex += amountToSend
}
}
}
| 26.34 | 162 | 0.582384 | 3 |
71576fdf6e4a482279521e3c89a2ec534472d331
| 4,294 |
lua
|
Lua
|
scene4.lua
|
M0Rf30/trosh
|
61862b65111c12b71cb95ab335a2a7a398372192
|
[
"WTFPL"
] | 1 |
2016-12-08T20:51:22.000Z
|
2016-12-08T20:51:22.000Z
|
scene4.lua
|
M0Rf30/trosh
|
61862b65111c12b71cb95ab335a2a7a398372192
|
[
"WTFPL"
] | null | null | null |
scene4.lua
|
M0Rf30/trosh
|
61862b65111c12b71cb95ab335a2a7a398372192
|
[
"WTFPL"
] | null | null | null |
function scene4_load()
backgroundwhite = 0
staralpha = 1
asteroids = {}
bullets = {}
love.audio.play(bgmusic)
starttimer = 0
alerttimer = 0
flyingquad = 3
pspeedx = 0
pspeedy = 0
playerx = nil
flyanimationtimer = 0
end
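-- Advance stars, explosions and the rocket; once the player has jumped,
-- carry them off-screen, fade the scene out and switch to scene5.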
function scene4_update(dt)
if secondtimer then
secondtimer = secondtimer + dt
end
for i, v in pairs(stars) do
v:update(dt)
end
--EXPLOSION
local delete = {}
for i, v in pairs(explosions) do
if v:update(dt) == true then
table.insert(delete, i)
end
end
table.sort(delete, function(a,b) return a>b end)
for i, v in pairs(delete) do
table.remove(explosions, v) --remove
end
if rockets[1] then
rockets[1]:update(dt)
end
if (starttimer > 0 and starttimer < 3) or alerttimer > 0.1 then
alerttimer = math.fmod(alerttimer + dt*7, math.pi*2)
end
if jumped then
if rockets[1] then
rockets[1].x = rockets[1].x - dt*3
end
if rockets[1] and secondtimer > 2 and secondtimer - dt <= 2 then
for i = 1, 20 do
if explosions then
table.insert(explosions, explosion:new(rockets[1].x-16+math.random(16)-8, rockets[1].y-20+math.random(16)-8))
end
end
starmover = math.pi
rockets[1] = nil
end
playerx = playerx + pspeedx*dt
playery = playery + pspeedy*dt
if pspeedx > 0 then
pspeedx = pspeedx - dt*5
end
if playery >= 20 then
playery = 20
pspeedy = 0
end
if playerx >= 50 then
pspeedx = 0
playerx = 50
end
if secondtimer > 2 then
local i = math.max(0, (1-(secondtimer-2)/2))
staralpha = math.max(0, (1-(secondtimer-2)/2))*i
love.graphics.setBackgroundColor(153*(1-i), 217*(1-i), 234*(1-i))
if shakeamount < 5 then
shakeamount = math.min(5, shakeamount+dt*3)
elseif shakeamount > 5 then
shakeamount = math.max(5, shakeamount-dt*3)
end
end
if secondtimer > 4 then
changegamestate("scene5")
end
end
if starttimer >= 4.3 and starttimer - dt < 4.3 then
playerx = rockets[1].x+4
playery = rockets[1].y
end
if jumped then
flyanimationtimer = flyanimationtimer + dt
while flyanimationtimer > 0.1 do
flyanimationtimer = flyanimationtimer - 0.1
if flyingquad == 3 then
flyingquad = 4
else
flyingquad = 3
end
end
end
end
function scene4_draw()
local r, g, b = love.graphics.getColor()
love.graphics.setColor(math.random(255), math.random(255), math.random(255), 255*(1-scoreanim))
for i = 1, backgroundstripes, 2 do
local alpha = math.rad((i/backgroundstripes + math.fmod(sunrot/100, 1)) * 360)
local point1 = {lastexplosion[1]*scale+200*scale*math.cos(alpha), lastexplosion[2]*scale+200*scale*math.sin(alpha)}
local alpha = math.rad(((i+1)/backgroundstripes + math.fmod(sunrot/100, 1)) * 360)
local point2 = {lastexplosion[1]*scale+200*scale*math.cos(alpha), lastexplosion[2]*scale+200*scale*math.sin(alpha)}
love.graphics.polygon("fill", lastexplosion[1]*scale, lastexplosion[2]*scale, point1[1], point1[2], point2[1], point2[2])
end
love.graphics.setColor(r, g, b, 255)
for i,v in pairs(stars) do
v:draw()
end
if playerx then
local off = 0
if rockets[1] then
off = rockets[1].startingoffset
end
love.graphics.draw(playerimg, playerquad[flyingquad], (playerx+off)*scale, playery*scale, 0, scale, scale, 13, 6)
end
if rockets[1] then
rockets[1]:draw()
end
for i, v in pairs(explosions) do
v:draw()
end
if (starttimer > 0 and starttimer < 3) or alerttimer > 0.1 then
local i = math.abs(math.sin(alerttimer))
love.graphics.setColor(255, 0, 0, i*100)
love.graphics.rectangle("fill", 0, 0, 100*scale, 80*scale)
love.graphics.setColor(255, 0, 0, i*255)
draw(alertimg, 50+math.random(5)-3, 40+math.random(5)-3, (math.random()*2-1)*0.1, i*0.5+0.6, i*0.5+0.6, 54, 15)
draw(randomshitimg, 50+math.random(20)-10, 40+math.random(20)-10, 0, 1, 1, 50, 42)
end
if starttimer > 4 and not jumped then
love.graphics.setColor(255, 0, 0, math.random(255))
properprint("jump!!", 0, 40, scale*3)
end
end
function scene4_action()
if starttimer > 4.3 and not jumped then
jumped = true
secondtimer = 0
pspeedx = 20
pspeedy = 2
end
end
| 24.537143 | 124 | 0.639031 | 3.109375 |
71e50f0560634c2d7efa6fa1b347f5a1107714fd
| 7,749 |
ts
|
TypeScript
|
src/components/utils/helpers.ts
|
tinybat02/ES-trajectory-v4
|
3e317bd671442b2bb2ff8cc44d65746f530ea1aa
|
[
"Apache-2.0"
] | null | null | null |
src/components/utils/helpers.ts
|
tinybat02/ES-trajectory-v4
|
3e317bd671442b2bb2ff8cc44d65746f530ea1aa
|
[
"Apache-2.0"
] | null | null | null |
src/components/utils/helpers.ts
|
tinybat02/ES-trajectory-v4
|
3e317bd671442b2bb2ff8cc44d65746f530ea1aa
|
[
"Apache-2.0"
] | null | null | null |
import Feature from 'ol/Feature';
import Point from 'ol/geom/Point';
import { Coordinate } from 'ol/coordinate';
import LineString from 'ol/geom/LineString';
import Circle from 'ol/geom/Circle';
import { Circle as CircleStyle, Stroke, Style, Fill, Icon, Text } from 'ol/style';
import GeometryType from 'ol/geom/GeometryType';
import { Draw } from 'ol/interaction';
import { Vector as VectorLayer } from 'ol/layer';
import { Vector as VectorSource } from 'ol/source';
import { FeatureLike } from 'ol/Feature';
import { getLength } from 'ol/sphere';
import Arrow from '../../img/arrow.png';
import Arrow1 from '../../img/arrow1.png';
interface SingleData {
latitude: number;
longitude: number;
[key: string]: any;
}
export const formatLength = function(line: LineString) {
const length = getLength(line);
let output;
if (length > 100) {
output = Math.round((length / 1000) * 100) / 100 + ' ' + 'km';
} else {
output = Math.round(length * 100) / 100 + ' ' + 'm';
}
return output;
};
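// Group the raw documents per device, dropping devices that only reported a single
// point; returns per-device routes, timestamps, uncertainties, floors and vendors,
// plus a list of device ids sorted by session duration (longest first).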
export const processDataES = (data: SingleData[]) => {
data.reverse();
const perDeviceRoute: { [key: string]: [number, number][] } = {};
const perDeviceVendor: { [key: string]: string } = {};
const perDeviceTime: { [key: string]: number[] } = {};
const perDeviceUncertainty: { [key: string]: number[] } = {};
const perDeviceFloor: { [key: string]: number[] } = {};
data.map(datum => {
(perDeviceRoute[datum.hash_id] = perDeviceRoute[datum.hash_id] || []).push([datum.longitude, datum.latitude]);
(perDeviceTime[datum.hash_id] = perDeviceTime[datum.hash_id] || []).push(datum.timestamp);
(perDeviceUncertainty[datum.hash_id] = perDeviceUncertainty[datum.hash_id] || []).push(datum.uncertainty);
(perDeviceFloor[datum.hash_id] = perDeviceFloor[datum.hash_id] || []).push(datum.floor);
if (!perDeviceVendor[datum.hash_id]) perDeviceVendor[datum.hash_id] = datum.vendor;
});
const perDeviceRoute_nonSinglePoint: { [key: string]: [number, number][] } = {};
const perDeviceTime_nonSinglePoint: { [key: string]: number[] } = {};
const perDeviceTime_array: { hash_id: string; duration: number }[] = [];
let singlePointCount = 0;
Object.keys(perDeviceRoute).map(hash_id => {
if (perDeviceRoute[hash_id].length > 1) {
perDeviceRoute_nonSinglePoint[hash_id] = perDeviceRoute[hash_id];
} else {
singlePointCount++;
}
});
Object.keys(perDeviceTime).map(hash_id => {
if (perDeviceTime[hash_id].length > 1) {
perDeviceTime_nonSinglePoint[hash_id] = perDeviceTime[hash_id];
perDeviceTime_array.push({ hash_id, duration: perDeviceTime[hash_id].slice(-1)[0] - perDeviceTime[hash_id][0] });
}
});
perDeviceTime_array.sort((a, b) => {
if (a.duration > b.duration) return -1;
if (a.duration < b.duration) return 1;
return 0;
});
return {
perDeviceRoute: perDeviceRoute_nonSinglePoint,
perDeviceTime: perDeviceTime_nonSinglePoint,
perDeviceVendor,
perDeviceUncertainty,
singlePointCount,
perDeviceFloor,
selectList: perDeviceTime_array.map(elm => elm.hash_id),
};
};
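// Build one styled segment between consecutive route points, coloured by floor and
// capped with a direction arrow at the destination point.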
export const createLine = (routeData: Coordinate[], iterRoute: number, floorData: number[], other_floor: number) => {
let color = 'rgba(73,168,222)';
let pic = Arrow;
if (floorData[iterRoute] == other_floor) color = 'rgba(255,176,0)';
if (floorData[iterRoute + 1] == other_floor) pic = Arrow1;
const dx = routeData[iterRoute + 1][0] - routeData[iterRoute][0];
const dy = routeData[iterRoute + 1][1] - routeData[iterRoute][1];
const rotation = Math.atan2(dy, dx);
const lineFeature = new Feature(new LineString([routeData[iterRoute], routeData[iterRoute + 1]]));
lineFeature.setStyle([
new Style({
stroke: new Stroke({
color: color,
width: 2,
}),
}),
new Style({
geometry: new Point(routeData[iterRoute + 1]),
image: new Icon({
src: pic,
anchor: [0.75, 0.5],
rotateWithView: true,
rotation: -rotation,
}),
}),
]);
return lineFeature;
};
export const createLineWithLabel = (
routeData: Coordinate[],
timeData: number[],
iterRoute: number,
floorData: number[],
other_floor: number
) => {
let color = 'rgba(73,168,222)';
let pic = Arrow;
if (floorData[iterRoute] == other_floor) color = 'rgba(255,176,0)';
if (floorData[iterRoute + 1] == other_floor) pic = Arrow1;
const dx = routeData[iterRoute + 1][0] - routeData[iterRoute][0];
const dy = routeData[iterRoute + 1][1] - routeData[iterRoute][1];
const rotation = Math.atan2(dy, dx);
const lineFeature = new Feature(new LineString([routeData[iterRoute], routeData[iterRoute + 1]]));
lineFeature.setStyle([
new Style({
stroke: new Stroke({
color: color,
width: 2,
}),
text: new Text({
stroke: new Stroke({
color: '#fff',
width: 2,
}),
font: '18px Calibri,sans-serif',
text: `${timeData[iterRoute + 1] - timeData[iterRoute]}s`,
}),
}),
new Style({
geometry: new Point(routeData[iterRoute + 1]),
image: new Icon({
src: pic,
anchor: [0.75, 0.5],
rotateWithView: true,
rotation: -rotation,
}),
}),
]);
return lineFeature;
};
export const createPoint = (
routeData: Coordinate[],
routeRadiusData: number[],
iterRoute: number,
floorData: number[],
other_floor: number
) => {
let color = 'rgba(73,168,222,0.6)';
if (floorData[iterRoute] == other_floor) color = 'rgba(255,176,0,0.6)';
// const pointFeature = new Feature(new Point(routeData[iterRoute]));
// pointFeature.setStyle(
// new Style({
// image: new Circle({
// radius: routeRadiusData[iterRoute] || 2,
// // radius: 5,
// fill: new Fill({ color: color }),
// }),
// })
// );
const pointFeature = new Feature(new Circle(routeData[iterRoute], routeRadiusData[iterRoute] || 2));
pointFeature.setStyle(
new Style({
fill: new Fill({ color: color }),
})
);
return pointFeature;
};
export const createMeasureLayer = (source: VectorSource) => {
return new VectorLayer({
source: source,
style: function(feature: FeatureLike) {
const geometry = feature.getGeometry() as LineString;
const line_styles = [
new Style({
fill: new Fill({
color: 'rgba(255, 255, 255, 0.2)',
}),
stroke: new Stroke({
color: 'rgba(0, 0, 0, 0.5)',
width: 2,
}),
}),
];
geometry.forEachSegment(function(start, end) {
const linestring = new LineString([start, end]);
const len = formatLength(linestring);
line_styles.push(
new Style({
geometry: linestring,
text: new Text({
fill: new Fill({ color: '#000' }),
stroke: new Stroke({
color: '#fff',
width: 2,
}),
font: '12px/1 sans-serif',
text: len,
}),
})
);
});
return line_styles;
},
zIndex: 2,
});
};
export const createDraw = (source: VectorSource) => {
return new Draw({
source: source,
type: GeometryType.LINE_STRING,
style: new Style({
fill: new Fill({
color: 'rgba(255, 255, 255, 0.2)',
}),
stroke: new Stroke({
color: 'rgba(0, 0, 0, 0.5)',
lineDash: [10, 10],
width: 2,
}),
image: new CircleStyle({
radius: 5,
stroke: new Stroke({
color: 'rgba(0, 0, 0, 0.7)',
}),
fill: new Fill({
color: 'rgba(255, 255, 255, 0.2)',
}),
}),
}),
});
};
| 29.803846 | 119 | 0.597884 | 3.140625 |
1804c2477718baa3ceecb9eff1329d670d33379d
| 10,940 |
rs
|
Rust
|
src/main.rs
|
s3mon/s3mon
|
e0ba08efa118555a0f71185fd0c8301efcff1420
|
[
"BSD-3-Clause"
] | 3 |
2019-10-18T02:59:42.000Z
|
2019-10-20T09:59:16.000Z
|
src/main.rs
|
s3mon/s3mon
|
e0ba08efa118555a0f71185fd0c8301efcff1420
|
[
"BSD-3-Clause"
] | 8 |
2019-10-03T21:02:03.000Z
|
2019-10-17T21:01:44.000Z
|
src/main.rs
|
s3mon/s3mon
|
e0ba08efa118555a0f71185fd0c8301efcff1420
|
[
"BSD-3-Clause"
] | 3 |
2019-10-10T17:14:11.000Z
|
2021-01-11T05:36:24.000Z
|
use clap::{App, Arg};
use env_logger;
use serde_yaml;
use std::sync::Arc;
use std::{process, thread};
mod auth;
mod config;
mod s3;
fn main() {
// RUST_LOG=debug
let _ = env_logger::try_init();
// cli options
let matches = App::new("s3mon")
.version(env!("CARGO_PKG_VERSION"))
.arg(
Arg::with_name("config")
.help("config.yml")
.long("config")
.short("c")
.required(false)
.value_name("FILE")
.takes_value(true)
.validator(is_file),
)
.get_matches();
// Gets a value for config if supplied by user, or defaults to "default.conf"
let config = matches.value_of("config").unwrap_or_else(|| {
eprintln!("Unable to open configuration file, use (\"-h for help\")");
process::exit(1);
});
// parse config file
let file = std::fs::File::open(&config).expect("Unable to open file");
let yml: config::Config = match serde_yaml::from_reader(file) {
Err(e) => {
eprintln!("Error parsing configuration file: {}", e);
process::exit(1);
}
Ok(yml) => yml,
};
// create an S3 Client
let s3 = match s3::Monitor::new(&yml) {
Ok(s3) => Arc::new(s3),
Err(e) => {
eprintln!("Error: {}", e);
process::exit(1);
}
};
// store all threads
let mut children = vec![];
for bucket in yml.s3mon.buckets {
let bucket_name = bucket.0.to_string();
for file in bucket.1 {
let thread_s3 = Arc::clone(&s3);
let bucket = bucket_name.clone();
children.push(thread::spawn(|| {
println!("{}", check(thread_s3, bucket, file));
}));
}
}
// Wait for all the threads to finish
for child in children {
let _ = child.join();
}
}
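// Check a single bucket/prefix and report the result as one InfluxDB line protocol record.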
fn check(s3: Arc<s3::Monitor>, bucket: String, file: config::Object) -> String {
// create InfluxDB line protocol
// https://docs.influxdata.com/influxdb/v1.7/write_protocols/line_protocol_tutorial/
let mut output: Vec<String> = Vec::new();
output.push(format!("s3mon,bucket={},prefix={}", bucket, file.prefix));
let mut exist = false;
let mut size_mismatch = false;
let mut bucket_error = false;
// query the bucket
match s3.objects(bucket, file.prefix, file.age) {
Ok(objects) => {
if !objects.is_empty() {
exist = true;
}
for o in objects {
if file.size > 0 {
if let Some(size) = o.size {
if size < file.size {
size_mismatch = true;
}
}
}
}
}
Err(e) => {
eprintln!("Error: {}", e);
bucket_error = true;
}
}
output.push(format!(
"error={}i,exist={}i,size_mismatch={}i",
bucket_error as i32, exist as i32, size_mismatch as i32,
));
output.join(" ")
}
fn is_file(s: String) -> Result<(), String> {
let metadata = match std::fs::metadata(&s) {
Err(err) => return Err(err.to_string()),
Ok(metadata) => metadata,
};
if !metadata.is_file() {
return Err(format!("cannot read file: {}", s));
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_config() -> Result<(), serde_yaml::Error> {
let yml = r#"
---
s3mon:
endpoint: endpoint
region: region
access_key: ACCESS_KEY_ID
secret_key: SECRET_ACCESS_KEY
buckets:
bucket_A:
- prefix: foo
age: 43200
- prefix: bar
- prefix: baz
size: 1024
"#;
let mut buckets = std::collections::BTreeMap::new();
buckets.insert(
"bucket_A".to_string(),
vec![
config::Object {
prefix: "foo".to_string(),
age: 43200,
size: 0,
},
config::Object {
prefix: "bar".to_string(),
age: 86400,
size: 0,
},
config::Object {
prefix: "baz".to_string(),
age: 86400,
size: 1024,
},
],
);
let cfg = config::Config {
s3mon: config::Data {
endpoint: "endpoint".to_string(),
region: "region".to_string(),
access_key: "ACCESS_KEY_ID".to_string(),
secret_key: "SECRET_ACCESS_KEY".to_string(),
buckets,
},
};
let y: config::Config = serde_yaml::from_str(yml)?;
assert_eq!(cfg, y);
Ok(())
}
#[test]
fn check_object() {
use chrono::prelude::{SecondsFormat, Utc};
use rusoto_core::Region;
use rusoto_mock::{MockCredentialsProvider, MockRequestDispatcher};
use rusoto_s3::S3Client;
let last_modified = Utc::now().to_rfc3339_opts(SecondsFormat::Millis, true);
let mock = MockRequestDispatcher::with_status(200).with_body(
format!(
r#"<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>cubeta</Name>
<Prefix>E</Prefix>
<StartAfter>ExampleGuide.pdf</StartAfter>
<KeyCount>1</KeyCount>
<MaxKeys>3</MaxKeys>
<IsTruncated>false</IsTruncated>
<Contents>
<Key>ExampleObject.txt</Key>
<LastModified>{}</LastModified>
<ETag>"599bab3ed2c697f1d26842727561fd94"</ETag>
<Size>857</Size>
<StorageClass>REDUCED_REDUNDANCY</StorageClass>
</Contents>
</ListBucketResult>
"#,
last_modified
)
.as_str(),
);
let client = Arc::new(s3::Monitor {
s3: S3Client::new_with(mock, MockCredentialsProvider, Region::UsEast1),
});
// test finding file & prefix
let file = config::Object {
prefix: "E".to_string(),
age: 30,
size: 0,
};
assert_eq!(
check(client, "cubeta".to_string(), file),
"s3mon,bucket=cubeta,prefix=E error=0i,exist=1i,size_mismatch=0i",
);
}
#[test]
fn check_object_size_mismatch() {
use chrono::prelude::{SecondsFormat, Utc};
use rusoto_core::Region;
use rusoto_mock::{MockCredentialsProvider, MockRequestDispatcher};
use rusoto_s3::S3Client;
let last_modified = Utc::now().to_rfc3339_opts(SecondsFormat::Millis, true);
let mock = MockRequestDispatcher::with_status(200).with_body(
format!(
r#"<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>cubeta</Name>
<Prefix>E</Prefix>
<StartAfter>ExampleGuide.pdf</StartAfter>
<KeyCount>1</KeyCount>
<MaxKeys>3</MaxKeys>
<IsTruncated>false</IsTruncated>
<Contents>
<Key>ExampleObject.txt</Key>
<LastModified>{}</LastModified>
<ETag>"599bab3ed2c697f1d26842727561fd94"</ETag>
<Size>857</Size>
<StorageClass>REDUCED_REDUNDANCY</StorageClass>
</Contents>
</ListBucketResult>
"#,
last_modified
)
.as_str(),
);
let client = Arc::new(s3::Monitor {
s3: S3Client::new_with(mock, MockCredentialsProvider, Region::UsEast1),
});
// test finding file & prefix
let file = config::Object {
prefix: "E".to_string(),
age: 30,
size: 1024,
};
assert_eq!(
check(client, "cubeta".to_string(), file),
"s3mon,bucket=cubeta,prefix=E error=0i,exist=1i,size_mismatch=1i",
);
}
#[test]
fn check_object_age_expired() {
use rusoto_core::Region;
use rusoto_mock::{MockCredentialsProvider, MockRequestDispatcher};
use rusoto_s3::S3Client;
let mock = MockRequestDispatcher::with_status(200).with_body(
r#"<?xml version="1.0" encoding="UTF-8"?>
<ListBucketResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Name>cubeta</Name>
<Prefix>E</Prefix>
<StartAfter>ExampleGuide.pdf</StartAfter>
<KeyCount>1</KeyCount>
<MaxKeys>3</MaxKeys>
<IsTruncated>false</IsTruncated>
<Contents>
<Key>ExampleObject.txt</Key>
<LastModified>2019-10-14T08:52:23.231Z</LastModified>
<ETag>"599bab3ed2c697f1d26842727561fd94"</ETag>
<Size>857</Size>
<StorageClass>REDUCED_REDUNDANCY</StorageClass>
</Contents>
</ListBucketResult>
"#,
);
let client = Arc::new(s3::Monitor {
s3: S3Client::new_with(mock, MockCredentialsProvider, Region::UsEast1),
});
// test finding file & prefix
let file = config::Object {
prefix: "E".to_string(),
age: 30,
size: 1024,
};
assert_eq!(
check(client, "cubeta".to_string(), file),
"s3mon,bucket=cubeta,prefix=E error=0i,exist=0i,size_mismatch=0i",
);
}
#[test]
fn check_object_no_bucket() {
use rusoto_core::Region;
use rusoto_mock::{MockCredentialsProvider, MockRequestDispatcher};
use rusoto_s3::S3Client;
let mock = MockRequestDispatcher::with_status(404).with_body(
r#"<?xml version="1.0" encoding="UTF-8"?>
<Error>
<Code>NoSuchBucket</Code>
<Message>The specified bucket does not exist</Message>
<RequestId>4442587FB7D0A2F9</RequestId>
</Error>"#,
);
let client = Arc::new(s3::Monitor {
s3: S3Client::new_with(mock, MockCredentialsProvider, Region::UsEast1),
});
// test finding file & prefix
let file = config::Object {
prefix: "E".to_string(),
age: 30,
size: 512,
};
assert_eq!(
check(client, "cubeta".to_string(), file),
"s3mon,bucket=cubeta,prefix=E error=1i,exist=0i,size_mismatch=0i",
);
}
}
| 31.618497 | 88 | 0.503382 | 3.21875 |
85a0428ed36a9c08b697287c604b9fb77acfdaaa
| 2,498 |
js
|
JavaScript
|
src/sedan.js
|
CIS580/frogger-lbreck93
|
11c547360973dd6de78ae1faef06cfd9d9f86786
|
[
"CC-BY-2.0"
] | null | null | null |
src/sedan.js
|
CIS580/frogger-lbreck93
|
11c547360973dd6de78ae1faef06cfd9d9f86786
|
[
"CC-BY-2.0"
] | null | null | null |
src/sedan.js
|
CIS580/frogger-lbreck93
|
11c547360973dd6de78ae1faef06cfd9d9f86786
|
[
"CC-BY-2.0"
] | null | null | null |
"use strict";
const MS_PER_FRAME = 1000 / 8;
/**
 * @module exports the Sedan class
*/
module.exports = exports = Sedan;
/**
 * @constructor Sedan
 * Creates a new sedan object
 * @param {Position} position object specifying an x and y
*/
function Sedan(position) {
this.state = "drive";
this.type = 'hostile';
this.row = position.row;
this.direction = Math.round(Math.random() * (1));
this.width = 64;
this.height = 64;
this.spritesheet = new Image();
this.ground = new Image();
this.ground.src = encodeURI('assets/tex_road.jpg');
this.x = 64*this.row;
if (this.direction == 0){
this.spritesheet.src = encodeURI('assets/TRBRYcars [Converted] sedan.png');
this.y = position.cavasHeight + 25;
this.resty = position.cavasHeight + 25;
}
else{
this.y = -50;
this.resty = -50;
this.spritesheet.src = encodeURI('assets/TRBRYcars [Converted] sedan-Reversed.png');
}
this.timer = 0;
this.frame = 0;
this.speed = Math.round(Math.random() * (2 - 1) + 1);
var self = this;
}
/**
 * @function updates the sedan object
* {DOMHighResTimeStamp} time the elapsed time since the last frame
*/
Sedan.prototype.update = function (time, canvas) {
// console.log(this.row, this.x, this.y);
switch (this.direction) {
case 0:
if (this.y < 430)
{
this.y+=this.speed;
}
else{
this.y = -20;
}
break;
case 1:
if ((-25 - this.height) < this.y)
{
this.y-=this.speed;
}
else{
this.y = canvas.height + 25;
}
break;
}
};
/**
 * @function renders the sedan into the provided context
* {DOMHighResTimeStamp} time the elapsed time since the last frame
* {CanvasRenderingContext2D} ctx the context to render into
*/
Sedan.prototype.render = function(time, ctx, canvas) {
//rendering too much i think.
ctx.strokeStyle = 'red';
ctx.strokeRect(this.x, this.y, this.width, this.height);
ctx.drawImage(this.ground,
this.row*64, 0, this.width, canvas.height);
ctx.drawImage(
// image
this.spritesheet,
// source rectangle
0, 0, this.spritesheet.width, this.spritesheet.height,
this.x, this.y, this.width, this.height
);
};
Sedan.prototype.reset = function(){
    this.y = this.resty;
};
| 26.574468 | 96 | 0.573259 | 3.109375 |
e700a2a12b453bc1cfb73ec2fd13af1fdcd9f820
| 4,065 |
swift
|
Swift
|
Sources/LSFoundation/String+LSFoundation.swift
|
hisaac/LSFoundation
|
483447cc6ebf189e963c738671913e5a649e0856
|
[
"MIT"
] | null | null | null |
Sources/LSFoundation/String+LSFoundation.swift
|
hisaac/LSFoundation
|
483447cc6ebf189e963c738671913e5a649e0856
|
[
"MIT"
] | null | null | null |
Sources/LSFoundation/String+LSFoundation.swift
|
hisaac/LSFoundation
|
483447cc6ebf189e963c738671913e5a649e0856
|
[
"MIT"
] | null | null | null |
//
// String+LSFoundation.swift
// LSFoundation
//
import Foundation
public extension String {
/// Convenience accessor for `NSString`'s `lastPathComponent` property
var lastPathComponent: String {
return NSString(string: self).lastPathComponent
}
/// Checks if a string does not contain another string
/// - Parameter other: The other element to check for
/// - Returns: True if the string is not contained within the other string
func doesNotContain<T>(_ other: T) -> Bool where T: StringProtocol {
return self.contains(other).toggled
}
/// Checks if the current string contains any of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if any of the given strings are contained within the current string
func containsAny<T>(of other: T...) -> Bool where T: StringProtocol {
return containsAny(of: other)
}
/// Checks if the current string contains any of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if any of the given strings are contained within the current string
func containsAny<T>(of other: [T]) -> Bool where T: StringProtocol {
for element in other {
if self.contains(element) {
return true
}
}
return false
}
/// Checks if the current string contains all of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if all of the given strings are contained within the current string
func containsAll<T>(of other: T...) -> Bool where T: StringProtocol {
return containsAll(of: other)
}
/// Checks if the current string contains all of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if all of the given strings are contained within the current string
func containsAll<T>(of other: [T]) -> Bool where T: StringProtocol {
for element in other {
if self.doesNotContain(element) {
return false
}
}
return true
}
/// Checks if the current string does not contain any of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if none of the given strings are contained within the current string
func containsNone<T>(of other: T...) -> Bool where T: StringProtocol {
return containsNone(of: other)
}
/// Checks if the current string does not contain any of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if none of the given strings are contained within the current string
func containsNone<T>(of other: [T]) -> Bool where T: StringProtocol {
for element in other {
if self.contains(element) {
return false
}
}
return true
}
/// Checks if the current string ends with any of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if the current string ends with any of the given strings
func endsWithAny<T>(of other: T...) -> Bool where T: StringProtocol {
return endsWithAny(of: other)
}
/// Checks if the current string ends with any of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if the current string ends with any of the given strings
func endsWithAny<T>(of other: [T]) -> Bool where T: StringProtocol {
for element in other {
if self.hasSuffix(element) {
return true
}
}
return false
}
/// Checks if the current string starts with any of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if the current string starts with any of the given strings
func startsWithAny<T>(of other: T...) -> Bool where T: StringProtocol {
        return startsWithAny(of: other)
}
/// Checks if the current string starts with any of the given strings
/// - Parameter other: The other strings to check for
/// - Returns: True if the current string starts with any of the given strings
func startsWithAny<T>(of other: [T]) -> Bool where T: StringProtocol {
for element in other {
if self.hasPrefix(element) {
return true
}
}
return false
}
}
| 34.74359 | 89 | 0.709471 | 3.46875 |
a40ae1c4103f01ac16e72de5e0b7e1da76d974a3
| 3,280 |
kt
|
Kotlin
|
app/src/test/java/com/cliffracertech/bootycrate/BottomNavigationDrawerTests.kt
|
NicholasHochstetler/StuffCrate
|
5b429d0528c902b852f47bc8759d3cd75c2d29f7
|
[
"Apache-2.0"
] | 15 |
2021-09-12T14:48:25.000Z
|
2022-01-29T17:37:13.000Z
|
app/src/test/java/com/cliffracertech/bootycrate/BottomNavigationDrawerTests.kt
|
NicholasHochstetler/BootyCrate
|
5b429d0528c902b852f47bc8759d3cd75c2d29f7
|
[
"Apache-2.0"
] | null | null | null |
app/src/test/java/com/cliffracertech/bootycrate/BottomNavigationDrawerTests.kt
|
NicholasHochstetler/BootyCrate
|
5b429d0528c902b852f47bc8759d3cd75c2d29f7
|
[
"Apache-2.0"
] | null | null | null |
/* Copyright 2021 Nicholas Hochstetler
* You may not use this file except in compliance with the Apache License
* Version 2.0, obtainable at http://www.apache.org/licenses/LICENSE-2.0
* or in the file LICENSE in the project's root directory. */
package com.cliffracertech.bootycrate
import android.content.Context
import android.graphics.Rect
import androidx.core.view.doOnNextLayout
import androidx.fragment.app.FragmentActivity
import androidx.test.core.app.ApplicationProvider
import com.cliffracertech.bootycrate.utils.dpToPixels
import com.cliffracertech.bootycrate.view.BottomNavigationDrawer
import com.google.common.truth.Truth.assertThat
import org.junit.Test
import org.junit.runner.RunWith
import org.robolectric.Robolectric
import org.robolectric.RobolectricTestRunner
@RunWith(RobolectricTestRunner::class)
class BottomNavigationDrawerTests {
private val context = ApplicationProvider.getApplicationContext<Context>()
private val rect = Rect()
//private lateinit var instance: BottomNavigationDrawer
//private fun waitForAnimationsToFinish() = Shadows.shadowOf(Looper.getMainLooper()).idle()
private fun instance(vararg attrs: Pair<Int, String>): BottomNavigationDrawer {
val activity = Robolectric.buildActivity(FragmentActivity::class.java).create().get()
val attrSet = Robolectric.buildAttributeSet()
for (attr in attrs)
attrSet.addAttribute(attr.first, attr.second)
return BottomNavigationDrawer(activity, attrSet.build())
}
@Test fun initialPeekHeight() {
instance(Pair(R.attr.behavior_peekHeight, "50dp")).doOnNextLayout {
it.getHitRect(rect)
assertThat(rect.height()).isEqualTo(context.resources.dpToPixels(50f))
}
}
@Test fun expandedHeight() {
instance().doOnNextLayout {
(it as BottomNavigationDrawer).expand()
it.getHitRect(rect)
assertThat(rect.height()).isEqualTo(it.height)
}
}
@Test fun isHideableXMLvalues() {
var instance = instance()
assertThat(instance.isHideable).isEqualTo(BottomNavigationDrawer.IsHideable.Yes)
for (value in BottomNavigationDrawer.IsHideable.values()) {
instance = instance(Pair(R.attr.isHideable, value.ordinal.toString()))
assertThat(instance.isHideable).isEqualTo(value)
}
}
@Test fun expandCollapseHideShow() {
val instance = instance()
assertThat(instance.isCollapsed).isTrue()
instance.expand()
assertThat(instance.isExpanded).isTrue()
instance.collapse()
assertThat(instance.isCollapsed).isTrue()
instance.hide()
assertThat(instance.isHidden).isTrue()
instance.show()
assertThat(instance.isHidden).isFalse()
assertThat(instance.isCollapsed).isTrue()
}
@Test fun isHideableValues() {
var instance = instance(Pair(R.attr.isHideable, BottomNavigationDrawer.IsHideable.No.ordinal.toString()))
instance.hide()
assertThat(instance.isHidden).isFalse()
instance = instance(Pair(R.attr.isHideable, BottomNavigationDrawer.IsHideable.OnlyByApp.ordinal.toString()))
instance.hide()
assertThat(instance.isHidden).isTrue()
}
}
| 37.701149 | 116 | 0.7125 | 3 |
f0238d97d920682e53df77bf6d0427a081fe7819
| 7,980 |
py
|
Python
|
untiler/__init__.py
|
waissbluth/untiler
|
866b3096196ac340597f77fbf5f2ce899e58238e
|
[
"MIT"
] | 37 |
2015-10-06T16:41:18.000Z
|
2022-03-22T14:52:13.000Z
|
untiler/__init__.py
|
waissbluth/untiler
|
866b3096196ac340597f77fbf5f2ce899e58238e
|
[
"MIT"
] | 18 |
2015-09-02T21:13:44.000Z
|
2021-01-04T15:46:04.000Z
|
untiler/__init__.py
|
waissbluth/untiler
|
866b3096196ac340597f77fbf5f2ce899e58238e
|
[
"MIT"
] | 8 |
2017-04-12T01:22:36.000Z
|
2021-08-17T04:10:46.000Z
|
#!/usr/bin/env python
from __future__ import with_statement
from __future__ import print_function
from __future__ import division
import os
from multiprocessing import Pool
import click
import mercantile as merc
import numpy as np
import rasterio
from rasterio import Affine
from rasterio.warp import reproject
try:
from rasterio.warp import RESAMPLING as Resampling # pre-1.0
except ImportError:
from rasterio.warp import Resampling
import untiler.scripts.tile_utils as tile_utils
def make_affine(height, width, ul, lr):
"""
Create an affine for a tile of a given size
"""
xCell = (ul[0] - lr[0]) / width
yCell = (ul[1] - lr[1]) / height
return Affine(-xCell, 0.0, ul[0],
0.0, -yCell, ul[1])
def affaux(up):
return Affine(1, 0, 0, 0, -1, 0), Affine(up, 0, 0, 0, -up, 0)
def upsample(rgb, up, fr, to):
up_rgb = np.empty((rgb.shape[0], rgb.shape[1] * up, rgb.shape[2] * up), dtype=rgb.dtype)
reproject(
rgb, up_rgb,
src_transform=fr,
dst_transform=to,
src_crs="EPSG:3857",
dst_crs="EPSG:3857",
resampling=Resampling.bilinear)
return up_rgb
def make_src_meta(bounds, size, creation_opts={}):
"""
Create metadata for output tiles
"""
ul = merc.xy(bounds.west, bounds.north)
lr = merc.xy(bounds.east, bounds.south)
aff = make_affine(size, size, ul, lr)
## default values
src_meta = {
'driver': 'GTiff',
'height': size,
'width': size,
'count': 4,
'dtype': np.uint8,
'affine': aff,
"crs": 'EPSG:3857',
'compress': 'JPEG',
'tiled': True,
'blockxsize': 256,
'blockysize': 256
}
for c in creation_opts.keys():
src_meta[c] = creation_opts[c]
return src_meta
def make_window(x, y, xmin, ymin, windowsize):
"""
Create a window for writing a child tile to a parent output tif
"""
if x < xmin or y < ymin:
raise ValueError("Indices can't be smaller than origin")
row = (y - ymin) * windowsize
col = (x - xmin) * windowsize
return (
(row, row + windowsize),
(col, col + windowsize)
)
globalArgs = None
def make_image_array(imdata, outputSize):
try:
depth, width, height = imdata.shape
if depth == 4:
alpha = imdata[3]
else:
alpha = np.zeros((outputSize, outputSize), dtype=np.uint8) + 255
return np.array([
imdata[0 % depth, :, :],
imdata[1 % depth, :, :],
imdata[2 % depth, :, :],
alpha
])
except Exception as e:
raise e
def load_image_data(imdata, outputSize):
imsize, depth = imdata.shape
if int(np.sqrt(imsize)) != outputSize:
raise ValueError("Output size of %s ** 2 does not equal %s" % (outputSize, imsize))
return imdata.reshape(outputSize, outputSize, depth).astype(np.uint8), imsize, depth
def global_setup(inputDir, args):
global globalArgs
globalArgs = args
def logwriter(openLogFile, writeObj):
if openLogFile:
print(writeObj, file=openLogFile)
return
def streaming_tile_worker(data):
size = 2 ** (data['zMax'] - globalArgs['compositezoom']) * globalArgs['tileResolution']
out_meta = make_src_meta(merc.bounds(data['x'], data['y'], data['z']), size, globalArgs['creation_opts'])
z, x, y = [int(i) for i in (data['z'], data['x'], data['y'])]
filename = globalArgs['sceneTemplate'] % (z, x, y)
subtiler = tile_utils.TileUtils()
log = 'FILE: %s\n' % filename
try:
with rasterio.open(filename, 'w', **out_meta) as dst:
if data['zMaxCov']:
superTiles = subtiler.get_super_tiles(data['zMaxTiles'], data['zMaxCov'])
fillbaseX, fillbaseY = subtiler.get_sub_base_zoom(data['x'], data['y'], data['z'], data['zMaxCov'])
## fill threshold == the number of sub tiles that would have to be present in a fill tile for it not to be filled (i.e. the tile is already completely covered)
fThresh = 4 ** (data['zMax'] - data['zMaxCov'])
fDiff = 2 ** (data['zMax'] - data['zMaxCov'])
toFaux, frFaux = affaux(fDiff)
if not globalArgs['no_fill']:
print('filling')
## Read and write the fill tiles first
for t in subtiler.get_fill_super_tiles(superTiles, data['maxCovTiles'], fThresh):
z, x, y = [int(i) for i in t]
path = globalArgs['readTemplate'] % (z, x, y)
log += '%s %s %s\n' % (z, x, y)
with rasterio.open(path) as src:
imdata = src.read()
imdata = make_image_array(imdata, globalArgs['tileResolution'])
imdata = upsample(imdata, fDiff, frFaux, toFaux)
window = make_window(x, y, fillbaseX, fillbaseY, globalArgs['tileResolution'] * fDiff)
dst.write(imdata, window=window)
baseX, baseY = subtiler.get_sub_base_zoom(data['x'], data['y'], data['z'], data['zMax'])
for t in data['zMaxTiles']:
z, x, y = [int(i) for i in t]
path = globalArgs['readTemplate'] % (z, x, y)
log += '%s %s %s\n' % (z, x, y)
with rasterio.open(path) as src:
imdata = src.read()
imdata = make_image_array(imdata, globalArgs['tileResolution'])
window = make_window(x, y, baseX, baseY, globalArgs['tileResolution'])
dst.write(imdata, window=window)
if globalArgs['logdir']:
with open(os.path.join(globalArgs['logdir'], '%s.log' % os.path.basename(filename)), 'w') as logger:
logwriter(logger, log)
return filename
except Exception as e:
click.echo("%s errored" % (path), err=True)
raise e
def inspect_dir(inputDir, zoom, read_template):
tiler = tile_utils.TileUtils()
allFiles = tiler.search_dir(inputDir)
template, readTemplate, separator = tile_utils.parse_template("%s/%s" % (inputDir, read_template))
allTiles = np.array([i for i in tiler.get_tiles(allFiles, template, separator)])
allTiles, _, _, _, _ = tiler.select_tiles(allTiles, zoom)
for t in allTiles:
z, x, y = t
click.echo([x, y, z])
def stream_dir(inputDir, outputDir, compositezoom, maxzoom, logdir, read_template, scene_template, workers, creation_opts, no_fill, tile_resolution=256):
tiler = tile_utils.TileUtils()
allFiles = tiler.search_dir(inputDir)
template, readTemplate, separator = tile_utils.parse_template("%s/%s" % (inputDir, read_template))
allTiles = np.array([i for i in tiler.get_tiles(allFiles, template, separator)])
if allTiles.shape[0] == 0 or allTiles.shape[1] != 3:
raise ValueError("No tiles were found for that template")
if maxzoom:
allTiles = tiler.filter_tiles(allTiles, maxzoom)
if allTiles.shape[0] == 0:
raise ValueError("No tiles were found below that maxzoom")
_, sceneTemplate, _ = tile_utils.parse_template("%s/%s" % (outputDir, scene_template))
pool = Pool(workers, global_setup, (inputDir, {
'maxzoom': maxzoom,
'readTemplate': readTemplate,
'outputDir': outputDir,
'tileResolution': tile_resolution,
'compositezoom': compositezoom,
'fileTemplate': '%s/%s_%s_%s_%s.tif',
'sceneTemplate': sceneTemplate,
'logdir': logdir,
'creation_opts': creation_opts,
'no_fill': no_fill
}))
superTiles = tiler.get_super_tiles(allTiles, compositezoom)
for p in pool.imap_unordered(streaming_tile_worker, tiler.get_sub_tiles(allTiles, superTiles)):
click.echo(p)
pool.close()
pool.join()
if __name__ == "__main__":
stream_dir()
inspect_dir()
| 30.113208 | 153 | 0.590977 | 3.0625 |
0cae04c95140cd33bca1362795247caf69458f47
| 9,770 |
py
|
Python
|
fugue/column/functions.py
|
kvnkho/fugue
|
5f3fe8f1fb72632e5b5987d720c1d1ef546e4682
|
[
"Apache-2.0"
] | 547 |
2020-09-22T08:30:14.000Z
|
2022-03-30T23:11:05.000Z
|
fugue/column/functions.py
|
kvnkho/fugue
|
5f3fe8f1fb72632e5b5987d720c1d1ef546e4682
|
[
"Apache-2.0"
] | 196 |
2020-09-22T23:08:26.000Z
|
2022-03-26T21:22:48.000Z
|
fugue/column/functions.py
|
kvnkho/fugue
|
5f3fe8f1fb72632e5b5987d720c1d1ef546e4682
|
[
"Apache-2.0"
] | 37 |
2020-09-23T17:05:00.000Z
|
2022-03-29T18:26:52.000Z
|
from typing import Any, Optional
import pyarrow as pa
from fugue.column.expressions import (
ColumnExpr,
_FuncExpr,
_to_col,
function,
)
from triad import Schema
def coalesce(*args: Any) -> ColumnExpr:
"""SQL ``COALESCE`` function
:param args: If a value is not :class:`~fugue.column.expressions.ColumnExpr`
then it's converted to a literal column by
:func:`~fugue.column.expressions.col`
.. note::
this function can infer neither type nor alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
f.coalesce(col("a"), col("b")+col("c"), 1)
"""
return function("COALESCE", *[_to_col(x) for x in args])
def min(col: ColumnExpr) -> ColumnExpr: # pylint: disable=redefined-builtin
"""SQL ``MIN`` function (aggregation)
:param col: the column to find min
.. note::
* this function can infer type from ``col`` type
* this function can infer alias from ``col``'s inferred alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
# assume col a has type double
f.min(col("a")) # CAST(MIN(a) AS double) AS a
f.min(-col("a")) # CAST(MIN(-a) AS double) AS a
# neither type nor alias can be inferred in the following cases
f.min(col("a")+1)
f.min(col("a")+col("b"))
# you can specify explicitly
# CAST(MIN(a+b) AS int) AS x
f.min(col("a")+col("b")).cast(int).alias("x")
"""
assert isinstance(col, ColumnExpr)
return _SameTypeUnaryAggFuncExpr("MIN", col)
def max(col: ColumnExpr) -> ColumnExpr: # pylint: disable=redefined-builtin
"""SQL ``MAX`` function (aggregation)
:param col: the column to find max
.. note::
* this function can infer type from ``col`` type
* this function can infer alias from ``col``'s inferred alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
# assume col a has type double
f.max(col("a")) # CAST(MAX(a) AS double) AS a
f.max(-col("a")) # CAST(MAX(-a) AS double) AS a
# neither type nor alias can be inferred in the following cases
f.max(col("a")+1)
f.max(col("a")+col("b"))
# you can specify explicitly
# CAST(MAX(a+b) AS int) AS x
f.max(col("a")+col("b")).cast(int).alias("x")
"""
assert isinstance(col, ColumnExpr)
return _SameTypeUnaryAggFuncExpr("MAX", col)
def count(col: ColumnExpr) -> ColumnExpr:
"""SQL ``COUNT`` function (aggregation)
:param col: the column to find count
.. note::
* this function cannot infer type from ``col`` type
* this function can infer alias from ``col``'s inferred alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
f.count(col("*")) # COUNT(*)
f.count(col("a")) # COUNT(a) AS a
# you can specify explicitly
# CAST(COUNT(a) AS double) AS a
f.count(col("a")).cast(float)
"""
assert isinstance(col, ColumnExpr)
return _UnaryAggFuncExpr("COUNT", col)
def count_distinct(col: ColumnExpr) -> ColumnExpr:
"""SQL ``COUNT DISTINCT`` function (aggregation)
:param col: the column to find distinct element count
.. note::
* this function cannot infer type from ``col`` type
* this function can infer alias from ``col``'s inferred alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
f.count_distinct(col("*")) # COUNT(DISTINCT *)
f.count_distinct(col("a")) # COUNT(DISTINCT a) AS a
# you can specify explicitly
# CAST(COUNT(DISTINCT a) AS double) AS a
f.count_distinct(col("a")).cast(float)
"""
assert isinstance(col, ColumnExpr)
return _UnaryAggFuncExpr("COUNT", col, arg_distinct=True)
def avg(col: ColumnExpr) -> ColumnExpr:
"""SQL ``AVG`` function (aggregation)
:param col: the column to find average
.. note::
* this function cannot infer type from ``col`` type
* this function can infer alias from ``col``'s inferred alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
f.avg(col("a")) # AVG(a) AS a
# you can specify explicitly
# CAST(AVG(a) AS double) AS a
f.avg(col("a")).cast(float)
"""
assert isinstance(col, ColumnExpr)
return _UnaryAggFuncExpr("AVG", col)
def sum(col: ColumnExpr) -> ColumnExpr: # pylint: disable=redefined-builtin
"""SQL ``SUM`` function (aggregation)
:param col: the column to find sum
.. note::
* this function cannot infer type from ``col`` type
* this function can infer alias from ``col``'s inferred alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
f.sum(col("a")) # SUM(a) AS a
# you can specify explicitly
# CAST(SUM(a) AS double) AS a
f.sum(col("a")).cast(float)
"""
assert isinstance(col, ColumnExpr)
return _UnaryAggFuncExpr("SUM", col)
def first(col: ColumnExpr) -> ColumnExpr:
"""SQL ``FIRST`` function (aggregation)
:param col: the column to find first
.. note::
* this function can infer type from ``col`` type
* this function can infer alias from ``col``'s inferred alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
# assume col a has type double
f.first(col("a")) # CAST(FIRST(a) AS double) AS a
f.first(-col("a")) # CAST(FIRST(-a) AS double) AS a
# neither type nor alias can be inferred in the following cases
f.first(col("a")+1)
f.first(col("a")+col("b"))
# you can specify explicitly
# CAST(FIRST(a+b) AS int) AS x
f.first(col("a")+col("b")).cast(int).alias("x")
"""
assert isinstance(col, ColumnExpr)
return _SameTypeUnaryAggFuncExpr("FIRST", col)
def last(col: ColumnExpr) -> ColumnExpr:
"""SQL ``LAST`` function (aggregation)
:param col: the column to find last
.. note::
* this function can infer type from ``col`` type
* this function can infer alias from ``col``'s inferred alias
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
# assume col a has type double
f.last(col("a")) # CAST(LAST(a) AS double) AS a
f.last(-col("a")) # CAST(LAST(-a) AS double) AS a
# neither type nor alias can be inferred in the following cases
f.last(col("a")+1)
f.last(col("a")+col("b"))
# you can specify explicitly
# CAST(LAST(a+b) AS int) AS x
f.last(col("a")+col("b")).cast(int).alias("x")
"""
assert isinstance(col, ColumnExpr)
return _SameTypeUnaryAggFuncExpr("LAST", col)
def is_agg(column: Any) -> bool:
"""Check if a column contains aggregation operation
:param col: the column to check
:return: whether the column is :class:`~fugue.column.expressions.ColumnExpr`
and contains aggregation operations
.. admonition:: New Since
:class: hint
**0.6.0**
.. admonition:: Examples
.. code-block:: python
import fugue.column.functions as f
assert not f.is_agg(1)
assert not f.is_agg(col("a"))
assert not f.is_agg(col("a")+lit(1))
assert f.is_agg(f.max(col("a")))
assert f.is_agg(-f.max(col("a")))
assert f.is_agg(f.max(col("a")+1))
assert f.is_agg(f.max(col("a"))+f.min(col("a")))
"""
if isinstance(column, _UnaryAggFuncExpr):
return True
if isinstance(column, _FuncExpr):
return any(is_agg(x) for x in column.args) or any(
is_agg(x) for x in column.kwargs.values()
)
return False
class _UnaryAggFuncExpr(_FuncExpr):
def __init__(self, func: str, col: ColumnExpr, arg_distinct: bool = False):
super().__init__(func, col, arg_distinct=arg_distinct)
def infer_alias(self) -> ColumnExpr:
return (
self
if self.output_name != ""
else self.alias(self.args[0].infer_alias().output_name)
)
def _copy(self) -> _FuncExpr:
return _UnaryAggFuncExpr(self.func, *self.args, **self.kwargs)
class _SameTypeUnaryAggFuncExpr(_UnaryAggFuncExpr):
def _copy(self) -> _FuncExpr:
return _SameTypeUnaryAggFuncExpr(self.func, *self.args, **self.kwargs)
def infer_type(self, schema: Schema) -> Optional[pa.DataType]:
return self.as_type or self.args[0].infer_type(schema)
| 26.334232 | 80 | 0.572467 | 3.21875 |
abb17848a555fba18a51ef580238749d4287a090
| 976 |
rb
|
Ruby
|
test/unit/lock_file_test.rb
|
jsgarvin/admit_one
|
42eee744ee8744d5f41fd6059bac69d4202077f9
|
[
"MIT"
] | 1 |
2016-11-04T23:28:07.000Z
|
2016-11-04T23:28:07.000Z
|
test/unit/lock_file_test.rb
|
jsgarvin/admit_one
|
42eee744ee8744d5f41fd6059bac69d4202077f9
|
[
"MIT"
] | null | null | null |
test/unit/lock_file_test.rb
|
jsgarvin/admit_one
|
42eee744ee8744d5f41fd6059bac69d4202077f9
|
[
"MIT"
] | null | null | null |
require File.expand_path('../../../lib/admit_one', __FILE__)
require 'test/unit'
class LockFileTest < Test::Unit::TestCase
def setup
File.delete(lock_file_path) if File.exist?(lock_file_path)
end
def test_should_create_and_remove_lockfile
block_executed = false
AdmitOne::LockFile.new(:admit_one_lock_file_unit_test) do
block_executed = true
assert(File.exist?(lock_file_path))
end
assert(block_executed)
assert(!File.exist?(lock_file_path))
end
def test_should_not_clobber_another_lock_file
File.open(lock_file_path, "a") { |file| file.write("1\n") }
assert_raise(AdmitOne::LockFailure) do
AdmitOne::LockFile.new(:admit_one_lock_file_unit_test) do
assert false #should never run
end
end
assert(File.exist?(lock_file_path))
File.delete(lock_file_path)
end
#######
private
#######
def lock_file_path
"#{Dir.tmpdir}/admit_one_lock_file_unit_test.lock"
end
end
| 25.684211 | 63 | 0.702869 | 3 |
0c960c32123fe98899d1aea36a071118d99135d2
| 5,654 |
py
|
Python
|
nemo/collections/nlp/utils/evaluation_utils.py
|
ParikhKadam/NeMo
|
ee11f7c4666d410d91f9da33c61f4819ea625013
|
[
"Apache-2.0"
] | 1 |
2020-08-04T08:29:41.000Z
|
2020-08-04T08:29:41.000Z
|
nemo/collections/nlp/utils/evaluation_utils.py
|
ParikhKadam/NeMo
|
ee11f7c4666d410d91f9da33c61f4819ea625013
|
[
"Apache-2.0"
] | 1 |
2020-06-11T00:54:42.000Z
|
2020-06-11T00:54:42.000Z
|
nemo/collections/nlp/utils/evaluation_utils.py
|
ParikhKadam/NeMo
|
ee11f7c4666d410d91f9da33c61f4819ea625013
|
[
"Apache-2.0"
] | 3 |
2020-03-10T05:10:07.000Z
|
2020-12-08T01:33:35.000Z
|
# =============================================================================
# Copyright 2020 NVIDIA. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import numpy as np
from nemo import logging
def analyze_confusion_matrix(cm, dict, max_pairs=10):
"""
Sort all confusions in the confusion matrix by value and display results.
Print results in a format: (name -> name, value)
Args:
cm: Confusion matrix
dict: Dictionary with key as a name and index as a value (Intents or Slots)
max_pairs: Max number of confusions to print
"""
threshold = 5  # arbitrary cutoff: only report confusions that occur at least this many times
confused_pairs = {}
size = cm.shape[0]
for i in range(size):
res = cm[i].argsort()
for j in range(size):
pos = res[size - j - 1]
# no confusion - same row and column
if pos == i:
continue
elif cm[i][pos] >= threshold:
str = f'{dict[i]} -> {dict[pos]}'
confused_pairs[str] = cm[i][pos]
else:
break
# sort by max confusions and print first max_pairs
sorted_confused_pairs = sorted(confused_pairs.items(), key=lambda x: x[1], reverse=True)
for i, pair_str in enumerate(sorted_confused_pairs):
if i >= max_pairs:
break
logging.info(pair_str)
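def _example_analyze_confusion_matrix():
    # Illustrative sketch (not part of the original module): class 0 is
    # predicted as class 1 seven times, which is above the internal threshold
    # of 5, so the pair ('intent_a -> intent_b', 7) gets logged.
    cm = np.array([[10, 7], [2, 20]])
    analyze_confusion_matrix(cm, {0: 'intent_a', 1: 'intent_b'})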
def errors_per_class(cm, dict):
"""
Summarize confusions per each class in the confusion matrix.
It can be useful both for Intents and Slots.
It counts each confusion twice in both directions.
Args:
cm: Confusion matrix
dict: Dictionary with key as a name and index as a value (Intents or Slots)
"""
size = cm.shape[0]
confused_per_class = {}
total_errors = 0
for class_num in range(size):
sum = 0
for i in range(size):
if i != class_num:
sum += cm[class_num][i]
sum += cm[i][class_num]
confused_per_class[dict[class_num]] = sum
total_errors += sum
# logging.info(f'{dict[class_num]} - {sum}')
logging.info(f'Total errors (multiplied by 2): {total_errors}')
sorted_confused_per_class = sorted(confused_per_class.items(), key=lambda x: x[1], reverse=True)
for conf_str in sorted_confused_per_class:
logging.info(conf_str)
def log_misclassified_queries(intent_labels, intent_preds, queries, intent_dict, limit=50):
"""
Display examples of Intent mistakes.
In the format: query, predicted and labeled intent names.
"""
logging.info(f'*** Misclassified intent queries (limit {limit}) ***')
cnt = 0
for i in range(len(intent_preds)):
if intent_labels[i] != intent_preds[i]:
query = queries[i].split('\t')[0]
logging.info(
f'{query} (predicted: {intent_dict[intent_preds[i]]} - labeled: {intent_dict[intent_labels[i]]})'
)
cnt = cnt + 1
if cnt >= limit:
break
def log_misclassified_slots(
intent_labels, intent_preds, slot_labels, slot_preds, subtokens_mask, queries, intent_dict, slot_dict, limit=50
):
"""
Display examples of Slot mistakes.
In the format: query, predicted and labeled intent names, and the lists of predicted and labeled slot numbers.
Also prints the slot dictionary at the start for easier reading.
"""
logging.info('')
logging.info(f'*** Misclassified slots queries (limit {limit}) ***')
# print slot dictionary
logging.info(f'Slot dictionary:')
str = ''
for i, slot in enumerate(slot_dict):
str += f'{i} - {slot}, '
if i % 5 == 4 or i == len(slot_dict) - 1:
logging.info(str)
str = ''
logging.info('----------------')
cnt = 0
for i in range(len(intent_preds)):
cur_slot_pred = slot_preds[i][subtokens_mask[i]]
cur_slot_label = slot_labels[i][subtokens_mask[i]]
if not np.all(cur_slot_pred == cur_slot_label):
query = queries[i].split('\t')[0]
logging.info(
f'{query} (predicted: {intent_dict[intent_preds[i]]} - labeled: {intent_dict[intent_labels[i]]})'
)
logging.info(f'p: {cur_slot_pred}')
logging.info(f'l: {cur_slot_label}')
cnt = cnt + 1
if cnt >= limit:
break
def check_problematic_slots(slot_preds_list, slot_dict):
""" Check non compliance of B- and I- slots for datasets that use such slot encoding. """
cnt = 0
# for sentence in slot_preds:
# slots = sentence.split(" ")
sentence = slot_preds_list
for i in range(len(sentence)):
slot_name = slot_dict[int(sentence[i])]
if slot_name.startswith("I-"):
prev_slot_name = slot_dict[int(sentence[i - 1])]
if slot_name[2:] != prev_slot_name[2:]:
print("Problem: " + slot_name + " - " + prev_slot_name)
cnt += 1
print("Total problematic slots: " + str(cnt))
| 37.197368 | 115 | 0.598161 | 3.1875 |
0a6ba4f18a26f53372dfc8e94a0c8335caaa8abd
| 1,292 |
sql
|
SQL
|
recipes/tables/rainfall.sql
|
svetasmirnova/mysqlcookbook
|
8cb370b9b91ef35f4654b774bac019e2b636ac67
|
[
"CC0-1.0"
] | 1 |
2022-03-01T16:45:38.000Z
|
2022-03-01T16:45:38.000Z
|
recipes/tables/rainfall.sql
|
svetasmirnova/mysqlcookbook
|
8cb370b9b91ef35f4654b774bac019e2b636ac67
|
[
"CC0-1.0"
] | null | null | null |
recipes/tables/rainfall.sql
|
svetasmirnova/mysqlcookbook
|
8cb370b9b91ef35f4654b774bac019e2b636ac67
|
[
"CC0-1.0"
] | null | null | null |
# rainfall.sql
# rainfall table: each record indicates date of measurement and amount
# of precipitation on that day.
# This file sets up the table and runs a self join to calculate
# running totals and averages for amount of precipitation each
# day, assuming no missing days. rainfall2.sql shows the calculations
# if missing days are permitted.
DROP TABLE IF EXISTS rainfall;
#@ _CREATE_TABLE_
CREATE TABLE rainfall
(
date DATE NOT NULL,
precip FLOAT(10,2) NOT NULL,
PRIMARY KEY(date)
);
#@ _CREATE_TABLE_
INSERT INTO rainfall (date, precip)
VALUES
('2014-06-01', 1.5),
('2014-06-02', 0),
('2014-06-03', 0.5),
('2014-06-04', 0),
('2014-06-05', 1.0)
;
SELECT * FROM rainfall;
# calculate cumulative precipitation per day, assuming no missing days
SELECT t1.date, t1.precip AS 'daily precip',
SUM(t2.precip) AS 'cum. precip'
FROM rainfall AS t1, rainfall AS t2
WHERE t1.date >= t2.date
GROUP BY t1.date, t1.precip;
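# Expected result of the query above for the sample data (illustrative):
# 2014-06-01 -> 1.50, 2014-06-02 -> 1.50, 2014-06-03 -> 2.00,
# 2014-06-04 -> 2.00, 2014-06-05 -> 3.00 cumulative precipitation.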
# Add columns to show elapsed days and running average of amount of
# precipitation, assuming no missing days
SELECT t1.date, t1.precip AS 'daily precip',
SUM(t2.precip) AS 'cum. precip',
COUNT(t2.precip) AS days,
AVG(t2.precip) AS 'avg. precip'
FROM rainfall AS t1, rainfall AS t2
WHERE t1.date >= t2.date
GROUP BY t1.date, t1.precip;
| 25.84 | 70 | 0.715944 | 3.28125 |
4a5bf3811a5e09878f36f82589fdad1748412ebe
| 846 |
js
|
JavaScript
|
Array/BestTimeToButAndSellStock.js
|
DorAm/algorithms
|
4126e3cdd87d8f6b13c7ebd63da5bdafdc192f74
|
[
"MIT"
] | 1 |
2021-04-03T15:50:11.000Z
|
2021-04-03T15:50:11.000Z
|
Array/BestTimeToButAndSellStock.js
|
DorAm/algorithms
|
4126e3cdd87d8f6b13c7ebd63da5bdafdc192f74
|
[
"MIT"
] | null | null | null |
Array/BestTimeToButAndSellStock.js
|
DorAm/algorithms
|
4126e3cdd87d8f6b13c7ebd63da5bdafdc192f74
|
[
"MIT"
] | null | null | null |
/**
* Best Time to Buy and Sell Stock:
*
* You are given an array prices where prices[i] is the price of a given stock on the ith day.
* You want to maximize your profit by choosing a single day to buy one stock and choosing a different day in the
* future to sell that stock. Return the maximum profit you can achieve from this transaction. If you cannot achieve
* any profit, return 0.
*
* Time: O(n)
* Space: O(1)
**/
function bestTimeToBuyAndSellStock(prices) {
let maxProfit = 0;
let minPrice = Number.MAX_VALUE;
for (let i = 0; i < prices.length; i++) {
if (prices[i] < minPrice) {
minPrice = prices[i];
} else {
maxProfit = Math.max(maxProfit, prices[i] - minPrice);
}
}
return maxProfit;
}
let prices = [7, 1, 5, 3, 6, 4];
bestTimeToBuyAndSellStock(prices);
| 30.214286 | 116 | 0.641844 | 3.09375 |
43bbf2dab944983671dc98f225231d3e1cff07d6
| 1,213 |
go
|
Go
|
pkg/restful/errors.go
|
GUAIK-ORG/go-restful
|
bf21f47b335b351353619bf3713c1810dd17e866
|
[
"Apache-2.0"
] | 14 |
2019-12-26T20:16:08.000Z
|
2020-11-10T01:37:03.000Z
|
pkg/restful/errors.go
|
GUAIK-ORG/go-restful
|
bf21f47b335b351353619bf3713c1810dd17e866
|
[
"Apache-2.0"
] | 2 |
2020-04-08T13:06:36.000Z
|
2020-04-09T03:19:38.000Z
|
pkg/restful/errors.go
|
GUAIK-ORG/gtask
|
07ae2ff75b3f8c2fe153f296f9c93bff0b115724
|
[
"Apache-2.0"
] | 1 |
2021-03-06T02:07:22.000Z
|
2021-03-06T02:07:22.000Z
|
package restful
import (
"sync"
"github.com/golang/glog"
)
type Error struct {
Code string
Msg map[string]string // message translations, keyed by language code
}
type Errors struct {
errors map[string]Error
language string
}
type ErrorsBucket struct {
errorsMap []*Errors
}
var errorBucketInsObj *ErrorsBucket
var errorBucketOnce sync.Once
func errorBucketIns() *ErrorsBucket {
errorBucketOnce.Do(func() {
errorBucketInsObj = &ErrorsBucket{
errorsMap: make([]*Errors, 0),
}
})
return errorBucketInsObj
}
func NewErrors() *Errors {
errors := &Errors{
errors: make(map[string]Error),
language: "en",
}
errorBucketIns().errorsMap = append(errorBucketIns().errorsMap, errors)
return errors
}
func (e *Errors) NewError(code string, msg string) {
err := Error{
Code: code,
Msg: map[string]string{
e.language: msg,
},
}
e.errors[code] = err
}
// Translate adds a translation of an existing error message for the given language.
func (e *Errors) Translate(code string, language string, msg string) {
if _, ok := e.errors[code]; !ok {
glog.Error("Error@Translate : code not exist")
return
}
e.errors[code].Msg[language] = msg
}
// ErrorMsg returns all translations of the message for the given error code.
func (e *Errors) ErrorMsg(code string) map[string]string {
if _, ok := e.errors[code]; !ok {
return nil
}
return e.errors[code].Msg
}
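// Illustrative usage sketch (not part of the original file): register an error
// code with its default English message, add a German translation, then fetch
// every translation stored for that code.
func exampleErrorsUsage() map[string]string {
	errs := NewErrors()
	errs.NewError("E1001", "record not found")
	errs.Translate("E1001", "de", "Datensatz nicht gefunden")
	return errs.ErrorMsg("E1001") // {"en": "record not found", "de": "Datensatz nicht gefunden"}
}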
| 17.328571 | 72 | 0.684254 | 3.09375 |
eb59041bc30d0822d35f9e2e67e1141ee3a0edff
| 3,670 |
rs
|
Rust
|
isymtope-generate/src/session.rs
|
tmzt/ismytope
|
f6caa4c7741e894032f67c9daa8a603fb47148c7
|
[
"MIT"
] | 25 |
2017-09-19T09:27:39.000Z
|
2021-01-02T09:52:09.000Z
|
isymtope-generate/src/session.rs
|
tmzt/isymtope
|
f6caa4c7741e894032f67c9daa8a603fb47148c7
|
[
"MIT"
] | null | null | null |
isymtope-generate/src/session.rs
|
tmzt/isymtope
|
f6caa4c7741e894032f67c9daa8a603fb47148c7
|
[
"MIT"
] | null | null | null |
use isymtope_ast_common::*;
use std::collections::HashMap;
#[cfg(feature = "session_time")]
use time::{get_time, Duration, Timespec};
#[derive(Debug)]
pub struct MemorySession {
#[cfg(feature = "session_time")]
created: Timespec,
#[cfg(feature = "session_time")]
expires: Option<Timespec>,
data: HashMap<String, ExpressionValue<OutputExpression>>,
}
#[cfg(feature = "session_time")]
impl Default for MemorySession {
fn default() -> Self {
MemorySession::new(get_time(), None)
}
}
#[cfg(not(feature = "session_time"))]
impl Default for MemorySession {
fn default() -> Self {
MemorySession::new()
}
}
impl MemorySession {
#[cfg(feature = "session_time")]
pub fn new(created: Timespec, expires: Option<Duration>) -> Self {
let expires = expires.map(|dur| created + dur);
MemorySession {
created: created,
expires: expires,
data: Default::default(),
}
}
#[cfg(not(feature = "session_time"))]
pub fn new() -> Self {
MemorySession {
data: Default::default(),
}
}
}
impl Session for MemorySession {
fn set_value(
&mut self,
key: &str,
value: ExpressionValue<OutputExpression>,
_update: bool,
) -> SessionResult<()> {
self.data.insert(key.to_owned(), value);
// let entry = self.data.entry(key);
// match self.data.entry(key.to_owned()) {
// Entry::Occupied(mut o) => {
// let item = o.get_mut();
// // Set modified timestamp
// item.2 = Some(ts.clone());
// println!("Replacing existing value of [{}] with [{:?}] (was [{:?}])", key, expr, item.0);
// Ok(())
// }
// Entry::Vacant(v) => {
// // let initial_ty = match mode { Some(DataItemMode::InitialType(ref ty) ) => Some(ty.to_owned()), _ => None };
// // let item = SessionDataItem::new(expr, initial_ty, ts);
// let item = SessionDataItem::new(expr, ts);
// v.insert(item);
// Ok(())
// }
// }
Ok(())
}
fn remove_value(&mut self, key: &str) -> SessionResult<()> {
self.data.remove(key);
Ok(())
}
fn get_value(&self, key: &str) -> SessionResult<Option<&ExpressionValue<OutputExpression>>> {
Ok(self.data.get(key))
}
#[cfg(feature = "session_time")]
fn created(&self) -> &Timespec {
&self.created
}
#[cfg(feature = "session_time")]
fn expires(&self) -> Option<&Timespec> {
self.expires.as_ref()
}
fn execute_action(
&mut self,
_session_id: &str,
_action_op: &ActionOp<ProcessedExpression>,
) -> SessionResult<()> {
Ok(())
}
#[cfg(feature = "types")]
fn set_with_type(
&mut self,
key: &str,
value: ExpressionValue<OutputExpression>,
) -> SessionResult<()> {
let ty = value.peek_ty();
let mode = ty.map(|ty| {
if initial {
DataItemMode::InitialType(ty)
} else {
DataItemMode::ReplaceType(ty)
}
});
self.set_with_type_mode(key, value, mode)?;
}
}
impl ReducerStateProvider for MemorySession {
fn get(&self, key: &str) -> SessionResult<Option<&ExpressionValue<OutputExpression>>> {
eprintln!("Requested reducer state key {}", key);
let expr = self.get_value(key)?;
eprintln!("Got value for reducer state key {}: {:?}", key, expr);
Ok(expr)
}
}
| 26.028369 | 129 | 0.531063 | 3.3125 |
21b662896d82d210ca2832612df2142667c07acf
| 3,152 |
rs
|
Rust
|
src/util.rs
|
LunarCoffee/Lucent
|
d8b0669488c032a9131782a00ee603ae6b29560e
|
[
"MIT"
] | 2 |
2020-05-13T04:43:34.000Z
|
2020-07-01T01:35:05.000Z
|
src/util.rs
|
LunarCoffee/lucent
|
d8b0669488c032a9131782a00ee603ae6b29560e
|
[
"MIT"
] | null | null | null |
src/util.rs
|
LunarCoffee/lucent
|
d8b0669488c032a9131782a00ee603ae6b29560e
|
[
"MIT"
] | null | null | null |
use std::time::SystemTime;
use async_std::io;
use chrono::{DateTime, Local, Utc};
use futures::{AsyncRead, AsyncReadExt};
use crate::consts;
// Used in handling range requests.
#[derive(Clone, Copy)]
pub struct Range {
pub low: usize,
pub high: usize,
}
pub fn get_time_utc() -> DateTime<Utc> { SystemTime::now().into() }
pub fn get_time_local() -> DateTime<Local> { SystemTime::now().into() }
// The following functions work with timestamps in the format used by HTTP (RFC 2616).
pub fn parse_time_rfc2616(time: &str) -> Option<DateTime<Utc>> {
DateTime::parse_from_str(time, "%a, %d %b %Y %T GMT").ok().map(|t| t.with_timezone(&Utc))
}
pub fn format_time_rfc2616(time: &DateTime<Utc>) -> String { time.format("%a, %d %b %Y %T GMT").to_string() }
// Visible characters ('vchar') as defined in RFC 7230.
pub fn is_visible_char(ch: char) -> bool { ('!'..='~').contains(&ch) }
// This iterates through the content of `reader` in chunks of a given size, calling `op` on each chunk. `op` may, for
// example, send the chunk over a network.
pub async fn with_chunks<R, F>(len: usize, reader: &mut R, mut op: F) -> io::Result<()>
where
R: AsyncRead + Unpin,
F: FnMut(Vec<u8>) -> io::Result<()>,
{
let chunk_count = (len - 1) / consts::READ_CHUNK_SIZE + 1;
for n in 0..chunk_count {
// The final chunk may be smaller; compute it from the remaining length so that
// a `len` which is an exact multiple of the chunk size still reads a full final
// chunk instead of an empty one.
let chunk_len = if n == chunk_count - 1 { len - n * consts::READ_CHUNK_SIZE } else { consts::READ_CHUNK_SIZE };
let mut chunk = vec![0; chunk_len];
reader.read_exact(&mut chunk).await?;
op(chunk)?;
}
Ok(())
}
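// Illustrative sketch (not part of the original module): drain an in-memory
// buffer through `with_chunks`, re-assembling the chunks. Relies on the
// `futures` crate's `AsyncRead` impl for `&[u8]`.
#[allow(dead_code)]
async fn collect_chunks_example(data: &[u8]) -> io::Result<Vec<u8>> {
    let mut out = Vec::with_capacity(data.len());
    let mut reader = data;
    with_chunks(data.len(), &mut reader, |chunk| {
        out.extend_from_slice(&chunk);
        Ok(())
    })
    .await?;
    Ok(out)
}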
// Gets a MIME type likely to be associated with a file extension.
pub fn media_type_by_ext(ext: &str) -> &str {
match ext {
"aac" => consts::H_MEDIA_AAC,
"avi" => consts::H_MEDIA_AVI,
"bmp" => consts::H_MEDIA_BITMAP,
"cgi" => consts::H_MEDIA_CGI_SCRIPT,
"css" => consts::H_MEDIA_CSS,
"csv" => consts::H_MEDIA_CSV,
"epub" => consts::H_MEDIA_EPUB,
"gz" => consts::H_MEDIA_GZIP,
"gif" => consts::H_MEDIA_GIF,
"htm" | "html" => consts::H_MEDIA_HTML,
"ico" => consts::H_MEDIA_ICON,
"jpg" | "jpeg" => consts::H_MEDIA_JPEG,
"js" => consts::H_MEDIA_JAVASCRIPT,
"json" => consts::H_MEDIA_JSON,
"mp3" => consts::H_MEDIA_MP3,
"mp4" => consts::H_MEDIA_MP4,
"oga" => consts::H_MEDIA_OGG_AUDIO,
"png" => consts::H_MEDIA_PNG,
"pdf" => consts::H_MEDIA_PDF,
"php" => consts::H_MEDIA_PHP,
"rtf" => consts::H_MEDIA_RTF,
"svg" => consts::H_MEDIA_SVG,
"swf" => consts::H_MEDIA_SWF,
"ttf" => consts::H_MEDIA_TTF,
"txt" => consts::H_MEDIA_TEXT,
"wav" => consts::H_MEDIA_WAV,
"weba" => consts::H_MEDIA_WEBM_AUDIO,
"webm" => consts::H_MEDIA_WEBM_VIDEO,
"webp" => consts::H_MEDIA_WEBP_IMAGE,
"woff" => consts::H_MEDIA_WOFF,
"woff2" => consts::H_MEDIA_WOFF2,
"xhtml" => consts::H_MEDIA_XHTML,
"xml" => consts::H_MEDIA_XML,
"zip" => consts::H_MEDIA_ZIP,
_ => consts::H_MEDIA_BINARY,
}
}
| 35.022222 | 117 | 0.600888 | 3.140625 |
dcc384ee188db8ec26d67ed090d28a6ff1adcfd8
| 9,379 |
lua
|
Lua
|
lualib/LuaKit/framework/lib/TableLib.lua
|
cooee/skynet
|
9a5e81de9d068c6595930d45614ded2746376731
|
[
"MIT"
] | null | null | null |
lualib/LuaKit/framework/lib/TableLib.lua
|
cooee/skynet
|
9a5e81de9d068c6595930d45614ded2746376731
|
[
"MIT"
] | null | null | null |
lualib/LuaKit/framework/lib/TableLib.lua
|
cooee/skynet
|
9a5e81de9d068c6595930d45614ded2746376731
|
[
"MIT"
] | null | null | null |
local tableConcat = table.concat
local tableInsert = table.insert
local type = type
local pairs = pairs
local tostring = tostring
local next = next
local TableLib = {}
--[[--
Count the number of fields contained in a table.
Lua's "#" operator only works on arrays with consecutive numeric indices;
TableLib.nums() counts every non-nil value in the table instead.
@param t the table to count
@return integer
]]
function TableLib.nums(t)
local temp = checktable(t)
local count = 0
for k, v in pairs(temp) do
count = count + 1
end
return count
end
--[[--
Copy every key and value from the source table into the destination table,
overwriting the value of any key that already exists.
@usage
local dest = {a = 1, b = 2}
local src = {c = 3, d = 4}
TableLib.merge(dest, src)
-- dest = {a = 1, b = 2, c = 3, d = 4}
@param dest destination table
@param src source table
]]
function TableLib.merge(dest, src)
if not src or not dest then
return;
end
for k, v in pairs(src) do
dest[k] = v
end
end
--[[--
Merge the contents of two tables into a new array of their values.
@usage
local src1 = {a = 1, b = 2}
local src2 = {c = 3, d = 4}
local temp = TableLib.merge2(src1, src2)
-- src1 = {a = 1, b = 2}
-- temp holds the values 1, 2, 3, 4 (the original keys are not preserved)
@param src1 source table 1
@param src2 source table 2
]]
function TableLib.merge2(src1, src2)
local tb ={}
for k, v in pairs(src1) do
tableInsert(tb,v);
end
for k, v in pairs(src2) do
tableInsert(tb,v);
end
return tb;
end
--[[--
Synchronize data: copy values from tab2 into tab1, but only for keys that
already exist in tab1 (this is not a merge).
@usage
local tab1 = {c = 1, b = 2, g = 9}
local tab2 = {c = 3, d = 4}
TableLib.sync(tab1, tab2)
-- tab1 = {c = 3, b = 2, g = 9}
-- tab2 = {c = 3, d = 4}
@param tab1 target table
@param tab2 source table
]]
function TableLib.sync(tab1, tab2)
for k, v in pairs(tab2) do
if tab1[k] ~= nil then
tab1[k] = v;
end
end
end
--[[--
Search the table for the given value and return its key, or nil if not found.
@usage
local hashtable = {name = "dualface", comp = "chukong"}
print(TableLib.keyof(hashtable, "chukong")) -- prints comp
@param table hashtable the table to search
@param mixed value the value to look for
@return string the key holding that value
]]
function TableLib.keyof(hashtable, value)
for k, v in pairs(hashtable) do
if v == value then return k end
end
return nil
end
--[[--
Search the array for the given value and return its index, or false if not found.
@function [parent=#table] indexof
@param table array the array to search
@param mixed value the value to look for
@param integer begin the index to start searching from
@return integer#integer
the index of the value, or false if it is not found
]]
function TableLib.indexof(array, value, begin)
for i = begin or 1, #array do
if array[i] == value then return i end
end
return false
end
--[[--
Remove the given value from the array and return how many entries were removed.
@param table array the array to modify
@param mixed value the value to remove
@param [boolean removeall] whether to remove every occurrence of the value
@usage
local array = {"a", "b", "c", "c"}
print(TableLib.removeByValue(array, "c", true)) -- prints 2
@return integer
]]
function TableLib.removeByValue(array, value, removeall)
local c, i, max = 0, 1, #array
while i <= max do
if array[i] == value then
table.remove(array, i)
c = c + 1
i = i - 1
max = max - 1
if not removeall then break end
end
i = i + 1
end
return c
end
--[[
Check whether a table is empty.
]]
function TableLib.isEmpty(t)
if t and type(t)=="table" then --FIXME 此句可以判空,为何还要循环表内元素?
return next(t)==nil;
end
return true;
end
--[[
Check whether a table is nil.
]]
function TableLib.isNil(t)
if t and type(t)=="table" then
return false;
end
return true;
end
--[[
Check whether the value is a table.
]]
function TableLib.isTable(t)
if type(t)=="table" then
return true;
end
return false;
end
--[[
Copy a table; only data is copied, functions inside the table are not handled.
]]
function TableLib.copyTab(st)
local tab = {}
for k, v in pairs(st or {}) do
if type(v) ~= "table" then
tab[k] = v
else
tab[k] = TableLib.copyTab(v)
end
end
return tab
end
function TableLib.copyTo(target, source)
for _,v in ipairs(source or {}) do
table.insert(target, v)
end
end
--[[
Validate a table: return it as-is, or {} when it is not a table.
]]
function TableLib.verify(t)
if t and type(t)=="table" then
return t;
end
return {};
end
function TableLib.getSize(t)
local size =0;
if t and type(t)=="table" then
for k,v in pairs(t) do
size=size+1;
end
end
return size;
end
--XXX should this return immediately when the argument is not in the expected format? Adjust as the module requires, and delete this comment once confirmed.
function TableLib.size(t)
if type(t) ~= "table" then
return 0;
end
local count = 0;
for _,v in pairs(t) do
count = count + 1;
end
return count;
end
-- Compare whether the contents of two tables are identical
function TableLib.equal(t1,t2)
if type(t1) ~= type(t2) then
return false;
else
if type(t1) ~= "table" then
return t1 == t2;
else
local len1 = TableLib.size(t1);
local len2 = TableLib.size(t2);
if len1 ~= len2 then
return false;
else
local isEqual = true;
for k,v in pairs(t1) do
if t2[k] == nil then
isEqual = false;
break;
else
if type(t2[k]) ~= type(v) then
isEqual = false;
break;
else
if type(v) ~= "table" then
if t2[k] ~= v then
isEqual = false;
break;
end
else
isEqual = TableLib.equal(v,t2[k]);
if not isEqual then
break;
end
end
end
end
end
return isEqual;
end
end
end
end
-- Pick n random values from a table
function TableLib.random(t, num)
assert(type(t) == "table", "invalid arg");
local randomList = { }
if not num or num > #t then
num = #t;
end
local rangeList = { };
for i,v in ipairs(t) do
rangeList[i] = v;
end
for i = 1, num do
local index = math.random(i, #rangeList);-- pick a random index
rangeList[i], rangeList[index] = rangeList[index], rangeList[i];-- swap
randomList[i] = rangeList[i];-- after the swap, move the element at position i into the result list
end
return randomList;
end
--- Serialize a table into a string
function TableLib.tostring(root)
if not root then return end
local cache = { [root] = "root" }
local flag = {};
local function _dump(t,name)
local mt = getmetatable(t)
if mt and mt.__tostring then
return tostring(t)
end
local temp = {}
for i,v in ipairs(t) do
flag[i] = true;
if cache[v] then
tableInsert(temp, cache[v])
elseif type(v) == "table" then
cache[v] = string.format("%s[%d]", name, i)
tableInsert(temp, string.format("%s", _dump(v, cache[v])))
else
tableInsert(temp, tostring(v))
end
end
for k,v in pairs(t) do
if not flag[k] then
local key = tostring(k)
if cache[v] then
tableInsert(temp, string.format("%s=%s", key, cache[v]))
elseif type(v) == "table" then
cache[v] = string.format("%s.%s", name, key)
tableInsert(temp, string.format("%s=%s", key, _dump(v, cache[v])))
else
tableInsert(temp, string.format("%s=%s", key, tostring(v)))
end
end
end
return string.format("{%s}", tableConcat(temp,","));
end
return _dump(root, "root");
end
--- Deep-merge several tables into the first one
function TableLib.deepMerge( src, ... )
local arg = {...};
for i,v in ipairs(arg) do
for k,v in pairs(v) do
if type(v) == "table" and type(src[k]) == "table" then
TableLib.deepMerge(src[k], v);
else
src[k] = v;
end
end
end
end
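-- Illustrative usage sketch (not part of the original library): deepMerge
-- overwrites scalar keys with the later table's values and merges nested
-- tables recursively.
local function _deepMergeExample()
	local conf = {video = {width = 800, height = 600}, debug = false}
	TableLib.deepMerge(conf, {video = {width = 1024}, debug = true})
	-- conf is now {video = {width = 1024, height = 600}, debug = true}
	return conf
end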
function TableLib.select(t, func)
for i,v in ipairs(t) do
if func and func(i,v) == true then
return i, v;
end
end
end
function TableLib.selectall(t, func)
local temp = {};
for i,v in ipairs(t) do
if func and func(i,v) == true then
temp[#temp+1] = v;
end
end
return temp;
end
function TableLib.retrive(t, ...)
if not t then
return
end
local arg = {...}
local tmp = t;
for _,v in ipairs( arg ) do
if tmp[v] then
tmp = tmp[v];
else
return;
end
end
return tmp;
end
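-- Illustrative usage sketch (not part of the original library): retrive walks
-- a chain of nested keys and returns nil as soon as any level is missing.
local function _retriveExample()
	local t = {a = {b = {c = 42}}}
	return TableLib.retrive(t, "a", "b", "c"), TableLib.retrive(t, "a", "x")
	-- returns 42, nil
end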
function TableLib.lockWrite(t)
local mt = getmetatable(t) or {};
mt.__newindex = function(t,k,v)
error(string.format("can't write [%s] into table",k))
end;
if not getmetatable(t) then
setmetatable(t, mt);
end
end
function TableLib.releaseLockWrite(t)
local mt = getmetatable(t);
if not (mt and mt.__newindex) then
return
end
mt.__newindex = nil
end
function TableLib.getSubset(t, from, to)
assert(from > 0 and from <= to and to <= #t, string.format("invalid range : %d, %d", from, to));
local sub = {}
for i=from,to do
sub[#sub + 1] = t[i]
end
return sub
end
return TableLib;
| 21.811628 | 100 | 0.518925 | 3.265625 |
f37ab0a8aed4c218137ba1c5d41266869de441c4
| 5,026 |
swift
|
Swift
|
FuzzyLogicPlayground.playground/Sources/Robot.swift
|
loufranco/FuzzyLogicPlayground
|
8a77bc1d80dc7b79bfbae4844b4702aa8f7bddd1
|
[
"MIT"
] | 6 |
2017-03-15T23:36:24.000Z
|
2019-03-18T14:13:46.000Z
|
FuzzyLogicPlayground.playground/Sources/Robot.swift
|
loufranco/FuzzyLogicPlayground
|
8a77bc1d80dc7b79bfbae4844b4702aa8f7bddd1
|
[
"MIT"
] | null | null | null |
FuzzyLogicPlayground.playground/Sources/Robot.swift
|
loufranco/FuzzyLogicPlayground
|
8a77bc1d80dc7b79bfbae4844b4702aa8f7bddd1
|
[
"MIT"
] | null | null | null |
import Foundation
import UIKit
import GameplayKit
import SpriteKit
import PlaygroundSupport
public enum Direction: Int {
case north = 0
case northwest = 1
case west = 2
case southwest = 3
case south = 4
case southeast = 5
case east = 6
case northeast = 7
public func left() -> Direction {
return Direction(rawValue: ((self.rawValue + 1) % 8)) ?? .north
}
public func right() -> Direction {
return Direction(rawValue: ((self.rawValue + 7) % 8)) ?? .north
}
public func rotation() -> CGFloat {
return CGFloat(self.rawValue) * CGFloat.pi / 4
}
public func forwardCell(from cell: (x: Int, y: Int)) -> (Int, Int) {
switch self {
case .north:
return (cell.x, cell.y+1)
case .northwest:
return (cell.x-1, cell.y+1)
case .west:
return (cell.x-1, cell.y)
case .southwest:
return (cell.x-1, cell.y-1)
case .south:
return (cell.x, cell.y-1)
case .southeast:
return (cell.x+1, cell.y-1)
case .east:
return (cell.x+1, cell.y)
case .northeast:
return (cell.x+1, cell.y+1)
}
}
}
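// Illustrative sketch (not part of the original playground source): turning
// left from north gives northwest, and a forward step while facing northwest
// moves the cell one column left and one row up.
private func directionExample() -> (Int, Int) {
    let d = Direction.north.left()           // .northwest
    return d.forwardCell(from: (x: 3, y: 3)) // (2, 4)
}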
public enum RobotAction: NSString {
case turnRight
case turnLeft
case moveForward
case radar
case fireLaser
public static let allValues = [
turnRight,
turnLeft,
moveForward,
radar,
fireLaser]
}
public typealias RobotNextAction = ([String: Any]) -> RobotAction
public class Robot: SKSpriteNode {
var direction: Direction = .north
var cell: (x: Int, y: Int) = (x: 0, y: 0)
var nextActionFn: RobotNextAction?
var radarCharge: Int = 0
var laserCharge: Int = 0
let die = GKRandomDistribution.d20()
var moveTime: TimeInterval = 0
var enemyCell: (x: Int, y: Int) = (x: 0, y: 0)
var ticksSinceEnemyKnown: Int = 0
@discardableResult
public func goto(cell: (x: Int, y: Int)) -> Robot {
guard let rwScene = self.scene as? RobotWarScene, rwScene.isInBounds(cell: cell) else { return self }
self.cell = cell
self.run(SKAction.move(to: CGPoint(x: (cell.x * 50) + 25, y: (cell.y * 50) + 25), duration: moveTime))
return self
}
@discardableResult
public func turnLeft() -> Robot {
direction = direction.left()
self.run(SKAction.rotate(toAngle: direction.rotation(), duration: moveTime, shortestUnitArc: true))
return self
}
@discardableResult
public func turnRight() -> Robot {
direction = direction.right()
self.run(SKAction.rotate(toAngle: direction.rotation(), duration: moveTime, shortestUnitArc: true))
return self
}
@discardableResult
public func moveForward() -> Robot {
return goto(cell: direction.forwardCell(from: self.cell))
}
public func canMoveForward() -> Bool {
guard let rwScene = self.scene as? RobotWarScene else { return false }
return rwScene.isInBounds(cell: direction.forwardCell(from: self.cell))
}
public func nextAction(state: [String: Any]) -> RobotAction {
if let nextActionFn = nextActionFn {
return nextActionFn(state)
}
let dieRoll = die.nextInt()
if canMoveForward() && dieRoll < 8 {
return .moveForward
} else if canMoveForward() && self.laserCharge > 2 && dieRoll < 12 {
return .fireLaser
} else if self.radarCharge > 1 && dieRoll < 12 {
return .radar
} else if dieRoll % 2 == 0 {
return .turnRight
} else {
return .turnLeft
}
}
public func radar() {
guard let rwScene = self.scene as? RobotWarScene else { return }
if let enemyCell = rwScene.radar(from: self) {
self.enemyCell = enemyCell
self.ticksSinceEnemyKnown = 0
}
}
public func state() -> [String: Any] {
var state = [String: Any]()
state["myPosX"] = self.cell.x
state["myPosY"] = self.cell.y
state["myDir"] = self.direction
state["laserCharge"] = max(0, self.laserCharge)
state["radarCharge"] = max(0, self.radarCharge)
state["enemyPosX"] = enemyCell.x
state["enemyPosY"] = enemyCell.y
state["ticksSinceEnemyKnown"] = ticksSinceEnemyKnown
return state
}
public func doAction(action: RobotAction) {
guard let rwScene = self.scene as? RobotWarScene else { return }
switch action {
case .turnRight:
turnRight()
case .turnLeft:
turnLeft()
case .moveForward:
moveForward()
case .radar:
radar()
case .fireLaser:
rwScene.fireLaser(from: self)
}
laserCharge = min(laserCharge + 1, rwScene.maxLaser)
radarCharge = min(radarCharge + 1, rwScene.maxRadar)
self.ticksSinceEnemyKnown += 1
}
}
| 28.235955 | 110 | 0.58078 | 3.109375 |
5433e0904fe44465501ddc7dcb7e70f9c42c91fe
| 8,053 |
go
|
Go
|
schema2go/lex.go
|
bytemine/ldap-crud
|
981cdde0a352e32f870ab527a0149364666c674a
|
[
"MIT"
] | 8 |
2015-12-24T07:42:37.000Z
|
2022-01-10T05:46:15.000Z
|
schema2go/lex.go
|
deepdivenow/ldap-crud
|
981cdde0a352e32f870ab527a0149364666c674a
|
[
"MIT"
] | null | null | null |
schema2go/lex.go
|
deepdivenow/ldap-crud
|
981cdde0a352e32f870ab527a0149364666c674a
|
[
"MIT"
] | 3 |
2015-10-28T23:48:34.000Z
|
2022-01-10T05:46:04.000Z
|
package main
import (
"log"
"strings"
)
// A lexed item
type item struct {
typ int
val string
}
// A state function returns the next state function where lexing should continue
type stateFn func(*lexer) stateFn
// Removes comments from the input.
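// For example (illustrative input), filterComments applied to
//
//	"attributetype ( 1.2 # trailing comment\n NAME 'cn' )"
//
// yields "attributetype ( 1.2  NAME 'cn' )" up to whitespace; the lexer then
// splits the result with strings.Fields, so the exact spacing does not matter.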
func filterComments(i string) string {
out := make([]rune, 0)
inString := false
inComment := false
for _, c := range i {
switch string(c) {
case "'":
if !inComment {
inString = !inString
}
case "#":
if !inString {
inComment = true
}
continue
case "\n":
if inComment {
inComment = false
continue
}
}
if !inComment {
out = append(out, c)
}
}
return string(out)
}
type lexer struct {
input []string
start int
pos int
items chan item
lastitem item
debug bool
}
// Create a new lexer which lexes the given input
func newLexer(input string, debug bool) *lexer {
l := new(lexer)
l.items = make(chan item, 0)
l.input = strings.Fields(filterComments(input))
l.debug = debug
if debug {
log.Println("Schema input with comments removed.")
log.Println(filterComments(input))
}
// Start the lexing asynchronously
go l.run()
return l
}
func (l *lexer) run() {
for state := lexText; state != nil; {
state = state(l)
}
}
// Method to fulfill the yacc interface
func (l *lexer) Lex(lval *yySymType) int {
item := <-l.items
l.lastitem = item
lval.val = item.val
if l.debug {
printToken(l.lastitem)
}
return item.typ
}
// ditto
func (l *lexer) Error(e string) {
printToken(l.lastitem)
log.Println(e)
}
// print the token type and the token value for debugging
func printToken(i item) {
typ := i.typ
val := i.val
switch typ {
case ATTRIBUTETYPE:
log.Print("attributetype ", val)
case OBJECTCLASS:
log.Print("objectclass ", val)
case NAME:
log.Print("name ", val)
case DESC:
log.Print("desc ", val)
case OBSOLETE:
log.Print("obsolete ", val)
case SUP:
log.Print("sup ", val)
case EQUALITY:
log.Print("equality ", val)
case ORDERING:
log.Print("ordering ", val)
case SUBSTR:
log.Print("substr ", val)
case SYNTAX:
log.Print("syntax ", val)
case SINGLEVALUE:
log.Print("singlevalue ", val)
case COLLECTIVE:
log.Print("collective ", val)
case NOUSERMODIFICATION:
log.Print("nousermodification ", val)
case ABSTRACT:
log.Print("abstract ", val)
case STRUCTURAL:
log.Print("structural ", val)
case AUXILIARY:
log.Print("auxiliary ", val)
case MUST:
log.Print("must ", val)
case MAY:
log.Print("may ", val)
case DOLLAR:
log.Print("dollar ", val)
case LBRACE:
log.Print("lbrace ", val)
case RBRACE:
log.Print("rbrace ", val)
case USAGE:
log.Print("usage ", val)
case EOF:
log.Print("eof ", val)
case STRING:
log.Print("string ", val)
case OID:
log.Print("oid ", val)
case NUMERICOID:
log.Print("noid ", val)
}
}
// Write a lexed token to the channel of lexed items
func (l *lexer) emit(typ int, val string) {
switch typ {
case STRING:
fallthrough
case OID:
fallthrough
case NUMERICOID:
l.items <- item{typ: typ, val: val}
default:
l.items <- item{typ: typ}
}
l.pos++
l.start = l.pos
}
// Emits an EOF token and closes the items channel
func (l *lexer) eof(e string) {
l.emit(EOF, "")
close(l.items)
}
// Lexes raw text until an attributetype or objectclass definition
func lexText(l *lexer) stateFn {
for {
if l.start == len(l.input) {
l.eof("lexText")
return nil
}
if strings.ToLower(l.input[l.start]) == "attributetype" {
return lexAttributeType
}
if strings.ToLower(l.input[l.start]) == "objectclass" {
return lexObjectType
}
l.pos++
l.start = l.pos
}
}
// Lexes an attributetype token
func lexAttributeType(l *lexer) stateFn {
l.emit(ATTRIBUTETYPE, "")
return lexLeftBrace
}
// Lexes an objectclass token
func lexObjectType(l *lexer) stateFn {
l.emit(OBJECTCLASS, "")
return lexLeftBrace
}
// Lexes a left brace (
func lexLeftBrace(l *lexer) stateFn {
l.emit(LBRACE, "")
return lexNumericOid
}
// Lexes a numericOid like 1.2.3.4.1545612.1
func lexNumericOid(l *lexer) stateFn {
l.emit(NUMERICOID, l.input[l.start])
return lexAttributes
}
// Lexes the attributes of an attributetype or objectclass
func lexAttributes(l *lexer) stateFn {
switch l.input[l.start] {
case "NAME":
l.emit(NAME, "")
return lexName
case "DESC":
l.emit(DESC, "")
return lexQuotedString
case "OBSOLETE":
l.emit(OBSOLETE, "")
return lexAttributes
case "SUP":
l.emit(SUP, "")
return lexOids
case "EQUALITY":
l.emit(EQUALITY, "")
return lexOid
case "ORDERING":
l.emit(ORDERING, "")
return lexOid
case "SUBSTR":
l.emit(SUBSTR, "")
return lexOid
case "SYNTAX":
l.emit(SYNTAX, "")
return lexNoidLength
case "SINGLE-VALUE":
l.emit(SINGLEVALUE, "")
return lexAttributes
case "COLLECTIVE":
l.emit(COLLECTIVE, "")
return lexAttributes
case "NO-USER-MODIFICATION":
l.emit(NOUSERMODIFICATION, "")
return lexAttributes
case "USAGE":
l.emit(USAGE, "")
return lexUsage
case "ABSTRACT":
l.emit(ABSTRACT, "")
return lexAttributes
case "STRUCTURAL":
l.emit(STRUCTURAL, "")
return lexAttributes
case "AUXILIARY":
l.emit(AUXILIARY, "")
return lexAttributes
case "MUST":
l.emit(MUST, "")
return lexOids
case "MAY":
l.emit(MAY, "")
return lexOids
case ")":
l.emit(RBRACE, "")
return lexText
}
l.eof("Attributes")
return nil
}
// Lexes a single name in single quotes or multiple names in braces
// Names can't have whitespaces in them.
// Examples are: 'foobar' or ( 'foo' 'bar' )
func lexName(l *lexer) stateFn {
if l.input[l.pos] == "(" {
for {
if l.start == len(l.input) {
l.eof("Name")
}
switch l.input[l.start] {
case "(":
l.emit(LBRACE, "")
case ")":
l.emit(RBRACE, "")
return lexAttributes
default:
l.emit(STRING, strings.TrimPrefix(strings.TrimRight(l.input[l.start], "'"), "'"))
}
l.start = l.pos
}
} else {
l.emit(STRING, strings.TrimPrefix(strings.TrimRight(l.input[l.start], "'"), "'"))
return lexAttributes
}
}
// Lexes a string in single quotes, whitespaces in the string are permitted.
// Example: 'foo bar is great'
func lexQuotedString(l *lexer) stateFn {
// the string only consists of the current field
if strings.HasPrefix(l.input[l.start], "'") && strings.HasSuffix(l.input[l.start], "'") {
out := strings.TrimPrefix(l.input[l.start], "'")
out = strings.TrimRight(out, "'")
l.emit(STRING, out)
return lexAttributes
}
// the string consists of multiple fields
out := make([]string, 0)
out = append(out, strings.TrimPrefix(l.input[l.start], "'"))
l.pos++
for {
if l.pos == len(l.input) {
l.eof("QuotedString")
}
if strings.HasSuffix(l.input[l.pos], "'") {
out = append(out, strings.TrimRight(l.input[l.pos], "'"))
l.emit(STRING, strings.Join(out, " "))
return lexAttributes
}
out = append(out, l.input[l.pos])
l.pos++
}
}
// Lexes an Oid (string without enclosing braces and no whitespaces)
func lexOid(l *lexer) stateFn {
l.emit(OID, l.input[l.start])
return lexAttributes
}
// Lexes a numeric Oid, with an optional length specification in curly braces
// The length is currently ignored and not returned.
// Example: 1.2.3.4.5.6.7.8.9.0.1.2.3{32}
func lexNoidLength(l *lexer) stateFn {
// ignore the length for now
oid := strings.SplitN(l.input[l.start], "{", 2)
l.emit(NUMERICOID, oid[0])
return lexAttributes
}
// Lexes an usage string, but currently drops it silently.
func lexUsage(l *lexer) stateFn {
l.emit(STRING, "")
return lexAttributes
}
// Lexes a single oid, or a list of oids in braces seperated by dollar signs ( oid1 $ oid2 $ oid3 )
func lexOids(l *lexer) stateFn {
if l.input[l.pos] == "(" {
for {
if l.start == len(l.input) {
l.eof("Oids")
}
switch l.input[l.start] {
case "(":
l.emit(LBRACE, "")
case ")":
l.emit(RBRACE, "")
return lexAttributes
case "$":
l.emit(DOLLAR, "")
default:
l.emit(OID, l.input[l.start])
}
l.start = l.pos
}
} else {
l.emit(OID, l.input[l.start])
return lexAttributes
}
}
| 20.543367 | 99 | 0.653794 | 3.453125 |
894a15da18730816942ef24390d0788e01ab7c38
| 9,848 |
lua
|
Lua
|
mods/pulse_network/code/controller_api.lua
|
MineWitherMC/trinium2
|
f9d7ce90dec80d5f29b9c4df23725093f1048d48
|
[
"BSD-3-Clause"
] | 2 |
2018-04-30T06:41:20.000Z
|
2018-04-30T06:56:31.000Z
|
mods/pulse_network/code/controller_api.lua
|
MineWitherMC/trinium2
|
f9d7ce90dec80d5f29b9c4df23725093f1048d48
|
[
"BSD-3-Clause"
] | 1 |
2018-05-31T17:37:29.000Z
|
2018-06-01T08:34:47.000Z
|
mods/pulse_network/code/controller_api.lua
|
MineWitherMC/trinium2
|
f9d7ce90dec80d5f29b9c4df23725093f1048d48
|
[
"BSD-3-Clause"
] | 1 |
2018-05-16T18:15:56.000Z
|
2018-05-16T18:15:56.000Z
|
local api = trinium.api
local S = pulse_network.S
function pulse_network.trigger_update(controller_pos)
local meta = minetest.get_meta(controller_pos)
local cd = minetest.deserialize(meta:get_string"connected_devices")
for i = 1, #cd do
local name1 = minetest.get_node(cd[i]).name
if minetest.registered_items[name1].on_pulsenet_update then
minetest.registered_items[name1].on_pulsenet_update(cd[i], controller_pos)
end
end
end
function pulse_network.import_to_controller(pos)
local meta = minetest.get_meta(pos)
local inv = meta:get_inventory()
local items = meta:get_string"inventory":data()
local pending_recipes = meta:get_string"pending_recipes":data()
local s = inv:get_stack("input", 1)
if not s:is_empty() then
local name = s:get_name()
for i = 1, #pending_recipes do
if s:is_empty() then break end
local referrers_parsed = pending_recipes[i].refs
local action = false
for _, v in pairs(referrers_parsed) do
if v[name] then
local change = math.min(s:get_count(), v[name].needed)
if change > 0 then
v[name].needed = v[name].needed - change
v[name].buffered = v[name].buffered + change
s:take_item(change)
inv:set_stack("input", 1, s)
action = true
end
end
end
if action then
meta:set_string("pending_recipes", minetest.serialize(pending_recipes))
pulse_network.update_pending_recipe(pos, i)
end
end
end
if not s:is_empty() then
local CI, UI, CT, UT = meta:get_int"capacity_items", meta:get_int"used_items",
meta:get_int"capacity_types", meta:get_int"used_types"
local max_import = CI - UI
local id = api.get_item_identifier(s)
local dec = math.min(max_import, s:get_count())
if items[id] then
items[id] = items[id] + dec
s:take_item(dec)
inv:set_stack("input", 1, s)
meta:set_int("used_items", UI + dec)
elseif CT > UT then
items[id] = dec
s:take_item(dec)
inv:set_stack("input", 1, s)
meta:set_int("used_items", UI + dec)
meta:set_int("used_types", UT + 1)
end
meta:set_string("inventory", minetest.serialize(items))
end
pulse_network.trigger_update(pos)
end
function pulse_network.export_from_controller(pos, id, count)
local meta = minetest.get_meta(pos)
local items = meta:get_string"inventory":data()
if not items[id] then return false end
count = math.min(count, items[id])
meta:set_int("used_items", meta:get_int"used_items" - count)
items[id] = items[id] - count
if items[id] == 0 then
items[id] = nil
meta:set_int("used_types", meta:get_int"used_types" - 1)
end
meta:set_string("inventory", minetest.serialize(items))
pulse_network.import_to_controller(pos)
local tbl = id:split" "
local additional_info = table.map(table.tail(tbl), function(z) return " "..z end)
return tbl[1] .. " " .. count .. table.concat(additional_info)
end
function pulse_network.notify_pattern_change(pos, pattern, referrer)
local meta = minetest.get_meta(pos)
local patterns = meta:get_string"patterns":data()
local pattern_data = pattern:get_meta():get_string"recipe_data":data()
pattern_data.referrer = referrer
local flagged_for_addition = false
for _, v1 in pairs(pattern_data.outputs) do
local v = v1:split" "[1]
if not patterns[v] then
patterns[v] = {}
end
if not patterns[v][referrer] then
patterns[v][referrer] = pattern_data
flagged_for_addition = true
elseif not flagged_for_addition then
patterns[v][referrer] = nil
if table.count(patterns[v]) == 0 then
patterns[v] = nil
end
end
end
meta:set_string("patterns", minetest.serialize(patterns))
pulse_network.trigger_update(pos)
end
local function sequence(storage, patterns)
return function(object, steps)
assert(steps <= 25, S"Too complicated pattern sequence!")
local set = {}
if object.type == "pattern" then
--[[
If we obtained a pattern, we need to buffer all of its components.
]]--
for _, v in pairs(object.pattern.inputs) do
local input_id, input_count = unpack(v:split" ")
input_count = (tonumber(input_count) or 1) * object.multiplier
if not storage[input_id] then
storage[input_id] = 0
end
local added_count = math.min(storage[input_id], input_count)
storage[input_id] = storage[input_id] - added_count
input_count = input_count - added_count
if storage[input_id] == 0 then
storage[input_id] = nil
end
local needed_item = {
type = "item",
item = input_id,
buffered = added_count,
needed = input_count,
distance = steps,
parent = object.pattern.referrer,
}
set[needed_item] = 1
end
else
--[[
More interesting case is obtaining item.
In this case, we should select patterns we could use.
E.g, if first pattern for X is {Y, 2Z} and second is {T, 5W},
we need 10 Xs and we have 5 Ys and 10 Ts (and Z/W are free-craftable),
then we should request 5 Y-based recipes and
5 T-based recipes.
If we cannot produce the last recipe possible, it means we have not enough items.
However, we need to find whether we actually can craft Z
or we have to request 10 T-based recipes.
To do that, a probably good way is to binary-search maximum amount of {Y, 2Z} recipes.
However, I am too lazy ATM and just check the first recipe producing X.
]]--
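            -- Worked example of the greedy fallback below (a sketch; the numbers are hypothetical):
            -- if the first pattern producing X outputs 2 X per craft (outputted_amount = 2)
            -- and we still need 10 X (object.needed = 10), then
            -- needed_recipe_amount = math.ceil(10 / 2) = 5, and a single "pattern" entry
            -- with multiplier = 5 is added to the set instead of mixing several patterns.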
if not patterns[object.item] and object.needed > 0 then
error(S("Could not craft @1 of @2!", object.needed, api.get_description(object.item)))
end
if object.needed == 0 then return {} end
for _, v in pairs(patterns[object.item]) do
local outputted_amount = 0
for _, output in pairs(v.outputs) do
local id, count = unpack(output:split" ")
if id == object.item then
count = tonumber(count) or 1
outputted_amount = outputted_amount + count
end
end
assert(outputted_amount > 0, S"System error!")
local needed_recipe_amount = math.ceil(object.needed / outputted_amount)
local needed_item = {
type = "pattern",
pattern = v,
multiplier = needed_recipe_amount
}
set[needed_item] = 1
break
end
end
return set
end
end
function pulse_network.request_autocraft(pos, item_id, count)
local meta = minetest.get_meta(pos)
if meta:get_int"active_processes" >= meta:get_int"available_processes" then
return false, S"Insufficient crafting cores!"
end
local storage = meta:get_string"inventory":data()
local patterns = meta:get_string"patterns":data()
local step_1 = {type = "item", item = item_id, buffered = 0, needed = count, distance = 1, parent = false}
local a, b = pcall(api.search, step_1, api.functions.new_object, sequence(storage, patterns))
if a then
b:push(step_1)
local memory = count * #b:data()
if meta:get_int"used_memory" + memory > meta:get_int"available_memory" then
return false, S"Insufficient crafting memory!"
end
return b, memory
end
return a, table.concat(table.tail(b:split": "), ": ")
end
function pulse_network.execute_autocraft(pos, item_id, count)
local meta = minetest.get_meta(pos)
local storage = meta:get_string"inventory":data()
local dm, memory = pulse_network.request_autocraft(pos, item_id, count)
if not dm then return end
meta:set_int("used_memory", meta:get_int"used_memory" + memory)
meta:set_int("active_processes", meta:get_int"active_processes" + 1)
local UI, UT = meta:get_int"used_items", meta:get_int"used_types"
local referrers_parsed = {}
dm:forEach(function(obj)
if obj.type == "item" then
local recursive_input_id = obj.item
local buf = obj.buffered
if buf > 0 then
storage[recursive_input_id] = storage[recursive_input_id] - buf
UI = UI - buf
if storage[recursive_input_id] == 0 then
storage[recursive_input_id] = nil
UT = UT - 1
end
end
if not obj.parent then return end
if not referrers_parsed[obj.parent] then
referrers_parsed[obj.parent] = {}
end
local old = referrers_parsed[obj.parent][recursive_input_id] or {}
referrers_parsed[obj.parent][recursive_input_id] = {
needed = (old.needed or 0) + obj.needed,
buffered = (old.buffered or 0) + obj.buffered,
}
end
end)
meta:set_int("used_items", UI)
meta:set_int("used_types", UT)
local pending_recipes = meta:get_string"pending_recipes":data()
table.insert(pending_recipes, {refs = referrers_parsed, memory = memory})
meta:set_string("pending_recipes", minetest.serialize(pending_recipes))
meta:set_string("inventory", minetest.serialize(storage))
pulse_network.trigger_update(pos)
pulse_network.update_pending_recipe(pos, #pending_recipes)
end
function pulse_network.update_pending_recipe(pos, key)
local meta = minetest.get_meta(pos)
local pending_recipes = meta:get_string"pending_recipes":data()
local processed_recipe = pending_recipes[key]
for k, v in pairs(processed_recipe.refs) do
if table.every(v, function(x) return x.needed == 0 end) then
processed_recipe.refs[k] = nil
local map = table.map(v, function(x) return x.buffered end)
pulse_network.send_items_to_referrer(k, map)
end
end
if table.count(processed_recipe.refs) == 0 then
meta:set_int("used_memory", meta:get_int"used_memory" - processed_recipe.memory)
meta:set_int("active_processes", meta:get_int"active_processes" - 1)
table.remove(pending_recipes, key)
end
meta:set_string("pending_recipes", minetest.serialize(pending_recipes))
end
function pulse_network.send_items_to_referrer(referrer, itemmap)
local pos, index = unpack(referrer:split"|")
pos = vector.destringify(pos)
local meta = minetest.get_meta(pos)
local old_items = meta:get_string"autocraft_itemmap":data()
api.merge_itemmaps(old_items, itemmap)
meta:set_string("autocraft_itemmap", minetest.serialize(old_items))
local node = minetest.get_node(pos)
local callback = api.get_field(node.name, "on_autocraft_insert")
if callback then
callback(pos, index)
end
end
| 32.288525 | 107 | 0.717201 | 3.078125 |
24d425da989789a856a92b408492b0f2ded9e09c
| 1,061 |
go
|
Go
|
listener.go
|
daniel-gil/throttler
|
b957ed87b4762320330eb74bf0f8642ab77ca93e
|
[
"MIT"
] | 1 |
2018-03-06T10:57:53.000Z
|
2018-03-06T10:57:53.000Z
|
listener.go
|
centraldereservas/throttler
|
edac3a522fb8c179afb9904fb80ebde0c3e6632a
|
[
"MIT"
] | null | null | null |
listener.go
|
centraldereservas/throttler
|
edac3a522fb8c179afb9904fb80ebde0c3e6632a
|
[
"MIT"
] | null | null | null |
package throttler
import (
"fmt"
"time"
)
type listener interface {
listen()
}
type requestHandler struct {
rate time.Duration
reqChan chan *Request
verbose bool
fulfiller fulfiller
}
func newListener(r time.Duration, ch chan *Request, v bool, f fulfiller) (listener, error) {
if ch == nil {
return nil, fmt.Errorf("request channel can not be nil")
}
if f == nil {
return nil, fmt.Errorf("fulfiller can not be nil")
}
return &requestHandler{
rate: r,
reqChan: ch,
verbose: v,
fulfiller: f,
}, nil
}
// listen waits to receive new requests from the request channel and processes them
// without exceeding the calculated maximum rate limit, using the leaky bucket algorithm.
func (l *requestHandler) listen() {
throttle := time.Tick(l.rate)
for req := range l.reqChan {
<-throttle
if l.verbose {
fmt.Printf("[%v] got ticket; Fulfilling Request [%v]\n", time.Now(), req.Name)
}
go l.fulfiller.fulfill(req)
if l.verbose {
fmt.Printf("[%v] Request fulfilled [%v]\n", time.Now(), req.Name)
}
}
}
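// Example usage (a sketch, not part of the original file; myFulfiller is a
// hypothetical type implementing the package's fulfiller interface):
//
//	ch := make(chan *Request)
//	l, err := newListener(200*time.Millisecond, ch, true, &myFulfiller{})
//	if err != nil {
//		// handle the construction error
//	}
//	go l.listen()
//	ch <- &Request{Name: "ping"} // fulfilled at most once per 200ms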
| 21.653061 | 92 | 0.673893 | 3.21875 |
21f97f12e9558375a1cacfd90a02183e4ff5d213
| 1,375 |
sql
|
SQL
|
tutorial-iot-stream-predict/splicemachine/src/main/resources/ddl/create_train_and_test_labels.sql
|
splicemachine/splice-community-sample-code
|
db82e57ef9478827007fd5e267c3d488d0b10d33
|
[
"Apache-2.0"
] | 12 |
2016-08-09T17:40:00.000Z
|
2019-09-17T02:36:00.000Z
|
tutorial-iot-stream-predict/splicemachine/src/main/resources/ddl/create_train_and_test_labels.sql
|
splicemachine/splice-community-sample-code
|
db82e57ef9478827007fd5e267c3d488d0b10d33
|
[
"Apache-2.0"
] | 5 |
2016-10-20T20:08:11.000Z
|
2021-01-20T23:01:59.000Z
|
tutorial-iot-stream-predict/splicemachine/src/main/resources/ddl/create_train_and_test_labels.sql
|
splicemachine/splice-community-sample-code
|
db82e57ef9478827007fd5e267c3d488d0b10d33
|
[
"Apache-2.0"
] | 11 |
2016-10-13T16:08:51.000Z
|
2021-07-23T15:55:59.000Z
|
set schema IOT;
drop table if exists TRAIN_LABEL_TEMP;
create table TRAIN_LABEL_TEMP
(
engine_type int,
unit int,
time bigint,
prediction int
);
insert into IOT.TRAIN_LABEL_TEMP
select a.engine_type, a.unit, a.time, case when ((b.maxtime - a.time) > 30) then 0 else 1 end from IOT.TRAIN_DATA a inner join (select engine_type, unit, max(time) maxtime from IOT.TRAIN_DATA group by engine_type, unit) b on a.engine_type = b.engine_type and a.unit = b.unit ;
update IOT.TRAIN_DATA u set prediction = (select a.prediction from IOT.TRAIN_LABEL_TEMP a where a.engine_type =1 and a.unit = u.unit and a.time = u.time) where u.engine_type=1 ;
drop table if exists TEST_LABEL_TEMP;
create table TEST_LABEL_TEMP
(
engine_type int,
unit int,
time bigint,
prediction int
);
insert into IOT.TEST_LABEL_TEMP
select a.engine_type, a.unit, a.time, case when ((b.maxtime + r.rul - a.time) > 30) then 0 else 1 end from IOT.TEST_DATA a
inner join (select engine_type, unit, max(time) maxtime from IOT.TEST_DATA group by engine_type, unit) b on a.engine_type = b.engine_type and a.unit = b.unit inner join IOT.TEST_RUL_VALUES r on a.engine_type =r.engine_type and a.unit = r.unit;
update IOT.TEST_DATA u set prediction = (select a.prediction from IOT.TEST_LABEL_TEMP a where a.engine_type =1 and a.unit = u.unit and a.time = u.time) where u.engine_type=1 ;
| 35.25641 | 281 | 0.745455 | 3.09375 |
995d54b246b1349aed0e578b4c761e759c23a75b
| 1,465 |
swift
|
Swift
|
Sources/Data+Extensions.swift
|
kerimovscreations/SRP-Nimbus-Swift
|
e32d46fe00f2e93f61c35ae1b3d339f36349b854
|
[
"MIT"
] | 4 |
2021-02-25T06:25:52.000Z
|
2021-12-09T13:43:44.000Z
|
Sources/Data+Extensions.swift
|
kerimovscreations/SRP-Nimbus-Swift
|
e32d46fe00f2e93f61c35ae1b3d339f36349b854
|
[
"MIT"
] | null | null | null |
Sources/Data+Extensions.swift
|
kerimovscreations/SRP-Nimbus-Swift
|
e32d46fe00f2e93f61c35ae1b3d339f36349b854
|
[
"MIT"
] | 2 |
2021-10-08T15:46:08.000Z
|
2021-12-10T04:01:04.000Z
|
import Foundation
func ^ (lhs: Data, rhs: Data) -> Data? {
guard lhs.count == rhs.count else { return nil }
var result = Data(count: lhs.count)
for index in lhs.indices {
result[index] = lhs[index] ^ rhs[index]
}
return result
}
// Removed in Xcode 8 beta 3
func + (lhs: Data, rhs: Data) -> Data {
var result = lhs
result.append(rhs)
return result
}
extension DataProtocol {
public func hexEncodedString(uppercase: Bool = false) -> String {
return self.map {
if $0 < 16 {
return "0" + String($0, radix: 16, uppercase: uppercase)
} else {
return String($0, radix: 16, uppercase: uppercase)
}
}.joined()
}
}
extension NSData {
public var hex : String {
return (self as Data).hexEncodedString()
}
}
extension StringProtocol {
var hexaData: Data { .init(hexa) }
var hexaBytes: [UInt8] { .init(hexa) }
private var hexa: UnfoldSequence<UInt8, Index> {
sequence(state: startIndex) { startIndex in
guard startIndex < self.endIndex else { return nil }
let endIndex = self.index(startIndex, offsetBy: 2, limitedBy: self.endIndex) ?? self.endIndex
defer { startIndex = endIndex }
return UInt8(self[startIndex..<endIndex], radix: 16)
}
}
}
extension StringProtocol {
var data: Data { .init(utf8) }
var bytes: [UInt8] { .init(utf8) }
}
| 27.12963 | 105 | 0.587713 | 3.0625 |
978d178f853d8ce0ebac3f7013106d87194b8083
| 1,474 |
swift
|
Swift
|
OctoPod/Settings UI/DialogsViewController.swift
|
ArtCC/OctoPod
|
85d78f17aa0dfa3fe7c342c37970e0978eee9de2
|
[
"Apache-2.0"
] | null | null | null |
OctoPod/Settings UI/DialogsViewController.swift
|
ArtCC/OctoPod
|
85d78f17aa0dfa3fe7c342c37970e0978eee9de2
|
[
"Apache-2.0"
] | null | null | null |
OctoPod/Settings UI/DialogsViewController.swift
|
ArtCC/OctoPod
|
85d78f17aa0dfa3fe7c342c37970e0978eee9de2
|
[
"Apache-2.0"
] | null | null | null |
import UIKit
class DialogsViewController: ThemedStaticUITableViewController {
let appConfiguration: AppConfiguration = { return (UIApplication.shared.delegate as! AppDelegate).appConfiguration }()
@IBOutlet weak var onConnectLabel: UILabel!
@IBOutlet weak var onDisconnectLabel: UILabel!
@IBOutlet weak var onConnectSwitch: UISwitch!
@IBOutlet weak var onDisconnectSwitch: UISwitch!
override func viewDidLoad() {
super.viewDidLoad()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Theme labels
let theme = Theme.currentTheme()
onConnectLabel.textColor = theme.textColor()
onDisconnectLabel.textColor = theme.textColor()
// Configure state of switches
onConnectSwitch.isOn = appConfiguration.confirmationOnConnect()
onDisconnectSwitch.isOn = appConfiguration.confirmationOnDisconnect()
// Only allow to change settings if app is not in read-only mode
onConnectSwitch.isEnabled = !appConfiguration.appLocked()
onDisconnectSwitch.isEnabled = !appConfiguration.appLocked()
}
@IBAction func onConnectChanged(_ sender: Any) {
appConfiguration.confirmationOnConnect(enable: onConnectSwitch.isOn)
}
@IBAction func onDisconnectChanged(_ sender: Any) {
appConfiguration.confirmationOnDisconnect(enable: onDisconnectSwitch.isOn)
}
}
| 35.095238 | 122 | 0.705563 | 3.03125 |
ddd74a46bc91181a6c493853949e91ce17dc767c
| 4,681 |
go
|
Go
|
plugins/inputs/logfile/tail/tail_test.go
|
taohungyang/amazon-cloudwatch-agent
|
f8067542b8220ac085b68f6ac40ca78049b60221
|
[
"MIT"
] | null | null | null |
plugins/inputs/logfile/tail/tail_test.go
|
taohungyang/amazon-cloudwatch-agent
|
f8067542b8220ac085b68f6ac40ca78049b60221
|
[
"MIT"
] | null | null | null |
plugins/inputs/logfile/tail/tail_test.go
|
taohungyang/amazon-cloudwatch-agent
|
f8067542b8220ac085b68f6ac40ca78049b60221
|
[
"MIT"
] | null | null | null |
package tail
import (
"fmt"
"io/ioutil"
"log"
"os"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
const linesWrittenToFile int = 10
type testLogger struct {
debugs, infos, warns, errors []string
}
func (l *testLogger) Errorf(format string, args ...interface{}) {
line := fmt.Sprintf(format, args...)
l.errors = append(l.errors, line)
}
func (l *testLogger) Error(args ...interface{}) {
line := fmt.Sprint(args...)
l.errors = append(l.errors, line)
}
func (l *testLogger) Debugf(format string, args ...interface{}) {
line := fmt.Sprintf(format, args...)
l.debugs = append(l.debugs, line)
}
func (l *testLogger) Debug(args ...interface{}) {
line := fmt.Sprint(args...)
l.debugs = append(l.debugs, line)
}
func (l *testLogger) Warnf(format string, args ...interface{}) {
line := fmt.Sprintf(format, args...)
l.warns = append(l.warns, line)
}
func (l *testLogger) Warn(args ...interface{}) {
line := fmt.Sprint(args...)
l.warns = append(l.warns, line)
}
func (l *testLogger) Infof(format string, args ...interface{}) {
line := fmt.Sprintf(format, args...)
l.infos = append(l.infos, line)
}
func (l *testLogger) Info(args ...interface{}) {
line := fmt.Sprint(args...)
l.infos = append(l.infos, line)
}
func TestNotTailedCompeletlyLogging(t *testing.T) {
tmpfile, tail, tlog := setup(t)
defer tearDown(tmpfile)
readThreelines(t, tail)
// Then remove the tmpfile
if err := os.Remove(tmpfile.Name()); err != nil {
t.Fatalf("failed to remove temporary log file %v: %v", tmpfile.Name(), err)
}
// Wait until the tailer should have been terminated
time.Sleep(exitOnDeletionWaitDuration + exitOnDeletionCheckDuration + 1*time.Second)
verifyTailerLogging(t, tlog, "File "+tmpfile.Name()+" was deleted, but file content is not tailed completely.")
verifyTailerExited(t, tail)
}
func TestStopAtEOF(t *testing.T) {
tmpfile, tail, _ := setup(t)
defer tearDown(tmpfile)
readThreelines(t, tail)
// Since StopAtEOF() will block until the EOF is reached, run it in a goroutine.
done := make(chan bool)
go func() {
tail.StopAtEOF()
close(done)
}()
// Verify the goroutine is blocked indefinitely.
select {
case <-done:
t.Fatalf("StopAtEOF() completed unexpectedly")
case <-time.After(time.Second * 1):
t.Log("timeout waiting for StopAtEOF() (as expected)")
}
assert.Equal(t, errStopAtEOF, tail.Err())
// Read to EOF
for i := 0; i < linesWrittenToFile - 3; i++ {
<-tail.Lines
}
// Verify StopAtEOF() has completed.
select {
case <-done:
t.Log("StopAtEOF() completed (as expected)")
case <- time.After(time.Second * 1):
t.Fatalf("StopAtEOF() has not completed")
}
// Then remove the tmpfile
if err := os.Remove(tmpfile.Name()); err != nil {
t.Fatalf("failed to remove temporary log file %v: %v", tmpfile.Name(), err)
}
verifyTailerExited(t, tail)
}
func setup(t *testing.T) (*os.File, *Tail, *testLogger) {
tmpfile, err := ioutil.TempFile("", "example")
if err != nil {
t.Fatalf("failed to create temp file: %v", err)
}
// Write the file content
for i := 0; i < linesWrittenToFile; i++ {
if _, err := fmt.Fprintf(tmpfile, "%v some log line\n", time.Now()); err != nil {
log.Fatal(err)
}
}
if err := tmpfile.Close(); err != nil {
log.Fatal(err)
}
// Modify the exit on deletion wait to reduce test length
exitOnDeletionCheckDuration = 100 * time.Millisecond
exitOnDeletionWaitDuration = 500 * time.Millisecond
// Setup the tail
var tl testLogger
tail, err := TailFile(tmpfile.Name(), Config{
Logger: &tl,
ReOpen: false,
Follow: true,
})
if err != nil {
t.Fatalf("failed to tail file %v: %v", tmpfile.Name(), err)
}
return tmpfile, tail, &tl
}
func readThreelines(t *testing.T, tail *Tail) {
for i := 0; i < 3; i++ {
line := <-tail.Lines
if line.Err != nil {
t.Errorf("error tailing test file: %v", line.Err)
continue
}
if !strings.HasSuffix(line.Text, "some log line") {
t.Errorf("wrong line from tail found: '%v'", line.Text)
}
}
}
func verifyTailerLogging(t *testing.T, tlog *testLogger, expectedErrorMsg string) {
if len(tlog.errors) == 0 {
t.Errorf("No error logs found: %v", tlog.errors)
return
}
if tlog.errors[0] != expectedErrorMsg {
t.Errorf("Incorrect error message for incomplete tail of file:\nExpecting: %v\nFound : '%v'", expectedErrorMsg, tlog.errors[0])
}
}
func verifyTailerExited(t *testing.T, tail *Tail) {
select {
case <-tail.Dead():
return
default:
t.Errorf("Tailer is still alive after file removed and wait period")
}
}
func tearDown(tmpfile *os.File) {
os.Remove(tmpfile.Name())
exitOnDeletionCheckDuration = time.Minute
exitOnDeletionWaitDuration = 5 * time.Minute
}
| 24.253886 | 132 | 0.668233 | 3.1875 |
fa51ca65c89057b3e45052aced8c8ea79f2d9298
| 2,498 |
swift
|
Swift
|
CovidTracker/CovidTracker/Utils/APICaller.swift
|
onurbasdas/covid-tracker
|
e4d1a5e269532301154d0ece25d6a1837abd2541
|
[
"MIT"
] | null | null | null |
CovidTracker/CovidTracker/Utils/APICaller.swift
|
onurbasdas/covid-tracker
|
e4d1a5e269532301154d0ece25d6a1837abd2541
|
[
"MIT"
] | null | null | null |
CovidTracker/CovidTracker/Utils/APICaller.swift
|
onurbasdas/covid-tracker
|
e4d1a5e269532301154d0ece25d6a1837abd2541
|
[
"MIT"
] | null | null | null |
//
// APICaller.swift
// CovidTracker
//
// Created by Onur Başdaş on 2.02.2022.
//
import Foundation
class APICaller {
static let shared = APICaller()
private init() {}
private struct Constants {
static let allStatesUrl = URL(string: "https://api.covidtracking.com/v2/states.json")
}
enum DataScope {
case national
case state(State)
}
public func getCovidData(
for scope: DataScope,
completion: @escaping(Result<[DayData], Error>) -> Void
) {
let urlString: String
switch scope {
case .national:
urlString = "https://api.covidtracking.com/v2/us/daily.json"
case .state(let state):
urlString = "https://api.covidtracking.com/v2/states/\(state.state_code.lowercased())/daily.json"
}
guard let url = URL(string: urlString) else { return }
let task = URLSession.shared.dataTask(with: url) { data, _, error in
guard let data = data, error == nil else { return }
do {
let result = try JSONDecoder().decode(CovidDataResponse.self, from: data)
let models: [DayData] = result.data.compactMap {
guard let value = $0.cases?.total.value,
let date = DateFormatter.dayFormatter.date(from: $0.date) else {
return nil
}
return DayData(
date: date,
count: value
)
}
completion(.success(models))
}
catch {
completion(.failure(error))
}
}
task.resume()
}
public func getStateList(
completion: @escaping (Result<[State], Error>) -> Void
) {
guard let url = Constants.allStatesUrl else { return }
let task = URLSession.shared.dataTask(with: url) { data, _, error in
guard let data = data, error == nil else { return }
do {
let result = try JSONDecoder().decode(StateListResponse.self, from: data)
let states = result.data
completion(.success(states))
}
catch {
completion(.failure(error))
}
}
task.resume()
}
}
| 29.046512 | 110 | 0.490793 | 3.109375 |
9bb3386d3dc0e1e1c582f3b1a036d0ae0cbb62a6
| 1,873 |
js
|
JavaScript
|
repos/tap/components/results/results.js
|
simpleviewinc/keg-herkin
|
b44877cd8a84e8c52189af5c6b44fe4913dbafad
|
[
"MIT"
] | 1 |
2021-04-02T17:51:53.000Z
|
2021-04-02T17:51:53.000Z
|
repos/tap/components/results/results.js
|
simpleviewinc/keg-herkin
|
b44877cd8a84e8c52189af5c6b44fe4913dbafad
|
[
"MIT"
] | 7 |
2021-02-03T00:45:09.000Z
|
2021-10-13T22:08:44.000Z
|
repos/tap/components/results/results.js
|
simpleviewinc/keg-herkin
|
b44877cd8a84e8c52189af5c6b44fe4913dbafad
|
[
"MIT"
] | null | null | null |
import React, { useMemo } from 'react'
import { noOp } from '@keg-hub/jsutils'
import { getBaseApiUrl } from 'SVUtils/api'
import { Iframe } from 'SVComponents/iframe/iframe'
import { IframeHeader } from 'SVComponents/iframe/iframeHeader'
import { ExternalLink } from 'SVAssets/icons'
import { Surface } from 'SVComponents/surface'
import { useActiveTestRuns } from 'SVHooks/useActiveTestRuns'
import { Loading, View, Text, TouchableIcon } from '@keg-hub/keg-components'
import { PrefixTitleHeader } from 'SVComponents/labels/prefixTitleHeader'
import { ResultsTabs } from './resultsTabs'
import { TestsRunning } from './testsRunning'
/**
* Results
* @param {Object} props
 * @param {string} props.reportUrl - URL of the report being viewed
* @param {string} props.activeFile - Current activeFile for this screen
 * @param {Function} props.onExternalOpen - callback called when the icon is pressed
* @param {Object} props.styles - Custom styles for the Results component
*
* @returns {Component}
*/
export const Results = props => {
const {
reportUrl,
activeFile,
onExternalOpen,
styles,
} = props
const testRunModel = useActiveTestRuns()
return (
<>
<Surface
prefix={'Test Results'}
TitleComponent={({styles:textStyles, ...props}) =>
<IframeHeader
{...props}
onExternalOpen={onExternalOpen}
mainTextStyles={textStyles}
mainStyles={styles?.iFrame?.header}
/>
}
capitalize={false}
title={'Report'}
styles={styles?.iFrame?.surface}
className={`runner-surface-iframe`}
>
{
testRunModel?.running
? (<TestsRunning styles={styles?.running} />)
: (<Iframe src={reportUrl}/>)
}
</Surface>
<ResultsTabs styles={styles?.actions} />
</>
)
}
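// Example usage (a sketch; the prop values below are hypothetical):
//
// <Results
//   reportUrl={'https://host/reports/index.html'}
//   activeFile={'some.feature'}
//   onExternalOpen={() => {}}
//   styles={customStyles}
// />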
| 30.704918 | 82 | 0.642285 | 3 |
0b7531882bc3693d78e18104d816fb7003ff5f35
| 74,222 |
py
|
Python
|
pyseq/main.py
|
nygctech/PySeq2500
|
6969f178a5f5837ce2f41887d59624bf4cc39433
|
[
"MIT"
] | 9 |
2019-09-25T16:41:42.000Z
|
2021-11-15T08:49:48.000Z
|
pyseq/main.py
|
nygctech/PySeq2500
|
6969f178a5f5837ce2f41887d59624bf4cc39433
|
[
"MIT"
] | 8 |
2020-07-18T09:50:33.000Z
|
2022-03-12T01:01:21.000Z
|
pyseq/main.py
|
nygctech/PySeq2500
|
6969f178a5f5837ce2f41887d59624bf4cc39433
|
[
"MIT"
] | 5 |
2020-08-02T09:51:12.000Z
|
2022-01-04T15:54:32.000Z
|
"""
TODO:
"""
import time
import logging
import os
from os.path import join
import sys
import configparser
import threading
import argparse
from . import methods
from . import args
from . import focus
# Global int to track # of errors during start up
def error(*args):
"""Keep count of errors and print to logger and/or console."""
global n_errors
i = 0
if isinstance(args[0], logging.Logger):
logger = args[0]
i = 1
msg = 'ERROR::'
for a in args[i:]:
msg = msg + str(a) + ' '
    if i == 0:
print(msg)
else:
logger.log(21, msg)
n_errors += 1
return n_errors
##########################################################
## Flowcell Class ########################################
##########################################################
class Flowcell():
"""HiSeq 2500 System :: Flowcell
**Attributes:**
- position (str): Flowcell is at either position A (left slot )
or B (right slot).
- recipe_path (path): Path to the recipe.
- recipe (file): File handle for the recipe.
- first_line (int): Line number for the recipe to start from on the
initial cycle.
- cycle (int): The current cycle.
- total_cycles (int): Total number of the cycles for the experiment.
    - history ([[int,],[str,],[str,]]): Timeline of flowcell events; the
      1st column is the timestamp, the 2nd column is the event, and the
      3rd column is an event-specific detail.
- sections (dict): Dictionary of section names keys and coordinate
positions of the sections on the flowcell values.
- stage (dict): Dictionary of section names keys and stage positioning
and imaging details of the sections on the flowcell values.
- thread (int): Thread id of the current event on the flowcell.
- signal_event (str): Event that signals the other flowcell to continue
- wait_thread (threading.Event()): Blocks other flowcell until current
flowcell reaches signal event.
- waits_for (str): Flowcell A waits for flowcell B and vice versa.
- pump_speed (dict): Dictionary of pump scenario keys and pump speed
values.
- volume (dict): Keys are events/situations and values are volumes
in uL to use at the event/situation.
- filters (dict): Dictionary of filter set at each cycle, c: em, ex1, ex2.
- IMAG_counter (None/int): Counter for multiple images per cycle.
- events_since_IMAG (list): Record events since last IMAG step.
- temp_timer: Timer to check temperature of flowcell.
- temperature (float): Set temperature of flowcell in °C.
- temp_interval (float): Interval in seconds to check flowcell temperature.
- z_planes (int): Override number of z planes to image in recipe.
- pre_recipe_path (path): Recipe to run before actually starting experiment
- pre_recipe (file): File handle for the pre recipe.
"""
def __init__(self, position):
"""Constructor for flowcells
**Parameters:**
- position (str): Flowcell is at either position A (left slot) or
B (right slot).
"""
self.recipe_path = None
self.recipe = None
self.first_line = None
self.cycle = 0 # Current cycle
self.total_cycles = 0 # Total number of cycles for experiment
self.history = [[],[],[]] # summary of events in flowcell history
self.sections = {} # coordinates of flowcell of sections to image
self.stage = {} # stage positioning info for each section
self.thread = None # threading to do parallel actions on flowcells
self.signal_event = None # defines event that signals the next flowcell to continue
self.wait_thread = threading.Event() # blocks next flowcell until current flowcell reaches signal event
self.waits_for = None # position of the flowcell that signals current flowcell to continue
self.pump_speed = {}
self.volume = {'main':None,'side':None,'sample':None,'flush':None} # Flush volume
self.filters = {} # Dictionary of filter set at each cycle, c: em, ex1, ex2
self.IMAG_counter = None # Counter for multiple images per cycle
self.events_since_IMAG = [] # List events since last IMAG step
self.temp_timer = None # Timer to check temperature of flowcell
self.temperature = None # Set temperature of flowcell
        self.temp_interval = None # Interval in seconds to check flowcell temperature
self.z_planes = None # Override number of z planes to image in recipe.
self.pre_recipe_path = None # Recipe to run before actually starting experiment
while position not in ['A', 'B']:
print('Flowcell must be at position A or B')
position = input('Enter A or B for ' + str(position) + ' : ')
self.position = position
def addEvent(self, event, command):
"""Record history of events on flow cell.
**Parameters:**
        - event (str): Type of event; can be valv, pump, hold, wait, or
          imag.
- command (str): Details specific to each event such as hold time,
buffer, event to wait for, z planes to image, or pump volume.
**Returns:**
- int: A time stamp of the last event.
"""
self.history[0].append(time.time()) # time stamp
self.history[1].append(event) # event (valv, pump, hold, wait, imag)
self.history[2].append(command) # details such hold time, buffer, event to wait for
self.events_since_IMAG.append(event)
        if event == 'PORT':
self.events_since_IMAG.append(command)
if event in ['IMAG', 'STOP']:
self.events_since_IMAG.append(event)
return self.history[0][-1] # return time stamp of last event
def restart_recipe(self):
"""Restarts the recipe and returns the number of completed cycles."""
# Restart recipe
if self.recipe is not None:
self.recipe.close()
self.recipe = open(self.recipe_path)
# Reset image counter (if mulitple images per cycle)
if self.IMAG_counter is not None:
self.IMAG_counter = 0
msg = 'PySeq::'+self.position+'::'
if self.cycle == self.total_cycles:
# Increase cycle counter
self.cycle += 1
# Flowcell completed all cycles
hs.message(msg+'Completed '+ str(self.total_cycles) + ' cycles')
            hs.T.fc_off(self.position)
self.temperature = None
do_rinse(self)
if self.temp_timer is not None:
self.temp_timer.cancel()
self.temp_timer = None
self.thread = threading.Thread(target = time.sleep, args = (10,))
elif self.cycle < self.total_cycles:
# Increase cycle counter
self.cycle += 1
# Start new cycle
restart_message = msg+'Starting cycle '+str(self.cycle)
self.thread = threading.Thread(target = hs.message,
args = (restart_message,))
else:
self.thread = threading.Thread(target = time.sleep, args = (10,))
thread_id = self.thread.start()
return self.cycle
def pre_recipe(self):
"""Initializes pre recipe before starting experiment."""
prerecipe_message = 'PySeq::'+self.position+'::'+'Starting pre recipe'
self.recipe = open(self.prerecipe_path)
self.thread = threading.Thread(target = hs.message,
args = (prerecipe_message,))
thread_id = self.thread.start()
return thread_id
def endHOLD(self):
"""Ends hold for incubations in buffer, returns False."""
msg = 'PySeq::'+self.position+'::cycle'+str(self.cycle)+'::Hold stopped'
hs.message(msg)
return False
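# Example of how a Flowcell object is used (a sketch; the values are hypothetical,
# see setup_flowcells() below for how attributes are actually populated):
#   fc = Flowcell('A')
#   fc.waits_for = 'B'                      # with two flowcells, A waits for B and vice versa
#   stamp = fc.addEvent('PUMP', 'water')    # returns the timestamp of the recorded event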
##########################################################
## Setup Flowcells #######################################
##########################################################
def setup_flowcells(first_line, IMAG_counter):
"""Read configuration file and create flowcells.
**Parameters:**
- first_line (int): Line number for the recipe to start from on the
initial cycle.
**Returns:**
- dict: Dictionary of flowcell position keys with flowcell object values.
"""
err_msg = 'ConfigFile::sections::'
experiment = config['experiment']
method = experiment['method']
method = config[method]
flowcells = {}
for sect_name in config['sections']:
f_sect_name = sect_name.replace('_','') #remove underscores
position = config['sections'][sect_name]
AorB, coord = position.split(':')
# Create flowcell if it doesn't exist
if AorB not in flowcells.keys():
fc = Flowcell(AorB)
fc.recipe_path = experiment['recipe path']
fc.first_line = first_line
fc.volume['main'] = int(method.get('main prime volume', fallback=500))
fc.volume['side'] = int(method.get('side prime volume', fallback=350))
fc.volume['sample'] = int(method.get('sample prime volume', fallback=250))
fc.volume['flush'] = int(method.get('flush volume', fallback=1000))
fs = int(method.get('flush flowrate',fallback=700))
fc.pump_speed['flush'] = fs
ps = int(method.get('prime flowrate',fallback=100))
fc.pump_speed['prime'] = ps
rs = int(method.get('reagent flowrate', fallback=40))
fc.pump_speed['reagent'] = rs
fc.total_cycles = int(config.get('experiment','cycles'))
fc.temp_interval = float(method.get('temperature interval', fallback=5))*60
z_planes = int(method.get('z planes', fallback=0))
if z_planes > 0:
fc.z_planes = z_planes
if IMAG_counter > 1:
fc.IMAG_counter = 0
fc.prerecipe_path = method.get('pre recipe', fallback = None)
flowcells[AorB] = fc
# Add section to flowcell
if sect_name in flowcells[AorB].sections:
error(err_msg, sect_name, 'duplicated on flowcell', AorB)
else:
coord = coord.split(',')
flowcells[AorB].sections[f_sect_name] = [] # List to store coordinates of section on flowcell
flowcells[AorB].stage[f_sect_name] = {} # Dictionary to store stage position of section on flowcell
if float(coord[0]) < float(coord[2]):
error(err_msg,'Invalid x coordinates for', sect_name)
if float(coord[1]) < float(coord[3]):
error(err_msg, 'Invalid y coordinates for', sect_name)
for i in range(4):
try:
flowcells[AorB].sections[f_sect_name].append(float(coord[i]))
except:
error(err_msg,' No position for', sect_name)
    # if running multiple flowcells...
# Define first flowcell
# Define prior flowcell signals to next flowcell
if len(flowcells) > 1:
flowcell_list = [*flowcells]
for fc in flowcells.keys():
flowcells[fc].waits_for = flowcell_list[
flowcell_list.index(fc)-1]
if experiment['first flowcell'] not in flowcells:
error('ConfigFile::First flowcell does not exist')
if isinstance(IMAG_counter, int):
error('Recipe::Need WAIT before IMAG with 2 flowcells.')
# table = {}
# for fc in flowcells:
# table[fc] = flowcells[fc].sections.keys()
# print('Flowcell section summary')
# print(tabulate.tabulate(table, headers = 'keys', tablefmt = 'presto'))
#
# userYN('Confirm flowcell(s)')
return flowcells
##########################################################
## Parse lines from recipe ###############################
##########################################################
def parse_line(line):
"""Parse line and return event (str) and command (str).
    If line starts with the comment character, #, then None is returned for
both event and command.
"""
comment_character = '#'
#delimiter = '\t'
no_comment = line.split(comment_character)[0] # remove comment
sections = no_comment.split(':')
if len(sections) == 2:
event = sections[0].strip() # first section is event
event = event[0:4] # event identified by first 4 characters
command = sections[1] # second section is command
command = command.strip() # remove space
else:
event = None
command = None
return event, command
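# For example (a sketch of the behavior above, not part of the original module):
#   parse_line('PUMP: 500 # flush with water')  returns ('PUMP', '500')
#   parse_line('# a comment-only line')         returns (None, None)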
##########################################################
## Setup Logging #########################################
##########################################################
def setup_logger():
"""Create a logger and return the handle."""
# Get experiment info from config file
experiment = config['experiment']
experiment_name = experiment['experiment name']
# Make directory to save data
save_path = join(experiment['save path'],experiment_name)
if not os.path.exists(save_path):
os.mkdir(save_path)
# Make directory to save logs
log_path = join(save_path, experiment['log path'])
if not os.path.exists(log_path):
os.mkdir(log_path)
# Create a custom logger
logger = logging.getLogger(__name__)
logger.setLevel(10)
# Create console handler
c_handler = logging.StreamHandler()
c_handler.setLevel(21)
# Create file handler
f_log_name = join(log_path,experiment_name + '.log')
f_handler = logging.FileHandler(f_log_name)
f_handler.setLevel(logging.INFO)
# Create formatters and add it to handlers
c_format = logging.Formatter('%(asctime)s - %(message)s', datefmt = '%Y-%m-%d %H:%M')
f_format = logging.Formatter('%(asctime)s - %(message)s')
c_handler.setFormatter(c_format)
f_handler.setFormatter(f_format)
# Add handlers to the logger
logger.addHandler(c_handler)
logger.addHandler(f_handler)
# Save copy of config with log
config_path = join(log_path,'config.cfg')
with open(config_path, 'w') as configfile:
config.write(configfile)
return logger
def configure_instrument(IMAG_counter, port_dict):
"""Configure and check HiSeq settings."""
global n_errors
model, name = methods.get_machine_info(args_['virtual'])
if model is not None:
config['experiment']['machine'] = model+'::'+name
experiment = config['experiment']
method = experiment['method']
method = config[method]
try:
total_cycles = int(experiment.get('cycles'))
except:
error('ConfigFile:: Cycles not specified')
    # Create HiSeq object
if model == 'HiSeq2500':
if args_['virtual']:
from . import virtualHiSeq
hs = virtualHiSeq.HiSeq(name, logger)
hs.speed_up = int(method.get('speed up', fallback = 5000))
else:
import pyseq
com_ports = pyseq.get_com_ports()
hs = pyseq.HiSeq(name, logger)
else:
sys.exit()
# Check side ports
try:
side_ports = method.get('side ports', fallback = '9,21,22,23,24')
side_ports = side_ports.split(',')
side_ports = list(map(int, side_ports))
except:
error('ConfigFile:: Side ports not valid')
# Check sample port
try:
sample_port = int(method.get('sample port', fallback = 20))
except:
error('ConfigFile:: Sample port not valid')
# Check barrels per lane make sense:
n_barrels = int(method.get('barrels per lane', fallback = 1)) # Get method specific pump barrels per lane, fallback to 1
if n_barrels not in [1,2,4,8]:
error('ConfigFile:: Barrels per lane must be 1, 2, 4 or 8')
# Check inlet ports, note switch inlet ports in initialize_hs
inlet_ports = int(method.get('inlet ports', fallback = 2))
if inlet_ports not in [2,8]:
error('MethodFile:: inlet ports must be 2 or 8.')
variable_ports = method.get('variable reagents', fallback = None)
hs.z.image_step = int(method.get('z position', fallback = 21500))
hs.overlap = abs(int(method.get('overlap', fallback = 0)))
hs.overlap_dir = method.get('overlap direction', fallback = 'left').lower()
if hs.overlap_dir not in ['left', 'right']:
error('MethodFile:: overlap direction must be left or right')
for fc in flowcells.values():
AorB = fc.position
hs.v24[AorB].side_ports = side_ports
hs.v24[AorB].sample_port = sample_port
hs.v24[AorB].port_dict = port_dict # Assign ports on HiSeq
if variable_ports is not None:
v_ports = variable_ports.split(',')
for v in v_ports: # Assign variable ports
hs.v24[AorB].variable_ports.append(v.strip())
hs.p[AorB].update_limits(n_barrels) # Assign barrels per lane to pump
for section in fc.sections: # Convert coordinate sections on flowcell to stage info
pos = hs.position(AorB, fc.sections[section])
fc.stage[section] = pos
fc.stage[section]['z_pos'] = [hs.z.image_step]*3
    ## TODO: Changing laser color unnecessary for now, revisit if upgrading HiSeq
# Configure laser color & filters
# colors = [method.get('laser color 1', fallback = 'green'),
# method.get('laser color 2', fallback = 'red')]
# for i, color in enumerate(default_colors):
# if color is not colors[i]:
# laser = hs.lasers.pop(color) # Remove default laser color
# hs.lasers[colors[i]] = laser # Add new laser
# hs.lasers[colors[i]].color = colors[i] # Update laser color
# hs.optics.colors[i] = colors[i] # Update laser line color
# Check laser power
for color in hs.lasers.keys():
lp = int(method.get(color+' laser power', fallback = 10))
if hs.lasers[color].min_power <= lp <= hs.lasers[color].max_power:
hs.lasers[color].set_point = lp
else:
error('MethodFile:: Invalid '+color+' laser power')
#Check filters for laser at each cycle are valid
hs.optics.cycle_dict = check_filters(hs.optics.cycle_dict, hs.optics.ex_dict)
focus_filters = [method.get('green focus filter', fallback = 2.0),
method.get('red focus filter', fallback = 2.4)]
for i, f in enumerate(focus_filters):
try:
f = float(f)
except:
pass
if f not in hs.optics.ex_dict[hs.optics.colors[i]]:
error('ConfigFile:: Focus filter not valid.')
else:
hs.optics.focus_filters[i] = f
# Check Autofocus Settings
hs.AF = method.get('autofocus', fallback = 'partial once')
if hs.AF.lower() in ['','none']: hs.AF = None
if hs.AF not in ['partial', 'partial once', 'full', 'full once', 'manual', None]:
# Skip autofocus and set objective position in config file
try:
if hs.obj.min_z <= int(hs.AF) <= hs.obj.max_z:
hs.AF = int(hs.AF)
except:
error('ConfigFile:: Auto focus method not valid.')
#Enable/Disable z stage
hs.z.active = method.getboolean('enable z stage', fallback = True)
# Get focus Tolerance
hs.focus_tol = float(method.get('focus tolerance', fallback = 0))
# Get focus range
range = float(method.get('focus range', fallback = 90))
spacing = float(method.get('focus spacing', fallback = 4.1))
hs.obj.update_focus_limits(range=range, spacing=spacing) # estimate, get actual value in hs.obj_stack()
hs.stack_split = float(method.get('stack split', fallback = 2/3))
hs.bundle_height = int(method.get('bundle height', fallback = 128))
# Assign output directory
save_path = experiment['save path']
experiment_name = experiment['experiment name']
save_path = join(experiment['save path'], experiment['experiment name'])
if not os.path.exists(save_path):
try:
os.mkdir(save_path)
except:
error('ConfigFile:: Save path not valid.')
# Assign image directory
image_path = join(save_path, experiment['image path'])
if not os.path.exists(image_path):
os.mkdir(image_path)
with open(join(image_path,'machine_name.txt'),'w') as file:
file.write(hs.name)
hs.image_path = image_path
# Assign log directory
log_path = join(save_path, experiment['log path'])
if not os.path.exists(log_path):
os.mkdir(log_path)
hs.log_path = log_path
return hs
def confirm_settings(recipe_z_planes = []):
"""Have user confirm the HiSeq settings before experiment."""
experiment = config['experiment']
method = experiment['method']
method = config[method]
total_cycles = int(experiment['cycles'])
# Print settings to screen
try:
import tabulate
print_table = True
except:
print_table = False
if n_errors > 0:
print()
if not userYN('Continue checking experiment before exiting'):
sys.exit()
# Experiment summary
print()
print('-'*80)
print()
print(experiment['experiment name'], 'summary')
print()
print('method:', experiment['method'])
print('recipe:', method['recipe'])
print('cycles:', experiment['cycles'])
pre_recipe = method.get('pre recipe', fallback = None)
if pre_recipe is not None:
print('pre recipe:', pre_recipe)
first_port = method.get('first port', fallback = None)
if first_port is not None:
print('first_port:', first_port)
print('save path:', experiment['save path'])
print('enable z stage:', hs.z.active)
print('machine:', experiment['machine'])
print()
if not userYN('Confirm experiment'):
sys.exit()
print()
# Flowcell summary
table = {}
for fc in flowcells:
table[fc] = flowcells[fc].sections.keys()
print('-'*80)
print()
print('Flowcells:')
print()
if print_table:
print(tabulate.tabulate(table, headers = 'keys', tablefmt = 'presto'))
else:
print(table)
print()
if not userYN('Confirm flowcells'):
sys.exit()
print()
# Valve summary:
table = []
ports = []
for port in port_dict:
if not isinstance(port_dict[port], dict):
ports.append(int(port_dict[port]))
table.append([port_dict[port], port])
print('-'*80)
print()
print('Valve:')
print()
if print_table:
print(tabulate.tabulate(table, headers=['port', 'reagent'], tablefmt = 'presto'))
else:
print(table)
print()
if not userYN('Confirm valve assignment'):
sys.exit()
print()
# Pump summary:
AorB = [*flowcells.keys()][0]
fc = flowcells[AorB]
print('-'*80)
print()
print('Pump Settings:')
print()
inlet_ports = int(method.get('inlet ports', fallback = 2))
print('Reagents pumped through row with ', inlet_ports, 'inlet ports')
print(hs.p[AorB].n_barrels, 'syringe pump barrels per lane')
print('Flush volume:',fc.volume['flush'], 'μL')
if any([True for port in ports if port in [*range(1,9),*range(10,20)]]):
print('Main prime volume:', fc.volume['main'], 'μL')
if any([True for port in ports if port in [9,21,22,23,24]]):
print('Side prime volume:', fc.volume['side'], 'μL')
if 20 in ports:
print('Sample prime volume:', fc.volume['sample'], 'μL')
print('Flush flowrate:',fc.pump_speed['flush'], 'μL/min')
print('Prime flowrate:',fc.pump_speed['prime'], 'μL/min')
print('Reagent flowrate:',fc.pump_speed['reagent'], 'μL/min')
print('Max volume:', hs.p[AorB].max_volume, 'μL')
print('Min flow:', hs.p[AorB].min_flow, 'μL/min')
print()
if not userYN('Confirm pump settings'):
sys.exit()
# Cycle summary:
variable_ports = hs.v24[AorB].variable_ports
start_cycle = 1
if method.get('pre recipe', fallback = None) is not None:
start_cycle = 0
table = []
for cycle in range(start_cycle,total_cycles+1):
row = []
row.append(cycle)
if len(variable_ports) > 0:
for vp in variable_ports:
if cycle > 0:
row.append(port_dict[vp][cycle])
else:
row.append(None)
if IMAG_counter > 0:
colors = [*hs.optics.cycle_dict.keys()]
for color in colors:
row.append(hs.optics.cycle_dict[color][cycle])
else:
colors = []
table.append(row)
print('-'*80)
print()
print('Cycles:')
print()
if len(variable_ports) + len(colors) > 0:
headers = ['cycle', *variable_ports, *colors]
if print_table:
print(tabulate.tabulate(table, headers, tablefmt='presto'))
else:
print(headers)
print(table)
print()
stop_experiment = not userYN('Confirm cycles')
else:
if total_cycles == 1:
stop_experiment = not userYN('Confirm only 1 cycle')
else:
stop_experiment = not userYN('Confirm all', total_cycles, 'cycles are the same')
if stop_experiment:
sys.exit()
print()
if IMAG_counter > 0:
print('-'*80)
print()
print('Imaging settings:')
print()
laser_power = [hs.lasers['green'].set_point,
hs.lasers['red'].set_point]
print('green laser power:', laser_power[0], 'mW')
print('red laser power:',laser_power[1], 'mW')
print('autofocus:', hs.AF)
if hs.AF is not None:
print('focus spacing', hs.obj.focus_spacing,'um')
print('focus range', hs.obj.focus_range, '%')
if hs.focus_tol > 0 and hs.AF != 'manual':
print('focus tolerance:', hs.focus_tol, 'um')
elif hs.AF != 'manual':
print('focus tolerance: None')
print('WARNING::Out of focus image risk increased')
for i, filter in enumerate(hs.optics.focus_filters):
if filter == 'home':
focus_laser_power = 0
elif filter == 'open':
focus_laser_power = laser_power[i]
else:
focus_laser_power = laser_power[i]*10**(-float(filter))
print(colors[i+1], 'focus laser power ~', focus_laser_power, 'mW')
print('z position when imaging:', hs.z.image_step)
if hs.overlap > 0:
print('pixel overlap:', hs.overlap)
print('overlap direction:', hs.overlap_dir)
z_planes = int(method.get('z planes', fallback = 0))
if z_planes > 0:
print('z planes:', z_planes)
else:
print('z planes:', *recipe_z_planes)
if z_planes > 1 or any(recipe_z_planes):
print('stack split:', hs.stack_split)
if not userYN('Confirm imaging settings'):
sys.exit()
# Check if previous focus positions have been found, and confirm to use
if os.path.exists(join(hs.log_path, 'focus_config.cfg')):
focus_config = configparser.ConfigParser()
focus_config.read(join(hs.log_path, 'focus_config.cfg'))
cycles = 0
sections = []
for section in config.options('sections'):
if focus_config.has_section(section):
sections.append(section)
n_focus_cycles = len(focus_config.options(section))
if n_focus_cycles > cycles:
cycles = n_focus_cycles
table = []
for section in sections:
row = []
row.append(section)
for c in range(1,cycles+1):
if focus_config.has_option(section, str(c)):
row.append(focus_config[section][str(c)])
else:
row.append(None)
table.append(row)
if len(sections) > 0 and cycles > 0:
print('-'*80)
print()
print('Previous Autofocus Objective Positions:')
print()
headers = ['section', *['cycle'+str(c) for c in range(1,cycles+1)]]
if print_table:
print(tabulate.tabulate(table, headers, tablefmt='presto'))
else:
print(headers)
print(table)
print()
if not userYN('Confirm using previous autofocus positions'):
sys.exit()
print()
##########################################################
## Setup HiSeq ###########################################
##########################################################
def initialize_hs(IMAG_counter):
"""Initialize the HiSeq and return the handle."""
global n_errors
experiment = config['experiment']
method = experiment['method']
method = config[method]
    if n_errors == 0:
if not userYN('Initialize HiSeq'):
sys.exit()
hs.initializeCams(logger)
x_homed = hs.initializeInstruments()
if not x_homed:
error('HiSeq:: X-Stage did not home correctly')
# HiSeq Settings
inlet_ports = int(method.get('inlet ports', fallback = 2))
hs.move_inlet(inlet_ports) # Move to 2 or 8 port inlet
# Set laser power
for color in hs.lasers.keys():
laser_power = int(method.get(color+' laser power', fallback = 10))
hs.lasers[color].set_power(laser_power)
if IMAG_counter > 0:
if not hs.lasers[color].on:
error('HiSeq:: Lasers did not turn on.')
hs.f.LED('A', 'off')
hs.f.LED('B', 'off')
LED('all', 'startup')
hs.move_stage_out()
return hs
##########################################################
## Check Instructions ####################################
##########################################################
def check_instructions():
"""Check the instructions for errors.
**Returns:**
- first_line (int): Line number for the recipe to start from on the
initial cycle.
- IMAG_counter (int): The number of imaging steps.
"""
method = config.get('experiment', 'method')
method = config[method]
first_port = method.get('first port', fallback = None) # Get first reagent to use in recipe
# Backdoor to input line number for first step in recipe
try:
first_port = int(first_port)
first_line = first_port
first_port = None
except:
first_line = 0
variable_ports = method.get('variable reagents', fallback = None)
valid_wait = []
ports = []
for port in config['reagents'].items():
ports.append(port[1])
if variable_ports is not None:
variable_ports = variable_ports.split(',')
for port in variable_ports:
ports.append(port.strip())
valid_wait = ports
valid_wait.append('IMAG')
valid_wait.append('STOP')
valid_wait.append('TEMP')
recipes = {}
recipes['Recipe'] = config['experiment']['recipe path']
pre_recipe = method.get('pre recipe',fallback= None)
if pre_recipe is not None:
recipes['Pre Recipe'] = pre_recipe
for recipe in sorted([*recipes.keys()]):
f = recipes[recipe]
try:
f = open(recipes[recipe])
except:
error(recipe,'::Unable to open', recipes[recipe])
#Remove blank lines
f_ = [line for line in f if line.strip()]
f.close()
IMAG_counter = 0.0
wait_counter = 0
z_planes = []
for line_num, line in enumerate(f_):
instrument, command = parse_line(line)
if instrument == 'PORT':
# Make sure ports in instruction files exist in port dictionary in config file
if command not in ports:
error(recipe,'::', command, 'on line', line_num,
'is not listed as a reagent')
#Find line to start at for first cycle
                if first_line == 0 and first_port is not None and recipe == 'Recipe':
if command.find(first_port) != -1:
first_line = line_num
# Make sure pump volume is a number
elif instrument == 'PUMP':
if command.isdigit() == False:
error(recipe,'::Invalid volume on line', line_num)
# Make sure wait command is valid
elif instrument == 'WAIT':
wait_counter += 1
if command not in valid_wait:
error(recipe,'::Invalid wait command on line', line_num)
# Make sure z planes is a number
elif instrument == 'IMAG':
IMAG_counter = int(IMAG_counter + 1)
                # Flag to check that WAIT is used before IMAG for 2 flowcells
if wait_counter >= IMAG_counter:
IMAG_counter = float(IMAG_counter)
if command.isdigit() == False:
error(recipe,'::Invalid number of z planes on line', line_num)
else:
z_planes.append(command)
# Make sure hold time (minutes) is a number
elif instrument == 'HOLD':
if command.isdigit() == False:
if command != 'STOP':
error(recipe,'::Invalid time on line', line_num)
else:
print(recipe,'::WARNING::HiSeq will stop until user input at line',
line_num)
elif instrument == 'TEMP':
if not command.isdigit():
error(recipe,'::Invalid temperature on line', line_num)
# # Warn user that HiSeq will completely stop with this command
# elif instrument == 'STOP':
# print('WARNING::HiSeq will stop until user input at line',
# line_num)
# Make sure the instrument name is valid
else:
error(recipe,'::Bad instrument name on line',line_num)
print(line)
return first_line, IMAG_counter, z_planes
##########################################################
## Check Ports ###########################################
##########################################################
def check_ports():
"""Check for port errors and return a port dictionary.
"""
method = config.get('experiment', 'method')
method = config[method]
total_cycles = int(config.get('experiment', 'cycles'))
# Get cycle and port information from configuration file
valve = config['reagents'] # Get dictionary of port number of valve : name of reagent
cycle_variables = method.get('variable reagents', fallback = None ) # Get list of port names in recipe that change every cycle
cycle_reagents = config['cycles'].items() # Get variable reagents that change with each cycle
port_dict = {}
# Make sure there are no duplicated names in the valve
if len(valve.values()) != len(set(valve.values())):
error('ConfigFile: Reagent names are not unique')
#TODO: PRINT DUPLICATES
if len(valve) > 0:
# Create port dictionary
for port in valve.keys():
try:
port_dict[valve[port]] = int(port)
except:
error('ConfigFile:List reagents as n (int) = name (str) ')
# Add cycle variable port dictionary
if cycle_variables is not None:
cycle_variables = cycle_variables.split(',')
for variable in cycle_variables:
variable = variable.replace(' ','')
if variable in port_dict:
error('ConfigFile::Variable', variable, 'can not be a reagent')
else:
port_dict[variable] = {}
# Fill cycle variable port dictionary with cycle: reagent name
for cycle in cycle_reagents:
reagent = cycle[1]
variable, cyc_number = cycle[0].split(' ')
if reagent in valve.values():
if variable in port_dict:
port_dict[variable][int(cyc_number)] = reagent
else:
error('ConfigFile::', variable, 'not listed as variable reagent')
else:
error('ConfigFiles::Cycle reagent:', reagent, 'does not exist on valve')
# Check number of reagents in variable reagents matches number of total cycles
for variable in cycle_variables:
variable = variable.replace(' ','')
if len(port_dict[variable]) != total_cycles:
error('ConfigFile::Number of', variable, 'reagents does not match experiment cycles')
else:
print('WARNING::No ports are specified')
# table = []
# for port in port_dict:
# if not isinstance(port_dict[port], dict):
# table.append([port_dict[port], port])
# print('Valve summary')
# print(tabulate.tabulate(table, headers=['port', 'reagent'], tablefmt = 'presto'))
return port_dict
def check_filters(cycle_dict, ex_dict):
"""Check filter section of config file.
**Errors:**
- Invalid Filter: System exits when a listed filter does not match
configured filters on the HiSeq.
- Duplicate Cycle: System exists when a filter for a laser is listed for
the same cycle more than once.
- Invalid laser: System exits when a listed laser color does not match
configured laser colors on the HiSeq.
"""
colors = [*cycle_dict.keys()]
# Check laser, cycle, and filter are valid
cycle_filters = config['filters'].items()
for item in cycle_filters:
# Get laser cycle = filter
filter = item[1]
# filters are floats, except for home and open,
# and emission (True/False)
if filter.lower() in ['true', 'yes', '1', 't', 'y']:
filter = True
elif filter.lower() in ['false', 'no', '0', 'f', 'n']:
filter = False
elif filter not in ['home','open']:
filter = float(filter)
laser, cycle = item[0].split()
cycle = int(cycle)
# Check if laser is valid, can use partial match ie, g or G for green
if laser in colors:
laser = [laser]
else:
laser = [colors[i] for i, c in enumerate(colors) if laser.lower() in c[0]]
if len(laser) > 0:
laser = laser[0]
if laser in ex_dict.keys():
if filter in ex_dict[laser]:
if cycle not in cycle_dict[laser]:
cycle_dict[laser][cycle] = filter
else:
error('ConfigFile::Duplicated cycle for', laser, 'laser')
elif laser == 'em':
if isinstance(filter, bool):
if cycle not in cycle_dict[laser]:
cycle_dict[laser][cycle] = filter
else:
error('ConfigFile::Duplicated emission filter cycle')
else:
error('ConfigFile::Invalid filter for', laser, 'laser')
else:
error('ConfigFile:Invalid laser')
    # Add default/home to cycles without filters specified
method = config.get('experiment', 'method')
method = config[method]
start_cycle = 1
if method.get('pre recipe', fallback = None):
start_cycle = 0
last_cycle = int(config.get('experiment','cycles'))+1
# Get/check default filters
default_filters = {}
fallbacks = {'red':'home', 'green':'home', 'em':'True'}
for laser in colors:
filter = method.get('default '+laser+' filter', fallback = fallbacks[laser])
try:
filter = float(filter)
except:
pass
if laser in ex_dict.keys():
if filter in ex_dict[laser].keys():
default_filters[laser] = filter
elif laser == 'em':
if filter in ['True', 'False']:
default_filters[laser] = filter
# Assign default filters to missing cycles
for cycle in range(start_cycle,last_cycle):
for laser in colors:
if cycle not in cycle_dict[laser]:
cycle_dict[laser][cycle] = default_filters[laser]
return cycle_dict
def LED(AorB, indicate):
"""Control front LEDs to communicate what the HiSeq is doing.
**Parameters:**
- AorB (str): Flowcell position (A or B), or all.
- indicate (str): Current action of the HiSeq or state of the flowcell.
    =========== =========== ==================================================
    LED MODE    indicator   HiSeq Action / Flowcell State
    =========== =========== ==================================================
    off         off         The flowcell is not in use.
    yellow      error       There is an error with the flowcell.
    green       startup     The HiSeq is starting up or shutting down
    pulse green user        The HiSeq requires user input
    blue        sleep       The flowcell is holding or waiting.
    pulse blue  awake       HiSeq valve, pump, or temperature action on the flowcell.
    sweep blue  imaging     HiSeq is imaging the flowcell.
    =========== =========== ==================================================
"""
fc = []
if AorB in flowcells.keys():
fc = [AorB]
elif AorB == 'all':
fc = [*flowcells.keys()]
for AorB in fc:
if indicate == 'startup':
hs.f.LED(AorB, 'green')
elif indicate == 'user':
hs.f.LED(AorB, 'pulse green')
elif indicate == 'error':
hs.f.LED(AorB, 'yellow')
elif indicate == 'sleep':
hs.f.LED(AorB, 'blue')
elif indicate == 'awake':
hs.f.LED(AorB, 'pulse blue')
elif indicate == 'imaging':
hs.f.LED(AorB, 'sweep blue')
elif indicate == 'off':
hs.f.LED(AorB, 'off')
return True
def userYN(*args):
"""Ask a user a Yes/No question and return True if Yes, False if No."""
question = ''
for a in args:
question += str(a) + ' '
response = True
while response:
answer = input(question + '? Y/N = ')
answer = answer.upper().strip()
if answer == 'Y':
response = False
answer = True
elif answer == 'N':
response = False
answer = False
return answer
def do_flush():
"""Flush all, some, or none of lines."""
AorB_ = [*flowcells.keys()][0]
port_dict = hs.v24[AorB_].port_dict
# Select lines to flush
LED('all', 'user')
confirm = False
while not confirm:
flush_ports = input("Flush all, some, or none of the lines? ")
if flush_ports.strip().lower() == 'all':
flush_all = True
flush_ports = [*port_dict.keys()]
for vp in hs.v24[AorB_].variable_ports:
if vp in flush_ports:
flush_ports.remove(vp)
confirm = userYN('Confirm flush all lines')
        elif flush_ports.strip().lower() in ['none', 'n', '']:
flush_ports = []
confirm = userYN('Confirm skip flushing lines')
else:
good =[]
bad = []
for fp in flush_ports.split(','):
fp = fp.strip()
if fp in port_dict.keys():
good.append(fp)
else:
try:
fp = int(fp)
if fp in range(1,hs.v24[AorB_].n_ports+1):
good.append(fp)
else:
bad.append(fp)
except:
bad.append(fp)
if len(bad) > 0:
print('Valid ports:', *good)
print('Invalid ports:', *bad)
confirm = not userYN('Re-enter lines to flush')
else:
confirm = userYN('Confirm only flushing',*good)
if confirm:
flush_ports = good
if len(flush_ports) > 0:
while not userYN('Temporary flowcell(s) locked on to stage'): pass
while not userYN('All valve input lines in water'): pass
while not userYN('Ready to flush'): pass
LED('all', 'startup')
# Flush ports
speed = flowcells[AorB_].pump_speed['flush']
volume = flowcells[AorB_].volume['flush']
for port in flush_ports:
if port in hs.v24[AorB_].variable_ports:
                flush_ports.extend(hs.v24[AorB_].port_dict[port].values())
else:
hs.message('Flushing ' + str(port))
for fc in flowcells.values():
AorB = fc.position
fc.thread = threading.Thread(target=hs.v24[AorB].move,
args=(port,))
fc.thread.start()
alive = True
while alive:
alive_ = []
for fc in flowcells.values():
alive_.append(fc.thread.is_alive())
alive = any(alive_)
for fc in flowcells.values():
AorB = fc.position
fc.thread = threading.Thread(target=hs.p[AorB].pump,
args=(volume, speed,))
fc.thread.start()
alive = True
while alive:
alive_ = []
for fc in flowcells.values():
alive_.append(fc.thread.is_alive())
alive = any(alive_)
##########################################################
## Flush Lines ###########################################
##########################################################
def do_prime(flush_YorN):
"""Prime lines with all reagents in config if prompted."""
LED('all', 'user')
## Prime lines
confirm = False
while not confirm:
prime_YorN = userYN("Prime lines")
if prime_YorN:
confirm = userYN("Confirm prime lines")
else:
confirm = userYN("Confirm skip priming lines")
# LED('all', 'startup')
# hs.z.move([0,0,0])
# hs.move_stage_out()
#LED('all', 'user')
if prime_YorN:
if flush_YorN:
while not userYN('Temporary flowcell(s) locked on to stage'): pass
while not userYN('Valve input lines in reagents'): pass
while not userYN('Ready to prime lines'): pass
#Flush all lines
LED('all', 'startup')
while True:
AorB_ = [*flowcells.keys()][0]
port_dict = hs.v24[AorB_].port_dict
speed = flowcells[AorB_].pump_speed['prime']
for port in port_dict.keys():
if isinstance(port_dict[port], int):
hs.message('Priming ' + str(port))
for fc in flowcells.values():
port_num = port_dict[port]
AorB = fc.position
fc.thread = threading.Thread(target=hs.v24[AorB].move,
args=(port,))
fc.thread.start()
alive = True
while alive:
alive_ = []
for fc in flowcells.values():
alive_.append(fc.thread.is_alive())
alive = any(alive_)
for fc in flowcells.values():
if port_num in hs.v24[AorB].side_ports:
volume = fc.volume['side']
elif port_num == hs.v24[AorB].sample_port:
volume = fc.volume['sample']
else:
volume = fc.volume['main']
AorB = fc.position
fc.thread = threading.Thread(target=hs.p[AorB].pump,
args=(volume, speed,))
fc.thread.start()
alive = True
while alive:
alive_ = []
for fc in flowcells.values():
alive_.append(fc.thread.is_alive())
alive = any(alive_)
break
# Rinse flowcells
method = config.get('experiment', 'method') # Read method specific info
method = config[method]
rinse_port = method.get('rinse', fallback = None)
rinse = rinse_port in hs.v24[AorB].port_dict
if rinse_port == port: # Option to skip rinse if last reagent pump was rinse reagent
rinse = False
# Get rinse reagents
if not rinse:
LED('all', 'user')
print('Last reagent pumped was', port)
if userYN('Rinse flowcell'):
while not rinse:
if rinse_port not in hs.v24[AorB].port_dict:
rinse_port = input('Specify rinse reagent: ')
rinse = rinse_port in hs.v24[AorB].port_dict
if not rinse:
print('ERROR::Invalid rinse reagent')
print('Choose from:', *list(hs.v24[AorB].port_dict.keys()))
if rinse:
# Simultaneously Rinse Flowcells
for fc in flowcells.values():
fc.thread = threading.Thread(target=do_rinse,
args=(fc,rinse_port,))
fc.thread.start()
alive = True
# Wait for rinsing to complete
while alive:
alive_ = []
for fc in flowcells.values():
alive_.append(fc.thread.is_alive())
alive = any(alive_)
LED('all', 'user')
while not userYN('Temporary flowcell(s) removed'): pass
while not userYN('Experiment flowcell(s) locked on to stage'): pass
if not prime_YorN:
while not userYN('Valve input lines in reagents'): pass
while not userYN('Door closed'): pass
##########################################################
def do_nothing():
"""Do nothing."""
pass
##########################################################
## iterate over lines, send to pump, and print response ##
##########################################################
def do_recipe(fc):
"""Do the next event in the recipe.
**Parameters:**
- fc (flowcell): The current flowcell.
"""
AorB = fc.position
fc.thread = None
# Skip to first line of recipe on initial cycle
if fc.cycle == 1 and fc.first_line is not None:
for i in range(fc.first_line):
line = fc.recipe.readline()
fc.first_line = None
#get instrument and command
instrument = None
while instrument is None:
line = fc.recipe.readline()
if line:
instrument, command = parse_line(line)
else:
break
if line:
# Move reagent valve
if instrument == 'PORT':
            # Move to cycle-specific reagent if it is a variable reagent
if fc.cycle <= fc.total_cycles:
if command in hs.v24[AorB].variable_ports:
command = hs.v24[AorB].port_dict[command][fc.cycle]
log_message = 'Move to ' + command
fc.thread = threading.Thread(target = hs.v24[AorB].move,
args = (command,))
if fc.cycle <= fc.total_cycles:
LED(AorB, 'awake')
# Pump reagent into flowcell
elif instrument == 'PUMP':
volume = int(command)
speed = fc.pump_speed['reagent']
log_message = 'Pumping ' + str(volume) + ' uL'
fc.thread = threading.Thread(target = hs.p[AorB].pump,
args = (volume, speed,))
if fc.cycle <= fc.total_cycles:
LED(AorB, 'awake')
# Incubate flowcell in reagent for set time
elif instrument == 'HOLD':
if command.isdigit():
holdTime = float(command)*60
log_message = 'Flowcell holding for ' + str(command) + ' min.'
if hs.virtual:
fc.thread = threading.Timer(holdTime/hs.speed_up, fc.endHOLD)
#fc.thread = threading.Timer(holdTime, fc.endHOLD)
else:
fc.thread = threading.Timer(holdTime, fc.endHOLD)
elif command == 'STOP':
hs.message('PySeq::Paused')
LED(AorB, 'user')
input("Press enter to continue...")
log_message = ('Continuing...')
fc.thread = threading.Thread(target = do_nothing)
if fc.cycle <= fc.total_cycles:
LED(AorB, 'sleep')
# Wait for other flowcell to finish event before continuing with current flowcell
elif instrument == 'WAIT':
if command == 'TEMP':
fc.thread = threading.Thread(target = hs.T.wait_fc_T,
args=(AorB, fc.temperature,))
log_message = ('Waiting to reach '+str(fc.temperature)+'°C')
elif fc.waits_for is not None:
if command in flowcells[fc.waits_for].events_since_IMAG:
log_message = command + ' has occurred, skipping WAIT'
fc.thread = threading.Thread(target = do_nothing)
else:
log_message = 'Waiting for ' + command
fc.thread = threading.Thread(target = WAIT,
args = (AorB, command,))
else:
log_message = 'Skip waiting for ' + command
fc.thread = threading.Thread(target = do_nothing)
if fc.cycle <= fc.total_cycles:
LED(AorB, 'sleep')
# Image the flowcell
elif instrument == 'IMAG':
if hs.scan_flag and fc.cycle <= fc.total_cycles:
hs.message('PySeq::'+AorB+'::Waiting for camera')
while hs.scan_flag:
pass
#hs.scan_flag = True
fc.events_since_IMAG = []
log_message = 'Imaging flowcell'
fc.thread = threading.Thread(target = IMAG,
args = (fc,int(command),))
if fc.cycle <= fc.total_cycles:
LED(AorB, 'imaging')
elif instrument == 'TEMP':
log_message = 'Setting temperature to ' + command + ' °C'
command = float(command)
fc.thread = threading.Thread(target = hs.T.set_fc_T,
args = (AorB,command,))
fc.temperature = command
# Block all further processes until user input
# elif instrument == 'STOP':
# hs.message('PySeq::Paused')
# LED(AorB, 'user')
# input("Press enter to continue...")
# hs.message('PySeq::Continuing...')
#Signal to other flowcell that current flowcell reached signal event
if fc.signal_event == instrument or fc.signal_event == command:
fc.wait_thread.set()
fc.signal_event = None
# Start new action on current flowcell
if fc.thread is not None and fc.cycle <= fc.total_cycles:
fc.addEvent(instrument, command)
hs.message('PySeq::'+AorB+'::cycle'+str(fc.cycle)+'::'+log_message)
thread_id = fc.thread.start()
elif fc.thread is not None and fc.cycle > fc.total_cycles:
fc.thread = threading.Thread(target = time.sleep, args = (10,))
else:
# End of recipe
fc.restart_recipe()
##########################################################
## Image flowcell ########################################
##########################################################
def IMAG(fc, n_Zplanes):
"""Image the flowcell at a number of z planes.
For each section on the flowcell, the stage is first positioned
to the center of the section to find the optimal focus. Then if no
optical settings are listed, the optimal filter sets are found.
Next, the stage is repositioned to scan the entire section and
image the specified number of z planes.
    **Parameters:**
    - fc (flowcell): The flowcell to image.
    - n_Zplanes (int): The number of z planes to image.
    **Returns:**
    - int: Time in seconds to scan the entire section.
"""
hs.scan_flag = True
AorB = fc.position
cycle = str(fc.cycle)
start = time.time()
# Manual focus ALL sections across flowcells
if hs.AF == 'manual':
focus.manual_focus(hs, flowcells)
hs.AF = 'partial once'
#Image sections on flowcell
for section in fc.sections:
pos = fc.stage[section]
hs.y.move(pos['y_initial'])
hs.x.move(pos['x_initial'])
hs.z.move(pos['z_pos'])
hs.obj.move(hs.obj.focus_rough)
# Autofocus
msg = 'PySeq::' + AorB + '::cycle' + cycle+ '::' + str(section) + '::'
if hs.AF and not isinstance(hs.AF, int):
obj_pos = focus.get_obj_pos(hs, section, cycle)
if obj_pos is None:
# Move to focus filters
for i, color in enumerate(hs.optics.colors):
hs.optics.move_ex(color,hs.optics.focus_filters[i])
hs.message(msg + 'Start Autofocus')
try:
if hs.autofocus(pos): # Moves to optimal objective position
hs.message(msg + 'Autofocus complete')
pos['obj_pos'] = hs.obj.position
else: # Moves to rough focus objective position
hs.message(msg + 'Autofocus failed')
pos['obj_pos'] = None
except:
hs.message(msg + 'Autofocus failed')
print(sys.exc_info()[0])
pos['obj_pos'] = None
else:
hs.obj.move(obj_pos)
pos['obj_pos'] = hs.obj.position
focus.write_obj_pos(hs, section, cycle)
#Override recipe number of z planes
if fc.z_planes is not None: n_Zplanes = fc.z_planes
# Calculate objective positions to image
if n_Zplanes > 1 and not isinstance(hs.AF, int):
obj_start = int(hs.obj.position - hs.nyquist_obj*n_Zplanes*hs.stack_split) # (Default) 2/3 of planes below opt_ob_pos and 1/3 of planes above
elif isinstance(hs.AF, int):
obj_start = hs.AF
else:
obj_start = hs.obj.position
image_name = AorB
image_name += '_s' + str(section)
image_name += '_r' + cycle
if fc.IMAG_counter is not None:
image_name += '_' + str(fc.IMAG_counter)
# Scan section on flowcell
hs.y.move(pos['y_initial'])
hs.x.move(pos['x_initial'])
hs.obj.move(obj_start)
n_tiles = pos['n_tiles']
n_frames = pos['n_frames']
# Set filters
for color in hs.optics.cycle_dict.keys():
filter = hs.optics.cycle_dict[color][fc.cycle]
        if color == 'em':
hs.optics.move_em_in(filter)
else:
hs.optics.move_ex(color, filter)
hs.message(msg + 'Start Imaging')
try:
scan_time = hs.scan(n_tiles, n_Zplanes, n_frames, image_name)
scan_time = str(int(scan_time/60))
hs.message(msg + 'Imaging completed in', scan_time, 'minutes')
except:
error('Imaging failed.')
# Reset filters
for color in hs.optics.cycle_dict.keys():
        if color == 'em':
hs.optics.move_em_in(True)
else:
hs.optics.move_ex(color, 'home')
if fc.IMAG_counter is not None:
fc.IMAG_counter += 1
hs.scan_flag = False
def WAIT(AorB, event):
    """Hold the flowcell *AorB* until the specified event in the other flowcell.
    **Parameters:**
    - AorB (str): Flowcell position, A or B, to be held.
    - event: Event in the other flowcell that releases the held flowcell.
    **Returns:**
    - int: Time in seconds the current flowcell was held.
"""
signaling_fc = flowcells[AorB].waits_for
cycle = str(flowcells[AorB].cycle)
start = time.time()
flowcells[signaling_fc].signal_event = event # Set the signal event in the signal flowcell
flowcells[signaling_fc].wait_thread.wait() # Block until signal event in signal flowcell
hs.message('PySeq::'+AorB+'::cycle'+cycle+'::Flowcell ready to continue')
flowcells[signaling_fc].wait_thread.clear() # Reset wait event
stop = time.time()
return stop-start
def do_rinse(fc, port=None):
"""Rinse flowcell with reagent specified in config file.
**Parameters:**
    - fc (flowcell): The flowcell to rinse.
    - port (str, optional): Reagent port to rinse with; defaults to the method's rinse port.
"""
method = config.get('experiment', 'method') # Read method specific info
method = config[method]
if port is None:
port = method.get('rinse', fallback = None)
AorB = fc.position
rinse = port in hs.v24[AorB].port_dict
if rinse:
LED(fc.position, 'awake')
# Move valve
hs.message('PySeq::'+AorB+'::Rinsing flowcell with', port)
fc.thread = threading.Thread(target = hs.v24[AorB].move, args = (port,))
fc.thread.start()
# Pump
port_num = hs.v24[AorB].port_dict[port]
if port_num in hs.v24[AorB].side_ports:
volume = fc.volume['side']
elif port_num == hs.v24[AorB].sample_port:
volume = fc.volume['sample']
else:
volume = fc.volume['main']
speed = fc.pump_speed['reagent']
while fc.thread.is_alive(): # Wait till valve has moved
pass
fc.thread = threading.Thread(target = hs.p[AorB].pump,
args = (volume, speed,))
else:
fc.thread = threading.Thread(target = do_nothing)
##########################################################
## Shut down system ######################################
##########################################################
def do_shutdown():
"""Shutdown the HiSeq and flush all reagent lines if prompted."""
for fc in flowcells.values():
while fc.thread.is_alive():
fc.wait_thread.set()
time.sleep(10)
LED('all', 'startup')
hs.message('PySeq::Shutting down...')
hs.z.move([0, 0, 0])
hs.move_stage_out()
do_flush()
##Flush all lines##
# LED('all', 'user')
#
# # flush_YorN = userYN("Flush lines")
# if flush_YorN:
# hs.message('Lock temporary flowcell on stage')
# hs.message('Place all valve input lines in PBS/water')
# input('Press enter to continue...')
#
# LED('all', 'startup')
# for fc in flowcells.keys():
# volume = flowcells[fc].volume['main']
# speed = flowcells[fc].pump_speed['flush']
# for port in hs.v24[fc].port_dict.keys():
# if isinstance(port_dict[port], int):
# hs.v24[fc].move(port)
# hs.p[fc].pump(volume, speed)
# ##Return pump to top and NO port##
# hs.p[fc].command('OA0R')
# hs.p[fc].command('IR')
# else:
# LED('all', 'user')
hs.message('Retrieve experiment flowcells')
input('Press any key to finish shutting down')
for fc in flowcells.values():
AorB = fc.position
fc_log_path = join(hs.log_path, 'Flowcell'+AorB+'.log')
with open(fc_log_path, 'w') as fc_file:
for i in range(len(fc.history[0])):
fc_file.write(str(fc.history[0][i])+' '+
str(fc.history[1][i])+' '+
str(fc.history[2][i])+'\n')
# Turn off y stage motor
hs.y.move(0)
hs.y.command('OFF')
LED('all', 'off')
##########################################################
## Free Flowcells ########################################
##########################################################
def free_fc():
"""Release the first flowcell if flowcells are waiting on each other."""
# Get which flowcell is to be first
experiment = config['experiment']
    cycles = int(experiment.get('cycles', fallback=0))
first_fc = experiment.get('first flowcell', fallback = 'A')
if len(flowcells) == 1:
fc = flowcells[[*flowcells][0]]
try:
fc.wait_thread.set()
except:
pass
fc.signal_event = None
else:
flowcells_ = [fc.position for fc in flowcells.values() if fc.total_cycles <= cycles]
if len(flowcells_) == 1:
            fc = flowcells[flowcells_[0]]
else:
fc = flowcells[first_fc]
flowcells[fc.waits_for].wait_thread.set()
flowcells[fc.waits_for].signal_event = None
        hs.message('PySeq::Flowcells are waiting on each other, starting flowcell',
                   fc.position)
return fc.position
def get_config(args):
"""Return the experiment config appended with the method config.
**Parameters:**
- args (dict): Dictionary with the config path, the experiment name and
the output path to store images and logs.
**Returns:**
- config: The experiment config appended with the method config.
"""
# Create config parser
config = configparser.ConfigParser()
    # Defaults that can be overridden
config.read_dict({'experiment' : {'log path': 'logs',
'image path': 'images'}
})
# Open config file
if os.path.isfile(args['config']):
config.read(args['config'])
else:
error('ConfigFile::Does not exist')
sys.exit()
# Set output path
config['experiment']['save path'] = args['output']
# Set experiment name
config['experiment']['experiment name'] = args['name']
# save user valve
USERVALVE = False
if config.has_section('reagents'):
valve = config['reagents'].items()
if len(valve) > 0:
USERVALVE = True
# Get method specific configuration
method = config['experiment']['method']
if method in methods.get_methods():
config_path, recipe_path = methods.return_method(method)
config.read(config_path)
elif os.path.isfile(method):
config.read(method)
recipe_path = None
elif config.has_section(method):
recipe_path = None
else:
error('ConfigFile::Error reading method configuration')
sys.exit()
# Check method keys
if not methods.check_settings(config[method]):
go = userYN('Proceed with experiment')
if not go:
sys.exit()
# Get recipe
recipe_name = config[method]['recipe']
if recipe_path is not None:
pass
elif os.path.isfile(recipe_name):
recipe_path = recipe_name
else:
error('ConfigFile::Error reading recipe')
config['experiment']['recipe path'] = recipe_path
# Don't override user defined valve
user_config = configparser.ConfigParser()
user_config.read(args['config'])
if USERVALVE:
config.read_dict({'reagents':dict(user_config['reagents'])})
if user_config.has_section(method):
config.read_dict({method:dict(user_config[method])})
return config
def check_fc_temp(fc):
"""Check temperature of flowcell."""
if fc.temperature is not None:
if fc.temp_timer is None:
fc.temp_timer = threading.Timer(fc.temp_interval, do_nothing)
fc.temp_timer.start()
if not fc.temp_timer.is_alive():
#print('checking temp')
T = hs.T.get_fc_T(fc.position)
hs.message(False, 'PySeq::'+fc.position+'::Temperature::',T,'°C')
fc.temp_timer = None
if abs(fc.temperature - T) > 5:
msg = 'PySeq::'+fc.position+'::WARNING::Set Temperature '
msg += str(fc.temperature) + ' C'
hs.message(msg)
msg = 'PySeq::'+fc.position+'::WARNING::Actual Temperature '
msg += str(T) + ' C'
hs.message(msg)
return T
###################################
## Run System #####################
###################################
args_ = args.get_arguments() # Get config path, experiment name, & output path
if __name__ == 'pyseq.main':
n_errors = 0
config = get_config(args_) # Get config file
logger = setup_logger() # Create logfiles
port_dict = check_ports() # Check ports in configuration file
first_line, IMAG_counter, z_planes = check_instructions() # Checks instruction file is correct and makes sense
flowcells = setup_flowcells(first_line, IMAG_counter) # Create flowcells
hs = configure_instrument(IMAG_counter, port_dict)
confirm_settings(z_planes)
hs = initialize_hs(IMAG_counter) # Initialize HiSeq, takes a few minutes
    if n_errors == 0:
flush_YorN = do_flush() # Ask to flush out lines
do_prime(flush_YorN) # Ask to prime lines
if not userYN('Start experiment'):
sys.exit()
# Do prerecipe or Initialize Flowcells
for fc in flowcells.values():
if fc.prerecipe_path:
fc.pre_recipe()
else:
fc.restart_recipe()
cycles_complete = False
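        # Main scheduler loop: poll each flowcell and advance its recipe whenever its worker thread is idle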
while not cycles_complete:
stuck = 0
complete = 0
for fc in flowcells.values():
if not fc.thread.is_alive(): # flowcell not busy, do next step in recipe
do_recipe(fc)
if fc.signal_event: # check if flowcells are waiting on each other
stuck += 1
if fc.cycle > fc.total_cycles: # check if all cycles are complete on flowcell
complete += 1
check_fc_temp(fc)
if stuck == len(flowcells): # Start the first flowcell if they are waiting on each other
free_fc()
if complete == len(flowcells): # Exit while loop
cycles_complete = True
if hs.current_view is not None: # Show latest images in napari, WILL BLOCK
hs.current_view.show()
hs.current_view = None
do_shutdown() # Shutdown HiSeq
else:
error('Total number of errors =', n_errors)
def main():
pass
| 38.437079 | 159 | 0.52832 | 3.53125 |
87cc0616dc224380fe72f6b72a580110e552ccb8
| 1,118 |
sql
|
SQL
|
Training/20762/Allfiles/Demofiles/Mod18/Workload/load_script1.sql
|
nmbazima/SQL-Scripts
|
09a1c50a25db80a4d6f021ac1b7ed26263fe9523
|
[
"Apache-2.0"
] | 1 |
2019-10-08T17:47:07.000Z
|
2019-10-08T17:47:07.000Z
|
Training/20762/Allfiles/Demofiles/Mod18/Workload/load_script1.sql
|
nmbazima/SQL-Scripts
|
09a1c50a25db80a4d6f021ac1b7ed26263fe9523
|
[
"Apache-2.0"
] | null | null | null |
Training/20762/Allfiles/Demofiles/Mod18/Workload/load_script1.sql
|
nmbazima/SQL-Scripts
|
09a1c50a25db80a4d6f021ac1b7ed26263fe9523
|
[
"Apache-2.0"
] | null | null | null |
-- load generation script 1
-- loops for up to 60 minutes, or until the ##stopload shared temp table is created
DROP TABLE IF EXISTS ##stopload;
USE AdventureWorks;
GO
DECLARE @sql nvarchar(MAX), @delay varchar(20), @split int = @@SPID % 5
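-- Route this session to one of four workload stored procedures based on its SPID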
SELECT @sql = CASE WHEN @split = 0 THEN N'EXEC dbo.uspGetOrderTrackingBySalesOrderID @i1'
WHEN @split = 1 THEN N'EXEC dbo.uspGetManagerEmployees @i1'
WHEN @split = 2 THEN N'EXEC HumanResources.sp_GetEmployee_Person_Info_AsOf @dt1'
ELSE N'EXEC dbo.uspGetOrderTrackingByTrackingNumber @vc1'
END;
DECLARE @start datetime2 = GETDATE(), @i int, @dt datetime2, @vc nvarchar(25) ;
WHILE DATEDIFF(ss,@start,GETDATE()) < 3600 AND OBJECT_ID('tempdb..##stopload') IS NULL
BEGIN
SET @i = RAND()*10000;
IF @split = 2
BEGIN
SET @dt = DATEADD(dd,@i,'2005-01-01');
EXEC sp_executesql @sql, N'@dt1 datetime2', @dt1 = @dt;
END
ELSE
IF @split > 2
BEGIN
SET @vc = CAST(@i AS nvarchar(25));
EXEC sp_executesql @sql, N'@vc1 nvarchar(25)', @vc1 = @vc;
END
ELSE
BEGIN
EXEC sp_executesql @sql, N'@i1 int', @i1 = @i;
END
WAITFOR DELAY '00:00:01';
END
| 27.95 | 89 | 0.686941 | 3.015625 |
08a3074d7868b432bcfaa771338814592eab491c
| 2,166 |
go
|
Go
|
day19/part1/main.go
|
AntonKosov/advent-of-code-2020
|
8c55224bdbbbfc5b66477d76261e715254303dbf
|
[
"MIT"
] | null | null | null |
day19/part1/main.go
|
AntonKosov/advent-of-code-2020
|
8c55224bdbbbfc5b66477d76261e715254303dbf
|
[
"MIT"
] | null | null | null |
day19/part1/main.go
|
AntonKosov/advent-of-code-2020
|
8c55224bdbbbfc5b66477d76261e715254303dbf
|
[
"MIT"
] | null | null | null |
package main
import (
"fmt"
"strings"
"github.com/AntonKosov/advent-of-code-2020/aoc"
)
func main() {
rules, messages := read()
r := process(rules, messages)
fmt.Printf("Answer: %v\n", r)
}
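// matcher reports whether a rule matches the message starting at the given index,
// returning the index of the last matched character.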
type matcher interface {
verify(rules map[int]matcher, message string, start int) (last int, ok bool)
}
type runeMatcher struct {
r rune
}
func (m runeMatcher) verify(_ map[int]matcher, message string, start int) (last int, ok bool) {
return start, message[start] == byte(m.r)
}
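// sequenceMatcher matches a list of sub-rules that must appear one after another.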
type sequenceMatcher struct {
sequence []int
}
func (m sequenceMatcher) verify(rules map[int]matcher, message string, start int) (last int, ok bool) {
for _, rule := range m.sequence {
matcher := rules[rule]
last, ok = matcher.verify(rules, message, start)
if !ok {
return 0, false
}
start = last + 1
}
return last, true
}
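// sequencesMatcher matches if any one of its alternative sequences matches.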
type sequencesMatcher struct {
sequences []sequenceMatcher
}
func (m sequencesMatcher) verify(rules map[int]matcher, message string, start int) (last int, ok bool) {
for _, s := range m.sequences {
last, ok = s.verify(rules, message, start)
if ok {
return last, true
}
}
return 0, false
}
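// read parses the rule definitions and the list of messages from the puzzle input.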
func read() (rules map[int]matcher, messages []string) {
rules = make(map[int]matcher)
lines := aoc.ReadAllInput()
i := 0
for {
line := lines[i]
if line == "" {
break
}
sc := strings.Split(line, ":")
ruleId := aoc.StrToInt(sc[0])
var m matcher
sp := sc[1]
if strings.Contains(sp, "\"") {
m = runeMatcher{r: rune(sp[2])}
} else if strings.Contains(sp, "|") {
sm := sequencesMatcher{}
rs := strings.Split(sp, "|")
for _, r := range rs {
sm.sequences = append(sm.sequences, sequenceMatcher{sequence: aoc.StrToInts(r, " ")})
}
m = sm
} else {
m = sequenceMatcher{sequence: aoc.StrToInts(sp, " ")}
}
rules[ruleId] = m
i++
}
for j := i + 1; j < len(lines); j++ {
line := lines[j]
if line != "" {
messages = append(messages, line)
}
}
return rules, messages
}
func process(rules map[int]matcher, messages []string) int {
sum := 0
for _, m := range messages {
if last, ok := rules[0].verify(rules, m, 0); ok && last == len(m)-1 {
sum++
}
}
return sum
}
| 19.513514 | 104 | 0.626039 | 3.203125 |
e8fb499b8bff3a7d1f52fd19a5495fe47b6684e5
| 2,165 |
py
|
Python
|
bot/lib/controller/GetCommentTask.py
|
nullwriter/ig-actor
|
a089107657ccdf11ba213160c4cc5d3690cecd76
|
[
"MIT"
] | null | null | null |
bot/lib/controller/GetCommentTask.py
|
nullwriter/ig-actor
|
a089107657ccdf11ba213160c4cc5d3690cecd76
|
[
"MIT"
] | null | null | null |
bot/lib/controller/GetCommentTask.py
|
nullwriter/ig-actor
|
a089107657ccdf11ba213160c4cc5d3690cecd76
|
[
"MIT"
] | null | null | null |
import time
import re
from FileLogger import FileLogger as FL
import datetime
class GetCommentTask:
def __init__(self, task, name="extract-comment"):
self.task = task
self.name = name
self.comments = []
self.log = FL('Extracted Comments {:%Y-%m-%d %H:%M:%S}.txt'.format(datetime.datetime.now()))
def init_task(self):
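        """Walk each configured hashtag feed page by page and extract matching comments."""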
hash_index = 0
loop = True
max_index = len(self.task.hashtags)
next_max_id = ""
while loop:
self.task.check_ops_limit()
current_hash = self.task.hashtags[hash_index]
self.task.api.getHashtagFeed(current_hash, maxid=next_max_id)
print ""
print "CURRENT HASHTAG = " + current_hash
print ""
ig_media = self.task.api.LastJson
if "next_max_id" not in ig_media:
print "####### Changing hashtag #######"
hash_index += 1
next_max_id = ""
                if hash_index >= max_index:
break
else:
next_max_id = self.do_task(ig_media)
def do_task(self, ig_media):
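        """Collect comments from every post on the current feed page and return the next page cursor."""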
last_max_id = ig_media['next_max_id']
if "ranked_items" in ig_media:
key = "ranked_items"
else:
key = "items"
for ig in ig_media[key]:
self.task.api.getMediaComments(ig["id"])
for c in reversed(self.task.api.LastJson['comments']):
txt = c['text']
if self.check_string(txt):
self.comments.append(txt)
print "Comment = " + txt.encode('utf-8', 'ignore').decode('utf-8')
self.log.add_to_file(txt=txt)
self.task.task_count += 1
time.sleep(1)
time.sleep(self.task.get_time_delay())
        return last_max_id
    def check_string(self, text):
        """Check that the string does not contain special non-English characters, @, or 'Follow Me'."""
        pattern = re.compile(r"^(?!follow|followme)[\s\w\d\?><;,\{\}\[\]\-_\+=!\#\$%^&\*\|\']*$")
        return pattern.match(text)
| 28.116883 | 100 | 0.525635 | 3.140625 |
0c7de1d97913a4091e354725bc2562a43f7d9e56
| 14,825 |
py
|
Python
|
pinochle/Game.py
|
Atrus619/DeckOfCards
|
bf0668ea26041e7faab2b88a03d42ba6887d054a
|
[
"MIT"
] | 1 |
2019-06-27T12:14:38.000Z
|
2019-06-27T12:14:38.000Z
|
pinochle/Game.py
|
Atrus619/DeckOfCards
|
bf0668ea26041e7faab2b88a03d42ba6887d054a
|
[
"MIT"
] | 18 |
2019-07-14T17:40:22.000Z
|
2019-11-11T01:54:07.000Z
|
pinochle/Game.py
|
Atrus619/DeckOfCards
|
bf0668ea26041e7faab2b88a03d42ba6887d054a
|
[
"MIT"
] | null | null | null |
from pinochle.State import State
from classes.Deck import Deck
from classes.Hand import Hand
from pinochle.MeldUtil import MeldUtil
from pinochle.Meld import Meld
from pinochle.Trick import Trick
from pinochle.MeldTuple import MeldTuple
from util.Constants import Constants as cs
from util.util import print_divider
from copy import deepcopy
import random
import numpy as np
import util.state_logger as sl
import logging
from config import Config as cfg
import pinochle.card_util as cu
import time
import pandas as pd
from util.Vectors import Vectors as vs
logging.basicConfig(format='%(levelname)s:%(message)s', level=cfg.logging_level)
# pinochle rules: https://www.pagat.com/marriage/pin2hand.html
class Game:
def __init__(self, name, players, run_id="42069", current_cycle=None, human_test=False, config=cfg):
# Setting run_id = None results in no records being saved to database
self.run_id = run_id
self.name = name.upper()
self.players = players # This is a list
self.number_of_players = len(self.players)
self.dealer = players[0]
self.trump_card = None
self.trump = None
self.priority = random.randint(0, 1)
self.meld_util = None
self.current_cycle = current_cycle # To determine the current value of epsilon
self.human_test = human_test
self.config = config
self.exp_df = pd.DataFrame(columns=['agent_id', 'opponent_id', 'run_id', 'vector', 'action', 'next_vector',
'reward', 'meld_action'])
self.last_meld_state = None
if self.name == cs.PINOCHLE:
self.deck = Deck("pinochle")
else:
self.deck = Deck()
self.hands = {}
self.melds = {}
self.scores = {}
self.meldedCards = {}
self.discard_pile = Hand()
self.player_inter_trick_history = {} # One entry per player, each entry is a tuple containing (prior_state, row_id entry in initial db update)
for player in self.players:
self.hands[player] = Hand()
self.melds[player] = Meld()
self.scores[player] = [0]
self.meldedCards[player] = {}
def create_state(self, played_card=None):
return State(self, played_card)
def deal(self):
for i in range(12):
for player in self.players:
self.hands[player].add_cards(self.deck.pull_top_cards(1))
self.trump_card = self.deck.pull_top_cards(1)[0]
self.trump = self.trump_card.suit
self.meld_util = MeldUtil(self.trump)
# Expected card input: VALUE,SUIT. Example: Hindex
# H = hand, M = meld
def collect_trick_cards(self, player, state):
if type(player).__name__ == 'Human':
trick_input = player.get_action(state, msg=player.name + " select card for trick:")
else: # Bot
if self.human_test:
logging.debug("Model hand before action:")
state.convert_to_human_readable_format(player)
trick_index, meld_index = player.get_action(state, self, current_cycle=self.current_cycle, is_trick=True)
trick_input, _ = player.convert_model_output(trick_index=trick_index, meld_index=meld_index, game=self, is_trick=True)
source = trick_input[0]
index = int(trick_input[1:])
if source == "H":
card_input = self.hands[player].cards[index]
card = self.hands[player].pull_card(card_input)
elif source == "M":
mt = self.melds[player].pull_melded_card(self.melds[player].melded_cards[index])
card = mt.card
print_divider()
logging.debug("Player " + player.name + " plays: " + str(card)) # TODO: Fix this later (possible NULL)
return card
def collect_meld_cards(self, player, state, limit=12):
"""
Collecting cards for meld scoring from player who won trick
:param player: Player we are collecting from
:param state: Current state of game
:param limit: Maximum number of cards that can be collected
:return: list of MeldTuples and whether the interaction was valid (boolean)
"""
first_hand_card = True
valid = True
original_hand_cards = deepcopy(self.hands[player])
original_meld_cards = deepcopy(self.melds[player])
collected_hand_cards = []
collected_meld_cards = []
score = 0
meld_class = None
combo_name = None
if type(player).__name__ == 'Human':
while len(collected_hand_cards) + len(collected_meld_cards) < limit:
if first_hand_card:
print_divider()
logging.debug("For meld please select first card from hand.")
user_input = player.get_action(state, msg=player.name + " select card, type 'Y' to exit:")
if user_input == 'Y':
break
source = user_input[0]
index = int(user_input[1:])
if first_hand_card:
if source != "H":
print_divider()
logging.debug("In case of meld, please select first card from hand.")
continue
first_hand_card = False
if source == "H":
card_input = self.hands[player].cards[index]
card = self.hands[player].pull_card(card_input)
collected_hand_cards.append(card)
elif source == "M":
mt = self.melds[player].pull_melded_card(self.melds[player].melded_cards[index])
collected_meld_cards.append(mt)
# Combine collected hand and meld card lists for score calculation
collected_cards = collected_hand_cards + [mt.card for mt in collected_meld_cards]
if len(collected_cards) > 0:
score, meld_class, combo_name = self.meld_util.calculate_score(collected_cards)
if score == 0:
valid = False
else:
for mt in collected_meld_cards:
original_meld_class = mt.meld_class
if original_meld_class == meld_class:
original_meld_score = mt.score
if original_meld_score <= score:
valid = False
break
if not valid:
self.hands[player] = original_hand_cards
self.melds[player] = original_meld_cards
else: # Bot
valid = True
trick_action, meld_action = player.get_action(state, self, current_cycle=self.current_cycle, is_trick=False)
if meld_action == vs.MELD_COMBINATIONS_ONE_HOT_VECTOR.__len__():
# model chose to pass melding
return [], valid
score, meld_class, combo_name, collected_cards = \
player.convert_model_output(trick_index=trick_action, meld_index=meld_action, game=self, is_trick=False)
return [MeldTuple(card, combo_name, meld_class, score) for card in collected_cards], valid
    def play_trick(self):
        """
        Play a single trick. self.priority (0 or 1) selects which player leads;
        the winner's index is stored back in self.priority for the next trick.
        """
print_divider()
logging.debug(f'Phase 1\tTrick #{12 - len(self.deck)//2}\t{len(self.deck)} card{"s" if len(self.deck) > 1 else ""} remaining in deck')
trick_start_state = self.create_state()
trick = Trick(self.players, self.trump)
# Determine which player goes first based on priority arg
""" !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# TRICK PLAYER LIST IS NOT ALWAYS THE SAME AS THE GAME PLAYER LIST
# THEY COULD BE IN DIFFERENT ORDER
"""
player_order = list(self.players)
player_1 = player_order.pop(self.priority)
player_2 = player_order[0]
trick_player_list = [player_1, player_2]
# Collect card for trick from each player based on order
card_1 = self.collect_trick_cards(player_1, trick_start_state) # Collect card from first player based on priority
if self.human_test:
time.sleep(cfg.human_test_pause_length)
# Recording the first card that was played
first_move_state = self.create_state(card_1)
if self.human_test and 'get_Qs' in dir(self.players[0].model):
print_divider()
bot_state = trick_start_state if self.players[0] == player_1 else first_move_state
human_state = trick_start_state if self.players[1] == player_1 else first_move_state
logging.debug(self.players[0].model.get_Qs(player=self.players[0], player_state=bot_state, opponent=self.players[1], opponent_state=human_state))
if self.players[0] in self.player_inter_trick_history and self.run_id is not None: # Don't update on first trick of game
p1_update_dict = {'player': player_1, 'state_1': self.player_inter_trick_history[player_1][0],
'state_2': trick_start_state, 'row_id': self.player_inter_trick_history[player_1][1]}
p2_update_dict = {'player': player_2, 'state_1': self.player_inter_trick_history[player_2][0],
'state_2': first_move_state, 'row_id': self.player_inter_trick_history[player_2][1]}
self.exp_df = sl.update_state(df=self.exp_df, p1=p1_update_dict, p2=p2_update_dict, win_reward=self.config.win_reward)
card_2 = self.collect_trick_cards(player_2, first_move_state) # Collect card from second player based on priority
if self.human_test:
time.sleep(cfg.human_test_pause_length)
print_divider()
logging.debug("LETS GET READY TO RUMBLE!!!!!!!!!!!!!!!!!!!!!!!")
logging.debug("Card 1: " + str(card_1))
logging.debug("Card 2: " + str(card_2))
if self.human_test:
time.sleep(cfg.human_test_pause_length)
# Determine winner of trick based on collected cards
result = cu.compare_cards(self.trump, card_1, card_2)
print_divider()
logging.debug("VICTOR : " + str(player_1.name if result == 0 else player_2.name))
if self.human_test:
time.sleep(cfg.human_test_pause_length)
# Separate winner and loser for scoring, melding, and next hand
winner = trick_player_list.pop(result)
loser = trick_player_list[0]
# Winner draws a card from the stock, followed by the loser drawing a card from the stock
# TODO: Come back here and allow winner to choose when down to last 2 cards (optional af)
self.hands[winner].add_cards(self.deck.pull_top_cards(1))
if len(self.deck) == 0:
self.hands[loser].add_cards(self.trump_card)
else:
self.hands[loser].add_cards(self.deck.pull_top_cards(1))
# Winner can now meld if they so choose
print_divider()
logging.debug(winner.name + " select cards for meld:")
# Verify that meld is valid. If meld is invalid, force the user to retry.
self.last_meld_state = self.create_state()
mt_list = []
# no melding in this version
while 1:
mt_list, valid = self.collect_meld_cards(winner, self.last_meld_state)
if valid:
break
else:
print_divider()
logging.debug("Invalid combination submitted, please try again.")
# Update scores
if len(mt_list) == 0: # No cards melded, so score is 0
meld_score = 0
else:
meld_score = mt_list[0].score # Score is the same for all MeldTuples in mt_list
trick_score = trick.calculate_trick_score(card_1, card_2)
total_score = meld_score + trick_score
self.discard_pile.add_cards([card_1, card_2])
# log states and actions, player order = TRICK ORDER
if self.run_id is not None:
p1_dict = {'player': player_1, 'state': trick_start_state, 'card': card_1}
p2_dict = {'player': player_2, 'state': first_move_state, 'card': card_2}
meld_dict = {'player': winner, 'meld': mt_list}
self.exp_df, self.player_inter_trick_history = \
sl.log_state(df=self.exp_df, p1=p1_dict, p2=p2_dict, meld=meld_dict,
run_id=self.run_id, history=self.player_inter_trick_history)
self.scores[winner].append(self.scores[winner][-1] + total_score)
self.scores[loser].append(self.scores[loser][-1])
# Update winner's meld
for mt in mt_list:
self.melds[winner].add_melded_card(mt)
# set new priority
self.priority = self.players.index(winner)
def play(self):
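        """Play tricks until the stock is empty, then return the winner's index and the experience DataFrame (None when run_id is None)."""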
while len(self.deck) > 0:
self.play_trick()
final_scores = [self.scores[player][-1] for player in self.players]
winner_index = np.argmax(final_scores)
if self.run_id is not None:
# GAME ORDER (because it doesn't matter here)
end_game_state = self.create_state()
p1_update_dict = {'player': self.players[0], 'state_1': self.player_inter_trick_history[self.players[0]][0], 'state_2': end_game_state,
'row_id': self.player_inter_trick_history[self.players[0]][1]}
p2_update_dict = {'player': self.players[1], 'state_1': self.player_inter_trick_history[self.players[1]][0], 'state_2': end_game_state,
'row_id': self.player_inter_trick_history[self.players[1]][1]}
self.exp_df = sl.update_state(df=self.exp_df, p1=p1_update_dict, p2=p2_update_dict, winner=self.players[winner_index], win_reward=self.config.win_reward,
final_trick_winner=self.players[self.priority])
self.exp_df = sl.log_final_meld(df=self.exp_df, meld_state=self.last_meld_state, history=self.player_inter_trick_history,
final_trick_winner=self.players[self.priority], end_game_state=end_game_state, run_id=self.run_id,
winner=self.players[winner_index], win_reward=self.config.win_reward)
print_divider()
logging.debug("Winner: " + str(self.players[winner_index]) + "\tScore: " + str(final_scores[winner_index]))
logging.debug(
"Loser: " + str(self.players[1 - winner_index]) + "\tScore: " + str(final_scores[1 - winner_index]))
return winner_index, None if self.run_id is None else self.exp_df
| 44.924242 | 165 | 0.616256 | 3 |
38ed3d988d1fcc492ea5b06bced45142ce29f8ef
| 6,243 |
h
|
C
|
CSC/CSC114/GaddisExamples/Chapter18/LinkedList Template Version 2/LinkedList.h
|
HNSS-US/DelTech
|
a424da4e10ec0a33caaa6ed1c1d78837bdc6b0a2
|
[
"MIT"
] | 3 |
2019-02-02T16:59:48.000Z
|
2019-02-28T14:50:08.000Z
|
SourceCode/Chapter 17/LinkedList Template Version 2/LinkedList.h
|
jesushilariohernandez/DelMarCSi.cpp
|
6dd7905daea510452691fd25b0e3b0d2da0b06aa
|
[
"MIT"
] | null | null | null |
SourceCode/Chapter 17/LinkedList Template Version 2/LinkedList.h
|
jesushilariohernandez/DelMarCSi.cpp
|
6dd7905daea510452691fd25b0e3b0d2da0b06aa
|
[
"MIT"
] | 4 |
2020-04-10T17:22:17.000Z
|
2021-11-04T14:34:00.000Z
|
// A class template for holding a linked list.
// The node type is also a class template.
#ifndef LINKEDLIST_H
#define LINKEDLIST_H
//*********************************************
// The ListNode class creates a type used to *
// store a node of the linked list. *
//*********************************************
template <class T>
class ListNode
{
public:
T value; // Node value
ListNode<T> *next; // Pointer to the next node
// Constructor
ListNode (T nodeValue)
{ value = nodeValue;
next = nullptr;}
};
//*********************************************
// LinkedList class *
//*********************************************
template <class T>
class LinkedList
{
private:
ListNode<T> *head; // List head pointer
public:
// Constructor
LinkedList()
{ head = nullptr; }
// Destructor
~LinkedList();
// Linked list operations
void appendNode(T);
void insertNode(T);
void deleteNode(T);
void displayList() const;
};
//**************************************************
// appendNode appends a node containing the value *
// passed into newValue, to the end of the list. *
//**************************************************
template <class T>
void LinkedList<T>::appendNode(T newValue)
{
ListNode<T> *newNode; // To point to a new node
ListNode<T> *nodePtr; // To move through the list
// Allocate a new node and store newValue there.
newNode = new ListNode<T>(newValue);
// If there are no nodes in the list
// make newNode the first node.
if (!head)
head = newNode;
else // Otherwise, insert newNode at end.
{
// Initialize nodePtr to head of list.
nodePtr = head;
// Find the last node in the list.
while (nodePtr->next)
nodePtr = nodePtr->next;
// Insert newNode as the last node.
nodePtr->next = newNode;
}
}
//**************************************************
// displayList shows the value stored in each node *
// of the linked list pointed to by head. *
//**************************************************
template <class T>
void LinkedList<T>::displayList() const
{
ListNode<T> *nodePtr; // To move through the list
// Position nodePtr at the head of the list.
nodePtr = head;
// While nodePtr points to a node, traverse
// the list.
while (nodePtr)
{
// Display the value in this node.
cout << nodePtr->value << endl;
// Move to the next node.
nodePtr = nodePtr->next;
}
}
//**************************************************
// The insertNode function inserts a node with *
// newValue copied to its value member. *
//**************************************************
template <class T>
void LinkedList<T>::insertNode(T newValue)
{
ListNode<T> *newNode; // A new node
ListNode<T> *nodePtr; // To traverse the list
ListNode<T> *previousNode = nullptr; // The previous node
// Allocate a new node and store newValue there.
newNode = new ListNode<T>(newValue);
// If there are no nodes in the list
// make newNode the first node
if (!head)
{
head = newNode;
newNode->next = nullptr;
}
else // Otherwise, insert newNode
{
// Position nodePtr at the head of list.
nodePtr = head;
// Initialize previousNode to nullptr.
previousNode = nullptr;
// Skip all nodes whose value is less than newValue.
while (nodePtr != nullptr && nodePtr->value < newValue)
{
previousNode = nodePtr;
nodePtr = nodePtr->next;
}
// If the new node is to be the 1st in the list,
// insert it before all other nodes.
if (previousNode == nullptr)
{
head = newNode;
newNode->next = nodePtr;
}
else // Otherwise insert after the previous node.
{
previousNode->next = newNode;
newNode->next = nodePtr;
}
}
}
//*****************************************************
// The deleteNode function searches for a node *
// with searchValue as its value. The node, if found, *
// is deleted from the list and from memory. *
//*****************************************************
template <class T>
void LinkedList<T>::deleteNode(T searchValue)
{
ListNode<T> *nodePtr; // To traverse the list
ListNode<T> *previousNode; // To point to the previous node
// If the list is empty, do nothing.
if (!head)
return;
// Determine if the first node is the one.
if (head->value == searchValue)
{
nodePtr = head->next;
delete head;
head = nodePtr;
}
else
{
// Initialize nodePtr to head of list
nodePtr = head;
// Skip all nodes whose value member is
// not equal to num.
while (nodePtr != nullptr && nodePtr->value != searchValue)
{
previousNode = nodePtr;
nodePtr = nodePtr->next;
}
// If nodePtr is not at the end of the list,
// link the previous node to the node after
// nodePtr, then delete nodePtr.
if (nodePtr)
{
previousNode->next = nodePtr->next;
delete nodePtr;
}
}
}
//**************************************************
// Destructor *
// This function deletes every node in the list. *
//**************************************************
template <class T>
LinkedList<T>::~LinkedList()
{
ListNode<T> *nodePtr; // To traverse the list
ListNode<T> *nextNode; // To point to the next node
// Position nodePtr at the head of the list.
nodePtr = head;
// While nodePtr is not at the end of the list...
while (nodePtr != nullptr)
{
// Save a pointer to the next node.
nextNode = nodePtr->next;
// Delete the current node.
delete nodePtr;
// Position nodePtr at the next node.
nodePtr = nextNode;
}
}
#endif
| 26.793991 | 66 | 0.507128 | 3.203125 |
6501e436cf727b0f646b61fcf716e2f64d47d65c
| 1,131 |
py
|
Python
|
hai_tests/test_event_emitter.py
|
valohai/hai
|
f49c4eae2eb74b1738699e32b4b2aeb0f4d922dd
|
[
"MIT"
] | 2 |
2018-10-03T11:13:06.000Z
|
2020-08-07T12:44:22.000Z
|
hai_tests/test_event_emitter.py
|
valohai/hai
|
f49c4eae2eb74b1738699e32b4b2aeb0f4d922dd
|
[
"MIT"
] | 16 |
2018-02-07T11:08:53.000Z
|
2021-11-26T09:21:57.000Z
|
hai_tests/test_event_emitter.py
|
valohai/hai
|
f49c4eae2eb74b1738699e32b4b2aeb0f4d922dd
|
[
"MIT"
] | null | null | null |
import pytest
from hai.event_emitter import EventEmitter
class Thing(EventEmitter):
event_types = {'one', 'two'}
@pytest.mark.parametrize('omni', (False, True))
def test_event_emitter(omni):
t = Thing()
events = []
def handle(sender, **args):
assert sender is t
events.append(args)
if omni:
t.on('*', handle)
else:
t.on('one', handle)
t.emit('one')
t.emit('two')
t.off('one', handle)
t.emit('one', {'oh': 'no'})
if omni:
assert events == [
{'event': 'one'},
{'event': 'two'},
{'event': 'one', 'oh': 'no'},
]
else:
assert events == [
{'event': 'one'},
]
def test_event_emitter_exceptions():
t = Thing()
def handle(**args):
raise OSError('oh no')
t.on('*', handle)
t.emit('one')
with pytest.raises(IOError):
t.emit('one', quiet=False)
def test_event_emitter_unknown_event_types():
t = Thing()
with pytest.raises(ValueError):
t.on('hullo', None)
with pytest.raises(ValueError):
t.emit('hello')
| 18.85 | 47 | 0.528736 | 3.3125 |
cb995f329c005757a28e51fab6ce1a1ae261f937
| 3,732 |
swift
|
Swift
|
OnionMobileClientDSL/DSLUI/DSLView.swift
|
shevakuilin/OnionMobileClientDSL
|
796cb4c4c629cd3522c9d782f02efacbed237e4e
|
[
"MIT"
] | 1 |
2021-12-28T08:14:08.000Z
|
2021-12-28T08:14:08.000Z
|
OnionMobileClientDSL/DSLUI/DSLView.swift
|
shevakuilin/OnionMobileClientDSL
|
796cb4c4c629cd3522c9d782f02efacbed237e4e
|
[
"MIT"
] | null | null | null |
OnionMobileClientDSL/DSLUI/DSLView.swift
|
shevakuilin/OnionMobileClientDSL
|
796cb4c4c629cd3522c9d782f02efacbed237e4e
|
[
"MIT"
] | null | null | null |
//
// DSLView.swift
// OnionMobileClientDSL
//
//  A UIView component that supports the DSL
//
// Created by XIANG KUILIN on 2022/1/11.
//
import UIKit
import YC_YogaKit
class DSLView: UIView {
    public var dataModel: OMCDData! // Data model
    fileprivate var attributeSet: OMCDAttributeSet! // DSL attribute set
override init(frame: CGRect) {
super.init(frame: frame)
}
required init?(coder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
    /// Initialize the elements
    /// - Parameters: set The DSL attribute set
public func initElements(set: OMCDAttributeSet) {
        // Save the DSL attribute set
attributeSet = set
        // Build the view from the DSL attribute set
setViewAttribute()
flexboxLayout()
bindAction()
bindData()
}
}
private extension DSLView {
    /// Set the view attributes
private func setViewAttribute() {
self.backgroundColor = attributeSet.viewStyle.backgroundColor
self.alpha = CGFloat(attributeSet.viewStyle.alpha)
self.layer.borderColor = attributeSet.viewStyle.borderColor.cgColor
self.layer.borderWidth = CGFloat(attributeSet.viewStyle.borderWidth)
self.layer.masksToBounds = CGFloat(attributeSet.viewStyle.cornerRadius) > 0 ? true:false
self.layer.cornerRadius = CGFloat(attributeSet.viewStyle.cornerRadius)
}
    /// Lay out using Flexbox
private func flexboxLayout() {
self.configureLayout { layout in
layout.isEnabled = true
layout.flexDirection = self.attributeSet.flexStyle.flexDirection
layout.justifyContent = self.attributeSet.flexStyle.justifyContent
layout.alignItems = self.attributeSet.flexStyle.alignItems
layout.alignSelf = self.attributeSet.flexStyle.alignSelf
layout.flexGrow = CGFloat(self.attributeSet.flexStyle.flexGrowFloat)
layout.flexShrink = CGFloat(self.attributeSet.flexStyle.flexShrinkFloat)
layout.flexBasis = self.attributeSet.flexStyle.flexBasisPercent
layout.display = self.attributeSet.flexStyle.display
layout.height = YGValue(self.attributeSet.container.layoutHeight["value"] as! CGFloat)
layout.width = YGValue(self.attributeSet.container.layoutWidth["value"] as! CGFloat)
}
self.yoga.applyLayout(preservingOrigin: false)
}
    /// Bind interaction events
private func bindAction() {
        // Tap event
if attributeSet.viewAction.action == "CLICK" {
self.isUserInteractionEnabled = true
let tap = UITapGestureRecognizer(target: self, action: #selector(clickAction))
self.addGestureRecognizer(tap)
} else if attributeSet.viewAction.action == "TOUCH_MOVE" {
self.isUserInteractionEnabled = true
let pan = UIPanGestureRecognizer(target: self, action: #selector(drag(sender:)))
self.addGestureRecognizer(pan)
}
}
    /// Bind the data
private func bindData() {
dataModel = attributeSet.data
}
}
private extension DSLView {
    // Tap to navigate
@objc
private func clickAction() {
let extra = attributeSet.viewAction.extra
guard let url: String = extra["url"] as? String else {
return
}
guard let theURL = URL(string: url) else {
return
}
        // Simulate navigation by opening Safari
UIApplication.shared.open(theURL, options: [:], completionHandler: nil)
}
    // Drag
@objc
private func drag(sender: UIPanGestureRecognizer) {
if sender.state == .changed {
let offset = sender.translation(in: self)
sender.view?.center = CGPoint(x: self.center.x + offset.x, y: self.center.y + offset.y)
sender.setTranslation(.zero, in: self)
}
}
}
| 32.736842 | 99 | 0.638264 | 3 |
33026e3d3385ab0a61779b22eebf7f1ae1b53d97
| 3,047 |
py
|
Python
|
pyleecan/Methods/Slot/HoleM51/_comp_point_coordinate.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 95 |
2019-01-23T04:19:45.000Z
|
2022-03-17T18:22:10.000Z
|
pyleecan/Methods/Slot/HoleM51/_comp_point_coordinate.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 366 |
2019-02-20T07:15:08.000Z
|
2022-03-31T13:37:23.000Z
|
pyleecan/Methods/Slot/HoleM51/_comp_point_coordinate.py
|
IrakozeFD/pyleecan
|
5a93bd98755d880176c1ce8ac90f36ca1b907055
|
[
"Apache-2.0"
] | 74 |
2019-01-24T01:47:31.000Z
|
2022-02-25T05:44:42.000Z
|
from numpy import exp, pi, cos, sin, tan
from ....Functions.Geometry.inter_line_circle import inter_line_circle
def _comp_point_coordinate(self):
"""Compute the point coordinates needed to plot the Slot.
Parameters
----------
self : HoleM51
A HoleM51 object
Returns
-------
point_dict: dict
A dict of the slot coordinates
"""
Rext = self.get_Rext()
# comp point coordinate (in complex)
alpha = self.comp_alpha()
Wslot = 2 * sin(self.W1 / 2) * (Rext - self.H1)
L = 0.5 * (Wslot - self.W0) / cos(alpha) # ||P2,P5||
# Center of the hole
Z0 = Rext - self.H0
Z2 = Z0 + 1j * self.W0 / 2
Z25 = Z0 - 1j * self.W0 / 2
Z15 = Z25 - self.H2
Z1 = Z2 - 1j * self.W2
Z26 = Z1 - 1j * self.W3
Z12 = Z2 - self.H2
Z13 = Z12 - 1j * self.W2
Z14 = Z13 - 1j * self.W3
Z11 = Z12 + 1j * tan(alpha / 2) * self.H2
Z16 = Z15 - 1j * tan(alpha / 2) * self.H2
# Draw the left side with center P2, and X axis =(P2,P5), Y axis=(P2,P10)
Z3 = self.W4 * exp(1j * (pi / 2 - alpha)) + Z2
Z4 = (self.W4 + self.W5) * exp(1j * (pi / 2 - alpha)) + Z2
Z5 = (Rext - self.H1) * exp(1j * self.W1 / 2)
Z10 = (1j * self.H2) * exp(1j * (pi / 2 - alpha)) + Z2
Z9 = (1j * self.H2 + self.W4) * exp(1j * (pi / 2 - alpha)) + Z2
Z8 = (1j * self.H2 + self.W4 + self.W5) * exp(1j * (pi / 2 - alpha)) + Z2
Z7 = (1j * self.H2 + L) * exp(1j * (pi / 2 - alpha)) + Z2
# Draw the right side with center P25, X axis (P25,P23), Y axis(P25,P17)
Z24 = self.W6 * exp(-1j * (pi / 2 - alpha)) + Z25
Z23 = (self.W6 + self.W7) * exp(-1j * (pi / 2 - alpha)) + Z25
Z22 = (Rext - self.H1) * exp(-1j * self.W1 / 2)
Z17 = (-1j * self.H2) * exp(-1j * (pi / 2 - alpha)) + Z25
Z18 = (-1j * self.H2 + self.W6) * exp(-1j * (pi / 2 - alpha)) + Z25
Z19 = (-1j * self.H2 + self.W6 + self.W7) * exp(-1j * (pi / 2 - alpha)) + Z25
Z20 = (-1j * self.H2 + L) * exp(-1j * (pi / 2 - alpha)) + Z25
# Z6 is the intersection of the line [Z7,Z10] and Circle centre
# (0,0) radius Rext - H1
Zint = inter_line_circle(Z7, Z10, Rext - self.H1)
# Select the point with Re(Z) > 0
if Zint[0].real > 0:
Z6 = Zint[0]
else:
Z6 = Zint[1]
Z21 = Z6.conjugate()
point_dict = dict()
point_dict["Z1"] = Z1
point_dict["Z2"] = Z2
point_dict["Z3"] = Z3
point_dict["Z4"] = Z4
point_dict["Z5"] = Z5
point_dict["Z6"] = Z6
point_dict["Z7"] = Z7
point_dict["Z8"] = Z8
point_dict["Z9"] = Z9
point_dict["Z10"] = Z10
point_dict["Z11"] = Z11
point_dict["Z12"] = Z12
point_dict["Z13"] = Z13
point_dict["Z14"] = Z14
point_dict["Z15"] = Z15
point_dict["Z16"] = Z16
point_dict["Z17"] = Z17
point_dict["Z18"] = Z18
point_dict["Z19"] = Z19
point_dict["Z20"] = Z20
point_dict["Z21"] = Z21
point_dict["Z22"] = Z22
point_dict["Z23"] = Z23
point_dict["Z24"] = Z24
point_dict["Z25"] = Z25
point_dict["Z26"] = Z26
return point_dict
| 31.091837 | 81 | 0.535609 | 3.421875 |
0cd35d400b8ba8d38cccab4e5289309cd18ed0ce
| 2,773 |
py
|
Python
|
src/bot/lib/economy/economy.py
|
rdunc/rybot
|
ec3bf6159e095b53e69f6f81af9f10739c180b42
|
[
"MIT"
] | 1 |
2016-01-11T02:10:05.000Z
|
2016-01-11T02:10:05.000Z
|
src/bot/lib/economy/economy.py
|
rdunc/RyBot
|
ec3bf6159e095b53e69f6f81af9f10739c180b42
|
[
"MIT"
] | null | null | null |
src/bot/lib/economy/economy.py
|
rdunc/RyBot
|
ec3bf6159e095b53e69f6f81af9f10739c180b42
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import requests, json, threading, sys
import collections, os, time
from bot.lib.economy import EconomyInit
from bot.lib.core.benchmark import Benchmark
from bot.lib.core.log import Log
from bot.helpers.color_helper import ColorHelper
from bot.helpers.rybot_helper import RyBotHelper
from collections import Counter
class Economy(EconomyInit):
"""Give all offline and online chatters points."""
def give_points(self):
config = self.config
debug = config["debug"]
point_timer = config["give_points_timer"]
api_chatters_url = config["twitch_chatters_url"]
economy_path = "db/" + self.channel + "/economy.json"
try:
twitch_request = requests.get(api_chatters_url + self.channel + "/chatters")
chatters_json = twitch_request.json()
if debug:
time_1 = Benchmark.start()
with open(economy_path, "r") as of:
file_chatters = of.read()
of.close()
if len(file_chatters) > 0:
file_chatters = json.loads(file_chatters)
if debug:
Log.economy("Current file chatters count: {0}".format(len(file_chatters)))
api_chatters = chatters_json["chatters"]["viewers"]
chatters_dictionary = {}
for i in api_chatters:
chatters_dictionary[i] = 1
if debug:
Log.economy("1 point was added to: {0}".format(i))
if len(file_chatters) > 0:
merged_chatters = [chatters_dictionary, file_chatters]
merged_chatters = sum((Counter(dict(i)) for i in merged_chatters), Counter())
else:
merged_chatters = chatters_dictionary
with open(economy_path, "w") as of:
json.dump(merged_chatters, of)
of.close()
Log.economy("1 point was added to {0} {1}".format(len(merged_chatters), RyBotHelper.pluralize(len(merged_chatters), "chatter")))
if debug:
Log.economy("Current chatters from API: {0}".format(len(chatters_dictionary)))
Benchmark.stop(time_1)
except json.decoder.JSONDecodeError:
Log.error("Problem decoding the JSON. Unable to distribute points.")
except requests.exceptions.ConnectionError:
Log.error("Unable to connect to the Twitch API.")
except TypeError:
Log.error("Error finding the viewers.")
except FileNotFoundError:
Log.error("Economy file not found. Unable to distribute points.")
| 39.056338 | 141 | 0.582402 | 3.296875 |
dd1d15c63d20ab4ba5f2ff970c96c873c680b549
| 1,318 |
swift
|
Swift
|
App/isowordsUITests/OnboardingTests.swift
|
bitrise-io/isowords
|
5dc3a8445482a0b27c852df0e72a6d65900ef884
|
[
"PostgreSQL"
] | null | null | null |
App/isowordsUITests/OnboardingTests.swift
|
bitrise-io/isowords
|
5dc3a8445482a0b27c852df0e72a6d65900ef884
|
[
"PostgreSQL"
] | null | null | null |
App/isowordsUITests/OnboardingTests.swift
|
bitrise-io/isowords
|
5dc3a8445482a0b27c852df0e72a6d65900ef884
|
[
"PostgreSQL"
] | null | null | null |
//
// OnboardingTests.swift
// isowordsUITests
//
// Created by Shams Ahmed on 12/07/2021.
//
import XCTest
extension isowordsUITests {
// MARK: - Tests
func testOnboarding_intro() throws {
let app = XCUIApplication()
if app.state == .notRunning {
app.launch()
}
sleep(5)
// next
let button = app.buttons.element(boundBy: 1)
// next
button.tap()
sleep(2)
//next
button.tap()
sleep(2)
// press letter
app.otherElements.element(boundBy: 2).tap()
sleep(1)
skipOnboarding()
// scroll down
app.scrollViews.firstMatch.swipeUp()
app.scrollViews.firstMatch.swipeUp()
sleep(15)
}
// MARK: - Helper
func skipOnboarding() {
let app = XCUIApplication()
sleep(2)
// skip
app.buttons["Skip"].tap()
sleep(5)
// confirm - Yes, skip
app.buttons["Yes, skip"].tap()
// random press
app.otherElements.firstMatch.tap()
// Get started
app.buttons.firstMatch.tap()
sleep(2)
}
}
| 18.305556 | 52 | 0.462822 | 3.1875 |
defa0facf42ec4b32c8cc7a0c408874bb47484cf
| 3,418 |
rs
|
Rust
|
Rust/20211215.rs
|
lsartory/adventofcode.com
|
30f90eb0ae5ca572f2b582f254c66a99cb9bfb70
|
[
"MIT"
] | null | null | null |
Rust/20211215.rs
|
lsartory/adventofcode.com
|
30f90eb0ae5ca572f2b582f254c66a99cb9bfb70
|
[
"MIT"
] | null | null | null |
Rust/20211215.rs
|
lsartory/adventofcode.com
|
30f90eb0ae5ca572f2b582f254c66a99cb9bfb70
|
[
"MIT"
] | null | null | null |
use std::io::{BufRead, BufReader, Error, ErrorKind, Result};
/***********************************************/
const INPUT_FILE:&str = "20211215.txt";
/***********************************************/
fn read_input(filename: &str) -> Result<Vec<String>> {
BufReader::new(std::fs::File::open(filename)?)
.lines()
.map(|line| line?.trim().parse().map_err(|e| Error::new(ErrorKind::InvalidData, e)))
.collect()
}
/***********************************************/
fn parse_input(input: Vec<String>) -> Vec<Vec<u32>> {
input.iter().map(|line| line.chars().map(|c| c.to_digit(10).unwrap_or(u32::MAX)).collect()).collect()
}
/***********************************************/
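// Part 1: Dijkstra-style shortest path from the top-left to the bottom-right cell.
// A plain Vec doubles as an (unsorted) priority queue: each iteration picks the
// unvisited cell with the smallest tentative distance and relaxes its 4 neighbours.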
fn part_1(input: &[Vec<u32>]) {
let mut q = Vec::new();
let mut dist = input.to_owned();
let dist_map = dist.as_mut_slice();
let height = dist_map.len();
let width = if height > 0 { dist_map[0].len() } else { 0 };
for y in 0 .. height {
for x in 0 .. width {
dist_map[y][x] = u32::MAX;
q.push((x, y));
}
}
dist_map[0][0] = 0;
while !q.is_empty() {
let v = q.iter().enumerate().map(|p| (p.1.0, p.1.1, dist_map[p.1.1][p.1.0], p.0)).min_by(|a, b| a.2.cmp(&b.2)).unwrap_or((0, 0, 0, 0));
q.swap_remove(v.3);
for u in q.iter().filter(|a| a.0 == v.0 && ((v.1 > 0 && a.1 == v.1 - 1) || (v.1 < height && a.1 == v.1 + 1)) || a.1 == v.1 && ((v.0 > 0 && a.0 == v.0 - 1) || (v.0 < width && a.0 == v.0 + 1))) {
dist_map[u.1][u.0] = dist_map[u.1][u.0].min(dist_map[v.1][v.0] + input[u.1][u.0]);
}
print!("\r{:5} / {:5}", width * height - q.len(), width * height);
}
print!("\r ");
println!("\rPart 1: {}", dist_map[height - 1][width - 1]);
}
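// Part 2: the same search on the input tiled 5x5, with each tile's risk values
// shifted by the tile offsets and wrapped from 9 back to 1.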
fn part_2(input: &[Vec<u32>]) {
let mut height = input.len();
let mut width = if height > 0 { input[0].len() } else { 0 };
let mut new_map = Vec::new();
for y in 0 .. height * 5 {
let mut row = Vec::new();
for x in 0 .. width * 5 {
row.push(((input[y % height][x % width] - 1 + (x / width) as u32 + (y / height) as u32) % 9) + 1);
}
new_map.push(row);
}
width *= 5;
height *= 5;
let mut q = Vec::new();
let mut dist = new_map.clone();
let dist_map = dist.as_mut_slice();
for y in 0 .. height {
for x in 0 .. width {
dist_map[y][x] = u32::MAX;
q.push((x, y));
}
}
dist_map[0][0] = 0;
while !q.is_empty() {
let v = q.iter().enumerate().map(|p| (p.1.0, p.1.1, dist_map[p.1.1][p.1.0], p.0)).min_by(|a, b| a.2.cmp(&b.2)).unwrap_or((0, 0, 0, 0));
q.swap_remove(v.3);
for u in q.iter().filter(|a| a.0 == v.0 && ((v.1 > 0 && a.1 == v.1 - 1) || (v.1 < height && a.1 == v.1 + 1)) || a.1 == v.1 && ((v.0 > 0 && a.0 == v.0 - 1) || (v.0 < width && a.0 == v.0 + 1))) {
dist_map[u.1][u.0] = dist_map[u.1][u.0].min(dist_map[v.1][v.0] + new_map[u.1][u.0]);
}
print!("\r{:6} / {:6}", width * height - q.len(), width * height);
}
print!("\r ");
println!("\rPart 2: {}", dist_map[height - 1][width - 1]);
}
/***********************************************/
fn main() {
let input = parse_input(read_input(INPUT_FILE).expect(&format!("Could not read {}", INPUT_FILE)));
part_1(&input);
part_2(&input);
}
| 36.361702 | 201 | 0.446752 | 3.140625 |
22249a67a5fa8ed5b23713f20c0a5c0c34fdaa43
| 1,275 |
swift
|
Swift
|
TableViewIndependantHeader/TableViewIndependantHeader/Views/Main/Views/MainHeaderView.swift
|
rcasanovan/TableViewIndependantHeader
|
a10ae891dd24790d5646657268e7ddd1dbddbbe1
|
[
"Apache-2.0"
] | null | null | null |
TableViewIndependantHeader/TableViewIndependantHeader/Views/Main/Views/MainHeaderView.swift
|
rcasanovan/TableViewIndependantHeader
|
a10ae891dd24790d5646657268e7ddd1dbddbbe1
|
[
"Apache-2.0"
] | null | null | null |
TableViewIndependantHeader/TableViewIndependantHeader/Views/Main/Views/MainHeaderView.swift
|
rcasanovan/TableViewIndependantHeader
|
a10ae891dd24790d5646657268e7ddd1dbddbbe1
|
[
"Apache-2.0"
] | null | null | null |
//
// MainHeaderView.swift
// TableViewIndependantHeader
//
// Created by Ricardo Casanova on 30/08/2020.
// Copyright © 2020 Ricardo Casanova. All rights reserved.
//
import UIKit
//__ This class extends UIView. Feel free to modify it if needed
class MainHeaderView: UIView {
public static var height: CGFloat {
return 200.0
}
// MARK: Lifecycle
override init(frame: CGRect) {
super.init(frame: frame)
setupViews()
}
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
setupViews()
}
}
// MARK: - Setup views
extension MainHeaderView {
/**
Setup views
*/
private func setupViews() {
//__ Configure your view here
//__ Background color, title, safe area
backgroundColor = .yellow
configureSubviews()
addSubviews()
}
/**
Configure subviews
*/
private func configureSubviews() {
//__ Configure all the subviews here
}
}
// MARK: - Layout & constraints
extension MainHeaderView {
/**
Add subviews
*/
private func addSubviews() {
//__ Add all the subviews here
//__ Configure the constraints
}
}
| 18.75 | 64 | 0.585098 | 3.046875 |
3eba7589d3864599d8e36290573953150b1ca369
| 4,982 |
swift
|
Swift
|
ShopBag/Checkout/CheckoutViewController.swift
|
altamic/ShopBag
|
ba9ffc6df78601176c8e814905b270f0a3820113
|
[
"MIT"
] | 1 |
2021-01-24T11:14:16.000Z
|
2021-01-24T11:14:16.000Z
|
ShopBag/Checkout/CheckoutViewController.swift
|
altamic/ShopBag
|
ba9ffc6df78601176c8e814905b270f0a3820113
|
[
"MIT"
] | null | null | null |
ShopBag/Checkout/CheckoutViewController.swift
|
altamic/ShopBag
|
ba9ffc6df78601176c8e814905b270f0a3820113
|
[
"MIT"
] | null | null | null |
//
// CheckoutViewController.swift
// ShopBag
//
// Created by Michelangelo Altamore on 23/09/17.
// Copyright © 2017 altamic. All rights reserved.
//
import UIKit
class CheckoutViewController: UITableViewController {
let lineItemCellIdentifier = "LineItemIdentifier"
var lineItems = [LineItem]()
var currencyRatios = [Currency: Double]()
let segmentedButtonCurrencyOrder: [Int: Currency] = [0: .usd, 1: .eur, 2: .chf, 3: .gbp]
let apiClient = URLSessionNetworkClient()
@IBOutlet weak var currencySelectionView: UISegmentedControl!
@IBOutlet weak var totalPriceLabel: UILabel!
override func viewDidLoad() {
super.viewDidLoad()
disableSelectCurrencyButton()
getCurrencyRates()
updateTotalPrice()
}
@IBAction func currencySelectedChangedAction(_ sender: UISegmentedControl) {
setProductsCurrency(segmentedButtonIndex: sender.selectedSegmentIndex)
tableView.reloadData()
updateTotalPrice()
}
private func setProductsCurrency(segmentedButtonIndex: Int) {
if let currency = segmentedButtonCurrencyOrder[segmentedButtonIndex],
let ratio = currencyRatios[currency] {
Product.CURRENCY_RATIO = ratio
Product.CURRENCY_NAME = currency
}
else {
currencySelectionView.selectedSegmentIndex = 0
currencySelectionView.isEnabled = false
}
}
@IBAction func refreshRatesAction(_ sender: UIBarButtonItem) {
disableSelectCurrencyButton()
getCurrencyRates()
}
// MARK: - Table view data source
override func numberOfSections(in tableView: UITableView) -> Int {
return 1
}
override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return lineItems.count
}
override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: lineItemCellIdentifier, for: indexPath) as! LineItemTableViewCell
cell.configure(with: lineItems[indexPath.row])
cell.minusOneBlock = {
self.addAndSetPrice(-1, indexPath: indexPath, cell: cell)
self.updateTotalPrice()
}
cell.addOneBlock = {
self.addAndSetPrice(1, indexPath: indexPath, cell: cell)
self.updateTotalPrice()
}
return cell
}
private func addAndSetPrice(_ plusOrMinusOne: Int, indexPath: IndexPath,
cell: LineItemTableViewCell) {
guard plusOrMinusOne == 1 || plusOrMinusOne == -1 else { return }
let currentQuantity = self.lineItems[indexPath.row].quantity
self.lineItems[indexPath.row].quantity = plusOrMinusOne == 1 ? min(9, currentQuantity + 1) :
max(1, currentQuantity - 1)
cell.quantity.text = "\(self.lineItems[indexPath.row].quantity)"
cell.priceView.text = formatCurrency(value: self.lineItems[indexPath.row].subTotal(), currencyCode: Product.CURRENCY_NAME)
}
private func updateTotalPrice() {
let total = LineItem.computeTotal(of: lineItems)
self.totalPriceLabel.text = formatCurrency(value: total, currencyCode: Product.CURRENCY_NAME)
}
private func getCurrencyRates() {
let getRatesEndpoint = ApiRouter.getRatesFor(currencies: ["EUR","CHF","GBP"])
apiClient.request(to: getRatesEndpoint) { (result: Result<Currencies>) in
switch result {
case .success(let currencyRates):
if currencyRates.success {
let dateTime = Date(timeIntervalSince1970: TimeInterval(currencyRates.timestamp))
print("JSON API call success: updated rates at \(dateTime)")
self.currencyRatios = self.loadRates(from: currencyRates.quotes)
print(self.currencyRatios)
self.enableSelectCurrencyButton()
}
else {
self.disableSelectCurrencyButton()
}
case .failure(let error):
print("JSON API call failed: \(error.localizedDescription)")
self.disableSelectCurrencyButton()
}
}
}
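// Quote keys such as "USDEUR" encode a currency pair; the helper below strips the
// 3-letter base-currency prefix and keys each rate by the remaining code ("EUR").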
func loadRates(from quotes: [String: Double]) -> [Currency: Double] {
let initialValue: [Currency: Double] = [.usd: 1.0]
return quotes.reduce(initialValue) { (acc, item) in
let key = item.key
let index = key.index(key.startIndex, offsetBy: 3)
let currencyString = String(describing: key[index...])
return acc.merge(with: [Currency(rawValue: currencyString)!: item.value])
}
}
func disableSelectCurrencyButton() {
DispatchQueue.main.async {
let indexes: [Currency: Int] = [.usd: 0, .eur: 1, .chf: 2, .gbp: 3]
self.currencySelectionView.selectedSegmentIndex = indexes[Product.CURRENCY_NAME]!
self.currencySelectionView.isEnabled = false
}
}
func enableSelectCurrencyButton() {
DispatchQueue.main.async {
self.currencySelectionView.isEnabled = true
self.tableView.reloadData()
self.updateTotalPrice()
}
}
}
| 33.436242 | 126 | 0.681052 | 3.015625 |
4eb4fed8b1f13c73002863d6cabbf8b69b8cfcf0
| 3,455 |
swift
|
Swift
|
Frame Grabber/Requesting Assets/Frame Export/FrameExportTask.swift
|
tiagomartinho/FrameGrabber
|
bbb7631baa1bd4f79e0de023cb58218ceecbc8d6
|
[
"MIT"
] | 1 |
2020-04-29T18:38:51.000Z
|
2020-04-29T18:38:51.000Z
|
Frame Grabber/Requesting Assets/Frame Export/FrameExportTask.swift
|
tiagomartinho/FrameGrabber
|
bbb7631baa1bd4f79e0de023cb58218ceecbc8d6
|
[
"MIT"
] | null | null | null |
Frame Grabber/Requesting Assets/Frame Export/FrameExportTask.swift
|
tiagomartinho/FrameGrabber
|
bbb7631baa1bd4f79e0de023cb58218ceecbc8d6
|
[
"MIT"
] | null | null | null |
import AVFoundation
class FrameExportTask: Operation {
typealias Request = FrameExport.Request
typealias Status = FrameExport.Status
let request: Request
let frameStartIndex: Int
let generator: AVAssetImageGenerator
let frameProcessedHandler: (Int, Status) -> ()
/// - Parameter frameStartIndex: If the task represents a chunk of a larger task, the
/// index describes the index of generated frames relative to the larger task. The
/// value is also used to generate file names for exported images.
/// - Parameter frameProcessedHandler: Called on an arbitrary queue.
init(generator: AVAssetImageGenerator,
request: Request,
frameStartIndex: Int = 0,
frameProcessedHandler: @escaping (Int, Status) -> ()) {
self.generator = generator
self.request = request
self.frameStartIndex = frameStartIndex
self.frameProcessedHandler = frameProcessedHandler
super.init()
self.qualityOfService = .userInitiated
}
override func cancel() {
super.cancel()
generator.cancelAllCGImageGeneration()
}
override func main() {
guard !isCancelled else {
return
}
// Since the operation is already asynchronous, make `generateCGImagesAsynchronously`
// synchronous within the current queue.
let block = DispatchGroup()
block.enter()
// Can be safely modified from the generator's callbacks' threads as they are
// strictly sequential.
let times = request.times.map(NSValue.init)
var countProcessed = 0
generator.generateCGImagesAsynchronously(forTimes: times) { [weak self] _, image, _, status, error in
guard let self = self else { return }
let frameIndex = self.frameStartIndex + countProcessed
// When the operation is cancelled, subsequent AVAssetImageGenerator callbacks
// might report `succeeded` as images might already have been generated while
// the current one is slowly being written to disk. Consider them cancelled too.
switch (self.isCancelled, status, image) {
case (true, _, _), (_, .cancelled, _):
self.frameProcessedHandler(frameIndex, .cancelled)
case (_, .succeeded, let image?):
let writeResult = self.write(image, for: self.request, index: frameIndex)
self.frameProcessedHandler(frameIndex, writeResult)
default:
self.frameProcessedHandler(frameIndex, .failed(error))
}
countProcessed += 1
if countProcessed == times.count {
block.leave()
}
}
block.wait()
}
private func write(_ image: CGImage, for request: Request, index: Int) -> Status {
guard let directory = request.directory else { return .failed(nil) }
let fileUrl = url(forFrameAt: index, in: directory, format: request.encoding.format)
let ok = image.write(to: fileUrl, with: request.encoding)
return ok ? .succeeded([fileUrl]) : .failed(nil)
}
private func url(forFrameAt index: Int, in directory: URL, format: ImageFormat) -> URL {
let suffix = (index == 0) ? "" : "-\(index)"
let fileName = "Frame\(suffix).\(format.fileExtension)"
return directory.appendingPathComponent(fileName)
}
}
| 35.255102 | 109 | 0.635601 | 3.40625 |
4a51ba1f3fa4efeea285e189780d57cd76da577f
| 879 |
js
|
JavaScript
|
test.js
|
krylova-dinara/redux-presentation
|
f9f3703fff152bcf9ac6059b34e688fcd1691149
|
[
"MIT"
] | null | null | null |
test.js
|
krylova-dinara/redux-presentation
|
f9f3703fff152bcf9ac6059b34e688fcd1691149
|
[
"MIT"
] | null | null | null |
test.js
|
krylova-dinara/redux-presentation
|
f9f3703fff152bcf9ac6059b34e688fcd1691149
|
[
"MIT"
] | null | null | null |
import { createStore } from 'redux';
// This is a reducer
function rating(state = 0, action) {
switch (action.type) {
case 'LIKE':
return state + 1;
case 'DISLIKE':
return state - 1;
default:
return state;
}
}
// This is an action creator
function ratingUp() {
return {
type: 'LIKE'
};
}
function ratingDown() {
return {
type: 'DISLIKE'
};
}
// Create a Redux store holding the state of your app.
// Its API is { subscribe, dispatch, getState }.
const store = createStore(rating);
// You can use subscribe() to update the UI in response to state changes.
store.subscribe(() => {
console.log(store.getState());
});
// The only way to mutate the internal state is to dispatch an action.
store.dispatch(ratingUp());
store.dispatch(ratingDown());
store.dispatch(ratingUp());
| 22.538462 | 73 | 0.615472 | 3.078125 |
e7edbdfed8164b295e564361932bcbdae312f33f
| 10,178 |
py
|
Python
|
armory/scenarios/audio_asr.py
|
GuillaumeLeclerc/armory
|
c24928701b4ff6fc37cdb994ea784f9733a8e8da
|
[
"MIT"
] | 1 |
2021-06-17T23:05:58.000Z
|
2021-06-17T23:05:58.000Z
|
armory/scenarios/audio_asr.py
|
GuillaumeLeclerc/armory
|
c24928701b4ff6fc37cdb994ea784f9733a8e8da
|
[
"MIT"
] | null | null | null |
armory/scenarios/audio_asr.py
|
GuillaumeLeclerc/armory
|
c24928701b4ff6fc37cdb994ea784f9733a8e8da
|
[
"MIT"
] | null | null | null |
"""
Automatic speech recognition scenario
"""
import logging
from typing import Optional
from tqdm import tqdm
import numpy as np
from art.preprocessing.audio import LFilter, LFilterPyTorch
from armory.utils.config_loading import (
load_dataset,
load_model,
load_attack,
load_adversarial_dataset,
load_defense_wrapper,
load_defense_internal,
load_label_targeter,
)
from armory.utils import metrics
from armory.scenarios.base import Scenario
from armory.utils.export import SampleExporter
logger = logging.getLogger(__name__)
def load_audio_channel(delay, attenuation, pytorch=True):
"""
Return an art LFilter object for a simple delay (multipath) channel
If attenuation == 0 or delay == 0, return an identity channel
Otherwise, return a channel with length equal to delay + 1
NOTE: lfilter truncates the end of the echo, so output length equals input length
"""
delay = int(delay)
attenuation = float(attenuation)
if delay < 0:
raise ValueError(f"delay {delay} must be a nonnegative number (of samples)")
if delay == 0 or attenuation == 0:
logger.warning("Using an identity channel")
numerator_coef = np.array([1.0])
denominator_coef = np.array([1.0])
else:
if not (-1 <= attenuation <= 1):
logger.warning(f"filter attenuation {attenuation} not in [-1, 1]")
# Simple FIR filter with a single multipath delay
numerator_coef = np.zeros(delay + 1)
numerator_coef[0] = 1.0
numerator_coef[delay] = attenuation
denominator_coef = np.zeros_like(numerator_coef)
denominator_coef[0] = 1.0
if pytorch:
try:
return LFilterPyTorch(
numerator_coef=numerator_coef, denominator_coef=denominator_coef
)
except ImportError:
logger.exception("PyTorch not available. Resorting to scipy filter")
logger.warning("Scipy LFilter does not currently implement proper gradients")
return LFilter(numerator_coef=numerator_coef, denominator_coef=denominator_coef)
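# Illustrative sketch (hypothetical values): with delay=3 and attenuation=0.5 the
# channel built above is the FIR filter y[t] = x[t] + 0.5 * x[t - 3], i.e.
# numerator_coef = [1.0, 0.0, 0.0, 0.5] and denominator_coef = [1.0, 0.0, 0.0, 0.0],
# equivalent to scipy.signal.lfilter(numerator_coef, denominator_coef, x)
# (or its PyTorch counterpart when LFilterPyTorch is used).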
class AutomaticSpeechRecognition(Scenario):
def _evaluate(
self,
config: dict,
num_eval_batches: Optional[int],
skip_benign: Optional[bool],
skip_attack: Optional[bool],
skip_misclassified: Optional[bool],
) -> dict:
"""
Evaluate the config and return a results dict
"""
if skip_misclassified:
raise ValueError("skip_misclassified shouldn't be set for ASR scenario")
model_config = config["model"]
estimator, fit_preprocessing_fn = load_model(model_config)
audio_channel_config = config.get("adhoc", {}).get("audio_channel")
if audio_channel_config is not None:
logger.info("loading audio channel")
for k in "delay", "attenuation":
if k not in audio_channel_config:
raise ValueError(f"audio_channel must have key {k}")
audio_channel = load_audio_channel(**audio_channel_config)
if estimator.preprocessing_defences:
estimator.preprocessing_defences.insert(0, audio_channel)
else:
estimator.preprocessing_defences = [audio_channel]
estimator._update_preprocessing_operations()
defense_config = config.get("defense") or {}
defense_type = defense_config.get("type")
if defense_type in ["Preprocessor", "Postprocessor"]:
logger.info(f"Applying internal {defense_type} defense to estimator")
estimator = load_defense_internal(config["defense"], estimator)
if model_config["fit"]:
logger.info(
f"Fitting model {model_config['module']}.{model_config['name']}..."
)
fit_kwargs = model_config["fit_kwargs"]
logger.info(f"Loading train dataset {config['dataset']['name']}...")
batch_size = config["dataset"].pop("batch_size")
config["dataset"]["batch_size"] = fit_kwargs.get(
"fit_batch_size", batch_size
)
train_data = load_dataset(
config["dataset"],
epochs=fit_kwargs["nb_epochs"],
split=config["dataset"].get("train_split", "train_clean100"),
preprocessing_fn=fit_preprocessing_fn,
shuffle_files=True,
)
config["dataset"]["batch_size"] = batch_size
if defense_type == "Trainer":
logger.info(f"Training with {defense_type} defense...")
defense = load_defense_wrapper(config["defense"], estimator)
defense.fit_generator(train_data, **fit_kwargs)
else:
logger.info("Fitting estimator on clean train dataset...")
estimator.fit_generator(train_data, **fit_kwargs)
if defense_type == "Transform":
# NOTE: Transform currently not supported
logger.info(f"Transforming estimator with {defense_type} defense...")
defense = load_defense_wrapper(config["defense"], estimator)
estimator = defense()
attack_config = config["attack"]
attack_type = attack_config.get("type")
targeted = bool(attack_config.get("targeted"))
metrics_logger = metrics.MetricsLogger.from_config(
config["metric"],
skip_benign=skip_benign,
skip_attack=skip_attack,
targeted=targeted,
)
if config["dataset"]["batch_size"] != 1:
logger.warning("Evaluation batch_size != 1 may not be supported.")
predict_kwargs = config["model"].get("predict_kwargs", {})
eval_split = config["dataset"].get("eval_split", "test_clean")
if skip_benign:
logger.info("Skipping benign classification...")
else:
# Evaluate the ART estimator on benign test examples
logger.info(f"Loading test dataset {config['dataset']['name']}...")
test_data = load_dataset(
config["dataset"],
epochs=1,
split=eval_split,
num_batches=num_eval_batches,
shuffle_files=False,
)
logger.info("Running inference on benign examples...")
for x, y in tqdm(test_data, desc="Benign"):
# Ensure that input sample isn't overwritten by estimator
x.flags.writeable = False
with metrics.resource_context(
name="Inference",
profiler=config["metric"].get("profiler_type"),
computational_resource_dict=metrics_logger.computational_resource_dict,
):
y_pred = estimator.predict(x, **predict_kwargs)
metrics_logger.update_task(y, y_pred)
metrics_logger.log_task()
if skip_attack:
logger.info("Skipping attack generation...")
return metrics_logger.results()
# Imperceptible attack still WIP
if (config.get("adhoc") or {}).get("skip_adversarial"):
logger.info("Skipping adversarial classification...")
return metrics_logger.results()
# Evaluate the ART estimator on adversarial test examples
logger.info("Generating or loading / testing adversarial examples...")
if attack_type == "preloaded":
test_data = load_adversarial_dataset(
attack_config,
epochs=1,
split="adversarial",
num_batches=num_eval_batches,
shuffle_files=False,
)
else:
attack = load_attack(attack_config, estimator)
if targeted != attack.targeted:
logger.warning(
f"targeted config {targeted} != attack field {attack.targeted}"
)
test_data = load_dataset(
config["dataset"],
epochs=1,
split=eval_split,
num_batches=num_eval_batches,
shuffle_files=False,
)
if targeted:
label_targeter = load_label_targeter(attack_config["targeted_labels"])
export_samples = config["scenario"].get("export_samples")
if export_samples is not None and export_samples > 0:
sample_exporter = SampleExporter(
self.scenario_output_dir, test_data.context, export_samples
)
else:
sample_exporter = None
for x, y in tqdm(test_data, desc="Attack"):
with metrics.resource_context(
name="Attack",
profiler=config["metric"].get("profiler_type"),
computational_resource_dict=metrics_logger.computational_resource_dict,
):
if attack_type == "preloaded":
x, x_adv = x
if targeted:
y, y_target = y
elif attack_config.get("use_label"):
x_adv = attack.generate(x=x, y=y)
elif targeted:
y_target = label_targeter.generate(y)
x_adv = attack.generate(x=x, y=y_target)
else:
x_adv = attack.generate(x=x)
# Ensure that input sample isn't overwritten by estimator
x_adv.flags.writeable = False
y_pred_adv = estimator.predict(x_adv, **predict_kwargs)
metrics_logger.update_task(y, y_pred_adv, adversarial=True)
if targeted:
metrics_logger.update_task(
y_target, y_pred_adv, adversarial=True, targeted=True,
)
metrics_logger.update_perturbation(x, x_adv)
if sample_exporter is not None:
sample_exporter.export(x, x_adv, y, y_pred_adv)
metrics_logger.log_task(adversarial=True)
if targeted:
metrics_logger.log_task(adversarial=True, targeted=True)
return metrics_logger.results()
| 39.449612 | 91 | 0.599921 | 3.3125 |
331c51cf21a7edb8c933a3fa13b75a18b05760cc
| 3,919 |
py
|
Python
|
openverse_catalog/dags/common/loader/smithsonian_unit_codes.py
|
yavik-kapadia/openverse-catalog
|
853766f2176a96450f456a9fd6675e134c0866e1
|
[
"MIT"
] | 25 |
2021-05-06T20:53:45.000Z
|
2022-03-30T23:18:50.000Z
|
openverse_catalog/dags/common/loader/smithsonian_unit_codes.py
|
yavik-kapadia/openverse-catalog
|
853766f2176a96450f456a9fd6675e134c0866e1
|
[
"MIT"
] | 272 |
2021-05-17T05:53:00.000Z
|
2022-03-31T23:57:20.000Z
|
openverse_catalog/dags/common/loader/smithsonian_unit_codes.py
|
yavik-kapadia/openverse-catalog
|
853766f2176a96450f456a9fd6675e134c0866e1
|
[
"MIT"
] | 13 |
2021-06-12T07:09:06.000Z
|
2022-03-29T17:39:13.000Z
|
"""
This program helps identify smithsonian unit codes which are not yet added to
the smithsonian sub-provider dictionary
"""
import logging
from textwrap import dedent
import requests
from airflow.providers.postgres.hooks.postgres import PostgresHook
from common.loader import provider_details as prov
from providers.provider_api_scripts import smithsonian
logger = logging.getLogger(__name__)
DELAY = smithsonian.DELAY
API_KEY = smithsonian.API_KEY
API_ROOT = smithsonian.API_ROOT
UNITS_ENDPOINT = smithsonian.UNITS_ENDPOINT
PARAMS = {"api_key": API_KEY, "q": "online_media_type:Images"}
SUB_PROVIDERS = prov.SMITHSONIAN_SUB_PROVIDERS
SI_UNIT_CODE_TABLE = "smithsonian_new_unit_codes"
def initialise_unit_code_table(postgres_conn_id, unit_code_table):
postgres = PostgresHook(postgres_conn_id=postgres_conn_id)
"""
Create table to store new unit codes if it does not exist
"""
postgres.run(
dedent(
f"""
CREATE TABLE IF NOT EXISTS public.{unit_code_table} (
new_unit_code character varying(80),
action character varying(40)
);
"""
)
)
"""
Delete old unit code entries
"""
postgres.run(
dedent(
f"""
DELETE FROM public.{unit_code_table};
"""
)
)
def get_new_and_outdated_unit_codes(unit_code_set, sub_prov_dict=SUB_PROVIDERS):
sub_provider_unit_code_set = set()
for sub_prov, unit_code_sub_set in sub_prov_dict.items():
sub_provider_unit_code_set = sub_provider_unit_code_set.union(unit_code_sub_set)
new_unit_codes = unit_code_set - sub_provider_unit_code_set
outdated_unit_codes = sub_provider_unit_code_set - unit_code_set
if bool(new_unit_codes):
logger.info(
f"The new unit codes {new_unit_codes} must be added to "
f"the SMITHSONIAN_SUB_PROVIDERS dictionary"
)
if bool(outdated_unit_codes):
logger.info(
f"The outdated unit codes {outdated_unit_codes} must be "
f"deleted from the SMITHSONIAN_SUB_PROVIDERS dictionary"
)
return new_unit_codes, outdated_unit_codes
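# Worked example (unit codes chosen for illustration): if the API reports
# {"NPG", "SAAM", "NMAH"} while SMITHSONIAN_SUB_PROVIDERS only covers
# {"SAAM", "NMAH", "CHNDM"}, then new_unit_codes == {"NPG"} (to be added) and
# outdated_unit_codes == {"CHNDM"} (to be deleted).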
def alert_unit_codes_from_api(
postgres_conn_id,
unit_code_table="smithsonian_new_unit_codes",
units_endpoint=UNITS_ENDPOINT,
query_params=PARAMS,
):
response = requests.get(units_endpoint, params=query_params)
unit_code_set = set(response.json().get("response", {}).get("terms", []))
new_unit_codes, outdated_unit_codes = get_new_and_outdated_unit_codes(unit_code_set)
initialise_unit_code_table(postgres_conn_id, unit_code_table)
postgres = PostgresHook(postgres_conn_id=postgres_conn_id)
"""
Populate the table with new unit codes
"""
for new_unit_code in new_unit_codes:
postgres.run(
dedent(
f"""
INSERT INTO public.{unit_code_table}
(new_unit_code, action)
VALUES (
'{new_unit_code}', 'add'
);
"""
)
)
"""
Populate the table with outdated unit codes
"""
for outdated_unit_code in outdated_unit_codes:
postgres.run(
dedent(
f"""
INSERT INTO public.{unit_code_table}
(new_unit_code, action)
VALUES (
'{outdated_unit_code}', 'delete'
);
"""
)
)
"""
Raise exception if human intervention is needed to update the
SMITHSONIAN_SUB_PROVIDERS dictionary by checking the entries in the
smithsonian_new_unit_codes table
"""
if bool(new_unit_codes) or bool(outdated_unit_codes):
raise Exception(
"Please check the smithsonian_new_unit_codes table for necessary "
"updates to the SMITHSONIAN_SUB_PROVIDERS dictionary"
)
| 29.02963 | 88 | 0.661393 | 3.109375 |
9bdcc06e008410565390de920caa830bf199e26a
| 1,669 |
js
|
JavaScript
|
src/redux/reducers/__tests__/index.test.js
|
alebelcor/react-tic-tac-toe
|
041e9628a8a3f7db2842e87bb97353729784ffb7
|
[
"MIT"
] | 1 |
2019-05-21T06:12:51.000Z
|
2019-05-21T06:12:51.000Z
|
src/redux/reducers/__tests__/index.test.js
|
alebelcor/react-tic-tac-toe
|
041e9628a8a3f7db2842e87bb97353729784ffb7
|
[
"MIT"
] | 4 |
2019-05-19T22:23:43.000Z
|
2019-11-01T00:18:11.000Z
|
src/redux/reducers/__tests__/index.test.js
|
alebelcor/react-tic-tac-toe
|
041e9628a8a3f7db2842e87bb97353729784ffb7
|
[
"MIT"
] | null | null | null |
import reducer from '../';
import markSpace from '../markSpace';
import reset from '../reset';
jest.mock('../markSpace')
jest.mock('../reset')
describe('index', () => {
it('should execute the "mark space" reducer', () => {
markSpace.mockImplementation(() => {
return {
turns: [
{ player: 1, position: 1, },
{ player: 2, position: 2, },
],
};
});
let state;
let action;
state = {
turns: [
{ player: 1, position: 1, },
],
};
action = { type: 'MARK_SPACE', payload: { position: 2, }, };
expect(reducer(state, action)).toStrictEqual({
turns: [
{ player: 1, position: 1, },
{ player: 2, position: 2, },
],
});
expect(markSpace).toHaveBeenCalled();
expect(markSpace).toHaveBeenCalledWith(state, action.payload.position);
});
it('should execute the "reset" reducer', () => {
reset.mockImplementation(() => {
return {
turns: [],
};
});
let state;
let action;
state = {
turns: [
{ player: 1, position: 1, },
],
};
action = { type: 'RESET', };
expect(reducer(state, action)).toStrictEqual({
turns: [],
});
expect(reset).toHaveBeenCalled();
expect(reset).toHaveBeenCalledWith();
});
it('should return the current state for unknown actions', () => {
let state;
let action;
state = {
turns: [
{ player: 1, position: 1, },
],
};
action = { type: 'FOO_BAR', };
expect(reducer(state, action)).toStrictEqual({
turns: [
{ player: 1, position: 1, },
],
});
});
});
| 20.353659 | 75 | 0.505093 | 3.09375 |
e50cfe7d8efdb0ee8aa65eb1bb9eceeacb3b6a0d
| 1,681 |
ts
|
TypeScript
|
src/test/suite/jsdos.test.ts
|
dosasm/vscode-dosbox
|
3e3fbb4fbcf39c782dd5e77bb19300ccff5b0369
|
[
"Apache-2.0"
] | 1 |
2021-11-05T10:49:43.000Z
|
2021-11-05T10:49:43.000Z
|
src/test/suite/jsdos.test.ts
|
dosasm/vscode-dosbox
|
3e3fbb4fbcf39c782dd5e77bb19300ccff5b0369
|
[
"Apache-2.0"
] | 2 |
2021-11-08T02:31:40.000Z
|
2021-12-23T14:23:00.000Z
|
src/test/suite/jsdos.test.ts
|
dosasm/vscode-dosbox
|
3e3fbb4fbcf39c782dd5e77bb19300ccff5b0369
|
[
"Apache-2.0"
] | null | null | null |
import * as assert from "assert";
// You can import and use all API from the 'vscode' module
// as well as import your extension to test it
import * as vscode from "vscode";
import * as myExtension from "../../extension";
import { randomString } from "./util";
let api: myExtension.API;
export const jsdosHostTestSuite = suite("test jsdos API", function () {
this.beforeEach(async function () {
const extension = vscode.extensions.getExtension("xsro.vscode-dosbox");
if (api === undefined) {
api = await extension?.activate();
}
assert.ok(
api !== undefined,
api ? Object.keys(api).toString() : "api can't get"
);
});
test("launch jsdos in extension host direct", async function () {
const ci = await api.jsdos.runInHost(undefined, false);
assert.ok(typeof ci.width() === "number");
if (!process.platform) {
this.timeout(10000);
this.retries(3);
}
const t = ci.terminal();
t.show();
const testStr = randomString();
t.sendText(`echo ${testStr}\r`);
const p = new Promise<string>((resolve) => {
let stdout = "";
ci.events().onStdout((val) => {
stdout += val;
if (stdout.includes(testStr)) {
setTimeout(() => {
resolve(stdout);
});
}
});
});
const stdout = await p;
assert.ok(stdout, stdout);
ci.exit();
t.dispose();
});
test("launch jsdos in extension host webworker", async function () {
if (process.platform !== undefined) {
this.skip();
}
const ci = await api.jsdos.runInHost(undefined, true);
assert.ok(typeof ci.width() === "number");
ci.exit();
});
});
| 27.557377 | 75 | 0.588935 | 3.203125 |
94d07aad53c2f0305af4f3613d8c2e305ceba320
| 1,634 |
swift
|
Swift
|
Sources/AsciiEdit/Commands/AsciiEdit.swift
|
daltonclaybrook/AsciiEdit
|
6e97648236a6dca03a23416018dae52277a60c75
|
[
"MIT"
] | 2 |
2021-06-10T13:37:36.000Z
|
2021-07-30T06:55:54.000Z
|
Sources/AsciiEdit/Commands/AsciiEdit.swift
|
daltonclaybrook/AsciiEdit
|
6e97648236a6dca03a23416018dae52277a60c75
|
[
"MIT"
] | null | null | null |
Sources/AsciiEdit/Commands/AsciiEdit.swift
|
daltonclaybrook/AsciiEdit
|
6e97648236a6dca03a23416018dae52277a60c75
|
[
"MIT"
] | null | null | null |
import ArgumentParser
import PathKit
final class AsciiEdit: ParsableCommand {
enum Error: Swift.Error {
case inputFileDoesNotExist(String)
}
static let configuration = CommandConfiguration(
commandName: "asciiedit",
abstract: "A utility for editing `cast` files produced with asciinema",
version: "0.1.0"
)
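// Example invocation (illustrative; option names come from the declarations below):
//   asciiedit recording.cast --width 120 --height 30 --max-duration 2.5 --output trimmed.cast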
@Argument(help: "Path to the cast file to edit")
var file: String
@Option(name: [.short, .long], help: "A new width to apply to the cast file")
var width: Int?
@Option(name: [.short, .long], help: "A new height to apply to the cast file")
var height: Int?
@Option(name: [.customShort("d"), .long], help: "A maximum duration in seconds used to clamp each item in the cast file.")
var maxDuration: Double?
@Option(name: [.short, .long], help: "Where the output cast file should be saved. The default is to overwrite the input file.")
var output: String?
func run() throws {
let inputPath = Path(file)
guard inputPath.exists else {
throw Error.inputFileDoesNotExist(file)
}
var castFile = try CastFile(fileText: inputPath.read())
CastFileUtility.updateSize(of: &castFile, width: width, height: height)
CastFileUtility.constrainMaxDuration(of: &castFile, maxDuration: maxDuration)
let outputText = try castFile.generateFileText()
let outputFilePath = output.map { Path($0) } ?? inputPath
if outputFilePath.exists {
try outputFilePath.delete()
}
try outputFilePath.write(outputText)
print("✅ Done!")
}
}
| 32.68 | 131 | 0.649327 | 3.375 |
e775abb2c964a0e3b155e08fadc61923175d58fd
| 3,378 |
js
|
JavaScript
|
src/pages/login.js
|
fullstack202/nest-app
|
9a1470cd0a2d43c2e4a7057cdb0e1c626e9c4276
|
[
"MIT"
] | null | null | null |
src/pages/login.js
|
fullstack202/nest-app
|
9a1470cd0a2d43c2e4a7057cdb0e1c626e9c4276
|
[
"MIT"
] | null | null | null |
src/pages/login.js
|
fullstack202/nest-app
|
9a1470cd0a2d43c2e4a7057cdb0e1c626e9c4276
|
[
"MIT"
] | null | null | null |
import { Box, Button, Container, TextField, Typography } from '@mui/material';
import { useFormik } from 'formik';
import Head from 'next/head';
import { useRouter } from 'next/router';
import * as Yup from 'yup';
const Login = () => {
const router = useRouter();
const formik = useFormik({
initialValues: {
username: '[email protected]',
password: 'changeme'
},
validationSchema: Yup.object({
username: Yup
.string()
.email(
'Must be a valid email')
.max(255)
.required(
'Email is required'),
password: Yup
.string()
.max(255)
.required(
'Password is required')
}),
onSubmit: async () => {
// Simple POST request with a JSON body using fetch
const requestOptions = {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(formik.values)
};
await fetch('http://localhost:3000/auth/login', requestOptions)
.then(response => response.json())
.then(data => {
if (data?.statusCode == 401) {
formik.setErrors({ password: "Invalid Username or Password." });
formik.setSubmitting(false);
// Stop here on a failed login so we don't store an undefined token or navigate away.
return;
}
localStorage.setItem('access_token', data.access_token);
formik.setSubmitting(false);
router.push('/');
});
}
});
return (
<>
<Head>
<title>Login | Material Kit</title>
</Head>
<Box
component="main"
sx={{
alignItems: 'center',
display: 'flex',
flexGrow: 1,
minHeight: '100%'
}}
>
<Container maxWidth="sm">
<form onSubmit={formik.handleSubmit}>
<Box sx={{ my: 3 }}>
<Typography
color="textPrimary"
variant="h4"
>
Sign in
</Typography>
</Box>
<TextField
error={Boolean(formik.touched.username && formik.errors.username)}
fullWidth
helperText={formik.touched.username && formik.errors.username}
label="Email Address"
margin="normal"
name="username"
onBlur={formik.handleBlur}
onChange={formik.handleChange}
type="email"
value={formik.values.username}
variant="outlined"
/>
<TextField
error={Boolean(formik.touched.password && formik.errors.password)}
fullWidth
helperText={formik.touched.password && formik.errors.password}
label="Password"
margin="normal"
name="password"
onBlur={formik.handleBlur}
onChange={formik.handleChange}
type="password"
value={formik.values.password}
variant="outlined"
/>
<Box sx={{ py: 2 }}>
<Button
color="primary"
disabled={formik.isSubmitting}
fullWidth
size="large"
type="submit"
variant="contained"
>
Sign In Now
</Button>
</Box>
</form>
</Container>
</Box>
</>
);
};
export default Login;
| 28.386555 | 80 | 0.488455 | 3.0625 |
f089b2ee0df2c39cb69f3cc09604ca66100e6ff2
| 3,266 |
js
|
JavaScript
|
true-business-backend/models/business.js
|
sophiemullerc/true-business
|
e062165e27344246f865c42f2b7499beae4ac24a
|
[
"MIT"
] | 1 |
2018-10-08T16:28:05.000Z
|
2018-10-08T16:28:05.000Z
|
true-business-backend/models/business.js
|
Lambda-School-Labs/CS10-business-review
|
e062165e27344246f865c42f2b7499beae4ac24a
|
[
"MIT"
] | 26 |
2018-09-12T17:32:38.000Z
|
2018-10-09T18:16:31.000Z
|
true-business-backend/models/business.js
|
sophiemullerc/true-business
|
e062165e27344246f865c42f2b7499beae4ac24a
|
[
"MIT"
] | 1 |
2018-09-10T16:06:01.000Z
|
2018-09-10T16:06:01.000Z
|
const mongoose = require('mongoose');
const businessSchema = new mongoose.Schema({
// places_details: name
// returns a formatted string
name: {
type: String,
required: true,
},
// places_details: types
// returns an array of strings
types: [
{
type: String,
},
],
// places_details: formatted_address
// returns a formatted string
formatted_address: {
type: String,
required: true,
},
// places_details: formatted_phone_number
// returns a formatted string
formatted_phone_number: {
type: String,
required: true,
},
// places_details: website
// Some restaurants don't have a website, no required
website: {
type: String,
},
// places_details: photos
// returns an array of objects
// Unlikely, but possible there won't be any, no required
photos: {
type: Array,
default: [
{
link: "https://png.icons8.com/ios/100/000000/organization.png",
width: 100,
height: 100,
},
],
},
// places_details: place_id
// returns a string
place_id: {
type: String,
required: true,
},
// places_details: opening_hours/weekday_text
// returns an array of seven strings
opening_hours: {
type: Object,
},
// places_details: address_components/long_name
// Not 100% about this one, but I believe it is what we are looking for
// returns full text description supposedly (or name of address component?)
address_components: {
type: Object,
},
// Average star rating aggregated from the reviews so far.
// Ex. two reviews, 1 star and 5 stars, this number would be 3.
// When a new review is added, update it as a running average: weight the current
// rating by the existing review count, add the new review's stars, and divide by
// the new review count (see the sketch below).
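// Illustrative update, using a hypothetical newRating value:
//   business.stars = (business.stars * business.totalReviews + newRating) / (business.totalReviews + 1);
//   business.totalReviews += 1;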
stars: {
type: Number,
default: 0,
},
// Just the total number of reviews on this business. I would assume it would be as simple
// as updating the business each time a new review has been posted.
// Alternatively, we could probably just do business.reviews.length or something on the
// front end whenever calculating stars / popularity.
totalReviews: {
type: Number,
default: 0,
},
reviews: [
{
type: mongoose.Schema.Types.ObjectId,
ref: 'Review',
},
],
// For the map object that will be placed on the business page.
location: {
type: Object,
},
createdOn: {
type: Date,
required: true,
default: Date.now(),
},
updatedOn: {
type: Date,
required: true,
default: Date.now(),
},
popularity: {
type: Boolean,
required: true,
default: false,
},
rating: {
type: Number,
default: 0,
}
});
let businessModel = mongoose.model('Business', businessSchema);
// Pre-save hook
businessSchema.pre('save', function(next) {
businessModel.find({ place_id: this.place_id }, (err, docs) => {
if (!docs.length) {
next();
} else {
console.log('Business exists already');
next(new Error('Business exists!'));
}
});
});
// // Post-save hook
// // This is where we update the net promotor score or whatever
// businessSchema.pre('save', function(next) {
// });
module.exports = businessModel;
| 24.931298 | 98 | 0.642682 | 3.28125 |
85b54054c359222c7aa70f9ff647dc6732348c5b
| 988 |
c
|
C
|
code/2212.c
|
Tarpelite/OJ_research
|
5c23591a50e755dac800dfaedb561290ce35fc5b
|
[
"MIT"
] | null | null | null |
code/2212.c
|
Tarpelite/OJ_research
|
5c23591a50e755dac800dfaedb561290ce35fc5b
|
[
"MIT"
] | null | null | null |
code/2212.c
|
Tarpelite/OJ_research
|
5c23591a50e755dac800dfaedb561290ce35fc5b
|
[
"MIT"
] | null | null | null |
#include <stdio.h>
int N(int l, int r);
int M(int l, int r);
int H(int l, int r);
int min(int a, int b);
int max(int a, int b);
int a[100], n;
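/*
 * For each of the K queries (l, r) the program combines three range statistics of
 * a[l..r]: N = sum mod n, M = product mod n, H = bitwise xor. The printed answer is
 * H(min(N, M), max(N, M)), i.e. the xor of the elements between indices min(N, M)
 * and max(N, M).
 */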
int main()
{
int K, l, r;
scanf("%d %d", &n, &K);
for(int i = 0; i < n; i++)
{
scanf("%d", &a[i]);
}
for(int i = 0; i < K; i++)
{
scanf("%d %d", &l, &r);
printf("%d\n", H( min( N(l, r), M(l, r) ), max( N(l, r), M(l, r) ) ) );
}
return 0;
}
int N(int l, int r)
{
int ans = 0;
for(int i = l; i <= r; i++)
{
ans += a[i];
}
ans %= n;
return ans;
}
int M(int l, int r)
{
int ans = 1;
for(int i = l; i <= r; i++)
{
ans *= a[i] % n;
ans %= n;
}
ans %= n;
return ans;
}
int H(int l, int r)
{
int ans;
if(l == r)
{
return a[l];
}
ans = a[l] ^ a[l + 1];
for(int i = l + 2; i <= r; i++)
{
ans = ans ^ a[i];
}
return ans;
}
int min(int a, int b)
{
if(a > b)
{
return b;
}
else
{
return a;
}
}
int max(int a, int b)
{
if(a >= b)
{
return a;
}
else
{
return b;
}
}
| 10.185567 | 73 | 0.425101 | 3.421875 |