Each record in the rows below has the following columns:

| Column | Type | Values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 1.05M |
| ext | string | 98 classes |
| lang | string | 21 classes |
| max_stars_repo_path | string | length 3 to 945 |
| max_stars_repo_name | string | length 4 to 118 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | sequence | length 1 to 10 |
| max_stars_count | int64 | 1 to 368k (nullable) |
| max_stars_repo_stars_event_min_datetime | string | length 24 (nullable) |
| max_stars_repo_stars_event_max_datetime | string | length 24 (nullable) |
| max_issues_repo_path | string | length 3 to 945 |
| max_issues_repo_name | string | length 4 to 118 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | sequence | length 1 to 10 |
| max_issues_count | int64 | 1 to 134k (nullable) |
| max_issues_repo_issues_event_min_datetime | string | length 24 (nullable) |
| max_issues_repo_issues_event_max_datetime | string | length 24 (nullable) |
| max_forks_repo_path | string | length 3 to 945 |
| max_forks_repo_name | string | length 4 to 135 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | sequence | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k (nullable) |
| max_forks_repo_forks_event_min_datetime | string | length 24 (nullable) |
| max_forks_repo_forks_event_max_datetime | string | length 24 (nullable) |
| content | string | length 5 to 1.05M |
| avg_line_length | float64 | 1 to 1.03M |
| max_line_length | int64 | 2 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
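Each example record below lists its metadata fields in the order above, then the file in the `content` column, then the three derived statistics (`avg_line_length`, `max_line_length`, `alphanum_fraction`). As a minimal sketch of how records with these columns might be filtered, assuming they have been exported to a local JSON Lines file (the `code_files.jsonl` filename and the pandas-based approach are illustrative assumptions, not part of this page):

```python
import pandas as pd

# Each line of the export is assumed to be one record with the columns listed above.
df = pd.read_json("code_files.jsonl", lines=True)

# Example filter: Python files that are mostly alphanumeric and reasonably narrow.
selected = df[
    (df["lang"] == "Python")
    & (df["alphanum_fraction"] > 0.6)
    & (df["max_line_length"] < 200)
]

# Inspect provenance and size of the matching files.
print(selected[["max_stars_repo_name", "max_stars_repo_path", "size"]].head())
```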
db6bd69d9c985a26654bb24d30f5325ad41f533c | 778 | php | PHP | module/ZourceUser/src/ZourceUser/TaskService/Service/PermissionsFactory.php | zource/zource | 207d095e7452aadbf5b14d545ed453bf4b8f7778 | ["RSA-MD"] | 1 | 2016-04-30T02:49:49.000Z | 2016-04-30T02:49:49.000Z | module/ZourceUser/src/ZourceUser/TaskService/Service/PermissionsFactory.php | zource/zource | 207d095e7452aadbf5b14d545ed453bf4b8f7778 | ["RSA-MD"] | null | null | null | module/ZourceUser/src/ZourceUser/TaskService/Service/PermissionsFactory.php | zource/zource | 207d095e7452aadbf5b14d545ed453bf4b8f7778 | ["RSA-MD"] | 1 | 2019-12-15T20:38:20.000Z | 2019-12-15T20:38:20.000Z |
<?php
/**
* This file is part of Zource. (https://github.com/zource/)
*
* @link https://github.com/zource/zource for the canonical source repository
* @copyright Copyright (c) 2005-2016 Zource. (https://github.com/zource/)
* @license https://raw.githubusercontent.com/zource/zource/master/LICENSE MIT
*/
namespace ZourceUser\TaskService\Service;
use Zend\ServiceManager\FactoryInterface;
use Zend\ServiceManager\ServiceLocatorInterface;
use ZourceUser\TaskService\Permissions;
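/**
 * Service factory: pulls the merged application config from the service
 * locator and builds the Permissions task service from the
 * 'zource_permissions' configuration key.
 */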
class PermissionsFactory implements FactoryInterface
{
public function createService(ServiceLocatorInterface $serviceLocator)
{
/** @var array $config */
$config = $serviceLocator->get('Config');
return new Permissions($config['zource_permissions']);
}
}
| 29.923077 | 78 | 0.739075 |
b361b2cf0e7e9fe18efebbc5dd17ef328ebb1b63 | 3,646 | py | Python | ml/gbdt/XgboostDemo.py | lj72808up/AI_handcraft | 31c48b91eccf2a64a9fb2a24f1829045c1252358 | ["Apache-2.0"] | null | null | null | ml/gbdt/XgboostDemo.py | lj72808up/AI_handcraft | 31c48b91eccf2a64a9fb2a24f1829045c1252358 | ["Apache-2.0"] | null | null | null | ml/gbdt/XgboostDemo.py | lj72808up/AI_handcraft | 31c48b91eccf2a64a9fb2a24f1829045c1252358 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import xgboost as xgb
def getData():
data = pd.read_csv("../../datasets/census.csv")
income_raw = data['income']
features_raw = data.drop('income', axis=1)
# 独热编码,只取收入大于50k作为输出字段
income = pd.get_dummies(income_raw).iloc[:, 1:]
# 处理取值范围很大的特征
skewed = ['capital-gain', 'capital-loss']
features_raw[skewed] = data[skewed].apply(lambda x: np.log(x + 1))
features = pd.get_dummies(features_raw)
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(features, income, test_size=0.2, random_state=0,
stratify=income)
# 将'X_train'和'y_train'进一步切分为训练集和验证集
X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, test_size=0.2, random_state=0, stratify=y_train)
return X_train, X_val, X_test, y_train, y_val, y_test
def xgboost_train(X_train, y_train):
# To load a numpy array into DMatrix
dtrain = xgb.DMatrix(data=X_train, label=y_train)
param = {'max_depth': 2, 'eta': 1, 'silent': 1, 'objective': 'binary:logistic',
'nthread': 4, 'eval_metric': 'auc'}
# Specify validations set to watch performance
bst = xgb.train(param,dtrain, num_round)
# predict
dtest = xgb.DMatrix(X_test)
predict = bst.predict(dtest)
predict[predict>0.5] = 1
predict[predict<=0.5] = 0
return bst,predict
# Xgboost + logisticregression
def xgboostAndLogistic(X_train, y_train):
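    # GBDT + LR hybrid: train an xgboost model, take each sample's leaf index in
    # every tree (pred_leaf=True), one-hot encode those indices, and use them as
    # input features for a logistic regression classifier.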
# To load a numpy array into DMatrix
dtrain = xgb.DMatrix(data=X_train, label=y_train)
param = {'max_depth': 2, 'eta': 1, 'silent': 1, 'objective': 'binary:logistic',
'nthread': 4, 'eval_metric': 'auc'}
# Specify validations set to watch performance
bst = xgb.train(param,dtrain, num_round)
# predict leaf index
dtest = xgb.DMatrix(X_train)
leafindex = bst.predict(dtest, ntree_limit=num_round, pred_leaf=True)
print "xgboost train success"
# one hot encoding
features = pd.DataFrame(leafindex,columns=[i for i in range(0,num_round)])
features = pd.get_dummies(features)
from sklearn.linear_model import LogisticRegression
clf = LogisticRegression(random_state=10)
clf.fit(features,y_train)
# evaluate(clf.predict(features),y_train)
return bst,clf
def predictXgboostAndLogistic(bst,clf):
dtest = xgb.DMatrix(X_test)
leafindex = bst.predict(dtest, ntree_limit=num_round, pred_leaf=True)
features = pd.DataFrame(leafindex,columns=[i for i in range(0,num_round)])
features = pd.get_dummies(features)
predict = clf.predict(features)
# dtest = xgb.DMatrix(X_test)
# predict = bst.predict(dtest)
# predict[predict>0.5] = 1
# predict[predict<=0.5] = 0
# evaluate(predict,y_test)
return predict
def evaluate(predict,y_test):
from sklearn.metrics import fbeta_score, accuracy_score, confusion_matrix
accuracy = accuracy_score(y_test, predict)
fscore = fbeta_score(y_test, predict, beta=0.5)
print "Final accuracy score on the validation data: {:.4f}".format(accuracy)
print "F-score on validation data: {:.4f}".format(fscore)
print confusion_matrix(y_test, predict)
if __name__ == "__main__":
X_train, X_val, X_test, y_train, y_val, y_test = getData()
num_round = 300
bst,predict = xgboost_train(X_train, y_train)
# bst,clf = xgboostAndLogistic(X_train, y_train)
# predict = predictXgboostAndLogistic(bst,clf) # Final accuracy score on the validation data: 0.8703,F-score on validation data: 0.7500
    evaluate(predict,y_test)
| 36.46 | 139 | 0.687054 |
b07e2566915be2e011fe1f35222fbd0ceed58b08 | 467 | py | Python | 9.py | manuelsousa7/ia-labs | 81a7cda3478e174ca2c9dad40b4ef68471931004 | ["MIT"] | null | null | null | 9.py | manuelsousa7/ia-labs | 81a7cda3478e174ca2c9dad40b4ef68471931004 | ["MIT"] | null | null | null | 9.py | manuelsousa7/ia-labs | 81a7cda3478e174ca2c9dad40b4ef68471931004 | ["MIT"] | null | null | null |
class Agente_corredor_ex_9:
def __init__(self):
self.cur = 1
def invoca(self):
pos = 0
if pos == 0:
if(self.cur == 0):
return "fica parado"
if(self.cur > pos and self.cur > 1):
self.cur = self.cur - 1
return "andar-"
if(self.cur < pos and self.cur < 8):
self.cur = self.cur + 1
return "andar+"
return ""
asd = Agente_corredor_ex_9()
| 24.578947 | 44 | 0.475375 |
2c30ecd251e15b007d0d5dbc63bdfa346a6c3c99 | 301 | lua | Lua | MMOCoreORB/bin/scripts/object/tangible/furniture/flooring/tile/frn_flooring_tile_s31.lua | V-Fib/FlurryClone | 40e0ca7245ec31b3815eb6459329fd9e70f88936 | ["Zlib", "OpenSSL"] | 18 | 2017-02-09T15:36:05.000Z | 2021-12-21T04:22:15.000Z | MMOCoreORB/bin/scripts/object/tangible/furniture/flooring/tile/frn_flooring_tile_s31.lua | V-Fib/FlurryClone | 40e0ca7245ec31b3815eb6459329fd9e70f88936 | ["Zlib", "OpenSSL"] | 61 | 2016-12-30T21:51:10.000Z | 2021-12-10T20:25:56.000Z | MMOCoreORB/bin/scripts/object/tangible/furniture/flooring/tile/frn_flooring_tile_s31.lua | V-Fib/FlurryClone | 40e0ca7245ec31b3815eb6459329fd9e70f88936 | ["Zlib", "OpenSSL"] | 71 | 2017-01-01T05:34:38.000Z | 2022-03-29T01:04:00.000Z |
object_tangible_furniture_flooring_tile_frn_flooring_tile_s31 = object_tangible_furniture_flooring_tile_shared_frn_flooring_tile_s31:new {
}
ObjectTemplates:addTemplate(object_tangible_furniture_flooring_tile_frn_flooring_tile_s31, "object/tangible/furniture/flooring/tile/frn_flooring_tile_s31.iff")
| 75.25 | 159 | 0.923588 |
36b4356783b70528dbf91f8b8adbc453cf7138de | 7,005 | swift | Swift | ios/Classes/SwiftSecureApplicationPlugin.swift | stevenspiel/secure_application | cb99a59b1f34a9464ab6dd179915fe63329014e7 | ["MIT"] | null | null | null | ios/Classes/SwiftSecureApplicationPlugin.swift | stevenspiel/secure_application | cb99a59b1f34a9464ab6dd179915fe63329014e7 | ["MIT"] | null | null | null | ios/Classes/SwiftSecureApplicationPlugin.swift | stevenspiel/secure_application | cb99a59b1f34a9464ab6dd179915fe63329014e7 | ["MIT"] | null | null | null |
import Flutter
import UIKit
public class SwiftSecureApplicationPlugin: NSObject, FlutterPlugin {
var secured = false;
var opacity: CGFloat = 0.2;
var useLaunchImage: Bool = false;
var backgroundColor: UIColor = UIColor.white;
var backgroundTask: UIBackgroundTaskIdentifier!
internal let registrar: FlutterPluginRegistrar
init(registrar: FlutterPluginRegistrar) {
self.registrar = registrar
super.init()
registrar.addApplicationDelegate(self)
}
public static func register(with registrar: FlutterPluginRegistrar) {
let channel = FlutterMethodChannel(name: "secure_application", binaryMessenger: registrar.messenger())
let instance = SwiftSecureApplicationPlugin(registrar: registrar)
registrar.addMethodCallDelegate(instance, channel: channel)
}
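  // When the app is about to resign active, cover the visible window with either
  // the LaunchImage or a blur plus colored overlay so its content is hidden in the
  // app switcher snapshot.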
public func applicationWillResignActive(_ application: UIApplication) {
if ( secured ) {
self.registerBackgroundTask()
UIApplication.shared.ignoreSnapshotOnNextApplicationLaunch()
if let window = UIApplication.shared.windows.filter({ (w) -> Bool in
return w.isHidden == false
}).first {
if (useLaunchImage) {
if let existingView = window.viewWithTag(99697) {
window.bringSubviewToFront(existingView)
return
} else {
let imageView = UIImageView.init(frame: window.bounds)
imageView.tag = 99697
imageView.backgroundColor = backgroundColor
imageView.clipsToBounds = true
imageView.contentMode = .center
imageView.image = UIImage(named: "LaunchImage")
imageView.isMultipleTouchEnabled = true
imageView.translatesAutoresizingMaskIntoConstraints = false
window.addSubview(imageView)
window.bringSubviewToFront(imageView)
window.snapshotView(afterScreenUpdates: true)
RunLoop.current.run(until: Date(timeIntervalSinceNow:0.5))
}
} else {
if let existingView = window.viewWithTag(99699), let existingBlurrView = window.viewWithTag(99698) {
window.bringSubviewToFront(existingView)
window.bringSubviewToFront(existingBlurrView)
return
} else {
let colorView = UIView(frame: window.bounds);
colorView.tag = 99699
colorView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
colorView.backgroundColor = backgroundColor.withAlphaComponent(opacity)
window.addSubview(colorView)
window.bringSubviewToFront(colorView)
let blurEffect = UIBlurEffect(style: UIBlurEffect.Style.extraLight)
let blurEffectView = UIVisualEffectView(effect: blurEffect)
blurEffectView.frame = window.bounds
blurEffectView.autoresizingMask = [.flexibleWidth, .flexibleHeight]
blurEffectView.tag = 99698
window.addSubview(blurEffectView)
window.bringSubviewToFront(blurEffectView)
window.snapshotView(afterScreenUpdates: true)
RunLoop.current.run(until: Date(timeIntervalSinceNow:0.5))
}
}
}
self.endBackgroundTask()
}
}
func registerBackgroundTask() {
self.backgroundTask = UIApplication.shared.beginBackgroundTask { [weak self] in
self?.endBackgroundTask()
}
assert(self.backgroundTask != UIBackgroundTaskIdentifier.invalid)
}
func endBackgroundTask() {
print("Background task ended.")
UIApplication.shared.endBackgroundTask(backgroundTask)
backgroundTask = UIBackgroundTaskIdentifier.invalid
}
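  // Flutter method-channel entry point: "secure"/"open" toggle protection,
  // "opacity"/"backgroundColor"/"useLaunchImage" adjust the overlay, and
  // "unlock" fades out and removes any overlay views.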
public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
if (call.method == "secure") {
secured = true;
if let args = call.arguments as? Dictionary<String, Any> {
if let opacity = args["opacity"] as? NSNumber {
self.opacity = opacity as! CGFloat
}
if let useLaunchImage = args["useLaunchImage"] as? Bool {
self.useLaunchImage = useLaunchImage
}
if let backgroundColor = args["backgroundColor"] as? String {
self.backgroundColor = hexStringToUIColor(hex: backgroundColor)
}
}
} else if (call.method == "open") {
secured = false;
} else if (call.method == "opacity") {
if let args = call.arguments as? Dictionary<String, Any>,
let opacity = args["opacity"] as? NSNumber {
self.opacity = opacity as! CGFloat
}
} else if (call.method == "backgroundColor") {
if let args = call.arguments as? Dictionary<String, Any>,
let backgroundColor = args["backgroundColor"] as? String {
self.backgroundColor = hexStringToUIColor(hex: backgroundColor)
}
} else if (call.method == "useLaunchImage") {
if let args = call.arguments as? Dictionary<String, Any>,
let useLaunchImage = args["useLaunchImage"] as? Bool {
self.useLaunchImage = useLaunchImage
}
} else if (call.method == "unlock") {
if let window = UIApplication.shared.windows.filter({ (w) -> Bool in
return w.isHidden == false
}).first {
if let colorView = window.viewWithTag(99699), let blurrView = window.viewWithTag(99698) {
UIView.animate(withDuration: 0.5, animations: {
colorView.alpha = 0.0
}, completion: { finished in
colorView.removeFromSuperview()
blurrView.removeFromSuperview()
})
}
if let imageView = window.viewWithTag(99697) {
UIView.animate(withDuration: 0.3, animations: {
imageView.alpha = 0.0
}, completion: { finished in
imageView.removeFromSuperview()
})
}
}
}
}
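  // Parses a "#RRGGBB" hex string into a UIColor; returns gray when the input
  // does not contain exactly six hex digits.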
func hexStringToUIColor (hex:String) -> UIColor {
var cString:String = hex.trimmingCharacters(in: .whitespacesAndNewlines).uppercased()
if (cString.hasPrefix("#")) {
cString.remove(at: cString.startIndex)
}
if ((cString.count) != 6) {
return UIColor.gray
}
var rgbValue:UInt64 = 0
Scanner(string: cString).scanHexInt64(&rgbValue)
return UIColor(
red: CGFloat((rgbValue & 0xFF0000) >> 16) / 255.0,
green: CGFloat((rgbValue & 0x00FF00) >> 8) / 255.0,
blue: CGFloat(rgbValue & 0x0000FF) / 255.0,
alpha: CGFloat(1.0)
)
}
}
| 40.028571 | 116 | 0.591006 |
c96a020c00b288df0ed52869272721b1b9e2afca | 340 | ts | TypeScript | module/creative/homepage/homepage.centerpanel.component.d.ts | CharityDynamics/amexio-ng-extensions | 639d55bd4e7eaa39f01710e939f211afc4c92213 | ["Apache-2.0"] | null | null | null | module/creative/homepage/homepage.centerpanel.component.d.ts | CharityDynamics/amexio-ng-extensions | 639d55bd4e7eaa39f01710e939f211afc4c92213 | ["Apache-2.0"] | null | null | null | module/creative/homepage/homepage.centerpanel.component.d.ts | CharityDynamics/amexio-ng-extensions | 639d55bd4e7eaa39f01710e939f211afc4c92213 | ["Apache-2.0"] | null | null | null |
import { OnInit } from '@angular/core';
import { DeviceQueryService } from './../../services/device/device.query.service';
export declare class AmexioHomePageCenterPanelComponent implements OnInit {
matchMediaService: DeviceQueryService;
type: string;
constructor(matchMediaService: DeviceQueryService);
ngOnInit(): void;
}
| 37.777778 | 82 | 0.758824 |
c67ca6fa80c24b53f57ba9a070e4a09304e59bfd | 10,805 | py | Python | parsers/occtonet.py | hybridcattt/electricitymap-contrib | 16178c39c4130fa3ef0c125ebfba58d3033fa34b | ["MIT"] | 1 | 2021-05-19T09:00:58.000Z | 2021-05-19T09:00:58.000Z | parsers/occtonet.py | hybridcattt/electricitymap-contrib | 16178c39c4130fa3ef0c125ebfba58d3033fa34b | ["MIT"] | null | null | null | parsers/occtonet.py | hybridcattt/electricitymap-contrib | 16178c39c4130fa3ef0c125ebfba58d3033fa34b | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# coding=utf-8
import logging
import datetime
import pandas as pd
# The arrow library is used to handle datetimes
import arrow
# The request library is used to fetch content through HTTP
import requests
from io import StringIO
# Abbreviations:
# JP-HKD : Hokkaido
# JP-TH : Tohoku (incl. Niigata)
# JP-TK : Tokyo area (Kanto)
# JP-CB : Chubu
# JP-HR : Hokuriku
# JP-KN : Kansai
# JP-CG : Chugoku
# JP-SK : Shikoku
# JP-KY : Kyushu
# JP-ON : Okinawa
exchange_mapping = {
'JP-HKD->JP-TH':[1],
'JP-TH->JP-TK':[2],
'JP-CB->JP-TK':[3],
'JP-CB->JP-KN':[4],
'JP-CB->JP-HR':[5,11],
'JP-HR->JP-KN':[6],
'JP-CG->JP-KN':[7],
'JP-KN->JP-SK':[8],
'JP-CG->JP-SK':[9],
'JP-CG->JP-KY':[10]
}
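# Each zone pair maps to one or more OCCTO line IDs passed as the 'tgtRkl' form
# parameter; the JP-CB->JP-HR pair is served by two lines (5 and 11) whose
# flows are summed below.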
def fetch_exchange(zone_key1='JP-TH', zone_key2='JP-TK', session=None,
target_datetime=None, logger=logging.getLogger(__name__)):
"""
Requests the last known power exchange (in MW) between two zones
Arguments:
----------
zone_key: used in case a parser is able to fetch multiple countries
session: request session passed in order to re-use an existing session
target_datetime: the datetime for which we want production data. If not
provided, we should default it to now. If past data is not available,
raise a NotImplementedError. Beware that the provided target_datetime is
UTC. To convert to local timezone, you can use
`target_datetime = arrow.get(target_datetime).to('America/New_York')`.
Note that `arrow.get(None)` returns UTC now.
logger: an instance of a `logging.Logger` that will be passed by the
backend. Information logged will be publicly available so that correct
execution of the logger can be checked. All Exceptions will automatically
be logged, so when something's wrong, simply raise an Exception (with an
explicit text). Use `logger.warning` or `logger.info` for information
that can useful to check if the parser is working correctly. A default
logger is used so that logger output can be seen when coding / debugging.
Returns:
--------
If no data can be fetched, any falsy value (None, [], False) will be
ignored by the backend. If there is no data because the source may have
changed or is not available, raise an Exception.
A dictionary in the form:
{
'sortedZoneKeys': 'DK->NO',
'datetime': '2017-01-01T00:00:00Z',
'netFlow': 0.0,
'source': 'mysource.com'
}
"""
#get target date in time zone Asia/Tokyo
query_date = arrow.get(target_datetime).to('Asia/Tokyo').strftime('%Y/%m/%d')
sortedZoneKeys = '->'.join(sorted([zone_key1, zone_key2]))
exch_id = exchange_mapping[sortedZoneKeys]
r = session or requests.session()
# Login to occtonet
Cookies = get_cookies(r)
# Get headers for querying exchange
Headers = get_headers(r, exch_id[0], query_date, Cookies)
# Add request tokens to headers
Headers = get_request_token(r, Headers, Cookies)
# Query data
df = get_exchange(r, Headers, Cookies)
# CB-HR -exceptions
if sortedZoneKeys == 'JP-CB->JP-HR':
df = df.set_index(['Date', 'Time'])
Headers = get_headers(r, exch_id[1], query_date, Cookies)
Headers = get_request_token(r, Headers, Cookies)
df2 = get_exchange(r, Headers, Cookies)
df2 = df2.set_index(['Date', 'Time'])
df = df + df2
df = df.reset_index()
# fix occurrences of 24:00hrs
list24 = list(df.index[df['Time']=='24:00'])
for idx in list24:
df.loc[idx, 'Date'] = arrow.get(df.loc[idx, 'Date']).shift(days=1).strftime('%Y/%m/%d')
df.loc[idx, 'Time'] = '00:00'
# correct flow direction, if needed
flows_to_revert = ['JP-CB->JP-TK', 'JP-CG->JP-KN', 'JP-CG->JP-SK']
if sortedZoneKeys in flows_to_revert:
df['netFlow'] = -1 * df['netFlow']
df['source'] = 'occtonet.occto.or.jp'
df['datetime'] = df.apply(parse_dt, axis=1)
df['sortedZoneKeys'] = sortedZoneKeys
df = df[['source', 'datetime', 'netFlow', 'sortedZoneKeys']]
results = df.to_dict('records')
for result in results:
result['datetime'] = result['datetime'].to_pydatetime()
return results
def fetch_exchange_forecast(zone_key1='JP-TH', zone_key2='JP-TK', session=None,
target_datetime=None, logger=logging.getLogger(__name__)):
"""
Gets exchange forecast between two specified zones.
Returns a list of dictionaries.
"""
#get target date in time zone Asia/Tokyo
query_date = arrow.get(target_datetime).to('Asia/Tokyo').strftime('%Y/%m/%d')
# Forecasts ahead of current date are not available
if query_date > arrow.get().to('Asia/Tokyo').strftime('%Y/%m/%d'):
raise NotImplementedError(
"Future dates(local time) not implemented for selected exchange")
sortedZoneKeys = '->'.join(sorted([zone_key1, zone_key2]))
exch_id = exchange_mapping[sortedZoneKeys]
# Login to occtonet
r = session or requests.session()
Cookies = get_cookies(r)
Headers = get_headers(r, exch_id[0], query_date, Cookies)
# Query data
Headers = get_request_token(r, Headers, Cookies)
df = get_exchange_fcst(r, Headers, Cookies)
# CB-HR -exceptions
if sortedZoneKeys == 'JP-CB->JP-HR':
df = df.set_index(['Date', 'Time'])
Headers = get_headers(r, exch_id[1], query_date, Cookies)
Headers = get_request_token(r, Headers, Cookies)
df2 = get_exchange_fcst(r, Headers, Cookies)
df2 = df2.set_index(['Date', 'Time'])
df = df + df2
df = df.reset_index()
# fix possible occurrences of 24:00hrs
list24 = list(df.index[df['Time']=='24:00'])
for idx in list24:
df.loc[idx, 'Date'] = arrow.get(str(df.loc[idx, 'Date'])).shift(days=1).strftime('%Y/%m/%d')
df.loc[idx, 'Time'] = '00:00'
# correct flow direction, if needed
flows_to_revert = ['JP-CB->JP-TK', 'JP-CG->JP-KN', 'JP-CG->JP-SK']
if sortedZoneKeys in flows_to_revert:
df['netFlow'] = -1 * df['netFlow']
# Add zonekey, source
df['source'] = 'occtonet.occto.or.jp'
df['datetime'] = df.apply(parse_dt, axis=1)
df['sortedZoneKeys'] = sortedZoneKeys
df = df[['source', 'datetime', 'netFlow', 'sortedZoneKeys']]
# Format output
results = df.to_dict('records')
for result in results:
result['datetime'] = result['datetime'].to_pydatetime()
return results
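# Low-level helpers: the OCCTO site needs a session cookie, then a two-step form
# post (headerInput -> ok) that yields a download key and request token before
# the CSV data can be downloaded.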
def get_cookies(session=None):
s = session or requests.session()
s.get('http://occtonet.occto.or.jp/public/dfw/RP11/OCCTO/SD/LOGIN_login')
return s.cookies
def get_headers(session, exch_id, date, cookies):
payload = {
'ajaxToken':'',
'downloadKey':'',
'fwExtention.actionSubType':'headerInput',
'fwExtention.actionType':'reference',
'fwExtention.formId':'CA01S070P',
'fwExtention.jsonString':'',
'fwExtention.pagingTargetTable':'',
'fwExtention.pathInfo':'CA01S070C',
'fwExtention.prgbrh':'0',
'msgArea':'',
'requestToken':'',
'requestTokenBk':'',
'searchReqHdn':'',
'simFlgHdn':'',
'sntkTgtRklCdHdn':'',
'spcDay':date,
'spcDayHdn':'',
'tgtRkl':'{:02d}'.format(exch_id),
'transitionContextKey':'DEFAULT',
'updDaytime':''
}
s = session
r = s.post('http://occtonet.occto.or.jp/public/dfw/RP11/OCCTO/SD/CA01S070C?fwExtention.pathInfo=CA01S070C&fwExtention.prgbrh=0',
cookies=cookies, data=payload)
headers=r.text
headers = eval(headers.replace('false', 'False').replace('null', 'None'))
if headers['root']['errMessage']:
raise RuntimeError('Headers not available due to {}'.format(headers['root']['errMessage']))
else:
payload['msgArea'] = headers['root']['bizRoot']['header']['msgArea']['value']
payload['searchReqHdn'] = headers['root']['bizRoot']['header']['searchReqHdn']['value']
payload['spcDayHdn'] = headers['root']['bizRoot']['header']['spcDayHdn']['value']
payload['updDaytime'] = headers['root']['bizRoot']['header']['updDaytime']['value']
return payload
def get_request_token(session, payload, cookies):
s = session
payload['fwExtention.actionSubType']='ok'
r = s.post('http://occtonet.occto.or.jp/public/dfw/RP11/OCCTO/SD/CA01S070C?'
+'fwExtention.pathInfo=CA01S070C&fwExtention.prgbrh=0',
cookies=cookies, data=payload)
headers=r.text
headers = eval(headers.replace('false', 'False').replace('null', 'None'))
if headers['root']['errFields']:
raise RuntimeError('Request token not available due to {}'.format(headers['root']['errFields']))
else:
payload['downloadKey'] = headers['root']['bizRoot']['header']['downloadKey']['value']
payload['requestToken'] = headers['root']['bizRoot']['header']['requestToken']['value']
return payload
def get_exchange(session, payload, cookies):
s = session
payload['fwExtention.actionSubType']='download'
r = s.post('http://occtonet.occto.or.jp/public/dfw/RP11/OCCTO/SD/CA01S070C?'
+'fwExtention.pathInfo=CA01S070C&fwExtention.prgbrh=0',
cookies=cookies, data=payload)
r.encoding = 'shift-jis'
df = pd.read_csv(StringIO(r.text), delimiter=',')
df = df[['対象日付', '対象時刻', '潮流実績']]
df.columns = ['Date', 'Time', 'netFlow']
df = df.dropna()
return df
def get_exchange_fcst(session, payload, cookies):
s = session
payload['fwExtention.actionSubType']='download'
r = s.post('http://occtonet.occto.or.jp/public/dfw/RP11/OCCTO/SD/CA01S070C?fwExtention.pathInfo=CA01S070C&fwExtention.prgbrh=0',
cookies=cookies, data=payload)
r.encoding = 'shift-jis'
df = pd.read_csv(StringIO(r.text), delimiter=',')
df = df[['対象日付', '対象時刻', '計画潮流(順方向)']]
df.columns = ['Date', 'Time', 'netFlow']
df = df.dropna()
return df
def parse_dt(row):
return arrow.get(' '.join([row['Date'], row['Time']]).replace('/', '-')).replace(tzinfo='Asia/Tokyo').datetime
if __name__ == '__main__':
"""Main method, never used by the Electricity Map backend, but handy for testing."""
print('fetch_exchange(JP-CB, JP-HR) ->')
print(fetch_exchange('JP-CB', 'JP-HR')[-3:])
print('fetch_exchange(JP-CG, JP-KY) ->')
print(fetch_exchange('JP-CG', 'JP-KY')[-3:])
print('fetch_exchange_forecast(JP-CB, JP-HR) ->')
print(fetch_exchange_forecast('JP-CB', 'JP-HR')[-3:])
print('fetch_exchange_forecast(JP-CG, JP-KY) ->')
print(fetch_exchange_forecast('JP-CG', 'JP-KY')[-3:])
| 38.045775 | 132 | 0.628413 |
661682d22f5d95e0fb78f57c0ef5b1bf46bd9696 | 28 | py | Python | src/__init__.py | klein203/Python-Learning | 98ac61eb439e516198dd9cdf0dd9255e2541ea9b | ["MIT"] | null | null | null | src/__init__.py | klein203/Python-Learning | 98ac61eb439e516198dd9cdf0dd9255e2541ea9b | ["MIT"] | null | null | null | src/__init__.py | klein203/Python-Learning | 98ac61eb439e516198dd9cdf0dd9255e2541ea9b | ["MIT"] | null | null | null |
'''
@author: xusheng
'''
| 7 | 17 | 0.464286 |
fce6a69fe01d034aac459a219ac935f705d584f6 | 41,355 | lua | Lua | app/utils.lua | stepan-mitkin/drakon.tech | 9ebf8fbf0bfe208b8f282fe0dec3670cebdfe85e | ["Unlicense"] | 30 | 2020-04-07T02:25:34.000Z | 2022-03-19T13:49:47.000Z | app/utils.lua | stepan-mitkin/drakon.tech | 9ebf8fbf0bfe208b8f282fe0dec3670cebdfe85e | ["Unlicense"] | 20 | 2020-04-05T05:48:17.000Z | 2021-12-01T10:45:52.000Z | app/utils.lua | stepan-mitkin/drakon.tech | 9ebf8fbf0bfe208b8f282fe0dec3670cebdfe85e | ["Unlicense"] | 17 | 2020-04-08T20:58:26.000Z | 2022-03-19T13:49:56.000Z |
-- Autogenerated with DRAKON Editor 1.33
local table = table
local string = string
local pairs = pairs
local ipairs = ipairs
local type = type
local tostring = tostring
local tonumber = tonumber
local error = error
local print = print
-- configuration
local global_cfg = global_cfg
local price_cfg = price_cfg
local math = require("math")
local clock = require("clock")
local log = require("log")
local digest = require("digest")
local fiber = require("fiber")
local io = require("io")
local os = require("os")
local utf8 = require("lua-utf8")
local json=require('json')
local msgpack = require("msgpack")
local pickle = require("pickle")
local fun = require('fun')
local socket = require("socket")
local lxp = require("lxp")
local box = box
local a_code = string.byte("a")
local z_code = string.byte("z")
local d0 = string.byte("0")
local d9 = string.byte("9")
local under = string.byte("_")
local at = string.byte("@")
local dot = string.byte(".")
local dash = string.byte("-")
setfenv(1, {})
local content_types = {}
local g_separators = {}
local gspace = {}
local g_chars = {}
gspace[10] = true
gspace[13] = true
gspace[32] = true
gspace[9] = true
gspace[string.byte("{")] = true
gspace[string.byte("}")] = true
gspace[string.byte("-")] = true
gspace[string.byte("_")] = true
gspace[string.byte("/")] = true
gspace[string.byte("+")] = true
gspace[string.byte("*")] = true
gspace[string.byte("\\")] = true
gspace[string.byte("%")] = true
gspace[string.byte("&")] = true
gspace[string.byte("^")] = true
gspace[string.byte("=")] = true
gspace[string.byte("?")] = true
gspace[string.byte("!")] = true
gspace[string.byte("\"")] = true
gspace[string.byte("\'")] = true
gspace[string.byte(".")] = true
gspace[string.byte(",")] = true
gspace[string.byte(";")] = true
gspace[string.byte(":")] = true
gspace[string.byte("(")] = true
gspace[string.byte(")")] = true
gspace[string.byte("[")] = true
gspace[string.byte("]")] = true
gspace[string.byte("<")] = true
gspace[string.byte(">")] = true
gspace[string.byte("|")] = true
local g_days_in_month = { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }
function add_char_range(first, last, result)
local i
local f = string.byte(first, 1)
local l = string.byte(last, 1)
i = f
while true do
if i <= l then
else
break
end
table.insert(result, i)
i = i + 1
end
end
function add_list_to_set(set, list)
if list then
for _, value in ipairs(list) do
set[value] = true
end
end
end
function add_months(now, months)
local datetime = os.date("*t", now)
local add_months = months % 12
local next_month = datetime.month + add_months
local year2 = datetime.year + math.floor(months / 12)
if next_month > 12 then
year2 = year2 + 1
next_month = next_month - 12
end
local days_next_month = get_days_in_month(
year2,
next_month
)
local day2 = math.min(
datetime.day,
days_next_month
)
datetime.day = day2
datetime.month = next_month
datetime.year = year2
return os.time(datetime)
end
function add_range(dst, src)
local count = #src
local insert = table.insert
i = 1
while true do
if i <= count then
else
break
end
insert(
dst,
src[i]
)
i = i + 1
end
end
function add_separator(text)
g_separators[utf8.byte(text)] = true
end
function add_set(src, dst)
if src then
for key, value in pairs(src) do
dst[key] = value
end
end
end
function append_char(text, code)
return utf8.insert(
text,
utf8.char(code)
)
end
function bash_escape(text)
local quote = utf8.codepoint("'")
local result = ""
for i, code in utf8.next, text do
if code == quote then
result = utf8.insert(
result,
"'\\''"
)
else
result = utf8.insert(
result,
utf8.char(code)
)
end
end
return result
end
function build_index_parts(table_ref, part_names)
local parts = {}
for _, field_name in ipairs(part_names) do
local ordinal = find_by_prop(
table_ref.fields,
"name",
field_name
)
local field = table_ref.fields[ordinal]
table.insert(parts, ordinal)
table.insert(parts, field.type)
end
return parts
end
function build_needle(text)
local special = {}
special[utf8.codepoint("%")] = true
special[utf8.codepoint("(")] = true
special[utf8.codepoint(")")] = true
special[utf8.codepoint("[")] = true
special[utf8.codepoint("]")] = true
special[utf8.codepoint("-")] = true
special[utf8.codepoint("+")] = true
special[utf8.codepoint("^")] = true
special[utf8.codepoint("$")] = true
special[utf8.codepoint(".")] = true
special[utf8.codepoint("\"")] = true
special[utf8.codepoint("\'")] = true
local space = utf8.codepoint(" ")
local current = ""
for i, code in utf8.next, text do
if code == space then
current = utf8.insert(
current,
"%s"
)
else
if special[code] then
current = utf8.insert(
current,
"%"
)
end
current = utf8.insert(
current,
utf8.char(code)
)
end
end
return utf8.lower(current)
end
function build_random_chars()
local result = {}
add_char_range("a", "z", result)
add_char_range("A", "Z", result)
add_char_range("0", "9", result)
return result
end
function bulk_action(table, field_index, action)
local counter = 0
if table then
for it, row in table:pairs() do
local old_value = row[field_index]
local new_value = action(old_value)
if new_value then
local row2 = row:update(
{{"=", field_index, new_value}}
)
table:replace(row2)
counter = counter + 1
end
end
return counter
else
error("table not found: " .. table_name)
end
end
function contains(list, element)
if list then
for _, item in ipairs(list) do
if item == element then
return true
end
end
return false
else
return false
end
end
function copy(obj)
local result = {}
add_set(obj, result)
return result
end
function copy_props(src, dst)
for key, value in pairs(src) do
dst[key] = value
end
end
function create_primary_key(table_ref, btable)
local parts = {}
local ordinal = 1
local index_type
for _, field in ipairs(table_ref.fields) do
if field.pk then
table.insert(parts, ordinal)
table.insert(parts, field.type)
end
ordinal = ordinal + 1
end
if #parts == 2 then
index_type = "hash"
else
index_type = "tree"
end
btable:create_index(
"primary",
{
type = index_type,
unique = true,
parts = parts
}
)
end
function create_table(table_ref)
local btable = box.space[table_ref.name]
if btable == nil then
btable = box.schema.create_space(
table_ref.name
)
create_primary_key(table_ref, btable)
end
table_ref.data = btable
if table_ref.indexes then
for _, index in ipairs(table_ref.indexes) do
if btable.index[index.name] then
else
local parts = build_index_parts(
table_ref,
index.parts
)
btable:create_index(
index.name,
{
type = index.type,
unique = index.unique,
parts = parts
}
)
end
end
end
end
function date(yyyy, mm, dd)
local tt = {
year = yyyy,
month = mm,
day = dd
}
return os.time(tt)
end
function date8_to_expiry(date8)
local time = parse_date8(date8)
if time then
local minute = 60
return time - minute
else
return nil
end
end
function days_to_secs(days)
return days * 24 * 3600
end
function delete_by(table_ref, index_name, value)
local pk_length = 0
for i, field in ipairs(table_ref.fields) do
if field.pk then
else
pk_length = i - 1
break
end
end
local rows = table_ref.data
.index[index_name]
:select(value)
for _, row in ipairs(rows) do
local key = take_first(row, pk_length)
table_ref.data:delete(key)
end
end
function delete_row(table_ref, key)
return table_ref.data:delete(key)
end
function delete_row2(table_ref, key1, key2)
return table_ref.data:delete({key1, key2})
end
function ends_with(what, with)
local start = #what - #with + 1
local sub = what:sub(start, #what)
return sub == with
end
function filter(list, criterion)
local result = {}
if list then
for _, item in ipairs(list) do
if criterion(item) then
table.insert(
result,
item
)
end
end
end
return result
end
function find(list, item)
if list then
for i, v in ipairs(list) do
if v == item then
return i
end
end
return -1
else
return -1
end
end
function find_by_prop(array, prop, value)
for i, item in ipairs(array) do
if item[prop] == value then
return i
end
end
return -1
end
function find_many_in_line(line_no, line, needle, result, original)
local first, last
local current = line
local start = 0
while true do
first, last = utf8.find(
current,
needle
)
if first then
else
break
end
local item = {
line = original,
first = first + start,
last = last + start,
line_no = line_no
}
table.insert(result, item)
current = utf8.sub(
current,
last + 1
)
start = start + last
end
end
function find_many_substrings(haystack, needle, ignore_case)
local original
local result = {}
if (is_empty(haystack)) or (is_empty(needle)) then
else
local lines = split(haystack, "\n")
for i, line in ipairs(lines) do
original = line
if ignore_case then
line = utf8.lower(line)
end
find_many_in_line(
i,
line,
needle,
result,
original
)
end
end
return result
end
function get_days_in_month(year, month)
local days = g_days_in_month[month]
if (month == 2) and (is_leap_year(year)) then
return 29
else
return days
end
end
function get_extension(filename)
local dotstr = "."
local dot = dotstr:byte(1)
local i = #filename
while true do
if i > 0 then
else
return ""
end
if filename:byte(i) == dot then
return string.sub(filename, i + 1)
end
i = i - 1
end
end
function get_filename(path)
local parts = split(path, "/")
return parts[#parts]
end
function get_host_name(url)
if (url) and (not (url == "")) then
local first, last = url:find("://")
if last then
local no_prot = url:sub(last + 1)
local slash = no_prot:find("/")
local colon = no_prot:find(":")
local host_port
if slash then
host_port = no_prot:sub(1, slash - 1)
else
host_port = no_prot
end
if colon then
return host_port:sub(1, colon - 1)
else
return host_port
end
else
return ""
end
else
return ""
end
end
function get_mime(filename, default)
local extension = get_extension(filename)
local type = content_types[extension]
if type then
else
type = default
end
return type
end
function get_product(product_id)
local product = price_cfg.products[product_id]
if product then
return product
else
error("product '"
.. tostring(product_id)
.. "' not found")
end
end
function get_row(table_ref, key)
local row = table_ref.data:get(key)
if row then
return get_row_core(table_ref, row)
else
return nil
end
end
function get_row2(table_ref, key1, key2)
local row = table_ref.data:get({key1, key2})
if row then
return get_row_core(table_ref, row)
else
return nil
end
end
function get_row_core(table_ref, row)
local fields = row[#row]
local tfields = table_ref.fields
local count = #tfields
local i = 1
while true do
if i < count then
else
break
end
local field = tfields[i]
local part = row[i]
fields[field.name] = part
i = i + 1
end
return fields
end
function get_rows(table_ref, key1)
local rows = table_ref.data.index.primary
:select(key1)
local output = {}
for _, row in ipairs(rows) do
local out_row = get_row_core(table_ref, row)
table.insert(output, out_row)
end
return output
end
function good_id_symbols(id)
local chars = string_to_chars(id)
for _, code in ipairs(chars) do
if ((((is_alpha(code)) or (is_digit(code))) or (code == under)) or (code == dot)) or (code == dash) then
else
return false
end
end
return true
end
function group_by(list, property)
local result = {}
for _, item in ipairs(list) do
local key = item[property]
local group = result[key]
if group then
else
group = {}
result[key] = group
end
table.insert(
group,
item
)
end
return result
end
function http_post(url, data, mime, headers, user)
local tmp = os.tmpname()
local header_str = ""
local user_str = ""
if headers then
for _, header in ipairs(headers) do
header_str = header_str .. " -H \""
.. header .. "\""
end
end
if user then
user_str = "-u '" .. user .. "'"
end
local command = string.format(
'curl -X POST --data \'%s\' -H "Content-type: %s" %s %s %s > %s',
data,
mime,
header_str,
user_str,
url,
tmp
)
log.info(command)
os.execute(command)
local response = read_all_bytes(tmp)
log.info(tostring(response))
os.remove(tmp)
return response
end
function http_post_json(url, obj, headers, user)
local mime = "application/json; charset=utf-8"
local data = json.encode(obj)
local response = http_post(url, data, mime, headers, user)
if response then
if response == "" then
return {}
else
local result = json.decode(response)
return result
end
else
return nil
end
end
function init()
content_types["html"] = "text/html; charset=utf-8"
content_types["htm"] = "text/html; charset=utf-8"
content_types["css"] = "text/css; charset=utf-8"
content_types["js"] = "application/javascript; charset=utf-8"
content_types["png"] = "image/png"
content_types["jpg"] = "image/jpeg"
content_types["svg"] = "image/svg+xml"
content_types["json"] = "application/json; charset=utf-8"
content_types["txt"] = "text/plain; charset=utf-8"
content_types["ttf"] = "application/x-font-ttf"
content_types["eot"] = "application/vnd.ms-fontobject"
content_types["woff"] = "application/x-font-woff"
content_types["ico"] = "image/x-icon"
content_types["pdf"] = "application/pdf"
init_separators()
g_chars = build_random_chars()
end
function init_separators()
add_separator(" ")
add_separator("\t")
add_separator("\r")
add_separator("\n")
add_separator(".")
add_separator(",")
add_separator(":")
add_separator(";")
add_separator("-")
add_separator("=")
add_separator("\"")
add_separator("'")
add_separator("/")
add_separator("\\")
add_separator("(")
add_separator(")")
add_separator("[")
add_separator("]")
add_separator("{")
add_separator("}")
add_separator("~")
add_separator("`")
add_separator("#")
add_separator("@")
add_separator("%")
add_separator("$")
add_separator("&")
add_separator("?")
add_separator("!")
add_separator("^")
add_separator("|")
add_separator("<")
add_separator(">")
end
function insert1(table_ref, fields)
local id
while true do
id = random_password(6)
local row = table_ref.data:get(id)
if row then
else
break
end
end
local id_field = table_ref.fields[1].name
fields[id_field] = id
insert_row(table_ref, fields)
return id
end
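-- Rows are stored as the declared fields in order, followed by one trailing map
-- that holds any remaining properties; get_row_core reverses this layout.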
function insert_row(table_ref, fields)
local row = {}
local tfields = table_ref.fields
local count = #tfields
local i = 1
while true do
if i < count then
else
table.insert(row, fields)
table_ref.data:replace(row)
break
end
local field = tfields[i]
local field_name = field.name
local part = fields[field_name]
if part == nil then
error("insert_row: " ..
table_ref.name ..
": field is nil: " ..
field_name)
break
end
fields[field_name] = nil
table.insert(row, part)
i = i + 1
end
end
function is_alpha(code)
if (code >= a_code) and (code <= z_code) then
return true
else
return false
end
end
function is_digit(code)
if (code >= d0) and (code <= d9) then
return true
else
return false
end
end
function is_empty(value)
if (value) and (not (#value == 0)) then
return false
else
return true
end
end
function is_leap_year(year)
if year % 4 == 0 then
if year % 100 == 0 then
if year % 400 == 0 then
return true
else
return false
end
else
return true
end
else
return false
end
end
function is_space(code)
return not not gspace[code]
end
function join(list, separator)
local result = ""
for _, item in ipairs(list) do
if result == "" then
result = item
else
result = result .. separator .. item
end
end
return result
end
function list_to_set(list)
local result = {}
add_list_to_set(result, list)
return result
end
function load_as_base64(filename)
local bytes = read_all_bytes(filename)
if bytes then
local b64 = digest.base64_encode(bytes)
return b64
else
return nil
end
end
function make_comparer(prop)
return function(left, right)
return left[prop] < right[prop]
end
end
function make_comparer_desc(prop)
return function(left, right)
return left[prop] > right[prop]
end
end
function make_language_path(language)
if (language) and (not (language == "")) then
if language == "en-us" then
return "en"
else
return language
end
else
return "en"
end
end
function map(list, mapper)
local result = {}
if list then
for _, item in ipairs(list) do
local copy = mapper(item)
table.insert(
result,
copy
)
end
end
return result
end
function months_to_secs(months)
local secs_in_month = 3600 * 24 * 365.25 / 12
return round(months * secs_in_month)
end
function msgpack_call(host, port, request)
local sock = socket.tcp_connect(host, port)
if sock then
local response
if send_msgpack(sock, request) then
response = receive_msgpack(sock)
else
response = nil
end
sock:close()
return response
else
log.error("could not open socket: "
.. tostring(host) .. ":" .. tostring(port))
return nil
end
end
function normalize_string(text)
local low = utf8.lower(text)
local machine = string_normalizer()
machine.result = ""
for i, code in utf8.next, low do
if is_space(code) then
machine:space(code)
else
machine:char(code)
end
end
return machine.result
end
function parse_compound_name(text)
if text then
local state = "idle"
local low = utf8.lower(text)
local result = ""
local start = true
for i, code in utf8.next, low do
local is_separ = g_separators[code]
if state == "idle" then
if is_separ then
else
if start then
else
result = utf8.insert(
result,
" "
)
end
result = utf8.insert(
result,
utf8.char(code)
)
state = "token"
end
else
if is_separ then
state = "idle"
else
start = false
result = utf8.insert(
result,
utf8.char(code)
)
end
end
end
return result
else
return ""
end
end
function parse_date8(date8)
if date8 then
local text = tostring(date8)
local ys = text:sub(1, 4)
local ms = text:sub(5, 6)
local ds = text:sub(7, 8)
local year = tonumber(ys)
local month = tonumber(ms)
local day = tonumber(ds)
if ((year) and (month)) and (day) then
local tt = {
year = year,
month = month,
day = day,
hour = 0,
min = 0,
sec = 0
}
return os.time(tt)
else
return nil
end
else
return nil
end
end
function parse_query(text)
local parts = split(text, "&")
local result = {}
for _, part in ipairs(parts) do
local chunks = split(part, "=")
if #chunks == 2 then
local key = chunks[1]
local value = chunks[2]
result[key] = value
end
end
return result
end
function parse_xml(names, xml)
local self = {
result = {},
names = {}
}
for _, name in ipairs(names) do
local tag = "<" .. name .. ">"
self.names[tag] = name
end
local callbacks = {
Default = function(parser, str)
simple_xml_default(self, str)
end
}
local parser = lxp.new(callbacks)
local ok, msg, line = parser:parse(xml)
parser:stop()
if ok then
return true, self.result
else
return false, msg
end
end
function print_amount(amount)
return string.format("%.2f", amount)
end
function print_table(obj)
for i, row in pairs(obj) do
print(i, row)
end
end
function random_password(length)
local result = ""
local bytes = digest.urandom(length)
local i = 1
while true do
if i <= #bytes then
else
break
end
local code = string.byte(bytes, i)
local index = (code % #g_chars) + 1
local char = g_chars[index]
result = result .. string.char(char)
i = i + 1
end
return result
end
function random_string()
local result = ""
local bytes = digest.urandom(20)
local i = 1
while true do
if i <= #bytes then
else
break
end
local code = string.byte(bytes, i)
result = result .. string.format("%x", code)
i = i + 1
end
return result
end
function read_all_bytes(filename)
local file, msg = io.open(filename, "rb")
if file then
local content = file:read("*all")
file:close()
return content
else
return nil
end
end
function read_json(filename)
local data = read_all_bytes(filename)
local obj = json.decode(data)
return obj
end
function receive_length(sock)
local bytes = sock:read(4)
if bytes then
if #bytes == 0 then
return 0
else
local length = pickle.unpack("i", bytes)
return length
end
else
log.error("error reading length")
return 0
end
end
function receive_msgpack(sock)
local length = receive_length(sock)
if length then
local bytes = sock:read(length)
if length then
return msgpack.decode(bytes)
else
log.error("error reading payload from socket")
return nil
end
else
return nil
end
end
function remove(list, item)
local index = find(list, item)
if index == -1 then
else
table.remove(list, index)
end
end
function repeat_value(what, count)
qs = {}
i = 1
while true do
if i <= count then
else
break
end
table.insert(qs, what)
i = i + 1
end
return qs
end
function replace(str, from_s, to_s)
local i
local c
local result = ""
local from = from_s:byte(1)
local to = to_s:byte(1)
i = 1
while true do
if i <= #str then
else
break
end
c = str:byte(i)
if c == from then
result = result .. to_s
else
result = result .. string.char(c)
end
i = i + 1
end
return result
end
function replace_quoted(text, from, to)
local first, last
if text then
local from2 = "'" .. from .. "'"
first, last = text:find(from2)
if first == nil then
local from3 = "\"" .. from .. "\""
first, last = text:find(from3)
if first == nil then
first, last = text:find(from)
if first == nil then
return nil
else
local to2 = "'" .. to .. "'"
local result = text:sub(1, first - 1) ..
to2 ..
text:sub(last + 1)
return result
end
else
local to2 = "'" .. to .. "'"
local result = text:sub(1, first - 1) ..
to2 ..
text:sub(last + 1)
return result
end
else
local to2 = "'" .. to .. "'"
local result = text:sub(1, first - 1) ..
to2 ..
text:sub(last + 1)
return result
end
else
return nil
end
end
function replace_quoted_in_object(obj, path, from, to)
local result = nil
if obj then
local current = obj
local i
i = 1
while true do
if i < #path then
else
local property = path[#path]
local value = current[property]
local value2 = replace_quoted(
value,
from,
to
)
if value2 then
current[property] = value2
result = obj
end
break
end
local step = path[i]
current = current[step]
if current then
else
break
end
i = i + 1
end
end
return result
end
function reverse(list)
local result = {}
if list then
local i = #list
while true do
if i > 0 then
else
break
end
local value = list[i]
table.insert(result, value)
i = i - 1
end
end
return result
end
function round(num, idp)
local mult = 10^(idp or 0)
return math.floor(num * mult + 0.5) / mult
end
function select_by(table_ref, index_name, value)
local rows = table_ref.data.index[index_name]
:select(value)
local output = {}
for _, row in ipairs(rows) do
local out_row = get_row_core(table_ref, row)
table.insert(output, out_row)
end
return output
end
function send_length(sock, bytes)
local length_bytes = pickle.pack("i", #bytes)
if sock:write(length_bytes) then
return true
else
log.error("could not send length")
return false
end
end
function send_msgpack(sock, obj)
if obj then
local bytes = msgpack.encode(obj)
if send_length(sock, bytes) then
if sock:write(bytes) then
return true
else
log.error("error sending payload")
return false
end
else
return false
end
else
error("obj is nil")
end
end
function set_to_list(set)
local result = {}
if set then
for key, value in pairs(set) do
if value then
table.insert(
result,
key
)
end
end
end
return result
end
function simple_xml_default(self, text)
local expected = self.expected
if expected then
self.result[expected] = text
self.expected = nil
else
local name = self.names[text]
if name then
self.expected = name
end
end
end
function sort_by_prop(array, prop, direction)
if direction == "desc" then
comparer = make_comparer_desc(prop)
else
comparer = make_comparer(prop)
end
table.sort(array, comparer)
end
function split(text, separator)
local tokens = {}
local current = ""
if text then
local sep_code = utf8.codepoint(separator)
for i, code in utf8.next, text do
if sep_code == code then
if #current == 0 then
else
table.insert(tokens, current)
current = ""
end
else
current = utf8.insert(
current,
utf8.char(code)
)
end
end
if #current == 0 then
else
table.insert(tokens, current)
end
end
return tokens
end
function split_trim(text, separ)
local parts_all = split(text, separ)
local parts = {}
for _, raw_part in ipairs(parts_all) do
local part = trim(raw_part)
if #part == 0 then
else
table.insert(
parts,
part
)
end
end
return parts
end
function starts_with(what, with)
local sub = what:sub(1, #with)
return sub == with
end
function string_contains(haystack, needle)
if is_empty(needle) then
return false
else
local first, last = utf8.find(
haystack,
needle
)
if first then
return true
else
return false
end
end
end
function string_contains_words(haystack, needle)
if (is_empty(needle)) or (is_empty(haystack)) then
return false
else
local first, last = utf8.find(
haystack,
needle
)
if first then
if first > 1 then
local before = utf8.sub(
haystack,
first - 1,
first - 1
)
if before == " " then
if last < utf8.len(haystack) then
local after = utf8.sub(
haystack,
last + 1,
last + 1
)
if after == " " then
return true
else
return false
end
else
return true
end
else
return false
end
else
if last < utf8.len(haystack) then
local after = utf8.sub(
haystack,
last + 1,
last + 1
)
if after == " " then
return true
else
return false
end
else
return true
end
end
else
return false
end
end
end
function string_normalizer_Normal_char(self, code)
self.result = append_char(
self.result,
code
)
self.state = "Normal"
end
function string_normalizer_Normal_space(self, code)
self.state = "Whitespace"
end
function string_normalizer_Start_char(self, code)
self.result = append_char(
self.result,
code
)
self.state = "Normal"
end
function string_normalizer_Start_space(self, code)
self.state = "Start"
end
function string_normalizer_Whitespace_char(self, code)
self.result = append_char(
self.result,
32
)
self.result = append_char(
self.result,
code
)
self.state = "Normal"
end
function string_normalizer_Whitespace_space(self, code)
self.state = "Whitespace"
end
function string_to_chars(text)
local result = {}
local i = 1
while true do
if i <= #text then
else
break
end
local code = string.byte(text, i)
table.insert(result, code)
i = i + 1
end
return result
end
function take_first(array, length)
local result = {}
local i
i = 1
while true do
if i <= length then
else
break
end
table.insert(
result,
array[i]
)
i = i + 1
end
return result
end
function trim(text)
return (text:gsub("^%s*(.-)%s*$", "%1"))
end
function update2(space, id, value)
space:update(id, {{"=", 2, value}})
end
function update3(space, id, id2, value)
space:update({id, id2}, {{"=", 3, value}})
end
function update4(space, id, id2, id3, value)
space:update({id, id2, id3}, {{"=", 4, value}})
end
function update_row(table_ref, fields)
local key = {}
local i
local tfields = table_ref.fields
local count = #tfields
i = 1
while true do
if i < count then
else
local old_row = table_ref.data:get(key)
if old_row then
local new_row = {}
i = 1
while true do
if i < count then
else
break
end
local field = tfields[i]
local field_name = field.name
local part = fields[field_name]
if part == nil then
part = old_row[i]
else
fields[field_name] = nil
end
table.insert(new_row, part)
i = i + 1
end
local old_fields = old_row[#old_row]
copy_props(fields, old_fields)
table.insert(new_row, old_fields)
table_ref.data:replace(new_row)
else
error("update_row: " ..
table_ref.name ..
" row not found")
end
break
end
local field = tfields[i]
local field_name = field.name
if field.pk then
local part = fields[field_name]
if part == nil then
error("update_row: " ..
table_ref.name ..
": field is nil: " ..
field_name)
break
end
table.insert(key, part)
end
i = i + 1
end
end
function utc_time()
return os.time(os.date("!*t"))
end
function write2(space, id, value)
space:upsert({id, value}, {{"=", 2, value}})
end
function write4(space, id, id2, id3, value)
space:upsert({id, id2, id3, value}, {{"=", 4, value}})
end
function write_all_bytes(filename, data)
local file, msg = io.open(filename, "w+b")
if file then
if data then
file:write(data)
end
file:close()
return true
else
return false
end
end
function write_json(filename, obj)
local data = json.encode(obj)
return write_all_bytes(filename, data)
end
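-- string_normalizer: a three-state machine (Start/Normal/Whitespace) that drops
-- leading whitespace and collapses internal whitespace runs into single spaces.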
function string_normalizer()
local obj = {}
obj.type_name = "string_normalizer"
obj.state = "Start"
obj.char = function(self, code)
local _state_ = self.state
if _state_ == "Start" then
return string_normalizer_Start_char(self, code)
elseif _state_ == "Normal" then
return string_normalizer_Normal_char(self, code)
elseif _state_ == "Whitespace" then
return string_normalizer_Whitespace_char(self, code)
end
return nil
end
obj.space = function(self, code)
local _state_ = self.state
if _state_ == "Start" then
return string_normalizer_Start_space(self, code)
elseif _state_ == "Normal" then
return string_normalizer_Normal_space(self, code)
elseif _state_ == "Whitespace" then
return string_normalizer_Whitespace_space(self, code)
end
return nil
end
return obj
end
init()
return {
build_needle = build_needle,
copy_props = copy_props,
contains = contains,
reverse = reverse,
write2 = write2,
update2 = update2,
update3 = update3,
update4 = update4,
remove = remove,
write4 = write4,
read_all_bytes = read_all_bytes,
write_all_bytes = write_all_bytes,
split = split,
list_to_set = list_to_set,
set_to_list = set_to_list,
trim = trim,
good_id_symbols = good_id_symbols,
random_string = random_string,
write_json = write_json,
is_empty = is_empty,
join = join,
replace = replace,
add_range = add_range,
get_extension = get_extension,
get_mime = get_mime,
get_filename = get_filename,
load_as_base64 = load_as_base64,
starts_with = starts_with,
date = date,
add_list_to_set = add_list_to_set,
days_to_secs = days_to_secs,
make_language_path = make_language_path,
round = round,
months_to_secs = months_to_secs,
get_product = get_product,
copy = copy,
send_msgpack = send_msgpack,
receive_msgpack = receive_msgpack,
http_post = http_post,
http_post_json = http_post_json,
msgpack_call = msgpack_call,
string_to_chars = string_to_chars,
is_digit = is_digit,
get_host_name = get_host_name,
print_amount = print_amount,
parse_xml = parse_xml,
parse_query = parse_query,
parse_compound_name = parse_compound_name,
parse_date8 = parse_date8,
date8_to_expiry = date8_to_expiry,
add_months = add_months,
get_days_in_month = get_days_in_month,
is_leap_year = is_leap_year,
read_json = read_json,
normalize_string = normalize_string,
string_contains = string_contains,
find_many_substrings = find_many_substrings,
print_table = print_table,
string_contains_words = string_contains_words,
random_password = random_password,
replace_quoted = replace_quoted,
replace_quoted_in_object = replace_quoted_in_object,
bulk_action = bulk_action,
bash_escape = bash_escape,
map = map,
group_by = group_by,
filter = filter,
repeat_value = repeat_value,
add_set = add_set,
split_trim = split_trim,
get_row = get_row,
get_row2 = get_row2,
get_rows = get_rows,
insert_row = insert_row,
select_by = select_by,
delete_row = delete_row,
delete_row2 = delete_row2,
update_row = update_row,
delete_by = delete_by,
create_table = create_table,
insert1 = insert1,
utc_time = utc_time,
sort_by_prop = sort_by_prop,
ends_with = ends_with
}
| 22.975 | 112 | 0.53667 |
e237c4a0b8429b19e54a06ecba180ffb1faee3b6 | 605 | py | Python | setup.py | NikitaMelnikov/open-api-python-client | c45579b9fdb3c4e62c45590fb6b2f12321596b2a | ["Unlicense"] | 102 | 2019-09-27T12:18:37.000Z | 2022-02-02T21:01:44.000Z | setup.py | NikitaMelnikov/open-api-python-client | c45579b9fdb3c4e62c45590fb6b2f12321596b2a | ["Unlicense"] | 27 | 2019-12-05T13:19:44.000Z | 2021-12-20T08:17:28.000Z | setup.py | NikitaMelnikov/open-api-python-client | c45579b9fdb3c4e62c45590fb6b2f12321596b2a | ["Unlicense"] | 23 | 2019-10-03T13:17:55.000Z | 2022-01-30T06:43:02.000Z |
# coding: utf-8
from setuptools import setup, find_packages
NAME = "tinkoff-invest-openapi-client"
VERSION = "0.0.7"
REQUIRES = [
"websocket_client == 0.56.0",
"certifi >= 14.05.14",
"six >= 1.10",
"python_dateutil >= 2.5.3",
"setuptools >= 21.0.0",
"urllib3 >= 1.15.1",
"pytz == 2019.3"
]
setup(
name=NAME,
version=VERSION,
description="Tinkoff Invest OpenAPI Сlient",
author_email="[email protected]",
url="",
keywords=["Swagger", "OpenAPI", "Tinkoff"],
install_requires=REQUIRES,
packages=find_packages(),
include_package_data=True
)
| 20.862069 | 48 | 0.626446 |
a7db291c73abaaf0f18548d53f010217ead0937f | 836 | asm | Assembly | oeis/022/A022036.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 11 | 2021-08-22T19:44:55.000Z | 2022-03-20T16:47:57.000Z | oeis/022/A022036.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 9 | 2021-08-29T13:15:54.000Z | 2022-03-09T19:52:31.000Z | oeis/022/A022036.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 3 | 2021-08-22T20:56:47.000Z | 2021-09-29T06:26:12.000Z | ; A022036: Define the sequence T(a_0,a_1) by a_{n+2} is the greatest integer such that a_{n+2}/a_{n+1}<a_{n+1}/a_n for n >= 0. This is T(7,43).
; Submitted by Jon Maiga
; 7,43,264,1620,9940,60989,374211,2296051,14087908,86439348,530366956,3254178965,19966705347,122509956183,751685824132,4612127828672,28298688661016,173632607265289,1065359694467895,6536740399587511,40107557356899000,246088426157852600,1509927741297161799,9264481956889456492,56844194316076460597,348779612554694159011,2140011299261194103951,13130493286064485150381,80564926921136521329745,494323199319306342190092,3033024849938355392864854,18609767360729040969857612,114184175321707230298230705
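; Worked example of the definition above (added for illustration): with a_0=7 and a_1=43,
; a_2 is the greatest integer with a_2/43 < 43/7, i.e. a_2 < 43^2/7 = 264.14..., so a_2 = 264;
; likewise a_3 < 264^2/43 = 1620.8..., so a_3 = 1620, matching the data line above.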
mov $4,2
lpb $4
mov $1,7
mov $2,5
mov $3,8
mov $4,1
lpb $0
sub $0,1
div $3,$2
mov $2,$1
mul $1,6
add $1,$3
mul $3,$1
lpe
lpe
mov $0,$1
| 39.809524 | 494 | 0.77512 |
b8abb0396479c2f2d3ee3cfbdaa0404a997b53d9 | 93 | h | C | crypto_hash.h | cyph/mceliece.js | b0aed2b632fba91e0cb05a02faef0367d9f6fbf1 | [
"BSD-2-Clause"
] | 7 | 2016-09-25T17:09:18.000Z | 2020-01-05T21:52:07.000Z | crypto_hash.h | cyph/mceliece.js | b0aed2b632fba91e0cb05a02faef0367d9f6fbf1 | [
"BSD-2-Clause"
] | 1 | 2018-12-08T14:38:10.000Z | 2019-09-10T14:15:29.000Z | crypto_hash.h | cyph/mceliece.js | b0aed2b632fba91e0cb05a02faef0367d9f6fbf1 | [
"BSD-2-Clause"
] | 3 | 2017-03-27T00:01:12.000Z | 2021-03-24T21:37:55.000Z | int crypto_hash(
unsigned char *out,
const unsigned char *in,
unsigned long long inlen
);
| 15.5 | 25 | 0.741935 |
1aaa336f68b8c7a2bd71a54054446086dee3a643 | 8,039 | py | Python | mongo_old/carrotmongo.py | andreiliphd/Carrot | d949d25de384ebce243e93e65dd692f59afd7c90 | [
"MIT"
] | 6 | 2019-07-11T17:12:27.000Z | 2019-09-16T16:06:07.000Z | mongo_old/carrotmongo.py | andreiliphd/Carrot | d949d25de384ebce243e93e65dd692f59afd7c90 | [
"MIT"
] | null | null | null | mongo_old/carrotmongo.py | andreiliphd/Carrot | d949d25de384ebce243e93e65dd692f59afd7c90 | [
"MIT"
] | 1 | 2020-04-20T18:36:47.000Z | 2020-04-20T18:36:47.000Z | import mongoengine as mo
import datetime
import pickle
import os
import glob
import numpy as np
try:
from configcarrot import *
except ImportError:
login = '' # Please provide your login to MongoDB
password = '' # Please provide your password to MongoDB
host = '' # Please provide your host name to MongoDB
class Carrot(mo.Document):
date_time = mo.DateTimeField(required=True)
epoch = mo.IntField()
train_loss = mo.FloatField()
test_loss = mo.FloatField()
train_accuracy = mo.FloatField()
test_accuracy = mo.FloatField()
parameters = mo.FileField()
gradients = mo.FileField()
class Query():
def __init__(self):
self.model = Carrot
self.keys = []
self.date_time = []
self.epoch = []
self.train_loss = []
self.test_loss = []
self.train_accuracy = []
self.test_accuracy = []
if not os.path.exists(os.path.join('carrot', 'preprocessing')):
mo.connect(host='mongodb+srv://' + login + ':' + password + '@' + host,
authentication_source='admin')
self.init_populate()
self.get_keys()
if not os.path.exists(os.path.join('carrot', 'processed')):
self.processing()
self.populate_training()
self.current_option = ''
self.get_options()
def init_populate(self):
print('init_populate started')
for selected_data in self.model.objects(epoch=1):
data_layer = selected_data.parameters.read()
data_layer = pickle.loads(data_layer)
self.keys = list(data_layer.keys())
result = []
date_time = []
epoch = []
train_loss = []
test_loss = []
train_accuracy = []
test_accuracy = []
for obj_num, selected_data in enumerate(self.model.objects):
print('Started loop')
date_time.append(selected_data.date_time)
epoch.append(selected_data.epoch)
train_loss.append(selected_data.train_loss)
test_loss.append(selected_data.test_loss)
train_accuracy.append(selected_data.train_accuracy)
test_accuracy.append(selected_data.test_accuracy)
parameters = pickle.loads(selected_data.parameters.read())
gradients = pickle.loads(selected_data.gradients.read())
os.makedirs(os.path.join('carrot','preprocessing'), exist_ok=True)
for number, key in enumerate(self.keys):
print(key)
print(type(key))
print(parameters[key])
pickle.dump(parameters[key], open(
os.path.join('carrot', 'preprocessing', 'parameters_' +
key + '_' + str(obj_num) + '_' + str(number)), "wb"))
pickle.dump(gradients[key], open(
os.path.join('carrot', 'preprocessing','gradients_' +
key + '_' + str(obj_num) + '_' + str(number)), "wb"))
print('finishing')
result.append(date_time)
result.append(epoch),
result.append(train_loss)
result.append(test_loss)
result.append(train_accuracy)
result.append(test_accuracy)
pickle.dump(result, open(
os.path.join('carrot', 'preprocessing', 'data'), "wb"))
def get_keys(self):
os.makedirs(os.path.join('carrot', 'processed'), exist_ok=True)
files = glob.glob('carrot/preprocessing/*')
keys = []
for file in files:
file = file[21:]
if 'data' in file:
continue
name = file.split('_')
if name[1] not in keys:
keys.append(name[1])
self.keys = keys
def processing(self):
os.makedirs(os.path.join('carrot', 'processed'), exist_ok=True)
files = glob.glob('carrot/preprocessing/*')
maximum = []
for file in files:
file = file[21:]
if 'data' in file:
continue
name = file.split('_')
maximum.append(int(name[2]))
maximum = max(maximum)
for num, key in enumerate(self.keys):
results_param = []
results_grad = []
            for seq in range(maximum + 1):
for file in files:
file_full = file
file = file[21:]
if 'data' in file:
continue
name = file.split('_')
if name[1] == key and name[0] == "parameters" and name[2] == str(seq):
_ = pickle.load(open(file_full, 'rb'))
results_param.append(_)
if name[1] == key and name[0] == "gradients" and name[2] == str(seq):
_ = pickle.load(open(file_full, 'rb'))
results_grad.append(_)
pickle.dump(results_param, open(
os.path.join('carrot', 'processed', 'parameters_' +
key), "wb"))
pickle.dump(results_grad, open(
os.path.join('carrot', 'processed', 'gradients_' +
key), "wb"))
def populate_training(self):
os.makedirs(os.path.join('carrot', 'processed'), exist_ok=True)
files = glob.glob('carrot/preprocessing/*')
for file in files:
full_file = file
file = file[21:]
if 'data' in file:
_ = pickle.load(open(full_file, 'rb'))
self.date_time = _[0]
self.epoch = _[1]
self.train_loss = _[2]
self.test_loss = _[3]
self.train_accuracy = _[4]
self.test_accuracy = _[5]
def search_layer(self, layer, parameter = True):
os.makedirs(os.path.join('carrot', 'processed'), exist_ok=True)
files = glob.glob('carrot/processed/*')
result = []
for file in files:
full_file = file
file = file[17:]
name = file.split('_')
if parameter:
if (name[1] == layer) and (name[0] == 'parameters'):
result = pickle.load(open(full_file, 'rb'))
break
else:
if (name[1] == layer) and (name[0] == 'gradients'):
result = pickle.load(open(full_file, 'rb'))
break
return result
def shape_processing(self, layer, parameter=True, seq=0):
self.current_option = layer
arr = self.search_layer(layer, parameter)
arr = np.concatenate(arr, axis=0)
if len(arr.shape) != 4:
try:
arr = arr.reshape(-1, 3, 3, 3)
except Exception:
try:
arr = arr.reshape(-1, 2, 2, 2)
except Exception:
try:
arr = arr.reshape(-1, 1, 1, 1)
except Exception:
pass
print('Shape of the array', arr.shape)
batch_size = arr.shape[0] - 1
x = arr[seq].transpose((0, 1, 2)).ravel()
y = arr[seq].transpose((1, 2, 0)).ravel()
z = arr[seq].transpose((2, 0, 1)).ravel()
return (batch_size, x, y, z)
def get_options(self):
# options = [
# {'label': 'New York City', 'value': 'NYC'},
# {'label': 'Montréal', 'value': 'MTL'},
# {'label': 'San Francisco', 'value': 'SF'}
# ]
options = []
for num, key in enumerate(self.keys):
if num == 0:
self.current_option = key
dic = {}
dic['label'] = key
dic['value'] = key
options.append(dic)
return options
def print_shape(self):
for key in self.keys:
arr = self.search_layer(key, parameter=False)
print(arr[0].shape)
query = Query()
| 35.414097 | 90 | 0.511133 |
38a2693edb0f969032f85ff3a7970e9fb16d081c | 1,518 | php | PHP | application/views/layouts/main_dashboard.php | changuarin/project2 | cc60b60ad7f133f0aacc4c8315102a25ab59263d | [
"MIT"
] | null | null | null | application/views/layouts/main_dashboard.php | changuarin/project2 | cc60b60ad7f133f0aacc4c8315102a25ab59263d | [
"MIT"
] | null | null | null | application/views/layouts/main_dashboard.php | changuarin/project2 | cc60b60ad7f133f0aacc4c8315102a25ab59263d | [
"MIT"
] | null | null | null | <!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<!-- Meta, title, CSS, favicons, etc. -->
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>CHAN CHAN CHAN ! | </title>
<!-- Bootstrap -->
<link type="text/css" href="<?php echo base_url(); ?>assets/vendors/bootstrap/dist/css/bootstrap.min.css" rel="stylesheet">
<!-- Font Awesome -->
<link type="text/css" href="<?php echo base_url(); ?>assets/vendors/font-awesome/css/font-awesome.min.css" rel="stylesheet">
<!-- NProgress -->
<link type="text/css" href="<?php echo base_url(); ?>assets/vendors/nprogress/nprogress.css" rel="stylesheet">
<!-- Animate.css -->
<link type="text/css" href="<?php echo base_url(); ?>assets/vendors/animate.css/animate.min.css" rel="stylesheet">
<!-- Bootstrap Core CSS -->
<link href="<?php echo base_url(); ?>assets/css1/bootstrap.min.css" rel="stylesheet">
<!-- Custom CSS -->
<link href="<?php echo base_url(); ?>assets/css1/sb-admin.css" rel="stylesheet">
<!-- Morris Charts CSS -->
<link href="<?php echo base_url(); ?>assets/css1/plugins/morris.css" rel="stylesheet">
<!-- Custom Fonts -->
<link href="<?php echo base_url(); ?>assets/font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css">
<?php
$this->load->view($main_content);
?>
| 39.947368 | 128 | 0.627141 |
f9ccd1f3367a7f74960b3b5eb3c8142e23dc718a | 524 | asm | Assembly | programs/oeis/005/A005090.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 22 | 2018-02-06T19:19:31.000Z | 2022-01-17T21:53:31.000Z | programs/oeis/005/A005090.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 41 | 2021-02-22T19:00:34.000Z | 2021-08-28T10:47:47.000Z | programs/oeis/005/A005090.asm | neoneye/loda | afe9559fb53ee12e3040da54bd6aa47283e0d9ec | [
"Apache-2.0"
] | 5 | 2021-02-24T21:14:16.000Z | 2021-08-09T19:48:05.000Z | ; A005090: Number of primes == 2 mod 3 dividing n.
; 0,1,0,1,1,1,0,1,0,2,1,1,0,1,1,1,1,1,0,2,0,2,1,1,1,1,0,1,1,2,0,1,1,2,1,1,0,1,0,2,1,1,0,2,1,2,1,1,0,2,1,1,1,1,2,1,0,2,1,2,0,1,0,1,1,2,0,2,1,2,1,1,0,1,1,1,1,1,0,2,0,2,1,1,2,1,1,2,1,2,0,2,0,2,1,1,0,1,1,2
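; Example (added for illustration): a(10) = 2 because the primes dividing 10 are 2 and 5,
; both congruent to 2 mod 3; primes are counted without multiplicity (e.g. 8 = 2^3 gives a(8) = 1).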
add $0,1
mov $2,2
mov $3,$0
mov $4,$0
lpb $3
mov $5,$4
lpb $5
lpb $5
mov $6,$0
div $0,$2
sub $3,1
mod $6,$2
cmp $6,0
sub $5,$6
lpe
add $1,$6
lpe
add $2,3
mov $6,$0
cmp $6,1
cmp $6,0
sub $3,$6
lpe
mov $0,$1
| 18.714286 | 201 | 0.469466 |
a31e774d845ce7263ed9557df4efbcb97e7ca7ec | 1,210 | java | Java | src/com/facebook/buck/jvm/java/abi/StubJarResourceEntry.java | fkorotkov/buck | 4d63790ceda1028281600af9cf75153ccb92a5f5 | [
"Apache-2.0"
] | 1 | 2018-02-28T06:26:56.000Z | 2018-02-28T06:26:56.000Z | src/com/facebook/buck/jvm/java/abi/StubJarResourceEntry.java | fkorotkov/buck | 4d63790ceda1028281600af9cf75153ccb92a5f5 | [
"Apache-2.0"
] | 1 | 2018-12-10T15:54:22.000Z | 2018-12-10T19:30:37.000Z | src/com/facebook/buck/jvm/java/abi/StubJarResourceEntry.java | fkorotkov/buck | 4d63790ceda1028281600af9cf75153ccb92a5f5 | [
"Apache-2.0"
] | null | null | null | /*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.jvm.java.abi;
import java.io.IOException;
import java.nio.file.Path;
class StubJarResourceEntry extends StubJarEntry {
private final LibraryReader input;
private final Path path;
public static StubJarResourceEntry of(LibraryReader input, Path path) {
return new StubJarResourceEntry(input, path);
}
private StubJarResourceEntry(LibraryReader input, Path path) {
this.input = input;
this.path = path;
}
@Override
public void write(StubJarWriter writer) throws IOException {
writer.writeEntry(path, () -> input.openResourceFile(path));
}
}
| 30.25 | 76 | 0.742149 |
f88921a4aaa50797c28c5c59e547bb8b5f75ff8b | 1,626 | h | C | SDKFrameworks/NOSSDK/Storage/NOSUploadOption.h | nemOoO-wang/MMLive | 01753b5ba72138e4d3f3d6d8d397a40b34aef062 | [
"MIT"
] | 5 | 2018-02-27T07:06:17.000Z | 2020-12-24T03:44:28.000Z | NOSSDK/Storage/NOSUploadOption.h | NetEase-Object-Storage/nos-ios-sdk | 10178d3c90cc9c8eea41c81ed14aa18e11103284 | [
"MIT"
] | null | null | null | NOSSDK/Storage/NOSUploadOption.h | NetEase-Object-Storage/nos-ios-sdk | 10178d3c90cc9c8eea41c81ed14aa18e11103284 | [
"MIT"
] | null | null | null | //
// NOSUploadOption.h
// NOSSDK
//
// Created by hzzhenghuabin on 2015/1/11.
//  Copyright (c) 2015 NetEase. All rights reserved.
//
#import <Foundation/Foundation.h>
/**
 *  Upload progress callback.
 *
 *  @param key      The storage key specified for the upload.
 *  @param percent  Progress percentage.
 */
typedef void (^NOSUpProgressHandler)(NSString *key, float percent);
/**
 *  Callback for cancelling an upload in progress.
 *
 *  @return YES to cancel, otherwise NO.
 */
typedef BOOL (^NOSUpCancellationSignal)(void);
/**
 *  Collection of optional parameters. The SDK does not modify this object during an upload,
 *  so it can be reused as long as the parameters and their dependencies have not changed.
 */
@interface NOSUploadOption : NSObject
/**
 *  User-defined metadata; keys must start with x-nos-meta-
 */
@property (copy, nonatomic, readonly) NSDictionary *metas;
/**
 *  MIME type of the file.
 */
@property (copy, nonatomic, readonly) NSString *mimeType;
/**
 *  Whether to perform an MD5 check.
 */
//@property (readonly) BOOL checkMd5;
/**
 *  Progress callback.
 */
@property (copy, readonly) NOSUpProgressHandler progressHandler;
/**
 *  Cancellation callback.
 */
@property (copy, readonly) NOSUpCancellationSignal cancellationSignal;
/**
 *  Initializer for the optional parameter set.
 *
 *  @param mimeType     MIME type.
 *  @param progress     Progress callback.
 *  @param metas        User-defined metadata.
 *  @param check        Whether to perform an MD5 check.
 *  @param cancellation Cancellation callback.
 *
 *  @return An instance of the optional parameter class.
 */
- (instancetype)initWithMime:(NSString *)mimeType
progressHandler:(NOSUpProgressHandler)progress
metas:(NSDictionary *)metas
//checkMd5:(BOOL)check
cancellationSignal:(NOSUpCancellationSignal)cancellation;
- (instancetype)initWithProgessHandler:(NOSUpProgressHandler)progress;
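// Illustrative usage (added sketch; handler bodies and values are hypothetical):
//   NOSUploadOption *option =
//       [[NOSUploadOption alloc] initWithMime:@"image/jpeg"
//                             progressHandler:^(NSString *key, float percent) { NSLog(@"%@ %f", key, percent); }
//                                       metas:@{@"x-nos-meta-uid": @"42"}
//                          cancellationSignal:^BOOL{ return NO; }];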
@end
| 21.394737 | 71 | 0.627921 |
98072800023e6c145ca54c06505acd0fcaa7cdd1 | 232 | py | Python | config.py | caiopo/reddit-link-bot | 5e3ec64f8f1cedb5164d20b7b2cbbdec2338bd82 | [
"MIT"
] | null | null | null | config.py | caiopo/reddit-link-bot | 5e3ec64f8f1cedb5164d20b7b2cbbdec2338bd82 | [
"MIT"
] | 1 | 2018-06-13T14:45:51.000Z | 2018-06-13T14:47:31.000Z | config.py | caiopo/reddit-link-bot | 5e3ec64f8f1cedb5164d20b7b2cbbdec2338bd82 | [
"MIT"
] | 1 | 2018-06-11T06:28:31.000Z | 2018-06-11T06:28:31.000Z | from os import environ
# the token you get from botfather
BOT_TOKEN = environ['BOT_TOKEN']
APP_NAME = environ['APP_NAME']
PORT = int(environ.get('PORT', 5000))
WEBHOOK_URL = 'https://' + APP_NAME + '.herokuapp.com/' + BOT_TOKEN
| 21.090909 | 67 | 0.706897 |
4b850fca47a28b4d93716589ba65ebe4fa6022dd | 1,780 | kt | Kotlin | src/main/kotlin/algorithms/DeBruijnGraphFromKmers.kt | jimandreas/stepikBioinformaticsCourse | c378d2976ddc4a772fa68f7f17d3c8b224807be8 | [
"Apache-2.0"
] | null | null | null | src/main/kotlin/algorithms/DeBruijnGraphFromKmers.kt | jimandreas/stepikBioinformaticsCourse | c378d2976ddc4a772fa68f7f17d3c8b224807be8 | [
"Apache-2.0"
] | null | null | null | src/main/kotlin/algorithms/DeBruijnGraphFromKmers.kt | jimandreas/stepikBioinformaticsCourse | c378d2976ddc4a772fa68f7f17d3c8b224807be8 | [
"Apache-2.0"
] | null | null | null | @file:Suppress("unused")
package algorithms
/**
* from a [d] list of kmer strings,
* produce a map of kmer to kmer connections
*
* Example:
val input = listOf(
"GAGG",
"CAGG",
"GGGG",
"GGGA",
"CAGG",
"AGGG",
"GGAG"
)
output: (as a map of string to list of strings)
AGG -> GGG
CAG -> AGG,AGG
GAG -> AGG
GGA -> GAG
GGG -> GGG,GGA
*/
fun deBruijnGraphFromKmers(d: List<String>): Map<String, List<String>> {
val strLen = d[0].length
val resultMap: MutableMap<String, MutableList<String>> = mutableMapOf()
for (s in d) {
val prefix = s.substring(0, strLen - 1)
val suffix = s.substring(1, strLen)
if (resultMap.containsKey(prefix)) {
val list = resultMap[prefix]
list!!.add(suffix)
} else {
resultMap.putIfAbsent(prefix, mutableListOf(suffix))
}
}
return (resultMap.toSortedMap())
}
/**
 * for an [input] map of kmer to kmer list, iterate over the
 * entries and output a string with the representation.
*/
fun deBruijnDirectedGraphConversion(input: Map<String, List<String>>): String {
val buf = StringBuilder()
for (entry in input) {
buf.append("${entry.key} -> ")
val theList = entry.value
buf.append(theList.joinToString(separator = ","))
buf.append("\n")
}
return buf.toString()
}
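// Illustrative usage of the two functions above (added sketch; values mirror the KDoc example):
//   val graph = deBruijnGraphFromKmers(listOf("GAGG", "CAGG", "GGGG", "GGGA", "CAGG", "AGGG", "GGAG"))
//   graph["CAG"]                                  // -> [AGG, AGG]
//   print(deBruijnDirectedGraphConversion(graph)) // "AGG -> GGG", "CAG -> AGG,AGG", ...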
/**
 * for a [s] string of kmers (e.g. text from a problem description),
* return a list of parsed lines.
* Assumptions: the string is a trimmed list of just kmers - no whitespace
*/
fun parseKmerString(s: String): List<String> {
val reader = s.reader()
val lines = reader.readLines()
val stringList: MutableList<String> = mutableListOf()
for (line in lines) {
stringList.add(line)
}
return stringList
} | 23.116883 | 79 | 0.625281 |
e2a555db8d7cf9ac42df8cdcb1a96397251f7187 | 933 | py | Python | leetcode/933.py | GihwanKim/Baekjoon | 52eb2bf80bb1243697858445e5b5e2d50d78be4e | [
"MIT"
] | null | null | null | leetcode/933.py | GihwanKim/Baekjoon | 52eb2bf80bb1243697858445e5b5e2d50d78be4e | [
"MIT"
] | null | null | null | leetcode/933.py | GihwanKim/Baekjoon | 52eb2bf80bb1243697858445e5b5e2d50d78be4e | [
"MIT"
] | null | null | null | """
File: 933.py
Title: Number of Recent Calls
Difficulty: Easy
URL: https://leetcode.com/problems/number-of-recent-calls/
"""
import bisect
import unittest
class RecentCounter:
def __init__(self):
self.requests = []
def ping(self, t: int) -> int:
self.requests.append(t)
recent_requests = 0
i = bisect.bisect(self.requests, t) - 1
while i >= 0:
r = self.requests[i]
if (t - 3000) <= r:
recent_requests += 1
else:
break
i -= 1
return recent_requests
class SolutionTestCase(unittest.TestCase):
def test_example1(self):
obj = RecentCounter()
self.assertEqual(obj.ping(1), 1)
self.assertEqual(obj.ping(100), 2)
self.assertEqual(obj.ping(3001), 3)
self.assertEqual(obj.ping(3002), 3)
if __name__ == "__main__":
unittest.main()
| 21.697674 | 62 | 0.565916 |
9aa107a2dd974d929da416818a778f28e7235e7d | 372 | py | Python | Exercicios/ex063.py | vincytarsis/Python | f98005917486bc191c85c971ec8e2c71fb9dd4c7 | [
"MIT"
] | null | null | null | Exercicios/ex063.py | vincytarsis/Python | f98005917486bc191c85c971ec8e2c71fb9dd4c7 | [
"MIT"
] | null | null | null | Exercicios/ex063.py | vincytarsis/Python | f98005917486bc191c85c971ec8e2c71fb9dd4c7 | [
"MIT"
] | null | null | null | """ Escreva um programa que leia um número n inteiro qualquer e mostre na tela os n primeiros elementos de
uma sequência de Fibonacci. Ex: 0 → 1 → 1 → 2 → 3 → 5 → 8 """
n = int(input('Entre com um número:'))
a = 0
b = 1
c = 0
count = 1
print('Sequencia de Fibonacci: ', end=' ')
while count <= n:
print(c, end=' ')
count += 1
a = b
b = c
c = a + b
| 19.578947 | 106 | 0.575269 |
587659843cfa27303309ca1f74d0b6ea2c09c967 | 418 | css | CSS | src/AntiSaccades/AntiSaccadesExercise/AntiSaccadesExercise.css | TYohoJr/eyeTracker | cba849b45ac9d1afb908508ec4bc870d752ab136 | [
"MIT"
] | null | null | null | src/AntiSaccades/AntiSaccadesExercise/AntiSaccadesExercise.css | TYohoJr/eyeTracker | cba849b45ac9d1afb908508ec4bc870d752ab136 | [
"MIT"
] | null | null | null | src/AntiSaccades/AntiSaccadesExercise/AntiSaccadesExercise.css | TYohoJr/eyeTracker | cba849b45ac9d1afb908508ec4bc870d752ab136 | [
"MIT"
] | null | null | null | #antisaccades-center-dot {
height: 30px;
width: 30px;
border-radius: 50%;
position: fixed;
top: 50%;
left: 50%;
}
#antisaccades-right-dot {
height: 30px;
width: 30px;
border-radius: 50%;
position: absolute;
top: 50%;
left: 60%;
}
#antisaccades-left-dot {
height: 30px;
width: 30px;
border-radius: 50%;
position: absolute;
top: 50%;
left: 40%;
} | 16.076923 | 26 | 0.566986 |
38b00d6cb48bf1ffae5477f68be874ffeab6826f | 1,665 | php | PHP | src/Message/Query/QueryResponse.php | dmgctrlr/omnipay-nmi | 9396e86ebc539e21b0db4688c63239d810d697dc | [
"MIT"
] | null | null | null | src/Message/Query/QueryResponse.php | dmgctrlr/omnipay-nmi | 9396e86ebc539e21b0db4688c63239d810d697dc | [
"MIT"
] | null | null | null | src/Message/Query/QueryResponse.php | dmgctrlr/omnipay-nmi | 9396e86ebc539e21b0db4688c63239d810d697dc | [
"MIT"
] | null | null | null | <?php
namespace Omnipay\NMI\Message\Query;
use Omnipay\Common\Message\RequestInterface;
use Omnipay\Common\Message\AbstractResponse;
/**
* NMI Direct Post Response
*/
class QueryResponse extends AbstractResponse
{
public function __construct(RequestInterface $request, $data)
{
$this->request = $request;
$xml = simplexml_load_string($data, 'SimpleXMLElement', LIBXML_NOWARNING);
$this->data = $xml;
parent::__construct($request, $xml);
}
public function isSuccessful()
{
return isset($this->data->transaction);
}
public function getCode()
{
return trim($this->data['response']);
}
public function getResponseCode()
{
return trim($this->data['response_code']);
}
public function getMessage()
{
return trim($this->data['responsetext']);
}
public function getAuthorizationCode()
{
return trim($this->data['authcode']);
}
public function getAVSResponse()
{
return trim($this->data['avsresponse']);
}
public function getCVVResponse()
{
return trim($this->data['cvvresponse']);
}
public function getOrderId()
{
return trim($this->data['orderid']);
}
public function getTransactionReference()
{
return trim($this->data['transactionid']);
}
public function getTransactions()
{
return $this->data->transaction;
}
public function getCardReference()
{
if (isset($this->data['customer_vault_id'])) {
return trim($this->data['customer_vault_id']);
}
return null;
}
}
| 20.555556 | 82 | 0.607207 |
0a4507b55d966dd34dbc7b30c428a4f085ba9084 | 670 | cs | C# | GameEngine/Mathematic/ComplexMath.cs | jwdeveloper/game_console | 3acd0c227926bcc67d6c78cb6d39cb168ec4def6 | [
"MIT"
] | null | null | null | GameEngine/Mathematic/ComplexMath.cs | jwdeveloper/game_console | 3acd0c227926bcc67d6c78cb6d39cb168ec4def6 | [
"MIT"
] | null | null | null | GameEngine/Mathematic/ComplexMath.cs | jwdeveloper/game_console | 3acd0c227926bcc67d6c78cb6d39cb168ec4def6 | [
"MIT"
] | null | null | null | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GameEngine
{
public class ComplexMath
{
public static int GetDistnace(Vector2 p1, Vector2 p2) => (int)Math.Sqrt((p1.x - p2.x) * (p1.x - p2.x) + (p1.y - p2.y) * (p1.y - p2.y));
public static Vector2 GetDirection(Vector2 p1, Vector2 p2)
{
Vector2 direction = p1 - p2;
direction.x = direction.x == 0 ? 0 : (direction.x / Math.Abs(direction.x));
direction.y = direction.y == 0 ? 0 : (direction.y / Math.Abs(direction.y));
return direction;
}
}
}
| 22.333333 | 143 | 0.592537 |
5d3cf98ee50d07bafbbeca6c2cee3e9a6d3a3570 | 3,394 | cpp | C++ | dev/Code/Framework/AzQtComponents/AzQtComponents/Gallery/SliderComboPage.cpp | brianherrera/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | [
"AML"
] | 1,738 | 2017-09-21T10:59:12.000Z | 2022-03-31T21:05:46.000Z | dev/Code/Framework/AzQtComponents/AzQtComponents/Gallery/SliderComboPage.cpp | ArchitectureStudios/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | [
"AML"
] | 427 | 2017-09-29T22:54:36.000Z | 2022-02-15T19:26:50.000Z | dev/Code/Framework/AzQtComponents/AzQtComponents/Gallery/SliderComboPage.cpp | ArchitectureStudios/lumberyard | f85344403c1c2e77ec8c75deb2c116e97b713217 | [
"AML"
] | 671 | 2017-09-21T08:04:01.000Z | 2022-03-29T14:30:07.000Z | /*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
#include "SliderComboPage.h"
#include <Gallery/ui_SliderComboPage.h>
#include <AzQtComponents/Components/Widgets/Slider.h>
#include <AzQtComponents/Components/Widgets/SliderCombo.h>
#include <QDebug>
using namespace AzQtComponents;
SliderComboPage::SliderComboPage(QWidget* parent)
: QWidget(parent)
, ui(new Ui::SliderComboPage)
{
ui->setupUi(this);
ui->verticalSliderComboWithoutValue->setRange(0, 100);
ui->verticalSliderCombo->setRange(0, 100);
ui->verticalSliderCombo->setValue(50);
ui->verticalSliderComboWithSoftRange->setRange(-500, 1000);
ui->verticalSliderComboWithSoftRange->setSoftRange(0, 500);
ui->verticalSliderComboWithSoftRange->setValue(250);
ui->verticalSliderDoubleComboWithoutValue->setRange(0.5, 250.5);
ui->verticalSliderDoubleCombo->setRange(0.5, 250.5);
ui->verticalSliderDoubleCombo->setValue(100.0);
ui->verticalSliderDoubleComboWithSoftRange->setRange(-500.0, 1000.0);
ui->verticalSliderDoubleComboWithSoftRange->setSoftRange(0.0, 500.0);
ui->verticalSliderDoubleComboWithSoftRange->setValue(250.0);
ui->curveSliderDoubleCombo->setRange(0.0, 1.0);
ui->curveSliderDoubleCombo->setCurveMidpoint(0.25);
ui->curveSliderDoubleCombo->setValue(0.25);
connect(ui->verticalSliderCombo, &SliderCombo::valueChanged, this, &SliderComboPage::sliderValueChanged);
connect(ui->verticalSliderComboWithoutValue, &SliderCombo::valueChanged, this, &SliderComboPage::sliderValueChanged);
connect(ui->verticalSliderComboWithSoftRange, &SliderCombo::valueChanged, this, &SliderComboPage::sliderValueChanged);
connect(ui->verticalSliderDoubleCombo, &SliderDoubleCombo::valueChanged, this, &SliderComboPage::sliderValueChanged);
connect(ui->verticalSliderDoubleComboWithoutValue, &SliderDoubleCombo::valueChanged, this, &SliderComboPage::sliderValueChanged);
connect(ui->verticalSliderDoubleComboWithSoftRange, &SliderDoubleCombo::valueChanged, this, &SliderComboPage::sliderValueChanged);
QString exampleText = R"(
A Slider Combo is a widget which combines a Slider and a Spin Box.<br/>
<pre>
#include <AzQtComponents/Components/Widgets/SliderCombo.h>
// Here's an example that creates a sliderCombo
SliderCombo* sliderCombo = new SliderCombo();
sliderCombo->setRange(0, 100);
// Set the starting value
sliderCombo->setValue(50);
</pre>
)";
ui->exampleText->setHtml(exampleText);
}
SliderComboPage::~SliderComboPage()
{
}
void SliderComboPage::sliderValueChanged()
{
if (auto doubleCombo = qobject_cast<SliderDoubleCombo*>(sender()))
qDebug() << "Double combo slider valueChanged:" << doubleCombo->value();
else if (auto combo = qobject_cast<SliderCombo*>(sender()))
qDebug() << "Combo slider valueChanged:" << combo->value();
else
Q_UNREACHABLE();
}
#include <Gallery/SliderComboPage.moc>
| 35.726316 | 134 | 0.755451 |
070680ba0caf9af24fe45dbe4ef4b26083313fbe | 1,480 | html | HTML | _includes/introduction.html | phui/phui.github.io | 165173101ec43d12ac96fa101a51a44318f9f914 | [
"MIT"
] | null | null | null | _includes/introduction.html | phui/phui.github.io | 165173101ec43d12ac96fa101a51a44318f9f914 | [
"MIT"
] | 2 | 2020-03-01T15:59:58.000Z | 2020-03-01T19:30:13.000Z | _includes/introduction.html | phui/phui.github.io | 165173101ec43d12ac96fa101a51a44318f9f914 | [
"MIT"
] | null | null | null | <h2 {% if site.style == 'dark' %}class="text-white"{% endif %}>About Me</h2>
<br/>
<p class="f4 mb-4" align="justify">
I am a Ph.D. student in the Informatics program at Indiana University Luddy
School of Informatics, Computing, and Engineering, working as part of the
<a target="_blank" href="https://osome.iuni.iu.edu/">Observatory on Social Media (OSoMe)</a>.
My adviser is Dr.
<a target="_blank" href="http://cnets.indiana.edu/fil/">Filippo Menczer</a>
My research focuses on
<a target="_blank" href="https://news.iu.edu/stories/2019/09/iub/releases/12-botslayer-launch.html">the detection and characterization of malicious campaigns on social media</a>.
</p>
<p class="f4 mb-4" align="justify">
I work a lot on Twitter dataset to
<div style="padding-left:2rem" align="left">
<ul class="f4 mb-4">
<li>Detect coordinated user accounts in malicious campaigns</li>
<li>Characterize malicious account behaviors at scale</li>
<li>Develop tools that everyone can use to combat malicious campaigns</li>
</ul>
</div>
<p class="f4 mb-4" align="justify">
I am one of the main developers in OSoMe (see my projects below).
In particular, I lead the development of BotSlayer, from streamer and database
to algorithms and containerization.
</p>
<p class="f4 mb-4" align="justify">
If you would like to <strong>contact me</strong>, please connect with me via the
Linkedin link above.
Alternatively, you can check out my recent papers below. Some of them contain my email
address.
</p>
| 46.25 | 178 | 0.739865 |
9118bb1e025cb2b6edea76aec7259d673c39d325 | 924 | dart | Dart | test/nested_binding_test.dart | MiCHiLU/polymer.dart | 164a558cca9def53e0bada37de058108d2b7acba | [
"BSD-3-Clause"
] | 1 | 2015-02-10T03:50:43.000Z | 2015-02-10T03:50:43.000Z | test/nested_binding_test.dart | MiCHiLU/polymer.dart | 164a558cca9def53e0bada37de058108d2b7acba | [
"BSD-3-Clause"
] | null | null | null | test/nested_binding_test.dart | MiCHiLU/polymer.dart | 164a558cca9def53e0bada37de058108d2b7acba | [
"BSD-3-Clause"
] | null | null | null | // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
library polymer.test.nested_binding_test;
import 'dart:async';
import 'dart:html';
import 'package:polymer/polymer.dart';
import 'package:unittest/unittest.dart';
import 'package:unittest/html_config.dart';
@CustomTag('my-test')
class MyTest extends PolymerElement {
final List fruits = toObservable(['apples', 'oranges', 'pears']);
final _testDone = new Completer();
MyTest.created() : super.created();
ready() {
expect($['fruit'].text.trim(), 'Short name: [pears]');
_testDone.complete();
}
}
main() => initPolymer().run(() {
useHtmlConfiguration();
setUp(() => Polymer.onReady);
test('ready called',
() => (querySelector('my-test') as MyTest)._testDone.future);
});
| 26.4 | 77 | 0.692641 |
a42a14792780314b50accd6210555aaa8d2feee7 | 1,233 | php | PHP | app/models/UserTransaction.php | architattrey/ezfy | 56376a5fe79327a4ceed574c8e5a0b8f4c54a68c | [
"MIT"
] | null | null | null | app/models/UserTransaction.php | architattrey/ezfy | 56376a5fe79327a4ceed574c8e5a0b8f4c54a68c | [
"MIT"
] | null | null | null | app/models/UserTransaction.php | architattrey/ezfy | 56376a5fe79327a4ceed574c8e5a0b8f4c54a68c | [
"MIT"
] | null | null | null | <?php
namespace App\models;
use Illuminate\Database\Eloquent\Model;
class UserTransaction extends Model
{
protected $table = 'user_transactions';
public $timestamps = false;
protected $primaryKey = 'id';
protected $fillable = [
'order_id',
'user_id',
'name',
'product_id',
'invoice_id',
'institute_id',
'manager_id',
'amount',
'no_of_clothes',
'status',
'user_type',
'dlvry_placed',
'dlvry_started',
'dlvry_washed',
'dlvry_delivered',
'start_type',
'remaining_washes',
'transaction_type',
'expire_date',
'created_at',
'updated_at',
];
#get manager
public function getManager(){
return $this->belongsTo('App\models\AppManagers','manager_id');
}
#get user
public function getUser(){
return $this->belongsTo('App\models\Appusers','user_id');
}
#get institute
public function getInstitute(){
return $this->belongsTo('App\models\Institutes','institute_id');
}
#get products
public function getProduct(){
return $this->belongsTo('App\models\Products','product_id');
}
}
| 24.66 | 72 | 0.583942 |
da35d845468d53b4140d323552762d21d323470c | 4,212 | php | PHP | www/wordpress/wp-content/plugins/magento2-2.3.5/app/code/Magento/Catalog/Test/Unit/Block/Adminhtml/Rss/NotifyStockTest.php | Fmendoza91/Proyecto-Final | 52ce40ac6af577fad47f1b51db697eca11f15d99 | [
"MIT"
] | null | null | null | www/wordpress/wp-content/plugins/magento2-2.3.5/app/code/Magento/Catalog/Test/Unit/Block/Adminhtml/Rss/NotifyStockTest.php | Fmendoza91/Proyecto-Final | 52ce40ac6af577fad47f1b51db697eca11f15d99 | [
"MIT"
] | null | null | null | www/wordpress/wp-content/plugins/magento2-2.3.5/app/code/Magento/Catalog/Test/Unit/Block/Adminhtml/Rss/NotifyStockTest.php | Fmendoza91/Proyecto-Final | 52ce40ac6af577fad47f1b51db697eca11f15d99 | [
"MIT"
] | null | null | null | <?php
/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
namespace Magento\Catalog\Test\Unit\Block\Adminhtml\Rss;
use Magento\Framework\TestFramework\Unit\Helper\ObjectManager as ObjectManagerHelper;
/**
* Class NotifyStockTest
* @package Magento\Catalog\Block\Adminhtml\Rss
*/
class NotifyStockTest extends \PHPUnit\Framework\TestCase
{
/**
* @var \Magento\Catalog\Block\Adminhtml\Rss\NotifyStock
*/
protected $block;
/**
* @var ObjectManagerHelper
*/
protected $objectManagerHelper;
/**
* @var \Magento\Backend\Block\Context|\PHPUnit_Framework_MockObject_MockObject
*/
protected $context;
/**
* @var \Magento\Catalog\Model\Rss\Product\NotifyStock|\PHPUnit_Framework_MockObject_MockObject
*/
protected $rssModel;
/**
* @var \Magento\Framework\App\Rss\UrlBuilderInterface|\PHPUnit_Framework_MockObject_MockObject
*/
protected $rssUrlBuilder;
/**
* @var \Magento\Framework\UrlInterface|\PHPUnit_Framework_MockObject_MockObject
*/
protected $urlBuilder;
/**
* @var array
*/
protected $rssFeed = [
'title' => 'Low Stock Products',
'description' => 'Low Stock Products',
'link' => 'http://magento.com/rss/feeds/index/type/notifystock',
'charset' => 'UTF-8',
'entries' => [
[
'title' => 'Low Stock Product',
'description' => 'Low Stock Product has reached a quantity of 1.',
'link' => 'http://magento.com/catalog/product/edit/id/1',
],
],
];
protected function setUp()
{
$this->rssModel = $this->getMockBuilder(\Magento\Catalog\Model\Rss\Product\NotifyStock::class)
->setMethods(['getProductsCollection', '__wakeup'])
->disableOriginalConstructor()->getMock();
$this->rssUrlBuilder = $this->createMock(\Magento\Framework\App\Rss\UrlBuilderInterface::class);
$this->urlBuilder = $this->createMock(\Magento\Framework\UrlInterface::class);
$this->objectManagerHelper = new ObjectManagerHelper($this);
$this->block = $this->objectManagerHelper->getObject(
\Magento\Catalog\Block\Adminhtml\Rss\NotifyStock::class,
[
'urlBuilder' => $this->urlBuilder,
'rssModel' => $this->rssModel,
'rssUrlBuilder' => $this->rssUrlBuilder
]
);
}
public function testGetRssData()
{
$this->rssUrlBuilder->expects($this->once())->method('getUrl')
->will($this->returnValue('http://magento.com/rss/feeds/index/type/notifystock'));
$item = $this->getMockBuilder(\Magento\Catalog\Model\Product::class)
->setMethods(['__sleep', '__wakeup', 'getId', 'getQty', 'getName'])
->disableOriginalConstructor()
->getMock();
$item->expects($this->once())->method('getId')->will($this->returnValue(1));
$item->expects($this->once())->method('getQty')->will($this->returnValue(1));
$item->expects($this->any())->method('getName')->will($this->returnValue('Low Stock Product'));
$this->rssModel->expects($this->once())->method('getProductsCollection')
->will($this->returnValue([$item]));
$this->urlBuilder->expects($this->once())->method('getUrl')
->with('catalog/product/edit', ['id' => 1, '_secure' => true, '_nosecret' => true])
->will($this->returnValue('http://magento.com/catalog/product/edit/id/1'));
$data = $this->block->getRssData();
$this->assertTrue(is_string($data['title']));
$this->assertTrue(is_string($data['description']));
$this->assertTrue(is_string($data['entries'][0]['description']));
$this->assertEquals($this->rssFeed, $data);
}
public function testGetCacheLifetime()
{
$this->assertEquals(600, $this->block->getCacheLifetime());
}
public function testIsAllowed()
{
$this->assertTrue($this->block->isAllowed());
}
public function testGetFeeds()
{
$this->assertEmpty($this->block->getFeeds());
}
}
| 34.52459 | 104 | 0.6085 |
2c65584e8066d874578c2f9877a23e7292123209 | 1,480 | py | Python | files/029 - distinct powers.py | farukara/Project-Euler-problems | 806fdbd797edd9929728b43cc428a55df50e1c01 | [
"MIT"
] | null | null | null | files/029 - distinct powers.py | farukara/Project-Euler-problems | 806fdbd797edd9929728b43cc428a55df50e1c01 | [
"MIT"
] | null | null | null | files/029 - distinct powers.py | farukara/Project-Euler-problems | 806fdbd797edd9929728b43cc428a55df50e1c01 | [
"MIT"
] | null | null | null | #!python3
# coding: utf-8
# Consider all integer combinations of a^b for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:
#
# 2^2=4, 2^3=8, 2^4=16, 2^5=32
# 3^2=9, 3^3=27, 3^4=81, 3^5=243
# 4^2=16, 4^3=64, 4^4=256, 4^5=1024
# 5^2=25, 5^3=125, 5^4=625, 5^5=3125
# If they are then placed in numerical order, with any repeats removed, we get the following sequence of 15 distinct terms:
#
# 4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
#
# How many distinct terms are in the sequence generated by a^b for 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?
#https://projecteuler.net/problem=29
from time import perf_counter
import matplotlib.pyplot as plt
from math import log
def using_set(limit):
seq = set()
for i in range(2,limit):
for j in range(2,limit):
seq.add(i**j)
#print(len(seq))
def using_list(limit):
l = []
for a in range(2,limit):
for b in range (2,limit):
c = a**b
if c not in l:
l.append(c)
#print(len(l))
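# Added note: the Project Euler answer itself (as opposed to the set-vs-list timing comparison
# below) can be computed directly with a set comprehension:
#     len({a**b for a in range(2, 101) for b in range(2, 101)})  # -> 9183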
yset = []
ylist = []
xline = []
i = 1
while i < 101:
start = perf_counter()
using_set(i)
end = perf_counter()
yset.append(end - start)
xline.append(i)
start = perf_counter()
using_list(i)
end = perf_counter()
ylist.append(end-start)
i += (i+int(log(i)))
print(i)
plt.plot(xline, yset, label="set")
plt.plot(xline, ylist, label="list")
plt.xlabel("number of items")
plt.ylabel("time (seconds)")
plt.title("Set vs List time performance")
plt.legend()
plt.show()
| 24.666667 | 123 | 0.608784 |
b0b1b73b79fb2b58e77f3e64156f5dbe39d92f34 | 285 | py | Python | teacher/test.py | HaihuaHaihua/EduManagementSys | 37fffd5f5bcfa03cc222a8c5c309ea6447bdfb2b | [
"Apache-2.0"
] | null | null | null | teacher/test.py | HaihuaHaihua/EduManagementSys | 37fffd5f5bcfa03cc222a8c5c309ea6447bdfb2b | [
"Apache-2.0"
] | null | null | null | teacher/test.py | HaihuaHaihua/EduManagementSys | 37fffd5f5bcfa03cc222a8c5c309ea6447bdfb2b | [
"Apache-2.0"
] | null | null | null | from django.shortcuts import render
from django.db import connection
# Create your views here.
cursor = connection.cursor()
cursor.execute("select * from studentinfo where classid=201800100")
rows = cursor.fetchall()
print(rows)
# return render(request,'index.html',context=rows)
| 20.357143 | 67 | 0.775439 |
a52a1592f336b364f5861dea520d9b6f31341de5 | 7,835 | lua | Lua | Exporter/Main.lua | wow-rp-addons/LibRPMedia | 7773a391a18b67f1b8d587420b8b4cef0030bdb1 | [
"Unlicense"
] | null | null | null | Exporter/Main.lua | wow-rp-addons/LibRPMedia | 7773a391a18b67f1b8d587420b8b4cef0030bdb1 | [
"Unlicense"
] | 15 | 2019-06-12T22:41:10.000Z | 2020-01-14T19:59:20.000Z | Exporter/Main.lua | wow-rp-addons/LibRPMedia | 7773a391a18b67f1b8d587420b8b4cef0030bdb1 | [
"Unlicense"
] | 1 | 2020-06-20T19:18:32.000Z | 2020-06-20T19:18:32.000Z | -- This file is licensed under the terms expressed in the LICENSE file.
-- Fiddle with the package path a bit to allow our working directory to
-- be the root of the repository prior to loading packages.
package.path = package.path .. ";./Exporter/?.lua;./Exporter/?/init.lua";
local Icons = require "Exporter.Icons";
local Log = require "Exporter.Log";
local Music = require "Exporter.Music";
local Resources = require "Exporter.Resources";
local Serializer = require "Exporter.Serializer";
local Utils = require "Exporter.Utils";
local etlua = require "etlua";
local lfs = require "lfs";
-- Upvalues.
local strformat = string.format;
local strmatch = string.match;
local strsub = string.sub;
-- Script usage text.
local USAGE_TEXT = [[
%s [flags]
Exports databases based on information obtained from public dumps.
Flags:
-c, --config Path to the configuration file for the exporter.
]];
-- Command line options table.
local options = {
-- Configuration file path.
config = nil,
};
-- Read options from the command line.
local argi = 1;
while argi < #arg do
local argv = arg[argi];
argi = argi + 1;
if argv == "-c" or argv == "--config" then
options.config = tostring(arg[argi]);
argi = argi + 1;
else
print(strformat(USAGE_TEXT, arg[0]));
print(strformat("Unknown option: %s", argv));
os.exit(1);
end
end
-- Validate options.
if not options.config or options.config == "" then
print(strformat(USAGE_TEXT, arg[0]));
print("No config file specified (--config)");
os.exit(1);
end
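-- Example invocation (illustrative; the path and the Lua runner are assumptions):
--   lua Exporter/Main.lua --config Exporter/config.lua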
-- Product configuration table. This represents the default configuration of
-- the exporter and is merged into by the user-specific config.
local config = {
-- Project token for this game variant.
project = nil,
-- Product name for obtaining data from the patch/CDN servers.
product = nil,
-- Region to use when connecting to patch/CDN server.
region = nil,
-- Name of the database file to generate.
database = nil,
-- Name of the manifest file to generate.
manifest = nil,
-- Name of the template file to use for the database output.
template = "Exporter/Templates/Database.lua.tpl",
-- Override mapping of DB2 names to explicit build versions to download.
databaseOverrides = {},
-- Settings for icon database generation.
icons = {
-- List of icon name patterns to exclude from the database.
excludeNames = {},
-- List of atlas name patterns to include.
includeAtlases = {},
},
-- Settings for music database generation.
music = {
-- Mapping of soundkit IDs to be explicitly included or excluded.
-- The value of each entry should be false to omit the soundkit,
-- true to include it, or a string to include it with a custom name.
--
-- If a kit is included, a name must be obtainable from the client
-- databases; if not, it will be skipped and a warning logged.
--
-- Custom names take priority over those found within the client
-- databases.
--
-- Soundkits present within this mapping will be overridden and
-- excluded if matching any of the files or names present in the
-- excludeFiles and excludeNames lists.
overrideKits = {},
-- List of file IDs to exclude from the database.
excludeFiles = {},
-- List of file/soundkit name patterns to exclude from the database.
excludeNames = {},
},
-- If true, enable debug logging in the exporter.
verbose = os.getenv("DEBUG") == "1",
-- Path to a directory for storing cached content.
cacheDir = Resources.GetCacheDirectory(),
};
-- Extracts the major and minor version components from the library script.
local function GetLibraryVersion()
local libraryFile = assert(io.open("LibRPMedia-1.0.lua", "rb"));
local library = assert(libraryFile:read("*a"));
libraryFile:close();
local versionMajor = strmatch(library, "MODULE_MAJOR = (%b\"\")");
if not versionMajor then
error("Failed to extract major version from library.");
end
local versionMinor = strmatch(library, "MODULE_MINOR = (%d+)");
if not versionMinor then
error("Failed to extract minor version from library.");
end
return strsub(versionMajor, 2, -2), tonumber(versionMinor);
end
-- Run the actual script in protected mode so we can log fatal errors cleanly.
local ok, err = pcall(function()
-- Read in the user configuration and merge it.
Log.Info("Loading configuration file...", options.config);
config = Utils.Merge(config, Serializer.LoadFile(options.config));
-- Configure modules.
Log.Info("Configuring exporter...");
Log.SetLogLevel(config.verbose and Log.Level.Debug or Log.Level.Info);
Resources.SetCacheDirectory(config.cacheDir);
Resources.SetDatabaseVersionOverrides(config.databaseOverrides);
Resources.SetProductName(config.product);
Resources.SetRegion(config.region);
Icons.SetExcludedNames(config.icons.excludeNames);
Icons.SetIncludedAtlases(config.icons.includeAtlases);
Music.SetOverrideKits(config.music.overrideKits);
Music.SetExcludedFiles(config.music.excludeFiles);
Music.SetExcludedNames(config.music.excludeNames);
-- Load the manifest if one exists.
local manifest;
if lfs.attributes(config.manifest, "mode") == "file" then
Log.Info("Loading manifest...", { path = config.manifest });
local ok, result = pcall(Serializer.LoadFile, config.manifest);
if not ok then
Log.Warn("Failed to load manifest.", { err = result });
else
manifest = result;
end
end
-- Create a manifest if one wasn't loaded.
if not manifest then
Log.Info("Creating new manifest...");
manifest = {};
end
-- Persist build information in the manifest.
local build = Resources.GetBuildInfo();
manifest.build = { bkey = build.bkey, version = build.version };
Log.Info("Obtained build information.", manifest.build);
-- Update the manifest for each database type.
manifest.music = Music.GetManifest(manifest.music or {});
manifest.icons = Icons.GetManifest(manifest.icons or {});
-- Write the manifest out.
Log.Info("Writing manifest file...", { path = config.manifest });
Serializer.SaveFile(config.manifest, manifest, Serializer.OptionsPretty);
-- Generate the actual database contents.
local database = {};
database.music = Music.GetDatabase(manifest.music);
database.icons = Icons.GetDatabase(manifest.icons);
-- Read in the main script to obtain the library version.
Log.Info("Fetching library version...");
local versionMajor, versionMinor = GetLibraryVersion();
-- Read in the template file and render the database.
Log.Info("Loading template file...", { path = config.template });
local templateFile = assert(io.open(config.template, "rb"));
local template = assert(templateFile:read("*a"));
templateFile:close();
Log.Info("Rendering template contents...");
local content = assert(etlua.render(template, {
-- Data.
build = build,
config = config,
database = database,
manifest = manifest,
-- Versioning.
versionMajor = versionMajor,
versionMinor = versionMinor,
-- Functions.
Dump = Serializer.Dump,
}));
-- Write the rendered template out.
Log.Info("Writing database contents...", { path = config.database });
local databaseFile = assert(io.open(config.database, "wb"));
assert(databaseFile:write(content));
databaseFile:close();
end);
if not ok then
Log.Fatal("Fatal error during export.", { err = err });
end
| 34.065217 | 78 | 0.666496 |
e25e9af1e2fb1c254f94f1f3897b45a3e9f81d63 | 10,929 | py | Python | QBG/AutoFormula/AutoFormula.py | GYMS-PKU/Daily-Frequency-Quant | 808eda9930efecff04ecf98abf617404cadd0003 | [
"MIT"
] | 3 | 2021-11-21T04:35:04.000Z | 2022-03-04T09:19:53.000Z | QBG/AutoFormula/AutoFormula.py | GYMS-PKU/Daily-Frequency-Quant | 808eda9930efecff04ecf98abf617404cadd0003 | [
"MIT"
] | null | null | null | QBG/AutoFormula/AutoFormula.py | GYMS-PKU/Daily-Frequency-Quant | 808eda9930efecff04ecf98abf617404cadd0003 | [
"MIT"
] | 5 | 2021-10-03T00:00:22.000Z | 2022-03-07T09:02:00.000Z | # Copyright (c) 2021 Dai HBG
"""
该代码定义一个调用FormulaTree类生成公式树的自动化公式生成器,然后返回一个公式
开发日志:
2021-09-13
-- 更新:AutoFormula类初始化需要传入一个data类
2021-09-20
-- 更新:新增多个算子
2021-10-15
-- 更新:test_formula方法新增字段fix_weekday,可以指定计算周几的信号
2021-10-22
-- 更新:formula解析新增更多类型
2021-11-25
-- 更新:新增2_num_num_num类型算子的解析支持
"""
import numpy as np
import sys
import datetime
sys.path.append('C:/Users/Administrator/Desktop/Daily-Frequency-Quant/QBG')
sys.path.append('C:/Users/HBG/Desktop/Repositories/Daily-Frequency-Quant/QBG')
from Tester.AutoTester import *
from AutoFormula.FormulaTree import *
from AutoFormula.SignalGenerator import *
class AutoFormula:
def __init__(self, start_date: str, end_date: str, data: Data, height: int = 3, symmetric: bool = False):
"""
:param start_date: 该公式树
:param end_date:
:param data: Data实例
:param height: 最大深度
:param symmetric: 是否对称
"""
self.height = height
self.symmetric = symmetric
self.start_date = start_date
self.end_date = end_date
self.tree_generator = FormulaTree()
self.tree = self.tree_generator.init_tree(height=self.height, symmetric=self.symmetric, dim_structure='2_2')
self.operation = SignalGenerator(data=data)
self.formula_parser = FormulaParser()
self.AT = AutoTester()
    def cal_formula(self, tree: FormulaTree, data_dic: dict, return_type: str = 'signal') -> np.array:  # recursively evaluate the formula tree
        """
        :param tree: the formula tree to evaluate
        :param data_dic: dictionary of raw data; each field name maps to its matrix
        :param return_type: form of the return value ('signal' or 'str')
        :return: the computed signal matrix (or the formula string when return_type is 'str')
        """
if return_type == 'signal':
if tree.variable_type == 'data':
if type(tree.name) == int or type(tree.name) == float:
                    return tree.name  # the number is attached directly to the node; ideally numeric leaves should just return the number
                return data_dic[tree.name].copy()  # the current version needs to return a copy
            elif tree.variable_type == 'intra_data':
                if tree.num_1 is not None:
                    return data_dic[tree.name][:, tree.num_1, :].copy()
                else:
                    return data_dic[tree.name].copy()  # no number attached, so return the original data
else:
if tree.operation_type == '1':
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic, return_type))
if tree.operation_type == '1_num':
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic, return_type),
tree.num_1)
if tree.operation_type == '1_num_num':
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic, return_type),
tree.num_1, tree.num_2)
if tree.operation_type == '1_num_num_num':
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic, return_type),
tree.num_1, tree.num_2, tree.num_3)
                if tree.operation_type == '2':  # need to check whether a numeric operand is present
if tree.num_1 is None:
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic,
return_type),
self.cal_formula(tree.right, data_dic,
return_type))
else:
if tree.left is not None:
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic,
return_type),
tree.num_1)
else:
return self.operation.operation_dic[tree.name](tree.num_1,
self.cal_formula(tree.right, data_dic,
return_type))
if tree.operation_type == '2_num':
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic, return_type),
self.cal_formula(tree.right, data_dic, return_type),
tree.num_1)
if tree.operation_type == '2_num_num':
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic, return_type),
self.cal_formula(tree.right, data_dic, return_type),
tree.num_1, tree.num_2)
if tree.operation_type == '2_num_num_num':
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic, return_type),
self.cal_formula(tree.right, data_dic, return_type),
tree.num_1, tree.num_2, tree.num_3)
if tree.operation_type == '3':
return self.operation.operation_dic[tree.name](self.cal_formula(tree.left, data_dic, return_type),
self.cal_formula(tree.middle, data_dic, return_type),
self.cal_formula(tree.right, data_dic, return_type))
if return_type == 'str':
if tree.variable_type == 'data':
                return tree.name  # return the string
            elif tree.variable_type == 'intra_data':  # here we also need to check whether a number is present
                if tree.num_1 is not None:
return '{' + tree.name + ',{}'.format(tree.num_1) + '}'
else:
return '{' + tree.name + '}'
else:
if tree.operation_type == '1':
return tree.name + '{' + (self.cal_formula(tree.left, data_dic, return_type)) + '}'
if tree.operation_type == '1_num':
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + str(
tree.num_1) + '}'
if tree.operation_type == '1_num_num':
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + str(
tree.num_1) + ',' + str(tree.num_2) + '}'
if tree.operation_type == '1_num_num_num':
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + str(
tree.num_1) + ',' + str(tree.num_2) + ',' + str(tree.num_3) + '}'
                if tree.operation_type == '2':  # need to check whether a numeric operand is present
                    if tree.num_1 is None:
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + \
self.cal_formula(tree.right, data_dic, return_type) + '}'
else:
if tree.left is not None:
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + \
str(tree.num_1) + '}'
else:
return tree.name + '{' + str(tree.num_1) + ',' + \
self.cal_formula(tree.right, data_dic, return_type) + '}'
if tree.operation_type == '2_num':
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + \
self.cal_formula(tree.right, data_dic, return_type) + ',' + \
str(tree.num_1) + '}'
if tree.operation_type == '2_num_num':
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + \
self.cal_formula(tree.right, data_dic, return_type) + ',' + \
str(tree.num_1) + ',' + str(tree.num_2) + '}'
if tree.operation_type == '2_num_num_num':
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + \
self.cal_formula(tree.right, data_dic, return_type) + ',' + \
str(tree.num_1) + ',' + str(tree.num_2) + ',' + str(tree.num_3) + '}'
if tree.operation_type == '3':
return tree.name + '{' + self.cal_formula(tree.left, data_dic, return_type) + ',' + \
self.cal_formula(tree.middle, data_dic, return_type) + ',' + \
self.cal_formula(tree.right, data_dic, return_type) + '}'
def test_formula(self, formula: str, data: Data, start_date: str = None, end_date: str = None,
                     prediction_mode: bool = False, fix_weekday: int = None):
"""
:param formula: 需要测试的因子表达式,如果是字符串形式,需要先解析成树
:param data: Data类
:param start_date: 如果不提供则按照Data类默认的来
:param end_date: 如果不提供则按照Data类默认的来
:param prediction_mode: 是否是最新预测模式,是的话不需要测试,只生成signal
:param fix_weekday: 指定统计哪些日期的信号
:return: 返回统计值以及该因子产生的信号矩阵
"""
if not prediction_mode:
if type(formula) == str:
formula = self.formula_parser.parse(formula)
            signal = self.cal_formula(formula, data.data_dic)  # for convenience, the factor is currently computed over the whole backtest window regardless
if start_date is None:
start_date = str(data.start_date)
if end_date is None:
end_date = str(data.end_date)
start = data.get_real_date(start_date, direction='forward')
end = data.get_real_date(end_date, direction='backward')
# return signal,start,end
if fix_weekday is None:
return self.AT.test(signal[start:end + 1], data.ret[start + 1:end + 2], top=data.top[start:end + 1]), \
signal
else:
tmp = [i for i in range(start, end + 1) if data.position_date_dic[i].weekday() == (fix_weekday - 1) % 7]
return self.AT.test(signal[tmp, :], data.ret[[i + 1 for i in tmp], :], top=data.top[tmp, :]), signal
else:
if type(formula) == str:
formula = self.formula_parser.parse(formula)
return self.cal_formula(formula, data.data_dic)
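# Minimal usage sketch (added for illustration; variable names are hypothetical and `data`
# must be a prepared Data instance):
#     af = AutoFormula(start_date='2021-01-01', end_date='2021-06-30', data=data)
#     formula_str = af.cal_formula(af.tree, data.data_dic, return_type='str')  # random tree -> string
#     stats, signal = af.test_formula(formula_str, data)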
| 57.219895 | 120 | 0.500503 |
143dc26d901417e34c8d09b025a5b44d9964673d | 512 | tsx | TypeScript | packages/@headlessui-react/src/internal/open-closed.tsx | colinking/headlessui | 5442ea1b247795458cc9c8f63adef6b6f85d6b30 | [
"MIT"
] | 14,428 | 2020-09-16T18:13:23.000Z | 2022-03-31T18:50:40.000Z | packages/@headlessui-react/src/internal/open-closed.tsx | colinking/headlessui | 5442ea1b247795458cc9c8f63adef6b6f85d6b30 | [
"MIT"
] | 728 | 2020-09-16T23:15:14.000Z | 2022-03-31T23:34:35.000Z | packages/@headlessui-react/src/internal/open-closed.tsx | colinking/headlessui | 5442ea1b247795458cc9c8f63adef6b6f85d6b30 | [
"MIT"
] | 630 | 2020-09-24T20:06:54.000Z | 2022-03-31T09:55:56.000Z | import React, {
createContext,
useContext,
// Types
ReactNode,
ReactElement,
} from 'react'
let Context = createContext<State | null>(null)
Context.displayName = 'OpenClosedContext'
export enum State {
Open,
Closed,
}
export function useOpenClosed() {
return useContext(Context)
}
interface Props {
value: State
children: ReactNode
}
export function OpenClosedProvider({ value, children }: Props): ReactElement {
return <Context.Provider value={value}>{children}</Context.Provider>
}
| 17.066667 | 78 | 0.726563 |
178ecb2c2ffa4ba2340d7a9fb996d5738b94da3f | 63 | sql | SQL | modules/core/db/update/hsql/19/190208-3-add-reference-from-vet-to-employee.sql | skatova/cuba-petclinic-data-model-composition | 28671237b883f57abf043e7f61f90224c3f5ef73 | [
"Apache-2.0"
] | 1 | 2019-06-30T19:38:21.000Z | 2019-06-30T19:38:21.000Z | modules/core/db/update/hsql/19/190208-3-add-reference-from-vet-to-employee.sql | skatova/cuba-petclinic-data-model-composition | 28671237b883f57abf043e7f61f90224c3f5ef73 | [
"Apache-2.0"
] | 1 | 2019-09-03T13:05:40.000Z | 2019-09-03T13:05:40.000Z | modules/core/db/update/hsql/19/190208-3-add-reference-from-vet-to-employee.sql | skatova/cuba-petclinic-data-model-composition | 28671237b883f57abf043e7f61f90224c3f5ef73 | [
"Apache-2.0"
] | 4 | 2019-07-18T09:49:12.000Z | 2021-03-19T09:58:01.000Z | alter table PETCLINIC_VET add column EMPLOYEE_ID varchar(36) ;
| 31.5 | 62 | 0.825397 |
b70ab27291e972a07101b16bbcdc72dad0db6c31 | 1,747 | kt | Kotlin | src/backend/codecc/core/common/common-redis/src/main/kotlin/com/tencent/devops/common/redis/RedissionConfig.kt | austingod/bk-ci | 90df2f0629160a814419c85c922de8a5d1bd7b4d | [
"MIT"
] | null | null | null | src/backend/codecc/core/common/common-redis/src/main/kotlin/com/tencent/devops/common/redis/RedissionConfig.kt | austingod/bk-ci | 90df2f0629160a814419c85c922de8a5d1bd7b4d | [
"MIT"
] | null | null | null | src/backend/codecc/core/common/common-redis/src/main/kotlin/com/tencent/devops/common/redis/RedissionConfig.kt | austingod/bk-ci | 90df2f0629160a814419c85c922de8a5d1bd7b4d | [
"MIT"
] | null | null | null | package com.tencent.bk.codecc.codeccjob.component
import org.redisson.Redisson
import org.redisson.api.RRateLimiter
import org.redisson.api.RateIntervalUnit
import org.redisson.api.RateType
import org.redisson.api.RedissonClient
import org.redisson.config.Config
import org.springframework.beans.factory.annotation.Value
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
@Configuration
class RedissionConfig {
@Value("\${spring.redis.host}")
private lateinit var redisHost: String
@Value("\${spring.redis.port}")
private lateinit var redisPort: String
@Value("\${spring.redis.password}")
private lateinit var redisPassword: String
@Value("\${spring.redis.database:0}")
private var redisDB: Int = 0
@Value("\${issue.submit.limit.intervalLimit:25}")
private var intervalLimit: Long = 25
@Value("\${issue.submit.limit.intervalSeconds:1}")
private var intervalSeconds: Long = 1
@Bean
fun issueSubmitRateLimiter(): RRateLimiter {
val config = Config()
config.useSingleServer()
.setAddress("redis://$redisHost:$redisPort").setPassword(redisPassword).database = redisDB
val client = Redisson.create(config)
val rateLimiter = client.getRateLimiter("SUBMIT_ISSUE_RATE_LIMITER")
rateLimiter.trySetRate(RateType.OVERALL, intervalLimit, intervalSeconds, RateIntervalUnit.SECONDS)
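        // Illustrative usage by an injected consumer (names hypothetical):
        //   if (issueSubmitRateLimiter.tryAcquire()) { submitIssue(defect) }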
return rateLimiter
}
@Bean
fun redissionClient(): RedissonClient {
val config = Config()
config.useSingleServer()
.setAddress("redis://$redisHost:$redisPort").setPassword(redisPassword).database = redisDB
return Redisson.create(config)
}
} | 32.962264 | 106 | 0.724671 |
0dd943fbdcdb57dae789ba2e8efa53326b177098 | 2,007 | rb | Ruby | lib/rawk_log/stat.rb | lightman76/rawk | 015577a815a8b743443405a675e1bdb26c109eab | [
"Beerware"
] | null | null | null | lib/rawk_log/stat.rb | lightman76/rawk | 015577a815a8b743443405a675e1bdb26c109eab | [
"Beerware"
] | null | null | null | lib/rawk_log/stat.rb | lightman76/rawk | 015577a815a8b743443405a675e1bdb26c109eab | [
"Beerware"
] | null | null | null | module RawkLog
class Stat
DEFAULT_LABEL_SIZE = 30
HEADER = "Count Sum(secs) Max Median Avg Min Std"
def initialize(key)
@key=key
@min = nil
@max = nil
@sum = 0
@sum_squares = 0
@count = 0
@values = []
end
def add(value)
value=1.0*value
@count+=1
@min = value unless @min
@min = value if value<@min
@max = value unless @max
@max = value if value>@max
@sum += value
@sum_squares += value*value
@values << value
end
def header(label_size = DEFAULT_LABEL_SIZE)
sprintf "%*s %s" % [-label_size, "Request", HEADER]
end
def key
@key
end
def count
@count
end
def sum
@sum
end
def min
@min
end
def max
@max
end
def average
@count > 0 ? @sum/@count : @sum
end
def median
return nil unless @values
l = @values.length
return nil unless l>0
@values.sort!
return (@values[l/2-1]+@values[l/2])/2 if l%2==0
@values[(l+1)/2-1]
end
def standard_deviation
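      # population standard deviation via the computational formula
      # sqrt((sum_of_squares - sum**2 / n) / n), using the running sums kept by add()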
return 0 if @count<=1
Math.sqrt((@sum_squares - (@sum*@sum/@count))/ (@count))
end
def to_s(label_size = DEFAULT_LABEL_SIZE)
if count > 0
sprintf("%*s %6d %9.2f %7d %7d %7d %7d %7d", -label_size, key, count, sum, max*1000.0, median*1000.0, average*1000.0, min*1000.0, standard_deviation*1000.0)
else
sprintf("%*s %6d", -label_size, key, 0)
end
end
def self.test
stat = Stat.new(30)
stat.add(5)
stat.add(6)
stat.add(8)
stat.add(9)
results = [7==stat.median ? "median Success" : "median Failure"]
results <<= (7==stat.average ? "average Success" : "average Failure")
results <<= (158==(stat.standard_deviation*100).round ? "std Success" : "std Failure")
puts results.join("\n")
exit (results.select { |m| m =~ /Failure/ }.size)
end
end
end
| 20.690722 | 164 | 0.539113 |
43e76c38a208b2aa257f43cd9673d42d79ea47ad | 532 | tsx | TypeScript | __tests__/modal.test.tsx | jokereven/02metaverse | f196d9272b3820ceca9307af014aac7118e1d1ac | [
"MIT"
] | 15 | 2021-10-20T07:08:59.000Z | 2022-03-07T06:26:02.000Z | __tests__/modal.test.tsx | jokereven/02metaverse | f196d9272b3820ceca9307af014aac7118e1d1ac | [
"MIT"
] | 26 | 2021-10-13T03:41:50.000Z | 2022-03-21T13:10:33.000Z | __tests__/modal.test.tsx | jokereven/02metaverse | f196d9272b3820ceca9307af014aac7118e1d1ac | [
"MIT"
] | 14 | 2021-10-13T02:38:55.000Z | 2022-03-26T07:50:09.000Z | import '@testing-library/jest-dom/extend-expect'
import { render, waitFor } from '@testing-library/react'
import Modal from '../components/minor/modal'
describe('Drawer Component', () => {
it('Render Component', async () => {
const dom = render(
<Modal isOpen={true} type='drawer'>
<p>drawer</p>
</Modal>
)
const { getByText } = dom
const lazyContent = await waitFor(() => getByText(/drawer/))
expect(lazyContent).toBeInTheDocument()
})
})
| 26.6 | 68 | 0.577068 |
dbfd9d05ea412b1469421775d95cb6673c059527 | 1,814 | php | PHP | app/Http/Controllers/SignController.php | Alirezamosavi/sss | d44437d256b52f79b60f206a877dcb43602ba3b8 | [
"MIT"
] | null | null | null | app/Http/Controllers/SignController.php | Alirezamosavi/sss | d44437d256b52f79b60f206a877dcb43602ba3b8 | [
"MIT"
] | null | null | null | app/Http/Controllers/SignController.php | Alirezamosavi/sss | d44437d256b52f79b60f206a877dcb43602ba3b8 | [
"MIT"
] | null | null | null | <?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\signature;
use Image; // this is composer require intervention/image
use Illuminate\Support\Facades\Response;
class SignController extends Controller
{
    public function index() // returns all records, paginated
{
$books = signature::latest()->paginate(2);
// return array_reverse($books);
return response()->json($books);
}
    public function signe() // returns every record as a plain array, newest first
{
$books = signature::all()->toArray();
return array_reverse($books);
}
    function insert_image(Request $request) // stores a new signature image
{
$image_file = $request->user_image;
$form_data = array(
'user_image' => $image_file
);
signature::create($form_data);
return response()
->json(['status' => 'successs'], 200);
}
    function fetch_image($image_id) // returns a single stored image as an HTTP image response
    {
        $image = signature::findOrFail($image_id); // the stored value is raw image data, so convert it back to an image
        $image_file = Image::make($image->user_image); // Image comes from intervention/image (composer require intervention/image)
        $response = Response::make($image_file->encode('jpeg')); // send the encoded JPEG back to the frontend
$response->header('Content-Type', 'image/jpeg');
return $response;
}
    public function destroy($id) // deletes a record (and its file, if a file path exists on disk)
    {
        $user = signature::findOrFail($id);
        $filePath = $user->user_image; // the column written by insert_image()
        if (file_exists($filePath)) {
            unlink($filePath);
        }
}
$user -> delete();
return back()->with('success', 'success delete');
}
}
| 25.194444 | 123 | 0.594267 |
da5f0c76c6e1941f95c057808d2abd29e1df1fa6 | 625 | php | PHP | application/models/Bagcolor_Model.php | kyasararamrana/srb | a37c7cd0547b09885d7856e3ea7d06302dd0dfb2 | [
"MIT"
] | null | null | null | application/models/Bagcolor_Model.php | kyasararamrana/srb | a37c7cd0547b09885d7856e3ea7d06302dd0dfb2 | [
"MIT"
] | null | null | null | application/models/Bagcolor_Model.php | kyasararamrana/srb | a37c7cd0547b09885d7856e3ea7d06302dd0dfb2 | [
"MIT"
] | null | null | null | <?php
/**
*
*/
class Bagcolor_Model extends CI_Model
{
function __construct()
{
parent::__construct();
}
public $table = 'ecom_bagcolor';
//insert
public function insert($post_data='')
{
return $this->db->insert($this->table,$post_data);
}
//get bag colors
public function get_bagcolor()
{
$this->db->order_by('bag_color','asc');
return $this->db->get($this->table)->result();
}
//get active bag colors
public function get_active_bagcolor()
{
$this->db->order_by('bag_color','asc');
return $this->db->get_where($this->table, array('status' => 1))->result();
}
}
?>
| 18.939394 | 78 | 0.6192 |
cd0022ac67c2760fd54cff75ef75001c958da715 | 2,070 | ru | Ruby | systems/2014/english-russian/newstest2014.onlineA.0.en-ru/a5a3109e4265d86b7741a715b2a0eb71.newstest2014.onlineA.0.en-ru.ru | fredblain/docQE-corp | 6c03de96252f128e257cbbac633756e5280da0e6 | [
"BSD-3-Clause"
] | null | null | null | systems/2014/english-russian/newstest2014.onlineA.0.en-ru/a5a3109e4265d86b7741a715b2a0eb71.newstest2014.onlineA.0.en-ru.ru | fredblain/docQE-corp | 6c03de96252f128e257cbbac633756e5280da0e6 | [
"BSD-3-Clause"
] | null | null | null | systems/2014/english-russian/newstest2014.onlineA.0.en-ru/a5a3109e4265d86b7741a715b2a0eb71.newstest2014.onlineA.0.en-ru.ru | fredblain/docQE-corp | 6c03de96252f128e257cbbac633756e5280da0e6 | [
"BSD-3-Clause"
] | null | null | null | Существует нет баз данных группы в министерстве внутренних дел «Братский круг», которым США связаны Лепс
Группа «Братско круг», к которому Вашингтон связал певец Григорий Лепс, не появляются в базах данных агентств безопасности России.
Как отметил "Интерфаксу" в правоохранительных органах РФ, после сообщения о Григорий Лепс вводится от министерства финансов США в черные списки по подозрению в причастности с Евразийского преступного синдиката «Братский круг» вышел в средствах массовой информации, были проверены все базы данных, но такой группы не был найден.
Органы безопасности считают, что распространение такой информации помогает поддерживать миф о так называемой «русской мафии», который якобы работает за границей.
«Даже в само - «Братско круг», название есть определенный элемент таинственности, содержащие намек на тайные ложи.
«Все это очень похоже на пиар-ход,» сказал источник агентства.
31 октября Министерство финансов США объявил вступление Артур Бадалян, Григорий Lepsveridze, Вадим Лялин, Сергей Москаленко, Яков Ribalskiy и Игорь Шлыков в свои черные списки по подозрению в причастности с Евразийского преступного синдиката «Братский круг».
Лиц, внесенных в этот список подозреваемых ссылок на определенный Владислав Леонтьев и Гафур Рахимов, которые считаются влиятельных членов преступных групп и находятся на санкции США списки с февраля 2012 года.
По данным американских властей Григорий Лепс» служит курьера для доставки денег от имени Владислав Леонтьев».
На Григорий Лепс производственный центр они отказались комментировать США обвинения, выдвинутые на певицу, назвав его «чепухой».
В 2011 году президент Соединенных Штатов Америки Barack Obama добавляется в список «существенных транснациональных организованных преступных групп» синдиката «Братско круг» и приказ Министерства финансов «нарушают и тормозят их глобальных преступных операций».
Вступления человека в черные списки из американского Департамента казначейства влечет за собой блокирование его активы в США и запрет деловые контакты с ним лиц, проживающих в Соединенных Штатах.
| 159.230769 | 327 | 0.836715 |
5717d03b93efec91c2ec308ac5e36ec8df734bf8 | 26,977 | js | JavaScript | src/mui/input/ReferenceArrayInput.spec.js | rwaidaAlmehanni/admin-on-rest-kindian | 45a91d9471ab33c783816a47526796316d239707 | [
"MIT"
] | 4,589 | 2016-09-02T12:39:44.000Z | 2022-02-21T07:19:10.000Z | src/mui/input/ReferenceArrayInput.spec.js | rwaidaAlmehanni/admin-on-rest-kindian | 45a91d9471ab33c783816a47526796316d239707 | [
"MIT"
] | 1,421 | 2016-09-04T10:49:32.000Z | 2018-07-24T15:15:22.000Z | src/mui/input/ReferenceArrayInput.spec.js | rwaidaAlmehanni/admin-on-rest-kindian | 45a91d9471ab33c783816a47526796316d239707 | [
"MIT"
] | 1,098 | 2016-09-03T10:57:10.000Z | 2022-02-24T12:07:45.000Z | import React from 'react';
import assert from 'assert';
import { shallow } from 'enzyme';
import sinon from 'sinon';
import {
getSelectedReferencesStatus,
getDataStatus,
ReferenceArrayInput,
REFERENCES_STATUS_READY,
REFERENCES_STATUS_INCOMPLETE,
REFERENCES_STATUS_EMPTY,
} from './ReferenceArrayInput';
describe('Reference Array Input', () => {
describe('<getSelectedReferencesStatus />', () => {
it('should return ready if input value has no references', () => {
const test = (input, referenceRecords) =>
assert.equal(
getSelectedReferencesStatus(input, referenceRecords),
REFERENCES_STATUS_READY
);
test({}, []);
test({ value: null }, []);
test({ value: false }, []);
test({ value: [] }, []);
});
it('should return empty if there is some input values but the referenceRecords is empty', () => {
assert.equal(
getSelectedReferencesStatus({ value: [1, 2] }, []),
REFERENCES_STATUS_EMPTY
);
});
it('should return incomplete if there is less data in the referenceRecords than values in the input value', () => {
assert.equal(
getSelectedReferencesStatus({ value: [1, 2] }, [{ id: 1 }]),
REFERENCES_STATUS_INCOMPLETE
);
});
it('should return ready if there is as much data in the referenceRecords as there are values in the input value', () => {
assert.equal(
getSelectedReferencesStatus({ value: [1, 2] }, [
{ id: 1 },
{ id: 2 },
]),
REFERENCES_STATUS_READY
);
});
});
describe('getDataStatus', () => {
const data = {
input: {},
matchingReferences: null,
referenceRecords: [],
translate: x => `*${x}*`,
};
it('should indicate whether the data are ready or not', () => {
const test = (data, waiting, explanation) =>
assert.equal(getDataStatus(data).waiting, waiting, explanation);
test(
data,
true,
'we must wait until the references fetch is finished and there is no reference already associated with the resource.'
);
test(
{ ...data, input: { value: [1, 2] } },
true,
'we must wait until the references fetch is finished and linked references data are not found.'
);
test(
{
...data,
input: { value: [1, 2] },
referenceRecords: [{ id: 1 }],
},
false,
'it is ready if the references fetch is not finished but at least one linked reference data are found.'
);
test(
{ ...data, input: { value: [1, 2] }, matchingReferences: [] },
false,
'it is ready if none linked reference data are not found, but the references fetch is finished.'
);
test(
{
...data,
input: { value: [1, 2] },
matchingReferences: { error: 'error' },
},
false,
'it is ready if linked reference data are not found, but the references fetch is finished with error.'
);
});
it('should return an error if needed', () => {
const test = (data, error, explanation) => {
const status = getDataStatus(data);
assert.equal(status.waiting, false);
assert.equal(status.error, error, explanation);
};
test(
{
...data,
matchingReferences: { error: 'error' },
},
'*aor.input.references.all_missing*',
'there is an error if the references fetch fails and there is no linked reference'
);
test(
{
...data,
matchingReferences: { error: 'error' },
input: { value: [1] },
},
'*aor.input.references.all_missing*',
'there is an error if the references fetch fails and there is all linked reference without data'
);
test(
{
...data,
matchingReferences: { error: 'error' },
input: { value: [1, 2] },
referenceRecords: [{ id: 1 }],
},
null,
'there is no error if the references fetch fails but there is at least one linked reference with data'
);
test(
{
...data,
matchingReferences: [{ id: 2 }],
input: { value: [1, 2] },
referenceRecords: [],
},
null,
'there is no error if there is all linked references without data but the references fetch succeeds'
);
test(
{
...data,
matchingReferences: [],
input: { value: [1, 2] },
referenceRecords: [],
},
null,
'there is no error if there is a linked reference without data but the references fetch succeeds even empty'
);
test(
{
...data,
matchingReferences: [{ id: 1 }],
},
null,
'there is no error if the references fetch succeeds and there is no linked reference'
);
});
it('should return a warning if needed', () => {
const test = (data, warning, explanation) => {
const status = getDataStatus(data);
assert.equal(status.waiting, false);
assert.equal(status.error, null);
assert.equal(status.warning, warning, explanation);
};
test(
{
...data,
matchingReferences: { error: 'error on fetch' },
input: { value: [1] },
referenceRecords: [{ id: 1 }],
},
'*error on fetch*',
'there is a warning if the references fetch fails but there is linked references with data'
);
test(
{
...data,
matchingReferences: [{ id: 3 }],
input: { value: [1, 2] },
referenceRecords: [{ id: 2 }],
},
'*aor.input.references.many_missing*',
'there is a warning if there is at least one linked reference without data but the references fetch succeeds'
);
test(
{
...data,
matchingReferences: [],
input: { value: [1, 2] },
referenceRecords: [{ id: 1 }, { id: 2 }],
},
null,
'there is no warning if there is all linked references with data and the references fetch succeeds even empty'
);
test(
{
...data,
matchingReferences: [],
},
null,
'there is no warning if the references fetch succeeds and there is no linked references'
);
});
it('should return choices consistent with the data status', () => {
const test = (data, warning, choices, explanation) => {
const status = getDataStatus(data);
assert.equal(status.waiting, false);
assert.equal(status.error, null);
assert.equal(status.warning, warning);
assert.deepEqual(status.choices, choices, explanation);
};
test(
{
...data,
matchingReferences: { error: 'error on fetch' },
input: { value: [1, 2] },
referenceRecords: [{ id: 1 }, { id: 2 }],
},
'*error on fetch*',
[{ id: 1 }, { id: 2 }],
'if the references fetch fails the choices are the linked references'
);
test(
{
...data,
matchingReferences: [{ id: 3 }],
input: { value: [1, 2] },
referenceRecords: [],
},
'*aor.input.references.many_missing*',
[{ id: 3 }],
'if there is no data for the linked references, the choices are those returned by fetch'
);
test(
{
...data,
matchingReferences: [
{ id: 1 },
{ id: 2 },
{ id: 3 },
{ id: 4 },
],
input: { value: [1, 2] },
referenceRecords: [{ id: 1 }, { id: 2 }],
},
null,
[{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }],
'if there is data for the linked reference and the references fetch succeeds, we use the choices returned by fetch (that will include the linked reference, but this is not managed at getDataStatus method level.)'
);
});
});
describe('<ReferenceArrayInput />', () => {
const defaultProps = {
addField: true,
crudGetMatching: () => true,
crudGetMany: () => true,
input: {},
reference: 'tags',
resource: 'posts',
source: 'tag_ids',
matchingReferences: [{ id: 1 }],
allowEmpty: true,
translate: x => `*${x}*`,
referenceRecords: [],
meta: {},
};
const MyComponent = () => <span id="mycomponent" />;
it('should render a spinner as long as there are no references fetched and no selected references', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: null,
input: {},
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 0);
const SpinnerElement = wrapper.find('ReferenceLoadingProgress');
assert.equal(SpinnerElement.length, 1);
});
it('should render a spinner as long as there are no references fetched and there are no data found for the references already selected', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: null,
input: { value: [1, 2] },
referenceRecords: [],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 0);
const SpinnerElement = wrapper.find('ReferenceLoadingProgress');
assert.equal(SpinnerElement.length, 1);
});
it('should not render a spinner if the references are being searched but data from at least one selected reference was found', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: null,
input: { value: [1, 2] },
referenceRecords: [{ id: 1 }],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const SpinnerElement = wrapper.find('ReferenceLoadingProgress');
assert.equal(SpinnerElement.length, 0);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
assert.deepEqual(MyComponentElement.prop('choices'), [{ id: 1 }]);
});
it('should display an error in case of references fetch error and there are no selected reference in the input value', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: { error: 'fetch error' },
input: {},
referenceRecords: [],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 0);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 1);
assert.equal(
ErrorElement.prop('error'),
'*aor.input.references.all_missing*'
);
});
it('should display an error in case of references fetch error and there are no data found for the references already selected', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: { error: 'fetch error' },
input: { value: [1] },
referenceRecords: [],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 0);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 1);
assert.equal(
ErrorElement.prop('error'),
'*aor.input.references.all_missing*'
);
});
it('should not display an error in case of references fetch error but data from at least one selected reference was found', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: { error: 'fetch error' },
input: { value: [1, 2] },
referenceRecords: [{ id: 2 }],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 0);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
assert.deepEqual(MyComponentElement.prop('choices'), [{ id: 2 }]);
});
it('should send an error to the children if references fetch fails but selected references are not empty', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: { error: 'fetch error' },
input: { value: [1, 2] },
referenceRecords: [{ id: 2 }],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 0);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
assert.deepEqual(MyComponentElement.prop('meta'), {
error: '*fetch error*',
touched: true,
});
});
it('should send an error to the children if references were found but selected references are not complete', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: [],
input: { value: [1, 2] },
referenceRecords: [{ id: 2 }],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 0);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
assert.deepEqual(MyComponentElement.prop('meta'), {
error: '*aor.input.references.many_missing*',
touched: true,
});
});
it('should send an error to the children if references were found but selected references are empty', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: [],
input: { value: [1, 2] },
referenceRecords: [],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 0);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
assert.deepEqual(MyComponentElement.prop('meta'), {
error: '*aor.input.references.many_missing*',
touched: true,
});
});
it('should not send an error to the children if all references were found', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: [],
input: { value: [1, 2] },
referenceRecords: [{ id: 1 }, { id: 2 }],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 0);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
assert.deepEqual(MyComponentElement.prop('meta'), {});
});
it('should render enclosed component if references present in input are available in state', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
input: { value: [1] },
referenceRecords: [1],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 0);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
});
it('should render enclosed component even if the references are empty', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...{
...defaultProps,
matchingReferences: [],
}}
>
<MyComponent />
</ReferenceArrayInput>
);
const SpinnerElement = wrapper.find('ReferenceLoadingProgress');
assert.equal(SpinnerElement.length, 0);
const ErrorElement = wrapper.find('ReferenceError');
assert.equal(ErrorElement.length, 0);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
});
it('should render enclosed component if allowEmpty is true', () => {
const wrapper = shallow(
<ReferenceArrayInput {...defaultProps} allowEmpty>
<MyComponent />
</ReferenceArrayInput>
);
const MyComponentElement = wrapper.find('MyComponent');
assert.equal(MyComponentElement.length, 1);
});
it('should call crudGetMatching on mount with default fetch values', () => {
const crudGetMatching = sinon.spy();
shallow(
<ReferenceArrayInput
{...defaultProps}
allowEmpty
crudGetMatching={crudGetMatching}
>
<MyComponent />
</ReferenceArrayInput>,
{ lifecycleExperimental: true }
);
assert.deepEqual(crudGetMatching.args[0], [
'tags',
'posts@tag_ids',
{
page: 1,
perPage: 25,
},
{
field: 'id',
order: 'DESC',
},
{},
]);
});
it('should allow to customize crudGetMatching arguments with perPage, sort, and filter props', () => {
const crudGetMatching = sinon.spy();
shallow(
<ReferenceArrayInput
{...defaultProps}
allowEmpty
crudGetMatching={crudGetMatching}
sort={{ field: 'foo', order: 'ASC' }}
perPage={5}
filter={{ q: 'foo' }}
>
<MyComponent />
</ReferenceArrayInput>,
{ lifecycleExperimental: true }
);
assert.deepEqual(crudGetMatching.args[0], [
'tags',
'posts@tag_ids',
{
page: 1,
perPage: 5,
},
{
field: 'foo',
order: 'ASC',
},
{
q: 'foo',
},
]);
});
it('should call crudGetMatching when setFilter is called', () => {
const crudGetMatching = sinon.spy();
const wrapper = shallow(
<ReferenceArrayInput
{...defaultProps}
allowEmpty
crudGetMatching={crudGetMatching}
>
<MyComponent />
</ReferenceArrayInput>,
{ lifecycleExperimental: true }
);
wrapper.instance().setFilter('bar');
assert.deepEqual(crudGetMatching.args[1], [
'tags',
'posts@tag_ids',
{
page: 1,
perPage: 25,
},
{
field: 'id',
order: 'DESC',
},
{
q: 'bar',
},
]);
});
it('should use custom filterToQuery function prop', () => {
const crudGetMatching = sinon.spy();
const wrapper = shallow(
<ReferenceArrayInput
{...defaultProps}
allowEmpty
crudGetMatching={crudGetMatching}
filterToQuery={searchText => ({ foo: searchText })}
>
<MyComponent />
</ReferenceArrayInput>,
{ lifecycleExperimental: true }
);
wrapper.instance().setFilter('bar');
assert.deepEqual(crudGetMatching.args[1], [
'tags',
'posts@tag_ids',
{
page: 1,
perPage: 25,
},
{
field: 'id',
order: 'DESC',
},
{
foo: 'bar',
},
]);
});
it('should call crudGetMany on mount if value is set', () => {
const crudGetMany = sinon.spy();
shallow(
<ReferenceArrayInput
{...defaultProps}
allowEmpty
crudGetMany={crudGetMany}
input={{ value: [5, 6] }}
>
<MyComponent />
</ReferenceArrayInput>,
{ lifecycleExperimental: true }
);
assert.deepEqual(crudGetMany.args[0], ['tags', [5, 6]]);
});
it('should pass onChange down to child component', () => {
const onChange = sinon.spy();
const wrapper = shallow(
<ReferenceArrayInput
{...defaultProps}
allowEmpty
onChange={onChange}
>
<MyComponent />
</ReferenceArrayInput>
);
wrapper.find('MyComponent').simulate('change', 'foo');
assert.deepEqual(onChange.args[0], ['foo']);
});
it('should pass meta down to child component', () => {
const wrapper = shallow(
<ReferenceArrayInput
{...defaultProps}
allowEmpty
meta={{ touched: false }}
>
<MyComponent />
</ReferenceArrayInput>
);
const myComponent = wrapper.find('MyComponent');
assert.deepEqual(myComponent.prop('meta'), { touched: false });
});
});
});
| 38.319602 | 228 | 0.440709 |
aed0f71c6c1d6aa4a524bc8a9b2239008259df3d | 1,825 | lua | Lua | ag_zonelimit/client.lua | NormalAgent/zonelimit | 63caa7fa5c36d0c3445f3fc78c2742a6914a0d32 | [
"Unlicense"
] | null | null | null | ag_zonelimit/client.lua | NormalAgent/zonelimit | 63caa7fa5c36d0c3445f3fc78c2742a6914a0d32 | [
"Unlicense"
] | null | null | null | ag_zonelimit/client.lua | NormalAgent/zonelimit | 63caa7fa5c36d0c3445f3fc78c2742a6914a0d32 | [
"Unlicense"
] | null | null | null | local ZonelimitN = false
local ZonelimitO = false
local closestZone = 1
Citizen.CreateThread(function()
for i = 1, #Agent.zones, 1 do
local blip = AddBlipForRadius(Agent.zones[i].x, Agent.zones[i].y, Agent.zones[i].z, Agent.radius)
SetBlipHighDetail(blip, true)
SetBlipColour(blip, 11)
SetBlipAlpha (blip, 128)
		-- centre marker for the zone (the radius blip above only shades the area)
		local blip1 = AddBlipForCoord(Agent.zones[i].x, Agent.zones[i].y, Agent.zones[i].z)
		SetBlipSprite (blip1, 1) -- assumed default sprite id; change to the desired blip icon
SetBlipDisplay(blip1, true)
SetBlipScale (blip1, 0.9)
SetBlipColour (blip1, 11)
SetBlipAsShortRange(blip1, true)
end
end)
Citizen.CreateThread(function()
while true do
local playerPed = PlayerPedId()
local x, y, z = table.unpack(GetEntityCoords(playerPed, true))
local minDistance = 100000
Citizen.Wait(10000)
for i = 1, #Agent.zones, 1 do
dist = Vdist(Agent.zones[i].x, Agent.zones[i].y, Agent.zones[i].z, x, y, z)
if dist < minDistance then
minDistance = dist
closestZone = i
end
end
end
end)
Citizen.CreateThread(function()
while true do
local player = PlayerPedId()
local x,y,z = table.unpack(GetEntityCoords(player, true))
local dist = Vdist(Agent.zones[closestZone].x, Agent.zones[closestZone].y, Agent.zones[closestZone].z, x, y, z)
local vehicle = GetVehiclePedIsIn(player, false)
local speed = GetEntitySpeed(vehicle)
if dist <= Agent.radius then
if not ZonelimitN then
ZonelimitN = true
ZonelimitO = false
end
else
if not ZonelimitO then
if Agent.speedlimitador then
SetVehicleMaxSpeed(vehicle, 1000.00)
end
ZonelimitO = true
ZonelimitN = false
end
Citizen.Wait(200)
end
if ZonelimitN then
Citizen.Wait(10)
if Agent.speedlimitador then
mphs = 2.237
maxspeed = Agent.speedlimitador/mphs
SetVehicleMaxSpeed(vehicle, maxspeed)
end
end
end
end) | 26.838235 | 114 | 0.689315 |
cbddf86e1765b4b83bf1752e8cf8fc0f4c3852d3 | 1,091 | sh | Shell | 9.6-debian-dev/docker-initdb/configure-repmgr.sh | panubo/docker-postgres | 9646334190da0f8ad9475581ee1eb93d1e81ba57 | [
"MIT"
] | 1 | 2018-09-19T00:37:10.000Z | 2018-09-19T00:37:10.000Z | 9.6-debian-dev/docker-initdb/configure-repmgr.sh | panubo/docker-postgres | 9646334190da0f8ad9475581ee1eb93d1e81ba57 | [
"MIT"
] | null | null | null | 9.6-debian-dev/docker-initdb/configure-repmgr.sh | panubo/docker-postgres | 9646334190da0f8ad9475581ee1eb93d1e81ba57 | [
"MIT"
] | null | null | null | #!/usr/bin/env bash
# Initialise database with repmgr extension
set -e
[ "${REPMGR_ENABLED}" == "true" ] || { echo "Info: REPMGR_ENABLED false"; exit 0; }
[ -z "$REPMGR_PASSWORD" ] && { echo "Error: REPMGR_PASSWORD not set"; exit 128; }
: ${REPMGR_USER:='repmgr'}
: ${REPMGR_DB:='repmgr'}
psql -v ON_ERROR_STOP=1 --username "${POSTGRES_USER}" --dbname "${POSTGRES_DB}" << EOF
CREATE OR REPLACE FUNCTION __tmp_create_user() returns void as \$$
BEGIN
IF NOT EXISTS (
SELECT -- SELECT list can stay empty for this
FROM pg_catalog.pg_user
WHERE usename = '${REPMGR_USER}') THEN
CREATE USER ${REPMGR_USER};
END IF;
END;
\$$ language plpgsql;
SELECT __tmp_create_user();
DROP FUNCTION __tmp_create_user();
ALTER USER ${REPMGR_USER} WITH PASSWORD '${REPMGR_PASSWORD}';
EOF
echo "SELECT 'CREATE DATABASE ${REPMGR_DB} OWNER ${REPMGR_USER}' WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '${REPMGR_DB}')\gexec" | psql -v ON_ERROR_STOP=1 --username "${POSTGRES_USER}" --dbname "${POSTGRES_DB}"
| 34.09375 | 227 | 0.654445 |
d67460faceb6834ac7b1501ab882a562780718cc | 1,486 | cs | C# | BM.Atlas/Assets/Scripts/launch/IntroUI.cs | BrainModes/BM.Atlas | 288d2e467f57bf214c21df9d8607e00f2761649f | [
"Apache-2.0"
] | null | null | null | BM.Atlas/Assets/Scripts/launch/IntroUI.cs | BrainModes/BM.Atlas | 288d2e467f57bf214c21df9d8607e00f2761649f | [
"Apache-2.0"
] | 5 | 2021-03-18T14:09:47.000Z | 2021-04-13T13:40:07.000Z | BM.Atlas/Assets/Scripts/launch/IntroUI.cs | BrainModes/BM.Atlas | 288d2e467f57bf214c21df9d8607e00f2761649f | [
"Apache-2.0"
] | 1 | 2021-03-23T09:50:57.000Z | 2021-03-23T09:50:57.000Z | using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
public class IntroUI : MonoBehaviour
{
public string welcomeMsg = "Welcome to the BrainAtlas!";
public string clickMsg = "You can interact with the 3d brain map by using a mouse to select regions of the brain by clicking and dragging. Use the scrollwheel to zoom in and out on the brain.";
public string touchMsg = "You can interact with the 3d brain map by tapping, dragging and pinching the brain. You can also tap to select items in the accordion menu on the left-hand side.";
public string clickStartMsg = "Click the screen to get started.";
public string touchStartMsg = "Tap the screen to get started.";
private string introText;
private Text txt;
// Start is called before the first frame update
void Start()
{
txt = gameObject.GetComponent<Text>();
introText += welcomeMsg;
#if UNITY_WEBGL
introText += "\n\n";
introText += clickMsg;
introText += "\n\n";
introText += clickStartMsg;
#else
if (Input.touchSupported) {
introText += "\n\n";
introText += touchMsg;
introText += "\n\n";
introText += touchStartMsg;
} else {
introText += "\n\n";
introText += clickMsg;
introText += "\n\n";
introText += clickStartMsg;
}
#endif
txt.text = introText;
}
}
| 33.022222 | 197 | 0.629879 |
f5d3818110522f37e17177ac1d02fbe8d9d4e91c | 2,554 | css | CSS | css/style.css | br-ose/homework4-2021 | d0ea65be9ab677146e3de15a60d036f23e6bed5d | [
"CC0-1.0"
] | null | null | null | css/style.css | br-ose/homework4-2021 | d0ea65be9ab677146e3de15a60d036f23e6bed5d | [
"CC0-1.0"
] | null | null | null | css/style.css | br-ose/homework4-2021 | d0ea65be9ab677146e3de15a60d036f23e6bed5d | [
"CC0-1.0"
] | null | null | null | /*STARTER CODE*/
/*
We have started some of the selectors for you.
For others we have provided hints on what the selectors should be.
There are some missing selectors.
Make sure to learn what the starter code does!!
*/
/*STARTER CODE - DO NOT CHANGE*/
main
{
margin-bottom: 100px;
font-family: "Andika",sans-serif;
}
caption
{
margin:5px 0px;
}
/* If you decide to uncomment this code, comment it back for the Autograder. */
/* tr{
-webkit-transition: background-color 0.5s ease;
transition: background-color 0.5s ease;
} */
/*END STARTER CODE*/
body{
color: #000000;
font-size: 16px;
margin: 1.2em;
    background: url(../images/pigeon-background.JPEG), #DDDDDD;
    background-size: cover;
    background-position: center;
    background-repeat: no-repeat;
/*Req 1*/
}
h1{
color: #c12258;
font-size: 32px;
line-height:2;
text-align:center;
/*Req 2*/
}
p{
padding-top:15px;
}
a{
font-size: 16px;
text-decoration-line:underline;
text-decoration-style:wavy;
color:#B21F50;
}
table
{
border-collapse:separate;
border: 5px solid #000000;
border-radius:50px;
margin-left:auto;
margin-right:auto;
border-spacing:0;
overflow:hidden;
}
tr:nth-child(even){
background-color:#d1d1d1;
}
tr:nth-child(odd){
background-color:transparent;
}
tr:hover
{
background: rgba(126, 8, 37, 0.25);
}
/*Req 5*/
/*Add selector and rule here*/
*:focus
{
border:3px dashed #417243;
}
td{
border-top:2px solid black;
padding:10px;
margin:auto;
}
.colhead{
text-transform:uppercase;
margin-right:auto;
margin-left:auto;
display: table-cell;
color: #FFFFFF;
min-width:125px;
padding:10px;
background-color:#7e0825;
}
.colhead:first-child
{
border-top-left-radius:50px
}
.colhead:last-child
{
border-top-right-radius:50px;
}
tr:last-child:first-child
{
border-bottom-left-radius:50px;
}
tr:last-child:first-child
{
border-bottom-right-radius:50px;
}
.rowhead
{
border:2px solid black;
}
footer{
    position: fixed;
    quotes: none;
bottom:0;
width:100%;
min-height:75px;
height:fit-content;
background:#e9dfdf;
text-align:center;
color: black;
}
/* I gave you the selector, but not the property-value pair.
You can use Inspect Element or your editor to find selectors. */
/*Add selectors and rules here*/
| 15.9625 | 86 | 0.626468 |
d1aa0b2ba69ff68ab085b4fbdbcee0b0133533ec | 2,279 | cs | C# | Panosen.CodeDom.Tag.Vue/directives/VBind.cs | panosen/panosen-codedom-tag-vue | 9869e1c50bed1b8cccd58181ea558283d0d16bd6 | [
"MIT"
] | null | null | null | Panosen.CodeDom.Tag.Vue/directives/VBind.cs | panosen/panosen-codedom-tag-vue | 9869e1c50bed1b8cccd58181ea558283d0d16bd6 | [
"MIT"
] | 1 | 2022-02-09T16:20:04.000Z | 2022-02-09T16:20:04.000Z | Panosen.CodeDom.Tag.Vue/directives/VBind.cs | panosen/panosen-codedom-tag-vue | 9869e1c50bed1b8cccd58181ea558283d0d16bd6 | [
"MIT"
] | null | null | null | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Panosen.CodeDom.Tag.Vue
{
/// <summary>
/// v-bind修饰符
/// </summary>
public enum VBindModifiers
{
/// <summary>
/// none
/// </summary>
None = 0,
/// <summary>
/// Bind as a DOM property instead of an attribute (what’s the difference?). If the tag is a component then .prop will set the property on the component’s $el.
/// </summary>
Prop = 1,
/// <summary>
/// (2.1.0+) transform the kebab-case attribute name into camelCase.
/// </summary>
Camel = 2,
/// <summary>
/// (2.3.0+) a syntax sugar that expands into a v-on handler for updating the bound value.
/// </summary>
Sync = 3
}
/// <summary>
    /// v-bind extension
/// https://vuejs.org/v2/api/#v-bind
/// </summary>
public static class VBindExtension
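    // Illustrative usage (the component variable is hypothetical):
    //   component.VBind("items");                               // v-bind="items"
    //   component.VBind("title", "pageTitle");                  // v-bind:title="pageTitle"
    //   component.VBind("value", "model", VBindModifiers.Sync); // v-bind:value.sync="model"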
{
/// <summary>
/// v-bind
/// </summary>
public static TVueComponent VBind<TVueComponent>(this TVueComponent builder, string value)
where TVueComponent : VueComponent
{
builder.AddProperty($"v-bind", value);
return builder;
}
/// <summary>
/// v-bind
/// </summary>
public static TVueComponent VBind<TVueComponent>(this TVueComponent builder, string key, string value, VBindModifiers vBindModifiers = default)
where TVueComponent : VueComponent
{
switch (vBindModifiers)
{
case VBindModifiers.Prop:
builder.AddProperty($"v-bind:{key}.prop", value);
break;
case VBindModifiers.Camel:
builder.AddProperty($"v-bind:{key}.camel", value);
break;
case VBindModifiers.Sync:
builder.AddProperty($"v-bind:{key}.sync", value);
break;
case VBindModifiers.None:
default:
builder.AddProperty($"v-bind:{key}", value);
break;
}
return builder;
}
}
}
| 28.848101 | 167 | 0.519965 |
233d9a28153ce56d608296f1f12f858a6d371657 | 1,870 | css | CSS | css/style.css | Maiada-Ibrahim/salmoncook | 192bd88660cd06a9e1fa010245be384c0a1ff88d | [
"MIT"
] | null | null | null | css/style.css | Maiada-Ibrahim/salmoncook | 192bd88660cd06a9e1fa010245be384c0a1ff88d | [
"MIT"
] | null | null | null | css/style.css | Maiada-Ibrahim/salmoncook | 192bd88660cd06a9e1fa010245be384c0a1ff88d | [
"MIT"
] | null | null | null |
*{box-sizing: border-box;}
body{
background-image: url(../img/back4.png);
background-size: cover;
background-repeat: no-repeat;
border: solid 2px mediumblue;
margin: 2%;
height:200vh;
}
header p{
color:teal;
}
header div{
display: block;
height: 10vh;
width: 100%;
}
header nav ul li {
font-family:Verdana, Geneva, Tahoma, sans-serif;
margin: 2vh;
display:inline-block;
}
nav{
padding: 5%;
}
h1{
font-size: 40px;
color: purple;
display: inline-block;
position: relative;
bottom: 5vh;
left: 70vh;
}
article div img{
width: 35vh;
height: 40vh;
}
article div{
width: 35vh;
height: 50vh;
margin-top: 5vh;
display: inline-block;
margin: 10vh;
padding: 3vh;
}
section{
display:inline-block;
margin-top: 10vh;
margin-bottom: 15vh;
width: 100%;
}
article{
display: inline-block;
margin-left:30vh;
}
section h2{
font-family: fantasy;
color: purple;
text-align: center;
font-size: 40px;
}
section p{
font-family: Impact, Haettenschweiler, 'Arial Narrow Bold', sans-serif;
font-size: 20px;
text-align: center;
margin-top: 10vh;
}
#last{
margin-left: 45%;
}
a {
text-decoration-line: none;
}
p{
font-size: 20px;
color:rgb(14, 245, 245);
}
table
tr,
td {
border: 1px solid;
border-color: white;
}
#logo{
display: inline-block;
}
main div form{
display:inline-block ;
position: relative;
top: 20px;
left: 30px;
width: 250px;
height: 400px;
}
main div form fieldset{
border: 2px royalblue solid;
padding: 30px;
}
input{
margin-top: 10px;
}
#tableindex tr ,td{
border:none;
}
#tableindex{
display: inline-block;
position:relative ;
top: 120px;
left: 118vh;
}
| 13.169014 | 75 | 0.593583 |
e2e2dc06730b59c854b6d2860c899cd9a7b8b670 | 64 | py | Python | gseapy/__init__.py | oreh/gseapy | d3212afb2e8d61f37957d685da6ef28f723d98e6 | [
"MIT"
] | 1 | 2021-01-15T02:02:12.000Z | 2021-01-15T02:02:12.000Z | gseapy/__init__.py | oreh/gseapy | d3212afb2e8d61f37957d685da6ef28f723d98e6 | [
"MIT"
] | null | null | null | gseapy/__init__.py | oreh/gseapy | d3212afb2e8d61f37957d685da6ef28f723d98e6 | [
"MIT"
] | null | null | null | #
from .gsea import call, replot, prerank
__version__ ='0.4.3' | 12.8 | 39 | 0.703125 |
57fb74bf0f61c7d200a6e525308e725cb949e9bd | 466 | php | PHP | app/Controllers/Home.php | fghaffar26/sekolah_kolaborasi | 54945676c86fa90f773038cbe81aa3f3236bbd8f | [
"MIT"
] | null | null | null | app/Controllers/Home.php | fghaffar26/sekolah_kolaborasi | 54945676c86fa90f773038cbe81aa3f3236bbd8f | [
"MIT"
] | null | null | null | app/Controllers/Home.php | fghaffar26/sekolah_kolaborasi | 54945676c86fa90f773038cbe81aa3f3236bbd8f | [
"MIT"
] | null | null | null | <?php
namespace App\Controllers;
use \App\Models\UsulanSmaModel;
use \App\Models\UsulanSmpModel;
use \App\Models\GalleryModel;
class Home extends BaseController
{
public function index()
{
$smaModel = new UsulanSmaModel();
$smpModel = new UsulanSmpModel();
$galleryModel = new GalleryModel();
$data = [
'sma' => $smaModel->findAll(),
'smp' => $smpModel->findAll(),
'gallery' => $galleryModel->findAll()
];
return view('index', $data);
}
}
| 19.416667 | 40 | 0.671674 |
2ff1eb165873aa0e593f8fb1a0b4ad8bba2b6ad4 | 903 | py | Python | modules/core_api/info_controllers.py | srcc-msu/job_statistics | 74680a4e4c105ebcff94f089e07fcb44dbcc12d9 | [
"MIT"
] | null | null | null | modules/core_api/info_controllers.py | srcc-msu/job_statistics | 74680a4e4c105ebcff94f089e07fcb44dbcc12d9 | [
"MIT"
] | null | null | null | modules/core_api/info_controllers.py | srcc-msu/job_statistics | 74680a4e4c105ebcff94f089e07fcb44dbcc12d9 | [
"MIT"
] | null | null | null | from flask import jsonify, Response, request, redirect, Blueprint, abort
from core.job.models import Job
from application.helpers import crossdomain
job_info_api_pages = Blueprint('job_info_api', __name__
, template_folder='templates')
@job_info_api_pages.route("/record/<int:job_id>/<int:task_id>")
@job_info_api_pages.route("/record/<int:job_id>", defaults={'task_id': 0})
@crossdomain(origin='*')
def json_job(job_id: int, task_id: int) -> Response:
try:
return str(Job.get_by_id(job_id, task_id).id)
except LookupError:
abort(404)
@job_info_api_pages.route("/<int:record_id>")
@crossdomain(origin='*')
def job_record(record_id: int) -> Response:
return redirect(request.base_url + "/info")
@job_info_api_pages.route("/<int:record_id>/info")
@crossdomain(origin='*')
def json_job_info(record_id: int) -> Response:
data = Job.query.get_or_404(record_id)
return jsonify(data.to_dict())
| 31.137931 | 74 | 0.754153 |
c4545d26f42e20282c276664e20c31c163072b06 | 10,794 | cc | C++ | lite/kernels/apu/bridges/elementwise_ops.cc | wanglei91/Paddle-Lite | 8b2479f4cdd6970be507203d791bede5a453c09d | [
"Apache-2.0"
] | 1,799 | 2019-08-19T03:29:38.000Z | 2022-03-31T14:30:50.000Z | lite/kernels/apu/bridges/elementwise_ops.cc | wanglei91/Paddle-Lite | 8b2479f4cdd6970be507203d791bede5a453c09d | [
"Apache-2.0"
] | 3,767 | 2019-08-19T03:36:04.000Z | 2022-03-31T14:37:26.000Z | lite/kernels/apu/bridges/elementwise_ops.cc | wanglei91/Paddle-Lite | 8b2479f4cdd6970be507203d791bede5a453c09d | [
"Apache-2.0"
] | 798 | 2019-08-19T02:28:23.000Z | 2022-03-31T08:31:54.000Z | // Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "lite/core/subgraph/subgraph_bridge_registry.h"
#include "lite/kernels/apu/bridges/graph.h"
#include "lite/kernels/apu/bridges/utility.h"
namespace paddle {
namespace lite {
namespace subgraph {
namespace apu {
int ElementwiseConverter(void* ctx, OpLite* op, KernelBase* kernel) {
CHECK(ctx != nullptr);
CHECK(op != nullptr);
auto graph = static_cast<Graph*>(ctx);
auto model = graph->model();
auto op_info = op->op_info();
auto op_type = op_info->Type();
auto scope = op->scope();
int neuron_errCode;
VLOG(3) << "[APU] Converting [" + op_type + "]";
// Get input and output vars and op attributes
auto x_name = op_info->Input("X").front();
auto x_scale_name = "X0_scale";
auto x = scope->FindTensor(x_name);
auto x_dims = x->dims();
auto y_name = op_info->Input("Y").front();
auto y_scale_name = "Y0_scale";
auto y = scope->FindTensor(y_name);
auto y_dims = y->dims();
auto out_name = op_info->Output("Out").front();
auto out_scale_name = "Out0_scale";
auto out = scope->FindTensor(out_name);
auto out_dims = out->dims();
auto axis = op_info->GetAttr<int>("axis");
if (axis < 0) {
axis = x_dims.size() - y_dims.size();
}
auto x_shape = x_dims.Vectorize();
auto y_shape = y_dims.Vectorize();
// Two dimensions are compatible when:
// 1. they are equal, or
// 2. one of them is 1
for (int i = axis; i < x_shape.size(); i++) {
if (x_dims[i] != y_dims[i - axis]) {
// Input 1 compatible dimensions as input0
if (y_dims[i - axis] != 1) {
LOG(WARNING) << i << ":" << axis << ":" << y_dims[i - axis];
return FAILED;
}
}
} // End of for
int32_t fuse_val[1] = {NEURON_FUSED_NONE};
// Act node
if (op_type == "fusion_elementwise_add_activation" ||
op_type == "fusion_elementwise_sub_activation" ||
op_type == "fusion_elementwise_mul_activation" ||
op_type == "fusion_elementwise_div_activation") {
auto act_type = op_info->GetAttr<std::string>("act_type");
if (act_type == "relu") {
fuse_val[0] = NEURON_FUSED_RELU;
} else if (act_type == "relu1") {
fuse_val[0] = NEURON_FUSED_RELU1;
} else if (act_type == "relu6") {
fuse_val[0] = NEURON_FUSED_RELU6;
} else if (!act_type.empty()) {
fuse_val[0] = NEURON_FUSED_NONE;
LOG(WARNING) << "Support act_type: " << act_type;
return FAILED;
}
} // End of if
VLOG(3) << "x_name" << x_name;
CHECK(op_info->HasInputScale(x_scale_name, true));
auto x_scale = op_info->GetInputScale(x_scale_name, true)[0];
CHECK(op_info->HasInputScale(y_scale_name, true));
auto y_scale = op_info->GetInputScale(y_scale_name, true)[0];
CHECK(op_info->HasOutputScale(out_scale_name, true));
auto out_scale = op_info->GetOutputScale(out_scale_name, true)[0];
// Add x tensor type
NeuronOperandType xType;
xType.type = NEURON_TENSOR_QUANT8_ASYMM;
xType.scale = x_scale;
xType.zeroPoint = 128;
xType.dimensionCount = x_dims.size();
std::vector<uint32_t> dims_x = {(uint32_t)x_dims[0],
(uint32_t)x_dims[2],
(uint32_t)x_dims[3],
(uint32_t)x_dims[1]};
xType.dimensions = &dims_x[0];
std::shared_ptr<Node> x_node = nullptr;
if (graph->Has(x_name)) {
VLOG(3) << "Graph has " << x_name;
if (graph->IsInput(x_name)) {
VLOG(3) << x_name << "is input and already exist";
x_name = "transpose_" + x_name;
}
if (graph->IsOutput(x_name)) {
VLOG(3) << x_name << "is input and output node";
x_name = "transpose_" + x_name;
}
x_node = graph->Get(x_name);
} else {
if (graph->IsInput(x_name)) {
insert_transpose_node(ctx,
x_name,
"transpose_" + x_name,
{(uint32_t)x_dims[0],
(uint32_t)x_dims[1],
(uint32_t)x_dims[2],
(uint32_t)x_dims[3]},
dims_x,
{0, 2, 3, 1},
xType.scale,
xType.zeroPoint);
// Change x name after insert transpose op for x data relayout
x_name = "transpose_" + x_name;
x_node = graph->Get(x_name);
} else {
NeuronModel_addOperand(model, &xType);
x_node = graph->Add(x_name, dims_x);
}
} // End of else
VLOG(3) << "x node idx: " << x_node->index() << "x_dims: " << x_dims
<< ": x_scale: " << x_scale << ", xType: " << xType.dimensions[0]
<< ":" << xType.dimensions[1] << ":" << xType.dimensions[2] << ":"
<< xType.dimensions[3];
// Add y tensor type
NeuronOperandType yType;
yType.type = NEURON_TENSOR_QUANT8_ASYMM;
yType.scale = y_scale;
yType.zeroPoint = 128;
yType.dimensionCount = y_dims.size();
std::vector<uint32_t> dims_y = {(uint32_t)y_dims[0],
(uint32_t)y_dims[2],
(uint32_t)y_dims[3],
(uint32_t)y_dims[1]};
yType.dimensions = &dims_y[0];
std::shared_ptr<Node> y_node = nullptr;
if (graph->Has(y_name)) {
VLOG(3) << "Graph has " << y_name;
y_node = graph->Get(y_name);
} else {
if (graph->IsInput(y_name)) {
insert_transpose_node(ctx,
y_name,
"transpose_" + y_name,
{(uint32_t)y_dims[0],
(uint32_t)y_dims[1],
(uint32_t)y_dims[2],
(uint32_t)y_dims[3]},
dims_y,
{0, 2, 3, 1},
yType.scale,
yType.zeroPoint);
y_name = "transpose_" + y_name;
y_node = graph->Get(y_name);
} else {
NeuronModel_addOperand(model, &yType);
y_node = graph->Add(y_name, dims_y);
}
}
VLOG(3) << "y node idx: " << y_node->index() << "y_dims: " << y_dims
<< ": y_scale: " << y_scale << ", yType: " << yType.dimensions[0]
<< ":" << yType.dimensions[1] << ":" << yType.dimensions[2] << ":"
<< yType.dimensions[3];
// Add fuse operand type
NeuronOperandType int32Type;
int32Type.type = NEURON_INT32;
int32Type.dimensionCount = 0;
std::vector<uint32_t> dims_int32 = {1};
// Add fuse operand
std::shared_ptr<Node> fuse_node = nullptr;
NeuronModel_addOperand(model, &int32Type); // Operand 2: fuse
fuse_node = graph->Add(out_name + "_fuse", dims_int32);
// Add out tensor type
NeuronOperandType outType;
outType.type = NEURON_TENSOR_QUANT8_ASYMM;
outType.scale = out_scale;
outType.zeroPoint = 128;
outType.dimensionCount = out_dims.size();
std::vector<uint32_t> dims_out = {(uint32_t)out_dims[0],
(uint32_t)out_dims[2],
(uint32_t)out_dims[3],
(uint32_t)out_dims[1]};
outType.dimensions = &dims_out[0];
std::shared_ptr<Node> out_node = nullptr;
if (graph->Has(out_name)) {
VLOG(3) << "Graph has " << out_name;
out_node = graph->Get(out_name);
} else {
if (graph->IsOutput(out_name)) {
NeuronModel_addOperand(model, &outType);
out_node = graph->Add("transpose_" + out_name, dims_out);
} else {
NeuronModel_addOperand(model, &outType);
out_node = graph->Add(out_name, dims_out);
}
}
VLOG(3) << "out node idx: " << out_node->index() << "out_dims: " << out_dims
<< ": out_scale: " << out_scale
<< ", outType: " << outType.dimensions[0] << ":"
<< outType.dimensions[1] << ":" << outType.dimensions[2] << ":"
<< outType.dimensions[3];
// Set fuse value
NeuronModel_setOperandValue(
model, fuse_node->index(), fuse_val, sizeof(int32_t) * 1);
std::vector<uint32_t> addInIndex = {
x_node->index(), // 0: A tensor
y_node->index(), // 1: A tensor of the same OperandCode,
// and compatible dimensions as input 0
fuse_node->index()}; // 2: fuse
std::vector<uint32_t> addOutIndex = {out_node->index()};
if (op_type == "elementwise_add" ||
op_type == "fusion_elementwise_add_activation") {
neuron_errCode = NeuronModel_addOperation(model,
NEURON_ADD,
addInIndex.size(),
&addInIndex[0],
addOutIndex.size(),
&addOutIndex[0]);
} else {
LOG(WARNING) << "[APU] Unsupported op type: " << op_type;
return FAILED;
}
if (NEURON_NO_ERROR != neuron_errCode) {
LOG(WARNING) << "ADD op fail:" << op_type;
return FAILED;
}
if (graph->IsOutput(out_name)) {
// Insert transpose for NHWC -> NCHW
insert_transpose_node(ctx,
"transpose_" + out_name,
out_name,
dims_out,
{(uint32_t)out_dims[0],
(uint32_t)out_dims[1],
(uint32_t)out_dims[2],
(uint32_t)out_dims[3]},
{0, 3, 1, 2},
outType.scale,
outType.zeroPoint);
out_node = graph->Get(out_name);
if (out_node == nullptr) return FAILED;
}
return REBUILD_WHEN_SHAPE_CHANGED;
}
} // namespace apu
} // namespace subgraph
} // namespace lite
} // namespace paddle
REGISTER_SUBGRAPH_BRIDGE(elementwise_add,
kAPU,
paddle::lite::subgraph::apu::ElementwiseConverter);
REGISTER_SUBGRAPH_BRIDGE(elementwise_mul,
kAPU,
paddle::lite::subgraph::apu::ElementwiseConverter);
REGISTER_SUBGRAPH_BRIDGE(fusion_elementwise_add_activation,
kAPU,
paddle::lite::subgraph::apu::ElementwiseConverter);
| 35.98 | 78 | 0.55642 |
4dd19460249a1aeb076d362f99797ec87e3c1c55 | 195 | cs | C# | CefGlueHeadless/Delegates/BrowserLoadingStateChanged.cs | alexsaves/CefGlueHeadless | f9c82aaa803ada3589d8f748cda8129e7e11991f | [
"MIT"
] | 2 | 2021-04-26T18:06:56.000Z | 2021-12-26T06:37:39.000Z | CefGlueHeadless/Delegates/BrowserLoadingStateChanged.cs | alexsaves/CefGlueHeadless | f9c82aaa803ada3589d8f748cda8129e7e11991f | [
"MIT"
] | null | null | null | CefGlueHeadless/Delegates/BrowserLoadingStateChanged.cs | alexsaves/CefGlueHeadless | f9c82aaa803ada3589d8f748cda8129e7e11991f | [
"MIT"
] | null | null | null | using Xilium.CefGlue;
namespace CefGlueHeadless.Delegates
{
public delegate void BrowserLoadingStateChangeDelegate(CefBrowser browser, bool isLoading, bool canGoBack, bool canGoForward);
}
| 27.857143 | 130 | 0.825641 |
21b9f19ac470322c60eab06b1a1494d36dadf73f | 841 | js | JavaScript | src/controllers/Configuration.js | homelabaas/dns-api | d5d1b3dcb84d146bc16410213a16e104edf1ebfa | [
"MIT"
] | 2 | 2021-02-28T19:17:51.000Z | 2021-02-28T19:17:52.000Z | src/controllers/Configuration.js | homelabaas/dns-api | d5d1b3dcb84d146bc16410213a16e104edf1ebfa | [
"MIT"
] | 2 | 2020-07-17T10:26:21.000Z | 2021-05-09T08:52:36.000Z | src/controllers/Configuration.js | homelabaas/dns-api | d5d1b3dcb84d146bc16410213a16e104edf1ebfa | [
"MIT"
] | null | null | null | const utils = require('../utils/writer.js');
const container = require('../diContainer');
module.exports.getConfig = async function getConfig (req, res, next) {
const service = container.resolve('configurationService');
try {
const returnValue = service.getConfig();
utils.writeJson(res, returnValue);
} catch (error) {
console.error('Error', error);
utils.writeJson(res, { "error": error.message });
}
}
module.exports.setConfig = async function setConfig (req, res, next) {
const service = container.resolve('configurationService');
var body = req.swagger.params['body'].value;
try {
const returnValue = await service.setConfig(body.dnsforwarders);
utils.writeJson(res, returnValue);
} catch (error) {
console.error('Error', error);
utils.writeJson(res, { "error": error.message });
}
}; | 33.64 | 70 | 0.687277 |
7bcb50bba0b9a756a39d86b9ad089a039f2b3326 | 391 | rb | Ruby | db/migrate/20130207213528_change_activeadmin_selleo_cms_assets.rb | tb/activeadmin-selleo-cms | c7f20f2e0fc81ff7fd166ef069099ee8b4a5ba31 | [
"MIT"
] | 2 | 2015-10-05T19:52:51.000Z | 2020-10-31T11:51:34.000Z | db/migrate/20130207213528_change_activeadmin_selleo_cms_assets.rb | tb/activeadmin-selleo-cms | c7f20f2e0fc81ff7fd166ef069099ee8b4a5ba31 | [
"MIT"
] | null | null | null | db/migrate/20130207213528_change_activeadmin_selleo_cms_assets.rb | tb/activeadmin-selleo-cms | c7f20f2e0fc81ff7fd166ef069099ee8b4a5ba31 | [
"MIT"
] | 1 | 2020-10-31T11:51:35.000Z | 2020-10-31T11:51:35.000Z | class ChangeActiveadminSelleoCmsAssets < ActiveRecord::Migration
def up
add_column :activeadmin_selleo_cms_assets, :cover_file_name, :string
add_column :activeadmin_selleo_cms_assets, :cover_content_type, :string
add_column :activeadmin_selleo_cms_assets, :cover_file_size, :integer
add_column :activeadmin_selleo_cms_assets, :caption, :string
end
def down
end
end
| 30.076923 | 75 | 0.805627 |
2ff950af253986d1349c2774e870edb272992ba0 | 298 | kt | Kotlin | app/src/main/java/com/kyberswap/android/presentation/main/profile/UserInfoState.kt | newtalentxp/android-app | 8b149808a898a91cc9b1438a919556354359750c | [
"MIT"
] | 20 | 2019-09-12T04:23:12.000Z | 2021-03-02T16:51:35.000Z | app/src/main/java/com/kyberswap/android/presentation/main/profile/UserInfoState.kt | zachwylde00/android-app | ddf69ff4a6aa26c54e938fd84d496b5792e352e5 | [
"MIT"
] | 3 | 2020-05-13T21:24:39.000Z | 2021-04-26T09:53:01.000Z | app/src/main/java/com/kyberswap/android/presentation/main/profile/UserInfoState.kt | zachwylde00/android-app | ddf69ff4a6aa26c54e938fd84d496b5792e352e5 | [
"MIT"
] | 7 | 2019-09-28T02:31:18.000Z | 2021-04-05T17:15:51.000Z | package com.kyberswap.android.presentation.main.profile
import com.kyberswap.android.domain.model.UserInfo
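/** UI states emitted while loading the user's profile info: loading, error, or success. */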
sealed class UserInfoState {
object Loading : UserInfoState()
class ShowError(val message: String?) : UserInfoState()
class Success(val userInfo: UserInfo?) : UserInfoState()
}
| 29.8 | 60 | 0.771812 |
c8fe00d292851fec62c3fefa9da151bac721bbda | 666 | go | Go | db/mysql.go | DanPlayer/goin | 4c8b249a83a510328a04ac7cc169ea958e0dbddc | [
"MIT"
] | 6 | 2021-05-31T13:59:52.000Z | 2021-06-09T09:20:04.000Z | db/mysql.go | DanPlayer/goin | 4c8b249a83a510328a04ac7cc169ea958e0dbddc | [
"MIT"
] | 1 | 2021-06-01T03:06:35.000Z | 2021-06-01T03:07:53.000Z | db/mysql.go | DanPlayer/goin | 4c8b249a83a510328a04ac7cc169ea958e0dbddc | [
"MIT"
] | 3 | 2021-05-31T14:41:31.000Z | 2022-03-04T09:43:27.000Z | package db
import (
"database/sql"
"fmt"
"goin/conf"
"gorm.io/driver/mysql"
"gorm.io/gorm"
"time"
)
var (
Eloquent *gorm.DB
EloquentDb *sql.DB
)
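// MysqlDial opens the MySQL connection described by config and initializes the shared gorm/sql handles.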
func MysqlDial(config *conf.MysqlConf) error {
var err error
Eloquent, err = gorm.Open(mysql.Open(config.DSN), &gorm.Config{})
	if err != nil {
		fmt.Printf("mysql connect error %v", err)
		return err
	}
	if Eloquent.Error != nil {
		fmt.Printf("database error %v", Eloquent.Error)
	}
	EloquentDb, err = Eloquent.DB()
	if err != nil {
		fmt.Printf("database error %v", err)
		return err
	}
EloquentDb.SetMaxIdleConns(100)
EloquentDb.SetMaxOpenConns(5000)
EloquentDb.SetConnMaxLifetime(time.Second * 60)
return nil
}
| 18.5 | 66 | 0.690691 |
8eafdb0dabda6274b5d372601f4ffd552531fe06 | 1,813 | js | JavaScript | js/gc_gamepad_scanner/model_dclassic.js | nobu-e753/gcscannerjs | dac85344353f5c9627d4d64fdc5893cb545acdd4 | [
"Apache-2.0"
] | null | null | null | js/gc_gamepad_scanner/model_dclassic.js | nobu-e753/gcscannerjs | dac85344353f5c9627d4d64fdc5893cb545acdd4 | [
"Apache-2.0"
] | null | null | null | js/gc_gamepad_scanner/model_dclassic.js | nobu-e753/gcscannerjs | dac85344353f5c9627d4d64fdc5893cb545acdd4 | [
"Apache-2.0"
] | 1 | 2018-06-24T07:19:19.000Z | 2018-06-24T07:19:19.000Z | /**
* Scanner module for digital-classic gamepad (ex. 4+8buttons)
*/
var GC_GAMEPAD_dclassic = {};
GC_GAMEPAD_dclassic.id = null;
GC_GAMEPAD_dclassic.enableDev = false;
GC_GAMEPAD_dclassic.enableAng = false;
GC_GAMEPAD_dclassic.init = function(aEnableDev, aEnableAng){
GC_GAMEPAD_dclassic.enableDev = aEnableDev;
GC_GAMEPAD_dclassic.enableAng = aEnableDev && aEnableAng;
GC_GAMEPAD_dclassic.stateStr = "";
}
GC_GAMEPAD_dclassic.scan = function(){
var tDev = navigator.getGamepads()[GC_GAMEPAD_dclassic.dev];
if (!GC_GAMEPAD_dclassic.enableDev || (tDev == null))
return null;
// scan dpad
const tUP = (tDev.buttons[12].pressed) ? 1:0;
const tDW = (tDev.buttons[13].pressed) ? 1:0;
const tRT = (tDev.buttons[14].pressed) ? 1:0;
const tLF = (tDev.buttons[15].pressed) ? 1:0;
var tRawDpad = (tLF<<3) | (tDW<<2) | (tRT<<1) | (tUP<<0);
    var tNewDpad = GC_GAMEPAD_dclassic.to9dir[tRawDpad];
// scan buttons
var tNewBtn = [];
for (var i=0; i<Math.min(12, tDev.buttons.length); i++){
if (tDev.buttons[i].pressed)
tNewBtn.push(i);
}
// scan analog (no analog sticks)
var tNewAng = [0, 0, 0, 0]
const tNewState = {
"dpad" : tNewDpad,
"btn" : tNewBtn,
"ang" : tNewAng,
"dur" : -1
};
const tNewStateStr = JSON.stringify(tNewState);
if (GC_GAMEPAD_dclassic.stateStr !== tNewStateStr){
GC_GAMEPAD_dclassic.stateStr = tNewStateStr;
return tNewState;
} else
return null;
}
GC_GAMEPAD_dclassic.to9dir = [
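    // 4-bit D-pad mask (LF|DW|RT|UP) mapped to a 9-way direction code in numpad layout; 5 = neutral.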
5, // 0x0
8, // 0x1
6, // 0x2
9, // 0x3
2, // 0x4
5, // 0x5
3, // 0x6
5, // 0x7
4, // 0x8
7, // 0x9
5, // 0xA
5, // 0xB
1, // 0xC
5, // 0xD
5, // 0xE
5 // 0xF
];
| 24.835616 | 64 | 0.586873 |
2397109f9ffdb3b76380188c706b08fee2fbf624 | 1,416 | js | JavaScript | assets/admin/tinymce/modules/bridge/Gruntfile.js | faidlurrohman/CODEIGNITER | bfd14de0d85b92f43eed35e5e27f67ab7b5ac43e | [
"MIT"
] | 7 | 2019-11-28T02:43:39.000Z | 2020-12-21T12:56:41.000Z | assets/admin/tinymce/modules/bridge/Gruntfile.js | faidlurrohman/CODEIGNITER | bfd14de0d85b92f43eed35e5e27f67ab7b5ac43e | [
"MIT"
] | 14 | 2020-04-24T20:15:43.000Z | 2021-02-24T06:32:58.000Z | assets/admin/tinymce/modules/bridge/Gruntfile.js | faidlurrohman/CODEIGNITER | bfd14de0d85b92f43eed35e5e27f67ab7b5ac43e | [
"MIT"
] | 6 | 2020-01-09T08:53:01.000Z | 2021-05-17T07:48:11.000Z | const {
CheckerPlugin,
TsConfigPathsPlugin
} = require('awesome-typescript-loader');
const LiveReloadPlugin = require('webpack-livereload-plugin');
const path = require('path');
module.exports = function(grunt) {
var packageData = grunt.file.readJSON('package.json');
const tsDemoSourceFile = path.resolve('src/demo/ts/Demo.ts');
const jsDemoDestFile = path.resolve('scratch/compiled/demo.js');
grunt.initConfig({
pkg: packageData,
shell: {
command: 'tsc'
},
webpack: {
options: {
mode: 'development',
watch: true
},
dev: {
entry: tsDemoSourceFile,
mode: 'development',
devtool: 'source-map',
resolve: {
extensions: ['.ts', '.js'],
plugins: [
new TsConfigPathsPlugin({
compiler: 'typescript'
})
]
},
module: {
rules: [
{
test: /\.ts$/,
use: [
{
loader: 'awesome-typescript-loader'
}
]
}
]
},
plugins: [new LiveReloadPlugin(), new CheckerPlugin()],
output: {
filename: path.basename(jsDemoDestFile),
path: path.dirname(jsDemoDestFile)
}
}
}
});
require('load-grunt-tasks')(grunt);
grunt.registerTask('default', []);
};
| 21.454545 | 66 | 0.507768 |
7dcc27f05b866675e0a869c67066ee5a7e981f32 | 4,809 | css | CSS | homework/week-5/grid/style.css | Obemegargel/wdd-130-megargel-project | 4fef3ad367cc868daa61480ecf8ec5491f973c10 | [
"MIT"
] | null | null | null | homework/week-5/grid/style.css | Obemegargel/wdd-130-megargel-project | 4fef3ad367cc868daa61480ecf8ec5491f973c10 | [
"MIT"
] | null | null | null | homework/week-5/grid/style.css | Obemegargel/wdd-130-megargel-project | 4fef3ad367cc868daa61480ecf8ec5491f973c10 | [
"MIT"
] | null | null | null | /**
*
* Page Styles
*
* These styles are for the page. Please do not edit them. Scroll down
* until you see the activity CSS and that is where you can start editing.
*
*/
.exercise {
width: 600px;
margin: 20px auto;
font-size: 20px;
line-height: 30px;
font-family: Arial, sans-serif;
border: 1px solid black;
padding: 10px 20px;
clear:both;
overflow: auto;
}
.collapsible {
position: relative;
color: #ffffff;
font-size: 20px;
font-weight: bold;
background-color: #0076b6;
padding: 20px 3rem 20px 20px;
margin: 0 0 20px 0;
width: 100%;
border: none;
text-align: left;
outline: none;
font-weight: bold;
cursor: pointer;
}
.collapsible:after {
position: absolute;
content: '+';
top: 10px;
right: 20px;
font-size: 2.4rem;
font-weight: bold;
}
.collapsible.open:after {
content: 'x';
}
.collapsible.open + .collapsible-content {
max-height: none;
padding: 0 20px;
border-top: 2px solid #0076b6;
border-bottom: 2px solid #0076b6;
background-color: rgba(0, 118, 182, 0.05);
}
.collapsible-content {
position: relative;
max-height: 0;
overflow: hidden;
margin: -22px -20px 20px -20px;
transition: max-height 0.2s ease-out;
}
.collapsible-content img {
width: 100%;
height: auto;
}
.content-area {
margin: 10px 0;
overflow: auto;
}
/**
*
* ACTIVITY 1
*
* The styles in the following section are for exercise 1.
* Please add to or edit these styles to solve exercise 1.
*
*/
#content-1 {
}
#content-1 .red {
width: 100px;
height: 100px;
background-color: red;
}
#content-1 .green {
width: 100px;
height: 100px;
background-color: green;
}
#content-1 .yellow {
width: 100px;
height: 100px;
background-color: gold;
}
#content-1 .blue {
width: 100px;
height: 100px;
background-color: blue;
}
/* END ACTIVITY 1 */
/**
*
* ACTIVITY 2
*
* The styles in the following section are for exercise 2.
* Please add to or edit these styles to solve exercise 2.
*
*/
#content-2 {
}
#content-2 .red {
width: 100px;
height: 100px;
background-color: red;
}
#content-2 .green {
width: 100px;
height: 100px;
background-color: green;
}
#content-2 .yellow {
width: 100px;
height: 100px;
background-color: gold;
}
#content-2 .blue {
width: 100px;
height: 100px;
background-color: blue;
}
/* END ACTIVITY 2 */
/**
*
* ACTIVITY 3
*
* The styles in the following section are for exercise 3.
* Please add to or edit these styles to solve exercise 3.
*
*/
#content-3 {
}
#content-3 .red {
width: 100px;
height: 100px;
background-color: red;
}
#content-3 .green {
width: 100px;
height: 100px;
background-color: green;
}
#content-3 .yellow {
width: 100px;
height: 100px;
background-color: gold;
}
#content-3 .blue {
width: 100px;
height: 100px;
background-color: blue;
}
/* END ACTIVITY 3 */
/**
*
* ACTIVITY 4
*
* The styles in the following section are for exercise 4.
* Please add to or edit these styles to solve exercise 4.
*
*/
#content-4 {
height: 400px;
}
#content-4 .red {
width: 100px;
height: 100px;
background-color: red;
}
#content-4 .green {
width: 100px;
height: 100px;
background-color: green;
}
#content-4 .yellow {
width: 100px;
height: 100px;
background-color: gold;
}
#content-4 .blue {
width: 100px;
height: 100px;
background-color: blue;
}
/* END ACTIVITY 4 */
/**
*
* ACTIVITY 5
*
* The styles in the following section are for exercise 5.
* Please add to or edit these styles to solve exercise 5.
*
*/
#content-5 {
height: 400px;
}
#content-5 .red {
width: 100px;
height: 100px;
background-color: red;
}
#content-5 .green {
width: 100px;
height: 100px;
background-color: green;
}
#content-5 .yellow {
width: 100px;
height: 100px;
background-color: gold;
}
#content-5 .blue {
width: 100px;
height: 100px;
background-color: blue;
}
/* END ACTIVITY 5 */
/**
*
* ACTIVITY 6
*
* The styles in the following section are for exercise 6.
* Please add to or edit these styles to solve exercise 6.
*
*/
#content-6 {
}
#content-6 .red {
width: 100px;
height: 100px;
background-color: red;
}
#content-6 .green {
width: 100px;
height: 100px;
background-color: green;
}
#content-6 .yellow {
width: 100px;
height: 100px;
background-color: gold;
}
#content-6 .blue {
width: 100px;
height: 100px;
background-color: blue;
}
/* END ACTIVITY 6 */ | 15.122642 | 74 | 0.595758 |
af076f9d951bed0f24d3e57e9b5dcb299d77676f | 7,807 | rb | Ruby | lib/ops_manager/appliance_deployment.rb | compozed/ops_manager_cli | 4df50e6a3fec4d88731c35e8dfb17d71c32fe2a7 | [
"MIT"
] | 13 | 2016-09-07T03:09:01.000Z | 2017-01-27T00:08:28.000Z | lib/ops_manager/appliance_deployment.rb | compozed/ops_manager_cli | 4df50e6a3fec4d88731c35e8dfb17d71c32fe2a7 | [
"MIT"
] | 34 | 2016-09-03T01:07:08.000Z | 2018-06-18T16:44:51.000Z | lib/ops_manager/appliance_deployment.rb | compozed/ops_manager_cli | 4df50e6a3fec4d88731c35e8dfb17d71c32fe2a7 | [
"MIT"
] | 10 | 2016-09-09T21:40:35.000Z | 2018-06-18T16:23:41.000Z | require "ops_manager/api/opsman"
require "ops_manager/api/pivnet"
require 'ops_manager/config/opsman_deployment'
require 'fileutils'
class OpsManager::ApplianceDeployment
extend Forwardable
attr_reader :config
def_delegators :pivnet_api, :get_product_releases, :accept_product_release_eula,
:get_product_release_files, :download_product_release_file
def_delegators :opsman_api, :create_user, :get_installation_assets,
:get_installation_settings, :get_diagnostic_report, :upload_installation_assets, :get_ensure_availability,
:import_stemcell, :target, :password, :username, :ops_manager_version= , :reset_access_token, :get_pending_changes,
:wait_for_https_alive
attr_reader :config_file
def initialize(config_file)
@config_file = config_file
end
def run
OpsManager.set_conf(:target, config[:ip])
OpsManager.set_conf(:username, config[:username])
OpsManager.set_conf(:password, config[:password])
OpsManager.set_conf(:pivnet_token, config[:pivnet_token])
case
when current_version.empty?
puts "No OpsManager deployed at #{config[:ip]}. Deploying ...".green
deploy
create_first_user
when current_version < desired_version then
puts "OpsManager at #{config[:ip]} version is #{current_version}. Upgrading to #{desired_version} .../".green
upgrade
when current_version == desired_version then
if pending_changes?
puts "OpsManager at #{config[:ip]} version has pending changes. Applying changes...".green
products = "all"
if config[:selected_deployments]
products = config[:selected_deployments]
elsif config[:single_tile_deploy]
products = "none"
end
OpsManager::InstallationRunner.trigger!(products).wait_for_result
else
puts "OpsManager at #{config[:ip]} version is already #{config[:desired_version]}. Skiping ...".green
end
end
puts '====> Finish!'.green
end
def appliance
@appliance ||= if config[:provider] =~/vsphere/i
OpsManager::Appliance::Vsphere.new(config)
else
OpsManager::Appliance::AWS.new(config)
end
end
def create_first_user
puts '====> Creating initial user'.green
until( create_user.code.to_i == 200) do
print ' .'.green ; sleep 1
end
end
def deploy
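    # Deploy the appliance VM and wait for its HTTPS endpoint to come up.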
appliance.deploy_vm
wait_for_https_alive 300
end
def upgrade
get_installation_assets
download_current_stemcells
appliance.stop_current_vm(current_name)
deploy
upload_installation_assets
wait_for_uaa
provision_stemcells
products = "all"
if config[:selected_deployments]
products = config[:selected_deployments]
elsif config[:single_tile_deploy]
products = "none"
end
OpsManager::InstallationRunner.trigger!(products).wait_for_result
end
def list_current_stemcells
JSON.parse(installation_settings).fetch('products').inject([]) do |a, p|
product_name = "stemcells"
if p['stemcell'].fetch('os') =~ /windows/i
product_name = "stemcells-windows-server"
end
if p['stemcell'].fetch('os') =~ /ubuntu-xenial/i
product_name = "stemcells-ubuntu-xenial"
end
a << { version: p['stemcell'].fetch('version'), product: product_name }
end.uniq
end
# Finds available stemcell's pivotal network release.
  # If it cannot find the exact version, it will fall back to the newest minor version available.
# #
# @param version [String] the version number, eg: '2362.17'
  # @return release_id [Integer] the pivotal network release id of the found stemcell.
def find_stemcell_release(version, product_name)
version = OpsManager::Semver.new(version)
releases = stemcell_releases(product_name).collect do |r|
{
release_id: r['id'],
version: OpsManager::Semver.new(r['version']),
}
end
releases.keep_if{ |r| r[:version].major == version.major }
exact_version = releases.select {|r| r[:version] == version }
return exact_version.first[:release_id] unless exact_version.empty?
releases_sorted_by_version = releases.sort_by{ |r| r[:version].minor }.reverse
return releases_sorted_by_version.first[:release_id] unless releases_sorted_by_version.empty?
end
# Finds stemcell's pivotal network release file.
# #
  # @param release_id [Integer] the pivotal network release id, eg: the value returned by find_stemcell_release
  # @param filename [Regex] a pattern matched against the file name, eg: /vsphere/
# @return id and name [Array] the pivotal network file ID and Filename for the matching stemcell.
def find_stemcell_file(release_id, filename, product_name)
files = JSON.parse(get_product_release_files(product_name, release_id).body).fetch('product_files')
file = files.select{ |r| r.fetch('aws_object_key') =~ filename }.first
return file['id'], file['aws_object_key'].split('/')[-1]
end
# Lists all the available stemcells in the current installation_settings.
# Downloads those stemcells.
def download_current_stemcells
print "====> Downloading existing stemcells ...".green
puts "no stemcells found".green if list_current_stemcells.empty?
FileUtils.mkdir_p current_stemcell_dir
list_current_stemcells.each do |stemcell_info|
stemcell_version = stemcell_info[:version]
product_name = stemcell_info[:product]
release_id = find_stemcell_release(stemcell_version, product_name)
accept_product_release_eula(product_name, release_id)
stemcell_regex = /vsphere/
if config[:provider] == "AWS"
stemcell_regex = /aws/
end
file_id, file_name = find_stemcell_file(release_id, stemcell_regex, product_name)
download_product_release_file(product_name, release_id, file_id, write_to: "#{current_stemcell_dir}/#{file_name}")
end
end
def new_vm_name
@new_vm_name ||= "#{config[:name]}-#{config[:desired_version]}"
end
def current_version
@current_version ||= OpsManager::Semver.new(version_from_diagnostic_report)
end
def current_name
@current_name ||= "#{config[:name]}-#{current_version}"
end
def desired_version
@desired_version ||= OpsManager::Semver.new(config[:desired_version])
end
def provision_stemcells
reset_access_token
Dir.glob("#{current_stemcell_dir}/*").each do |stemcell_filepath|
import_stemcell(stemcell_filepath)
end
end
def wait_for_uaa
puts '====> Waiting for UAA to become available ...'.green
while !uaa_available?
sleep(5)
end
end
private
def uaa_available?
res = get_ensure_availability
res.code.eql? '302' and res.body.include? '/auth/cloudfoundry'
end
def diagnostic_report
@diagnostic_report ||= get_diagnostic_report
end
def version_from_diagnostic_report
return unless diagnostic_report
version = parsed_diagnostic_report
.fetch("versions")
.fetch("release_version")
version.gsub(/.0$/,'')
end
def parsed_diagnostic_report
JSON.parse(diagnostic_report.body)
end
def current_vm_name
@current_vm_name ||= "#{config[:name]}-#{current_version}"
end
def pivnet_api
@pivnet_api ||= OpsManager::Api::Pivnet.new
end
def opsman_api
@opsman_api ||= OpsManager::Api::Opsman.new
end
def config
@config ||= OpsManager::Config::OpsmanDeployment.new(YAML.load_file(@config_file))
end
def desired_version?(version)
!!(desired_version.to_s =~/#{version}/)
end
def installation_settings
@installation_settings ||= get_installation_settings.body
end
def stemcell_releases(product_name)
JSON.parse(get_product_releases(product_name).body).fetch('releases')
end
def current_stemcell_dir
"/tmp/current_stemcells"
end
def pending_changes?
!JSON.parse(get_pending_changes.body).fetch('product_changes').empty?
end
end
| 31.479839 | 120 | 0.712566 |
a33eff63e057e702fd5f4a735d24972ffdf872df | 3,984 | c | C | Unintegrated/background/background/student-distrib/schedule.c | TaKeTube/TLEOS | b0cd42edecae5103cacb4e6f66506e05f212a25e | [
"AFL-1.1"
] | null | null | null | Unintegrated/background/background/student-distrib/schedule.c | TaKeTube/TLEOS | b0cd42edecae5103cacb4e6f66506e05f212a25e | [
"AFL-1.1"
] | null | null | null | Unintegrated/background/background/student-distrib/schedule.c | TaKeTube/TLEOS | b0cd42edecae5103cacb4e6f66506e05f212a25e | [
"AFL-1.1"
] | null | null | null | #include "schedule.h"
#include "terminal.h"
#include "syscall.h"
#include "x86_desc.h"
#include "lib.h"
/* Reference: https://wiki.osdev.org/Programmable_Interval_Timer */
/*
* pit_init
* DESCRIPTION: initialize the PIT, see schedule.h file for command details
* INPUT: none
* OUTPUT: none
* RETURN: none
 * SIDE EFFECTS: none
*/
void pit_init()
{
    /* send command word to the PIT */
    outb(PIT_CMD, PIT_CMD_PORT);
    /* send least significant byte of the reload value */
    outb(PIT_LATCH & PIT_BITMASK, PIT_CHANNEL_0);
    /* send most significant byte of the reload value */
    outb(PIT_LATCH >> PIT_MSB_OFFSET, PIT_CHANNEL_0);
/* enable interrupt */
enable_irq(PIT_IRQ);
return;
}
/*
* pit_handler
* DESCRIPTION: PIT handler, call scheduler to do scheduling
* INPUT: none
* OUTPUT: none
* RETURN: none
 * SIDE EFFECTS: none
*/
void pit_handler()
{
/*
     * this send_eoi CANNOT be placed after scheduler, because when the shell newly executed
     * by terminal switch comes back to terminal switch using the esp/ebp stored in execute,
     * the EOI would never be sent, so even with IF set to 1 all interrupts would
     * fail because the PIT has the highest priority.
*/
send_eoi(PIT_IRQ);
/* call scheduler */
scheduler();
}
/*
* scheduler
* DESCRIPTION: do scheduling, switch between current running processes in different terminals
* INPUT: none
* OUTPUT: none
* RETURN: none
 * SIDE EFFECTS: none
*/
void scheduler()
{
int i; /* loop index for finding new process' terminal */
pcb_t* curr_pcb; /* current running process' pcb */
pcb_t* next_pcb; /* next process' pcb */
uint32_t curr_process_term_id; /* current running process' terminal id */
uint32_t next_term_id; /* next process' terminal id */
uint32_t next_pid; /* next process id */
/* if curr_pid is -1, which means the first process has not executed, just return */
    /* this cannot be removed: without it the scheduler would switch to a nonexistent */
    /* context and mess everything up when the first shell hasn't been executed yet */
if(curr_pid == -1)
return;
    /* get current process' relevant info */
curr_pcb = get_pcb_ptr(curr_pid);
curr_process_term_id = curr_pcb->term_id;
next_term_id = (curr_process_term_id + 1)%TERMINAL_NUM;
/* get next process's terminal's id */
for(i = 0; i < TERMINAL_NUM && !terminals[next_term_id].is_running; i++)
next_term_id = (next_term_id + 1)%TERMINAL_NUM;
/* if process does not change, which means there is only one process running, just return */
if(next_term_id == curr_process_term_id)
return;
/* get next process's id */
next_pid = terminals[next_term_id].curr_pid;
/* set paging */
set_paging(next_pid);
/* remap video memory */
if(next_term_id == curr_term_id)
vid_remap((uint8_t *)VIDEO);
else
vid_remap(terminals[next_term_id].vid_buf);
/* get next process's pcb */
next_pcb = get_pcb_ptr(next_pid);
/* set current fd array */
cur_fd_array = next_pcb->fd_array;
/* set kernel stack pointer */
tss.esp0 = KS_BASE_ADDR - KS_SIZE * next_pid - sizeof(int32_t);
/* update current pid */
curr_pid = next_pid;
/* store current's esp, ebp */
asm volatile(" \n\
movl %%ebp, %0 \n\
movl %%esp, %1 \n\
"
: "=r"(curr_pcb->ebp), "=r"(curr_pcb->esp)
:
);
/* get next process's esp, ebp */
asm volatile(" \n\
movl %0, %%ebp \n\
movl %1, %%esp \n\
"
:
: "r"(next_pcb->ebp), "r"(next_pcb->esp)
);
}
| 30.646154 | 102 | 0.581074 |
6d9366b97d47f4bad9c9d7c7ad4e0c73c47f88d6 | 11,485 | c | C | Lab2/CLCD_Lab.c | WhiteHyun/Embedded-Systems | 97bcccd11d402ce9e4e30f46361b7a2fadf014b2 | [
"MIT"
] | null | null | null | Lab2/CLCD_Lab.c | WhiteHyun/Embedded-Systems | 97bcccd11d402ce9e4e30f46361b7a2fadf014b2 | [
"MIT"
] | null | null | null | Lab2/CLCD_Lab.c | WhiteHyun/Embedded-Systems | 97bcccd11d402ce9e4e30f46361b7a2fadf014b2 | [
"MIT"
] | null | null | null | #include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <wiringPi.h>
#define LCD_D4 2
#define LCD_D5 3
#define LCD_D6 1
#define LCD_D7 4
#define LCD_RS 7
#define LCD_EN 0
#define BTN_PLUS 5 //#102
#define BTN_MINUS 6 //#103
#define BTN_0 27 //#98
#define BTN_1 22 //#100
#define BTN_2 23 //#108
#define BTN_3 26 //#99
#define BTN_4 14 //SCLK
#define BTN_5 21 //#101
#define BTN_6 11 //#118
#define BTN_7 12 //MOSI
#define BTN_8 13 //MISO
#define BTN_9 10     //CE0
#define BTN_EQUAL 24 //#97
#define OVERFLOW 0
#define INVALID_OPERATION 1
void write4bits(unsigned char command);                              //write 4 bits at a time to the LCD
void sendDataCmd4(unsigned char data);                               //send an 8-bit value as two 4-bit nibbles
void putCmd4(unsigned char cmd);                                     //send a command byte
void putChar(char c);                                                //write one character to the CLCD
void initialize_textlcd(int *inputSet, int *outputSet);              //initial setup function
int Input(int *inputSet);                                            //read one button press
void waitForEnter(char *expression, char *inputChar, int *inputSet); //collect and process the entered expression
long long calculate(bool plusOrMinus, char *start, long long sum);   //evaluate the given expression term
void printResult(char *expression);                                  //print the calculation result
void errorPrint(int errno);                                          //error handling function
int buffer = 0;     //incremented each time a screen cell is used; reports overflow once it exceeds 32
bool debug = false; //debug mode (user or debug mode is chosen from the argument passed to main)
int main(int argc, char **argv)
{
if (argc > 2)
{
printf("Error! Only one parameter can be received.");
return 0;
}
else if (argc == 2)
{
if (strcmp(argv[1], "1") != 0)
{
printf("Error! Only '1' can be input.");
return 0;
}
else
debug = true;
}
int inputSet[13] = {BTN_PLUS, BTN_MINUS, BTN_EQUAL, BTN_0, BTN_1, BTN_2, BTN_3, BTN_4, BTN_5, BTN_6, BTN_7, BTN_8, BTN_9};
char inputChar[13] = {'+', '-', '=', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9'};
int outputSet[6] = {LCD_D4, LCD_D5, LCD_D6, LCD_D7, LCD_RS, LCD_EN};
char expression[32] = {
0,
};
int i;
wiringPiSetup();
initialize_textlcd(inputSet, outputSet);
/* Calculate */
while (true)
{
        buffer = 0;              //clear the CLCD output position counter
        for (i = 0; i < 32; i++) //reset the expression buffer
            expression[i] = 0;
        //read the expression from the button pad
        waitForEnter(expression, inputChar, inputSet);
        //evaluate the expression and print the result
        printResult(expression);
        //wait for the next key press, then start over
while (!(digitalRead(BTN_0) || digitalRead(BTN_1) || digitalRead(BTN_2) ||
digitalRead(BTN_3) || digitalRead(BTN_4) || digitalRead(BTN_5) ||
digitalRead(BTN_6) || digitalRead(BTN_7) || digitalRead(BTN_8) || digitalRead(BTN_9)))
{
delay(35);
}
while (digitalRead(BTN_0) || digitalRead(BTN_1) || digitalRead(BTN_2) ||
digitalRead(BTN_3) || digitalRead(BTN_4) || digitalRead(BTN_5) ||
digitalRead(BTN_6) || digitalRead(BTN_7) || digitalRead(BTN_8) || digitalRead(BTN_9))
{
delay(35);
}
}
return 0;
}
void write4bits(unsigned char command)
{
digitalWrite(LCD_D4, (command & 1));
command >>= 1;
digitalWrite(LCD_D5, (command & 1));
command >>= 1;
digitalWrite(LCD_D6, (command & 1));
command >>= 1;
digitalWrite(LCD_D7, (command & 1));
digitalWrite(LCD_EN, 1);
delayMicroseconds(10);
digitalWrite(LCD_EN, 0);
delayMicroseconds(10);
}
void sendDataCmd4(unsigned char data)
{
write4bits(((data >> 4) & 0x0f));
write4bits((data & 0x0f));
delayMicroseconds(100);
}
void putCmd4(unsigned char cmd)
{
digitalWrite(LCD_RS, 0);
sendDataCmd4(cmd);
}
void putChar(char c)
{
digitalWrite(LCD_RS, 1);
sendDataCmd4(c);
}
void initialize_textlcd(int *inputSet, int *outputSet)
{
int i;
    //initialize the CLCD pins
for (i = 0; i < 6; i++)
{
pinMode(outputSet[i], OUTPUT);
digitalWrite(outputSet[i], 0);
}
delay(35);
pinMode(BTN_MINUS, INPUT);
pullUpDnControl(BTN_MINUS, PUD_UP);
pinMode(BTN_PLUS, INPUT);
pullUpDnControl(BTN_PLUS, PUD_UP);
for (i = 2; i < 13; i++)
{
pinMode(inputSet[i], INPUT);
pullUpDnControl(inputSet[i], PUD_DOWN);
}
delay(35);
    putCmd4(0x28); // 4-bit, 2-line mode (DL = 0, N = 1)
    putCmd4(0x28);
    putCmd4(0x28);
    putCmd4(0x0e); // display on, cursor on (D = 1, C = 1, B = 0)
    putCmd4(0x02); // cursor home
    delay(2);
    putCmd4(0x01); // clear display
putCmd4(0x01);
delay(2);
printf("initialize_done\n");
}
int Input(int *inputSet)
{
int flag = 0x0000;
    int i, state = 0; //state: whether a button press has been captured
while (true)
{
        //while a button is being pressed
if ((!digitalRead(BTN_PLUS)) || (!digitalRead(BTN_MINUS)) || digitalRead(BTN_EQUAL) || digitalRead(BTN_0) ||
digitalRead(BTN_1) || digitalRead(BTN_2) || digitalRead(BTN_3) ||
digitalRead(BTN_4) || digitalRead(BTN_5) || digitalRead(BTN_6) ||
digitalRead(BTN_7) || digitalRead(BTN_8) || digitalRead(BTN_9))
{
delay(10);
            if (state == 0) //no input captured yet
{
for (i = 0; i < 13; i++)
{
if (i < 2 && !digitalRead(inputSet[i]))
{
flag |= 1 << i;
break;
}
else if (i >= 2 && digitalRead(inputSet[i]))
{
flag |= 1 << i;
break;
}
}
                state = 1; //mark the input as captured
}
}
        //when the button is released
else if (!((!digitalRead(BTN_PLUS)) || (!digitalRead(BTN_MINUS)) || digitalRead(BTN_EQUAL) || digitalRead(BTN_0) ||
digitalRead(BTN_1) || digitalRead(BTN_2) || digitalRead(BTN_3) ||
digitalRead(BTN_4) || digitalRead(BTN_5) || digitalRead(BTN_6) ||
digitalRead(BTN_7) || digitalRead(BTN_8) || digitalRead(BTN_9)))
{
            if (state == 1) //an input has already been captured
{
delay(10);
break;
}
}
}
return flag;
}
void waitForEnter(char *expression, char *inputChar, int *inputSet)
{
    int flag; //0b0 0000 0000 0000
    //set while the previous key was an operator (used to reject consecutive operators)
bool overlap = false;
int i;
int index = 0;
    putCmd4(0x01); // clear display
//Wait for push
while (true)
{
        //read the next key through the Input function
flag = Input(inputSet);
for (i = 0; i < 13; i++)
{
            //operator key
if (i < 2 && ((flag & (0x0001 << i)) != 0))
{
                if (overlap) //the previous key was already an operator
goto fail;
overlap = true;
if (debug)
printf("'%c' input\n", inputChar[i]);
break;
}
            //equals ('=') key
if (i == 2 && ((flag & (0x0001 << i)) != 0))
{
if (overlap)
goto fail;
                //echo the '=' to the CLCD and append it to the expression
if (debug)
printf("'=' input\n");
break;
}
            //operand (digit) key
else if (i > 2 && ((flag & (0x0001 << i)) != 0))
{
overlap = false;
if (debug)
printf("'%c' input\n", inputChar[i]);
break;
}
}
        /*store the key and echo it on the CLCD*/
putChar(inputChar[i]);
expression[index++] = inputChar[i];
buffer++;
delay(35);
        /*display position control*/
        if (buffer == 16) //the first line is full
            putCmd4(0xC0); //move the cursor to row 2, column 1
        //overflow error
        else if (buffer > 32) //output no longer fits the two-line display
{
printf("buffer = %d\n", buffer);
buffer = 0;
errorPrint(OVERFLOW);
return;
}
        //leave the input loop once '=' has been entered
if (i == 2)
break;
}
if (debug)
{
printf("successfuly Entered\n");
printf("expression %s\n", expression);
}
return;
fail:
    //erase the expression string
for (i = 0; expression[i] != 0; i++)
expression[i] = 0;
buffer = 0;
    //report the error
errorPrint(INVALID_OPERATION);
return;
}
long long calculate(bool plusOrMinus, char *start, long long sum)
{
if (plusOrMinus)
sum += strtol(start, NULL, 10);
else
sum -= strtol(start, NULL, 10);
return sum;
}
void printResult(char *expression)
{
if (expression[0] == 0)
return;
int i;
long long sum = 0;
    char *start = expression; //points at the start of the current operand
bool plusOrMinus = true;
if (debug)
{
printf("before expression\n");
if (strlen(expression) >= 32) //overflow error
printf("OverFlow Error\n");
else
for (i = 0; expression[i] != 0; i++)
printf("%c", expression[i]);
printf("\n");
}
for (i = 0; expression[i] != 0; i++)
{
if (expression[i] == '+')
{
if (i == 0)
continue;
sum = calculate(plusOrMinus, start, sum);
plusOrMinus = true;
            start = expression + i + 1; //point just past the '+'
continue;
}
else if (expression[i] == '-')
{
if (i == 0)
continue;
sum = calculate(plusOrMinus, start, sum);
plusOrMinus = false;
            start = expression + i + 1; //point just past the '-'
continue;
}
else if (expression[i] == '=')
{
if (debug)
printf("===========equal input============\n");
sum = calculate(plusOrMinus, start, sum);
break;
}
}
if (debug)
printf("sum = %lld\n", sum);
for (i = 0; expression[i] != 0; i++)
expression[i] = 0;
sprintf(expression, "%lld", sum);
    //print the result on the CLCD
for (i = 0; expression[i] != 0; i++)
{
if (debug)
printf("expression[%d] = %c\n", i, expression[i]);
putChar(expression[i]);
buffer++;
if (buffer == 16)
            putCmd4(0xC0); //move the cursor to row 2, column 1
//overflow error
else if (buffer > 32)
{
if (debug)
printf("Interrupt, buffer = %d\n", buffer);
errorPrint(OVERFLOW);
return;
}
}
}
void errorPrint(int errno)
{
int i;
char *err[3] = {" Overflow",
" Invalid",
" operation"};
    putCmd4(0x02); // cursor home
    delay(2);
    putCmd4(0x01); // clear display
delay(2);
if (errno == OVERFLOW)
{
printf("OVERFLOW ERROR TRAPED, errno = %d\n", errno);
for (i = 0; err[0][i] != 0; i++)
putChar(err[0][i]);
}
    else if (errno == INVALID_OPERATION)
{
printf("INVALID_OPERATION ERROR TRAPED, errno = %d\n", errno);
for (i = 0; err[1][i] != 0; i++)
putChar(err[1][i]);
putCmd4(0xC0);
for (i = 0; err[2][i] != 0; i++)
putChar(err[2][i]);
}
delay(2000);
    putCmd4(0x01); // clear display
    putCmd4(0x02); // cursor home
} | 28.428218 | 126 | 0.480453 |
23d77ac2461d1c0ef3223e1c91e5337293bb65fd | 7,439 | js | JavaScript | public/javascripts/schedule.js | whatevercamps/Rentsy | 4aa73fdec0497bdae02ab42941c65685b422f282 | [
"MIT"
] | null | null | null | public/javascripts/schedule.js | whatevercamps/Rentsy | 4aa73fdec0497bdae02ab42941c65685b422f282 | [
"MIT"
] | null | null | null | public/javascripts/schedule.js | whatevercamps/Rentsy | 4aa73fdec0497bdae02ab42941c65685b422f282 | [
"MIT"
] | 3 | 2020-03-03T03:48:16.000Z | 2020-03-03T15:02:30.000Z | var calendar;
/**
* [Function to load a JavaScript Calendar]
* https://fullcalendar.io
*/
document.addEventListener("DOMContentLoaded", function () {
var calendarEl = document.getElementById("calendar");
//dayGrid dayGridWeek dayGridMonth listWeek
var view = "dayGridMonth";
// eslint-disable-next-line no-undef
calendar = new FullCalendar.Calendar(calendarEl, {
buttonText: {
today: "Hoy"
},
noEventsMessage: "No hay eventos este día",
eventLimit: true,
nowIndicator: true,
themeSystem: "bootstrap",
plugins: ["bootstrap", "timeGrid", "dayGrid", "list"],
bootstrapFontAwesome: true,
customButtons: {
day: {
text: "Dia",
click: function () {
view = "dayGrid";
calendar.changeView("dayGrid");
}
},
week: {
text: "Semana",
click: function () {
view = "dayGridWeek";
calendar.changeView("timeGridWeek");
}
},
month: {
text: "Mes",
click: function () {
calendar.changeView("dayGridMonth");
}
},
list: {
text: "Lista",
click: function () {
calendar.changeView("listWeek");
}
}
},
header: {
close: "fa-times",
prev: "fa-chevron-left",
next: "fa-chevron-right",
prevYear: "fa-angle-double-left",
nextYear: "fa-angle-double-right",
left: "prev,next today",
center: "title",
right: "day,week,month,list"
},
defaultView: view,
locale: "es-ES",
eventClick: function(info) {
loadModal(info.event);
console.log(info.event);
console.log(info.event.extendedProps);
},
events: [{
title: "Objeto 1",
start: "2020-02-22T14:30:00",
extendedProps: {
estado: "Arrendado",
arrendatario: "[email protected]",
arrendador: "Juan Sebastián Bravo",
id: 1
},
},
{
title: "Calcuadora Texas",
start: "2020-02-22T07:00:00",
backgroundColor: "green",
borderColor: "green"
}
],
});
calendar.render();
});
/**
 * [loadModal function that shows a modal window with the info of the selected object in the Calendar]
 * @param {[Object]} event [this parameter has the info of the object in the Calendar]
*/
const loadModal = (event) =>
{
console.log("event", event.extendedProps);
$(".modal-title")[0].innerHTML = event.title;
$("#arrendador").val(event.extendedProps.arrendador);
$("#descripcion").val(event.extendedProps.description);
$("#precioHora").val(event.extendedProps.priceHour);
$("#precioDia").val(event.extendedProps.priceDay);
$("#guardar").attr("formaction", "/update/"+event.extendedProps._id);
$("#borrar").attr("formaction", "/delete/"+event.extendedProps._id);
if(event.extendedProps.arrendador)
{
$("#descripcion").attr("disabled", true);
$("#precioHora").attr("disabled", true);
$("#precioDia").attr("disabled", true);
$("#borrar").attr("disabled", true);
$("#guardar").attr("disabled", true);
}
else
{
$("#descripcion").removeAttr("disabled");
$("#precioHora").removeAttr("disabled");
$("#precioDia").removeAttr("disabled");
$("#borrar").removeAttr("disabled");
$("#guardar").removeAttr("disabled");
}
$(".modal").show();
};
/**
 * [loadObject function that renders the events of the selected object in the Calendar]
 * @param {[int]} id [the id of one of the objects that the user owns]
*/
const loadObject = (id) =>
{
calendar.destroy();
fetch("/get/"+id)
.then(response => response.json())
.then(data => {
data = data[0];
var calendarEl = document.getElementById("calendar");
//dayGrid dayGridWeek dayGridMonth listWeek
var view = "dayGridMonth";
if(data.events){
data.events.forEach(function(part, index) {
data.events[index]._id = "5e5865f875c48d1126e209b6";
data.events[index].name = "Calculadora Texas";
data.events[index].description = "Arriendo calculadora texas";
data.events[index].priceHour = "2000";
data.events[index].priceDay = "5000";
});}
calendar = new FullCalendar.Calendar(calendarEl, {
buttonText: {
today: "Hoy"
},
noEventsMessage: "No hay eventos este día",
eventLimit: true,
nowIndicator: true,
themeSystem: "bootstrap",
plugins: ["bootstrap", "timeGrid", "dayGrid", "list"],
bootstrapFontAwesome: true,
customButtons: {
day: {
text: "Dia",
click: function () {
view = "dayGrid";
calendar.changeView("dayGrid");
}
},
week: {
text: "Semana",
click: function () {
view = "dayGridWeek";
calendar.changeView("timeGridWeek");
}
},
month: {
text: "Mes",
click: function () {
calendar.changeView("dayGridMonth");
}
},
list: {
text: "Lista",
click: function () {
calendar.changeView("listWeek");
}
}
},
header: {
close: "fa-times",
prev: "fa-chevron-left",
next: "fa-chevron-right",
prevYear: "fa-angle-double-left",
nextYear: "fa-angle-double-right",
left: "prev,next today",
center: "title",
right: "day,week,month,list"
},
defaultView: view,
locale: "es-ES",
eventClick: function(info) {
console.log(info);
loadModal(info.event);
console.log(info.event);
console.log(info.event.extendedProps);
},
events: data.events,
});
calendar.render();
})
.catch(err => {
console.log(err);
// Do something for an error here
});
};
/**
* [closeModal function to hide the modal window]
*/
const closeModal = () => $(".modal").hide();
$(".modal").on("shown.bs.modal", function () {
$("#myInput").trigger("focus");
});
| 33.062222 | 95 | 0.454228 |
3e93b93a7f06d9b42c7db57d69b6658c2631164c | 480 | kt | Kotlin | token-support-azure-exchange/src/main/kotlin/no/nav/tms/token/support/azure/exchange/config/cache/AccessTokenEntry.kt | navikt/tms-ktor-token-support | 9af53ab1fe7637918f8cd8612a76c213f07d34cf | [
"MIT"
] | 3 | 2021-11-23T19:12:42.000Z | 2021-12-10T11:31:05.000Z | token-support-azure-exchange/src/main/kotlin/no/nav/tms/token/support/azure/exchange/config/cache/AccessTokenEntry.kt | navikt/tms-ktor-token-support | 9af53ab1fe7637918f8cd8612a76c213f07d34cf | [
"MIT"
] | 2 | 2021-02-23T15:34:15.000Z | 2021-12-01T09:58:52.000Z | token-support-azure-exchange/src/main/kotlin/no/nav/tms/token/support/azure/exchange/config/cache/AccessTokenEntry.kt | navikt/tms-ktor-token-support | 9af53ab1fe7637918f8cd8612a76c213f07d34cf | [
"MIT"
] | 1 | 2021-04-14T12:09:39.000Z | 2021-04-14T12:09:39.000Z | package no.nav.tms.token.support.azure.exchange.config.cache
import no.nav.tms.token.support.azure.exchange.consumer.AzureTokenResponse
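/** Cached Azure access token together with the number of seconds until it expires. */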
internal data class AccessTokenEntry(
val accessToken: String,
val expiresInSeconds: Long
) {
companion object {
fun fromResponse(response: AzureTokenResponse) = AccessTokenEntry (
accessToken = response.accessToken,
expiresInSeconds = response.expiresIn.toLong()
)
}
}
| 30 | 75 | 0.69375 |
c05f14ebebff8a989901ff5eb515a64cfa74d74d | 799 | rs | Rust | src/color.rs | msherwood21/raytrace | 0290a252f1188b6afc0a94aaaee09cc12e0f91e1 | [
"Unlicense"
] | null | null | null | src/color.rs | msherwood21/raytrace | 0290a252f1188b6afc0a94aaaee09cc12e0f91e1 | [
"Unlicense"
] | null | null | null | src/color.rs | msherwood21/raytrace | 0290a252f1188b6afc0a94aaaee09cc12e0f91e1 | [
"Unlicense"
] | null | null | null | use crate::rtweekend;
use crate::vec3;
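/// Averages the accumulated color over the sample count, applies gamma-2.0 correction, and writes one "R G B" line.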
pub fn write_color(out: &mut dyn std::io::Write, pixel_color: vec3::Color, samples_per_pixel: i32) {
let mut r = pixel_color.x();
let mut g = pixel_color.y();
let mut b = pixel_color.z();
// Divide the color by the number of samples and gamma-correct for gamma=2.0.
let scale = 1.0 / f64::from(samples_per_pixel);
r = (scale * r).sqrt();
g = (scale * g).sqrt();
b = (scale * b).sqrt();
// Write the translated [0,255] value of each color component.
write!(
out,
"{} {} {}\n",
(256.0 * rtweekend::clamp(r, 0.0, 0.999)) as i32,
(256.0 * rtweekend::clamp(g, 0.0, 0.999)) as i32,
(256.0 * rtweekend::clamp(b, 0.0, 0.999)) as i32
)
.expect("failed to output color line");
}
| 31.96 | 100 | 0.578223 |
c9ebb6cfc7062f5fb8c14f21547025e06c4c1b63 | 10,994 | lua | Lua | levee/core/hub.lua | thelinuxkid/levee | 947847fd9e4bf67333f285f13c92e47441b0f8ce | [
"BSD-2-Clause"
] | 11 | 2018-11-05T20:32:06.000Z | 2020-08-16T09:32:17.000Z | levee/core/hub.lua | thelinuxkid/levee | 947847fd9e4bf67333f285f13c92e47441b0f8ce | [
"BSD-2-Clause"
] | null | null | null | levee/core/hub.lua | thelinuxkid/levee | 947847fd9e4bf67333f285f13c92e47441b0f8ce | [
"BSD-2-Clause"
] | 3 | 2018-09-12T03:53:11.000Z | 2021-03-08T04:34:07.000Z | local ffi = require('ffi')
local C = ffi.C
local errors = require("levee.errors")
local _ = require("levee._")
local d = require("levee.d")
local message = require("levee.core.message")
local log = _.log.Log("levee.core.hub")
local State_mt = {}
State_mt.__index = State_mt
function State_mt:recv(timeout)
if self.value then
local value = self.value
self.value = nil
return nil, nil, value
end
self.co = coroutine.running()
local err, sender, value = self.hub:pause(timeout)
self.co = nil
return err, sender, value
end
function State_mt:set(value)
if not self.co then
self.value = value
return
end
local co = self.co
self.co = nil
self.hub:_coresume(co, nil, nil, value)
end
local function State(hub)
local self = setmetatable({hub=hub}, State_mt)
return self
end
local Trace_mt = {}
Trace_mt.__index = Trace_mt
function Trace_mt:pprint(state)
local function d(stack, i)
print(("%s%-50s %3s %3s %3s %10.2f"):format(
("| "):rep(i),
stack.f,
stack.spawned,
stack.term,
stack.n,
stack.took / (1000 * 1000)))
end
local function p(stack, i)
i = i or 0
d(stack, i)
local totals = {f = "-"}
for k, v in pairs(stack) do
if k ~= "f" and k ~= "tree" then totals[k] = v end
end
if next(stack.tree) then
for name, substack in pairs(stack.tree) do
local subtotals = p(substack, i + 1)
for k, v in pairs(subtotals) do
if k ~= "f" and k ~= "tree" then totals[k] = totals[k] + v end
end
end
d(totals, i)
end
return totals
end
state = state or self.state
p(state.stacks[state.main])
end
function Trace_mt:capture(f, co)
local info = debug.getinfo(f)
local source = ("%s:%s"):format(info.short_src, info.linedefined)
local parent = self.threads[coroutine.running()]
local stack = parent.tree[source] or {
f = source,
spawned = 0,
term = 0,
n = 0,
took = 0,
tree = {}, }
parent.tree[source] = stack
self.threads[co] = stack
stack.spawned = stack.spawned + 1
self.state.spawned = self.state.spawned + 1
end
function Trace_mt:context(f)
if not self.threads or not self.threads[coroutine.running()] then
return f()
end
local info = debug.getinfo(2)
local source = ("%s:%s"):format(info.short_src, info.currentline)
local parent = self.threads[coroutine.running()]
local stack = parent.tree[source] or {
f = source,
spawned = 0,
term = 0,
n = 0,
took = 0,
tree = {}, }
parent.tree[source] = stack
self.threads[coroutine.running()] = stack
local ret = {f()}
self.threads[coroutine.running()] = parent
return unpack(ret)
end
function Trace_mt:patch()
self.save = {
_coresume = self.r.hub._coresume,
spawn = self.r.hub.spawn,
spawn_later = self.r.hub.spawn_later, }
self.r.hub._coresume = function(hub, co, err, sender, value)
local took = _.time.Timer()
self.save._coresume(hub, co, err, sender, value)
took:finish()
if self.state and self.threads[co] then
local stack = self.threads[co]
stack.n = stack.n + 1
stack.took = stack.took + tonumber(took:nanoseconds())
-- clean up when a thread completes
if coroutine.status(co) == "dead" then
stack.term = stack.term + 1
self.state.term = self.state.term + 1
self.threads[co] = nil
end
end
end
self.r.hub.spawn = function(hub, f, a)
local co = coroutine.create(f)
self:capture(f, co)
hub.ready:push({co, a})
hub:continue()
end
self.r.hub.spawn_later = function (hub, ms, f)
local co = coroutine.create(f)
self:capture(f, co)
ms = hub.poller:abstime(ms)
hub.scheduled:push(ms, co)
end
end
function Trace_mt:restore()
for k, v in pairs(self.save) do self.r.hub[k] = v end
self.save = nil
end
function Trace_mt:start()
assert(not self.state)
self.threads = {}
self.state = {}
self.state.spawned = 0
self.state.term = 0
self.state.stacks = {}
self.state.main = self.main
self.state.stacks[self.main] = {
f = self.main,
spawned = 1,
term = 0,
n = 0,
took = 0,
tree = {}, }
self.threads[coroutine.running()] = self.state.stacks[self.main]
self:patch()
end
function Trace_mt:stop()
self:restore()
self.threads = nil
self.state = nil
end
local function Trace(hub)
local self = setmetatable({}, Trace_mt)
self.r = setmetatable({}, {__mode="v"})
self.r.hub = hub
local info = debug.getinfo(3)
self.main = ("%s:%s"):format(info.short_src, info.currentline)
return self
end
local Hub_mt = {}
Hub_mt.__index = Hub_mt
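-- channel constructors: each returns a linked sender/recver pair bound to this hub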
function Hub_mt:pipe()
local sender = message.Sender(self)
local recver = message.Recver(self)
sender.recver = recver
recver.sender = sender
return sender, recver
end
function Hub_mt:value(value)
local sender = message.Value(self, value)
local recver = message.Recver(self)
sender.recver = recver
recver.sender = sender
return sender, recver
end
function Hub_mt:flag(value)
local sender = message.Flag(self, value)
local recver = message.Recver(self)
sender.recver = recver
recver.sender = sender
return sender, recver
end
function Hub_mt:gate()
local sender = message.Gate(self)
local recver = message.Recver(self)
sender.recver = recver
recver.sender = sender
return sender, recver
end
function Hub_mt:queue(size)
local sender = message.Sender(self)
local recver = message.Queue(self, size)
sender.recver = recver
recver.sender = sender
return sender, recver
end
function Hub_mt:stalk(size)
local sender = message.Sender(self)
local recver = message.Stalk(self, size)
sender.recver = recver
recver.sender = sender
return sender, recver
end
function Hub_mt:selector()
return message.Selector(self)
end
function Hub_mt:router()
local sender = message.Router(self)
local recver = message.Recver(self)
sender.recver = recver
recver.sender = sender
return sender, recver
end
function Hub_mt:dealer()
local sender = message.Sender(self)
local recver = message.Dealer(self)
sender.recver = recver
recver.sender = sender
return sender, recver
end
function Hub_mt:broadcast()
return message.Broadcast(self)
end
function Hub_mt:pool(factory, size)
return message.Pool(self, factory, size)
end
function Hub_mt:_coresume(co, err, sender, value)
if co ~= self._pcoro then
local status, target = coroutine.resume(co, err, sender, value)
if not status then
log:fatal(debug.traceback(co) .. "\n\n" .. target)
end
else
coroutine.yield(err, sender, value)
end
end
function Hub_mt:_coyield(co, err, sender, value)
if coroutine.running() ~= self._pcoro then
return coroutine.yield(co, err, sender, value)
end
local status, err, sender, value = coroutine.resume(
self.loop, co, err, sender, value)
if not status then
log:fatal(("%s\n\n%s"):format(debug.traceback(self.loop), err))
end
return err, sender, value
end
function Hub_mt:spawn(f, a)
local co = coroutine.create(f)
self.ready:push({co, a})
self:continue()
end
function Hub_mt:spawn_later(ms, f)
local co = coroutine.create(f)
ms = self.poller:abstime(ms)
self.scheduled:push(ms, co)
end
function Hub_mt:spawn_every(iter, f)
self:spawn(function()
for item in iter do self:spawn(f, item) end
end)
end
function Hub_mt:sleep(ms)
ms = self.poller:abstime(ms)
self.scheduled:push(ms, coroutine.running())
self:_coyield()
end
function Hub_mt:pause(ms)
if not ms then return self:_coyield() end
ms = self.poller:abstime(ms)
local timeout = self.scheduled:push(ms, coroutine.running())
local err, sender, value = self:_coyield()
if err ~= errors.TIMEOUT then
timeout:remove()
end
return err, sender, value
end
function Hub_mt:resume(co, err, sender, value)
self.ready:push({co, err, sender, value})
end
function Hub_mt:continue()
self.ready:push({coroutine.running()})
self:_coyield()
end
function Hub_mt:register(no, r, w)
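	-- register fd 'no' with the poller; returns read/write event states used to signal readiness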
local r_ev = r and State(self)
local w_ev = w and State(self)
self.registered[no] = {r_ev, w_ev}
self.poller:register(no, r, w)
return r_ev, w_ev
end
local function EVStub()
return {
recv = function() return 1 end,
set = function() end,
}
end
function Hub_mt:register_nopoll(no, r, w)
local r_ev = r and EVStub()
local w_ev = w and EVStub()
self.registered[no] = {r_ev, w_ev}
return r_ev, w_ev
end
function Hub_mt:unregister(no)
local r = self.registered[no]
if r then
table.insert(self.closing, no)
-- this is only needed if a platform doesn't remove an fd from a poller on
-- fd close
-- TODO: detect r, w
-- self.poller:unregister(no, r, w)
if r[1] then r[1]:set(-1) end
if r[2] then r[2]:set(-1) end
self.registered[no] = nil
end
end
function Hub_mt:in_use()
for no in pairs(self.registered) do
if not self.dialer.state or
(no ~= self.dialer.r and no ~= self.dialer.state.io[1]) then
return true
end
end
return false
end
function Hub_mt:pump()
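	-- one iteration of the event loop: run ready coroutines, close pending fds, poll, then dispatch timeouts and io events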
local num = #self.ready
for _ = 1, num do
local co, err, sender, value = unpack(self.ready:pop())
self:_coresume(co, err, sender, value)
end
local timeout
if #self.ready > 0 then
timeout = 0
else
timeout = self.scheduled:peek()
end
if #self.closing > 0 then
for i = 1, #self.closing do
C.close(self.closing[i])
end
self.closing = {}
end
local err, events, n = self.poller:poll(timeout)
assert(not err)
while true do
local timeout = self.scheduled:peek()
if not timeout or self.poller:reltime(timeout) > 0 then
break
end
local ms, co = self.scheduled:pop()
self:_coresume(co, errors.TIMEOUT)
end
for i = 0, n - 1 do
local no, c_ev, s_ev, r_ev, w_ev, e_ev = events[i]:value()
if c_ev then
self.thread.chan:pump()
elseif s_ev then
self.signal:trigger(no)
else
local r = self.registered[no]
if r then
if not e_ev then
if r_ev then r[1]:set(1) end
if w_ev then r[2]:set(1) end
else
if r[1] then r[1]:set(-1) end
if r[2] then r[2]:set(-1) end
end
end
end
end
end
function Hub_mt:main()
while true do
self:pump()
end
end
local function Hub(options)
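	-- build a hub: poller, ready queue and timer heap, plus the levee subsystems (io, signal, process, thread, net, http)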
options = options or {}
local self = setmetatable({}, Hub_mt)
self.ready = d.Fifo()
self.scheduled = d.Heap()
self.registered = {}
self.poller = _.poller()
self.closing = {}
self._pcoro = coroutine.running()
self.loop = coroutine.create(function()
local status, err = xpcall(
function() return self:main() end,
function(err) return debug.traceback() .. "\n\n" .. err end)
if not status then
log:fatal(err .. "\n\nmain loop crashed")
end
end)
self.io = require("levee.core.io")(self)
self.signal = require("levee.core.signal")(self)
self.process = require("levee.core.process")(self)
self.thread = require("levee.core.thread")(self)
self.stream = require("levee.net.stream")(self)
self.dgram = require("levee.net.dgram")(self)
self.dialer = require("levee.net.dialer")(self)
self.dns = require("levee.net.dns")(self)
self.tcp = self.stream
self.http = require("levee.p.http")(self)
self.consul = require("levee.app.consul")(self)
self.trace = Trace(self)
if options.trace then self.trace:start() end
return self
end
return Hub
| 19.667263 | 76 | 0.681008 |
687d960e24e0d54893d3e22400d366da6a6f53dd | 7,652 | sql | SQL | PAK_XSLT_LOG_1.sql | mirmas/InteractivePrints | 26afbc05bcd379b6412dd06459f17124daaf1916 | [
"MIT"
] | 4 | 2018-07-17T14:34:33.000Z | 2020-09-18T13:51:16.000Z | PAK_XSLT_LOG_1.sql | mirmas/InteractivePrints | 26afbc05bcd379b6412dd06459f17124daaf1916 | [
"MIT"
] | null | null | null | PAK_XSLT_LOG_1.sql | mirmas/InteractivePrints | 26afbc05bcd379b6412dd06459f17124daaf1916 | [
"MIT"
] | 2 | 2019-12-30T20:31:22.000Z | 2020-08-04T06:36:03.000Z | -- This project using the following MIT License:
--
-- The MIT License (MIT)
--
-- Copyright (c) 2021 Mirmas IC
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-- copies of the Software, and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in all
-- copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-- SOFTWARE.
CREATE OR REPLACE PACKAGE BODY "PAK_XSLT_LOG" AS
/* set level of logging:
1 - log just errors
2 - log errors and warnings (default)
3 - log all (informations, errors and warnings)*/
PROCEDURE SetLevel(P_LEVEL NUMBER) is
begin
g_level:= P_LEVEL;
end;
function GetProcedure(p_call_stack varchar2)
return varchar2
as
l_start number;
l_end number;
begin
l_start := instr(p_call_stack,chr(10),1,5);
l_end := instr(p_call_stack,chr(10),1,6);
if nvl(l_start, 0) = 0 then
return null;
end if;
if l_end > 0 then
return substr(p_call_stack, l_start, l_end-l_start);
else
return substr(p_call_stack, l_start);
end if;
return null;
end;
/* Write to PAK_XSLT_LOG */
PROCEDURE WriteLog
( P_DESCRIPTION VARCHAR2 DEFAULT 'Information'
, P_LOG_TYPE VARCHAR2 DEFAULT g_information
, p_apex_debug_level NUMBER DEFAULT 7
, P_PROCEDURE VARCHAR2 DEFAULT NULL
, P_SQLERRM VARCHAR2 DEFAULT NULL
, P_ERR_NUM NUMBER DEFAULT NULL
, p_start_time PLS_INTEGER DEFAULT NULL
) IS
PRAGMA AUTONOMOUS_TRANSACTION;
l_WriteLog boolean;
l_apex_debug_level number;
l_secs number;
l_description varchar2(4000) := substr(p_description,1,3900);
BEGIN
l_apex_debug_level := p_apex_debug_level;
l_WriteLog:=false;
if g_level = 1 and P_LOG_TYPE = g_error then
l_WriteLog:=true;
elsif g_level = 2 and P_LOG_TYPE in (g_error, g_warning) then
l_WriteLog:=true;
elsif g_level = 3 then
l_WriteLog:=true;
end if;
if l_WriteLog then
if p_start_time is not null then
l_secs := (dbms_utility.get_time() - p_start_time) / 100;
l_description := l_description||' time: '||to_char(l_secs);
end if;
if p_log_type = g_error then
l_apex_debug_level := 1; --If error then set most important APEX debug level
apex_debug_message.log_message( substr(dbms_utility.format_error_stack,1,3900), p_level => l_apex_debug_level );
apex_debug_message.log_message( substr(dbms_utility.format_error_backtrace,1,3900), p_level => l_apex_debug_level );
end if;
apex_debug_message.log_message(substr(substr(GetProcedure(DBMS_UTILITY.FORMAT_CALL_STACK),1,200)
||' '||l_description,1,3900), p_level => l_apex_debug_level);
$IF CCOMPILING.g_logger_exists $THEN
l_description := 'Interactive Prints-'||l_description;
if p_log_type = g_error then
logger.log_error (l_description, substr(nvl(p_procedure, GetProcedure(DBMS_UTILITY.FORMAT_CALL_STACK)),1,200));
elsif p_log_type = g_warning then
logger.log_warning (l_description, substr(nvl(p_procedure, GetProcedure(DBMS_UTILITY.FORMAT_CALL_STACK)),1,200));
else
logger.log(l_description, substr(nvl(p_procedure, GetProcedure(DBMS_UTILITY.FORMAT_CALL_STACK)),1,200));
end if;
$ELSE
insert into XSLT_LOG
( ID_XSLT_LOG
, LOG_TYPE
, DESCRIPTION
, LOG_DATE
, DBUSER
, APPUSER
, APP_ID
, PAGE_ID
, APEX_WORKSPACE
, PROCEDURE
, ERROR_MESSAGE
, ERR_NUM
)
values
( XSLT_LOG_seq.nextval
, p_log_type
, l_description
, sysdate
, user
, V('APP_USER')
, V('APP_ID')
, V('APP_PAGE_ID')
, APEX_UTIL.FIND_WORKSPACE(apex_application.get_security_group_id)
, substr(nvl(p_procedure, GetProcedure(DBMS_UTILITY.FORMAT_CALL_STACK)),1,200)
, substr(P_SQLERRM, 1, 1024)
, P_ERR_NUM
);
$END
end if;
commit;
exception
when others then
rollback;
raise_application_error(-20001, sqlerrm);
end WriteLog;
  /**Shrink XSLT_LOG table to p_max_XSLT_LOG_recs records by deleting the oldest records, or if
* number of records is greater than nvl(p_trunc_XSLT_LOG_recs, 10 * p_max_XSLT_LOG_recs)
* truncate table xslt_log.
*
* @param p_max_XSLT_LOG_recs See procedure description above
* @param p_trunc_XSLT_LOG_recs See procedure description above
*/
PROCEDURE ClearLog(
p_max_XSLT_LOG_recs number default 10000,
p_trunc_XSLT_LOG_recs number default null
)
AS
l_XSLT_LOG_recs number;
l_max_procedures varchar2(1000);
cursor c_cur is
select count_recs, procedure_name from
(
select count(*) count_recs, procedure procedure_name from XSLT_LOG group by procedure order by count(*) desc
)
where rownum <= 5;
BEGIN
select count(*) into l_XSLT_LOG_recs from XSLT_LOG;
if l_XSLT_LOG_recs between p_max_XSLT_LOG_recs and nvl(p_trunc_XSLT_LOG_recs, 10 * p_max_XSLT_LOG_recs) then
delete from XSLT_LOG where id_XSLT_LOG in
(
select id_XSLT_LOG from
(
select id_XSLT_LOG from XSLT_LOG order by id_XSLT_LOG
)
where rownum <= l_XSLT_LOG_recs - p_max_XSLT_LOG_recs
);
WriteLog(
P_DESCRIPTION => 'Deleted '||to_char(l_XSLT_LOG_recs - p_max_XSLT_LOG_recs)||' oldest records.'
, P_LOG_TYPE => g_warning
, P_procedure => 'PAK_XSLT_LOG.ClearLog'
);
commit;
elsif l_XSLT_LOG_recs > nvl(p_trunc_XSLT_LOG_recs, 10 * p_max_XSLT_LOG_recs) then
for r_cur in c_cur loop
l_max_procedures := l_max_procedures||r_cur.procedure_name||':'||r_cur.count_recs||', ';
end loop;
l_max_procedures := rtrim(l_max_procedures, ', ');
EXECUTE IMMEDIATE 'truncate table xslt_log';
WriteLog(
P_DESCRIPTION => 'Log truncated ('||to_char(l_XSLT_LOG_recs)||' records). Check procedures: '||l_max_procedures
, P_LOG_TYPE => g_warning
, P_procedure => 'PAK_XSLT_LOG.ClearLog'
);
commit;
end if;
exception
when others then
WriteLog
( P_DESCRIPTION => 'Error'
, P_LOG_TYPE => PAK_XSLT_LOG.g_error
, P_PROCEDURE => 'PAK_XSLT_LOG.ClearLog'
, P_SQLERRM => sqlerrm
);
rollback;
END ClearLog;
begin
-- constructor set level to log errors and warnings
g_level:= 2;
END PAK_XSLT_LOG;
/
| 34.160714 | 127 | 0.651725 |
071d73959876ab0eef9e66bc12dbced7be2ad48f | 485 | rb | Ruby | app/components/content/generic_block_component.rb | GayeCord/get-into-teaching-app | 75a8fd3b95e7fcf07f3b03846cdc2456071b2807 | [
"MIT"
] | 6 | 2021-03-15T15:53:02.000Z | 2021-12-09T22:56:34.000Z | app/components/content/generic_block_component.rb | GayeCord/get-into-teaching-app | 75a8fd3b95e7fcf07f3b03846cdc2456071b2807 | [
"MIT"
] | 2,221 | 2020-05-06T08:16:29.000Z | 2022-03-31T17:26:35.000Z | app/components/content/generic_block_component.rb | uk-gov-mirror/DFE-Digital.get-into-teaching-app | 7d07d3bd83ab4b2dd5d97ca18dfe75438fca20d5 | [
"MIT"
] | 6 | 2020-12-21T15:44:47.000Z | 2022-03-18T16:12:07.000Z | module Content
class GenericBlockComponent < ViewComponent::Base
attr_reader :title, :classes
def initialize(title:, icon_image:, icon_alt:, icon_size: nil, classes: [])
@title = title
@icon_image = icon_image
@icon_alt = icon_alt
@icon_size = icon_size
@classes = classes
end
def icon
tag.div(class: "blocks__icon") do
image_pack_tag(@icon_image, alt: @icon_alt, size: @icon_size)
end
end
end
end
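# Usage sketch (illustrative; the copy, asset path and CSS class below are assumptions,
# not taken from this repository):
#
#   <%= render Content::GenericBlockComponent.new(
#         title: "Train to teach",
#         icon_image: "media/images/icon.svg",
#         icon_alt: "Chalkboard icon",
#         icon_size: "64x64",
#         classes: ["blocks__highlight"]
#       ) %>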
| 24.25 | 79 | 0.63299 |
6c9a6d245acf61f62fc46438aef0d15f4dc387ba | 2,468 | sql | SQL | content/test/data/conversions/databases/version_6.sql | DamieFC/chromium | 54ce2d3c77723697efd22cfdb02aea38f9dfa25c | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1 | 2020-10-18T02:33:40.000Z | 2020-10-18T02:33:40.000Z | content/test/data/conversions/databases/version_6.sql | DamieFC/chromium | 54ce2d3c77723697efd22cfdb02aea38f9dfa25c | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 3 | 2021-05-17T16:28:52.000Z | 2021-05-21T22:42:22.000Z | content/test/data/conversions/databases/version_6.sql | DamieFC/chromium | 54ce2d3c77723697efd22cfdb02aea38f9dfa25c | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | PRAGMA foreign_keys=OFF;
BEGIN TRANSACTION;
CREATE TABLE impressions(impression_id INTEGER PRIMARY KEY,impression_data TEXT NOT NULL,impression_origin TEXT NOT NULL,conversion_origin TEXT NOT NULL,reporting_origin TEXT NOT NULL,impression_time INTEGER NOT NULL,expiry_time INTEGER NOT NULL,num_conversions INTEGER DEFAULT 0,active INTEGER DEFAULT 1,conversion_destination TEXT NOT NULL,source_type INTEGER NOT NULL,attributed_truthfully INTEGER NOT NULL,priority INTEGER NOT NULL);
CREATE TABLE conversions (conversion_id INTEGER PRIMARY KEY, impression_id INTEGER, conversion_data TEXT NOT NULL, conversion_time INTEGER NOT NULL, report_time INTEGER NOT NULL);
CREATE TABLE rate_limits(rate_limit_id INTEGER PRIMARY KEY,attribution_type INTEGER NOT NULL,impression_id INTEGER NOT NULL,impression_site TEXT NOT NULL,impression_origin TEXT NOT NULL,conversion_destination TEXT NOT NULL,conversion_origin TEXT NOT NULL,conversion_time INTEGER NOT NULL);
CREATE TABLE meta(key LONGVARCHAR NOT NULL UNIQUE PRIMARY KEY, value LONGVARCHAR);
INSERT INTO meta VALUES('mmap_status','-1');
INSERT INTO meta VALUES('version','6');
INSERT INTO meta VALUES('last_compatible_version','6');
CREATE INDEX conversion_destination_idx ON impressions(active, conversion_destination, reporting_origin);
CREATE INDEX impression_expiry_idx ON impressions(expiry_time);
CREATE INDEX impression_origin_idx ON impressions(impression_origin);
CREATE INDEX conversion_report_idx ON conversions(report_time);
CREATE INDEX conversion_impression_id_idx ON conversions(impression_id);
CREATE INDEX rate_limit_impression_site_type_idx ON rate_limits(attribution_type, conversion_destination, impression_site, conversion_time);
CREATE INDEX rate_limit_conversion_time_idx ON rate_limits(conversion_time);
CREATE INDEX rate_limit_impression_id_idx ON rate_limits(impression_id);
INSERT INTO impressions
VALUES (1,
'9357e17751666f64',
'https://a.impression.test',
'https://conversion.test',
'https://report.test',
13245278349693988,
13247870349693988,
0,
1,
'https://conversion.test/',
0,
1,
3),
(2,
'9357e17751666f64',
'https://b.impression.test',
'https://conversion.test',
'https://report.test',
13245278349693988,
13247870349693988,
0,
1,
'https://conversion.test/',
0,
1,
4);
COMMIT;
| 39.806452 | 437 | 0.76175 |
d40814898f73041f438fa0f03ef91c650862d413 | 1,967 | hpp | C++ | include/dynamixel_ros2_control/dynamixel_ros2_controll.hpp | Schnilz/dynamixel_ros2_control | b8e3f915cfbe9a59e5de08d62843e62ab3a2b107 | [
"MIT"
] | null | null | null | include/dynamixel_ros2_control/dynamixel_ros2_controll.hpp | Schnilz/dynamixel_ros2_control | b8e3f915cfbe9a59e5de08d62843e62ab3a2b107 | [
"MIT"
] | 1 | 2022-02-12T11:17:23.000Z | 2022-02-12T11:17:23.000Z | include/dynamixel_ros2_control/dynamixel_ros2_controll.hpp | Schnilz/dynamixel_ros2_control | b8e3f915cfbe9a59e5de08d62843e62ab3a2b107 | [
"MIT"
] | null | null | null | #pragma once
#include <hardware_interface/handle.hpp>
#include <hardware_interface/hardware_info.hpp>
#include <hardware_interface/system_interface.hpp>
#include <rclcpp_lifecycle/state.hpp>
#include "dynamixel_ros2_control/visiblity_control.h"
#include "dynamixel_ros2_control/dynamixel_driver.hpp"
#include "rclcpp/macros.hpp"
#include <map>
#include <memory>
namespace dynamixel_ros2_control
{
class DynamixelHardwareInterface : public hardware_interface::SystemInterface {
public:
RCLCPP_SHARED_PTR_DEFINITIONS(DynamixelHardwareInterface)
DYNAMIXEL_HARDWARE_INTERFACE_PUBLIC
CallbackReturn on_init(const hardware_interface::HardwareInfo &info) override;
DYNAMIXEL_HARDWARE_INTERFACE_PUBLIC
std::vector<hardware_interface::StateInterface>
export_state_interfaces() override;
DYNAMIXEL_HARDWARE_INTERFACE_PUBLIC
std::vector<hardware_interface::CommandInterface>
export_command_interfaces() override;
DYNAMIXEL_HARDWARE_INTERFACE_PUBLIC
CallbackReturn
on_activate(const rclcpp_lifecycle::State &previous_state) override;
DYNAMIXEL_HARDWARE_INTERFACE_PUBLIC
CallbackReturn
on_deactivate(const rclcpp_lifecycle::State &previous_state) override;
DYNAMIXEL_HARDWARE_INTERFACE_PUBLIC
hardware_interface::return_type read() override;
DYNAMIXEL_HARDWARE_INTERFACE_PUBLIC
hardware_interface::return_type write() override;
private:
hardware_interface::return_type enable_torque(const bool enabled);
void input_voltage_error_callback(const std::string &joint_name) const;
void overheating_error_callback(const std::string &joint_name) const;
void motor_encoder_error_callback(const std::string &joint_name) const;
void electrical_shock_error_callback(const std::string &joint_name) const;
void overload_error_callback(const std::string &joint_name) const;
std::unique_ptr<dynamixel::Driver> dynamixel_driver_;
std::map<std::string, bool> auto_clear_overload_error;
};
} // namespace dynamixel_ros2_control
| 32.783333 | 80 | 0.833757 |
744fbba26a999cc5a6d1040cdd2ca9127f54d104 | 688 | css | CSS | client/src/app/resources/resources.component.css | corde171/iteration-4-secure-super-group | e65f15898b8e5b9dabc2fac0275cf3af37c5c28e | [
"MIT"
] | null | null | null | client/src/app/resources/resources.component.css | corde171/iteration-4-secure-super-group | e65f15898b8e5b9dabc2fac0275cf3af37c5c28e | [
"MIT"
] | 29 | 2018-04-20T17:25:39.000Z | 2018-05-04T01:35:45.000Z | client/src/app/resources/resources.component.css | corde171/iteration-4-secure-super-group | e65f15898b8e5b9dabc2fac0275cf3af37c5c28e | [
"MIT"
] | 2 | 2019-01-03T02:53:33.000Z | 2020-03-30T16:05:41.000Z | #addNewResources{
background-color: lightgreen;
}
.addNewResourcesDiv{
padding: 10px;
}
.loginEmoji{
display: flex;
margin-left: auto;
margin-right: auto;
width: 256px;
height: 256px;
padding: 32px;
}
.loginEmojiText {
text-align: center;
font-size: 24px;
}
a:link {
color: green;
background-color: transparent;
text-decoration: none;
}
a:visited {
color: pink;
background-color: transparent;
text-decoration: none;
}
a:hover {
color: red;
background-color: transparent;
text-decoration: underline;
}
a:active {
color: yellow;
background-color: transparent;
text-decoration: underline;
}
| 14.638298 | 35 | 0.642442 |
e282c3fe190038865d60046189752153466d4417 | 1,745 | py | Python | routing_changes/taps/details_cluster_sizes.py | RWails/tempest | 4e13d9e56f24e46db8f3e172d0be52f335a5e906 | [
"CC0-1.0"
] | 2 | 2018-07-29T04:10:01.000Z | 2019-08-15T11:09:29.000Z | routing_changes/taps/details_cluster_sizes.py | RWails/tempest | 4e13d9e56f24e46db8f3e172d0be52f335a5e906 | [
"CC0-1.0"
] | null | null | null | routing_changes/taps/details_cluster_sizes.py | RWails/tempest | 4e13d9e56f24e46db8f3e172d0be52f335a5e906 | [
"CC0-1.0"
] | 1 | 2018-10-19T00:09:59.000Z | 2018-10-19T00:09:59.000Z | #!/usr/bin/env python3
import argparse
import datetime
import functools
import os
import ujson as json
def get_as_to_cluster(cluster_filename):
as_to_cluster = dict()
with open(cluster_filename, 'r') as f:
clusters = json.loads(f.read())
for cluster in clusters.values():
cluster_set = set(cluster)
for asn in cluster:
as_to_cluster[asn] = cluster_set
return as_to_cluster
def main(args):
as_to_clusters = list(map(get_as_to_cluster, args.cluster_filenames))
core_ases = [x for x in as_to_clusters[0].keys()
if all(map(lambda y: x in y.keys(), as_to_clusters))]
def cluster_filename_to_datetime_str(x):
return os.path.split(x)[1].split(".")[1]
cluster_dates = list(map(cluster_filename_to_datetime_str,
args.cluster_filenames))
output_lines = []
eventual_clusters = dict()
for idx in range(0, len(as_to_clusters)):
as_to_cluster = as_to_clusters[idx]
for core_as in core_ases:
if core_as not in eventual_clusters:
eventual_clusters[core_as] = as_to_cluster[core_as]
else:
eventual_clusters[core_as] =\
eventual_clusters[core_as].intersection(as_to_cluster[core_as])
for asn in core_ases:
abs_len = len(eventual_clusters[asn])
output_lines.append((cluster_dates[idx], asn, abs_len))
for output_line in output_lines:
print(",".join(map(lambda x: str(x), output_line)))
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("cluster_filenames", nargs="+")
return parser.parse_args()
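# Usage sketch (illustrative): each cluster file is expected to be a JSON object whose
# values are lists of AS numbers (see get_as_to_cluster), and the date label is taken
# from the second dot-separated field of the file name (see
# cluster_filename_to_datetime_str). The file names below are assumptions:
#
#   ./details_cluster_sizes.py clusters.20180101.json clusters.20180201.json
#
# Output is printed as CSV lines of the form "date,asn,cluster_size".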
if __name__ == "__main__":
main(parse_args())
| 27.698413 | 83 | 0.649284 |
cdb42577c4c0a888da52f30af3d4274eb10c357b | 266 | cs | C# | detect-nuget-inspector/detect-nuget-inspector/DependencyResolution/DependencyResolver.cs | blackducksoftware/detect-nuget-inspector | b068efcf10f11f03200ac78b3084cf03b43a0ceb | [
"Apache-2.0"
] | null | null | null | detect-nuget-inspector/detect-nuget-inspector/DependencyResolution/DependencyResolver.cs | blackducksoftware/detect-nuget-inspector | b068efcf10f11f03200ac78b3084cf03b43a0ceb | [
"Apache-2.0"
] | null | null | null | detect-nuget-inspector/detect-nuget-inspector/DependencyResolution/DependencyResolver.cs | blackducksoftware/detect-nuget-inspector | b068efcf10f11f03200ac78b3084cf03b43a0ceb | [
"Apache-2.0"
] | null | null | null | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Synopsys.Detect.Nuget.Inspector.DependencyResolution
{
interface DependencyResolver
{
DependencyResult Process();
}
}
| 19 | 62 | 0.759398 |
54faab8ae283868e2fe9bd68331d170814929a8d | 25,850 | h | C | gta5_fsr/wrappers/d3d11/ID3D11DeviceContextWrapper.h | mrnwf/gta5_fsr | 99802d5a74751c1b8ff49a9094aba5d318b58a43 | [
"MIT"
] | 366 | 2021-07-01T07:51:02.000Z | 2022-03-26T20:19:31.000Z | gta5_fsr/wrappers/d3d11/ID3D11DeviceContextWrapper.h | Rick-laboratory/gta5_fsr | 554175dda82a3c5cae3d94da0cee5a294813af7a | [
"MIT"
] | 48 | 2021-07-01T13:41:53.000Z | 2022-03-26T22:52:25.000Z | gta5_fsr/wrappers/d3d11/ID3D11DeviceContextWrapper.h | Rick-laboratory/gta5_fsr | 554175dda82a3c5cae3d94da0cee5a294813af7a | [
"MIT"
] | 31 | 2021-07-01T08:12:20.000Z | 2022-03-17T22:22:12.000Z | #pragma once
#include <d3d11.h> // for ID3D11DeviceContext and the other Direct3D 11 interfaces used below
class ID3D11DeviceContextWrapper : public ID3D11DeviceContext
{
public:
ID3D11DeviceContextWrapper(ID3D11DeviceContext* pOrigDeviceContext);
ID3D11DeviceContextWrapper(const ID3D11DeviceContextWrapper&) = delete;
ID3D11DeviceContextWrapper& operator=(const ID3D11DeviceContextWrapper) = delete;
virtual HRESULT __stdcall QueryInterface(REFIID riid, void** ppvObject) override
{
return m_pOrigDeviceContext->QueryInterface(riid, ppvObject);
}
virtual ULONG __stdcall AddRef(void) override
{
return m_pOrigDeviceContext->AddRef();
}
virtual ULONG __stdcall Release(void) override;
virtual void __stdcall GetDevice(ID3D11Device** ppDevice) override
{
m_pOrigDeviceContext->GetDevice(ppDevice);
}
virtual HRESULT __stdcall GetPrivateData(REFGUID guid, UINT* pDataSize, void* pData) override
{
return m_pOrigDeviceContext->GetPrivateData(guid, pDataSize, pData);
}
virtual HRESULT __stdcall SetPrivateData(REFGUID guid, UINT DataSize, const void* pData) override
{
return m_pOrigDeviceContext->SetPrivateData(guid, DataSize, pData);
}
virtual HRESULT __stdcall SetPrivateDataInterface(REFGUID guid, const IUnknown* pData) override
{
return m_pOrigDeviceContext->SetPrivateDataInterface(guid, pData);
}
virtual void __stdcall VSSetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) override
{
m_pOrigDeviceContext->VSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall PSSetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) override;
virtual void __stdcall PSSetShader(ID3D11PixelShader* pPixelShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) override
{
m_pOrigDeviceContext->PSSetShader(pPixelShader, ppClassInstances, NumClassInstances);
}
virtual void __stdcall PSSetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) override
{
m_pOrigDeviceContext->PSSetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall VSSetShader(ID3D11VertexShader* pVertexShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) override
{
m_pOrigDeviceContext->VSSetShader(pVertexShader, ppClassInstances, NumClassInstances);
}
virtual void __stdcall DrawIndexed(UINT IndexCount, UINT StartIndexLocation, INT BaseVertexLocation) override
{
m_pOrigDeviceContext->DrawIndexed(IndexCount, StartIndexLocation, BaseVertexLocation);
}
virtual void __stdcall Draw(UINT VertexCount, UINT StartVertexLocation) override;
virtual HRESULT __stdcall Map(ID3D11Resource* pResource, UINT Subresource, D3D11_MAP MapType, UINT MapFlags, D3D11_MAPPED_SUBRESOURCE* pMappedResource) override
{
return m_pOrigDeviceContext->Map(pResource, Subresource, MapType, MapFlags, pMappedResource);
}
virtual void __stdcall Unmap(ID3D11Resource* pResource, UINT Subresource) override
{
m_pOrigDeviceContext->Unmap(pResource, Subresource);
}
virtual void __stdcall PSSetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) override
{
m_pOrigDeviceContext->PSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall IASetInputLayout(ID3D11InputLayout* pInputLayout) override
{
m_pOrigDeviceContext->IASetInputLayout(pInputLayout);
}
virtual void __stdcall IASetVertexBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppVertexBuffers, const UINT* pStrides, const UINT* pOffsets) override
{
m_pOrigDeviceContext->IASetVertexBuffers(StartSlot, NumBuffers, ppVertexBuffers, pStrides, pOffsets);
}
virtual void __stdcall IASetIndexBuffer(ID3D11Buffer* pIndexBuffer, DXGI_FORMAT Format, UINT Offset) override
{
m_pOrigDeviceContext->IASetIndexBuffer(pIndexBuffer, Format, Offset);
}
virtual void __stdcall DrawIndexedInstanced(UINT IndexCountPerInstance, UINT InstanceCount, UINT StartIndexLocation, INT BaseVertexLocation, UINT StartInstanceLocation) override
{
m_pOrigDeviceContext->DrawIndexedInstanced(IndexCountPerInstance, InstanceCount, StartIndexLocation, BaseVertexLocation, StartInstanceLocation);
}
virtual void __stdcall DrawInstanced(UINT VertexCountPerInstance, UINT InstanceCount, UINT StartVertexLocation, UINT StartInstanceLocation) override
{
m_pOrigDeviceContext->DrawInstanced(VertexCountPerInstance, InstanceCount, StartVertexLocation, StartInstanceLocation);
}
virtual void __stdcall GSSetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) override
{
m_pOrigDeviceContext->GSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall GSSetShader(ID3D11GeometryShader* pShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) override
{
m_pOrigDeviceContext->GSSetShader(pShader, ppClassInstances, NumClassInstances);
}
virtual void __stdcall IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY Topology) override
{
m_pOrigDeviceContext->IASetPrimitiveTopology(Topology);
}
virtual void __stdcall VSSetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) override
{
m_pOrigDeviceContext->VSSetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall VSSetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) override
{
m_pOrigDeviceContext->VSSetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall Begin(ID3D11Asynchronous* pAsync) override
{
m_pOrigDeviceContext->Begin(pAsync);
}
virtual void __stdcall End(ID3D11Asynchronous* pAsync) override
{
m_pOrigDeviceContext->End(pAsync);
}
virtual HRESULT __stdcall GetData(ID3D11Asynchronous* pAsync, void* pData, UINT DataSize, UINT GetDataFlags) override
{
return m_pOrigDeviceContext->GetData(pAsync, pData, DataSize, GetDataFlags);
}
virtual void __stdcall SetPredication(ID3D11Predicate* pPredicate, BOOL PredicateValue) override
{
m_pOrigDeviceContext->SetPredication(pPredicate, PredicateValue);
}
virtual void __stdcall GSSetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) override
{
m_pOrigDeviceContext->GSSetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall GSSetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) override
{
m_pOrigDeviceContext->GSSetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall OMSetRenderTargets(UINT NumViews, ID3D11RenderTargetView* const* ppRenderTargetViews, ID3D11DepthStencilView* pDepthStencilView) override;
virtual void __stdcall OMSetRenderTargetsAndUnorderedAccessViews(UINT NumRTVs, ID3D11RenderTargetView* const* ppRenderTargetViews, ID3D11DepthStencilView* pDepthStencilView, UINT UAVStartSlot, UINT NumUAVs, ID3D11UnorderedAccessView* const* ppUnorderedAccessViews, const UINT* pUAVInitialCounts) override
{
m_pOrigDeviceContext->OMSetRenderTargetsAndUnorderedAccessViews(NumRTVs, ppRenderTargetViews, pDepthStencilView, UAVStartSlot, NumUAVs, ppUnorderedAccessViews, pUAVInitialCounts);
}
virtual void __stdcall OMSetBlendState(ID3D11BlendState* pBlendState, const FLOAT BlendFactor[4], UINT SampleMask) override
{
m_pOrigDeviceContext->OMSetBlendState(pBlendState, BlendFactor, SampleMask);
}
virtual void __stdcall OMSetDepthStencilState(ID3D11DepthStencilState* pDepthStencilState, UINT StencilRef) override
{
m_pOrigDeviceContext->OMSetDepthStencilState(pDepthStencilState, StencilRef);
}
virtual void __stdcall SOSetTargets(UINT NumBuffers, ID3D11Buffer* const* ppSOTargets, const UINT* pOffsets) override
{
m_pOrigDeviceContext->SOSetTargets(NumBuffers, ppSOTargets, pOffsets);
}
virtual void __stdcall DrawAuto(void) override
{
m_pOrigDeviceContext->DrawAuto();
}
virtual void __stdcall DrawIndexedInstancedIndirect(ID3D11Buffer* pBufferForArgs, UINT AlignedByteOffsetForArgs) override
{
m_pOrigDeviceContext->DrawIndexedInstancedIndirect(pBufferForArgs, AlignedByteOffsetForArgs);
}
virtual void __stdcall DrawInstancedIndirect(ID3D11Buffer* pBufferForArgs, UINT AlignedByteOffsetForArgs) override
{
m_pOrigDeviceContext->DrawInstancedIndirect(pBufferForArgs, AlignedByteOffsetForArgs);
}
virtual void __stdcall Dispatch(UINT ThreadGroupCountX, UINT ThreadGroupCountY, UINT ThreadGroupCountZ) override
{
m_pOrigDeviceContext->Dispatch(ThreadGroupCountX, ThreadGroupCountY, ThreadGroupCountZ);
}
virtual void __stdcall DispatchIndirect(ID3D11Buffer* pBufferForArgs, UINT AlignedByteOffsetForArgs) override
{
m_pOrigDeviceContext->DispatchIndirect(pBufferForArgs, AlignedByteOffsetForArgs);
}
virtual void __stdcall RSSetState(ID3D11RasterizerState* pRasterizerState) override
{
m_pOrigDeviceContext->RSSetState(pRasterizerState);
}
virtual void __stdcall RSSetViewports(UINT NumViewports, const D3D11_VIEWPORT* pViewports) override
{
m_pOrigDeviceContext->RSSetViewports(NumViewports, pViewports);
}
virtual void __stdcall RSSetScissorRects(UINT NumRects, const D3D11_RECT* pRects) override
{
m_pOrigDeviceContext->RSSetScissorRects(NumRects, pRects);
}
virtual void __stdcall CopySubresourceRegion(ID3D11Resource* pDstResource, UINT DstSubresource, UINT DstX, UINT DstY, UINT DstZ, ID3D11Resource* pSrcResource, UINT SrcSubresource, const D3D11_BOX* pSrcBox) override
{
m_pOrigDeviceContext->CopySubresourceRegion(pDstResource, DstSubresource, DstX, DstY, DstZ, pSrcResource, SrcSubresource, pSrcBox);
}
virtual void __stdcall CopyResource(ID3D11Resource* pDstResource, ID3D11Resource* pSrcResource) override
{
m_pOrigDeviceContext->CopyResource(pDstResource, pSrcResource);
}
virtual void __stdcall UpdateSubresource(ID3D11Resource* pDstResource, UINT DstSubresource, const D3D11_BOX* pDstBox, const void* pSrcData, UINT SrcRowPitch, UINT SrcDepthPitch) override
{
m_pOrigDeviceContext->UpdateSubresource(pDstResource, DstSubresource, pDstBox, pSrcData, SrcRowPitch, SrcDepthPitch);
}
virtual void __stdcall CopyStructureCount(ID3D11Buffer* pDstBuffer, UINT DstAlignedByteOffset, ID3D11UnorderedAccessView* pSrcView) override
{
m_pOrigDeviceContext->CopyStructureCount(pDstBuffer, DstAlignedByteOffset, pSrcView);
}
virtual void __stdcall ClearRenderTargetView(ID3D11RenderTargetView* pRenderTargetView, const FLOAT ColorRGBA[4]) override
{
m_pOrigDeviceContext->ClearRenderTargetView(pRenderTargetView, ColorRGBA);
}
virtual void __stdcall ClearUnorderedAccessViewUint(ID3D11UnorderedAccessView* pUnorderedAccessView, const UINT Values[4]) override
{
m_pOrigDeviceContext->ClearUnorderedAccessViewUint(pUnorderedAccessView, Values);
}
virtual void __stdcall ClearUnorderedAccessViewFloat(ID3D11UnorderedAccessView* pUnorderedAccessView, const FLOAT Values[4]) override
{
m_pOrigDeviceContext->ClearUnorderedAccessViewFloat(pUnorderedAccessView, Values);
}
virtual void __stdcall ClearDepthStencilView(ID3D11DepthStencilView* pDepthStencilView, UINT ClearFlags, FLOAT Depth, UINT8 Stencil) override
{
m_pOrigDeviceContext->ClearDepthStencilView(pDepthStencilView, ClearFlags, Depth, Stencil);
}
virtual void __stdcall GenerateMips(ID3D11ShaderResourceView* pShaderResourceView) override
{
m_pOrigDeviceContext->GenerateMips(pShaderResourceView);
}
virtual void __stdcall SetResourceMinLOD(ID3D11Resource* pResource, FLOAT MinLOD) override
{
m_pOrigDeviceContext->SetResourceMinLOD(pResource, MinLOD);
}
virtual FLOAT __stdcall GetResourceMinLOD(ID3D11Resource* pResource) override
{
return m_pOrigDeviceContext->GetResourceMinLOD(pResource);
}
virtual void __stdcall ResolveSubresource(ID3D11Resource* pDstResource, UINT DstSubresource, ID3D11Resource* pSrcResource, UINT SrcSubresource, DXGI_FORMAT Format) override
{
m_pOrigDeviceContext->ResolveSubresource(pDstResource, DstSubresource, pSrcResource, SrcSubresource, Format);
}
virtual void __stdcall ExecuteCommandList(ID3D11CommandList* pCommandList, BOOL RestoreContextState) override
{
m_pOrigDeviceContext->ExecuteCommandList(pCommandList, RestoreContextState);
}
virtual void __stdcall HSSetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) override
{
m_pOrigDeviceContext->HSSetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall HSSetShader(ID3D11HullShader* pHullShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) override
{
m_pOrigDeviceContext->HSSetShader(pHullShader, ppClassInstances, NumClassInstances);
}
virtual void __stdcall HSSetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) override
{
m_pOrigDeviceContext->HSSetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall HSSetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) override
{
m_pOrigDeviceContext->HSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall DSSetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) override
{
m_pOrigDeviceContext->DSSetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall DSSetShader(ID3D11DomainShader* pDomainShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) override
{
m_pOrigDeviceContext->DSSetShader(pDomainShader, ppClassInstances, NumClassInstances);
}
virtual void __stdcall DSSetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) override
{
m_pOrigDeviceContext->DSSetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall DSSetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) override
{
m_pOrigDeviceContext->DSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall CSSetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) override
{
m_pOrigDeviceContext->CSSetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall CSSetUnorderedAccessViews(UINT StartSlot, UINT NumUAVs, ID3D11UnorderedAccessView* const* ppUnorderedAccessViews, const UINT* pUAVInitialCounts) override
{
m_pOrigDeviceContext->CSSetUnorderedAccessViews(StartSlot, NumUAVs, ppUnorderedAccessViews, pUAVInitialCounts);
}
virtual void __stdcall CSSetShader(ID3D11ComputeShader* pComputeShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) override
{
m_pOrigDeviceContext->CSSetShader(pComputeShader, ppClassInstances, NumClassInstances);
}
virtual void __stdcall CSSetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) override
{
m_pOrigDeviceContext->CSSetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall CSSetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) override
{
m_pOrigDeviceContext->CSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall VSGetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer** ppConstantBuffers) override
{
m_pOrigDeviceContext->VSGetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall PSGetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView** ppShaderResourceViews) override
{
m_pOrigDeviceContext->PSGetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall PSGetShader(ID3D11PixelShader** ppPixelShader, ID3D11ClassInstance** ppClassInstances, UINT* pNumClassInstances) override
{
m_pOrigDeviceContext->PSGetShader(ppPixelShader, ppClassInstances, pNumClassInstances);
}
virtual void __stdcall PSGetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState** ppSamplers) override
{
m_pOrigDeviceContext->PSGetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall VSGetShader(ID3D11VertexShader** ppVertexShader, ID3D11ClassInstance** ppClassInstances, UINT* pNumClassInstances) override
{
m_pOrigDeviceContext->VSGetShader(ppVertexShader, ppClassInstances, pNumClassInstances);
}
virtual void __stdcall PSGetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer** ppConstantBuffers) override
{
m_pOrigDeviceContext->PSGetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall IAGetInputLayout(ID3D11InputLayout** ppInputLayout) override
{
m_pOrigDeviceContext->IAGetInputLayout(ppInputLayout);
}
virtual void __stdcall IAGetVertexBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer** ppVertexBuffers, UINT* pStrides, UINT* pOffsets) override
{
m_pOrigDeviceContext->IAGetVertexBuffers(StartSlot, NumBuffers, ppVertexBuffers, pStrides, pOffsets);
}
virtual void __stdcall IAGetIndexBuffer(ID3D11Buffer** pIndexBuffer, DXGI_FORMAT* Format, UINT* Offset) override
{
m_pOrigDeviceContext->IAGetIndexBuffer(pIndexBuffer, Format, Offset);
}
virtual void __stdcall GSGetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer** ppConstantBuffers) override
{
m_pOrigDeviceContext->GSGetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall GSGetShader(ID3D11GeometryShader** ppGeometryShader, ID3D11ClassInstance** ppClassInstances, UINT* pNumClassInstances) override
{
m_pOrigDeviceContext->GSGetShader(ppGeometryShader, ppClassInstances, pNumClassInstances);
}
virtual void __stdcall IAGetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY* pTopology) override
{
m_pOrigDeviceContext->IAGetPrimitiveTopology(pTopology);
}
virtual void __stdcall VSGetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView** ppShaderResourceViews) override
{
m_pOrigDeviceContext->VSGetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall VSGetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState** ppSamplers) override
{
m_pOrigDeviceContext->VSGetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall GetPredication(ID3D11Predicate** ppPredicate, BOOL* pPredicateValue) override
{
m_pOrigDeviceContext->GetPredication(ppPredicate, pPredicateValue);
}
virtual void __stdcall GSGetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView** ppShaderResourceViews) override
{
m_pOrigDeviceContext->GSGetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall GSGetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState** ppSamplers) override
{
m_pOrigDeviceContext->GSGetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall OMGetRenderTargets(UINT NumViews, ID3D11RenderTargetView** ppRenderTargetViews, ID3D11DepthStencilView** ppDepthStencilView) override
{
m_pOrigDeviceContext->OMGetRenderTargets(NumViews, ppRenderTargetViews, ppDepthStencilView);
}
virtual void __stdcall OMGetRenderTargetsAndUnorderedAccessViews(UINT NumRTVs, ID3D11RenderTargetView** ppRenderTargetViews, ID3D11DepthStencilView** ppDepthStencilView, UINT UAVStartSlot, UINT NumUAVs, ID3D11UnorderedAccessView** ppUnorderedAccessViews) override
{
m_pOrigDeviceContext->OMGetRenderTargetsAndUnorderedAccessViews(NumRTVs, ppRenderTargetViews, ppDepthStencilView, UAVStartSlot, NumUAVs, ppUnorderedAccessViews);
}
virtual void __stdcall OMGetBlendState(ID3D11BlendState** ppBlendState, FLOAT BlendFactor[4], UINT* pSampleMask) override
{
m_pOrigDeviceContext->OMGetBlendState(ppBlendState, BlendFactor, pSampleMask);
}
virtual void __stdcall OMGetDepthStencilState(ID3D11DepthStencilState** ppDepthStencilState, UINT* pStencilRef) override
{
m_pOrigDeviceContext->OMGetDepthStencilState(ppDepthStencilState, pStencilRef);
}
virtual void __stdcall SOGetTargets(UINT NumBuffers, ID3D11Buffer** ppSOTargets) override
{
m_pOrigDeviceContext->SOGetTargets(NumBuffers, ppSOTargets);
}
virtual void __stdcall RSGetState(ID3D11RasterizerState** ppRasterizerState) override
{
m_pOrigDeviceContext->RSGetState(ppRasterizerState);
}
virtual void __stdcall RSGetViewports(UINT* pNumViewports, D3D11_VIEWPORT* pViewports) override
{
m_pOrigDeviceContext->RSGetViewports(pNumViewports, pViewports);
}
virtual void __stdcall RSGetScissorRects(UINT* pNumRects, D3D11_RECT* pRects) override
{
m_pOrigDeviceContext->RSGetScissorRects(pNumRects, pRects);
}
virtual void __stdcall HSGetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView** ppShaderResourceViews) override
{
m_pOrigDeviceContext->HSGetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall HSGetShader(ID3D11HullShader** ppHullShader, ID3D11ClassInstance** ppClassInstances, UINT* pNumClassInstances) override
{
m_pOrigDeviceContext->HSGetShader(ppHullShader, ppClassInstances, pNumClassInstances);
}
virtual void __stdcall HSGetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState** ppSamplers) override
{
m_pOrigDeviceContext->HSGetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall HSGetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer** ppConstantBuffers) override
{
m_pOrigDeviceContext->HSGetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall DSGetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView** ppShaderResourceViews) override
{
m_pOrigDeviceContext->DSGetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall DSGetShader(ID3D11DomainShader** ppDomainShader, ID3D11ClassInstance** ppClassInstances, UINT* pNumClassInstances) override
{
m_pOrigDeviceContext->DSGetShader(ppDomainShader, ppClassInstances, pNumClassInstances);
}
virtual void __stdcall DSGetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState** ppSamplers) override
{
m_pOrigDeviceContext->DSGetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall DSGetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer** ppConstantBuffers) override
{
m_pOrigDeviceContext->DSGetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall CSGetShaderResources(UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView** ppShaderResourceViews) override
{
m_pOrigDeviceContext->CSGetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
}
virtual void __stdcall CSGetUnorderedAccessViews(UINT StartSlot, UINT NumUAVs, ID3D11UnorderedAccessView** ppUnorderedAccessViews) override
{
m_pOrigDeviceContext->CSGetUnorderedAccessViews(StartSlot, NumUAVs, ppUnorderedAccessViews);
}
virtual void __stdcall CSGetShader(ID3D11ComputeShader** ppComputeShader, ID3D11ClassInstance** ppClassInstances, UINT* pNumClassInstances) override
{
m_pOrigDeviceContext->CSGetShader(ppComputeShader, ppClassInstances, pNumClassInstances);
}
virtual void __stdcall CSGetSamplers(UINT StartSlot, UINT NumSamplers, ID3D11SamplerState** ppSamplers) override
{
m_pOrigDeviceContext->CSGetSamplers(StartSlot, NumSamplers, ppSamplers);
}
virtual void __stdcall CSGetConstantBuffers(UINT StartSlot, UINT NumBuffers, ID3D11Buffer** ppConstantBuffers) override
{
m_pOrigDeviceContext->CSGetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
}
virtual void __stdcall ClearState(void) override
{
m_pOrigDeviceContext->ClearState();
}
virtual void __stdcall Flush(void) override
{
m_pOrigDeviceContext->Flush();
}
virtual D3D11_DEVICE_CONTEXT_TYPE __stdcall GetType(void) override
{
return m_pOrigDeviceContext->GetType();
}
virtual UINT __stdcall GetContextFlags(void) override
{
return m_pOrigDeviceContext->GetContextFlags();
}
virtual HRESULT __stdcall FinishCommandList(BOOL RestoreDeferredContextState, ID3D11CommandList** ppCommandList) override
{
return m_pOrigDeviceContext->FinishCommandList(RestoreDeferredContextState, ppCommandList);
}
private:
ID3D11DeviceContext* m_pOrigDeviceContext;
UINT m_PSShaderResource_LastStartSlot;
BOOL m_OMRenderTargets_Valid;
};
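// Usage sketch (illustrative; the variable names and surrounding hook code are
// assumptions, not taken from this header):
//
//   ID3D11DeviceContext* pOrig = nullptr;
//   pDevice->GetImmediateContext(&pOrig);
//   ID3D11DeviceContext* pCtx = new ID3D11DeviceContextWrapper(pOrig);
//   // pCtx forwards every call to pOrig; the methods declared above without inline
//   // bodies (Release, PSSetShaderResources, Draw, OMSetRenderTargets) are where the
//   // wrapper adds its own logic, presumably in a matching .cpp file.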
| 54.306723 | 308 | 0.781818 |
2dec8914325739d0aff3a89bcece7fc8b0debc65 | 1,996 | cpp | C++ | lib/sensors/src/sensors.cpp | mauriciobarroso/dit-es | e4b6b63eaae34d0f141554ce0f727ed86b919279 | [
"MIT"
] | null | null | null | lib/sensors/src/sensors.cpp | mauriciobarroso/dit-es | e4b6b63eaae34d0f141554ce0f727ed86b919279 | [
"MIT"
] | null | null | null | lib/sensors/src/sensors.cpp | mauriciobarroso/dit-es | e4b6b63eaae34d0f141554ce0f727ed86b919279 | [
"MIT"
] | null | null | null | /*
 * sensors.cpp
*
* Created on: Jan 13, 2021
* Author: Mauricio Barroso Benavides
*/
/* inclusions ----------------------------------------------------------------*/
#include "sensors.h"
/* macros --------------------------------------------------------------------*/
/* typedef -------------------------------------------------------------------*/
/* internal data declaration -------------------------------------------------*/
/* external data declaration -------------------------------------------------*/
/* internal functions declaration --------------------------------------------*/
/* external functions definition ---------------------------------------------*/
bool sensors_init(sensors_t * const me, sensors_pins_t * pins)
{
/* Initialise DHT */
me->pins.dht = pins->dht;
me->values.hum = 0;
me->values.temp = 0;
/* Initialise LDR */
me->pins.ldr = pins->ldr;
me->values.light = 0;
/* Initialise PIR */
me->pins.pir = pins->pir;
pinMode(me->pins.pir, INPUT);
me->values.presence = 0;
return true;
}
void sensors_get_values(sensors_t * const me, sensors_values_t * values)
{
/* Get DHT values */
DHT dht(me->pins.dht);
float temp_tmp = dht.getTempCelcius();
if(!isnan(temp_tmp) && temp_tmp >= 0 && temp_tmp <= 80)
{
me->values.temp = temp_tmp;
values->temp = me->values.temp;
}
float hum_tmp = dht.getHumidity();
if(!isnan(hum_tmp) && hum_tmp >= 0 && hum_tmp <= 95)
{
me->values.hum = hum_tmp;
values->hum = me->values.hum;
}
/* Get LDR values */
uint16_t light_tmp = analogRead(me->pins.ldr);
	if(light_tmp <= 4095)	/* light_tmp is unsigned, so only the upper bound needs checking */
{
me->values.light = light_tmp;
values->light = me->values.light;
}
/* Get PIR values */
me->values.presence = digitalRead(me->pins.pir);
values->presence = me->values.presence;
}
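/* Usage sketch (illustrative; the GPIO numbers are assumptions, not taken from this
 * project):
 *
 *   sensors_t sensors;
 *   sensors_pins_t pins;
 *   pins.dht = 4;
 *   pins.ldr = 34;
 *   pins.pir = 27;
 *   sensors_init(&sensors, &pins);
 *
 *   sensors_values_t values;
 *   sensors_get_values(&sensors, &values);
 *   // values.temp, values.hum, values.light and values.presence now hold the readings
 */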
/* internal functions definition ---------------------------------------------*/
/* end of file ---------------------------------------------------------------*/
| 25.265823 | 80 | 0.470942 |
b2ea30b4736ef5e61b9706179459b1042c35209a | 190 | rb | Ruby | spec/models/site.rb | jagdeepsingh/mongoid-rspec | 19f0ef33724687e59a1091a8e41826fe5048cb15 | [
"MIT"
] | 1 | 2015-07-08T10:27:33.000Z | 2015-07-08T10:27:33.000Z | spec/models/site.rb | jagdeepsingh/mongoid-rspec | 19f0ef33724687e59a1091a8e41826fe5048cb15 | [
"MIT"
] | null | null | null | spec/models/site.rb | jagdeepsingh/mongoid-rspec | 19f0ef33724687e59a1091a8e41826fe5048cb15 | [
"MIT"
] | null | null | null | class Site
include Mongoid::Document
field :name
has_many :users, inverse_of: :site, order: :email.desc, counter_cache: true
validates :name, presence: true, uniqueness: true
end
| 19 | 77 | 0.736842 |
ea320c4704143a008424ee15039d1885fd315d27 | 990 | dart | Dart | lib/round_button.dart | janagrill/flutter-coding-challenge | cd6d481659c40c91ac37c6d7a0bcb541639a6487 | [
"Apache-2.0"
] | 1 | 2020-04-22T06:51:45.000Z | 2020-04-22T06:51:45.000Z | lib/round_button.dart | janagrill/flutter-coding-challenge | cd6d481659c40c91ac37c6d7a0bcb541639a6487 | [
"Apache-2.0"
] | 1 | 2020-02-02T18:40:37.000Z | 2020-02-02T18:40:37.000Z | lib/round_button.dart | janagrill/flutter-coding-challenge | cd6d481659c40c91ac37c6d7a0bcb541639a6487 | [
"Apache-2.0"
] | 3 | 2020-02-02T13:35:33.000Z | 2020-02-03T14:20:44.000Z | import 'package:flutter/material.dart';
class RoundButton extends StatelessWidget {
final String text;
final IconData icon;
final Color backgroundColor;
final VoidCallback onPressed;
RoundButton({
@required this.text,
@required this.icon,
@required this.backgroundColor,
@required this.onPressed,
});
  @override
  Widget build(BuildContext context) {
return Column(
children: <Widget>[
SizedBox.fromSize(
size: Size(100, 100),
child: ClipOval(
child: Material(
color: backgroundColor,
child: InkWell(
splashColor: Colors.white,
onTap: onPressed,
child: Icon(
icon,
size: 60,
),
),
),
),
),
Text(
text,
style: TextStyle(
fontSize: 30,
fontWeight: FontWeight.bold,
),
),
],
);
}
}
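// Usage sketch (illustrative; the label, icon, color and callback are placeholders):
//
//   RoundButton(
//     text: 'Start',
//     icon: Icons.play_arrow,
//     backgroundColor: Colors.teal,
//     onPressed: () => print('pressed'),
//   )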
| 21.521739 | 43 | 0.491919 |
b09be00c55d29c332ecfd5a00d59405f33e6665f | 18,732 | py | Python | src/train/simulate.py | rmldj/random-graph-nn-paper | b04537f3312113b118878c37cb314a527c5b3a11 | [
"MIT"
] | 3 | 2020-03-23T14:00:35.000Z | 2020-09-24T13:56:18.000Z | src/train/simulate.py | rmldj/random-graph-nn-paper | b04537f3312113b118878c37cb314a527c5b3a11 | [
"MIT"
] | null | null | null | src/train/simulate.py | rmldj/random-graph-nn-paper | b04537f3312113b118878c37cb314a527c5b3a11 | [
"MIT"
] | null | null | null | import logging
import os
import pickle as pkl
import random
import sys
import time
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torch.nn as nn
import torch.nn.parallel
import torch.optim
import torch.utils.data
import torchvision.datasets as datasets
import torchvision.transforms as transforms
logging.basicConfig(filename='simulate.log', level=logging.INFO)
'''The file containing the main training logic.'''
# parameter numbers for the various sizes (resnet32, resnet56, resnet110)
params = {'S': 464154, 'M': 853018, 'L': 1727962}
def simulate(args):
"""
The function performing the simulation (training + saving)
:param args: the command line arguments. See src/main.py
"""
if args.restype == "None":
args.restype = None
cmdline = ' '.join(sys.argv)
if args.verbose:
print_command_line(args, cmdline)
# for approximate reproducibility
set_seed(args)
# handle output directories
make_dirs(args)
# CIFAR
if args.cifar == '10':
train_loader, val_loader = load_cifar10(args)
num_outputs = 10
elif args.cifar == '100':
train_loader, val_loader = load_cifar100(args)
num_outputs = 100
else:
raise ValueError("Unknown dataset {}".format(args.cifar))
# load appropriate model and set basename for results/trained model
C, basename, model, net, size = load_model(args, num_outputs)
num_parameters = get_num_parameters(model)
model.cuda()
maybe_deterministic(args)
# define loss function (criterion) and optimizer
criterion, optimizer = set_criterion(args, model)
# lr_scheduler
lr_scheduler = set_lr_scheduler(args, optimizer)
if args.verbose:
print()
print(args.arch, 'parameters:', num_parameters, 'size:', size, 'C:', C)
print()
num_epochs = args.epochs
lr_all = np.zeros(num_epochs)
total_time = 0
preds = None
for epoch in range(num_epochs):
# train for one epoch
epoch_time, train_losses, train_prec1 = train(train_loader, model, criterion, optimizer, epoch, args)
total_time += epoch_time
# update learning rate
check_learning_rate(epoch, lr_all, lr_scheduler, optimizer, verbose=args.verbose)
# evaluate on validation set
if args.save_preds and epoch == num_epochs - 1:
valid_losses, valid_prec1, preds = validate(val_loader, model, criterion, args, return_preds=True)
else:
valid_losses, valid_prec1 = validate(val_loader, model, criterion, args)
# save metrics
if epoch == 0:
arrays = create_metric_arrays(epoch, num_epochs, train_losses, train_prec1, valid_losses, valid_prec1)
else:
update_all_metrics(epoch, arrays, train_losses, train_prec1, valid_losses, valid_prec1)
if args.verbose:
print()
save_results(C, args, arrays, basename, cmdline, lr_all, model, net, num_epochs, num_parameters, preds, size,
total_time, train_losses, train_prec1, valid_losses, valid_prec1)
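def _example_usage():
    """Illustrative only and never called: shows the argparse attributes that
    `simulate` reads (normally built by src/main.py). The concrete values below are
    placeholders, not project defaults, and a CUDA device is required."""
    from argparse import Namespace
    args = Namespace(arch='resnet32', cifar='10', size='S', C=None, restype='None',
                     blocktype=None, epochs=150, lr=0.1, momentum=0.9, weight_decay=1e-4,
                     milestone1=100, milestone2=125, batch_size=128, workers=4,
                     half=False, seed=0, deterministic=False, verbose=True,
                     save_model=False, save_preds=False, datapath='./data',
                     net_dir='./nets', results_dir='./results', models_dir='./models',
                     preds_dir='./preds')
    simulate(args)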
def get_num_parameters(model):
"""
:param model: the pytorch model.
:return: the total number of parameters.
"""
trainable = filter(lambda p: p.requires_grad, model.parameters())
return sum([np.prod(p.size()) for p in trainable])
def maybe_deterministic(args):
"""
sets (or not) the cudnn.deterministic and cudnn.benchmark flags, depending on the command line arguments.
:param args: command line arguments.
:return:
"""
if args.deterministic:
# NOT reproducible across different GPU's
cudnn.deterministic = True
cudnn.benchmark = False
else:
cudnn.benchmark = True
def print_command_line(args, cmdline):
"""
print commandline arguments
:param args: arguments parsed.
:param cmdline: the raw command line.
:return:
"""
print(cmdline)
print()
print(args)
print()
def save_results(C, args, arrays, basename, cmdline, lr_all, model, net, num_epochs, num_parameters, preds, size,
total_time, train_losses, train_prec1, valid_losses, valid_prec1):
"""
saves the results.
:param C: number of initial channels.
:param args: command line arguments.
:param arrays: training and validation arrays containing loss and accuracy for all epochs.
:param basename: the basename of the model (used to form the save filename).
:param cmdline: the commandline arguments (raw).
:param lr_all: the learning rates.
:param model: the pytorch model.
:param net: the underlying graph.
:param num_epochs: number of epochs.
:param num_parameters: number of parameters of the model.
:param preds: last predictions.
:param size: The size of the model (S,M,L).
    :param total_time: the total training time summed over all epochs (saved as epoch_time after dividing by num_epochs).
:param train_losses: the train loss for the last epoch
:param train_prec1: the precision for last epoch.
:param valid_losses: the validation loss for last epoch.
:param valid_prec1: the precision for last epoch.
:return:
"""
results = dict()
train_losses_all, train_prec1_all, valid_losses_all, valid_prec1_all = arrays
# parameters
results['cmdline'] = cmdline
results['args'] = args
# architecture data
results['arch'] = args.arch
results['num_parameters'] = num_parameters
results['size'] = size
results['C'] = C
for prop in ['meta', 'num_nodes', 'num_units', 'stages', 'edges', 'pos']:
results[prop] = getattr(net, prop, None)
if 'edge_weights' in dir(net):
results['edge_weights'] = net.edge_weights()
else:
results['edge_weights'] = None
# simulation results by epoch/batch
results['lr'] = lr_all
results['train_losses'] = train_losses_all
results['train_accs'] = train_prec1_all
results['test_losses'] = valid_losses_all
results['test_accs'] = valid_prec1_all
# final results
results['train_loss'] = train_losses.avg
results['train_acc'] = train_prec1.avg
results['test_loss'] = valid_losses.avg
results['test_acc'] = valid_prec1.avg
results['epoch_time'] = total_time / num_epochs
with open(os.path.join(args.results_dir, basename + '.pkl'), 'wb') as f:
pkl.dump(results, f)
logging.info('{} {:.3f}'.format(results['arch'], results['test_acc']))
if args.save_model:
torch.save({
'state_dict': model.state_dict(),
}, os.path.join(args.models_dir, basename + '.th'))
if args.save_preds:
np.save(os.path.join(args.preds_dir, basename + '.npy'), preds)
def update_all_metrics(epoch, arrays, train_losses, train_prec1, valid_losses, valid_prec1):
'''
updates all arrays of metrics with values computed for the current epoch.
:param epoch: the current epoch
:param arrays: the arrays with all training and test accuracies for all epochs.
:param train_losses:
:param train_prec1:
:param valid_losses:
:param valid_prec1:
:return:
'''
train_losses_all, train_prec1_all, valid_losses_all, valid_prec1_all = arrays
train_losses_all[epoch] = train_losses.get_array()
train_prec1_all[epoch] = train_prec1.get_array()
valid_losses_all[epoch] = valid_losses.get_array()
valid_prec1_all[epoch] = valid_prec1.get_array()
def create_metric_arrays(epoch, num_epochs, train_losses, train_prec1, valid_losses, valid_prec1):
"""
creates the arrays storing loss and accuracy for all epochs.
:param epoch: current epoch number upon creation of the arrays (usually 0).
:param num_epochs: total number of epochs.
:param train_losses:
:param train_prec1:
:param valid_losses:
:param valid_prec1:
:return:
"""
# Determine the number of batches in the training set
arr = train_losses.get_array()
train_nbatches = len(arr)
train_losses_all = np.zeros((num_epochs, train_nbatches))
train_losses_all[epoch] = arr
train_prec1_all = np.zeros((num_epochs, train_nbatches))
train_prec1_all[epoch] = train_prec1.get_array()
# Determine the number of batches in the test set
arr = valid_losses.get_array()
valid_nbatches = len(arr)
valid_losses_all = np.zeros((num_epochs, valid_nbatches))
valid_losses_all[epoch] = arr
valid_prec1_all = np.zeros((num_epochs, valid_nbatches))
valid_prec1_all[epoch] = valid_prec1.get_array()
return train_losses_all, train_prec1_all, valid_losses_all, valid_prec1_all
def set_lr_scheduler(args, optimizer):
"""
Sets a learning rate scheduler.
:param args: command line arguments.
:param optimizer: the optimizer class, to which the scheduler is assigned.
:return: the learning rate scheduler (torch.optim.lr_scheduler class)
"""
lr_scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer,
milestones=[args.milestone1 - 1, args.milestone2 - 1])
if args.arch in ['resnet1202', 'resnet110'] or args.size == 'L':
# for resnet1202 original paper uses lr=0.01 for first 400 minibatches for warm-up
# then switch back. In this implementation it will correspond for first epoch.
for param_group in optimizer.param_groups:
param_group['lr'] = args.lr * 0.1
return lr_scheduler
def set_criterion(args, model):
"""
Sets the loss criterion.
:param args: command line arguments.
:param model: pytorch model.
    :return: the loss criterion and the SGD optimizer for the model parameters.
"""
criterion = nn.CrossEntropyLoss().cuda()
if args.half:
model.half()
criterion.half()
optimizer = torch.optim.SGD(model.parameters(), args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay)
return criterion, optimizer
def load_model(args, num_outputs):
"""
Load the model corresponding to the given net architecture.
:param args: command line argument
:param num_outputs: output dimension (e.g. 10 for CIFAR10, 100 for CIFAR100)
:return: initial number of channels, the net name, the pytorch model, the net architecture, the net size.
"""
C = None
if args.arch.startswith('resnet'):
from src.models import resnet
num_classes = 10
if args.cifar == "100":
num_classes = 100
net = resnet.__dict__[args.arch](num_classes=num_classes)
model = torch.nn.DataParallel(net)
basename = args.arch
size = 'C'
if args.arch == 'resnet32':
size = 'S'
if args.arch == 'resnet56':
size = 'M'
if args.arch == 'resnet110':
size = 'L'
else:
import importlib
sys.path.append(args.net_dir)
Net = getattr(importlib.import_module(args.arch), 'Net')
if args.C is None:
# fix C from the total number of parameters
C = Net.get_C(params[args.size], restype=args.restype, blocktype=args.blocktype)
net = Net(C, restype=args.restype, blocktype=args.blocktype, num_outputs=num_outputs)
model = torch.nn.DataParallel(net)
basename = '{}_{}'.format(args.arch, args.size)
size = args.size
else:
# C set manually
C = args.C
net = Net(C, restype=args.restype, blocktype=args.blocktype, num_outputs=num_outputs)
model = torch.nn.DataParallel(net)
basename = '{}_C{}'.format(args.arch, C)
size = 'C'
return C, basename, model, net, size
def make_dirs(args):
"""
Creates the directories for saving, if they do not exists already.
"""
os.makedirs(args.results_dir, exist_ok=True)
if args.save_model:
os.makedirs(args.models_dir, exist_ok=True)
if args.save_preds:
os.makedirs(args.preds_dir, exist_ok=True)
def set_seed(args):
"""
Sets the seed to args.seed.
:param args: command line arguments.
:return:
"""
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
torch.cuda.manual_seed(args.seed)
def check_learning_rate(epoch, lr_all, lr_scheduler, optimizer, verbose):
"""
    records (and optionally prints) the current learning rate, then advances the scheduler.
:param epoch: current epoch.
:param lr_all: arrays containing lr_rates for all epochs.
:param lr_scheduler: the learning rate scheduler.
:param optimizer: the optimizer.
:param verbose: whether to be verbose.
:return:
"""
lr = optimizer.param_groups[0]['lr']
if verbose:
print('lr {:.5f}'.format(lr), end=' | test: ')
lr_all[epoch] = lr
lr_scheduler.step()
def load_cifar10(args):
"""
loads the cifar10 dataset.
:param args: command line arguments.
:return:
"""
CIFAR10_PATH = args.datapath
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(root=CIFAR10_PATH, train=True, transform=transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.RandomCrop(32, 4),
transforms.ToTensor(),
normalize,
]), download=True),
batch_size=args.batch_size, shuffle=True,
num_workers=args.workers, pin_memory=True)
val_loader = torch.utils.data.DataLoader(
datasets.CIFAR10(root=CIFAR10_PATH, train=False, transform=transforms.Compose([
transforms.ToTensor(),
normalize,
])),
batch_size=args.batch_size, shuffle=False,
num_workers=args.workers, pin_memory=True)
return train_loader, val_loader
def load_cifar100(args):
"""
loads the cifar100 datasets
:param args: command line arguments.
:return:
"""
CIFAR100_PATH = args.datapath
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_loader = torch.utils.data.DataLoader(
datasets.CIFAR100(root=CIFAR100_PATH, train=True, transform=transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.RandomCrop(32, 4),
transforms.ToTensor(),
normalize,
]), download=True),
batch_size=args.batch_size, shuffle=True,
num_workers=args.workers, pin_memory=True)
val_loader = torch.utils.data.DataLoader(
datasets.CIFAR100(root=CIFAR100_PATH, train=False, transform=transforms.Compose([
transforms.ToTensor(),
normalize,
])),
batch_size=args.batch_size, shuffle=False,
num_workers=args.workers, pin_memory=True)
return train_loader, val_loader
def train(train_loader, model, criterion, optimizer, epoch, args):
"""
Run one train epoch
"""
losses = AverageMeter()
top1 = AverageMeter()
# switch to train mode
model.train()
start = time.time()
for i, (input, target) in enumerate(train_loader):
target = target.cuda(non_blocking=True)
input_var = torch.autograd.Variable(input).cuda()
target_var = torch.autograd.Variable(target)
if args.half:
input_var = input_var.half()
# compute output
output = model(input_var)
loss = criterion(output, target_var)
# compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
output = output.float()
loss = loss.float()
# measure accuracy and record loss
prec1 = accuracy(output.data, target)[0]
losses.update(loss.item(), input.size(0))
top1.update(prec1.item(), input.size(0))
epoch_time = time.time() - start
if args.verbose:
print('Epoch: {epoch:3d} '
'Loss {loss.avg:.4f} '
'Prec@1 {top1.avg:.3f} time {epoch_time:.2f}s'.format(
epoch=epoch + 1, epoch_time=epoch_time, loss=losses, top1=top1), end=' ')
return epoch_time, losses, top1
def validate(val_loader, model, criterion, args, return_preds=False):
"""
Run evaluation
"""
losses = AverageMeter()
top1 = AverageMeter()
if return_preds:
num_val = len(val_loader.dataset) # number of images in test set
        preds = None  # allocated lazily in the loop once the number of classes is known
j = 0
# switch to evaluate mode
model.eval()
with torch.no_grad():
for i, (input, target) in enumerate(val_loader):
target = target.cuda(non_blocking=True)
with torch.no_grad():
input_var = torch.autograd.Variable(input).cuda()
target_var = torch.autograd.Variable(target)
if args.half:
input_var = input_var.half()
# compute output
output = model(input_var)
loss = criterion(output, target_var)
output = output.float()
loss = loss.float()
# measure accuracy and record loss
batch_size = input.size(0)
prec1 = accuracy(output.data, target)[0]
losses.update(loss.item(), batch_size)
top1.update(prec1.item(), batch_size)
            if return_preds:
                if preds is None:
                    # allocate once the output width (number of classes) is known
                    preds = np.zeros((num_val, output.size(1)), dtype=np.float32)
                preds[j:j + batch_size] = output.data.cpu().numpy()
                j += batch_size
if args.verbose:
print('Loss {losses.avg:.4f} Prec@1 {top1.avg:.3f}'.format(losses=losses, top1=top1), end=' ')
if return_preds:
return losses, top1, preds
else:
return losses, top1
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
self.vals = []
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
self.vals.append(self.val)
def save(self, filename):
np.save(filename, np.array(self.vals))
def get_array(self):
return np.array(self.vals)
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0)
res.append(correct_k.mul_(100.0 / batch_size))
return res
| 33.45 | 114 | 0.642537 |
da88f6bb3a081d65861923373d8e7b07388ced85 | 279 | php | PHP | test/ConfigClasses/Valid/SimpleConfig.php | kostislav/php-class-symfony-service-config | 1503199d8874579f8b24cb3deaa619259bf38589 | [
"MIT"
] | 1 | 2021-09-16T07:24:49.000Z | 2021-09-16T07:24:49.000Z | test/ConfigClasses/Valid/SimpleConfig.php | kostislav/php-class-symfony-service-config | 1503199d8874579f8b24cb3deaa619259bf38589 | [
"MIT"
] | null | null | null | test/ConfigClasses/Valid/SimpleConfig.php | kostislav/php-class-symfony-service-config | 1503199d8874579f8b24cb3deaa619259bf38589 | [
"MIT"
] | null | null | null | <?php
namespace ConfigClasses\Valid;
use FakeServices\SimpleService;
use Kostislav\ClassConfig\ServiceDefinition;
class SimpleConfig {
#[ServiceDefinition(isPublic: true)]
public function someService(): SimpleService {
return new SimpleService('sesd');
}
} | 21.461538 | 50 | 0.74552 |
9fed6f7f1c32ccfb52371c8ee1658a782098afe5 | 1,539 | py | Python | python3/hackerrank_leetcode/coin_change/main.py | seLain/codesnippets | ae9a1fa05b67f4b3ac1703cc962fcf5f6de1e289 | [
"MIT"
] | null | null | null | python3/hackerrank_leetcode/coin_change/main.py | seLain/codesnippets | ae9a1fa05b67f4b3ac1703cc962fcf5f6de1e289 | [
"MIT"
] | null | null | null | python3/hackerrank_leetcode/coin_change/main.py | seLain/codesnippets | ae9a1fa05b67f4b3ac1703cc962fcf5f6de1e289 | [
"MIT"
] | null | null | null | '''
Hackerrank: https://www.hackerrank.com/challenges/coin-change/problem
this solution will get timeout in many test cases
however, this solution can get all the combination during computation
'''
import sys
from collections import Counter, defaultdict
sys.setrecursionlimit(10000)
memory = defaultdict(list)
def recur_getWays(n, c):
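    # Recursively build every distinct coin-count combination (as a Counter) summing to n,
    # memoizing the list of combinations per remaining amount n in `memory`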
#print('enter: n='+str(n))
if n < 0:
return False, []
elif n == 0:
return True, [Counter()]
if n in memory.keys():
return True, memory[n]
all_combs_count = []
for coin in c:
#print('choose coin:'+str(coin))
valid, combs = recur_getWays(n-coin, c)
if valid:
for comb in combs:
comb = comb + Counter({coin:1})
existed = False
for existed_comb in all_combs_count:
if existed_comb == comb:
existed = True
break
if not existed:
all_combs_count.append(comb)
#print('memorize: n='+str(n)+' count:'+str(all_combs_count))
memory[n] = all_combs_count
return True, all_combs_count
def getWays(n, c):
# Complete this function
valid, combs = recur_getWays(n, c)
#print(memory)
return len(combs)
n, m = input().strip().split(' ')
n, m = [int(n), int(m)]
c = list(map(int, input().strip().split(' ')))
# Print the number of ways of making change for 'n' units using coins having the values given by 'c'
ways = getWays(n, c)
print(ways) | 29.037736 | 100 | 0.589994 |
da8329dfa63a5a263c3abd3b8b6095598ae89c5c | 616 | php | PHP | database/seeds/CategorySeeder.php | alihassany4ktech/mploya | 5da864ce3db7e9dfeac8199f0eaee393a039fb6d | [
"MIT"
] | null | null | null | database/seeds/CategorySeeder.php | alihassany4ktech/mploya | 5da864ce3db7e9dfeac8199f0eaee393a039fb6d | [
"MIT"
] | null | null | null | database/seeds/CategorySeeder.php | alihassany4ktech/mploya | 5da864ce3db7e9dfeac8199f0eaee393a039fb6d | [
"MIT"
] | null | null | null | <?php
use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;
class CategorySeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
DB::table('categories')->insert([
[
'title' => 'title 1',
],
[
'title' => 'title 2',
],
[
'title' => 'title 3',
],
[
'title' => 'title 4',
],
[
'title' => 'title 5',
],
]);
}
}
| 18.117647 | 41 | 0.349026 |
c9ea194d809586c898f53490fb5af985d487e697 | 891 | tsx | TypeScript | typescript_solutions/09_redux_fc/01_hotel_app/src/components/Navbar.tsx | Wantonius/opiframe-react-exercisebook | 719a71bddedcd2c17227900c801e135f2787ff63 | [
"MIT"
] | null | null | null | typescript_solutions/09_redux_fc/01_hotel_app/src/components/Navbar.tsx | Wantonius/opiframe-react-exercisebook | 719a71bddedcd2c17227900c801e135f2787ff63 | [
"MIT"
] | null | null | null | typescript_solutions/09_redux_fc/01_hotel_app/src/components/Navbar.tsx | Wantonius/opiframe-react-exercisebook | 719a71bddedcd2c17227900c801e135f2787ff63 | [
"MIT"
] | null | null | null | import React from 'react';
import {Link} from 'react-router-dom';
import {useSelector} from 'react-redux';
import HotelInfo from '../models/HotelInfo';
interface LoadingState {
loading:boolean;
}
interface ErrorState {
error:string
}
const Navbar:React.FC<{}> = () => {
const loadingState = (state:LoadingState) => state.loading
const errorState = (state:ErrorState) => state.error
const loading = useSelector(loadingState);
const error = useSelector(errorState);
let header = <h4>Hotel app</h4>
if(loading) {
header = <h4>Loading ...</h4>
}
if(error) {
header = <h4>{error}</h4>
}
let navStyle={backgroundColor:"lightblue",height:120}
return(
<div style={navStyle}>
{header}
<ul style={{listStyleType:"none"}}>
<li><Link to="/">Hotel List</Link></li>
<li><Link to="/form">Add to list</Link></li>
</ul>
</div>
)
}
export default Navbar; | 21.214286 | 59 | 0.65881 |
4bd392558f3b8b06d432258ddd74f1827b7b5f2a | 2,821 | go | Go | auth/auth_service_test.go | racker/rmii-telemetry-envoy | 11c50792585e134c41c4d574ff7b64a2a35d01fe | [
"Apache-2.0"
] | 4 | 2019-06-24T14:49:37.000Z | 2021-12-07T22:29:34.000Z | auth/auth_service_test.go | racker/rmii-telemetry-envoy | 11c50792585e134c41c4d574ff7b64a2a35d01fe | [
"Apache-2.0"
] | 24 | 2018-11-16T03:50:49.000Z | 2020-07-31T14:10:33.000Z | auth/auth_service_test.go | racker/rmii-telemetry-envoy | 11c50792585e134c41c4d574ff7b64a2a35d01fe | [
"Apache-2.0"
] | 1 | 2019-06-24T14:50:18.000Z | 2019-06-24T14:50:18.000Z | /*
* Copyright 2020 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package auth_test
import (
"fmt"
"github.com/racker/salus-telemetry-envoy/auth"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"io"
"net/http"
"net/http/httptest"
"os"
"strings"
"testing"
)
func TestAuthServiceCertProvider_ProvideCertificates_Success(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
assert.Equal(t, "/v1.0/cert", req.URL.Path)
assert.Equal(t, "Bearer token1", req.Header.Get("Authorization"))
resp.Header().Set("Content-Type", "application/json")
respFile, err := os.Open("testdata/auth_service_resp.json")
require.NoError(t, err)
defer respFile.Close()
io.Copy(resp, respFile)
}))
defer ts.Close()
viper.SetConfigType("yaml")
err := viper.ReadConfig(strings.NewReader(fmt.Sprintf(`
auth_token: token1
tls:
auth_service:
url: %s
`, ts.URL)))
require.NoError(t, err)
certificate, certPool, err := auth.LoadCertificates()
require.NoError(t, err)
verifyCertSubject(t, "dev-ambassador", certificate)
verifyCertPoolSubject(t, "dev-rmii-ambassador-ca", certPool)
}
func TestAuthServiceCertProvider_ProvideCertificates_BadStatus(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
resp.WriteHeader(500)
}))
defer ts.Close()
viper.SetConfigType("yaml")
err := viper.ReadConfig(strings.NewReader(fmt.Sprintf(`
tls:
auth_service:
url: %s
`, ts.URL)))
require.NoError(t, err)
certificate, certPool, err := auth.LoadCertificates()
require.Error(t, err)
assert.Nil(t, certificate)
assert.Nil(t, certPool)
}
func TestAuthServiceCertProvider_ProvideCertificates_MissingRespFields(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
resp.WriteHeader(200)
resp.Write([]byte(`{"certificate":""}`))
}))
defer ts.Close()
viper.SetConfigType("yaml")
err := viper.ReadConfig(strings.NewReader(fmt.Sprintf(`
tls:
auth_service:
url: %s
`, ts.URL)))
require.NoError(t, err)
certificate, certPool, err := auth.LoadCertificates()
require.Error(t, err)
assert.Nil(t, certificate)
assert.Nil(t, certPool)
}
| 27.125 | 94 | 0.730946 |
28b6d0ad5c55b4478a83fe93ed1ab6d709811689 | 174 | sql | SQL | src/dialogflow-java-client-master/samples/clients/VirtualTradingAssistant/src/main/java/ai/examples/schema.sql | 16kozlowskim/Group-20-SE | ceb8c319643964a3f478772d8f10090962df567c | [
"MIT"
] | null | null | null | src/dialogflow-java-client-master/samples/clients/VirtualTradingAssistant/src/main/java/ai/examples/schema.sql | 16kozlowskim/Group-20-SE | ceb8c319643964a3f478772d8f10090962df567c | [
"MIT"
] | null | null | null | src/dialogflow-java-client-master/samples/clients/VirtualTradingAssistant/src/main/java/ai/examples/schema.sql | 16kozlowskim/Group-20-SE | ceb8c319643964a3f478772d8f10090962df567c | [
"MIT"
] | null | null | null | create table company (
name string,
count integer
);
create table attribute (
name string,
count integer
);
create table sector (
name string,
count integer
);
| 11.6 | 24 | 0.695402 |
07f3c2c1575246c69c0afa12b1528d65da54f519 | 1,868 | css | CSS | style.css | MosesGakuhi1857/Akan-name | 50707c31908809bac6ebfde13fa4265e1ecf92e8 | [
"MIT"
] | null | null | null | style.css | MosesGakuhi1857/Akan-name | 50707c31908809bac6ebfde13fa4265e1ecf92e8 | [
"MIT"
] | null | null | null | style.css | MosesGakuhi1857/Akan-name | 50707c31908809bac6ebfde13fa4265e1ecf92e8 | [
"MIT"
] | null | null | null | @import url('https://fonts.googleapis.com/css2?family=Oswald&display=swap');
body {
font-family: 'Oswald', sans-serif;
}
nav {
background-image: url(./assets/footer.ghana.jpg);
height: 40px;
width: 100%;
}
.container {
text-align: center;
}
img {
position: relative;
top: 30px;
background-color: #333;
border-radius: 10%;
opacity:0.6;
}
section {
background-image: url(./assets/ghana.sunset.jpg);
background-size: cover;
background-repeat: no-repeat;
height: 135vh;
}
h3 {
text-decoration: underline;
}
.desc {
background-color:#333;
opacity: 0.7;
color: #fff;
font-size: 1.5rem;
width: 35%;
border-radius: 10%;
line-height: 2rem;
padding: 30px;
line-height: 2rem;
}
.akan-names{
display: flex;
}
.akan-names div{
padding: 15px;
}
.form {
background-color: #333;
opacity: 0.7;
color: #fff;
width: 30%;
text-align: center;
border-radius: 10%;
height: auto;
line-height: 2rem;
font-size: 1.5rem;
padding: 30px;
}
form p {
text-decoration: underline;
font-size: 1.6rem;
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
}
.desc-form {
display: flex;
gap: 50px;
justify-content: center;
position: relative;
top: 100px;
}
input[type=text] {
width: 70%;
border: none;
text-align: center;
border-radius: 10px;
}
footer {
background-image: url(./assets/footer.ghana.jpg);
width: 100%;
height: 50px;
text-align: center;
font-size: 1.4rem;
display: flex;
justify-content: center;
}
button {
text-align: center;
padding: 5px;
border: none;
background-color: #fff;
color: #E79925;
border-radius: 10px;
width: 70px;
}
button:hover {
transform: scale(0.90);
}
.radio {
padding: 10px;
} | 15.566667 | 76 | 0.597966 |
68985937694a6c2a1b9410eea9602221105eeee4 | 7,824 | php | PHP | resources/views/admin/tenants/show.blade.php | muhammad-sayg/juffair_gables_project | 0db75110ac74ef6a67a7fc5b3d3675442f4ba05f | [
"MIT"
] | null | null | null | resources/views/admin/tenants/show.blade.php | muhammad-sayg/juffair_gables_project | 0db75110ac74ef6a67a7fc5b3d3675442f4ba05f | [
"MIT"
] | null | null | null | resources/views/admin/tenants/show.blade.php | muhammad-sayg/juffair_gables_project | 0db75110ac74ef6a67a7fc5b3d3675442f4ba05f | [
"MIT"
] | null | null | null | @extends('layouts.admin.app')
{{-- Page title --}}
{{-- page level styles --}}
@section('header_styles')
<!-- Template CSS -->
<link rel="stylesheet" href="{{ asset('public/admin/assets') }}/css/components.css">
<style>
</style>
@stop
@section('content')
<section class="section">
{{-- <ul class="breadcrumb breadcrumb-style ">
<li class="breadcrumb-item">
<h4 class="page-title m-b-0">Profile</h4>
</li>
<li class="breadcrumb-item">
<a href="index.html">
<i class="fas fa-home"></i></a>
</li>
<li class="breadcrumb-item">View</li>
<li class="breadcrumb-item">Tenant</li>
</ul> --}}
<div class="section-body">
<div class="row mt-sm-5">
<div class="col-12 col-md-12 col-lg-4">
<div class="card author-box">
<div class="card-body">
<div class="author-box-center">
<img alt="image" src="{{ asset('public/admin/assets/img/staff')}}/{{ isset($tenant->tenant_image) ? $tenant->tenant_image:'' }}" class="rounded-circle author-box-picture">
<div class="clearfix"></div>
<div class="author-box-name">
<a href="#">{{isset($tenant) ? $tenant->tenant_first_name. ' '.$tenant->tenant_last_name : ''}}</a>
</div>
<div class="author-box-job">Apartment:{{isset($tenant->unit) ? $tenant->unit->unit_number : '' }}(floor {{isset($tenant->unit->floor)? $tenant->unit->floor->number : ''}})</div>
</div>
</div>
</div>
<div class="card">
<div class="card-header">
<h4>Personal Details</h4>
</div>
<div class="card-body">
<div class="py-1">
<p class="clearfix">
<span class="float-left">
Date of Birth
</span>
<span class="float-right text-muted">
{{ isset($tenant->tenant_date_of_birth) ? \Carbon\Carbon::parse($tenant->tenant_date_of_birth)->format('d-m-Y') : '' }}
</span>
</p>
<p class="clearfix">
<span class="float-left">
Phone
</span>
<span class="float-right text-muted">
{{ isset($tenant->tenant_mobile_phone)? $tenant->tenant_mobile_phone : '' }}
</span>
</p>
<p class="clearfix">
<span class="float-left">
Email
</span>
<span class="float-right text-muted">
{{ isset($tenant->tenant_email_address )? $tenant->tenant_email_address : '' }}
</span>
</p>
</div>
</div>
</div>
<div class="text-center">
<a href="{{ url()->previous() }}" class="btn btn-primary">Back</a>
</div>
</div>
<div class="col-12 col-md-12 col-lg-8">
<div class="card">
<div class="padding-20">
<ul class="nav nav-tabs" id="myTab2" role="tablist">
<li class="nav-item">
<a class="nav-link active" id="home-tab2" data-toggle="tab" href="#about" role="tab"
aria-selected="true">About</a>
</li>
</ul>
<div class="tab-content tab-bordered" id="myTab3Content">
<div class="tab-pane fade show active" id="about" role="tabpanel" aria-labelledby="home-tab2">
<div class="row">
<div class="col-md-3 col-6 b-r">
<strong>Full Name</strong>
<br>
<p class="text-muted">{{isset($tenant) ? $tenant->tenant_first_name. ' '.$tenant->tenant_last_name : ''}}</p>
</div>
<div class="col-md-3 col-6 b-r">
<strong>Mobile</strong>
<br>
<p class="text-muted">{{ isset($tenant->tenant_mobile_phone)? $tenant->tenant_mobile_phone : '' }}</p>
</div>
<div class="col-md-3 col-6 b-r">
<strong>Emergency Email</strong>
<br>
<p class="text-muted">{{ isset($tenant->emergancy_email )? $tenant->emergancy_email : '' }}</p>
</div>
<div class="col-md-3 col-6">
<strong>Apartment</strong>
<br>
<p class="text-muted">{{isset($tenant->unit) ? $tenant->unit->unit_number : '' }}</p>
</div>
@if($tenant->tenant_cpr_no)
<div class="col-md-3 col-6">
<strong>CPR Number</strong>
<br>
<p class="text-muted">{{isset($tenant->tenant_cpr_no) ? $tenant->tenant_cpr_no : '' }}</p>
</div>
@endif
<div class="col-md-3 col-6">
<strong>Passport Number</strong>
<br>
<p class="text-muted">{{isset($tenant->tenant_passport_no) ? $tenant->tenant_passport_no : '' }}</p>
</div>
<div class="col-md-3 col-6">
<strong>Total Rent</strong>
<br>
<p class="text-muted">{{isset($tenant->tenant_rent) ? (int)$tenant->tenant_rent.' BD' : '' }}</p>
</div>
</div>
<div class="section-title">Facilities</div>
<ul>
@foreach($tenant->tenant_facilities_list as $facilities)
<li>{{ $facilities }}</li>
@endforeach
</ul>
<div class="section-title">Contract Details</div>
<ul>
<li>Lease Period Start Date: from {{ isset($tenant->lease_period_start_datetime) ? \Carbon\Carbon::parse($tenant->lease_period_start_datetime)->toFormattedDateString() : '' }} to {{ isset($tenant->lease_period_end_datetime) ? \Carbon\Carbon::parse($tenant->lease_period_end_datetime)->toFormattedDateString() : '' }}</li>
</ul>
<div class="section-title">Address</div>
<ul>
<li>Present Address:{{ isset($tenant->tenant_present_address )? $tenant->tenant_present_address : '' }}</li>
                <li>Permanent Address:{{ isset($tenant->tenant_permanent_address )? $tenant->tenant_permanent_address : '' }}</li>
              </ul>
<div class="section-title">Documents</div>
<ul>
<li><a href="{{ url('public/admin/assets/img/documents').'/'. $tenant->tenant_passport_copy }}" target="blank">Passport Copy</a></li>
@if($tenant->tenant_cpr_copy)
<li><a href="{{ url('public/admin/assets/img/documents').'/'. $tenant->tenant_cpr_copy }}" target="blank">CPR Copy</a></li>
@endif
<li><a href="{{ url('public/admin/assets/img/documents').'/'. $tenant->tenant_contract_copy }}" target="blank">Contract Copy</a></li>
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
@stop
@section('footer_scripts')
<script>
</script>
@stop | 44.708571 | 341 | 0.451559 |
a9ed1d6ebc46f1572cecdec17334da997c81f840 | 1,118 | php | PHP | resources/views/disposisi/create.blade.php | 161710091/Surat | 31cd6dedf352c48592373176bea1b99e9b174671 | [
"MIT"
] | null | null | null | resources/views/disposisi/create.blade.php | 161710091/Surat | 31cd6dedf352c48592373176bea1b99e9b174671 | [
"MIT"
] | null | null | null | resources/views/disposisi/create.blade.php | 161710091/Surat | 31cd6dedf352c48592373176bea1b99e9b174671 | [
"MIT"
] | null | null | null | @extends('layouts.admin')
@section('content')
<div class="row">
<div class="container-fluid">
<div class="col-md-12">
<div class="card card-info">
<div class="card-header">Add Disposisi
<div class="panel-title pull-right">
<a href="{{route('disposisi.index')}}" class="fa fa-arrow-left">Add</a>
</div>
</div>
<div class="card-body">
<form action="{{ route('disposisi.store') }}" method="post">
{{ csrf_field() }}
<div class="form-group {{ $errors->has('disposisi') ? 'has-error' : '' }}">
<label class="control-label">Disposisi</label>
<input type="text" name="disposisi" class="form-control" required>
@if ($errors->has('disposisi'))
<span class="help-block">
<strong>{{ $errors->first('disposisi') }}</strong>
</span>
@endif
</div>
<div>
<button type="submit" class="btn btn-outline-primary">OK</button>
<button type="reset" class="btn btn-outline-danger">Reset</button>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
@endsection | 31.055556 | 82 | 0.554562 |
8e7a8911e64f31c483bd66741cb0779e59263b01 | 7,227 | js | JavaScript | src/components/WineList.js | mmtakeuchi/wine-client | beda19b70adc84679b7ff4025220558259b70520 | [
"MIT"
] | 1 | 2021-04-26T02:45:27.000Z | 2021-04-26T02:45:27.000Z | src/components/WineList.js | mmtakeuchi/wine-client | beda19b70adc84679b7ff4025220558259b70520 | [
"MIT"
] | null | null | null | src/components/WineList.js | mmtakeuchi/wine-client | beda19b70adc84679b7ff4025220558259b70520 | [
"MIT"
] | null | null | null | import React, { useEffect } from "react";
import { Link } from "react-router-dom";
import { connect, useDispatch } from "react-redux";
import { getVarietals } from "../actions/varietalActions";
import { getOrigins } from "../actions/originActions";
import { makeStyles } from "@material-ui/core/styles";
import Container from "@material-ui/core/Container";
import Table from "@material-ui/core/Table";
import TableBody from "@material-ui/core/TableBody";
import TableCell from "@material-ui/core/TableCell";
import TableContainer from "@material-ui/core/TableContainer";
import TableHead from "@material-ui/core/TableHead";
import TableRow from "@material-ui/core/TableRow";
import Paper from "@material-ui/core/Paper";
import Button from "@material-ui/core/Button";
import Typography from "@material-ui/core/Typography";
const useStyles = makeStyles({
table: {
minWidth: 650,
},
title: {
fontSize: "1.5em",
fontWeight: "bold",
},
category: {
fontSize: "1.4em",
},
link: {
textDecoration: "none",
color: "blue",
fontSize: "1.5em",
},
});
const WineList = (props) => {
const dispatch = useDispatch();
const classes = useStyles();
useEffect(() => {
dispatch(getOrigins());
}, [dispatch]);
useEffect(() => {
dispatch(getVarietals());
}, [dispatch]);
const varietalName = (wine) => {
if (props.varietals.length) {
return (
<Typography className={classes.category}>
{
props.varietals.find(
(varietal) => varietal.id === parseInt(wine.varietal_id)
).name
}
</Typography>
);
}
};
const originRegion = (wine) => {
if (props.origins.length) {
return (
<Typography className={classes.category}>
{
props.origins.find(
(origin) => origin.id === parseInt(wine.origin_id)
).region
}
</Typography>
);
}
};
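  // Render table rows for the signed-in user's wines, filtered by the
  // selected varietal and/or origin and sorted by brand.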
const filterWines = () => {
if (props.wines && props.wines.length) {
if (props.varietalId && props.originId) {
return (
<React.Fragment>
{props.wines
.filter((wine) => wine.user_id === props.user.id)
.filter(
(wine) =>
wine.varietal_id === parseInt(props.varietalId) &&
wine.origin_id === parseInt(props.originId)
)
.sort((a, b) => (a.brand < b.brand ? -1 : 1))
.map((wine) => (
<TableRow key={wine.id}>
                  <TableCell component="th" scope="row">
                    <Link to={`/wines/${wine.id}`} className={classes.link}>
                      {wine.brand}
                    </Link>
                  </TableCell>
                  <TableCell align="right">{varietalName(wine)}</TableCell>
                  <TableCell align="right">{originRegion(wine)}</TableCell>
</TableRow>
))}
</React.Fragment>
);
} else if (props.varietalId && props.originId === "") {
return (
<React.Fragment>
{props.wines.length &&
props.wines
.filter(
(wine) => wine.varietal_id === parseInt(props.varietalId)
)
.sort((a, b) => (a.brand < b.brand ? -1 : 1))
.map((wine) => (
<TableRow key={wine.id}>
<TableCell component="th" scope="row">
<Link to={`/wines/${wine.id}`} className={classes.link}>
{wine.brand}
</Link>
</TableCell>
<TableCell align="right">{varietalName(wine)}</TableCell>
<TableCell align="right">{originRegion(wine)}</TableCell>
</TableRow>
))}
</React.Fragment>
);
} else if (props.originId && props.varietalId === "") {
return (
<React.Fragment>
{props.wines.length &&
props.wines
.filter((wine) => wine.user_id === props.user.id)
.filter((wine) => wine.origin_id === parseInt(props.originId))
.sort((a, b) => (a.brand < b.brand ? -1 : 1))
.map((wine) => (
<TableRow key={wine.id}>
<TableCell component="th" scope="row">
<Link to={`/wines/${wine.id}`} className={classes.link}>
{wine.brand}
</Link>
</TableCell>
<TableCell align="right">{varietalName(wine)}</TableCell>
<TableCell align="right">{originRegion(wine)}</TableCell>
</TableRow>
))}
</React.Fragment>
);
} else {
return (
<React.Fragment>
{props.wines.length &&
props.wines
.filter((wine) => wine.user_id === props.user.id)
.sort((a, b) => (a.brand < b.brand ? -1 : 1))
.map((wine) => (
<TableRow key={wine.id}>
<TableCell component="th" scope="row">
<Link to={`/wines/${wine.id}`} className={classes.link}>
{wine.brand}
</Link>
</TableCell>
<TableCell align="right">{varietalName(wine)}</TableCell>
<TableCell align="right">{originRegion(wine)}</TableCell>
</TableRow>
))}
</React.Fragment>
);
}
}
};
if (props.wines) {
return (
<div>
<Container maxWidth="md">
<Button variant="outlined" size="small">
<Link to="/wines/new" className={classes.link}>
Add Wine
</Link>
</Button>
<br />
<TableContainer component={Paper}>
<Table className={classes.table} aria-label="simple table">
<TableHead>
<TableRow>
<TableCell className={classes.title}>Wine Brand</TableCell>
<TableCell align="right" className={classes.title}>
Varietal
</TableCell>
<TableCell align="right" className={classes.title}>
Country
</TableCell>
</TableRow>
</TableHead>
<TableBody>{filterWines()}</TableBody>
</Table>
</TableContainer>
</Container>
</div>
);
}
  return <h1>Reload Page</h1>;
};
const mapStateToProps = (state) => ({
origins: state.origins,
varietals: state.varietals,
});
const mapDispatchToProps = (dispatch) => ({
getOrigins: () => dispatch(getOrigins()),
getVarietals: () => dispatch(getVarietals()),
});
export default connect(mapStateToProps, mapDispatchToProps)(WineList);
| 32.408072 | 78 | 0.484157 |
8a385a8665e02e819b852b29940e904b02ce514b | 2,613 | swift | Swift | BankWallet/BankWallet/Modules/RestoreEos/RestoreEosPresenter.swift | isanth/unstoppable-wallet-ios | d21f9cd2446272754ce3943a248cd6262e9f0d48 | [
"MIT"
] | 3 | 2020-05-15T01:52:45.000Z | 2021-02-06T03:09:07.000Z | BankWallet/BankWallet/Modules/RestoreEos/RestoreEosPresenter.swift | WaltonChain/waltonchain-wallet-ios | 1f65f52377767a1ee64b4101dd3aa2cad84e846b | [
"MIT"
] | null | null | null | BankWallet/BankWallet/Modules/RestoreEos/RestoreEosPresenter.swift | WaltonChain/waltonchain-wallet-ios | 1f65f52377767a1ee64b4101dd3aa2cad84e846b | [
"MIT"
] | 1 | 2020-05-31T13:22:52.000Z | 2020-05-31T13:22:52.000Z | class RestoreEosPresenter {
weak var view: IRestoreEosView?
private let mode: RestoreRouter.PresentationMode
private let interactor: IRestoreEosInteractor
private let router: IRestoreEosRouter
private var state: RestoreEosPresenterState
init(mode: RestoreRouter.PresentationMode, interactor: IRestoreEosInteractor, router: IRestoreEosRouter, state: RestoreEosPresenterState) {
self.mode = mode
self.interactor = interactor
self.router = router
self.state = state
}
private func onEnter(account: String?) {
state.account = account
view?.set(account: account)
}
private func onEnter(key: String?) {
state.privateKey = key
view?.set(key: key)
}
private func omitReturns(string: String) -> String {
return string.replacingOccurrences(of: "\n", with: " ")
}
}
extension RestoreEosPresenter: IRestoreEosViewDelegate {
func viewDidLoad() {
if mode == .presented {
view?.showCancelButton()
}
let (account, activePrivateKey) = interactor.defaultCredentials
onEnter(account: account)
onEnter(key: activePrivateKey)
}
func onPasteAccountClicked() {
if let account = interactor.valueFromPasteboard {
onEnter(account: omitReturns(string: account))
}
}
func onChange(account: String?) {
onEnter(account: account)
}
func onDeleteAccount() {
onEnter(account: nil)
}
func onPasteKeyClicked() {
if let key = interactor.valueFromPasteboard {
onEnter(key: omitReturns(string: key))
}
}
func onScan(key: String) {
onEnter(key: key)
}
func onDeleteKey() {
onEnter(key: nil)
}
func didTapCancel() {
router.dismiss()
}
func didTapDone() {
let account = (state.account ?? "").trimmingCharacters(in: .whitespaces).lowercased()
let privateKey = (state.privateKey ?? "").trimmingCharacters(in: .whitespaces)
do {
try interactor.validate(account: account)
try interactor.validate(privateKey: privateKey)
let accountType: AccountType = .eos(account: account, activePrivateKey: privateKey)
switch mode {
case .pushed: router.notifyRestored(accountType: accountType)
case .presented: router.dismissAndNotify(accountType: accountType)
}
} catch {
view?.show(error: error)
}
}
}
extension RestoreEosPresenter: IRestoreEosInteractorDelegate {
}
| 26.13 | 143 | 0.629927 |
5aefee38e656f8395f6bb8debc008750e9112602 | 5,358 | cs | C# | src/Dfe.Spi.UkrlpAdapter.Infrastructure.UkrlpSoapApi.UnitTests/WhenBuildingMessageToGetSpecificUkprns.cs | DFE-Digital/spi-ukrlp-adapter | ec1f905d403c18df733ae2881ccfc4dd3c338868 | [
"MIT"
] | null | null | null | src/Dfe.Spi.UkrlpAdapter.Infrastructure.UkrlpSoapApi.UnitTests/WhenBuildingMessageToGetSpecificUkprns.cs | DFE-Digital/spi-ukrlp-adapter | ec1f905d403c18df733ae2881ccfc4dd3c338868 | [
"MIT"
] | 1 | 2022-01-13T10:11:05.000Z | 2022-01-13T10:11:05.000Z | src/Dfe.Spi.UkrlpAdapter.Infrastructure.UkrlpSoapApi.UnitTests/WhenBuildingMessageToGetSpecificUkprns.cs | DFE-Digital/spi-ukrlp-adapter | ec1f905d403c18df733ae2881ccfc4dd3c338868 | [
"MIT"
] | 1 | 2021-04-10T21:51:23.000Z | 2021-04-10T21:51:23.000Z | using System.Linq;
using System.Xml.Linq;
using AutoFixture;
using NUnit.Framework;
namespace Dfe.Spi.UkrlpAdapter.Infrastructure.UkrlpSoapApi.UnitTests
{
public class WhenBuildingMessageToGetSpecificUkprns
{
private static readonly XNamespace soapNs = "http://schemas.xmlsoap.org/soap/envelope/";
private static readonly XNamespace ukrlpNs = "http://ukrlp.co.uk.server.ws.v3";
private Fixture _fixture;
private string _stakeholderId;
private long _ukprn;
private UkrlpSoapMessageBuilder _builder;
[SetUp]
public void Arrange()
{
_fixture = new Fixture();
_stakeholderId = _fixture.Create<string>();
_ukprn = _fixture.Create<long>();
_builder = new UkrlpSoapMessageBuilder(_stakeholderId);
}
[Test]
public void ThenItShouldReturnSoapMesage()
{
var actual = _builder.BuildMessageToGetSpecificUkprns(new[]{_ukprn});
var envelope = XElement.Parse(actual);
Assert.AreEqual("Envelope", envelope.Name.LocalName);
Assert.AreEqual(soapNs.NamespaceName, envelope.Name.NamespaceName);
Assert.IsNotNull(envelope.Elements().SingleOrDefault(e =>
e.Name.LocalName == "Header" && e.Name.NamespaceName == soapNs.NamespaceName));
Assert.IsNotNull(envelope.Elements().SingleOrDefault(e =>
e.Name.LocalName == "Body" && e.Name.NamespaceName == soapNs.NamespaceName));
}
[Test]
public void ThenItShouldHaveAProviderQueryRequestInTheSoapBody()
{
var actual = _builder.BuildMessageToGetSpecificUkprns(new[]{_ukprn});
var body = XElement.Parse(actual).GetElementByLocalName("Body");
Assert.IsNotNull(body.Elements().SingleOrDefault(e =>
e.Name.LocalName == "ProviderQueryRequest" &&
e.Name.NamespaceName == ukrlpNs.NamespaceName));
}
[Test]
public void ThenItShouldHaveAQueryIdInRequest()
{
var actual = _builder.BuildMessageToGetSpecificUkprns(new[]{_ukprn});
var request = XElement.Parse(actual).GetElementByLocalName("Body").GetElementByLocalName("ProviderQueryRequest");
var queryId = request.GetElementByLocalName("QueryId");
Assert.IsNotNull(queryId);
}
[Test]
public void ThenItShouldHaveAStakeholderIdInSelectionCriteria()
{
var actual = _builder.BuildMessageToGetSpecificUkprns(new[]{_ukprn});
var selectionCriteria = XElement.Parse(actual)
.GetElementByLocalName("Body")
.GetElementByLocalName("ProviderQueryRequest")
.GetElementByLocalName("SelectionCriteria");
var stakeholderId = selectionCriteria.GetElementByLocalName("StakeholderId");
Assert.IsNotNull(stakeholderId);
Assert.AreEqual(_stakeholderId, stakeholderId.Value);
}
[Test]
public void ThenItShouldHaveASelectionCriteriaForUkprns()
{
var ukprn1 = _fixture.Create<long>();
var ukprn2 = _fixture.Create<long>();
var actual = _builder.BuildMessageToGetSpecificUkprns(new[]{ukprn1, ukprn2});
var selectionCriteria = XElement.Parse(actual)
.GetElementByLocalName("Body")
.GetElementByLocalName("ProviderQueryRequest")
.GetElementByLocalName("SelectionCriteria");
var ukprnList = selectionCriteria.GetElementByLocalName("UnitedKingdomProviderReferenceNumberList");
Assert.IsNotNull(ukprnList);
var ukprns = ukprnList.GetElementsByLocalName("UnitedKingdomProviderReferenceNumber");
Assert.IsNotNull(ukprns);
Assert.AreEqual(2, ukprns.Length);
Assert.AreEqual(ukprn1.ToString(), ukprns[0].Value);
Assert.AreEqual(ukprn2.ToString(), ukprns[1].Value);
}
[Test]
public void ThenItShouldHaveASelectionCriteriaForDefaultStatus()
{
var actual = _builder.BuildMessageToGetSpecificUkprns(new[]{_ukprn});
var selectionCriteria = XElement.Parse(actual)
.GetElementByLocalName("Body")
.GetElementByLocalName("ProviderQueryRequest")
.GetElementByLocalName("SelectionCriteria");
var status = selectionCriteria.GetElementByLocalName("ProviderStatus");
Assert.IsNotNull(status);
Assert.AreEqual("A", status.Value);
}
[TestCase("A")]
[TestCase("V")]
[TestCase("PD1")]
[TestCase("PD2")]
public void ThenItShouldHaveASelectionCriteriaForSpecifiedStatus(string providerStatus)
{
var actual = _builder.BuildMessageToGetSpecificUkprns(new[]{_ukprn}, providerStatus);
var selectionCriteria = XElement.Parse(actual)
.GetElementByLocalName("Body")
.GetElementByLocalName("ProviderQueryRequest")
.GetElementByLocalName("SelectionCriteria");
var status = selectionCriteria.GetElementByLocalName("ProviderStatus");
Assert.IsNotNull(status);
Assert.AreEqual(providerStatus, status.Value);
}
}
} | 39.397059 | 125 | 0.637738 |
b9446f5f861237abcfe60e183b377b424d13dc48 | 2,182 | swift | Swift | Fakestagram-Xcode10/Fakestagram-Xcode10/ViewControllers/CameraViewController.swift | dolmayan21/fakestagram-ios | fd728fd0224d7296224ae5f7c9f9393965010fbc | [
"MIT"
] | null | null | null | Fakestagram-Xcode10/Fakestagram-Xcode10/ViewControllers/CameraViewController.swift | dolmayan21/fakestagram-ios | fd728fd0224d7296224ae5f7c9f9393965010fbc | [
"MIT"
] | null | null | null | Fakestagram-Xcode10/Fakestagram-Xcode10/ViewControllers/CameraViewController.swift | dolmayan21/fakestagram-ios | fd728fd0224d7296224ae5f7c9f9393965010fbc | [
"MIT"
] | null | null | null | //
// CameraViewController.swift
// Fakestagram-Xcode10
//
// Created by Ruben Alejandro Leon Del Villar on 19/10/19.
// Copyright © 2019 unam. All rights reserved.
//
import UIKit
import CoreLocation
class CameraViewController: UIViewController, CLLocationManagerDelegate {
let locationManager = CLLocationManager()
let service = CreatePostService()
override func viewDidLoad() {
super.viewDidLoad()
enableBasicLocationServices()
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
locationManager.startUpdatingLocation()
}
override func viewWillDisappear(_ animated: Bool) {
locationManager.stopUpdatingLocation()
super.viewWillDisappear(animated)
}
@IBAction func onTapCreate(_ sender: Any) {
let img = UIImage(named: "spacecat")!
service.call(
image: img,
title: "Spacecat🙀 - \(UUID().uuidString)"
) { postId in
print("Successful!")
print(postId ?? -1)
}
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destination.
// Pass the selected object to the new view controller.
}
*/
func enableBasicLocationServices() {
locationManager.delegate = self
switch CLLocationManager.authorizationStatus() {
case .notDetermined:
locationManager.requestWhenInUseAuthorization()
case .restricted, .denied:
print("Disable location features")
case .authorizedWhenInUse, .authorizedAlways:
print("Enable location features")
@unknown default:
fatalError()
}
}
func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
guard let location = locations.last else { return }
print(location)
service.update(coordinate: location.coordinate)
}
}
| 30.305556 | 107 | 0.642988 |
14be68b43eb7c98cae87b1f532b2b25de1bbab50 | 628 | ts | TypeScript | Sources/Common/DataModel/Line/index.d.ts | HovaLabs/vtk-js | cbc26d5feefaea1765b374f08155e35165834811 | [
"BSD-3-Clause"
] | null | null | null | Sources/Common/DataModel/Line/index.d.ts | HovaLabs/vtk-js | cbc26d5feefaea1765b374f08155e35165834811 | [
"BSD-3-Clause"
] | null | null | null | Sources/Common/DataModel/Line/index.d.ts | HovaLabs/vtk-js | cbc26d5feefaea1765b374f08155e35165834811 | [
"BSD-3-Clause"
] | null | null | null | export interface T100 {
t: number;
distance: number;
}
declare function distanceToLine(x: any, p1: any, p2: any, closestPoint?: any): T100;
declare function intersection(a1: any, a2: any, b1: any, b2: any, u: any, v: any): any;
export interface T101 {
distanceToLine: typeof distanceToLine;
intersection: typeof intersection;
}
export const STATIC: T101;
export interface T102 {
[key: string]: any;
}
declare function extend_1(publicAPI: any, model: any, initialValues?: T102): void;
export const extend: typeof extend_1;
export const newInstance: any;
declare const T103: any;
export default T103;
| 31.4 | 88 | 0.718153 |
4ceb8d9d0e97ed079332d10b37a95f8d236b4784 | 4,894 | py | Python | real-world-examples/linkedin_profile_views.py | AlienCoders/learning-python | 255dc32400b79db83382e707c96df029cfe30b24 | [
"MIT"
] | 19 | 2019-08-30T06:51:52.000Z | 2022-03-11T18:44:29.000Z | real-world-examples/linkedin_profile_views.py | AlienCoders/learning-python | 255dc32400b79db83382e707c96df029cfe30b24 | [
"MIT"
] | 9 | 2020-02-14T09:21:20.000Z | 2022-03-08T09:38:09.000Z | real-world-examples/linkedin_profile_views.py | AlienCoders/learning-python | 255dc32400b79db83382e707c96df029cfe30b24 | [
"MIT"
] | 12 | 2020-07-20T18:49:45.000Z | 2021-12-18T11:20:03.000Z | import requests # pip install requests
from bs4 import BeautifulSoup # pip install beautifulsoup4
import re # module for regular expression activities
import getpass # invisible password in terminal
import sys
# This script is to list the linkedin viewers for your profile
# Thanks to Prudhvi Pentakota for sharing this idea
# Thanks to Sujay Gankidi for validating the script and output.
# This relies on web scraping, so it might stop working if LinkedIn changes its markup, or
# LinkedIn may block your IP. So, don't run this script too often
# You can better use Linkedin developer API, if that allows you to view the list without being a paid member.
# AUTHOR: Sanjeev Jaiswal
# VERSION: 1.2
# This script will ask for creds if you pass "online" as an argument.
# If you have opted for "offline" as an argument, then
# it will ask for a file in txt format containing the page source of view-source:https://www.linkedin.com/me/profile-views/urn:li:wvmp:summary/
# (login first, open this url and save the contents in txt format)
def get_info_online():
# Ask user to type username and password
# Your login credentials
email = input("username: ") #your email id
password = getpass.getpass(prompt='Password: ', stream=None) # Your password
# Leverage session of requests module
client = requests.Session()
HOMEPAGE_URL = 'https://www.linkedin.com'
LOGIN_URL = 'https://www.linkedin.com/uas/login-submit'
PROFILE_VIEWS_URL = 'https://www.linkedin.com/me/profile-views/urn:li:wvmp:summary/'
# It is to get loginCsrfParam value which is needed while logging in
html = client.get(HOMEPAGE_URL).content
soup = BeautifulSoup(html, "html.parser")
csrf = soup.find('input', {'name': 'loginCsrfParam'}).get('value')
# Building the payload to login
login_information = {
'session_key': email,
'session_password': password,
'loginCsrfParam': csrf,
'trk': 'guest_homepage-basic_sign-in-submit'
}
# Login with login_information data
client.post(LOGIN_URL, data=login_information)
# Get the content from profile views summary page
profile_views_raw = client.get(PROFILE_VIEWS_URL).content
process_and_print(profile_views_raw)
return None
def get_info_offline(filename):
try:
profile_views_raw = open(filename).read()
except Exception as e:
exit("Either file doesn't exit or you don't have permission. Please verify\n", e)
process_and_print(profile_views_raw)
return None
def process_and_print(raw_data):
# Get raw content in html format
profile_views_html = BeautifulSoup(raw_data, "html.parser")
# look for <code id="bpr-guid-" and get the value
code = profile_views_html.findAll('code', {'id': re.compile('^bpr-guid-')})
if not code:
exit("Possible wrong creds or data. Please check creds or contents and retry\n")
final_viewer_list = []
for line in code:
string_line = str(line)
# Below line was working earlier but now it's not working due to some changes at front-end
# profile_search = re.findall('firstName":"(\w+\s?\w+?)","lastName":"([a-zA-z-,\.\s]+)"', string_line)
# Below 3 lines are working code because you need to get firstname and lastname separately now and then zip it
firstname = re.findall('firstName":"(\w+\s?\w+?)"', string_line)
lastname = re.findall('lastName":"(\w+\s?\w+?)"', string_line)
profile_search = zip(firstname,lastname)
if profile_search:
final_viewer_list.extend(profile_search)
# using join() + map(), joining tuple elements
viewer_name_list = list(map(" ".join, final_viewer_list))
# unique values in list
viewer_name_list = list(set(viewer_name_list))
# sort the name list, case insensitive sorting
viewer_name_list.sort(key=lambda v: v.upper())
print("{} viewers have visited your LinkedIn profile today".format(len(viewer_name_list)))
count = 0
# printing nicely the viewer lists
for fullname in viewer_name_list:
count += 1
print(str(count) + ". " + fullname)
return None
# We will use getopt or argparse or click to replace current sys.argv and if else block.
if len(sys.argv) >=2:
if re.match(r'^online$', sys.argv[1], re.IGNORECASE):
get_info_online()
elif re.match(r'^offline$', sys.argv[1], re.IGNORECASE):
if len(sys.argv) == 3:
get_info_offline(sys.argv[2])
else:
exit("It requires filename when you choose offline option\npython script-name online or python script-name offline page-source.txt\n")
else:
exit("It should be either online or offline as an argument\npython script-name online or python script-name offline page-source.txt\n")
else:
exit("It needs at least an argument\npython script-name online or python script-name offline page-source.txt\n")
| 38.84127 | 146 | 0.699224 |
fa3601c6cb60c9c0200636a17693bc5d3438e55b | 471 | cpp | C++ | BASIC c++/pointer-and-referencers/pointer1.cpp | jattramesh/Learning_git | 5191ecc6c0c11b69b9786f2a8bdd3db7228987d6 | [
"MIT"
] | null | null | null | BASIC c++/pointer-and-referencers/pointer1.cpp | jattramesh/Learning_git | 5191ecc6c0c11b69b9786f2a8bdd3db7228987d6 | [
"MIT"
] | null | null | null | BASIC c++/pointer-and-referencers/pointer1.cpp | jattramesh/Learning_git | 5191ecc6c0c11b69b9786f2a8bdd3db7228987d6 | [
"MIT"
] | null | null | null | //
// Created by Rahul on 6/5/2019.
//
#include <iostream>
#include<math.h>
using namespace std;
int var,*ptr;
int main()
{
var=100;
ptr=&var;
cout<<"value of var "<<var<<"address of ptr"<<&var<<endl;
    cout<<"value of var "<<var<<" address of var "<<&var<<endl;
    cout<<"value of ptr "<<ptr<<" address of ptr "<<&ptr<<endl;
long *ptr;
ptr=&a;
b=*ptr;
cout<<ptr<<endl<<b<<endl;
double x,y,*px;
px=&x;
*px=12;
*px+=4.5;
cout<<sin(*px)<<endl<<x<<endl;
return 0;
}
| 17.444444 | 62 | 0.532909 |
584dfe9a5c1025fb2a5795e34ed23c78cd944ed6 | 12,768 | rb | Ruby | spec/models/ci/runner_spec.rb | dzaporozhets/gitlabhq | a550942de1085eae4c60c498097ab191f8acfc0c | [
"MIT"
] | 1 | 2019-03-04T15:05:10.000Z | 2019-03-04T15:05:10.000Z | spec/models/ci/runner_spec.rb | dzaporozhets/gitlabhq | a550942de1085eae4c60c498097ab191f8acfc0c | [
"MIT"
] | 11 | 2020-04-30T14:31:37.000Z | 2022-03-02T07:17:53.000Z | spec/models/ci/runner_spec.rb | dzaporozhets/gitlabhq | a550942de1085eae4c60c498097ab191f8acfc0c | [
"MIT"
] | 2 | 2020-11-04T05:29:36.000Z | 2021-04-22T08:34:08.000Z | require 'spec_helper'
describe Ci::Runner do
describe 'validation' do
it { is_expected.to validate_presence_of(:access_level) }
context 'when runner is not allowed to pick untagged jobs' do
context 'when runner does not have tags' do
it 'is not valid' do
runner = build(:ci_runner, tag_list: [], run_untagged: false)
expect(runner).to be_invalid
end
end
context 'when runner has tags' do
it 'is valid' do
runner = build(:ci_runner, tag_list: ['tag'], run_untagged: false)
expect(runner).to be_valid
end
end
end
end
describe '#access_level' do
context 'when creating new runner and access_level is nil' do
let(:runner) do
build(:ci_runner, access_level: nil)
end
it "object is invalid" do
expect(runner).not_to be_valid
end
end
context 'when creating new runner and access_level is defined in enum' do
let(:runner) do
build(:ci_runner, access_level: :not_protected)
end
it "object is valid" do
expect(runner).to be_valid
end
end
context 'when creating new runner and access_level is not defined in enum' do
it "raises an error" do
expect { build(:ci_runner, access_level: :this_is_not_defined) }.to raise_error(ArgumentError)
end
end
end
describe '#display_name' do
it 'returns the description if it has a value' do
runner = FactoryGirl.build(:ci_runner, description: 'Linux/Ruby-1.9.3-p448')
expect(runner.display_name).to eq 'Linux/Ruby-1.9.3-p448'
end
it 'returns the token if it does not have a description' do
runner = FactoryGirl.create(:ci_runner)
expect(runner.display_name).to eq runner.description
end
it 'returns the token if the description is an empty string' do
runner = FactoryGirl.build(:ci_runner, description: '', token: 'token')
expect(runner.display_name).to eq runner.token
end
end
describe '#assign_to' do
let!(:project) { FactoryGirl.create :project }
let!(:shared_runner) { FactoryGirl.create(:ci_runner, :shared) }
before do
shared_runner.assign_to(project)
end
it { expect(shared_runner).to be_specific }
it { expect(shared_runner.projects).to eq([project]) }
it { expect(shared_runner.only_for?(project)).to be_truthy }
end
describe '.online' do
subject { described_class.online }
before do
@runner1 = FactoryGirl.create(:ci_runner, :shared, contacted_at: 1.year.ago)
@runner2 = FactoryGirl.create(:ci_runner, :shared, contacted_at: 1.second.ago)
end
it { is_expected.to eq([@runner2])}
end
describe '#online?' do
let(:runner) { FactoryGirl.create(:ci_runner, :shared) }
subject { runner.online? }
context 'never contacted' do
before do
runner.contacted_at = nil
end
it { is_expected.to be_falsey }
end
context 'contacted long time ago time' do
before do
runner.contacted_at = 1.year.ago
end
it { is_expected.to be_falsey }
end
context 'contacted 1s ago' do
before do
runner.contacted_at = 1.second.ago
end
it { is_expected.to be_truthy }
end
end
describe '#can_pick?' do
let(:pipeline) { create(:ci_pipeline) }
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:runner) { create(:ci_runner) }
subject { runner.can_pick?(build) }
before do
build.project.runners << runner
end
context 'when runner does not have tags' do
it 'can handle builds without tags' do
expect(runner.can_pick?(build)).to be_truthy
end
it 'cannot handle build with tags' do
build.tag_list = ['aa']
expect(runner.can_pick?(build)).to be_falsey
end
end
context 'when runner has tags' do
before do
runner.tag_list = %w(bb cc)
end
shared_examples 'tagged build picker' do
it 'can handle build with matching tags' do
build.tag_list = ['bb']
expect(runner.can_pick?(build)).to be_truthy
end
it 'cannot handle build without matching tags' do
build.tag_list = ['aa']
expect(runner.can_pick?(build)).to be_falsey
end
end
context 'when runner can pick untagged jobs' do
it 'can handle builds without tags' do
expect(runner.can_pick?(build)).to be_truthy
end
it_behaves_like 'tagged build picker'
end
context 'when runner cannot pick untagged jobs' do
before do
runner.run_untagged = false
end
it 'cannot handle builds without tags' do
expect(runner.can_pick?(build)).to be_falsey
end
it_behaves_like 'tagged build picker'
end
end
context 'when runner is shared' do
before do
runner.is_shared = true
build.project.runners = []
end
it 'can handle builds' do
expect(runner.can_pick?(build)).to be_truthy
end
context 'when runner is locked' do
before do
runner.locked = true
end
it 'can handle builds' do
expect(runner.can_pick?(build)).to be_truthy
end
end
end
context 'when runner is not shared' do
context 'when runner is assigned to a project' do
it 'can handle builds' do
expect(runner.can_pick?(build)).to be_truthy
end
end
context 'when runner is not assigned to a project' do
before do
build.project.runners = []
end
it 'cannot handle builds' do
expect(runner.can_pick?(build)).to be_falsey
end
end
end
context 'when access_level of runner is not_protected' do
before do
runner.not_protected!
end
context 'when build is protected' do
before do
build.protected = true
end
it { is_expected.to be_truthy }
end
context 'when build is unprotected' do
before do
build.protected = false
end
it { is_expected.to be_truthy }
end
end
context 'when access_level of runner is ref_protected' do
before do
runner.ref_protected!
end
context 'when build is protected' do
before do
build.protected = true
end
it { is_expected.to be_truthy }
end
context 'when build is unprotected' do
before do
build.protected = false
end
it { is_expected.to be_falsey }
end
end
end
describe '#status' do
let(:runner) { FactoryGirl.create(:ci_runner, :shared, contacted_at: 1.second.ago) }
subject { runner.status }
context 'never connected' do
before do
runner.contacted_at = nil
end
it { is_expected.to eq(:not_connected) }
end
context 'contacted 1s ago' do
before do
runner.contacted_at = 1.second.ago
end
it { is_expected.to eq(:online) }
end
context 'contacted long time ago' do
before do
runner.contacted_at = 1.year.ago
end
it { is_expected.to eq(:offline) }
end
context 'inactive' do
before do
runner.active = false
end
it { is_expected.to eq(:paused) }
end
end
describe '#tick_runner_queue' do
let(:runner) { create(:ci_runner) }
it 'returns a new last_update value' do
expect(runner.tick_runner_queue).not_to be_empty
end
end
describe '#ensure_runner_queue_value' do
let(:runner) { create(:ci_runner) }
it 'sets a new last_update value when it is called the first time' do
last_update = runner.ensure_runner_queue_value
expect_value_in_queues.to eq(last_update)
end
it 'does not change if it is not expired and called again' do
last_update = runner.ensure_runner_queue_value
expect(runner.ensure_runner_queue_value).to eq(last_update)
expect_value_in_queues.to eq(last_update)
end
context 'updates runner queue after changing editable value' do
let!(:last_update) { runner.ensure_runner_queue_value }
before do
Ci::UpdateRunnerService.new(runner).update(description: 'new runner')
end
it 'sets a new last_update value' do
expect_value_in_queues.not_to eq(last_update)
end
end
context 'does not update runner value after save' do
let!(:last_update) { runner.ensure_runner_queue_value }
before do
runner.touch
end
it 'has an old last_update value' do
expect_value_in_queues.to eq(last_update)
end
end
def expect_value_in_queues
Gitlab::Redis::Queues.with do |redis|
runner_queue_key = runner.send(:runner_queue_key)
expect(redis.get(runner_queue_key))
end
end
end
describe '#destroy' do
let(:runner) { create(:ci_runner) }
context 'when there is a tick in the queue' do
let!(:queue_key) { runner.send(:runner_queue_key) }
before do
runner.tick_runner_queue
runner.destroy
end
it 'cleans up the queue' do
Gitlab::Redis::Queues.with do |redis|
expect(redis.get(queue_key)).to be_nil
end
end
end
end
describe '.assignable_for' do
let(:runner) { create(:ci_runner) }
let(:project) { create(:project) }
let(:another_project) { create(:project) }
before do
project.runners << runner
end
context 'with shared runners' do
before do
runner.update(is_shared: true)
end
context 'does not give owned runner' do
subject { described_class.assignable_for(project) }
it { is_expected.to be_empty }
end
context 'does not give shared runner' do
subject { described_class.assignable_for(another_project) }
it { is_expected.to be_empty }
end
end
context 'with unlocked runner' do
context 'does not give owned runner' do
subject { described_class.assignable_for(project) }
it { is_expected.to be_empty }
end
context 'does give a specific runner' do
subject { described_class.assignable_for(another_project) }
it { is_expected.to contain_exactly(runner) }
end
end
context 'with locked runner' do
before do
runner.update(locked: true)
end
context 'does not give owned runner' do
subject { described_class.assignable_for(project) }
it { is_expected.to be_empty }
end
context 'does not give a locked runner' do
subject { described_class.assignable_for(another_project) }
it { is_expected.to be_empty }
end
end
end
describe "belongs_to_one_project?" do
it "returns false if there are two projects runner assigned to" do
runner = FactoryGirl.create(:ci_runner)
project = FactoryGirl.create(:project)
project1 = FactoryGirl.create(:project)
project.runners << runner
project1.runners << runner
expect(runner.belongs_to_one_project?).to be_falsey
end
it "returns true" do
runner = FactoryGirl.create(:ci_runner)
project = FactoryGirl.create(:project)
project.runners << runner
expect(runner.belongs_to_one_project?).to be_truthy
end
end
describe '#has_tags?' do
context 'when runner has tags' do
subject { create(:ci_runner, tag_list: ['tag']) }
it { is_expected.to have_tags }
end
context 'when runner does not have tags' do
subject { create(:ci_runner, tag_list: []) }
it { is_expected.not_to have_tags }
end
end
describe '.search' do
let(:runner) { create(:ci_runner, token: '123abc') }
it 'returns runners with a matching token' do
expect(described_class.search(runner.token)).to eq([runner])
end
it 'returns runners with a partially matching token' do
expect(described_class.search(runner.token[0..2])).to eq([runner])
end
it 'returns runners with a matching token regardless of the casing' do
expect(described_class.search(runner.token.upcase)).to eq([runner])
end
it 'returns runners with a matching description' do
expect(described_class.search(runner.description)).to eq([runner])
end
it 'returns runners with a partially matching description' do
expect(described_class.search(runner.description[0..2])).to eq([runner])
end
it 'returns runners with a matching description regardless of the casing' do
expect(described_class.search(runner.description.upcase)).to eq([runner])
end
end
end
| 25.383698 | 102 | 0.636513 |
1f777b7fe69102201de68b0fd19b2d70f3d33a3a | 3,490 | cs | C# | Assets/Live2D/Cubism/Framework/Expression/CubismPlayingExpression.cs | MurlocTW/Live2DLighting | 7f31d06915d6395f77212eef240430105d09c2b7 | [
"MIT"
] | 114 | 2019-05-17T02:17:14.000Z | 2022-02-20T04:41:59.000Z | Assets/Live2D/Cubism/Framework/Expression/CubismPlayingExpression.cs | MurlocTW/Live2DLighting | 7f31d06915d6395f77212eef240430105d09c2b7 | [
"MIT"
] | 2 | 2019-05-15T18:58:03.000Z | 2022-03-29T02:22:11.000Z | Assets/Live2D/Cubism/Framework/Expression/CubismPlayingExpression.cs | MurlocTW/Live2DLighting | 7f31d06915d6395f77212eef240430105d09c2b7 | [
"MIT"
] | 16 | 2019-05-25T02:51:40.000Z | 2022-02-11T03:40:34.000Z | /*
* Copyright(c) Live2D Inc. All rights reserved.
*
* Use of this source code is governed by the Live2D Open Software license
* that can be found at http://live2d.com/eula/live2d-open-software-license-agreement_en.html.
*/
using Live2D.Cubism.Core;
using UnityEngine;
namespace Live2D.Cubism.Framework.Expression
{
/// <summary>
/// The cubism expression data.
/// </summary>
[System.Serializable]
public class CubismPlayingExpression
{
#region variable
/// <summary>
/// Expression type.
/// </summary>
[SerializeField]
public string Type;
/// <summary>
/// Expression fade in time.
/// </summary>
[SerializeField]
public float FadeInTime;
/// <summary>
/// Expression fade out time.
/// </summary>
[SerializeField]
public float FadeOutTime;
/// <summary>
/// Expression Weight.
/// </summary>
[SerializeField, Range(0.0f, 1.0f)]
public float Weight;
/// <summary>
/// Expression user time.
/// </summary>
[SerializeField]
public float ExpressionUserTime;
/// <summary>
/// Expression end time.
/// </summary>
[SerializeField]
public float ExpressionEndTime;
/// <summary>
/// Expression parameters cache.
/// </summary>
[SerializeField]
public CubismParameter[] Destinations;
/// <summary>
/// Expression parameter value.
/// </summary>
[SerializeField]
public float[] Value;
/// <summary>
/// Expression parameter blend mode.
/// </summary>
[SerializeField]
public CubismParameterBlendMode[] Blend;
#endregion
/// <summary>
/// Initialize expression data from <see cref="CubismExpressionData"/>.
/// </summary>
/// <param name="model">model.</param>
/// <param name="expressionData">Source.</param>
public static CubismPlayingExpression Create(CubismModel model, CubismExpressionData expressionData)
{
// Fail silently...
if(model == null || expressionData == null)
{
return null;
}
var ret = new CubismPlayingExpression();
ret.Type = expressionData.Type;
ret.FadeInTime = (expressionData.FadeInTime <= 0.0f)
? 1.0f
: expressionData.FadeInTime;
ret.FadeOutTime = (expressionData.FadeOutTime <= 0.0f)
? 1.0f
: expressionData.FadeOutTime;
ret.Weight = 0.0f;
ret.ExpressionUserTime = 0.0f;
ret.ExpressionEndTime = 0.0f;
var parameterCount = expressionData.Parameters.Length;
ret.Destinations = new CubismParameter[parameterCount];
ret.Value = new float[parameterCount];
ret.Blend = new CubismParameterBlendMode[parameterCount];
for(var i = 0; i < parameterCount; ++i)
{
ret.Destinations[i] = model.Parameters.FindById(expressionData.Parameters[i].Id);
ret.Value[i] = expressionData.Parameters[i].Value;
ret.Blend[i] = expressionData.Parameters[i].Blend;
}
return ret;
}
}
} | 28.606557 | 108 | 0.542407 |
6fa3e0280721d1343962ce253d07fff4f98cab83 | 2,013 | dart | Dart | lib/pages/views/archive_view.dart | muqaddir/realiq_fluffy | c18dad164c26baaedbaef076e397821b14e1118e | [
"MIT"
] | null | null | null | lib/pages/views/archive_view.dart | muqaddir/realiq_fluffy | c18dad164c26baaedbaef076e397821b14e1118e | [
"MIT"
] | null | null | null | lib/pages/views/archive_view.dart | muqaddir/realiq_fluffy | c18dad164c26baaedbaef076e397821b14e1118e | [
"MIT"
] | null | null | null | import 'package:famedlysdk/famedlysdk.dart';
import 'package:realiq_fluffy/pages/archive.dart';
import 'package:realiq_fluffy/widgets/list_items/chat_list_item.dart';
import 'package:flutter/material.dart';
import 'package:flutter_gen/gen_l10n/l10n.dart';
class ArchiveView extends StatelessWidget {
final ArchiveController controller;
const ArchiveView(this.controller, {Key key}) : super(key: key);
@override
Widget build(BuildContext context) {
return FutureBuilder<List<Room>>(
future: controller.getArchive(context),
builder: (BuildContext context, snapshot) => Scaffold(
appBar: AppBar(
leading: BackButton(),
title: Text(L10n.of(context).archive),
actions: [
if (snapshot.hasData &&
controller.archive != null &&
controller.archive.isNotEmpty)
TextButton(
onPressed: controller.forgetAllAction,
child: Text(L10n.of(context).clearArchive),
)
],
),
body: Builder(
builder: (BuildContext context) {
if (snapshot.hasError) {
return Center(
child: Text(
L10n.of(context).oopsSomethingWentWrong,
textAlign: TextAlign.center,
));
}
if (!snapshot.hasData) {
return Center(child: CircularProgressIndicator());
} else {
controller.archive = snapshot.data;
if (controller.archive.isEmpty) {
return Center(child: Icon(Icons.archive_outlined, size: 80));
}
return ListView.builder(
itemCount: controller.archive.length,
itemBuilder: (BuildContext context, int i) => ChatListItem(
controller.archive[i],
onForget: controller.forgetAction,
),
);
}
},
),
),
);
}
}
| 33.55 | 77 | 0.558867 |
c288696525c689ea66c916d06642de3cebd5cf12 | 841 | swift | Swift | src/nodekit/NKScripting/util/NKDisposable.swift | nodekit-io/nodekit-darwin-lite | af09ab257c9ff375359f3003e91df6e6c3e73fe6 | [
"Apache-2.0"
] | 8 | 2016-11-20T12:04:40.000Z | 2021-06-21T13:25:16.000Z | src/nodekit/NKScripting/util/NKDisposable.swift | nodekit-io/nodekit-darwin-lite | af09ab257c9ff375359f3003e91df6e6c3e73fe6 | [
"Apache-2.0"
] | 5 | 2017-08-22T21:13:00.000Z | 2020-07-22T21:37:37.000Z | src/nodekit/NKScripting/util/NKDisposable.swift | nodekit-io/nodekit-darwin-lite | af09ab257c9ff375359f3003e91df6e6c3e73fe6 | [
"Apache-2.0"
] | 4 | 2016-09-12T13:57:28.000Z | 2020-11-16T06:55:47.000Z | /*
* nodekit.io
*
* Copyright (c) 2016 OffGrid Networks. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
Should be implemented by any NKNativePlugin object that
needs to perform cleanup on engine tear-down, such as
timers, filehandles, etc.
*/
protocol NKDisposable {
func dispose()
}
| 30.035714 | 75 | 0.728894 |
64c385d211c174ccc540bb8dc5da1f0b7f8465b1 | 531 | sql | SQL | src/test/resources/test-reorg-table.sql | pdjohe/liquibase-db2-enhanced | 6025e917b51980bf78f50d0700c9598787278ca2 | [
"Apache-2.0"
] | null | null | null | src/test/resources/test-reorg-table.sql | pdjohe/liquibase-db2-enhanced | 6025e917b51980bf78f50d0700c9598787278ca2 | [
"Apache-2.0"
] | 2 | 2021-12-07T17:54:53.000Z | 2022-03-18T13:44:59.000Z | src/test/resources/test-reorg-table.sql | pdjohe/liquibase-db2-enhanced | 6025e917b51980bf78f50d0700c9598787278ca2 | [
"Apache-2.0"
] | null | null | null | ---------------------------------------------------------------------------------
-- Test reorg table
---------------------------------------------------------------------------------
SET CURRENT SCHEMA = 'DB2TEST';
SET CURRENT PATH = 'DB2TEST';
CREATE TABLE "TEST_REORG" (
"OBJ_ID" BIGINT NOT NULL,
"TEXT" VARCHAR(30),
"IS_TEXT_NULL" INTEGER NOT NULL DEFAULT 1);
COMMENT ON TABLE "TEST_REORG" IS 'Test table';
ALTER TABLE TEST_REORG ALTER COLUMN TEXT SET DATA TYPE VARCHAR(50);
REORG TABLE TEST_REORG;
| 31.235294 | 81 | 0.480226 |