Dataset schema (one record per source file; the content column holds the full file text):
- hexsha: string (length 40)
- size: int64 (5 .. 1.05M)
- ext: string (98 classes)
- lang: string (21 classes)
- max_stars_repo_path: string (3 .. 945)
- max_stars_repo_name: string (4 .. 118)
- max_stars_repo_head_hexsha: string (40 .. 78)
- max_stars_repo_licenses: sequence (1 .. 10)
- max_stars_count: int64 (1 .. 368k, nullable)
- max_stars_repo_stars_event_min_datetime: string (length 24, nullable)
- max_stars_repo_stars_event_max_datetime: string (length 24, nullable)
- max_issues_repo_path: string (3 .. 945)
- max_issues_repo_name: string (4 .. 118)
- max_issues_repo_head_hexsha: string (40 .. 78)
- max_issues_repo_licenses: sequence (1 .. 10)
- max_issues_count: int64 (1 .. 134k, nullable)
- max_issues_repo_issues_event_min_datetime: string (length 24, nullable)
- max_issues_repo_issues_event_max_datetime: string (length 24, nullable)
- max_forks_repo_path: string (3 .. 945)
- max_forks_repo_name: string (4 .. 135)
- max_forks_repo_head_hexsha: string (40 .. 78)
- max_forks_repo_licenses: sequence (1 .. 10)
- max_forks_count: int64 (1 .. 105k, nullable)
- max_forks_repo_forks_event_min_datetime: string (length 24, nullable)
- max_forks_repo_forks_event_max_datetime: string (length 24, nullable)
- content: string (5 .. 1.05M)
- avg_line_length: float64 (1 .. 1.03M)
- max_line_length: int64 (2 .. 1.03M)
- alphanum_fraction: float64 (0 .. 1)

In the per-file records below, the max_stars/max_issues/max_forks repo path, name, head hexsha and licenses fields are identical within each record and are shown once.
hexsha: 05aa648973edc1dadc87a9a8de7c8c2e6067a5a0 | size: 9,112 | ext: py | lang: Python | path: train.py | repo: gakkispy/DD-Net-Pytorch | head hexsha: 45032ce373dfa0fffec8e99671555071548445bc | licenses: ["MIT"] | stars: 31 (2020-08-25T05:28:33.000Z .. 2022-03-25T01:54:57.000Z) | issues: 1 (2021-12-25T04:13:22.000Z .. 2021-12-25T04:13:22.000Z) | forks: 7 (2020-11-08T02:49:46.000Z .. 2022-03-08T09:14:37.000Z)
#! /usr/bin/env python
#! coding:utf-8
from pathlib import Path
import matplotlib.pyplot as plt
from torch import log
from tqdm import tqdm
import torch
import torch.nn as nn
import argparse
import torch.optim as optim
from torch.optim.lr_scheduler import ReduceLROnPlateau
from sklearn.metrics import confusion_matrix
from dataloader.jhmdb_loader import load_jhmdb_data, Jdata_generator, JConfig
from dataloader.shrec_loader import load_shrec_data, Sdata_generator, SConfig
from models.DDNet_Original import DDNet_Original as DDNet
from utils import makedir
import sys
import time
import numpy as np
import logging
sys.path.insert(0, './pytorch-summary/torchsummary/')
from torchsummary import summary # noqa
savedir = Path('experiments') / Path(str(int(time.time())))
makedir(savedir)
logging.basicConfig(filename=savedir/'train.log', level=logging.INFO)
history = {
"train_loss": [],
"test_loss": [],
"test_acc": []
}
def train(args, model, device, train_loader, optimizer, epoch, criterion):
model.train()
train_loss = 0
for batch_idx, (data1, data2, target) in enumerate(tqdm(train_loader)):
M, P, target = data1.to(device), data2.to(device), target.to(device)
optimizer.zero_grad()
output = model(M, P)
loss = criterion(output, target)
train_loss += loss.detach().item()
loss.backward()
optimizer.step()
if batch_idx % args.log_interval == 0:
msg = ('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data1), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item()))
print(msg)
logging.info(msg)
if args.dry_run:
break
history['train_loss'].append(train_loss)
return train_loss
def test(model, device, test_loader):
model.eval()
test_loss = 0
correct = 0
criterion = nn.CrossEntropyLoss(reduction='sum')
with torch.no_grad():
for _, (data1, data2, target) in enumerate(tqdm(test_loader)):
M, P, target = data1.to(device), data2.to(device), target.to(device)
output = model(M, P)
# sum up batch loss
test_loss += criterion(output, target).item()
# get the index of the max log-probability
pred = output.argmax(dim=1, keepdim=True)
# output shape (B,Class)
# target_shape (B)
# pred shape (B,1)
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
history['test_loss'].append(test_loss)
history['test_acc'].append(correct / len(test_loader.dataset))
msg = ('Test set: Average loss: {:.4f}, Accuracy: {}/{} ({:.2f}%)\n'.format(
test_loss, correct, len(test_loader.dataset),
100. * correct / len(test_loader.dataset)))
print(msg)
logging.info(msg)
def main():
# Training settings
parser = argparse.ArgumentParser()
parser.add_argument('--batch-size', type=int, default=64, metavar='N',
help='input batch size for training (default: 64)')
parser.add_argument('--test-batch-size', type=int, default=1000, metavar='N',
help='input batch size for testing (default: 1000)')
parser.add_argument('--epochs', type=int, default=199, metavar='N',
help='number of epochs to train (default: 199)')
parser.add_argument('--lr', type=float, default=0.01, metavar='LR',
help='learning rate (default: 0.01)')
parser.add_argument('--gamma', type=float, default=0.5, metavar='M',
help='Learning rate step gamma (default: 0.5)')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--dry-run', action='store_true', default=False,
help='quickly check a single pass')
parser.add_argument('--log-interval', type=int, default=2, metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument('--save-model', action='store_true', default=False,
help='For Saving the current Model')
parser.add_argument('--dataset', type=int, required=True, metavar='N',
help='0 for JHMDB, 1 for SHREC coarse, 2 for SHREC fine, others is undefined')
parser.add_argument('--model', action='store_true', default=False,
help='For Saving the current Model')
parser.add_argument('--calc_time', action='store_true', default=False,
                        help='calculate time per sample')
args = parser.parse_args()
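    # Example invocation (a sketch based on the argparse flags above; the chosen values are
    # illustrative, not taken from the repository):
    #   python train.py --dataset 1 --batch-size 64 --epochs 199 --lr 0.01 --save-model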
logging.info(args)
use_cuda = not args.no_cuda and torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")
kwargs = {'batch_size': args.batch_size}
if use_cuda:
kwargs.update({'num_workers': 1,
'pin_memory': True,
'shuffle': True},)
# alias
Config = None
data_generator = None
load_data = None
clc_num = 0
if args.dataset == 0:
Config = JConfig()
data_generator = Jdata_generator
load_data = load_jhmdb_data
clc_num = Config.clc_num
elif args.dataset == 1:
Config = SConfig()
load_data = load_shrec_data
clc_num = Config.class_coarse_num
data_generator = Sdata_generator('coarse_label')
elif args.dataset == 2:
Config = SConfig()
clc_num = Config.class_fine_num
load_data = load_shrec_data
data_generator = Sdata_generator('fine_label')
else:
print("Unsupported dataset!")
sys.exit(1)
C = Config
Train, Test, le = load_data()
X_0, X_1, Y = data_generator(Train, C, le)
X_0 = torch.from_numpy(X_0).type('torch.FloatTensor')
X_1 = torch.from_numpy(X_1).type('torch.FloatTensor')
Y = torch.from_numpy(Y).type('torch.LongTensor')
X_0_t, X_1_t, Y_t = data_generator(Test, C, le)
X_0_t = torch.from_numpy(X_0_t).type('torch.FloatTensor')
X_1_t = torch.from_numpy(X_1_t).type('torch.FloatTensor')
Y_t = torch.from_numpy(Y_t).type('torch.LongTensor')
trainset = torch.utils.data.TensorDataset(X_0, X_1, Y)
train_loader = torch.utils.data.DataLoader(trainset, **kwargs)
testset = torch.utils.data.TensorDataset(X_0_t, X_1_t, Y_t)
test_loader = torch.utils.data.DataLoader(
testset, batch_size=args.test_batch_size)
Net = DDNet(C.frame_l, C.joint_n, C.joint_d,
C.feat_d, C.filters, clc_num)
model = Net.to(device)
summary(model, [(C.frame_l, C.feat_d), (C.frame_l, C.joint_n, C.joint_d)])
optimizer = optim.Adam(model.parameters(), lr=args.lr, betas=(0.9, 0.999))
criterion = nn.CrossEntropyLoss()
scheduler = ReduceLROnPlateau(
optimizer, factor=args.gamma, patience=5, cooldown=0.5, min_lr=5e-6, verbose=True)
for epoch in range(1, args.epochs + 1):
train_loss = train(args, model, device, train_loader,
optimizer, epoch, criterion)
test(model, device, test_loader)
scheduler.step(train_loss)
fig, (ax1, ax2, ax3) = plt.subplots(nrows=3, ncols=1)
ax1.plot(history['train_loss'])
ax1.plot(history['test_loss'])
ax1.legend(['Train', 'Test'], loc='upper left')
ax1.set_xlabel('Epoch')
ax1.set_title('Loss')
ax2.set_title('Model accuracy')
ax2.set_ylabel('Accuracy')
ax2.set_xlabel('Epoch')
ax2.plot(history['test_acc'])
xmax = np.argmax(history['test_acc'])
ymax = np.max(history['test_acc'])
text = "x={}, y={:.3f}".format(xmax, ymax)
ax2.annotate(text, xy=(xmax, ymax))
ax3.set_title('Confusion matrix')
model.eval()
with torch.no_grad():
Y_pred = model(X_0_t.to(device), X_1_t.to(
device)).cpu().numpy()
Y_test = Y_t.numpy()
cnf_matrix = confusion_matrix(
Y_test, np.argmax(Y_pred, axis=1))
ax3.imshow(cnf_matrix)
fig.tight_layout()
fig.savefig(str(savedir / "perf.png"))
if args.save_model:
torch.save(model.state_dict(), str(savedir/"model.pt"))
if args.calc_time:
device = ['cpu', 'cuda']
# calc time
for d in device:
tmp_X_0_t = X_0_t.to(d)
tmp_X_1_t = X_1_t.to(d)
model = model.to(d)
# warm up
_ = model(tmp_X_0_t, tmp_X_1_t)
tmp_X_0_t = tmp_X_0_t.unsqueeze(1)
tmp_X_1_t = tmp_X_1_t.unsqueeze(1)
start = time.perf_counter_ns()
for i in range(tmp_X_0_t.shape[0]):
_ = model(tmp_X_0_t[i, :, :, :], tmp_X_1_t[i, :, :, :])
end = time.perf_counter_ns()
msg = ("total {}ns, {:.2f}ns per one on {}".format((end - start),
((end - start) / (X_0_t.shape[0])), d))
print(msg)
logging.info(msg)
if __name__ == '__main__':
main()
avg_line_length: 38.447257 | max_line_length: 102 | alphanum_fraction: 0.613586
hexsha: d07d396d290e097aa74751b2f8b96cac5c013cae | size: 538 | ext: asm | lang: Assembly | path: oeis/070/A070781.asm | repo: neoneye/loda-programs | head hexsha: 84790877f8e6c2e821b183d2e334d612045d29c0 | licenses: ["Apache-2.0"] | stars: 11 (2021-08-22T19:44:55.000Z .. 2022-03-20T16:47:57.000Z) | issues: 9 (2021-08-29T13:15:54.000Z .. 2022-03-09T19:52:31.000Z) | forks: 3 (2021-08-22T20:56:47.000Z .. 2021-09-29T06:26:12.000Z)
; A070781: a(n) = binomial((n+1)^2, n^2).
; 1,4,126,11440,2042975,600805296,262596783764,159518999862720,128447994798305325,132341572939212267400,169758547725351091518726,265242021590899282966358400,495874093230232452749553398586,1092844289151775603486607296657120,2803736645550638260366779399137545800,8283675595268374292919471912522442632960,27922522076335249153759039335657631882742553,106506788903198534447058733223660406842425971192,456408290522469516408592494850398369993718507508650
mov $1,$0 ; $1 = n
add $0,1  ; $0 = n + 1
pow $0,2  ; $0 = (n+1)^2
pow $1,2  ; $1 = n^2
bin $0,$1 ; $0 = binomial((n+1)^2, n^2)
avg_line_length: 59.777778 | max_line_length: 447 | alphanum_fraction: 0.881041
hexsha: 3a292e76b9eb02de473ad471f8e8f85c964d0fa7 | size: 10,666 | ext: lua | lang: Lua | path: fixes/TriggerFixCppLogicExtension.lua | repo: mcb5637/s5CommunityLib | head hexsha: 20fb1521fe65105c128ac45540470000c61ddf3a | licenses: ["MIT"] | stars: 1 (2022-02-23T18:56:34.000Z .. 2022-02-23T18:56:34.000Z) | issues: null | forks: null
if mcbPacker then --mcbPacker.ignore
mcbPacker.require("s5CommunityLib/fixes/TriggerFix")
mcbPacker.require("s5CommunityLib/comfort/other/FrameworkWrapperLight")
mcbPacker.require("s5CommunityLib/tables/ArmorClasses")
mcbPacker.require("s5CommunityLib/comfort/entity/EntityIdChangedHelper")
end --mcbPacker.ignore
if not CppLogic then
assert(false, "CppLogic is required!")
-- TODO exit map
return
end
--- author:mcb current maintainer:mcb v1.0
-- trigger support for CppLogic.
-- - defines Events.SCRIPT_EVENT_ON_CONVERT_ENTITY, called when Helias converts a leader.
-- - improves Events.SCRIPT_EVENT_ON_ENTITY_KILLS_ENTITY, soldier ids are now correct in every case.
--
-- set TriggerFixCppLogicExtension_UseRecommendedFixes = true to use some fixes recommended by me.
--
-- included comfort functions:
-- - TriggerFixCppLogicExtension.RemoveArchiveOnLeave			bool; if true, all s5x archives in the load order are removed when the map is left.
-- - TriggerFixCppLogicExtension.AddMapArchiveToLoadOrder(path)		adds path to the load order if not already present. s5x archives only. If path is nil, the current map is used.
-- - TriggerFixCppLogicExtension.SetGUIStateSelectEntity(onconfirm, mouse, checkentity, oncancel)
-- 		gui state to select an entity.
-- - TriggerFixCppLogicExtension.SetGUIStateSelectPos(onconfirm, mouse, checkpos, oncancel)
-- 		gui state to select a position.
-- - TriggerFixCppLogicExtension.SetGUIStateSelectPosInSector(onconfirm, mouse, sector, checkpos, oncancel)
-- 		gui state to select a position in a sector.
--
-- Requires:
-- - CppLogic
-- - TriggerFix
-- - FrameworkWrapper
-- - ArmorClasses
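--
-- Minimal usage sketch (illustrative only; the callback bodies and cursor id below are
-- assumptions, not part of this file):
-- TriggerFixCppLogicExtension.AddMapArchiveToLoadOrder(nil) -- add the current map archive to the load order
-- TriggerFixCppLogicExtension.SetGUIStateSelectEntity(
-- 	function(id) --[[ use the selected entity id here ]] end, -- onconfirm
-- 	10,  -- mouse cursor id
-- 	nil, -- checkentity (optional filter)
-- 	nil) -- oncancel (optional)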
TriggerFixCppLogicExtension = {Backup = {}, GUIStateCustomMouse=10, RemoveArchiveOnLeave=false}
TriggerFixCppLogicExtension.Backup.TaskListToFix = {
{TaskLists.TL_BATTLE_RIFLE, 10},
{TaskLists.TL_BATTLE_BOW, 10},
{TaskLists.TL_BATTLE_CROSSBOW, 10},
{TaskLists.TL_BATTLE_HEROBOW, 9},
{TaskLists.TL_BATTLE_SKIRMISHER, 10},
{TaskLists.TL_BATTLE_VEHICLE, 17},
}
TriggerFix.AddScriptTrigger("SCRIPT_EVENT_ON_CONVERT_ENTITY")
function TriggerFixCppLogicExtension.Init()
CppLogic.Entity.Settler.EnableConversionHook(TriggerFixCppLogicExtension.Hook)
if TriggerFixCppLogicExtension_UseRecommendedFixes then
		CppLogic.Combat.EnableAoEProjectileFix() -- AoE projectiles respect damage/armor class and damage bonuses from techs/heroes
		CppLogic.Combat.EnableCamoFix() -- camouflage is not ended when projectiles hit
		--CppLogic.Logic.EnableAllHurtEntityTrigger() -- also run the hurt entity trigger when the attacker is dead
		TriggerFixCppLogicExtension.InitKillCb()
		CppLogic.Logic.EnableBuildOnMovementFix(true) -- building on settlers no longer cancels movement
if not CEntity then
			CppLogic.Logic.SetLeadersRegenerateTroopHealth(true) -- troops regenerate HP
			CppLogic.Entity.Settler.EnableRangedEffectSoldierHeal(true) -- troop HP is healed by Salim
CppLogic.Logic.FixSnipeDamage(nil)
CppLogic.Logic.TaskListSetChangeTaskListCheckUncancelable(true)
TriggerFix.CreateEventHurtIn = TriggerFix.CreateEventHurtInCppLogic
TriggerFix.CreateEventHurtOut = TriggerFix.CreateEventHurtOutCppLogic
end
		-- fix cannon damage classes
TriggerFixCppLogicExtension.Backup.Cannons = {}
local function docannon(ty, dc)
local d, c = CppLogic.EntityType.GetAutoAttackDamage(ty)
TriggerFixCppLogicExtension.Backup.Cannons[ty] = c
CppLogic.EntityType.SetAutoAttackDamage(ty, d, dc)
end
docannon(Entities.PV_Cannon2, DamageClasses.DC_Siege)
docannon(Entities.PV_Cannon3, DamageClasses.DC_Chaos)
		-- fix the damage class factors against the fur armor class
TriggerFixCppLogicExtension.Backup.FurAC = {}
for _,dc in pairs(DamageClasses) do
TriggerFixCppLogicExtension.Backup.FurAC[dc] = CppLogic.Logic.GetDamageFactor(dc, ArmorClasses.AC_Fur)
end
CppLogic.Logic.SetDamageFactor(DamageClasses.DC_Strike, ArmorClasses.AC_Fur, 0.9)
CppLogic.Logic.SetDamageFactor(DamageClasses.DC_Pierce, ArmorClasses.AC_Fur, 0.9)
CppLogic.Logic.SetDamageFactor(DamageClasses.DC_Chaos, ArmorClasses.AC_Fur, 0.7)
CppLogic.Logic.SetDamageFactor(DamageClasses.DC_Siege, ArmorClasses.AC_Fur, 0.2)
CppLogic.Logic.SetDamageFactor(DamageClasses.DC_Hero, ArmorClasses.AC_Fur, 0.8)
CppLogic.Logic.SetDamageFactor(DamageClasses.DC_Evil, ArmorClasses.AC_Fur, 1)
CppLogic.Logic.SetDamageFactor(DamageClasses.DC_Bullet, ArmorClasses.AC_Fur, 1.5)
if not CEntity then
			for _,tl in ipairs(TriggerFixCppLogicExtension.Backup.TaskListToFix) do -- make battle task lists wait for anims uncancelable after firing projectile
CppLogic.Logic.TaskListMakeWaitForAnimsUnCancelable(tl[1], tl[2])
end
end
end
end
function TriggerFixCppLogicExtension.OnLeaveMap()
CppLogic.OnLeaveMap()
-- CppLogic automatically deactivates all mods, just have to reset data
if TriggerFixCppLogicExtension_UseRecommendedFixes then
for ty, dc in pairs(TriggerFixCppLogicExtension.Backup.Cannons) do
CppLogic.EntityType.SetAutoAttackDamage(ty, CppLogic.EntityType.GetAutoAttackDamage(ty), dc)
end
for dc, f in pairs(TriggerFixCppLogicExtension.Backup.FurAC) do
CppLogic.Logic.SetDamageFactor(dc, ArmorClasses.AC_Fur, f)
end
if not CEntity then
for _,tl in ipairs(TriggerFixCppLogicExtension.Backup.TaskListToFix) do
CppLogic.Logic.TaskListMakeWaitForAnimsCancelable(tl[1], tl[2])
end
end
end
if TriggerFixCppLogicExtension.RemoveArchiveOnLeave then
while string.find(CppLogic.Logic.GetLoadOrder()[1], ".s5x") do
CppLogic.Logic.RemoveTopArchive()
end
end
end
function TriggerFixCppLogicExtension.AddLeaveTrigger()
Trigger.RequestTrigger(Events.SCRIPT_EVENT_ON_LEAVE_MAP, nil, "TriggerFixCppLogicExtension.OnLeaveMap", 1)
Trigger.RequestTrigger(Events.SCRIPT_EVENT_ON_ENTITY_ID_CHANGED, nil, "TriggerFixCppLogicExtension.OnIdChanged", 1)
TriggerFixCppLogicExtension.GameCallback_GUI_StateChanged = GameCallback_GUI_StateChanged
function GameCallback_GUI_StateChanged(stateid, armed)
TriggerFixCppLogicExtension.GameCallback_GUI_StateChanged(stateid, armed)
if stateid == 27 then
Mouse.CursorSet(TriggerFixCppLogicExtension.GUIStateCustomMouse)
end
end
return true
end
function TriggerFixCppLogicExtension.OnIdChanged()
CppLogic.Entity.CloneOverrideData(Event.GetEntityID1(), Event.GetEntityID2())
end
function TriggerFixCppLogicExtension.Hook(targetId, player, newid, converterId)
local ev = TriggerFix.CreateEmptyEvent()
ev.GetEntityID1 = targetId
ev.GetEntityID2 = newid
ev.GetEntityID = converterId
ev.GetPlayerID = player
TriggerFix_action(Events.SCRIPT_EVENT_ON_CONVERT_ENTITY, ev)
end
function TriggerFixCppLogicExtension.InitKillCb()
for i = table.getn(TriggerFix.afterTriggerCB), 1, -1 do
if TriggerFix.afterTriggerCB[i]==TriggerFix.KillTrigger.AfterTriggerCB then
table.remove(TriggerFix.afterTriggerCB, i)
end
end
CppLogic.Logic.EnableAllHurtEntityTrigger(true, function(att, tar, pl, sourc)
local ev = TriggerFix.CreateEmptyEvent()
ev.GetEntityID1 = att
ev.GetEntityID2 = tar
ev.GetPlayerID = pl
ev.AttackSource = sourc
TriggerFix_action(Events.SCRIPT_EVENT_ON_ENTITY_KILLS_ENTITY, ev)
end)
end
function TriggerFixCppLogicExtension.SetGUIStateSelectEntity(onconfirm, mouse, checkentity, oncancel)
TriggerFixCppLogicExtension.GUIStateCustomMouse = mouse
CppLogic.UI.SetGUIStateLuaSelection(function(x, y)
local id = GUI.GetEntityAtPosition(x, y)
if IsDestroyed(id) then
return false
end
if checkentity and not checkentity(id) then
return false
end
onconfirm(id)
return true
end, oncancel)
end
function TriggerFixCppLogicExtension.SetGUIStateSelectPos(onconfirm, mouse, checkpos, oncancel)
TriggerFixCppLogicExtension.GUIStateCustomMouse = mouse
CppLogic.UI.SetGUIStateLuaSelection(function(x, y)
local p = CppLogic.UI.GetLandscapePosAtScreenPos(x, y)
if not IsValidPosition(p) then
return false
end
if checkpos and not checkpos(p) then
return false
end
onconfirm(p)
return true
end, oncancel)
end
function TriggerFixCppLogicExtension.SetGUIStateSelectPosInSector(onconfirm, mouse, sector, checkpos, oncancel)
TriggerFixCppLogicExtension.GUIStateCustomMouse = mouse
CppLogic.UI.SetGUIStateLuaSelection(function(x, y)
local p = CppLogic.UI.GetLandscapePosAtScreenPos(x, y)
if not IsValidPosition(p) then
return false
end
if CppLogic.Logic.LandscapeGetSector(p) ~= sector then
p = CppLogic.Logic.LandscapeGetNearestUnblockedPosInSector(p, sector, 2000)
end
if not IsValidPosition(p) or CppLogic.Logic.LandscapeGetSector(p) ~= sector then
return false
end
if checkpos and not checkpos(p) then
return false
end
onconfirm(p)
return true
end, oncancel)
end
function TriggerFixCppLogicExtension.AddMapArchiveToLoadOrder(path)
TriggerFixCppLogicExtension.RemoveArchiveOnLeave = true
if not path then
path = CppLogic.API.MapGetDataPath(Framework.GetCurrentMapName(), Framework.GetCurrentMapTypeAndCampaignName())
end
assert(string.find(path, ".s5x"))
local lo = CppLogic.Logic.GetLoadOrder()
for _,p in ipairs(lo) do
if p==path then
return
end
end
CppLogic.Logic.AddArchive(path)
end
AddMapStartAndSaveLoadedCallback("TriggerFixCppLogicExtension.Init")
AddMapStartCallback("TriggerFixCppLogicExtension.AddLeaveTrigger")
AdvancedDealDamageSource = {
Unknown = 0,
Melee = 1,
Arrow = 2,
Cannonball = 3,
AbilitySnipe = 10,
AbilityCircularAttack = 11,
AbilityBomb = 12,
AbilitySabotageSingleTarget = 13,
AbilitySabotageBlast = 14,
AbilityShuriken = 15,
Script = 25,
};
avg_line_length: 44.441667 | max_line_length: 187 | alphanum_fraction: 0.723701
hexsha: bb3648e855870f1a6fcab377be3eb8892eb5982c | size: 7,385 | ext: cs | lang: C# | path: Bash.Common/Models/BashData.cs | repo: b3nk4n/ibash0r-app | head hexsha: 4d5e4ebeae3d920e6699c4efd4db06779b1c33bb | licenses: ["MIT"] | stars: null | issues: null | forks: null
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Text;
using PhoneKit.Framework.Core.MVVM;
namespace Bash.Common.Models
{
[DataContract]
public class BashData : ViewModelBase
{
private const string NEWLINE = "[newline]";
private List<BashQuoteItem> _cachedQuoteItems;
private const double LINE_LENGTH = 52.0;
public BashData()
{
}
[DataMember(Name = "ident")]
public int Id { get; set; }
[DataMember(Name = "ts")]
public string Timestamp { get; set; }
public string ShortTimestamp
{
get
{
return Timestamp.Split(' ')[0];
}
}
[DataMember(Name = "content")]
public string Content { get; set; }
private int _rating;
[DataMember(Name = "rating")]
public int Rating
{
get
{
return _rating;
}
set
{
if (_rating != value)
{
_rating = value;
NotifyPropertyChanged("Rating");
}
}
}
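        // The raw Content string is a "[newline]"-separated IRC-style conversation, e.g. (hypothetical
        // sample, not taken from real data):
        //   "<alice> hi there[newline]<bob> hello[newline]*** bob has quit IRC"
        // Lines of the form "<nick> text" become per-person quote items; server-style lines
        // ("***", "-->", "<--", "-!-") are attributed to a pseudo "server" entry with PersonIndex == -1.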
public List<BashQuoteItem> QuoteItems
{
get
{
if (_cachedQuoteItems != null)
return _cachedQuoteItems;
var result = new List<BashQuoteItem>();
var persons = new Dictionary<string, int>();
string[] splittedConversation = Content.Split(new string[]{ NEWLINE }, StringSplitOptions.RemoveEmptyEntries);
foreach (var conversationPart in splittedConversation)
{
string nick;
int personIndex;
string text;
int nameOpen = conversationPart.IndexOf('<');
int nameClose = conversationPart.IndexOf('>');
int heightScore;
if (nameOpen != -1 && nameClose != -1)
{
nick = conversationPart.Substring(nameOpen + 1, nameClose - nameOpen - 1);
text = conversationPart.Substring(nameClose + 1, conversationPart.Length - nameClose - 1).Trim();
if (persons.ContainsKey(nick))
{
personIndex = persons[nick];
}
else
{
personIndex = persons.Count;
persons.Add(nick, personIndex);
}
heightScore = 1 + (int)Math.Ceiling(text.Length / LINE_LENGTH);
}
else if (IsServerText(conversationPart))
{
nick = "server";
personIndex = -1;
text = TrimServerText(conversationPart);
heightScore = 2;
}
else // belongs to the quote before
{
if (result.Count > 0)
{
var itemBefore = result[result.Count - 1];
itemBefore.Text += '\n' + conversationPart;
itemBefore.HeightScore += (int)Math.Ceiling(conversationPart.Length / LINE_LENGTH);
}
continue;
}
result.Add(new BashQuoteItem
{
Nick = nick,
PersonIndex = personIndex,
Text = text,
IndexPosition = result.Count,
HeightScore = heightScore
});
}
_cachedQuoteItems = result;
return result;
}
}
public int GuessHeightScore()
{
int heightScore = 0;
foreach (var item in QuoteItems)
{
heightScore += item.HeightScore;
}
return heightScore;
}
private string TrimServerText(string text)
{
if (text.StartsWith("*** ") || text.StartsWith("<-- ") || text.StartsWith("--> "))
{
return text.Substring(4, text.Length - 4);
}
else if (text.StartsWith("* "))
{
return text.Substring(2, text.Length - 2);
}
if (text.StartsWith("*"))
{
return text.Substring(1, text.Length - 1);
}
return text;
}
private bool IsServerText(string text)
{
return ((text.StartsWith("*") || text.StartsWith("<--") || text.StartsWith("-->") || text.StartsWith("-!-")) &&
(text.Contains(" was banned from the server") ||
text.Contains(" is back from") ||
text.Contains(" was kicked by") ||
text.Contains("Quits: ") ||
text.Contains("Joins: ") ||
text.Contains(" has joined") ||
text.Contains(" has quit (") ||
text.Contains(" changed nick to") ||
text.Contains(" wirft seine Tastatur ausm Fenster") ||
text.Contains(" is now known as") ||
text.Contains("Parts: ") ||
text.Contains(" sets mode: ") ||
text.Contains(" left channel (") ||
text.Contains(" has left ") ||
text.Contains(" is away -")) ||
text.Contains(" has quit IRC") ||
text.Equals("---- 1 Stunde später ----") ||
text.Equals("[2 Tage später]") ||
text.Equals("- etwa einen Tag später -"));
}
public override bool Equals(object obj)
{
if (this == obj)
return true;
if (obj == null)
return false;
if (this.GetType() != obj.GetType())
return false;
var data = obj as BashData;
return data.Id == this.Id;
}
public override int GetHashCode()
{
return this.Id.GetHashCode();
}
public string QuoteString {
get
{
var sb = new StringBuilder();
bool isNotFirst = false;
foreach(var quote in QuoteItems)
{
if (isNotFirst)
sb.Append('\n');
else
isNotFirst = true;
if (quote.PersonIndex == -1)
sb.Append(string.Format("*** {0}", quote.Text));
else
sb.Append(string.Format("<{0}> {1}", quote.Nick, quote.Text));
}
return sb.ToString();
}
}
public Uri Uri
{
get
{
return new Uri(string.Format(@"http://www.ibash.de/zitat_{0}.html", Id), UriKind.Absolute);
}
}
}
}
avg_line_length: 31.969697 | max_line_length: 126 | alphanum_fraction: 0.419499
hexsha: bdca4b8304cf57c7ad11f544dac8fd076abf2a18 | size: 19,743 | ext: sql | lang: SQL | path: 25072021pj2.sql | repo: hiep820/PJ-2 | head hexsha: 685fc90fb56623606137a3b74d5a94711c2a7a87 | licenses: ["MIT"] | stars: null | issues: null | forks: null
-- phpMyAdmin SQL Dump
-- version 5.1.0
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Jul 26, 2021 at 11:17 AM
-- Server version: 10.4.18-MariaDB
-- PHP Version: 7.4.16
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `25072021pj2`
--
-- --------------------------------------------------------
--
-- Table structure for table `book`
--
CREATE TABLE `book` (
`id_book` int(10) UNSIGNED NOT NULL,
`title_book` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`quantity` int(11) NOT NULL,
`id_subjects` int(10) UNSIGNED NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `book`
--
INSERT INTO `book` (`id_book`, `title_book`, `quantity`, `id_subjects`) VALUES
(4, 'Sách A', 3, 4),
(5, 'Sách Z', 10, 3);
-- --------------------------------------------------------
--
-- Table structure for table `course`
--
CREATE TABLE `course` (
`id_course` int(10) UNSIGNED NOT NULL,
`name_course` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `course`
--
INSERT INTO `course` (`id_course`, `name_course`) VALUES
(4, 'Khóa Học A');
-- --------------------------------------------------------
--
-- Table structure for table `failed_jobs`
--
CREATE TABLE `failed_jobs` (
`id` bigint(20) UNSIGNED NOT NULL,
`uuid` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`connection` text COLLATE utf8mb4_unicode_ci NOT NULL,
`queue` text COLLATE utf8mb4_unicode_ci NOT NULL,
`payload` longtext COLLATE utf8mb4_unicode_ci NOT NULL,
`exception` longtext COLLATE utf8mb4_unicode_ci NOT NULL,
`failed_at` timestamp NOT NULL DEFAULT current_timestamp()
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `grade`
--
CREATE TABLE `grade` (
`id_grade` int(10) UNSIGNED NOT NULL,
`name_grade` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`id_course` int(10) UNSIGNED NOT NULL,
`status` tinyint(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `grade`
--
INSERT INTO `grade` (`id_grade`, `name_grade`, `id_course`, `status`) VALUES
(3, 'Lớp B', 4, 0);
-- --------------------------------------------------------
--
-- Table structure for table `invoice`
--
CREATE TABLE `invoice` (
`id_invoice` int(10) UNSIGNED NOT NULL,
`exportDate` date NOT NULL,
`id_student` int(10) UNSIGNED NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `invoice_detail`
--
CREATE TABLE `invoice_detail` (
`id_invoice` int(10) UNSIGNED NOT NULL,
`id_book` int(10) UNSIGNED NOT NULL,
`isReceived` tinyint(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `migrations`
--
CREATE TABLE `migrations` (
`id` int(10) UNSIGNED NOT NULL,
`migration` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`batch` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `migrations`
--
INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES
(1, '2021_06_15_014315_course', 1),
(2, '2021_06_15_025607_grade', 1),
(3, '2021_06_15_030124_subjects', 1),
(4, '2021_06_15_030153_book', 1),
(5, '2021_06_15_030154_student', 1),
(6, '2021_06_15_031014_invoice', 1),
(7, '2021_06_15_031454_ministry', 1),
(8, '2021_06_15_043307_invoice_detail', 1),
(9, '2014_10_12_000000_create_users_table', 2),
(10, '2014_10_12_100000_create_password_resets_table', 2),
(11, '2019_08_19_000000_create_failed_jobs_table', 2),
(12, '2021_07_25_094809_create_permission_tables', 3);
-- --------------------------------------------------------
--
-- Table structure for table `ministry`
--
CREATE TABLE `ministry` (
`id_ministry` int(10) UNSIGNED NOT NULL,
`name_ministry` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`username` varchar(30) COLLATE utf8mb4_unicode_ci NOT NULL,
`password` varchar(30) COLLATE utf8mb4_unicode_ci NOT NULL,
`gender` tinyint(1) NOT NULL,
`phone` char(10) COLLATE utf8mb4_unicode_ci NOT NULL,
`role` tinyint(1) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `ministry`
--
INSERT INTO `ministry` (`id_ministry`, `name_ministry`, `username`, `password`, `gender`, `phone`, `role`) VALUES
(4, 'Nguyễn Thu Thảo', '[email protected]', '123456', 1, '0986182756', 0);
-- --------------------------------------------------------
--
-- Table structure for table `model_has_permissions`
--
CREATE TABLE `model_has_permissions` (
`permission_id` bigint(20) UNSIGNED NOT NULL,
`model_type` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`model_id` bigint(20) UNSIGNED NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `model_has_roles`
--
CREATE TABLE `model_has_roles` (
`role_id` bigint(20) UNSIGNED NOT NULL,
`model_type` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`model_id` bigint(20) UNSIGNED NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `model_has_roles`
--
INSERT INTO `model_has_roles` (`role_id`, `model_type`, `model_id`) VALUES
(1, 'App\\Models\\User', 1),
(1, 'App\\Models\\User', 3),
(1, 'App\\Models\\User', 4),
(4, 'App\\Models\\User', 2);
-- --------------------------------------------------------
--
-- Table structure for table `password_resets`
--
CREATE TABLE `password_resets` (
`email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`token` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `permissions`
--
CREATE TABLE `permissions` (
`id` bigint(20) UNSIGNED NOT NULL,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`guard_name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `permissions`
--
INSERT INTO `permissions` (`id`, `name`, `guard_name`, `created_at`, `updated_at`) VALUES
(1, 'role-list', 'web', '2021-07-25 02:58:19', '2021-07-25 02:58:19'),
(2, 'role-create', 'web', '2021-07-25 02:58:19', '2021-07-25 02:58:19'),
(3, 'role-edit', 'web', '2021-07-25 02:58:19', '2021-07-25 02:58:19'),
(4, 'role-delete', 'web', '2021-07-25 02:58:19', '2021-07-25 02:58:19'),
(5, 'ds-taikhoan', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(6, 'tao-taikhoan', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(7, 'capnhat-taikhoan', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(8, 'xoa-taikhoan', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(9, 'ds-khoahoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(10, 'tao-khoahoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(11, 'capnhat-khoahoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(12, 'xoa-khoahoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(13, 'ds-lophoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(14, 'tao-lophoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(15, 'capnhat-lophoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(16, 'xoa-lophoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(17, 'ds-sinhvien', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(18, 'tao-sinhvien', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(19, 'capnhat-sinhvien', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(20, 'xoa-sinhvien', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(21, 'ds-monhoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(22, 'tao-monhoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(23, 'capnhat-monhoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(24, 'xoa-monhoc', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(25, 'ds-sach', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(26, 'tao-sach', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(27, 'capnhat-sach', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49'),
(28, 'xoa-sach', 'web', '2021-07-25 06:11:49', '2021-07-25 06:11:49');
-- --------------------------------------------------------
--
-- Table structure for table `roles`
--
CREATE TABLE `roles` (
`id` bigint(20) UNSIGNED NOT NULL,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`guard_name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `roles`
--
INSERT INTO `roles` (`id`, `name`, `guard_name`, `created_at`, `updated_at`) VALUES
(1, 'Admin', 'web', '2021-07-25 02:59:04', '2021-07-25 02:59:04'),
(4, 'Quản lý Sinh viên', 'web', '2021-07-25 06:27:48', '2021-07-25 06:27:48');
-- --------------------------------------------------------
--
-- Table structure for table `role_has_permissions`
--
CREATE TABLE `role_has_permissions` (
`permission_id` bigint(20) UNSIGNED NOT NULL,
`role_id` bigint(20) UNSIGNED NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `role_has_permissions`
--
INSERT INTO `role_has_permissions` (`permission_id`, `role_id`) VALUES
(1, 1),
(2, 1),
(3, 1),
(4, 1),
(5, 1),
(6, 1),
(7, 1),
(8, 1),
(9, 1),
(10, 1),
(11, 1),
(12, 1),
(13, 1),
(14, 1),
(15, 1),
(16, 1),
(17, 1),
(17, 4),
(18, 1),
(18, 4),
(19, 1),
(19, 4),
(20, 1),
(20, 4),
(21, 1),
(22, 1),
(23, 1),
(24, 1),
(25, 1),
(26, 1),
(27, 1),
(28, 1);
-- --------------------------------------------------------
--
-- Table structure for table `student`
--
CREATE TABLE `student` (
`id_student` int(10) UNSIGNED NOT NULL,
`name_student` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`birthday` date NOT NULL,
`gender` tinyint(1) NOT NULL,
`status` tinyint(1) NOT NULL,
`id_grade` int(10) UNSIGNED NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `student`
--
INSERT INTO `student` (`id_student`, `name_student`, `birthday`, `gender`, `status`, `id_grade`) VALUES
(6, 'Sinh viên B', '2021-07-25', 0, 0, 3);
-- --------------------------------------------------------
--
-- Table structure for table `subjects`
--
CREATE TABLE `subjects` (
`id_subjects` int(10) UNSIGNED NOT NULL,
`name_subjects` varchar(50) COLLATE utf8mb4_unicode_ci NOT NULL,
`id_grade` int(10) UNSIGNED NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `subjects`
--
INSERT INTO `subjects` (`id_subjects`, `name_subjects`, `id_grade`) VALUES
(3, 'Môn C', 3),
(4, 'Môn B', 3);
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
CREATE TABLE `users` (
`id` bigint(20) UNSIGNED NOT NULL,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`email_verified_at` timestamp NULL DEFAULT NULL,
`password` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`remember_token` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`status` int(11) DEFAULT 0
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `users`
--
INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`, `status`) VALUES
(1, 'Hardik Savani', '[email protected]', NULL, '$2y$10$90mJLEDxYqG8M95AdM3T/eEhUD4JlsNLba/y5BZKTOYZ2RY8DJpd.', NULL, '2021-07-25 02:59:04', '2021-07-25 02:59:04', 0),
(2, 'qlsv', '[email protected]', NULL, '$2y$10$BUCpLekqXlbXMH6AeZHIkuXDI/ivWGe3HgCAly77ch5NvYOiS0MQ.', NULL, '2021-07-25 06:28:16', '2021-07-25 06:58:59', 1),
(3, 'qlsach', '[email protected]', NULL, '$2y$10$UCpo7l3VDvxttLqzluetbuMBTUGxd4NUelPFu93qLG/qOw7Dp6d0u', NULL, '2021-07-25 06:59:29', '2021-07-25 06:59:29', 0),
(4, 'qlmonhoc', '[email protected]', NULL, '$2y$10$bXb/bKSnYoeOP3HcZkayw.sx4C2ioIbHjbMzXRMKz9evlOS1NN14W', NULL, '2021-07-25 07:01:54', '2021-07-25 07:07:34', 1);
--
-- Indexes for dumped tables
--
--
-- Indexes for table `book`
--
ALTER TABLE `book`
ADD PRIMARY KEY (`id_book`),
ADD KEY `book_id_subjects_foreign` (`id_subjects`);
--
-- Indexes for table `course`
--
ALTER TABLE `course`
ADD PRIMARY KEY (`id_course`);
--
-- Indexes for table `failed_jobs`
--
ALTER TABLE `failed_jobs`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `failed_jobs_uuid_unique` (`uuid`);
--
-- Indexes for table `grade`
--
ALTER TABLE `grade`
ADD PRIMARY KEY (`id_grade`),
ADD KEY `grade_id_course_foreign` (`id_course`);
--
-- Indexes for table `invoice`
--
ALTER TABLE `invoice`
ADD PRIMARY KEY (`id_invoice`),
ADD KEY `invoice_id_student_foreign` (`id_student`);
--
-- Indexes for table `invoice_detail`
--
ALTER TABLE `invoice_detail`
ADD PRIMARY KEY (`id_invoice`),
ADD KEY `invoice_detail_id_book_foreign` (`id_book`);
--
-- Indexes for table `migrations`
--
ALTER TABLE `migrations`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `ministry`
--
ALTER TABLE `ministry`
ADD PRIMARY KEY (`id_ministry`);
--
-- Indexes for table `model_has_permissions`
--
ALTER TABLE `model_has_permissions`
ADD PRIMARY KEY (`permission_id`,`model_id`,`model_type`),
ADD KEY `model_has_permissions_model_id_model_type_index` (`model_id`,`model_type`);
--
-- Indexes for table `model_has_roles`
--
ALTER TABLE `model_has_roles`
ADD PRIMARY KEY (`role_id`,`model_id`,`model_type`),
ADD KEY `model_has_roles_model_id_model_type_index` (`model_id`,`model_type`);
--
-- Indexes for table `password_resets`
--
ALTER TABLE `password_resets`
ADD KEY `password_resets_email_index` (`email`);
--
-- Indexes for table `permissions`
--
ALTER TABLE `permissions`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `permissions_name_guard_name_unique` (`name`,`guard_name`);
--
-- Indexes for table `roles`
--
ALTER TABLE `roles`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `roles_name_guard_name_unique` (`name`,`guard_name`);
--
-- Indexes for table `role_has_permissions`
--
ALTER TABLE `role_has_permissions`
ADD PRIMARY KEY (`permission_id`,`role_id`),
ADD KEY `role_has_permissions_role_id_foreign` (`role_id`);
--
-- Indexes for table `student`
--
ALTER TABLE `student`
ADD PRIMARY KEY (`id_student`),
ADD KEY `student_id_grade_foreign` (`id_grade`);
--
-- Indexes for table `subjects`
--
ALTER TABLE `subjects`
ADD PRIMARY KEY (`id_subjects`),
ADD KEY `subjects_id_grade_foreign` (`id_grade`);
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `users_email_unique` (`email`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `book`
--
ALTER TABLE `book`
MODIFY `id_book` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;
--
-- AUTO_INCREMENT for table `course`
--
ALTER TABLE `course`
MODIFY `id_course` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `failed_jobs`
--
ALTER TABLE `failed_jobs`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `grade`
--
ALTER TABLE `grade`
MODIFY `id_grade` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `invoice`
--
ALTER TABLE `invoice`
MODIFY `id_invoice` int(10) UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `invoice_detail`
--
ALTER TABLE `invoice_detail`
MODIFY `id_invoice` int(10) UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `migrations`
--
ALTER TABLE `migrations`
MODIFY `id` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=13;
--
-- AUTO_INCREMENT for table `ministry`
--
ALTER TABLE `ministry`
MODIFY `id_ministry` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `permissions`
--
ALTER TABLE `permissions`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=29;
--
-- AUTO_INCREMENT for table `roles`
--
ALTER TABLE `roles`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `student`
--
ALTER TABLE `student`
MODIFY `id_student` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=7;
--
-- AUTO_INCREMENT for table `subjects`
--
ALTER TABLE `subjects`
MODIFY `id_subjects` int(10) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
MODIFY `id` bigint(20) UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `book`
--
ALTER TABLE `book`
ADD CONSTRAINT `book_id_subjects_foreign` FOREIGN KEY (`id_subjects`) REFERENCES `subjects` (`id_subjects`);
--
-- Constraints for table `grade`
--
ALTER TABLE `grade`
ADD CONSTRAINT `grade_id_course_foreign` FOREIGN KEY (`id_course`) REFERENCES `course` (`id_course`);
--
-- Constraints for table `invoice`
--
ALTER TABLE `invoice`
ADD CONSTRAINT `invoice_id_student_foreign` FOREIGN KEY (`id_student`) REFERENCES `student` (`id_student`);
--
-- Constraints for table `invoice_detail`
--
ALTER TABLE `invoice_detail`
ADD CONSTRAINT `invoice_detail_id_book_foreign` FOREIGN KEY (`id_book`) REFERENCES `book` (`id_book`),
ADD CONSTRAINT `invoice_detail_id_invoice_foreign` FOREIGN KEY (`id_invoice`) REFERENCES `invoice` (`id_invoice`);
--
-- Constraints for table `model_has_permissions`
--
ALTER TABLE `model_has_permissions`
ADD CONSTRAINT `model_has_permissions_permission_id_foreign` FOREIGN KEY (`permission_id`) REFERENCES `permissions` (`id`) ON DELETE CASCADE;
--
-- Constraints for table `model_has_roles`
--
ALTER TABLE `model_has_roles`
ADD CONSTRAINT `model_has_roles_role_id_foreign` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`) ON DELETE CASCADE;
--
-- Constraints for table `role_has_permissions`
--
ALTER TABLE `role_has_permissions`
ADD CONSTRAINT `role_has_permissions_permission_id_foreign` FOREIGN KEY (`permission_id`) REFERENCES `permissions` (`id`) ON DELETE CASCADE,
ADD CONSTRAINT `role_has_permissions_role_id_foreign` FOREIGN KEY (`role_id`) REFERENCES `roles` (`id`) ON DELETE CASCADE;
--
-- Constraints for table `student`
--
ALTER TABLE `student`
ADD CONSTRAINT `student_id_grade_foreign` FOREIGN KEY (`id_grade`) REFERENCES `grade` (`id_grade`);
--
-- Constraints for table `subjects`
--
ALTER TABLE `subjects`
ADD CONSTRAINT `subjects_id_grade_foreign` FOREIGN KEY (`id_grade`) REFERENCES `grade` (`id_grade`);
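--
-- Example query (illustrative only, not part of the original dump): list each book together with
-- its subject, grade and course by following the foreign keys defined above.
--
-- SELECT b.title_book, s.name_subjects, g.name_grade, c.name_course
-- FROM `book` b
-- JOIN `subjects` s ON s.id_subjects = b.id_subjects
-- JOIN `grade` g ON g.id_grade = s.id_grade
-- JOIN `course` c ON c.id_course = g.id_course;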
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
avg_line_length: 29.205621 | max_line_length: 165 | alphanum_fraction: 0.664641
hexsha: 1a591f71a097fb277a222e3b29d0120fbc628fc4 | size: 10,657 | ext: py | lang: Python | path: Functions/NestedComBat.py | repo: hannah-horng/generalized-combat | head hexsha: 5f58f960484084fc914f834e71f2b1bd8f7a6946 | licenses: ["MIT"] | stars: null | issues: null | forks: null
# Written by Hannah Horng ([email protected])
import os
from os.path import dirname, abspath
import pymongo
from sacred import Experiment, SETTINGS
SETTINGS.CONFIG.READ_ONLY_CONFIG = False
from sacred.observers import FileStorageObserver
from sacred.observers import MongoObserver
from sacred.utils import apply_backspaces_and_linefeeds
import sys
import torch as th
from components.transforms import _merge_dicts
from run import run
from utils.logging import get_logger
import traceback
import warnings
import sys
def warn_with_traceback(message, category, filename, lineno, file=None, line=None):
log = file if hasattr(file,'write') else sys.stderr
traceback.print_stack(file=log)
log.write(warnings.formatwarning(message, category, filename, lineno, line))
# warnings.showwarning = warn_with_traceback
# warnings.simplefilter("always")
SETTINGS['CAPTURE_MODE'] = "fd" # set to "no" if you want to see stdout/stderr in console
logger = get_logger()
ex = Experiment("fastmarl")
ex.logger = logger
ex.captured_out_filter = apply_backspaces_and_linefeeds
results_path = os.path.join(dirname(dirname(abspath(__file__))), "results")
def setup_file_observer():
file_obs_path = os.path.join(results_path, "sacred")
logger.info("FileStorageObserver path: {}".format(file_obs_path))
logger.info("Using the FileStorageObserver in results/sacred")
ex.observers.append(FileStorageObserver.create(file_obs_path))
pass
@ex.main
def my_main(_run, _config, _log, env_args):
# global mongo_client
mongo_client = None
# Setting the random seed throughout the modules
np.random.seed(_config["seed"])
th.manual_seed(_config["seed"])
env_args['seed'] = _config["seed"]
# run the framework
run(_run, _config, _log, mongo_client)
# force exit
os._exit()
if __name__ == '__main__':
### Execute functions that modify the directory tree
from copy import deepcopy
from distutils.dir_util import copy_tree
import os
if os.path.exists("/fastmarl/3rdparty") and \
os.path.exists("/fastmarl/src"): # de facto only happens if called in docker file
fromDirectory = "/fastmarl/src/envs/starcraft2/maps"
toDirectory = "/fastmarl/3rdparty/StarCraftII__3.16.1/Maps/Melee"
if os.path.exists(toDirectory):
print("COPYING... {} to {}".format(fromDirectory, toDirectory))
copy_tree(fromDirectory, toDirectory)
toDirectory = "/fastmarl/3rdparty/StarCraftII__4.1.2/Maps/Melee"
if os.path.exists(toDirectory):
print("COPYING... {} to {}".format(fromDirectory, toDirectory))
copy_tree(fromDirectory, toDirectory)
### End execute functions that modify the directory tree
params = deepcopy(sys.argv)
defaults = []
config_dic = {}
# manually parse for experiment tags
del_indices = []
exp_name = None
for _i, _v in enumerate(params):
if _v.split("=")[0] == "--exp_name":
del_indices.append(_i)
exp_name = _v.split("=")[1]
break
# load experiment config (if there is such as thing)
exp_dic = None
if exp_name is not None:
from config.experiments import REGISTRY as exp_REGISTRY
print(exp_REGISTRY)
assert exp_name in exp_REGISTRY, "Unknown experiment name: {}".format(exp_name)
exp_dic = exp_REGISTRY[exp_name](None, logger)
if "defaults" in exp_dic:
defaults.extend(exp_dic["defaults"].split(" "))
del exp_dic["defaults"]
config_dic = deepcopy(exp_dic)
# check for defaults in command line parameters
for _i, _v in enumerate(params):
if _v.split("=")[0] == "--default_cfgs":
del_indices.append(_i)
defaults.extend(_v.split("=")[1].split(" "))
break
# load default configs in order
for _d in defaults:
from config.defaults import REGISTRY as def_REGISTRY
def_dic = def_REGISTRY[_d](config_dic, logger)
config_dic = _merge_dicts(config_dic, def_dic)
# finally merge with experiment config
if exp_name is not None:
config_dic = _merge_dicts(config_dic, exp_dic)
# add results path to config
config_dic["local_results_path"] = results_path
# now add all the config to sacred
ex.add_config(config_dic)
# delete indices that contain custom experiment tags
for _i in sorted(del_indices, reverse=True):
del params[_i]
if config_dic.get("observe_file", True):
setup_file_observer()
ex.run_commandline(params)
| 32.297872 | 89 | 0.692578 |
1a591f71a097fb277a222e3b29d0120fbc628fc4 | 10,657 | py | Python | Functions/NestedComBat.py | hannah-horng/generalized-combat | 5f58f960484084fc914f834e71f2b1bd8f7a6946 | [
"MIT"
] | null | null | null | Functions/NestedComBat.py | hannah-horng/generalized-combat | 5f58f960484084fc914f834e71f2b1bd8f7a6946 | [
"MIT"
] | null | null | null | Functions/NestedComBat.py | hannah-horng/generalized-combat | 5f58f960484084fc914f834e71f2b1bd8f7a6946 | [
"MIT"
] | null | null | null | # Written by Hannah Horng ([email protected])
import pandas as pd
import neuroCombat as nC
from sklearn.preprocessing import LabelEncoder
import matplotlib.pyplot as plt
from scipy.stats import ranksums, ttest_ind, ttest_rel, ks_2samp
import os
def NestedComBat(dat, covars, batch_list, categorical_cols=None, continuous_cols=None, drop=False,
write_p=False, plotting=False, filepath=''):
"""
Completes sequential nested ComBat harmonization on an input DataFrame. Order is determined by number of features
with statistically significant differences in distribution (KS test) due to a particular batch effect.
Arguments
---------
data : DataFrame of original data with shape (features, samples)
covars : DataFrame with shape (samples, covariates) corresponding to original data. All variables should be label-
encoded (i.e. strings converted to integer designations)
batch_list : list of strings indicating batch effect column names within covars (i.e. ['Manufacturer', 'CE'...])
categorical_cols : string or list of strings of categorical variables to adjust for
continuous_cols : string or list of strings of continuous variables to adjust for
drop : Boolean, if True -- features with significant differences in distribution due to the batch effect being
harmonized are dropped with each iteration (corresponds to NestedD)
write_p : Boolean, if True -- KS test p-values will be written as a CSV into the directory created from filepath
plotting : Boolean, if True -- kernel density plots will be written as image files into the directory created from
filepath
filepath : root directory path for saving KS test p-values and kernel density plots created during harmonization
Returns
-------
new_dat : DataFrame with shape (features, samples) that has been sequentially harmonized with Nested ComBat
"""
p_dict = {}
count_dict = {}
f_dict = {}
print('ROUND 1:')
for a in range(len(batch_list)):
batch_col = batch_list[a]
print('Harmonizing by ' + batch_col + '...')
filepath2 = filepath + 'Round 1/' + batch_col + '/'
if not os.path.exists(filepath2):
os.makedirs(filepath2)
# RUN COMBAT
print('ComBat with Raw Data...')
output = nC.neuroCombat(dat, covars, batch_col, continuous_cols=continuous_cols,
categorical_cols=categorical_cols)['data']
output_df = pd.DataFrame.from_records(output.T)
output_df.columns = dat.T.columns
f_dict[batch_col] = output_df
if plotting:
combat_histograms(dat.T, output_df, covars, covars, batch_col, filepath2)
if write_p:
p_values = combat_kstest(dat.T, output_df, covars, covars, batch_col, write=True, filepath=filepath2)
else:
p_values = combat_kstest(dat.T, output_df, covars, covars, batch_col)
p_values.index = output_df.columns
p_dict[batch_col] = p_values['ComBat']
count_dict[batch_col] = len(p_values[p_values['ComBat'] < .05])
drop_feature = [key for key, value in count_dict.items() if value == min(count_dict.values())][0]
# Iteration
batch_list2 = batch_list.copy()
batch_list2.remove(drop_feature)
new_data_df = f_dict[drop_feature]
new_pvalues = p_dict[drop_feature]
new_dat = new_data_df.T
if drop:
new_dat = new_data_df.T[new_pvalues > .05] # Dropping every iteration
c = 1
while len(batch_list2) > 0:
print('ROUND ' + str(c+1) + ':')
p_dict = {}
count_dict = {}
f_dict = {}
c = c+1
for b in range(len(batch_list2)):
batch_col = batch_list2[b]
print('Harmonizing by ' + batch_col + '...')
filepath2 = filepath+'Round '+str(c) + '/' + batch_col+'/'
if not os.path.exists(filepath2):
os.makedirs(filepath2)
# RUN COMBAT
# print('ComBat with Raw Data...')
output = nC.neuroCombat(new_dat, covars, batch_col, continuous_cols=continuous_cols, categorical_cols=categorical_cols)['data']
output_df = pd.DataFrame.from_records(output.T)
output_df.columns = new_dat.T.columns
f_dict[batch_col] = output_df
if plotting:
combat_histograms(new_dat.T, output_df, covars, covars, batch_col, filepath2)
if write_p:
p_values = combat_kstest(new_dat.T, output_df, covars, covars, batch_col, write=True, filepath=filepath2)
else:
p_values = combat_kstest(new_dat.T, output_df, covars, covars, batch_col)
p_values.index = output_df.columns
p_dict[batch_col] = p_values['ComBat']
count_dict[batch_col] = len(p_values[p_values['ComBat'] < .05])
drop_feature = [key for key, value in count_dict.items() if value == min(count_dict.values())][0]
new_data_df = f_dict[drop_feature]
new_pvalues = p_dict[drop_feature]
if drop:
new_dat = new_data_df.T[new_pvalues > .05] # Iteration + Dropping
else:
new_dat = new_data_df.T
batch_list2.remove(drop_feature)
output_df = pd.DataFrame.from_records(new_dat.T)
output_df.columns = new_dat.T.columns
return output_df
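# Minimal usage sketch (illustrative only -- the file names and column names below are
# assumptions, not part of this module):
#   data_df = pd.read_csv('features.csv', index_col=0)    # shape (features, samples)
#   covars_df = pd.read_csv('covars.csv')                  # shape (samples, covariates), label-encoded
#   harmonized = NestedComBat(data_df, covars_df, batch_list=['Manufacturer', 'CE'],
#                             categorical_cols=['Gender'], filepath='nested_combat/')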
def combat_kstest(data, output, covars1, covars2, batch_col, filepath='', write=False):
"""
Calculating KS test for differences in distribution due to batch effect before and after harmonization
    *Note that this differs from the version in GMMComBat only by file destination naming
Arguments
---------
data : DataFrame of original data with shape (samples, features)
output: DataFrame of harmonized data with shape (samples, features)
covars1 : DataFrame with shape (samples, covariates) corresponding to original data
covars2 : DataFrame with shape (samples, covariates) corresponding to harmonized data
batch_col : string indicating batch/imaging parameter name in covars
filepath : write destination for ks p-value DataFrame if write is True
write: Boolean, set to True to save ks p-value DataFrame
Returns
-------
    p_df : DataFrame with two columns corresponding to KS test p-values testing for significant differences in
distribution attributable to the batch effect specified by batch_col
"""
data_keys = data.keys()
batch_var1 = covars1[batch_col]
batch_var2 = covars2[batch_col]
data_0 = data[batch_var1 == 0]
data_1 = data[batch_var1 == 1]
output_0 = output[batch_var2 == 0]
output_1 = output[batch_var2 == 1]
# KS Test (more generalized differences in distribution)
p_before = []
p_after = []
for m in range(0, data.shape[1]):
p_value1 = ks_2samp(data_0.iloc[:, m], data_1.iloc[:, m])
p_value2 = ks_2samp(output_0.iloc[:, m], output_1.iloc[:, m])
p_before.append(p_value1.pvalue)
p_after.append(p_value2.pvalue)
p_df = pd.DataFrame({'Raw': p_before, 'ComBat': p_after})
if write:
p_df = pd.DataFrame({'Raw': p_before, 'ComBat': p_after})
p_df.index = data_keys
p_df.to_csv(filepath + '_' + batch_col + '_feature_ks_values.csv')
return p_df
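# Sketch of a direct call (illustrative names, not from this module): compare the distribution of
# each feature across one batch variable before vs. after ComBat:
#   p_df = combat_kstest(data_df, harmonized_df, covars_df, covars_df, 'Manufacturer')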
def combat_histograms(data, output, covars1, covars2, batch_col, filepath):
"""
Plots kernel density plots separated by batch effect groups and before vs. after ComBat harmonization
Arguments
---------
data : DataFrame of original data with shape (samples, features)
output: DataFrame of harmonized data with shape (samples, features)
covars1 : DataFrame with shape (samples, covariates) corresponding to original data
covars2 : DataFrame with shape (samples, covariates) corresponding to harmonized data
batch_col : string indicating batch/imaging parameter name in covars
filepath : write destination for kernel density plots
"""
print('Plotting histograms...')
data_keys = data.keys()
batch_var1 = covars1[batch_col]
batch_var2 = covars2[batch_col]
data_0 = data[batch_var1 == 0]
data_1 = data[batch_var1 == 1]
output_0 = output[batch_var2 == 0]
output_1 = output[batch_var2 == 1]
for k in range(0, data.shape[1]):
plt.figure()
data_0.iloc[:, k].plot.kde()
data_1.iloc[:, k].plot.kde()
output_0.iloc[:, k].plot.kde()
output_1.iloc[:, k].plot.kde()
plt.xlabel(data_keys[k])
leg = ["0", "1", "0_ComBat", "1_ComBat"]
plt.legend(leg, loc='upper right')
plt.rcParams.update({'font.size': 12})
filename = filepath + batch_col + '_' + 'histogram_' + data_keys[k] + ".png"
plt.savefig(filename, bbox_inches='tight')
plt.close()
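# --- Illustrative usage sketch (placeholder names, mirroring the call made earlier in this file) ---
#   combat_histograms(dat.T, harmonized, covars, covars, 'Manufacturer', 'results/Round 1/Manufacturer/')
# writes one kernel density figure per feature, overlaying each batch group before and after ComBat.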
def feature_kstest_histograms(dat, covars, batch_list, filepath):
"""
    Plots kernel density plots and computes KS test p-values separated by batch effect groups for a dataset (intended
    to assess differences in distribution due to all batch effects in batch_list following harmonization with
    NestedComBat)
    *Note that this differs from the version in GMMComBat only by file destination naming
Arguments
---------
    dat : DataFrame of (typically harmonized) data with shape (samples, features)
    covars : DataFrame with shape (samples, covariates) corresponding to dat. All variables should be label-
encoded (i.e. strings converted to integer designations)
batch_list : list of strings indicating batch effect column names within covars (i.e. ['Manufacturer', 'CE'...])
filepath : write destination for kernel density plots
"""
print('Plotting final feature histograms...')
p_df = pd.DataFrame()
for batch_col in batch_list:
p = []
split_col = covars[batch_col]
filepath2 = filepath + 'feature_histograms/' + batch_col + '/'
if not os.path.exists(filepath2):
os.makedirs(filepath2)
for feature in dat:
plt.figure()
dat[feature][split_col == 0].plot.kde()
dat[feature][split_col == 1].plot.kde()
plt.xlabel(feature)
filename = filepath2 + feature + '.png'
plt.savefig(filename, bbox_inches='tight')
plt.close()
p_value = ks_2samp(dat[feature][split_col == 0], dat[feature][split_col == 1])
p.append(p_value.pvalue)
p_df[batch_col] = p
p_df.index = dat.keys()
p_df.to_csv(filepath + 'final_nested_ks_values.csv')
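# --- Illustrative usage sketch (placeholder names/paths; assumes covars is label-encoded) ---
#   feature_kstest_histograms(harmonized_df, covars, ['Manufacturer', 'CE'], 'results/')
# writes one density plot per feature per batch effect plus final_nested_ks_values.csv of KS p-values.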
| 42.289683 | 139 | 0.663038 |
07b3ca19d715fd8aace95af6968ddbf79d668ee3 | 71 | cpp | C++ | cpp/example/src/CoinChange/coinChange.cpp | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | [
"MIT"
] | 1 | 2022-01-26T16:33:45.000Z | 2022-01-26T16:33:45.000Z | cpp/example/src/CoinChange/coinChange.cpp | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | [
"MIT"
] | null | null | null | cpp/example/src/CoinChange/coinChange.cpp | zcemycl/algoTest | 9518fb2b60fd83c85aeb2ab809ff647aaf643f0a | [
"MIT"
] | 1 | 2022-01-26T16:35:44.000Z | 2022-01-26T16:35:44.000Z | #include "coinChange.h"
int coinChange::naive(int n){
return n;
}
| 11.833333 | 29 | 0.661972 |
c401bc08ec86b95b8a9224ce279252c9a2ee9c31 | 2,553 | cc | C++ | tests/testfoundation/ringbuffertest.cc | sirAgg/nebula | 3fbccc73779944aa3e56b9e8acdd6fedd1d38006 | [
"BSD-2-Clause"
] | 377 | 2018-10-24T08:34:21.000Z | 2022-03-31T23:37:49.000Z | tests/testfoundation/ringbuffertest.cc | sirAgg/nebula | 3fbccc73779944aa3e56b9e8acdd6fedd1d38006 | [
"BSD-2-Clause"
] | 11 | 2020-01-22T13:34:46.000Z | 2022-03-07T10:07:34.000Z | tests/testfoundation/ringbuffertest.cc | sirAgg/nebula | 3fbccc73779944aa3e56b9e8acdd6fedd1d38006 | [
"BSD-2-Clause"
] | 23 | 2019-07-13T16:28:32.000Z | 2022-03-20T09:00:59.000Z | //------------------------------------------------------------------------------
// ringbuffertest.cc
// (C) 2008 Radon Labs GmbH
//------------------------------------------------------------------------------
#include "stdneb.h"
#include "ringbuffertest.h"
#include "util/ringbuffer.h"
namespace Test
{
__ImplementClass(Test::RingBufferTest, 'RBTT', Test::TestCase);
using namespace Util;
//------------------------------------------------------------------------------
/**
*/
void
RingBufferTest::Run()
{
RingBuffer<int> rb(5);
VERIFY(rb.Size() == 0);
VERIFY(rb.Capacity() == 5);
VERIFY(rb.IsEmpty());
rb.Add(1);
VERIFY(rb.Size() == 1);
VERIFY(!rb.IsEmpty());
VERIFY(rb.Front() == 1);
VERIFY(rb.Back() == 1);
VERIFY(1 == rb[0]);
rb.Add(2);
VERIFY(rb.Size() == 2);
VERIFY(rb.Front() == 1);
VERIFY(rb.Back() == 2);
VERIFY(1 == rb[0]);
VERIFY(2 == rb[1]);
rb.Add(3);
VERIFY(rb.Size() == 3);
VERIFY(rb.Front() == 1);
VERIFY(rb.Back() == 3);
VERIFY(1 == rb[0]);
VERIFY(2 == rb[1]);
VERIFY(3 == rb[2]);
rb.Add(4);
VERIFY(rb.Size() == 4);
VERIFY(rb.Front() == 1);
VERIFY(rb.Back() == 4);
VERIFY(1 == rb[0]);
VERIFY(2 == rb[1]);
VERIFY(3 == rb[2]);
VERIFY(4 == rb[3]);
rb.Add(5);
VERIFY(rb.Size() == 5);
VERIFY(rb.Front() == 1);
VERIFY(rb.Back() == 5);
VERIFY(1 == rb[0]);
VERIFY(2 == rb[1]);
VERIFY(3 == rb[2]);
VERIFY(4 == rb[3]);
VERIFY(5 == rb[4]);
rb.Add(6);
VERIFY(rb.Size() == 5);
VERIFY(rb.Front() == 2);
VERIFY(rb.Back() == 6);
VERIFY(2 == rb[0]);
VERIFY(3 == rb[1]);
VERIFY(4 == rb[2]);
VERIFY(5 == rb[3]);
VERIFY(6 == rb[4]);
rb.Add(7);
VERIFY(rb.Size() == 5);
VERIFY(rb.Front() == 3);
VERIFY(rb.Back() == 7);
VERIFY(3 == rb[0]);
VERIFY(4 == rb[1]);
VERIFY(5 == rb[2]);
VERIFY(6 == rb[3]);
VERIFY(7 == rb[4]);
// test copy constructor and assignment operator
RingBuffer<int> rb1 = rb;
VERIFY(rb1.Size() == 5);
VERIFY(rb1.Front() == 3);
VERIFY(rb1.Back() == 7);
VERIFY(3 == rb1[0]);
VERIFY(4 == rb1[1]);
VERIFY(5 == rb1[2]);
VERIFY(6 == rb1[3]);
VERIFY(7 == rb1[4]);
rb1.Reset();
VERIFY(rb1.Size() == 0);
rb1 = rb;
VERIFY(rb1.Size() == 5);
VERIFY(rb1.Front() == 3);
VERIFY(rb1.Back() == 7);
VERIFY(3 == rb1[0]);
VERIFY(4 == rb1[1]);
VERIFY(5 == rb1[2]);
VERIFY(6 == rb1[3]);
VERIFY(7 == rb1[4]);
}
} // namespace Test | 23.209091 | 80 | 0.444967 |
493637e7dd3bec871f2fe8b2f5f0e55841c21b58 | 298 | sql | SQL | sentencias/sql/create_schemas.sql | C1587S/OffenderDS | 45834a71895473b9fac2eac6fb590e4655d1259a | [
"MIT"
] | 2 | 2019-12-10T04:58:44.000Z | 2019-12-22T06:53:35.000Z | sentencias/sql/create_schemas.sql | C1587S/OffenderDS | 45834a71895473b9fac2eac6fb590e4655d1259a | [
"MIT"
] | 22 | 2019-12-09T04:14:25.000Z | 2019-12-12T04:07:09.000Z | sentencias/sql/create_schemas.sql | C1587S/OffenderDS | 45834a71895473b9fac2eac6fb590e4655d1259a | [
"MIT"
] | 2 | 2019-12-22T06:53:58.000Z | 2020-01-30T07:15:19.000Z | /*
Creamos los esquemas raw, cleaned y semantic, eliminándolos previamente en caso de que ya existieran
*/
drop schema if exists raw cascade;
create schema raw;
drop schema if exists cleaned cascade;
create schema cleaned;
drop schema if exists semantic cascade;
create schema semantic; | 27.090909 | 101 | 0.775168 |
bb7e3888d76c3c2377392b35c7dff90bd6db928b | 229 | cs | C# | tools/grial-uikit-extended-1.5.2/samples/Grial/Grial/Views/Theme/CommonViewsPage.xaml.cs | micbelgique/DevCamp2017-Team12 | 900af8365e2d1b4f9dc068ec5c23c8891f682926 | [
"MIT"
] | null | null | null | tools/grial-uikit-extended-1.5.2/samples/Grial/Grial/Views/Theme/CommonViewsPage.xaml.cs | micbelgique/DevCamp2017-Team12 | 900af8365e2d1b4f9dc068ec5c23c8891f682926 | [
"MIT"
] | null | null | null | tools/grial-uikit-extended-1.5.2/samples/Grial/Grial/Views/Theme/CommonViewsPage.xaml.cs | micbelgique/DevCamp2017-Team12 | 900af8365e2d1b4f9dc068ec5c23c8891f682926 | [
"MIT"
] | null | null | null | using System;
using System.Collections.Generic;
using Xamarin.Forms;
namespace UXDivers.Artina.Grial
{
public partial class CommonViewsPage : ContentPage
{
public CommonViewsPage()
{
InitializeComponent ();
}
}
}
| 13.470588 | 51 | 0.737991 |
e25b0f199e9ac77962d19226d34ffa4a6e1482ce | 1,038 | py | Python | config.py | rdriesen-vtj/vtj | e86da07e55d2a46b83b20cf8bae8900abcb9b4ee | [
"MIT"
] | null | null | null | config.py | rdriesen-vtj/vtj | e86da07e55d2a46b83b20cf8bae8900abcb9b4ee | [
"MIT"
] | null | null | null | config.py | rdriesen-vtj/vtj | e86da07e55d2a46b83b20cf8bae8900abcb9b4ee | [
"MIT"
] | null | null | null | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
#SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or 'sqlite:///' + os.path.join(basedir, 'app.db')
dbmysql = "vamostrabalhar"
usermysql = "admin"
senhamysql = "S3nhaADMIN#"
hostmysql = "127.0.0.1"
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://'+usermysql+':'+senhamysql+'@'+hostmysql+'/'+dbmysql
SQLALCHEMY_TRACK_MODIFICATIONS = False
SESSION_COOKIE_SECURE = True
REMEMBER_COOKIE_SECURE = True
    UPLOADED_PHOTOS_DEST = os.path.join(basedir, 'uploads') # you'll need to create a folder named 'uploads'
MAIL_SERVER = 'smtp.gmail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = '[email protected]'
MAIL_PASSWORD = 'Rtdqxj00'
FLASKY_MAIL_SUBJECT_PREFIX = '[Vamos Trabalhar Juntos]'
FLASKY_MAIL_SENDER = '[email protected]'
FLASKY_ADMIN = '[email protected]'
| 41.52 | 112 | 0.687861 |
4445dffd4596d7b719138a8d89037aff6730a55f | 1,547 | py | Python | src/pyfme/models/constants.py | gaofeng2020/PyFME | 26b76f0622a8dca0e24eb477a6fb4a8b2aa604d7 | [
"MIT"
] | 199 | 2015-12-29T19:49:42.000Z | 2022-03-19T14:31:24.000Z | src/pyfme/models/constants.py | gaofeng2020/PyFME | 26b76f0622a8dca0e24eb477a6fb4a8b2aa604d7 | [
"MIT"
] | 126 | 2015-09-23T11:15:42.000Z | 2020-07-29T12:27:22.000Z | src/pyfme/models/constants.py | gaofeng2020/PyFME | 26b76f0622a8dca0e24eb477a6fb4a8b2aa604d7 | [
"MIT"
] | 93 | 2015-12-26T13:02:29.000Z | 2022-03-19T14:31:13.000Z | # -*- coding: utf-8 -*-
"""
Python Flight Mechanics Engine (PyFME).
Copyright (c) AeroPython Development Team.
Distributed under the terms of the MIT License.
Constant variables
------------------
Sources:
[1] - COESA standard - U.S. Standard Atmosphere, 1976, U.S. Government Printing
Office, Washington, D.C., 1976:
http://hdl.handle.net/2060/19770009539
[2] - "Introducción a la Ingenería Aeroespacial". Sebastián Franchini,
Óscar López García. UPM
"""
# AIR CONSTANTS
# Adiabatic index or ratio of specific heats (dry air at 20º C) - [1]
GAMMA_AIR = 1.4
# Specific gas constant for dry air (J/(Kg·K))
R_AIR = 287.05287
# Air at sea level conditions h=0 (m)
RHO_0 = 1.225 # Density at sea level (kg/m3) - [1]
P_0 = 101325 # Pressure at sea level (Pa) - [1]
T_0 = 288.15 # Temperature at sea level (K) - [1]
SOUND_VEL_0 = 340.293990543 # Sound speed at sea level (m/s)
# EARTH CONSTANTS
GRAVITY = 9.80665 # Gravity of the Earth (m/s^2) - [1]
# Standard Gravitational Parameter
# product of the gravitational constant G and the mass M of the body (m³/s²)
STD_GRAVITATIONAL_PARAMETER = 3.986004418e14
EARTH_MASS = 5.9722e24 # Mass of the Earth (kg)
GRAVITATIONAL_CONSTANT = 6.67384e-11 # Gravitational constant (N·m²/kg²)
EARTH_MEAN_RADIUS = 6371000 # Mean radius of the Earth (m) - [2]
# CONVERSIONS
lbs2kg = 0.453592 # Pounds (lb) to kilograms (kg)
ft2m = 0.3048 # Feet (ft) to meters (m)
slug2kg = 14.5939 # Slug to kilograms (kg)
slugft2_2_kgm2 = 1.35581795 # Slug*feet^2 to kilograms*meters^2 (kg*m^2)
| 32.229167 | 79 | 0.693601 |
145ab9f991710c11339fa14a1e07ff87520891c1 | 547 | tsx | TypeScript | src/_app/render.tsx | VinayaSathyanarayana/next-page-tester | 476d7b731cfb283e68ea60fe7a16e810a94e4409 | [
"MIT"
] | null | null | null | src/_app/render.tsx | VinayaSathyanarayana/next-page-tester | 476d7b731cfb283e68ea60fe7a16e810a94e4409 | [
"MIT"
] | 2 | 2021-12-09T03:32:18.000Z | 2022-02-17T21:15:27.000Z | src/_app/render.tsx | VinayaSathyanarayana/next-page-tester | 476d7b731cfb283e68ea60fe7a16e810a94e4409 | [
"MIT"
] | null | null | null | import React from 'react';
import type { ExtendedOptions, PageObject, PageProps } from '../commonTypes';
import { getPageComponents } from '../makePageElement';
export default function renderApp({
options,
pageObject,
pageProps,
}: {
options: ExtendedOptions;
pageObject: PageObject;
pageProps: PageProps | undefined;
}): JSX.Element {
const { env } = options;
const { AppComponent, PageComponent } = getPageComponents({
pageObject,
env,
});
return <AppComponent Component={PageComponent} pageProps={pageProps} />;
}
| 24.863636 | 77 | 0.711152 |
b33dc8d5e5e1e8c41ea27f07fd94c0b2c8d6ad02 | 878 | py | Python | awards/myusers/forms.py | dan-mutua/djangowk3 | 9af2651ade9b7cd9c02ee1f93dc60c4f9ea1adeb | [
"MIT"
] | null | null | null | awards/myusers/forms.py | dan-mutua/djangowk3 | 9af2651ade9b7cd9c02ee1f93dc60c4f9ea1adeb | [
"MIT"
] | null | null | null | awards/myusers/forms.py | dan-mutua/djangowk3 | 9af2651ade9b7cd9c02ee1f93dc60c4f9ea1adeb | [
"MIT"
] | null | null | null | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
class RegistrationForm(UserCreationForm):
email = forms.EmailField()
bio = forms.CharField()
class Meta:
model = User
fields=['username','email','bio', 'password1','password2']
    def __init__(self, *args, **kwargs):
        super(RegistrationForm, self).__init__(*args, **kwargs)
        # attach the Bootstrap class to each widget via dict assignment
        self.fields['username'].widget.attrs['class'] = 'form-control'
        self.fields['email'].widget.attrs['class'] = 'form-control'
        self.fields['password1'].widget.attrs['class'] = 'form-control'
        self.fields['password2'].widget.attrs['class'] = 'form-control'
class EditProfile(UserChangeForm):
email = forms.EmailField()
bio = forms.CharField()
class Meta:
model = User
fields=['username','email','bio']
| 31.357143 | 70 | 0.690205 |
e9e0b516a0568bf9ead61e903ac6bdd5fafc546a | 2,578 | rs | Rust | src/bin/day-5/input.rs | drewwyatt/advent-of-code-2020 | 99d9e69f89cbf9d7c575980a8a4209534531414c | [
"MIT"
] | null | null | null | src/bin/day-5/input.rs | drewwyatt/advent-of-code-2020 | 99d9e69f89cbf9d7c575980a8a4209534531414c | [
"MIT"
] | null | null | null | src/bin/day-5/input.rs | drewwyatt/advent-of-code-2020 | 99d9e69f89cbf9d7c575980a8a4209534531414c | [
"MIT"
] | null | null | null | use std::str::FromStr;
const NUMBER_OF_ROWS: i32 = 128;
const NUMBER_OF_SEATS: i32 = 8;
#[derive(Debug)]
pub enum AdventError {
UnrecognizedChar,
InvalidTree,
}
#[derive(Debug)]
enum RowLetter {
F,
B,
}
#[derive(Debug)]
enum SeatLetter {
L,
R,
}
impl RowLetter {
fn from_char(c: char) -> Result<Self, AdventError> {
match c {
'F' => Ok(RowLetter::F),
'B' => Ok(RowLetter::B),
_ => Err(AdventError::UnrecognizedChar),
}
}
}
impl SeatLetter {
fn from_char(c: char) -> Result<Self, AdventError> {
match c {
'L' => Ok(SeatLetter::L),
'R' => Ok(SeatLetter::R),
_ => Err(AdventError::UnrecognizedChar),
}
}
}
#[derive(Debug)]
pub struct BoardingPass {
row_code: Vec<RowLetter>,
seat_code: Vec<SeatLetter>,
}
impl BoardingPass {
pub fn new(code: &str) -> Result<Self, AdventError> {
BoardingPass::from_str(code)
}
pub fn row(&self) -> Result<i32, AdventError> {
let mut tree = Tree::new(NUMBER_OF_ROWS);
for letter in self.row_code.iter() {
match letter {
RowLetter::F => tree.lower(),
RowLetter::B => tree.upper(),
};
}
tree.value()
}
pub fn seat(&self) -> Result<i32, AdventError> {
let mut tree = Tree::new(NUMBER_OF_SEATS);
for letter in self.seat_code.iter() {
match letter {
SeatLetter::L => tree.lower(),
SeatLetter::R => tree.upper(),
};
}
tree.value()
}
pub fn id(&self) -> Result<i32, AdventError> {
let row = self.row()?;
let seat = self.seat()?;
Ok(row * 8 + seat)
}
}
impl FromStr for BoardingPass {
type Err = AdventError;
fn from_str(line: &str) -> Result<Self, Self::Err> {
let mut row_code = vec![];
let mut seat_code = vec![];
for (index, char) in line.chars().enumerate() {
if index < 7 {
row_code.push(RowLetter::from_char(char)?);
} else {
seat_code.push(SeatLetter::from_char(char)?);
}
}
Ok(BoardingPass {
row_code,
seat_code,
})
}
}
// Tree Stuff
#[derive(Debug)]
struct Tree {
start: i32,
size: i32,
}
impl Tree {
fn new(size: i32) -> Self {
Tree { start: 0, size }
}
fn done(&self) -> bool {
self.size == 1
}
fn value(&self) -> Result<i32, AdventError> {
if self.done() {
Ok(self.start)
} else {
Err(AdventError::InvalidTree)
}
}
fn lower(&mut self) -> &Self {
self.size = self.size / 2;
self
}
fn upper(&mut self) -> &Self {
self.size = self.size / 2;
self.start += self.size;
self
}
}
| 17.902778 | 55 | 0.567494 |
15d43458e81378e7925ee852b1e69c2bcfd1a000 | 7,850 | rs | Rust | src/simulate.rs | iburinoc/bh_mhd | 98a548722f760f5d5efd6411d8cd35b81656b319 | [
"MIT"
] | 2 | 2018-01-19T03:58:45.000Z | 2022-03-21T10:52:09.000Z | src/simulate.rs | iburinoc/bh_mhd | 98a548722f760f5d5efd6411d8cd35b81656b319 | [
"MIT"
] | null | null | null | src/simulate.rs | iburinoc/bh_mhd | 98a548722f760f5d5efd6411d8cd35b81656b319 | [
"MIT"
] | 1 | 2022-03-21T10:52:03.000Z | 2022-03-21T10:52:03.000Z | extern crate glium;
use std::cell::RefCell;
use std::fmt;
use glium::{Program,VertexBuffer,IndexBuffer,Display};
use glium::framebuffer::MultiOutputFrameBuffer;
use glium::backend::Facade;
use glium::vertex::VertexBufferAny;
use glium::index::{PrimitiveType,IndexBufferAny};
use glium::texture::Texture2d;
pub struct Data {
pub layers: (Layer, Layer),
pub dimensions: SimDimensions,
pub iter: RefCell<i32>,
}
pub struct SimDimensions {
pub angs: u32,
pub rads: u32,
pub levs: u32,
}
impl Data {
pub fn new(f: &Display) -> Self {
let dims = {
let ang_num = 500u32;
let rad_num = 500u32;
let lev_num = 2u32;
SimDimensions {
angs: ang_num,
rads: rad_num,
levs: lev_num,
}
};
let layers = {
(Layer::new(f, dims.angs, dims.rads, dims.levs),
Layer::new(f, dims.angs, dims.rads, dims.levs))
};
Data { layers: layers,
dimensions: dims,
iter: RefCell::new(0) }
}
pub fn front_layer(&self) -> &Layer {
if *self.iter.borrow() % 2 == 0 {
&self.layers.0
} else {
&self.layers.1
}
}
pub fn back_layer(&self) -> &Layer {
if *self.iter.borrow() % 2 == 1 {
&self.layers.0
} else {
&self.layers.1
}
}
pub fn next_iter(&self) {
*self.iter.borrow_mut() += 1;
}
pub fn draw(&self, f: &Display, program: &Program,
buffers: &(VertexBufferAny, IndexBufferAny), dt: f32) {
use glium::Surface;
use glium::uniforms::{MinifySamplerFilter,MagnifySamplerFilter};
let front = self.front_layer();
let back = self.back_layer();
let outputs = [("v_p", &front.v_p), ("b", &front.b)];
let mut fbo = MultiOutputFrameBuffer::new(f,
outputs.iter().cloned()).unwrap();
let uniforms = uniform! {
rads: self.dimensions.rads,
angs: self.dimensions.angs,
levs: self.dimensions.levs,
dt: dt,
tex_v_p: back.v_p.sampled()
.minify_filter(MinifySamplerFilter::Nearest)
.magnify_filter(MagnifySamplerFilter::Nearest),
tex_b: back.b.sampled()
.minify_filter(MinifySamplerFilter::Nearest)
.magnify_filter(MagnifySamplerFilter::Nearest),
};
fbo.draw(&buffers.0, &buffers.1, program,
&uniforms, &Default::default()).unwrap();
self.next_iter();
}
}
pub struct Layer {
pub v_p: Texture2d,
pub b: Texture2d,
}
impl Layer {
fn new(f: &Display, width: u32, height: u32, depth: u32) -> Self {
let format = glium::texture::UncompressedFloatFormat::F32F32F32F32;
let mipmaps = glium::texture::MipmapsOption::NoMipmap;
Layer {
v_p: Texture2d::empty_with_format(f,
format, mipmaps, width * depth, height).unwrap(),
b: Texture2d::empty_with_format(f,
format, mipmaps, width * depth, height).unwrap(),
}
}
}
impl fmt::Debug for Layer {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Layer: ({:?}, {:?})", self.v_p, self.b)
}
}
pub struct Shaders {
pub initialize: Program,
pub update: Program,
pub buffers: (VertexBufferAny, IndexBufferAny),
}
impl Shaders {
pub fn new(f: &Display) -> Self {
Shaders {
initialize: sim_code::initial(f),
update: sim_code::update(f),
buffers: cover_buffers(f),
}
}
}
fn cover_buffers<F>(f: &F) -> (VertexBufferAny, IndexBufferAny)
where F: Facade {
#[derive(Copy, Clone)]
struct Vertex {
pos: (f32, f32),
}
implement_vertex!(Vertex, pos);
(
VertexBuffer::new(f, &[
Vertex { pos: (-1.0, 1.0) }, Vertex { pos: (1.0, 1.0) },
Vertex { pos: (-1.0, -1.0) }, Vertex { pos: (1.0, -1.0) },
]).unwrap().into(),
IndexBuffer::new(f, PrimitiveType::TriangleStrip,
&[0u8, 1, 2, 3]).unwrap().into()
)
}
mod sim_code {
use glium::{Display,Program};
pub fn initial(f: &Display) -> Program {
::check_program(Program::from_source(f,
VERT_SHADER,
&initial_frag_shader(),
None))
}
pub fn update(f: &Display) -> Program {
::check_program(Program::from_source(f,
VERT_SHADER,
&update_frag_shader(),
None))
}
fn initial_frag_shader() -> String {
gen_frag_shader(INIT)
}
fn update_frag_shader() -> String {
gen_frag_shader(UPDATE)
}
const INIT: &'static str = r#"
float x, v;
if(abs(rind - int(rads)/2) <= 1 && abs(lind - int(levs)/2) <= 1 && abs(aind - int(angs)/2) <= 1) {
x = 10;
v = 0;
} else {
x = 0;
v = 0;
}
v_p = vec4(x, x, x, 1.0);
b = vec4(v, v, v, 1.0);
"#;
const UPDATE: &'static str = r#"
/* take v_p as value, b as derivative */
#define VAL(r, l, a) (lookup(tex_v_p, r, l, a).g)
float div2 = (
(-VAL(wrap(rind-2, rads), lind, aind) + VAL(wrap(rind-1, rads), lind, aind)*16
-VAL(wrap(rind+2, rads), lind, aind) + VAL(wrap(rind+1, rads), lind, aind)*16) +
(-VAL(rind, wrap(lind-2, levs), aind) + VAL(rind, wrap(lind-1, levs), aind)*16
-VAL(rind, wrap(lind+2, levs), aind) + VAL(rind, wrap(lind+1, levs), aind)*16) +
(-VAL(rind, lind, wrap(aind-2, angs)) + VAL(rind, lind, wrap(aind-1, angs))*16
-VAL(rind, lind, wrap(aind+2, angs)) + VAL(rind, lind, wrap(aind+1, angs))*16) +
-VAL(rind, lind, aind) * 90) / 12;
#undef VAL
float x = lookup(tex_v_p, rind, lind, aind).r;
float v = lookup(tex_b, rind, lind, aind).r;
float nx = x + v * dt;
float nv = v + div2 * dt * 10;
v_p = vec4(nx, nx, nx, 1.0);
b = vec4(nv, nv, nv, 1.0);
//v_p = vec4(rind/float(rads), lind/float(levs), aind/float(angs), 1.0);
"#;
const VERT_SHADER: &'static str = r#"
#version 330
in vec2 pos;
out vec2 uv;
void main() {
gl_Position = vec4(pos, 0.0, 1.0);
uv = vec2((pos.x + 1) / 2.0, (pos.y + 1) / 2.0);
}
"#;
const FRAG_PREAMBLE: &'static str = r#"
#version 330
#define M_PI 3.1415926535897932384626433832795
uniform uint rads; /* texture height */
uniform uint angs; /* texture width per section */
uniform uint levs; /* texture section count */
in vec2 uv;
uniform float dt;
uniform sampler2D tex_v_p;
uniform sampler2D tex_b;
layout(location = 0) out vec4 v_p;
layout(location = 1) out vec4 b;
vec4 lookup(sampler2D tex, int rind, int lind, int aind) {
return texelFetch(tex, ivec2(lind * int(angs) + aind, rind), 0);
}
vec3 v_f(vec4 v_p) {
return vec3(v_p);
}
float p_f(vec4 v_p) {
return v_p.w;
}
vec3 b_f(vec4 b) {
return vec3(b);
}
int wrap(int ind, uint max) {
return (ind + int(max)) % int(max);
}
"#;
fn gen_frag_shader(update_func: &'static str) -> String {
format!(r#"
{preamble}
void main() {{
int rind = int(uv.y * rads);
int lind = int(uv.x * levs);
int aind = int(fract(uv.x * levs) * angs);
{update}
}}"#,
preamble = FRAG_PREAMBLE,
update = update_func)
}
}
| 27.16263 | 106 | 0.515414 |
939f257d7f0c536a8a1b44cec028c810a62fd9dd | 2,533 | cs | C# | tests/IntegrationTests/PackageTests/FluentValidation/AddressOptionsFluentValidationTests.cs | photo-cli/photo-cli | ee196580d4f3e419d00399de9f865d8dd947733a | [
"Apache-2.0"
] | 3 | 2022-02-09T12:18:32.000Z | 2022-02-15T21:02:14.000Z | tests/IntegrationTests/PackageTests/FluentValidation/AddressOptionsFluentValidationTests.cs | alpcoker/photo-cli | c3404de4e4d25d11cab9f12e629a6e5a53620be5 | [
"Apache-2.0"
] | null | null | null | tests/IntegrationTests/PackageTests/FluentValidation/AddressOptionsFluentValidationTests.cs | alpcoker/photo-cli | c3404de4e4d25d11cab9f12e629a6e5a53620be5 | [
"Apache-2.0"
] | 2 | 2021-12-18T23:51:50.000Z | 2021-12-20T04:27:59.000Z | namespace PhotoCli.Tests.IntegrationTests.PackageTests.FluentValidation;
public class AddressOptionsFluentValidationTests : BaseFluentValidationTests<AddressOptions, AddressOptionsValidator>
{
[Fact]
public void Null_InputFolderPath_Should_Give_NotNullValidator_Error()
{
var options = new AddressOptions(null!, ReverseGeocodeProviderFakes.Valid(), AddressListTypeFakes.Valid());
CheckPropertyNotNull(options, nameof(AddressOptions.InputPath), Required(nameof(AddressOptions.InputPath), "input", 'i'));
}
[Fact]
public void Using_InputPath_Without_Jpg_Or_Jpeg_Extension_Should_Give_RegularExpressionValidator_And_Verify_Error_Message()
{
var options = new AddressOptions("report.png", ReverseGeocodeProviderFakes.Valid(), AddressListTypeFakes.Valid());
CheckPropertyRegularExpression(options, nameof(AddressOptions.InputPath), $"{nameof(AddressOptions.InputPath)} should have .jpg or .jpeg extension");
}
#region ReverseGeocode Providers
[Fact]
public void When_Using_BigDataCloud_Not_Using_BigDataCloudAdminLevels_Should_Give_NullValidator_And_Verify_Error_Message()
{
var options = AddressOptionsFakes.WithReverseGeocodeService(ReverseGeocodeProvider.BigDataCloud);
CheckPropertyNotEmpty(options, nameof(AddressOptions.BigDataCloudAdminLevels), MustUseMessage(nameof(AddressOptions.BigDataCloudAdminLevels), nameof(ReverseGeocodeProvider.BigDataCloud),
"bigdatacloud-levels", 'v'));
}
[Theory]
[InlineData(ReverseGeocodeProvider.OpenStreetMapFoundation)]
[InlineData(ReverseGeocodeProvider.MapQuest)]
[InlineData(ReverseGeocodeProvider.LocationIq)]
public void When_Using_OpenStreetMap_Not_Using_OpenStreetMapProperties_Should_Give_NullValidator_And_Verify_Error_Message(ReverseGeocodeProvider reverseGeocodeProvider)
{
var options = AddressOptionsFakes.WithReverseGeocodeService(reverseGeocodeProvider);
CheckPropertyNotEmpty(options, nameof(AddressOptions.OpenStreetMapProperties),
MustUseMessage(nameof(AddressOptions.OpenStreetMapProperties), reverseGeocodeProvider.ToString(), "openstreetmap-properties", 'r'));
}
[Fact]
public void When_Using_GoogleMaps_Not_Using_GoogleMapsAddressTypes_Should_Give_NullValidator_And_Verify_Error_Message()
{
var options = AddressOptionsFakes.WithReverseGeocodeService(ReverseGeocodeProvider.GoogleMaps);
CheckPropertyNotEmpty(options, nameof(AddressOptions.GoogleMapsAddressTypes), MustUseMessage(nameof(AddressOptions.GoogleMapsAddressTypes), nameof(ReverseGeocodeProvider.GoogleMaps),
"googlemaps-types", 'm'));
}
#endregion
}
| 50.66 | 188 | 0.84998 |
5d4b8388df4e5a976081a87e4f0385c478f2c995 | 6,187 | cpp | C++ | src/Core/QSafeguard.cpp | ericzh86/qt-toolkit | 63ec071f8989d6efcc4afa30fa98ede695edba27 | [
"MIT"
] | 4 | 2020-01-07T07:05:18.000Z | 2020-01-09T10:25:41.000Z | src/Core/QSafeguard.cpp | ericzh86/qt-toolkit | 63ec071f8989d6efcc4afa30fa98ede695edba27 | [
"MIT"
] | null | null | null | src/Core/QSafeguard.cpp | ericzh86/qt-toolkit | 63ec071f8989d6efcc4afa30fa98ede695edba27 | [
"MIT"
] | null | null | null | #include "QSafeguard.h"
#include "QSafeguard_p.h"
#include <QStringBuilder>
#include <QLoggingCategory>
Q_LOGGING_CATEGORY(lcSafeguard, "QSafeguard")
// class QSafeguard
QSafeguard::QSafeguard(const QString &dumpPath, QObject *parent)
: QObject(parent)
, d_ptr(new QSafeguardPrivate())
{
d_ptr->q_ptr = this;
d_ptr->dumpPath = dumpPath;
}
QSafeguard::QSafeguard(QObject *parent)
: QObject(parent)
, d_ptr(new QSafeguardPrivate())
{
d_ptr->q_ptr = this;
}
QSafeguard::~QSafeguard()
{
}
void QSafeguard::setDumpPath(const QString &path)
{
Q_D(QSafeguard);
d->dumpPath = path;
}
void QSafeguard::setPipeName(const QString &name)
{
Q_D(QSafeguard);
d->pipeName = name;
}
const QString &QSafeguard::dumpPath() const
{
Q_D(const QSafeguard);
return d->dumpPath;
}
const QString &QSafeguard::pipeName() const
{
Q_D(const QSafeguard);
return d->pipeName;
}
bool QSafeguard::createServer()
{
Q_D(QSafeguard);
Q_ASSERT(!d->dumpPath.isEmpty());
Q_ASSERT(!d->pipeName.isEmpty());
#if defined(Q_OS_WIN32)
QString pipeName = QString::fromLatin1("\\\\.\\pipe\\") % d->pipeName;
QSharedPointer<google_breakpad::CrashGenerationServer> crashServer(new google_breakpad::CrashGenerationServer(
pipeName.toStdWString(),
nullptr,
nullptr,
nullptr,
nullptr,
nullptr,
nullptr,
nullptr,
nullptr,
nullptr,
true,
&d->dumpPath.toStdWString()
));
if (!crashServer->Start()) {
qWarning(lcSafeguard, "crash server start failed.");
return false;
}
qInfo(lcSafeguard, "crash server ready...");
d->crashServer = crashServer;
return true;
#else
/*
QString pipeName = QString::fromLatin1("\\\\.\\pipe\\") % d->pipeName;
QSharedPointer<google_breakpad::CrashGenerationServer> crashServer(new google_breakpad::CrashGenerationServer(
pipeName.toStdString().c_str(),
nullptr,
nullptr,
nullptr,
nullptr,
nullptr,
nullptr,
true,
d->dumpPath.toStdString()));
*/
#endif
/*
if (!crashServer->Start()) {
qWarning(lcSafeguard, "crash server start failed.");
return false;
}
qInfo(lcSafeguard, "crash server ready...");
d->crashServer = crashServer;
*/
return false;
}
void QSafeguard::createClient()
{
Q_D(QSafeguard);
Q_ASSERT(!d->dumpPath.isEmpty());
Q_ASSERT(!d->pipeName.isEmpty());
#if defined(Q_OS_WIN32)
QString pipeName = QString::fromLatin1("\\\\.\\pipe\\") % d->pipeName;
d->exceptionHandler.reset(new google_breakpad::ExceptionHandler(d->dumpPath.toStdWString(),
nullptr,
nullptr,
nullptr,
google_breakpad::ExceptionHandler::HANDLER_ALL,
MiniDumpNormal,
pipeName.toStdWString().c_str(),
nullptr));
if (d->exceptionHandler->IsOutOfProcess()) {
qInfo(lcSafeguard, "daemon mode.");
} else {
qInfo(lcSafeguard, "normal mode.");
}
#else
/*
QString pipeName = QString::fromLatin1("\\\\.\\pipe\\") % d->pipeName;
d->exceptionHandler.reset(new google_breakpad::ExceptionHandler(d->dumpPath.toStdString(),
nullptr,
nullptr,
nullptr,
true,
pipeName.toStdString().c_str()));
*/
#endif
/*
if (d->exceptionHandler->IsOutOfProcess()) {
qInfo(lcSafeguard, "daemon mode.");
} else {
qInfo(lcSafeguard, "normal mode.");
}
*/
}
void QSafeguard::makeSnapshot()
{
#if defined(Q_OS_WIN32)
Q_D(QSafeguard);
if (d->exceptionHandler) {
d->exceptionHandler->WriteMinidump();
}
#endif
}
// class QSafeguardPrivate
QSafeguardPrivate::QSafeguardPrivate()
: q_ptr(nullptr)
{
}
QSafeguardPrivate::~QSafeguardPrivate()
{
}
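// --- Illustrative usage sketch (hypothetical calling code, not part of this class) ---
// A watchdog process would typically host the crash server, while the monitored
// application registers itself as a client against the same pipe name:
//   QSafeguard guard(QStringLiteral("dumps"));
//   guard.setPipeName(QStringLiteral("my_app_crash_pipe"));
//   guard.createServer();   // in the watchdog process
//   guard.createClient();   // or, in the application process
//   guard.makeSnapshot();   // write a minidump on demand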
| 33.625 | 115 | 0.390981 |
4b8d1b9ba55a180d12c0162fb4b749c34f3d9a2a | 21,930 | cpp | C++ | pic32/cores/pic32/USB_HS.cpp | jg2562/chipKIT-core | ec18f615a3369f35a8c7ce1d46ff1aa4ee12e17a | [
"Apache-2.0"
] | 54 | 2015-09-08T01:16:21.000Z | 2022-03-29T06:21:54.000Z | pic32/cores/pic32/USB_HS.cpp | jg2562/chipKIT-core | ec18f615a3369f35a8c7ce1d46ff1aa4ee12e17a | [
"Apache-2.0"
] | 363 | 2015-07-30T21:14:46.000Z | 2022-03-14T04:25:27.000Z | pic32/cores/pic32/USB_HS.cpp | jg2562/chipKIT-core | ec18f615a3369f35a8c7ce1d46ff1aa4ee12e17a | [
"Apache-2.0"
] | 73 | 2015-08-01T14:29:53.000Z | 2022-01-25T15:07:13.000Z | /*
* Copyright (c) 2017, Majenko Technologies
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of Majenko Technologies nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <pins_arduino.h>
#if defined(_USB)
#ifdef __PIC32MZ__
#include <Arduino.h>
#include <USB.h>
//#define DEBUG 1
#define KVA_TO_PA(v) ((v) & 0x1fffffff)
#define PA_TO_KVA0(pa) ((pa) | 0x80000000) // cachable
#define PA_TO_KVA1(pa)  ((pa) | 0xa0000000)   // non-cachable
#define D2H(X) ((X & 0xF) < 10 ? '0' + (X & 0xF) : 'A' - 10 + (X & 0xF))
/*-------------- USB FS ---------------*/
USBHS *USBHS::_this;
#define WFB(X) (((X) + 3) / 4)
#ifdef PIN_LED_TX
volatile static uint32_t TXLedTimeout = 0;
static uint32_t TXLedSwitchOff(uint32_t t) {
TXLedTimeout++;
if (TXLedTimeout > 1) {
digitalWrite(PIN_LED_TX, LOW);
detachCoreTimerService(TXLedSwitchOff);
TXLedTimeout = 0;
}
return t + 50000;
}
static void TXOn() {
digitalWrite(PIN_LED_TX, HIGH);
TXLedTimeout = 0;
attachCoreTimerService(TXLedSwitchOff);
}
#else
# define TXOn()
#endif
#ifdef PIN_LED_RX
volatile static uint32_t RXLedTimeout = 0;
static uint32_t RXLedSwitchOff(uint32_t t) {
RXLedTimeout++;
if (RXLedTimeout > 1) {
digitalWrite(PIN_LED_RX, LOW);
detachCoreTimerService(RXLedSwitchOff);
RXLedTimeout = 0;
}
return t + 50000;
}
static void RXOn() {
digitalWrite(PIN_LED_RX, HIGH);
RXLedTimeout = 0;
attachCoreTimerService(RXLedSwitchOff);
}
#else
# define RXOn()
#endif
bool USBFS::enableUSB() {
#ifdef PIN_LED_TX
pinMode(PIN_LED_TX, OUTPUT);
digitalWrite(PIN_LED_TX, LOW);
createTask(TXLedSwitchOff, 10, TASK_ENABLE, NULL);
#endif
#ifdef PIN_LED_RX
pinMode(PIN_LED_RX, OUTPUT);
digitalWrite(PIN_LED_RX, LOW);
createTask(RXLedSwitchOff, 10, TASK_ENABLE, NULL);
#endif
clearIntFlag(_USB_VECTOR);
setIntVector(_USB_VECTOR, _usbInterrupt);
setIntPriority(_USB_VECTOR, 6, 0);
setIntEnable(_USB_VECTOR);
USBCSR0bits.SOFTCONN = 1; // D+/D- active
USBCSR0bits.HSEN = 0; // Full speed negotiation
USBCSR0bits.FUNC = 0; // Address 0
USBCSR2bits.RESETIE = 1;
#if defined(USBCRCON)
USBCRCONbits.USBIE = 1;
#endif
addEndpoint(0, EP_IN, EP_CTL, 64, _ctlRxA, _ctlRxB);
addEndpoint(0, EP_OUT, EP_CTL, 64, _ctlTxA, _ctlTxB);
return true;
}
bool USBHS::enableUSB() {
#ifdef PIN_LED_TX
pinMode(PIN_LED_TX, OUTPUT);
digitalWrite(PIN_LED_TX, LOW);
#endif
#ifdef PIN_LED_RX
pinMode(PIN_LED_RX, OUTPUT);
digitalWrite(PIN_LED_RX, LOW);
#endif
setIntVector(_USB_VECTOR, _usbInterrupt);
setIntPriority(_USB_VECTOR, 6, 0);
clearIntFlag(_USB_VECTOR);
setIntEnable(_USB_VECTOR);
IFS4bits.USBIF = 0;
IEC4bits.USBIE = 1;
USBCSR0bits.SOFTCONN = 1; // D+/D- active
USBCSR0bits.HSEN = 1; // High speed negotiation
USBCSR0bits.FUNC = 0; // Address 0
USBCSR2bits.RESETIE = 1;
#if defined(USBCRCON)
USBCRCONbits.USBIE = 1;
#endif
addEndpoint(0, EP_IN, EP_CTL, 64, _ctlRxA, _ctlRxB);
addEndpoint(0, EP_OUT, EP_CTL, 64, _ctlTxA, _ctlTxB);
return true;
}
bool USBHS::disableUSB() {
clearIntEnable(_USB_VECTOR);
#ifdef PIN_LED_TX
pinMode(PIN_LED_TX, INPUT);
#endif
#ifdef PIN_LED_RX
pinMode(PIN_LED_RX, INPUT);
#endif
USBCSR0bits.SOFTCONN = 0;
USBCSR0bits.FUNC = 0;
IEC4bits.USBIE = 0;
#if defined(USBCRCON)
USBCRCONbits.USBIE = 0;
#endif
USBCSR1bits.EP0IE = 0;
USBE0CSR0bits.TXMAXP = 0;
// for (int i = 0; i < 8; i++) {
// USBCSR3bits.ENDPOINT = i;
// USBIENCSR0bits.TXMAXP = 0;
// USBIENCSR3bits.PROTOCOL = 0b00;
// USBFIFOAbits.RXFIFOAD = 0;
// USBIENCSR0bits.CLRDT = 0;
// USBOTGbits.RXFIFOSZ = 0;
// USBCSR2bits.EP1RXIE = 0;
// USBFIFOAbits.TXFIFOAD = 0;
// USBIENCSR1bits.ISO = 0;
// }
//
// USBCSR2bits.EP1RXIE = 0;
// USBCSR2bits.EP2RXIE = 0;
// USBCSR2bits.EP3RXIE = 0;
// USBCSR2bits.EP4RXIE = 0;
// USBCSR2bits.EP5RXIE = 0;
// USBCSR2bits.EP6RXIE = 0;
// USBCSR2bits.EP7RXIE = 0;
// USBCSR1bits.EP1TXIE = 0;
// USBCSR1bits.EP2TXIE = 0;
// USBCSR1bits.EP3TXIE = 0;
// USBCSR1bits.EP4TXIE = 0;
// USBCSR1bits.EP5TXIE = 0;
// USBCSR1bits.EP6TXIE = 0;
// USBCSR1bits.EP7TXIE = 0;
//
// USBCSR3bits.ENDPOINT = 0;
return true;
}
bool USBHS::addEndpoint(uint8_t id, uint8_t direction, uint8_t type, uint32_t size, uint8_t *a, uint8_t *b) {
if (id > 7) return false;
uint8_t sz = 0;
if (size <= 8192) sz = 0b1101;
if (size <= 4096) sz = 0b1100;
if (size <= 2048) sz = 0b1011;
if (size <= 1024) sz = 0b1010;
if (size <= 512) sz = 0b1001;
if (size <= 256) sz = 0b1000;
if (size <= 128) sz = 0b0111;
if (size <= 64) sz = 0b0110;
if (size <= 32) sz = 0b0101;
if (size <= 16) sz = 0b0100;
if (size <= 8) sz = 0b0011;
if (id == 0) {
USBCSR1bits.EP0IE = 1;
USBE0CSR0bits.TXMAXP = size;
if (direction == EP_IN) {
_endpointBuffers[0].rx[0] = a;
_endpointBuffers[0].rx[1] = b;
} else {
_endpointBuffers[0].tx[0] = a;
_endpointBuffers[0].tx[1] = b;
}
_endpointBuffers[0].size = 64;
} else {
uint8_t ep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = id;
USBIENCSR0bits.TXMAXP = size;
switch (type) {
case EP_CTL: USBIENCSR3bits.PROTOCOL = 0b00; break;
case EP_ISO: USBIENCSR3bits.PROTOCOL = 0b01; break;
case EP_BLK: USBIENCSR3bits.PROTOCOL = 0b10; break;
case EP_INT: USBIENCSR3bits.PROTOCOL = 0b11; break;
}
_endpointBuffers[id].size = size;
if (direction == EP_IN) {
_endpointBuffers[id].rx[0] = a;
_endpointBuffers[id].rx[1] = b;
USBFIFOAbits.RXFIFOAD = _fifoOffset;
USBIENCSR0bits.CLRDT = 1;
USBOTGbits.RXFIFOSZ = sz;
_fifoOffset += size / 8;
            switch (id) {   // enable the RX interrupt for the selected endpoint only
                case 1: USBCSR2bits.EP1RXIE = 1; break;
                case 2: USBCSR2bits.EP2RXIE = 1; break;
                case 3: USBCSR2bits.EP3RXIE = 1; break;
                case 4: USBCSR2bits.EP4RXIE = 1; break;
                case 5: USBCSR2bits.EP5RXIE = 1; break;
                case 6: USBCSR2bits.EP6RXIE = 1; break;
                case 7: USBCSR2bits.EP7RXIE = 1; break;
            }
} else if (direction == EP_OUT) {
_endpointBuffers[id].tx[0] = a;
_endpointBuffers[id].tx[1] = b;
USBFIFOAbits.TXFIFOAD = _fifoOffset;
USBIENCSR0bits.CLRDT = 1;
USBOTGbits.RXFIFOSZ = sz;
_fifoOffset += size / 8;
            switch (id) {   // enable the TX interrupt for the selected endpoint only
                case 1: USBCSR1bits.EP1TXIE = 1; break;
                case 2: USBCSR1bits.EP2TXIE = 1; break;
                case 3: USBCSR1bits.EP3TXIE = 1; break;
                case 4: USBCSR1bits.EP4TXIE = 1; break;
                case 5: USBCSR1bits.EP5TXIE = 1; break;
                case 6: USBCSR1bits.EP6TXIE = 1; break;
                case 7: USBCSR1bits.EP7TXIE = 1; break;
            }
}
if (type == EP_ISO) {
USBIENCSR1bits.ISO = 1;
} else {
USBIENCSR1bits.ISO = 0;
}
USBIENCSR3bits.SPEED = 0b01; // High speed
USBCSR3bits.ENDPOINT = ep;
}
return true;
}
bool USBHS::canEnqueuePacket(uint8_t ep) {
if (ep == 0) {
return (USBE0CSR0bits.TXRDY == 0);
}
bool rdy = false;
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = ep;
rdy = (USBIENCSR0bits.TXPKTRDY == 0);
USBCSR3bits.ENDPOINT = oep;
return rdy;
}
bool USBHS::enqueuePacket(uint8_t ep, const uint8_t *data, uint32_t len) {
uint32_t t = millis();
while (!canEnqueuePacket(ep)) {
if (millis() - t > USB_TX_TIMEOUT) {
return false;
}
}
volatile uint8_t *fifo = NULL;
switch (ep) {
case 0: fifo = (uint8_t *)&USBFIFO0; break;
case 1: fifo = (uint8_t *)&USBFIFO1; break;
case 2: fifo = (uint8_t *)&USBFIFO2; break;
case 3: fifo = (uint8_t *)&USBFIFO3; break;
case 4: fifo = (uint8_t *)&USBFIFO4; break;
case 5: fifo = (uint8_t *)&USBFIFO5; break;
case 6: fifo = (uint8_t *)&USBFIFO6; break;
case 7: fifo = (uint8_t *)&USBFIFO7; break;
}
if (fifo == NULL) return false;
for (uint32_t i = 0; i < len; i++) {
*fifo = data[i];
}
if (ep == 0) {
USBE0CSR0bits.TXRDY = 1;
} else {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = ep;
USBIENCSR0bits.MODE = 1;
USBIENCSR0bits.TXPKTRDY = 1;
USBCSR3bits.ENDPOINT = oep;
}
return true;
}
bool USBHS::sendBuffer(uint8_t ep, const uint8_t *data, uint32_t len) {
uint32_t remain = len;
uint32_t pos = 0;
uint32_t psize = _endpointBuffers[ep].size;
if (len == 0) {
while (1) {
if (canEnqueuePacket(ep)) {
enqueuePacket(ep, NULL, 0);
return true;
}
}
}
while (remain > 0) {
if (canEnqueuePacket(ep)) {
uint32_t toSend = min(remain, psize);
enqueuePacket(ep, &data[pos], toSend);
pos += toSend;
remain -= toSend;
}
}
return true;
}
void USBHS::handleInterrupt() {
int csr0 = 0, csr1 = 0, csr2 = 0;
clearIntFlag(_USB_VECTOR);
do {
uint32_t csr0 = USBCSR0;
bool isEP0IF = (csr0 & (1<<16)) ? true : false;
bool isEP1TXIF = (csr0 & (1<<17)) ? true : false;
bool isEP2TXIF = (csr0 & (1<<18)) ? true : false;
bool isEP3TXIF = (csr0 & (1<<19)) ? true : false;
bool isEP4TXIF = (csr0 & (1<<20)) ? true : false;
bool isEP5TXIF = (csr0 & (1<<21)) ? true : false;
bool isEP6TXIF = (csr0 & (1<<22)) ? true : false;
bool isEP7TXIF = (csr0 & (1<<23)) ? true : false;
uint32_t csr1 = USBCSR1;
bool isEP1RXIF = (csr1 & (1 << 1)) ? true : false;
bool isEP2RXIF = (csr1 & (1 << 2)) ? true : false;
bool isEP3RXIF = (csr1 & (1 << 3)) ? true : false;
bool isEP4RXIF = (csr1 & (1 << 4)) ? true : false;
bool isEP5RXIF = (csr1 & (1 << 5)) ? true : false;
bool isEP6RXIF = (csr1 & (1 << 6)) ? true : false;
bool isEP7RXIF = (csr1 & (1 << 7)) ? true : false;
uint32_t csr2 = USBCSR2;
bool __attribute__((unused)) isRESUMEIF = (csr2 & (1 << 17)) ? true : false;
bool isRESETIF = (csr2 & (1 << 18)) ? true : false;
bool __attribute__((unused)) isSOFIF = (csr2 & (1 << 19)) ? true : false;
bool __attribute__((unused)) isCONNIF = (csr2 & (1 << 20)) ? true : false;
bool __attribute__((unused)) isDISCONIF = (csr2 & (1 << 21)) ? true : false;
bool __attribute__((unused)) isSESSRQIF = (csr2 & (1 << 22)) ? true : false;
bool __attribute__((unused)) isVBUSERRIF = (csr2 & (1 << 23)) ? true : false;
#ifdef DEBUG
if (isEP0IF) Serial.println("EP0IF");
if (isEP1TXIF) Serial.println("EP1TXIF");
if (isEP2TXIF) Serial.println("EP2TXIF");
if (isEP3TXIF) Serial.println("EP3TXIF");
if (isEP4TXIF) Serial.println("EP4TXIF");
if (isEP5TXIF) Serial.println("EP5TXIF");
if (isEP6TXIF) Serial.println("EP6TXIF");
if (isEP7TXIF) Serial.println("EP7TXIF");
if (isEP1RXIF) Serial.println("EP1RXIF");
if (isEP2RXIF) Serial.println("EP2RXIF");
if (isEP3RXIF) Serial.println("EP3RXIF");
if (isEP4RXIF) Serial.println("EP4RXIF");
if (isEP5RXIF) Serial.println("EP5RXIF");
if (isEP6RXIF) Serial.println("EP6RXIF");
if (isEP7RXIF) Serial.println("EP7RXIF");
if (isRESUMEIF) Serial.println("RESUMEIF");
if (isRESETIF) Serial.println("RESETIF");
if (isSOFIF) Serial.println("SOFIF");
if (isDISCONIF) Serial.println("DISCONIF");
if (isSESSRQIF) Serial.println("SESSRQIF");
if (isVBUSERRIF) Serial.println("VBUSERRIF");
#endif
if (isRESETIF) {
addEndpoint(0, EP_IN, EP_CTL, 64, _ctlRxA, _ctlRxB);
addEndpoint(0, EP_OUT, EP_CTL, 64, _ctlTxA, _ctlTxB);
_manager->setEnumerated(false);
}
if (isDISCONIF) {
_manager->setEnumerated(false);
}
volatile uint8_t *fifo;
if (isEP0IF) {
if (USBE0CSR0bits.RXRDY) {
uint32_t pktlen = USBE0CSR2bits.RXCNT;
fifo = (uint8_t *)&USBFIFO0;
for (uint32_t i = 0; i < pktlen; i++) {
_endpointBuffers[0].rx[0][i] = *(fifo + (i & 3));
}
USBE0CSR0bits.RXRDYC = 1;
if (_manager) _manager->onSetupPacket(0, _endpointBuffers[0].rx[0], pktlen);
USBE0CSR0bits.SETENDC = 1;
} else {
if (_manager) _manager->onInPacket(0, _endpointBuffers[0].tx[0], _endpointBuffers[0].size);
}
}
if (isEP1RXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 1;
uint32_t pktlen = USBIENCSR2bits.RXCNT;
fifo = (uint8_t *)&USBFIFO1;
for (uint32_t i = 0; i < pktlen; i++) {
_endpointBuffers[1].rx[0][i] = *(fifo + (i & 3));
}
USBIENCSR1bits.RXPKTRDY = 0;
if (_manager) _manager->onOutPacket(1, _endpointBuffers[1].rx[0], pktlen);
USBCSR3bits.ENDPOINT = oep;
}
if (isEP2RXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 2;
uint32_t pktlen = USBIENCSR2bits.RXCNT;
fifo = (uint8_t *)&USBFIFO2;
for (uint32_t i = 0; i < pktlen; i++) {
_endpointBuffers[2].rx[0][i] = *(fifo + (i & 3));
}
USBIENCSR1bits.RXPKTRDY = 0;
if (_manager) _manager->onOutPacket(2, _endpointBuffers[2].rx[0], pktlen);
USBCSR3bits.ENDPOINT = oep;
}
if (isEP3RXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 3;
uint32_t pktlen = USBIENCSR2bits.RXCNT;
fifo = (uint8_t *)&USBFIFO3;
for (uint32_t i = 0; i < pktlen; i++) {
_endpointBuffers[3].rx[0][i] = *(fifo + (i & 3));
}
USBIENCSR1bits.RXPKTRDY = 0;
if (_manager) _manager->onOutPacket(3, _endpointBuffers[3].rx[0], pktlen);
USBCSR3bits.ENDPOINT = oep;
}
if (isEP4RXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 4;
uint32_t pktlen = USBIENCSR2bits.RXCNT;
fifo = (uint8_t *)&USBFIFO4;
for (uint32_t i = 0; i < pktlen; i++) {
_endpointBuffers[4].rx[0][i] = *(fifo + (i & 3));
}
USBIENCSR1bits.RXPKTRDY = 0;
if (_manager) _manager->onOutPacket(4, _endpointBuffers[4].rx[0], pktlen);
USBCSR3bits.ENDPOINT = oep;
}
if (isEP5RXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 5;
uint32_t pktlen = USBIENCSR2bits.RXCNT;
fifo = (uint8_t *)&USBFIFO5;
for (uint32_t i = 0; i < pktlen; i++) {
_endpointBuffers[5].rx[0][i] = *(fifo + (i & 3));
}
USBIENCSR1bits.RXPKTRDY = 0;
if (_manager) _manager->onOutPacket(5, _endpointBuffers[5].rx[0], pktlen);
USBCSR3bits.ENDPOINT = oep;
}
if (isEP6RXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 6;
uint32_t pktlen = USBIENCSR2bits.RXCNT;
fifo = (uint8_t *)&USBFIFO6;
for (uint32_t i = 0; i < pktlen; i++) {
_endpointBuffers[6].rx[0][i] = *(fifo + (i & 3));
}
USBIENCSR1bits.RXPKTRDY = 0;
if (_manager) _manager->onOutPacket(6, _endpointBuffers[6].rx[0], pktlen);
USBCSR3bits.ENDPOINT = oep;
}
if (isEP7RXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 7;
uint32_t pktlen = USBIENCSR2bits.RXCNT;
fifo = (uint8_t *)&USBFIFO7;
for (uint32_t i = 0; i < pktlen; i++) {
_endpointBuffers[7].rx[0][i] = *(fifo + (i & 3));
}
USBIENCSR1bits.RXPKTRDY = 0;
if (_manager) _manager->onOutPacket(7, _endpointBuffers[7].tx[0], pktlen);
USBCSR3bits.ENDPOINT = oep;
}
if (isEP1TXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 1;
USBIENCSR0bits.MODE = 0;
USBCSR3bits.ENDPOINT = oep;
if (_manager) _manager->onInPacket(1, _endpointBuffers[1].tx[0], _endpointBuffers[1].size);
}
if (isEP2TXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 2;
USBIENCSR0bits.MODE = 0;
USBCSR3bits.ENDPOINT = oep;
if (_manager) _manager->onInPacket(2, _endpointBuffers[2].tx[0], _endpointBuffers[2].size);
}
if (isEP3TXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 3;
USBIENCSR0bits.MODE = 0;
USBCSR3bits.ENDPOINT = oep;
if (_manager) _manager->onInPacket(3, _endpointBuffers[3].rx[0], _endpointBuffers[3].size);
}
if (isEP4TXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 4;
USBIENCSR0bits.MODE = 0;
USBCSR3bits.ENDPOINT = oep;
if (_manager) _manager->onInPacket(4, _endpointBuffers[4].rx[0], _endpointBuffers[4].size);
}
if (isEP5TXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 5;
USBIENCSR0bits.MODE = 0;
USBCSR3bits.ENDPOINT = oep;
if (_manager) _manager->onInPacket(5, _endpointBuffers[5].rx[0], _endpointBuffers[5].size);
}
if (isEP6TXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 6;
USBIENCSR0bits.MODE = 0;
USBCSR3bits.ENDPOINT = oep;
if (_manager) _manager->onInPacket(6, _endpointBuffers[6].rx[0], _endpointBuffers[6].size);
}
if (isEP7TXIF) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = 7;
USBIENCSR0bits.MODE = 0;
USBCSR3bits.ENDPOINT = oep;
if (_manager) _manager->onInPacket(7, _endpointBuffers[7].rx[0], _endpointBuffers[7].size);
}
} while ((csr0 | csr1 | csr2) != 0);
// clearIntFlag(_USB_VECTOR);
}
bool USBHS::setAddress(uint8_t address) {
USBCSR0bits.FUNC = address & 0x7F;
return true;
}
void USBHS::haltEndpoint(uint8_t ep) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = ep;
if (!USBIENCSR0bits.SENDSTALL) {
USBIENCSR0bits.SENDSTALL = 1;
}
USBCSR3bits.ENDPOINT = oep;
}
void USBHS::resumeEndpoint(uint8_t ep) {
uint8_t oep = USBCSR3bits.ENDPOINT;
USBCSR3bits.ENDPOINT = ep;
if (USBIENCSR0bits.SENDSTALL) {
USBIENCSR0bits.SENDSTALL = 0;
USBIENCSR0bits.CLRDT = 1;
}
USBCSR3bits.ENDPOINT = oep;
}
bool USBHS::isIdle(uint8_t ep) {
(void)ep;
return true;
}
int USBHS::populateDefaultSerial(char *defSerial) {
defSerial[0] = D2H(DEVSN1 >> 28);
defSerial[1] = D2H(DEVSN1 >> 24);
defSerial[2] = D2H(DEVSN1 >> 20);
defSerial[3] = D2H(DEVSN1 >> 16);
defSerial[4] = D2H(DEVSN1 >> 12);
defSerial[5] = D2H(DEVSN1 >> 8);
defSerial[6] = D2H(DEVSN1 >> 4);
defSerial[7] = D2H(DEVSN1 >> 0);
defSerial[8] = D2H(DEVSN0 >> 28);
defSerial[9] = D2H(DEVSN0 >> 24);
defSerial[10] = D2H(DEVSN0 >> 20);
defSerial[11] = D2H(DEVSN0 >> 16);
defSerial[12] = D2H(DEVSN0 >> 12);
defSerial[13] = D2H(DEVSN0 >> 8);
defSerial[14] = D2H(DEVSN0 >> 4);
defSerial[15] = D2H(DEVSN0 >> 0);
return 16;
}
void USBHS::resume() {
USBCSR0bits.RESUME = 1;
delay(10);
USBCSR0bits.RESUME = 0;
}
#endif // __PIC32MZ__
#endif // _USB
| 30.416089 | 109 | 0.56995 |
a32449cd937ec80338a6706409781bd74fb958f9 | 94 | ts | TypeScript | backend/src/data/contracts/account/update-token-repository.ts | KPMGE/FreshGift | 68f8e202b020a2251017982319ade4909ea468cf | [
"MIT"
] | 1 | 2022-03-24T13:08:22.000Z | 2022-03-24T13:08:22.000Z | backend/src/data/contracts/account/update-token-repository.ts | KPMGE/FreshGift | 68f8e202b020a2251017982319ade4909ea468cf | [
"MIT"
] | null | null | null | backend/src/data/contracts/account/update-token-repository.ts | KPMGE/FreshGift | 68f8e202b020a2251017982319ade4909ea468cf | [
"MIT"
] | null | null | null | export interface UpdateTokenRepository {
update(id: string, token: string): Promise<void>
}
| 23.5 | 50 | 0.765957 |
4d58c28ab04c6527a0c1f5dede024a5535a780b7 | 233 | js | JavaScript | modulo6/desafios - JS01/ex05.js | W-Carlos/CodeClub | a1035b667fa8b4a47b96bd50fc9219e3768c734e | [
"MIT"
] | null | null | null | modulo6/desafios - JS01/ex05.js | W-Carlos/CodeClub | a1035b667fa8b4a47b96bd50fc9219e3768c734e | [
"MIT"
] | null | null | null | modulo6/desafios - JS01/ex05.js | W-Carlos/CodeClub | a1035b667fa8b4a47b96bd50fc9219e3768c734e | [
"MIT"
] | null | null | null | // Faça um programa que imprima na tela se um nome é igual ao outro nome digitado. Ex: João e João, imprime true. João e Maria, imprime false.
let name1 = "João"
let name2 = "João"
let result = name1 === name2
console.log(result) | 29.125 | 142 | 0.712446 |
6cf42216a2b684f52eac7cfb851926ae2e214161 | 144 | dart | Dart | lib/ui/views/program/program_view_model.dart | DanhDue/stacked_state_mamagement | d55c42867b94d65c7b49143f0e5097a20b4b07fa | [
"Apache-2.0"
] | 3 | 2021-12-25T02:05:11.000Z | 2022-01-07T09:28:56.000Z | lib/ui/views/program/program_view_model.dart | DanhDue/stacked_state_mamagement | d55c42867b94d65c7b49143f0e5097a20b4b07fa | [
"Apache-2.0"
] | null | null | null | lib/ui/views/program/program_view_model.dart | DanhDue/stacked_state_mamagement | d55c42867b94d65c7b49143f0e5097a20b4b07fa | [
"Apache-2.0"
] | null | null | null | import 'package:injectable/injectable.dart';
import 'package:stacked/stacked.dart';
@singleton
class ProgramViewModel extends BaseViewModel {}
| 24 | 47 | 0.819444 |
c93c6cdc77b3157ef5dbea224e3f9a09e8afe0af | 826 | ts | TypeScript | src/modules/config.module.ts | AhmedWaelElsawy/articles | 6094b234f515b97ffd55b249326f3fdc32ba1228 | [
"MIT"
] | null | null | null | src/modules/config.module.ts | AhmedWaelElsawy/articles | 6094b234f515b97ffd55b249326f3fdc32ba1228 | [
"MIT"
] | null | null | null | src/modules/config.module.ts | AhmedWaelElsawy/articles | 6094b234f515b97ffd55b249326f3fdc32ba1228 | [
"MIT"
] | null | null | null | import * as Joi from 'joi';
import { Module } from '@nestjs/common';
import { ConfigModule, ConfigService } from '@nestjs/config';
/**
* Import and provide app configuration related classes.
*
* @module
*/
const ENV = process.env.NODE_ENV;
const EnvFile = ENV === 'test' ? '.env.test' : '.env';
console.log('Current Environment = ', ENV);
@Module({
imports: [
ConfigModule.forRoot({
isGlobal: true,
envFilePath: EnvFile,
validationSchema: Joi.object({
PORT: Joi.number().default(3000),
DATABASE_USER: Joi.string().required(),
DATABASE_PASS: Joi.string().required(),
DATABASE_NAME: Joi.string().required(),
SECRET_KEY: Joi.string().required(),
}),
}),
],
providers: [ConfigService],
exports: [ConfigService],
})
export class AppConfigModule {}
| 25.8125 | 61 | 0.638015 |
b18480d281a6d1d9098ce8917c33c34185894e91 | 130 | sql | SQL | 601-700/620. Not Boring Movies.sql | fanjieqi/LeetCodeRuby | b3eaa127acb61c7352b585240882b907ec2174fe | [
"MIT"
] | 2 | 2020-04-01T07:14:00.000Z | 2020-10-24T12:53:18.000Z | 601-700/620. Not Boring Movies.sql | fanjieqi/LeetCodeRuby | b3eaa127acb61c7352b585240882b907ec2174fe | [
"MIT"
] | null | null | null | 601-700/620. Not Boring Movies.sql | fanjieqi/LeetCodeRuby | b3eaa127acb61c7352b585240882b907ec2174fe | [
"MIT"
] | 2 | 2020-10-24T12:53:32.000Z | 2021-09-17T03:26:15.000Z | # Write your MySQL query statement below
select *
from cinema
where
id % 2 = 1
and description <> "boring"
order by rating desc
;
| 14.444444 | 40 | 0.730769 |
9770ca21741aff53fbd3dd1867b919515b53f036 | 208 | rb | Ruby | db/migrate/20160211061750_add_defaults_to_repositories_and_environments.rb | shammishailaj/slashdeploy | e93dd05b87518b90595a4d9ceedf63368795f8ae | [
"BSD-2-Clause"
] | 47 | 2017-06-20T05:51:59.000Z | 2021-07-09T02:24:10.000Z | db/migrate/20160211061750_add_defaults_to_repositories_and_environments.rb | shammishailaj/slashdeploy | e93dd05b87518b90595a4d9ceedf63368795f8ae | [
"BSD-2-Clause"
] | 64 | 2017-05-04T01:37:06.000Z | 2022-03-30T22:18:30.000Z | db/migrate/20160211061750_add_defaults_to_repositories_and_environments.rb | shammishailaj/slashdeploy | e93dd05b87518b90595a4d9ceedf63368795f8ae | [
"BSD-2-Clause"
] | 13 | 2017-12-07T18:57:15.000Z | 2022-03-23T06:39:21.000Z | class AddDefaultsToRepositoriesAndEnvironments < ActiveRecord::Migration
def change
add_column :repositories, :default_environment, :string
add_column :environments, :default_ref, :string
end
end
| 29.714286 | 72 | 0.802885 |
da723ae3513dd6ac6b3902d8fc25eb3b8ab9c1d4 | 866 | php | PHP | app/Assist.php | Coolpix/lahmp-api | b0a826f1cba4d923a6c10ad5d6a0f47c01eb5d91 | [
"MIT"
] | null | null | null | app/Assist.php | Coolpix/lahmp-api | b0a826f1cba4d923a6c10ad5d6a0f47c01eb5d91 | [
"MIT"
] | null | null | null | app/Assist.php | Coolpix/lahmp-api | b0a826f1cba4d923a6c10ad5d6a0f47c01eb5d91 | [
"MIT"
] | null | null | null | <?php
namespace App;
use Illuminate\Database\Eloquent\Model;
class Assist extends Model
{
protected $fillable = [
];
/**
* The goal that belong to the assist.
*/
public function goal()
{
return $this->belongsTo('App\Goal');
}
/**
* The team that belong to the assist.
*/
public function team()
{
return $this->belongsTo('App\Team');
}
/**
* The player that belong to the assist.
*/
public function player()
{
return $this->belongsTo('App\Player');
}
/**
* The match that belong to the assist.
*/
public function match()
{
return $this->belongsTo('App\Match');
}
/**
* The season that belong to the assist.
*/
public function season()
{
return $this->belongsTo('App\Season');
}
}
| 16.339623 | 46 | 0.535797 |
7981edda939b4d947d5205df38efd5440894c86b | 372 | php | PHP | app/Client.php | arianpour/SimpleApp01 | 078fdd7f47334a43e06b4e8491b522d6532c1d46 | [
"MIT"
] | null | null | null | app/Client.php | arianpour/SimpleApp01 | 078fdd7f47334a43e06b4e8491b522d6532c1d46 | [
"MIT"
] | null | null | null | app/Client.php | arianpour/SimpleApp01 | 078fdd7f47334a43e06b4e8491b522d6532c1d46 | [
"MIT"
] | null | null | null | <?php namespace App;
use Illuminate\Database\Eloquent\Model;
class Client extends Model {
//
protected $fillable=['firstName','lastName','idNumber','nationality'
,'client_role'];
public function user()
{
return $this->belongsTo('App\User');
}
public function AddressBook(){
return $this->hasOne('App\AddressBook');
}
}
| 16.173913 | 72 | 0.63172 |
20a5fa7d28093e90b6fde03c6bb8630dfaf71d8b | 12,946 | py | Python | pre_processing.py | meliude/ERAINTERIM | 9d9ab640dc4e04d04443095fcb10ff24f3fb7c6f | [
"MIT"
] | null | null | null | pre_processing.py | meliude/ERAINTERIM | 9d9ab640dc4e04d04443095fcb10ff24f3fb7c6f | [
"MIT"
] | null | null | null | pre_processing.py | meliude/ERAINTERIM | 9d9ab640dc4e04d04443095fcb10ff24f3fb7c6f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 24 17:44:54 2019
@author: meliude
"""
import pygrib
import pandas as pd
years = ['1998.grib', '1999.grib', '2000.grib', '2001.grib', '2002.grib', '2003.grib', '2004.grib', '2005.grib', '2006.grib', '2007.grib', '2008.grib']
data = []
dp = [] #dewpoint
tp = [] #totalprecipitation
for i in range(0,11):
data.append(pygrib.open(years[i]))
dp.append(data[i].select(name = '2 metre dewpoint temperature'))
tp.append(data[i].select(name = 'Total precipitation'))
#leapyears: 2000, 2004 and 2008
##jan(0,31),feb(31,60),mar(60,91),apr(91,121),may(121,152),jun(152,182),jul(182,213),aug(213,244)
##sep(244,274),oct(274,305),nov(305,335),dec(335,366)
#for dewpoint temperature
jan_dp,feb_dp,mar_dp,apr_dp,may_dp,jun_dp,jul_dp,aug_dp,sep_dp,oct_dp,nov_dp,dec_dp=[], [], [], [], [], [], [], [], [], [], [], []
#convert Kelvin to Celsius
def convert(x):
return x-273.15
for i in range(0,11):
    if i in (2, 6, 10):  # leap years: 2000, 2004 and 2008
        for j in range(0,31):
            jan_dp.append(convert(dp[i][j].values))
        for j in range(31,60):
            feb_dp.append(convert(dp[i][j].values))
        for j in range(60,91):
            mar_dp.append(convert(dp[i][j].values))
        for j in range(91,121):
            apr_dp.append(convert(dp[i][j].values))
        for j in range(121,152):
            may_dp.append(convert(dp[i][j].values))
        for j in range(152,182):
            jun_dp.append(convert(dp[i][j].values))
        for j in range(182,213):
            jul_dp.append(convert(dp[i][j].values))
        for j in range(213,244):
            aug_dp.append(convert(dp[i][j].values))
        for j in range(244,274):
            sep_dp.append(convert(dp[i][j].values))
        for j in range(274,305):
            oct_dp.append(convert(dp[i][j].values))
        for j in range(305,335):
            nov_dp.append(convert(dp[i][j].values))
        for j in range(335,366):
            dec_dp.append(convert(dp[i][j].values))
    else:
        for j in range(0,31):
            jan_dp.append(convert(dp[i][j].values))
        for j in range(31,59):
            feb_dp.append(convert(dp[i][j].values))
        for j in range(59,90):
            mar_dp.append(convert(dp[i][j].values))
        for j in range(90,120):
            apr_dp.append(convert(dp[i][j].values))
        for j in range(120,151):
            may_dp.append(convert(dp[i][j].values))
        for j in range(151,181):
            jun_dp.append(convert(dp[i][j].values))
        for j in range(181,212):
            jul_dp.append(convert(dp[i][j].values))
        for j in range(212,243):
            aug_dp.append(convert(dp[i][j].values))
        for j in range(243,273):
            sep_dp.append(convert(dp[i][j].values))
        for j in range(273,304):
            oct_dp.append(convert(dp[i][j].values))
        for j in range(304,334):
            nov_dp.append(convert(dp[i][j].values))
        for j in range(334,365):
            dec_dp.append(convert(dp[i][j].values))
#jan,mar,may,jul,aug,oct,dec
def totalsum(x,a=1,b=2):
    # sum of the daily grids from index a through b inclusive (one month of one year)
    return sum(x[a:b+1])
##0,30,31,61,62,92,93,123,124,154,155,185,186,216,217,247,248,278,279,309,310,340
lb=[0,31,62,93,124,155,186,217,248,279,310]
up=[30,61,92,123,154,185,216,247,278,309,340]
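# each (lb[k], up[k]) pair delimits year k's slice of the concatenated 31-day-month
# lists built above (11 years of daily grids appended back to back)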
jan_tdp, mar_tdp, may_tdp, jul_tdp, aug_tdp,oct_tdp,dec_tdp=[], [], [], [], [], [], []
for i,j in zip(lb,up):
jan_tdp.append(totalsum(jan_dp,a=i,b=j))
mar_tdp.append(totalsum(mar_dp,a=i,b=j))
may_tdp.append(totalsum(may_dp,a=i,b=j))
jul_tdp.append(totalsum(jul_dp,a=i,b=j))
aug_tdp.append(totalsum(aug_dp,a=i,b=j))
oct_tdp.append(totalsum(oct_dp,a=i,b=j))
dec_tdp.append(totalsum(dec_dp,a=i,b=j))
jan_tdp, mar_tdp, may_tdp, jul_tdp, aug_tdp,oct_tdp,dec_tdp=sum(jan_tdp)/11, sum(mar_tdp)/11, sum(may_tdp)/11, sum(jul_tdp)/11, sum(aug_tdp)/11, sum(oct_tdp)/11, sum(dec_tdp)/11
pd.DataFrame(jan_tdp).to_csv('grid_jan.csv')
pd.DataFrame(mar_tdp).to_csv('grid_mar.csv')
pd.DataFrame(may_tdp).to_csv('grid_may.csv')
pd.DataFrame(jul_tdp).to_csv('grid_jul.csv')
pd.DataFrame(aug_tdp).to_csv('grid_aug.csv')
pd.DataFrame(oct_tdp).to_csv('grid_oct.csv')
pd.DataFrame(dec_tdp).to_csv('grid_dec.csv')
#apr,jun,sep,nov
##0,29,30,59,60,89,90,119,120,149,150,179,180,209,210,239,240,269,270,299,300,329
lb=[0,30,60,90,120,150,180,210,240,270,300]
ub=[29,59,89,119,149,179,209,239,269,299,329]
apr_tdp, jun_tdp, sep_tdp, nov_tdp=[], [], [], []
for i,j in zip(lb,ub):
apr_tdp.append(totalsum(apr_dp,a=i,b=j))
jun_tdp.append(totalsum(jun_dp,a=i,b=j))
sep_tdp.append(totalsum(sep_dp,a=i,b=j))
nov_tdp.append(totalsum(nov_dp,a=i,b=j))
apr_tdp, jun_tdp, sep_tdp, nov_tdp=sum(apr_tdp)/11, sum(jun_tdp)/11, sum(sep_tdp)/11, sum(nov_tdp)/11
pd.DataFrame(apr_tdp).to_csv('grid_apr.csv')
pd.DataFrame(jun_tdp).to_csv('grid_jun.csv')
pd.DataFrame(sep_tdp).to_csv('grid_sep.csv')
pd.DataFrame(nov_tdp).to_csv('grid_nov.csv')
#feb
##0,27,28,55,56,84,85,112,113,140,141,168,169,197,198,225,226,253,254,281,282,310
lb=[0,28,56,85,113,141,169,198,226,254,282]
ub=[27,55,84,112,140,168,197,225,253,281,310]
feb_tdp=[]
for i,j in zip(lb,ub):
feb_tdp.append(totalsum(feb_dp,a=i,b=j))
feb_tdp=sum(feb_tdp)/11
pd.DataFrame(feb_tdp).to_csv('grid_feb.csv')
############################################################################################
#for total precipitation
jan_tp,feb_tp,mar_tp,apr_tp,may_tp,jun_tp,jul_tp,aug_tp,sep_tp,oct_tp,nov_tp,dec_tp=[], [], [], [], [], [], [], [], [], [], [], []
for i in range(0,11):
if i==2:
for j in range(0,31):
jan_tp.append(tp[i][j].values)
for j in range(31,60):
feb_tp.append(tp[i][j].values)
for j in range(60,91):
mar_tp.append(tp[i][j].values)
for j in range(91,121):
apr_tp.append(tp[i][j].values)
for j in range(121,152):
may_tp.append(tp[i][j].values)
for j in range(152,182):
jun_tp.append(tp[i][j].values)
for j in range(182,213):
jul_tp.append(tp[i][j].values)
for j in range(213,244):
aug_tp.append(tp[i][j].values)
for j in range(244,274):
sep_tp.append(tp[i][j].values)
for j in range(274,305):
oct_tp.append(tp[i][j].values)
for j in range(305,335):
nov_tp.append(tp[i][j].values)
for j in range(335,366):
dec_tp.append(tp[i][j].values)
elif i==6:
for j in range(0,31):
jan_tp.append(tp[i][j].values)
for j in range(31,60):
feb_tp.append(tp[i][j].values)
for j in range(60,91):
mar_tp.append(tp[i][j].values)
for j in range(91,121):
apr_tp.append(tp[i][j].values)
for j in range(121,152):
may_tp.append(tp[i][j].values)
for j in range(152,182):
jun_tp.append(tp[i][j].values)
for j in range(182,213):
jul_tp.append(tp[i][j].values)
for j in range(213,244):
aug_tp.append(tp[i][j].values)
for j in range(244,274):
sep_tp.append(tp[i][j].values)
for j in range(274,305):
oct_tp.append(tp[i][j].values)
for j in range(305,335):
nov_tp.append(tp[i][j].values)
for j in range(335,366):
dec_tp.append(tp[i][j].values)
elif i==10:
for j in range(0,31):
jan_tp.append(tp[i][j].values)
for j in range(31,60):
feb_tp.append(tp[i][j].values)
for j in range(60,91):
mar_tp.append(tp[i][j].values)
for j in range(91,121):
apr_tp.append(tp[i][j].values)
for j in range(121,152):
may_tp.append(tp[i][j].values)
for j in range(152,182):
jun_tp.append(tp[i][j].values)
for j in range(182,213):
jul_tp.append(tp[i][j].values)
for j in range(213,244):
aug_tp.append(tp[i][j].values)
for j in range(244,274):
sep_tp.append(tp[i][j].values)
for j in range(274,305):
oct_tp.append(tp[i][j].values)
for j in range(305,335):
nov_tp.append(tp[i][j].values)
for j in range(335,366):
dec_tp.append(tp[i][j].values)
else:
for j in range(0,31):
jan_tp.append(tp[i][j].values)
for j in range(31,59):
feb_tp.append(tp[i][j].values)
for j in range(59,90):
mar_tp.append(tp[i][j].values)
for j in range(90,120):
apr_tp.append(tp[i][j].values)
for j in range(120,151):
may_tp.append(tp[i][j].values)
for j in range(151,181):
jun_tp.append(tp[i][j].values)
for j in range(181,212):
jul_tp.append(tp[i][j].values)
for j in range(212,243):
aug_tp.append(tp[i][j].values)
for j in range(243,273):
sep_tp.append(tp[i][j].values)
for j in range(273,304):
oct_tp.append(tp[i][j].values)
for j in range(304,334):
nov_tp.append(tp[i][j].values)
for j in range(334,365):
dec_tp.append(tp[i][j].values)
#jan,mar,may,jul,aug,oct,dec
lb=[0,31,62,93,124,155,186,217,248,279,310]
up=[30,61,92,123,154,185,216,247,278,309,340]
jan_ttp, mar_ttp, may_ttp, jul_ttp, aug_ttp,oct_ttp,dec_ttp=[], [], [], [], [], [], []
for i,j in zip(lb,up):
jan_ttp.append(totalsum(jan_tp,a=i,b=j))
mar_ttp.append(totalsum(mar_tp,a=i,b=j))
may_ttp.append(totalsum(may_tp,a=i,b=j))
jul_ttp.append(totalsum(jul_tp,a=i,b=j))
aug_ttp.append(totalsum(aug_tp,a=i,b=j))
oct_ttp.append(totalsum(oct_tp,a=i,b=j))
dec_ttp.append(totalsum(dec_tp,a=i,b=j))
jan_ttp, mar_ttp, may_ttp, jul_ttp, aug_ttp,oct_ttp,dec_ttp=sum(jan_ttp)/11, sum(mar_ttp)/11, sum(may_ttp)/11, sum(jul_ttp)/11, sum(aug_ttp)/11, sum(oct_ttp)/11, sum(dec_ttp)/11
pd.DataFrame(jan_ttp).to_csv('grid_jan_p.csv')
pd.DataFrame(mar_ttp).to_csv('grid_mar_p.csv')
pd.DataFrame(may_ttp).to_csv('grid_may_p.csv')
pd.DataFrame(jul_ttp).to_csv('grid_jul_p.csv')
pd.DataFrame(aug_ttp).to_csv('grid_aug_p.csv')
pd.DataFrame(oct_ttp).to_csv('grid_oct_p.csv')
pd.DataFrame(dec_ttp).to_csv('grid_dec_p.csv')
#apr,jun,sep,nov
lb=[0,30,60,90,120,150,180,210,240,270,300]
ub=[29,59,89,119,149,179,209,239,269,299,329]
apr_ttp, jun_ttp, sep_ttp, nov_ttp=[], [], [], []
for i,j in zip(lb,ub):
apr_ttp.append(totalsum(apr_tp,a=i,b=j))
jun_ttp.append(totalsum(jun_tp,a=i,b=j))
sep_ttp.append(totalsum(sep_tp,a=i,b=j))
nov_ttp.append(totalsum(nov_tp,a=i,b=j))
apr_ttp, jun_ttp, sep_ttp, nov_ttp=sum(apr_ttp)/11, sum(jun_ttp)/11, sum(sep_ttp)/11, sum(nov_ttp)/11
pd.DataFrame(apr_ttp).to_csv('grid_apr_p.csv')
pd.DataFrame(jun_ttp).to_csv('grid_jun_p.csv')
pd.DataFrame(sep_ttp).to_csv('grid_sep_p.csv')
pd.DataFrame(nov_ttp).to_csv('grid_nov_p.csv')
#feb
lb=[0,28,56,85,113,141,169,198,226,254,282]
ub=[27,55,84,112,140,168,197,225,253,281,310]
feb_ttp=[]
for i,j in zip(lb,ub):
feb_ttp.append(totalsum(feb_tp,a=i,b=j))
feb_ttp=sum(feb_ttp)/11
pd.DataFrame(feb_ttp).to_csv('grid_feb_p.csv') | 37.308357 | 177 | 0.584659 |
5e2d035f351c39b6e834035b85b739203fd8b944 | 1,827 | sql | SQL | db/companydata_seeds.sql | jonathanschimpf/Employee-Content-Management-System | 6896248f52d5516c0f5092debc49af594d3018f4 | [
"MIT"
] | null | null | null | db/companydata_seeds.sql | jonathanschimpf/Employee-Content-Management-System | 6896248f52d5516c0f5092debc49af594d3018f4 | [
"MIT"
] | null | null | null | db/companydata_seeds.sql | jonathanschimpf/Employee-Content-Management-System | 6896248f52d5516c0f5092debc49af594d3018f4 | [
"MIT"
] | null | null | null | USE companydata_db;
-- department_info (insert into)
INSERT INTO department_info (department_name)
VALUES
("Web Development"),
("Sales"),
("Accounting"),
("Legal"),
("Information Technology"),
("Human Resources");
SELECT * FROM department_info;
-- role_info (insert into)
INSERT INTO role_info (title, salary, department_id)
VALUES
("Web Development Manager", 175000.00, 1),
("Sales Department Manager", 200000.00, 2),
("Accounting Department Manager", 200000.00, 3),
("Legal Team Manager", 200000.00, 4),
("Information Technology Manager", 150000.00, 5),
("Human Resources Manager", 100000.00, 6),
("Web Developer", 85000.00, 1),
("Sales Representative", 70000.00, 2),
("Accountant", 80000.00, 3),
("Lawyer", 100000.00, 4),
("IT Specialist", 70000.00, 5),
("HR Generalist", 70000.00, 6);
SELECT * FROM role_info;
-- Employee Managers (insert into)
INSERT INTO employee_info (first_name, last_name, role_id)
VALUES
("Lucille", "Luvsthenet", 1),
("Tony", "Revenue", 2),
("Johnny", "Numbers", 3),
("Daisuke", "SueYou", 4),
("Harry", "Helpdesk", 5),
("Patricia", "Peepleperson", 6);
-- Employees With Managers
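-- manager_id references the auto-increment id of the manager rows inserted above
-- (assuming a fresh table: 1 = Web Development manager ... 6 = Human Resources manager)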
INSERT INTO employee_info (first_name, last_name, role_id, manager_id)
VALUES
("Carl", "Codesmith", 7, 1),
("Jennifer", "JavaScript", 7, 1),
("Vincent", "VeeEscode", 7, 1),
("Matthew", "Markup", 7, 1),
("Sharon", "Shmoneymaker", 8, 2),
("Christopher", "Cashflow", 8, 2),
("Jim", "Green", 8, 2),
("Bethany", "BalanceDebooks", 9, 3),
("Tara", "Texasinstrument", 9, 3),
("Darryl", "Divide", 9, 3),
("Carlene", "Counsel", 10, 4),
("Angel", "Advisor", 10, 4),
("Patty", "Procurator", 10, 4),
("Nevin", "Knowstech", 11, 5),
("Ivan", "Install", 11, 5),
("Desi", "Didjureboot", 11, 5),
("Robert", "Relations", 12, 6),
("Peggy", "Payroll", 12, 6),
("Timothy", "Training", 12, 6);
SELECT * FROM employee_info;
| 24.039474 | 70 | 0.650246 |
b090937b77e4b161e34dcdaa0913478afbf0ff58 | 3,571 | py | Python | src/regev/signatures.py | mmiguel6288code/regev | 52f9911b57c348ff5adb7901b9d8cb72ea45ba17 | [
"MIT"
] | null | null | null | src/regev/signatures.py | mmiguel6288code/regev | 52f9911b57c348ff5adb7901b9d8cb72ea45ba17 | [
"MIT"
] | null | null | null | src/regev/signatures.py | mmiguel6288code/regev | 52f9911b57c348ff5adb7901b9d8cb72ea45ba17 | [
"MIT"
] | null | null | null | from __future__ import print_function, unicode_literals, division
from states import InProgressState, DetectedState, StateData
class Signature():
construction_complete = False
def __init__(self):
self.construction_complete = True
def __setattr__(self,name,value):
if not self.construction_complete:
object.__setattr__(self,name,value)
else:
raise Exception('Signature objects are immutable. Attribute assignment not supported.')
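    # attribute writes are only allowed while construction_complete is still the
    # class-level False; once an instance sets it to True the object is frozen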
def __add__(self,other):
...
def __and__(self,other):
...
def __or__(self,other):
...
def __mul__(self,operand):
...
def __hash__(self):
return hash((self.__class__,) + self.__getstate__())
def __eq__(self,other):
if isinstance(other,Signature):
return (self.__class__,)+self.__getstate__() == (other.__class__,) + other.__getstate__()
else:
return False
def __ne__(self,other):
return not (self == other)
class LetterSignature(Signature):
def alphabet(self):
yield self.letter
class PatternSignature(Signature):
def alphabet(self):
for element in self.elements:
yield from element.alphabet()
class OBS(LetterSignature):
def __init__(self,name,tag=None):
self.name = name
self.letter = (name,frozenset())
self.tag = tag
self.construction_complete = True
def __getstate__(self):
return (self.name,self.tag)
def __setstate__(self,state):
object.__setattr__(self,'construction_complete',False)
self.name,self.tag = state
self.construction_complete = True
def update(self,state,timestamp,name,attrs):
return DetectedState(self,timestamp,timestamp,None)
class CON(LetterSignature):
def __init__(self,name,*constraints,tag=None):
self.name = name
self.constraints = frozenset(tuple(constraint) for constraint in constraints)
self.letter = (name,constraints)
self.tag = tag
self.construction_complete = True
def __getstate__(self):
return (self.name,self.constraints,self.tag)
def __setstate__(self,state):
object.__setattr__(self,'construction_complete',False)
self.name,self.constraints,self.tag = state
self.construction_complete = True
def update(self,state,timestamp,name,attrs):
return DetectedState(self,timestamp,timestamp,None)
class ALL(PatternSignature):
def __init__(self,*elements):
accepted_elements = set()
for element in elements:
if isinstance(element,ALL):
for sub_element in element.elements:
accepted_elements.add(sub_element)
elif isinstance(element,str):
accepted_elements.add(OBS(element))
else:
accepted_elements.add(element)
self.elements = frozenset(accepted_elements)
def __getstate__(self):
return (self.elements,)
def __setstate__(self,state):
object.__setattr__(self,'construction_complete',False)
self.elements, = state
self.construction_complete = True
def update(self,state,timestamp,name,attrs):
if state is None:
state = InProgressState(self,timestamp,timestamp,StateData(in_progress={},detected={}))
for element in self.elements:
...
class ANY(PatternSignature):
...
class SEQ(PatternSignature):
...
class REPMIN(PatternSignature):
...
class REPMAX(PatternSignature):
...
| 33.064815 | 101 | 0.653038 |
4459615ac22e8065f2b66d59b282e9c4800dc54a | 90 | py | Python | tests/test_api/test_authentication/test_oauth/protocols/__init__.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | tests/test_api/test_authentication/test_oauth/protocols/__init__.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | tests/test_api/test_authentication/test_oauth/protocols/__init__.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | from .used_meta_user import HasUsedMetaUserFixture
__all__ = ['HasUsedMetaUserFixture']
| 18 | 50 | 0.833333 |
b4c1019def2ab365581f470bd31b150deef1f1b1 | 4,785 | rb | Ruby | spec/requests/api/v1/admin/articles/create_spec.rb | EevanR/the-new-herald-api | 738e5962a41051aa4e3075fa1e6df8014e6d6e54 | [
"MIT"
] | null | null | null | spec/requests/api/v1/admin/articles/create_spec.rb | EevanR/the-new-herald-api | 738e5962a41051aa4e3075fa1e6df8014e6d6e54 | [
"MIT"
] | 3 | 2021-05-19T22:07:18.000Z | 2021-09-28T01:29:32.000Z | spec/requests/api/v1/admin/articles/create_spec.rb | EevanR/the-new-herald-api | 738e5962a41051aa4e3075fa1e6df8014e6d6e54 | [
"MIT"
] | null | null | null | RSpec.describe 'POST /api/v1/admin/articles', type: :request do
let(:journalist) { create(:journalist)}
let(:journalist_credentials) { journalist.create_new_auth_token }
let!(:journalist_headers) { { HTTP_ACCEPT: 'application/json' }.merge!(journalist_credentials) }
let(:image) do
{
type: 'application/image',
encoder: 'name=article_picture.jpg;base64',
data: 'iVBORw0KGgoAAAANSUhEUgAABjAAAAOmCAYAAABFYNwHAAAgAElEQVR4XuzdB3gU1cLG8Te9EEgISQi9I71KFbBXbFixN6zfvSiIjSuKInoVFOyIDcWuiKiIol4Q6SBVOtI7IYSWBkm',
extension: 'jpg'
}
end
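  # base64-encoded image fixture used as the upload payload in the request specs below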
describe 'Successfully with valid params and user' do
before do
post "/api/v1/admin/articles",
params: {
article: {
title_en: "Article 1",
body_en: "Some content",
category: "tech",
image: image
}
},
headers: journalist_headers
end
it 'returns a 200 response status' do
expect(response).to have_http_status 200
end
it 'shows that an image has been attached successfully' do
article = Article.find_by(title: "Article 1")
expect(article.image.attached?).to eq true
end
end
describe 'unsuccessfully with' do
describe 'no title and content' do
before do
post "/api/v1/admin/articles",
params: {
article: {
title: nil,
body: ""
}
},
headers: journalist_headers
end
it 'returns a 422 response status' do
expect(response).to have_http_status 422
end
it 'returns error message' do
expect(response_json["error"]).to eq ["Title can't be blank", "Body can't be blank"]
end
end
describe 'has title and content but no image' do
before do
post "/api/v1/admin/articles",
params: {
article: {
title_en: "Article 2",
body_en: "Some Content"
}
},
headers: journalist_headers
end
it 'returns a 422 response status' do
expect(response).to have_http_status 422
end
it 'returns no article' do
expect(Article.find_by(title: "Article 2")).to eq nil
end
it 'returns error message' do
expect(response_json["error"]).to eq "Please attach an image"
end
end
describe 'non logged in user' do
let!(:non_authorized_headers) { { HTTP_ACCEPT: 'application/json' } }
describe 'in english' do
before do
post "/api/v1/admin/articles",
params: {
article: {
title: 'Title',
body: "Some content"
}
},
headers: non_authorized_headers
end
it 'returns a 401 response status' do
expect(response).to have_http_status 401
end
it 'returns error message' do
expect(response_json["errors"][0]).to eq "You need to sign in or sign up before continuing."
end
end
describe 'in swedish' do
before do
post "/api/v1/admin/articles",
params: {
article: {
title: 'Title',
body: "Some content"
},
locale: :sv
},
headers: non_authorized_headers
end
it 'returns error message in swedish' do
expect(response_json["errors"][0]).to eq "Du måste bli medlem eller logga in för att fortsätta."
end
end
end
describe 'user that is not a journalist' do
let(:regular_user) { create(:user, role: 'user')}
let(:regular_user_credentials) { regular_user.create_new_auth_token }
let!(:regular_user_headers) { { HTTP_ACCEPT: 'application/json' }.merge!(regular_user_credentials) }
describe 'in english' do
before do
post "/api/v1/admin/articles",
params: {
article: {
title: "Title",
body: "Some content"
}
},
headers: regular_user_headers
end
it 'returns a 404 response status' do
expect(response).to have_http_status 404
end
it 'returns error message' do
expect(response_json["error"]).to eq "Not authorized!"
end
end
describe 'in swedish' do
before do
post "/api/v1/admin/articles",
params: {
article: {
title: "Title",
body: "Some content"
},
locale: :sv
},
headers: regular_user_headers
end
it 'returns error message in swedish' do
expect(response_json["error"]).to eq "Åtkomst nekad"
end
end
end
end
end | 27.5 | 154 | 0.56489 |
09a26331b25205c797447a583f4c990ae9da3af2 | 519 | asm | Assembly | test/actual/commandc.asm | amisonnet8/hack | d834e1f799a7afcf68e4ed6dfe087f6768398988 | [
"MIT"
] | null | null | null | test/actual/commandc.asm | amisonnet8/hack | d834e1f799a7afcf68e4ed6dfe087f6768398988 | [
"MIT"
] | null | null | null | test/actual/commandc.asm | amisonnet8/hack | d834e1f799a7afcf68e4ed6dfe087f6768398988 | [
"MIT"
] | null | null | null | D+1
=D+1
D+1;
=D+1;
D=D+1
D=D+1;
D+1;JMP
=D+1;JMP
D=D+1;JMP
M=D+1
D=D+1
MD=D+1
A=D+1
AM=D+1
AD=D+1
AMD=D+1
AMD=0
AMD=1
AMD=-1
AMD=D
AMD=A
AMD=!D
AMD=!A
AMD=-D
AMD=-A
AMD=D+1
AMD=A+1
AMD=D-1
AMD=A-1
AMD=D+A
AMD=A+D
AMD=D-A
AMD=A-D
AMD=D&A
AMD=A&D
AMD=D|A
AMD=A|D
AMD=M
AMD=!M
AMD=-M
AMD=M+1
AMD=M-1
AMD=D+M
AMD=M+D
AMD=D-M
AMD=M-D
AMD=D&M
AMD=M&D
AMD=D|M
AMD=M|D
AMD=A+D;
AMD=A+D;JGT
AMD=A+D;JEQ
AMD=A+D;JGE
AMD=A+D;JLT
AMD=A+D;JNE
AMD=A+D;JLE
AMD=A+D;JMP
| 8.370968 | 12 | 0.529865 |
2cc4c8b4378b4d8b9f4621e2b5599b8ee8d786d2 | 447 | cpp | C++ | headers/10-throw1.cpp | cpp-tutor/learnmoderncpp-tutorial | 96ca86a2508c80093f51f8ac017f41a994d04d52 | [
"MIT"
] | 1 | 2022-03-07T09:14:07.000Z | 2022-03-07T09:14:07.000Z | headers/10-throw1.cpp | cpp-tutor/learnmoderncpp-tutorial | 96ca86a2508c80093f51f8ac017f41a994d04d52 | [
"MIT"
] | null | null | null | headers/10-throw1.cpp | cpp-tutor/learnmoderncpp-tutorial | 96ca86a2508c80093f51f8ac017f41a994d04d52 | [
"MIT"
] | null | null | null | // 10-throw1.cpp : simple exception demonstration, throw and catch
#include <iostream>
#include <exception>
using namespace std;
template <typename T>
void getInteger(T& value) {
cout << "Please enter an integer (0 to throw): ";
cin >> value;
if (!value) {
throw exception{};
}
}
int main() {
long long v{};
try {
getInteger(v);
}
catch (...) {
cerr << "Caught exception!\n";
return 1;
}
cout << "Got value: " << v << '\n';
}
| 16.555556 | 66 | 0.612975 |
f643a2479b7f181bdcc0a0eb0d546f15490fe208 | 24,468 | cpp | C++ | src/platforms/windows/direct2d.cpp | LinkDoyle/YuKi | 22c7ac495e48985e0f21574b327ee687563a88e5 | [
"Apache-2.0"
] | null | null | null | src/platforms/windows/direct2d.cpp | LinkDoyle/YuKi | 22c7ac495e48985e0f21574b327ee687563a88e5 | [
"Apache-2.0"
] | null | null | null | src/platforms/windows/direct2d.cpp | LinkDoyle/YuKi | 22c7ac495e48985e0f21574b327ee687563a88e5 | [
"Apache-2.0"
] | null | null | null | #include "direct2d.h"
#include <algorithm>
#include <cassert>
#include <exception>
#include <string_view>
#include <utility>
#undef max
#undef min
#pragma comment(lib, "d2d1")
#pragma comment(lib, "d3d11")
#pragma comment(lib, "dwrite.lib")
#pragma comment(lib, "dxguid.lib")
namespace yuki {
namespace platforms {
namespace windows {
using Microsoft::WRL::ComPtr;
template <typename R, typename S>
static constexpr R ConvertTo(S s) {
return static_cast<R>(s);
}
/*******************************************************************************
* class ComException
******************************************************************************/
struct ComException : std::exception {
HRESULT const hr;
explicit ComException(HRESULT const value) : hr(value) {}
};
inline void ThrowIfFailed(HRESULT const hr) {
if (S_OK != hr) throw ComException(hr);
}
inline void WarnIfFailed(HRESULT const hr) {
if (S_OK != hr) throw ComException(hr);
}
/*******************************************************************************
* Direct2D Device Dependent Resource
******************************************************************************/
class D2DDeviceDependentRes {
public:
D2DDeviceDependentRes() = default;
D2DDeviceDependentRes(const D2DDeviceDependentRes&) = default;
D2DDeviceDependentRes(D2DDeviceDependentRes&&) = default;
D2DDeviceDependentRes& operator=(const D2DDeviceDependentRes&) = default;
D2DDeviceDependentRes& operator=(D2DDeviceDependentRes&&) = default;
virtual ~D2DDeviceDependentRes() = default;
virtual void recreate() {}
};
/*******************************************************************************
* class D2DBrush
******************************************************************************/
class D2DBrush : public D2DDeviceDependentRes {
public:
D2DBrush() = default;
virtual ComPtr<ID2D1Brush> getD2DBrush() const = 0;
};
/*******************************************************************************
 * class D2DSolidColorBrush
******************************************************************************/
class D2DSolidColorBrush : public D2DBrush {
public:
explicit D2DSolidColorBrush(ComPtr<ID2D1SolidColorBrush> brush)
: brush_(std::move(brush)) {}
ComPtr<ID2D1Brush> getD2DBrush() const override { return brush_; }
private:
ComPtr<ID2D1SolidColorBrush> brush_;
};
class D2DBitmap : public Bitmap, public D2DDeviceDependentRes {
public:
D2DBitmap() = default;
explicit D2DBitmap(ComPtr<ID2D1Bitmap> bitmap) : bitmap_(std::move(bitmap)) {}
D2DBitmap(const D2DBitmap&) = default;
D2DBitmap(D2DBitmap&&) = default;
D2DBitmap& operator=(const D2DBitmap&) = default;
D2DBitmap& operator=(D2DBitmap&&) = default;
virtual ~D2DBitmap() = default;
virtual ComPtr<ID2D1Bitmap> getD2DBitmap() const { return bitmap_; }
private:
ComPtr<ID2D1Bitmap> bitmap_;
};
/*******************************************************************************
* DirectWrite TextFormat Wrapper
******************************************************************************/
template <>
constexpr DWRITE_TEXT_ALIGNMENT ConvertTo(TextAlignment textAlignment) {
DWRITE_TEXT_ALIGNMENT result = DWRITE_TEXT_ALIGNMENT_LEADING;
switch (textAlignment) {
case TextAlignment::Leading:
result = DWRITE_TEXT_ALIGNMENT_LEADING;
break;
case TextAlignment::Trailing:
result = DWRITE_TEXT_ALIGNMENT_TRAILING;
break;
case TextAlignment::Center:
result = DWRITE_TEXT_ALIGNMENT_CENTER;
break;
case TextAlignment::Justified:
result = DWRITE_TEXT_ALIGNMENT_JUSTIFIED;
break;
}
return result;
}
template <>
constexpr TextAlignment ConvertTo(DWRITE_TEXT_ALIGNMENT textAlignment) {
TextAlignment result = TextAlignment::Leading;
switch (textAlignment) {
case DWRITE_TEXT_ALIGNMENT_LEADING:
result = TextAlignment::Leading;
break;
case DWRITE_TEXT_ALIGNMENT_TRAILING:
result = TextAlignment::Trailing;
break;
case DWRITE_TEXT_ALIGNMENT_CENTER:
result = TextAlignment::Center;
break;
case DWRITE_TEXT_ALIGNMENT_JUSTIFIED:
result = TextAlignment::Justified;
break;
}
return result;
}
template <>
constexpr DWRITE_PARAGRAPH_ALIGNMENT ConvertTo(
ParagraphAlignment paragraphAlignment) {
DWRITE_PARAGRAPH_ALIGNMENT result = DWRITE_PARAGRAPH_ALIGNMENT_NEAR;
switch (paragraphAlignment) {
case ParagraphAlignment::Near:
result = DWRITE_PARAGRAPH_ALIGNMENT_NEAR;
break;
case ParagraphAlignment::Far:
result = DWRITE_PARAGRAPH_ALIGNMENT_FAR;
break;
case ParagraphAlignment::Center:
result = DWRITE_PARAGRAPH_ALIGNMENT_CENTER;
break;
}
return result;
}
template <>
constexpr ParagraphAlignment ConvertTo(
DWRITE_PARAGRAPH_ALIGNMENT paragraphAlignment) {
ParagraphAlignment result = ParagraphAlignment::Near;
switch (paragraphAlignment) {
case DWRITE_PARAGRAPH_ALIGNMENT_NEAR:
result = ParagraphAlignment::Near;
break;
case DWRITE_PARAGRAPH_ALIGNMENT_FAR:
result = ParagraphAlignment::Far;
break;
case DWRITE_PARAGRAPH_ALIGNMENT_CENTER:
result = ParagraphAlignment::Center;
break;
}
return result;
}
template <>
constexpr DWRITE_WORD_WRAPPING ConvertTo(WordWrapping paragraphAlignment) {
DWRITE_WORD_WRAPPING result = DWRITE_WORD_WRAPPING_WRAP;
switch (paragraphAlignment) {
case WordWrapping::Wrap:
result = DWRITE_WORD_WRAPPING_WRAP;
break;
case WordWrapping::NoWrap:
result = DWRITE_WORD_WRAPPING_NO_WRAP;
break;
case WordWrapping::EmergencyBreak:
result = DWRITE_WORD_WRAPPING_EMERGENCY_BREAK;
break;
case WordWrapping::WholeWord:
result = DWRITE_WORD_WRAPPING_WHOLE_WORD;
break;
case WordWrapping::Character:
result = DWRITE_WORD_WRAPPING_CHARACTER;
break;
}
return result;
}
template <>
constexpr WordWrapping ConvertTo(DWRITE_WORD_WRAPPING paragraphAlignment) {
WordWrapping result = WordWrapping::Wrap;
switch (paragraphAlignment) {
case DWRITE_WORD_WRAPPING_WRAP:
result = WordWrapping::Wrap;
break;
case DWRITE_WORD_WRAPPING_NO_WRAP:
result = WordWrapping::NoWrap;
break;
case DWRITE_WORD_WRAPPING_EMERGENCY_BREAK:
result = WordWrapping::EmergencyBreak;
break;
case DWRITE_WORD_WRAPPING_WHOLE_WORD:
result = WordWrapping::WholeWord;
break;
case DWRITE_WORD_WRAPPING_CHARACTER:
result = WordWrapping::Character;
break;
}
return result;
}
template <>
constexpr FontWeight ConvertTo(DWRITE_FONT_WEIGHT fontWeight) {
return static_cast<FontWeight>(fontWeight);
}
template <>
constexpr DWRITE_FONT_WEIGHT ConvertTo(FontWeight fontWeight) {
return static_cast<DWRITE_FONT_WEIGHT>(fontWeight);
}
class DWriteTextFormat : public TextFormat {
public:
explicit DWriteTextFormat(ComPtr<IDWriteTextFormat> textFormat)
: textFormat_(std::move(textFormat)) {}
ComPtr<IDWriteTextFormat> getTextFormat() const { return textFormat_; }
String getFontFamilyName() const override {
auto length = textFormat_->GetFontFamilyNameLength();
auto buffer = std::make_unique<WCHAR[]>(length);
WarnIfFailed(textFormat_->GetFontFamilyName(buffer.get(), length));
std::wstring_view wstringView(buffer.get(), length);
return String(wstringView.begin(), wstringView.end());
}
float getSize() const override { return textFormat_->GetFontSize(); }
void setTextAlignment(TextAlignment textAlignment) override {
WarnIfFailed(textFormat_->SetTextAlignment(
ConvertTo<DWRITE_TEXT_ALIGNMENT>(textAlignment)));
}
TextAlignment getTextAlignment() const override {
return ConvertTo<TextAlignment>(textFormat_->GetTextAlignment());
}
void setParagraphAlignment(ParagraphAlignment paragraphAlignment) override {
WarnIfFailed(textFormat_->SetParagraphAlignment(
ConvertTo<DWRITE_PARAGRAPH_ALIGNMENT>(paragraphAlignment)));
}
ParagraphAlignment getParagraphAlignment() const override {
return ConvertTo<ParagraphAlignment>(textFormat_->GetParagraphAlignment());
}
void setWordWrapping(WordWrapping wordWrapping) override {
WarnIfFailed(textFormat_->SetWordWrapping(
ConvertTo<DWRITE_WORD_WRAPPING>(wordWrapping)));
}
WordWrapping getWordWrapping() const override {
return ConvertTo<WordWrapping>(textFormat_->GetWordWrapping());
}
int getWeight() const override { return textFormat_->GetFontWeight(); }
private:
ComPtr<IDWriteTextFormat> textFormat_;
};
/*******************************************************************************
* DirectX Type Helper Functions
******************************************************************************/
static float ConvertDipsToPixels(float dips, float dpi) {
static const auto DIPS_PER_INCH = 96.0f;
return floorf(dips * dpi / DIPS_PER_INCH + 0.5f);
}
static constexpr D2D1_POINT_2F ToD2DPointF(const PointF& point) noexcept {
return D2D1_POINT_2F{point.x(), point.y()};
}
static constexpr D2D1_RECT_F ToD2DRectF(const RectF& rect) noexcept {
return D2D1_RECT_F{rect.left(), rect.top(), rect.right(), rect.bottom()};
}
static constexpr D2D1_RECT_F ToD2DRectF(const RectF* rect) noexcept {
if (rect == nullptr) return D2D1_RECT_F{};
return D2D1_RECT_F{rect->left(), rect->top(), rect->right(), rect->bottom()};
}
static constexpr D2D1_ROUNDED_RECT ToD2DRoundedRectF(
const RoundedRectF& rect) noexcept {
return D2D1_ROUNDED_RECT{
{rect.left(), rect.top(), rect.right(), rect.bottom()},
rect.radiusX(),
rect.radiusY()};
}
static constexpr D2D1_ELLIPSE ToD2DEllipse(const EllipseF& ellipse) {
return D2D1_ELLIPSE{
{ellipse.x(), ellipse.y()}, ellipse.radiusX(), ellipse.radiusY()};
}
static constexpr D2D1_ELLIPSE ToD2DEllipse(const CircleF& circle) {
return D2D1_ELLIPSE{
{circle.x(), circle.y()}, circle.radius(), circle.radius()};
}
static constexpr D2D1_COLOR_F ToD2DColorF(const ColorF& color) noexcept {
return D2D1_COLOR_F{color.red(), color.green(), color.blue(), color.alpha()};
}
static ComPtr<ID2D1StrokeStyle> ToD2DStrokeStyle(
const StrokeStyle* strokeStyle) {
return ComPtr<ID2D1StrokeStyle>();
}
static ComPtr<ID2D1Bitmap1> ToD2DBitmap(const Bitmap* bitmap) {
return ComPtr<ID2D1Bitmap1>();
}
static ComPtr<IDWriteTextFormat> ToWriteTextFormat(const TextFormat* font) {
return dynamic_cast<const DWriteTextFormat*>(font)->getTextFormat();
}
/*******************************************************************************
* class DirectXRes
******************************************************************************/
ComPtr<ID3D11Device> DirectXRes::d3device_;
ComPtr<IDXGIDevice> DirectXRes::dxdevice_;
ComPtr<IDXGIAdapter> DirectXRes::adapter_;
ComPtr<IDXGIFactory2> DirectXRes::factory_;
ComPtr<ID2D1Factory1> DirectXRes::d2dFactory_;
ComPtr<ID2D1Device> DirectXRes::d2dDevice_;
ComPtr<IWICImagingFactory> DirectXRes::imagingFactory_;
ComPtr<IDWriteFactory1> DirectXRes::dWriteFactory_;
void DirectXRes::init() { createDeviceResources(); }
void DirectXRes::releaseAll() {
d3device_.Reset();
dxdevice_.Reset();
adapter_.Reset();
factory_.Reset();
d2dFactory_.Reset();
d2dDevice_.Reset();
}
void DirectXRes::handleDeviceLost() { throw; }
inline ComPtr<IDXGIFactory2> DirectXRes::getDXGIFactory() {
assert(factory_);
return factory_;
}
inline ComPtr<ID3D11Device> DirectXRes::getD3D11Device() {
assert(d3device_);
return d3device_;
}
inline ComPtr<ID2D1Factory1> DirectXRes::getD2DFactory() {
assert(d2dFactory_);
return d2dFactory_;
}
inline ComPtr<ID2D1Device> DirectXRes::getD2DDevice() {
assert(d2dDevice_);
return d2dDevice_;
}
inline ComPtr<IDWriteFactory1> DirectXRes::getDWriteFactory() {
if (dWriteFactory_ == nullptr) {
DWriteCreateFactory(
DWRITE_FACTORY_TYPE_SHARED, __uuidof(dWriteFactory_),
reinterpret_cast<IUnknown**>(dWriteFactory_.GetAddressOf()));
}
return dWriteFactory_;
}
ComPtr<IWICImagingFactory> DirectXRes::getImagingFactory() {
if (imagingFactory_ == nullptr) {
ThrowIfFailed(CoCreateInstance(CLSID_WICImagingFactory, nullptr,
CLSCTX_INPROC_SERVER,
IID_PPV_ARGS(&imagingFactory_)));
}
return imagingFactory_;
}
std::unique_ptr<D2DContext2D> DirectXRes::createContextFromHWnd(HWND hWnd) {
return std::make_unique<D2DContext2D>(hWnd);
}
void DirectXRes::createDeviceResources() {
auto hr = createD3D11Device(D3D_DRIVER_TYPE_HARDWARE, d3device_);
if (DXGI_ERROR_UNSUPPORTED == hr) {
hr = createD3D11Device(D3D_DRIVER_TYPE_WARP, d3device_);
}
ThrowIfFailed(hr);
ThrowIfFailed(d3device_.As(&dxdevice_));
ThrowIfFailed(dxdevice_->GetAdapter(adapter_.GetAddressOf()));
ThrowIfFailed(adapter_->GetParent(IID_PPV_ARGS(&factory_)));
D2D1_FACTORY_OPTIONS options = {};
#ifdef _DEBUG
options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION;
#endif
ThrowIfFailed(D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED, options,
d2dFactory_.GetAddressOf()));
ThrowIfFailed(
d2dFactory_->CreateDevice(dxdevice_.Get(), d2dDevice_.GetAddressOf()));
}
inline HRESULT DirectXRes::createD3D11Device(D3D_DRIVER_TYPE const type,
ComPtr<ID3D11Device>& device) {
UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT;
#ifdef _DEBUG
flags |= D3D11_CREATE_DEVICE_DEBUG;
#endif
return D3D11CreateDevice(nullptr, type, nullptr, flags, nullptr, 0,
D3D11_SDK_VERSION, device.GetAddressOf(), nullptr,
nullptr);
}
/*******************************************************************************
* class D2DBrushAllocation
******************************************************************************/
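/*
 * Solid-color brushes are device-dependent Direct2D resources, so they are cached
 * per color here (capacity 64) and the cache is cleared via reset() when the
 * render target is recreated, e.g. in D2DContext2D::resetSize().
 */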
D2DBrushAllocation::D2DBrushAllocation() : soildColorBrushcache_(64) {}
ComPtr<ID2D1Brush> D2DBrushAllocation::getD2DBrush(
ID2D1DeviceContext* d2dContext, const Brush* brush) {
if (brush == nullptr) {
return nullptr;
}
switch (brush->style()) {
case BrushStyle::SolidColor: {
const auto solidColorBrush = static_cast<const SolidColorBrush*>(brush);
ColorF color = solidColorBrush->getColor();
if (auto result = soildColorBrushcache_.get(color)) {
return result.get();
} else {
ComPtr<ID2D1SolidColorBrush> d2dBrush;
ThrowIfFailed(d2dContext->CreateSolidColorBrush(
ToD2DColorF(solidColorBrush->getColor()), &d2dBrush));
soildColorBrushcache_.insert(color, d2dBrush);
return d2dBrush;
}
}
default:
return nullptr;
}
}
void D2DBrushAllocation::reset() { soildColorBrushcache_.clear(); }
/*******************************************************************************
* class D2DContext
******************************************************************************/
void D2DContext2D::resetSize(SizeF size) {
brushAllocation_->reset();
using namespace Microsoft::WRL;
const int DOUBLE_BUFFER_COUNT = 2;
context_->SetTarget(nullptr);
bitmap_.Reset();
UINT width = std::max(lround(size.width()), 8L);
UINT height = std::max(lround(size.height()), 8L);
auto hr = swapChain_->ResizeBuffers(DOUBLE_BUFFER_COUNT, width, height,
DXGI_FORMAT_B8G8R8A8_UNORM, 0);
if (hr == DXGI_ERROR_DEVICE_REMOVED || hr == DXGI_ERROR_DEVICE_RESET) {
handleDeviceLost();
return;
}
ThrowIfFailed(hr);
createDeviceSwapChainBitmap();
}
D2DContext2D::D2DContext2D(HWND hWnd)
: brushAllocation_(new D2DBrushAllocation) {
createDeviceContextFromHWnd(hWnd);
createDeviceSwapChainBitmap();
}
inline void D2DContext2D::begin() {}
inline bool D2DContext2D::flush() {
ThrowIfFailed(context_->Flush());
return true;
}
inline bool D2DContext2D::end() { return true; }
void D2DContext2D::setTransform(const Transform2D& transform) {}
void D2DContext2D::resetTransform() {}
Transform2D D2DContext2D::getTransform() const {
return Transform2D::identity();
}
void D2DContext2D::clear(Color color) {}
void D2DContext2D::clear(const ColorF& color) {
context_->Clear(ToD2DColorF(color));
}
void D2DContext2D::drawCircle(const CircleF& circle, const Brush* brush,
float strokeWidth, StrokeStyle* strokeStyle) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->DrawEllipse(ToD2DEllipse(circle), d2dBrush.Get(), strokeWidth,
ToD2DStrokeStyle(strokeStyle).Get());
}
void D2DContext2D::drawEllipse(const EllipseF& ellipse, const Brush* brush,
float strokeWidth, StrokeStyle* strokeStyle) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->DrawEllipse(ToD2DEllipse(ellipse), d2dBrush.Get(), strokeWidth,
ToD2DStrokeStyle(strokeStyle).Get());
}
void D2DContext2D::drawLine(const LineF& line, const Brush* brush,
float strokeWidth, StrokeStyle* strokeStyle) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->DrawLine(ToD2DPointF(line.p1()), ToD2DPointF(line.p2()),
d2dBrush.Get(), strokeWidth,
ToD2DStrokeStyle(strokeStyle).Get());
}
void D2DContext2D::drawRect(const RectF& rect, const Brush* brush,
float strokeWidth, StrokeStyle* strokeStyle) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->DrawRectangle(ToD2DRectF(rect), d2dBrush.Get(), strokeWidth,
ToD2DStrokeStyle(strokeStyle).Get());
}
void D2DContext2D::drawRoundedRect(const RoundedRectF& rect, const Brush* brush,
float strokeWidth,
StrokeStyle* strokeStyle) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->DrawRoundedRectangle(ToD2DRoundedRectF(rect), d2dBrush.Get(),
strokeWidth,
ToD2DStrokeStyle(strokeStyle).Get());
}
void D2DContext2D::fillCircle(const CircleF& circle, const Brush* brush) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->FillEllipse(ToD2DEllipse(circle), d2dBrush.Get());
}
void D2DContext2D::fillEllipse(const EllipseF& ellipse, const Brush* brush) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->FillEllipse(ToD2DEllipse(ellipse), d2dBrush.Get());
}
void D2DContext2D::fillRect(const RectF& rect, const Brush* brush) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->FillRectangle(ToD2DRectF(rect), d2dBrush.Get());
}
void D2DContext2D::fillRoundedRect(const RoundedRectF& rect,
const Brush* brush) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->FillRoundedRectangle(ToD2DRoundedRectF(rect), d2dBrush.Get());
}
void D2DContext2D::pushClip(const RectF& rect) {
context_->PushAxisAlignedClip(ToD2DRectF(rect),
D2D1_ANTIALIAS_MODE_PER_PRIMITIVE);
}
void D2DContext2D::popClip() { context_->PopAxisAlignedClip(); }
void D2DContext2D::setDpi(float dpiX, float dpiY) {
context_->SetDpi(dpiX, dpiY);
}
void D2DContext2D::getDpi(float* dpiX, float* dpiY) {
context_->GetDpi(dpiX, dpiY);
}
void D2DContext2D::drawBitmap(const Bitmap* bitmap,
const RectF* destionationRectangle, float opacity,
BitmapInterpolationMode mode,
const RectF* sourceRectangle) {
if (destionationRectangle == nullptr) {
context_->DrawBitmap(ToD2DBitmap(bitmap).Get());
} else {
D2D1_BITMAP_INTERPOLATION_MODE interpolationMode;
switch (mode) {
case BitmapInterpolationMode::Linear:
interpolationMode = D2D1_BITMAP_INTERPOLATION_MODE_LINEAR;
break;
case BitmapInterpolationMode::NearestNeighbor:
interpolationMode = D2D1_BITMAP_INTERPOLATION_MODE_NEAREST_NEIGHBOR;
break;
default:
interpolationMode = D2D1_BITMAP_INTERPOLATION_MODE_LINEAR;
break;
}
if (sourceRectangle == nullptr) {
context_->DrawBitmap(ToD2DBitmap(bitmap).Get(),
ToD2DRectF(*destionationRectangle), opacity,
interpolationMode);
} else {
context_->DrawBitmap(ToD2DBitmap(bitmap).Get(),
ToD2DRectF(*destionationRectangle), opacity,
interpolationMode, ToD2DRectF(sourceRectangle));
}
}
}
void D2DContext2D::drawText(const String& text, const TextFormat* font,
const RectF& rect, const Brush* brush) {
auto d2dBrush = brushAllocation_->getD2DBrush(context_.Get(), brush);
context_->DrawTextW(text.c_str(), text.size(), ToWriteTextFormat(font).Get(),
ToD2DRectF(rect), d2dBrush.Get());
}
std::unique_ptr<Bitmap> D2DContext2D::loadBitmap(const String& filename) {
auto factory = DirectXRes::getImagingFactory();
ComPtr<IWICBitmapDecoder> decoder;
ThrowIfFailed(factory->CreateDecoderFromFilename(
filename.c_str(), nullptr, GENERIC_READ, WICDecodeMetadataCacheOnLoad,
&decoder));
ComPtr<IWICBitmapFrameDecode> source;
ThrowIfFailed(decoder->GetFrame(0, &source));
ComPtr<IWICFormatConverter> converter;
factory->CreateFormatConverter(&converter);
ThrowIfFailed(converter->Initialize(
source.Get(), GUID_WICPixelFormat32bppPBGRA, WICBitmapDitherTypeNone,
nullptr, 0.f, WICBitmapPaletteTypeMedianCut));
ComPtr<ID2D1Bitmap> bitmap;
  ThrowIfFailed(context_->CreateBitmapFromWicBitmap(converter.Get(), &bitmap));
return std::make_unique<D2DBitmap>(std::move(bitmap));
}
void D2DContext2D::createDeviceContextFromHWnd(HWND hWnd) {
DXGI_SWAP_CHAIN_DESC1 props_dscd = {};
props_dscd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
props_dscd.SampleDesc.Count = 1;
props_dscd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT;
props_dscd.BufferCount = 2;
ThrowIfFailed(DirectXRes::getDXGIFactory()->CreateSwapChainForHwnd(
DirectXRes ::getD3D11Device().Get(), hWnd, &props_dscd, nullptr, nullptr,
swapChain_.GetAddressOf()));
ThrowIfFailed(DirectXRes::getD2DDevice()->CreateDeviceContext(
D2D1_DEVICE_CONTEXT_OPTIONS_NONE, context_.GetAddressOf()));
}
void D2DContext2D::createDeviceSwapChainBitmap() {
float dpiX, dpiY;
DirectXRes::getD2DFactory()->GetDesktopDpi(&dpiX, &dpiY);
context_->SetDpi(dpiX, dpiY);
ComPtr<IDXGISurface> surface;
ThrowIfFailed(swapChain_->GetBuffer(0, IID_PPV_ARGS(&surface)));
const auto properties = D2D1::BitmapProperties1(
D2D1_BITMAP_OPTIONS_TARGET | D2D1_BITMAP_OPTIONS_CANNOT_DRAW,
D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE),
dpiX, dpiY);
ThrowIfFailed(context_->CreateBitmapFromDxgiSurface(surface.Get(), properties,
bitmap_.GetAddressOf()));
context_->SetTarget(bitmap_.Get());
context_->SetUnitMode(D2D1_UNIT_MODE_PIXELS);
}
void D2DContext2D::handleDeviceLost() {
HWND hWnd;
ThrowIfFailed(swapChain_->GetHwnd(&hWnd));
DirectXRes::handleDeviceLost();
context_->SetTarget(nullptr);
bitmap_.Reset();
context_.Reset();
swapChain_.Reset();
createDeviceContextFromHWnd(hWnd);
createDeviceSwapChainBitmap();
}
void D2DContext2D::beginDraw() { context_->BeginDraw(); }
bool D2DContext2D::endDraw() {
ThrowIfFailed(context_->EndDraw());
const auto hr = swapChain_->Present(1, 0);
if (S_OK != hr && DXGI_STATUS_OCCLUDED != hr) {
ThrowIfFailed(hr);
}
return true;
}
std::unique_ptr<TextFormat> D2DContext2D::createTextFormat(const String& name,
float size,
FontWeight weight) {
ComPtr<IDWriteTextFormat> textFormat;
auto factory = DirectXRes::getDWriteFactory();
ThrowIfFailed(factory->CreateTextFormat(
name.c_str(), nullptr, ConvertTo<DWRITE_FONT_WEIGHT>(weight),
DWRITE_FONT_STYLE_NORMAL, DWRITE_FONT_STRETCH_NORMAL, size, TEXT(""),
&textFormat));
return std::make_unique<DWriteTextFormat>(std::move(textFormat));
}
} // namespace windows
} // namespace platforms
} // namespace yuki
| 33.42623 | 80 | 0.671489 |
cde51cba504415c37974fa3081b87f8418deba51 | 1,012 | cs | C# | Source/Treenumerable.Linq/VirtualForestExtensions/VirtualForestExtensions.FollowingSiblingsAndSelf.cs | jasonmcboyd/Treenumerable.Linq | fc6a157ed8a48d38a4b46c0eafd29b8244ac750b | [
"MIT"
] | 1 | 2016-06-12T20:05:58.000Z | 2016-06-12T20:05:58.000Z | Source/Treenumerable.Linq/VirtualForestExtensions/VirtualForestExtensions.FollowingSiblingsAndSelf.cs | jasonmcboyd/Treenumerable.Linq | fc6a157ed8a48d38a4b46c0eafd29b8244ac750b | [
"MIT"
] | null | null | null | Source/Treenumerable.Linq/VirtualForestExtensions/VirtualForestExtensions.FollowingSiblingsAndSelf.cs | jasonmcboyd/Treenumerable.Linq | fc6a157ed8a48d38a4b46c0eafd29b8244ac750b | [
"MIT"
] | null | null | null | using System.Collections.Generic;
using System.Linq;
namespace Treenumerable.Linq
{
public static partial class VirtualForestExtensions
{
public static IVirtualForest<T> FollowingSiblingsAndSelf<T>(this IVirtualForest<T> forest)
{
return
forest
.ShallowCopy(roots =>
roots
.SelectMany(y =>
forest
.TreeWalker
.GetFollowingSiblingsAndSelf(y, forest.Comparer)));
}
public static IVirtualForest<T> FollowingSiblingsAndSelf<T>(
this IVirtualForest<T> forest,
IEqualityComparer<T> comparer)
{
return new VirtualForest<T>(
forest.TreeWalker,
comparer,
forest.Roots.SelectMany(x =>
forest
.TreeWalker
.GetFollowingSiblingsAndSelf(x, comparer)));
}
}
}
| 29.764706 | 98 | 0.513834 |
5f051e4e70cc70a351bcf8c015f5f2b4d7455f79 | 726 | dart | Dart | lib/src/widgets/button/circle_button.dart | najeebaslan/flutter_utils_project | 1752d7878d462a3d667984b4c9ad4de60f552187 | [
"MIT"
] | 2 | 2022-02-27T07:00:10.000Z | 2022-03-25T01:08:48.000Z | lib/src/widgets/button/circle_button.dart | najeebaslan/flutter_utils_project | 1752d7878d462a3d667984b4c9ad4de60f552187 | [
"MIT"
] | 1 | 2022-02-20T16:58:51.000Z | 2022-02-20T16:58:51.000Z | lib/src/widgets/button/circle_button.dart | najeebaslan/flutter_utils_project | 1752d7878d462a3d667984b4c9ad4de60f552187 | [
"MIT"
] | 1 | 2022-02-27T07:00:13.000Z | 2022-02-27T07:00:13.000Z | import 'package:flutter/material.dart';
class CircleButton extends StatelessWidget {
final Widget icon;
final double iconSize;
final Function() onPressed;
final Color? color;
const CircleButton({
Key? key,
required this.icon,
this.color,
required this.iconSize,
required this.onPressed,
}) : super(key: key);
@override
Widget build(BuildContext context) {
return Container(
margin: const EdgeInsets.all(6.0),
decoration: BoxDecoration(
color:color ?? Colors.grey[200],
shape: BoxShape.circle,
),
child: IconButton(
icon: icon,
iconSize: iconSize,
color:Colors.black,
onPressed: onPressed,
),
);
}
}
| 21.352941 | 44 | 0.632231 |
72e89ae878edb48f24111bb5e5c33335a32db9a8 | 221 | cs | C# | ViewModel/WaitingDialogViewModel.cs | qgy123/YueDroidBox | 547e680eb885bcf65875f80a64abf56e10da72dc | [
"MIT"
] | 1 | 2019-08-23T03:53:39.000Z | 2019-08-23T03:53:39.000Z | ViewModel/WaitingDialogViewModel.cs | qgy123/YueDroidBox | 547e680eb885bcf65875f80a64abf56e10da72dc | [
"MIT"
] | null | null | null | ViewModel/WaitingDialogViewModel.cs | qgy123/YueDroidBox | 547e680eb885bcf65875f80a64abf56e10da72dc | [
"MIT"
] | null | null | null | using Stylet;
namespace YueDroidBox.ViewModel
{
public class WaitingDialogViewModel : Screen
{
public WaitingDialogViewModel()
{
DisplayName = "Busy Waiting Dialog";
}
}
} | 18.416667 | 48 | 0.61086 |
e23bcc48afdc8eea9aaa30ecdad4c3732a1cd24c | 3,312 | py | Python | util/constants.py | yankai14/event-management-telegram-bot-client | 141a2c455c52b09fc4e7b047eb4efdb638d82060 | [
"MIT"
] | null | null | null | util/constants.py | yankai14/event-management-telegram-bot-client | 141a2c455c52b09fc4e7b047eb4efdb638d82060 | [
"MIT"
] | 7 | 2021-06-28T07:24:56.000Z | 2021-07-22T12:59:30.000Z | util/constants.py | yankai14/event-management-telegram-bot-client | 141a2c455c52b09fc4e7b047eb4efdb638d82060 | [
"MIT"
] | null | null | null | from telegram.ext import ConversationHandler
from enum import Enum
class State(Enum):
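    # states are grouped by numeric range: single digits for the top-level
    # conversation, 100s for feature entry points, 200-700s for per-feature
    # sub-conversations, 800s for pagination, and 1000+ for meta states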
# State definition for top level conversation
FEATURE_SELECTION = 1
# State definitions for 2nd level conversation (features)
EVENT_LIST = 100
NEW_LAUNCH_INTELLIGENCE = 101
REGISTER = 102
LOGIN = 103
ENROLLMENT_HISTORY = 104
ENROLLMENT_PAYMENT = 105
# State definitions for Enrollment history feature
ENROLLMENT_HISTORY_SELECTING_ACTION = 200
ENROLLMENT_HISTORY_GET_INFO = 201
# State definition for Register feature
REGISTER_SELECTING_ACTION = 300
REGISTER_GET_INFO = 301
REGISTER_SUBMIT = 302
# State definition for login feature
LOGIN_SELECTING_ACTION = 400
LOGIN_GET_INFO = 401
LOGIN_SUBMIT = 402
# State definitions for Event feature
EVENT_SELECTING_ACTION = 500
EVENT_INSTANCE_LIST = 501
# State definitions for Enrollment feature
ENROLLMENT_SELECTING_ACTION = 601
ENROLLMENT_GET_INFO = 602
ENROLLMENT_SELECT_ROLE = 603
ENROLLMENT_SUBMIT = 604
ENROLLMENT_PAYMENT_GET_INFO = 701
#State definitions for Pagination
EVENT_PAGINATION = 801
EVENT_INSTANCE_PAGINATION = 802
ENROLLMENT_HISTORY_PAGINATION = 803
# Meta states
STOPPING = 1000
SHOWING = 1001
START_OVER = 1002
BACK = 1003
# Shortcut to end conversation
END = ConversationHandler.END
class Other:
CURRENT_FEATURE = "current_feature"
class Authentication:
USERNAME = "username"
PASSWORD = "password"
EMAIL = "email"
FIRST_NAME = "first_name"
LAST_NAME = "last_name"
PASSWORD = "password"
REGISTRATION_DATA = "registration_data"
LOGIN_DATA = "login_data"
AUTH_TOKEN = "AUTH_TOKEN"
EVENTS = "events"
class Event:
EVENTS = "events"
CODE = "eventCode"
NAME = "name"
DESCRIPTION = "description"
LOCATION = "location"
class EventInstance:
CODE = "eventInstanceCode"
LOCATION = "location"
DATES = "dates"
FEE = "fee"
IS_COMPLETED = "isCompleted"
IS_OPEN_FOR_SIGNUPS = "isOpenForSignUps"
EVENT= "event"
DATES = "dates"
class EnrollmentRoles(Enum):
PARTICIPANT = 1
FACILITATOR = 2
EVENT_ADMIN = 3
COORDINATOR = 4
LEAD = 5
class EnrollmentStatus(Enum):
PENDING = 1
ENROLLED = 2
REJECTED = 3
WITHDRAWN = 4
AWATING_PAYMENT = 5
class Enrollment:
ENROLL = "enroll"
ENROLLMENT_DATA = "enrollment_data"
USERNAME = "username"
ROLE = "role"
STATUS = "status"
CHECKOUT = "checkout"
ROLE_ENUM = EnrollmentRoles
STATUS_ENUM = EnrollmentStatus
class History:
ENROLLMENT_INFO = "enrollment_info"
# Data structure
EVENT_INSTANCE = "eventInstance"
IS_COMPLETED = "isCompleted"
class Payment:
ENROLLMENT_PAYMENT_INFO = "enrollment_payment_info"
PAYMENT_ID = "paymentId"
class Folder:
# Data structure
EVENT_INSTANCE = "eventInstance"
FOLDER_ID = "folderId"
FOLDER_NAME = "folderName"
class FolderPermissionRoles(Enum):
READER = "reader"
WRITER = "writer"
ORGANIZER = "organizer"
class FolderPermission:
# Data structure
FOLDER = "folder"
USER = "user"
PERMISSION_ID = "permissionId"
FOLDER_ROLE = "folderRole"
FOLDER_ROLE_ENUM = FolderPermissionRoles | 22.228188 | 61 | 0.691727 |
cd3a5c5d14f76d00257487fa7cc5bf68b0237dca | 125 | cshtml | C# | samples/meetings-sidepanel/csharp/Side Panel/Views/Home/SidePanelParticalView.cshtml | paul-cheung/Microsoft-Teams-Samples | 40c609bd9ab6801e2332902f9b854dae74670a6b | [
"MIT"
] | 264 | 2021-01-21T12:57:01.000Z | 2022-03-29T09:15:17.000Z | samples/meetings-sidepanel/csharp/Side Panel/Views/Home/SidePanelParticalView.cshtml | paul-cheung/Microsoft-Teams-Samples | 40c609bd9ab6801e2332902f9b854dae74670a6b | [
"MIT"
] | 102 | 2021-01-20T20:26:20.000Z | 2022-03-31T14:17:15.000Z | samples/meetings-sidepanel/csharp/Side Panel/Views/Home/SidePanelParticalView.cshtml | paul-cheung/Microsoft-Teams-Samples | 40c609bd9ab6801e2332902f9b854dae74670a6b | [
"MIT"
] | 272 | 2021-01-20T07:12:59.000Z | 2022-03-31T09:45:06.000Z | @model List<SidePanel.Models.TaskInfo>
@foreach (var item in Model)
{
<li>@Html.DisplayFor(model => item.Title)</li>
}
| 17.857143 | 50 | 0.68 |
7b464977ab4ff5a641e0e059ad7f44d8ce4202cc | 159 | rb | Ruby | app/models/heb412_gen/campohc.rb | alejocruzrcc/heb412_gen | dc0e19cd43e760d303b2d36548077f8c0277320c | [
"0BSD"
] | 4 | 2020-08-20T14:50:34.000Z | 2020-09-17T19:50:23.000Z | app/models/heb412_gen/campohc.rb | alejocruzrcc/heb412_gen | dc0e19cd43e760d303b2d36548077f8c0277320c | [
"0BSD"
] | 16 | 2019-11-26T12:46:42.000Z | 2022-02-12T12:03:25.000Z | app/models/heb412_gen/campohc.rb | alejocruzrcc/heb412_gen | dc0e19cd43e760d303b2d36548077f8c0277320c | [
"0BSD"
] | 1 | 2019-07-08T20:16:23.000Z | 2019-07-08T20:16:23.000Z | require 'heb412_gen/concerns/models/campohc'
module Heb412Gen
class Campohc < ActiveRecord::Base
include Heb412Gen::Concerns::Models::Campohc
end
end
| 19.875 | 48 | 0.779874 |
8c0ad07ca80f9155fdf226306c4d7785164595f8 | 1,729 | cs | C# | Runtime/OpenXRManifest.cs | studentutu/OpenXRRuntimeSelector | 547f20aa72a0427002270579607c1bf3eb4acac0 | [
"MIT"
] | 16 | 2021-01-24T19:10:50.000Z | 2022-03-07T09:03:59.000Z | Runtime/OpenXRManifest.cs | studentutu/OpenXRRuntimeSelector | 547f20aa72a0427002270579607c1bf3eb4acac0 | [
"MIT"
] | 1 | 2021-09-08T07:17:15.000Z | 2021-09-22T11:58:33.000Z | Runtime/OpenXRManifest.cs | studentutu/OpenXRRuntimeSelector | 547f20aa72a0427002270579607c1bf3eb4acac0 | [
"MIT"
] | 3 | 2021-07-13T21:05:57.000Z | 2021-09-07T07:58:04.000Z | // Copyright 2021 KOGA Mitsuhiro Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
using System;
using UnityEngine;
namespace OpenXRRuntimeJsons
{
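    // Each type exposes public PascalCase fields for callers and mirrors them into
    // private snake_case fields, since JsonUtility serializes by field name and the
    // OpenXR runtime manifest JSON uses snake_case keys (file_format_version,
    // library_path); the ISerializationCallbackReceiver hooks copy between the two.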
[Serializable]
public class ManifestRuntime : ISerializationCallbackReceiver
{
[NonSerialized] public string Name;
[NonSerialized] public string LibraryPath;
[SerializeField] [HideInInspector] private string name;
[SerializeField] [HideInInspector] private string library_path;
public void OnBeforeSerialize()
{
name = Name;
library_path = LibraryPath;
}
public void OnAfterDeserialize()
{
Name = name;
LibraryPath = library_path;
}
}
[Serializable]
public class Manifest : ISerializationCallbackReceiver
{
[NonSerialized] public string FileFormatVersion;
[NonSerialized] public ManifestRuntime Runtime;
[SerializeField] [HideInInspector] private string file_format_version;
[SerializeField] [HideInInspector] private ManifestRuntime runtime;
public void OnBeforeSerialize()
{
file_format_version = FileFormatVersion;
runtime = Runtime;
}
public void OnAfterDeserialize()
{
FileFormatVersion = file_format_version;
Runtime = runtime;
}
public static Manifest FromJson(string jsonPath)
{
return JsonUtility.FromJson<Manifest>(jsonPath);
}
public static string ToJson(Manifest manifest)
{
return JsonUtility.ToJson(manifest);
}
}
} | 28.344262 | 78 | 0.637941 |
6dd6494d71cc057ba14d8b2de6d06dfe8cae5b20 | 35,258 | c | C | src/storage/seg/hashtable.c | jidongfang/pelikan | e7b084a4536070f4cf27790be0a88fb289cfe9e8 | [
"Apache-2.0"
] | null | null | null | src/storage/seg/hashtable.c | jidongfang/pelikan | e7b084a4536070f4cf27790be0a88fb289cfe9e8 | [
"Apache-2.0"
] | null | null | null | src/storage/seg/hashtable.c | jidongfang/pelikan | e7b084a4536070f4cf27790be0a88fb289cfe9e8 | [
"Apache-2.0"
] | null | null | null |
#define XXH_INLINE_ALL
#include "hashtable.h"
#include "xxhash.h"
#include "item.h"
#include "seg.h"
#include <cc_mm.h>
#include <hash/cc_murmur3.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <sysexits.h>
#include <x86intrin.h>
/* TODO(jason): use static allocated array
* TODO(jason): add bucket array shrink
* */
/* the size of bucket in bytes, used in malloc alignment */
#define N_BYTE_PER_BUCKET 64
/* the number of slots in one bucket */
#define N_SLOT_PER_BUCKET 8u
/* the number of slots expressed in number of bits 8 == 2^3 */
#define N_SLOT_PER_BUCKET_LOG2 3u
/* mask for item_info */
#define TAG_MASK 0xfff0000000000000ul
#define FREQ_MASK 0x000ff00000000000ul
#define SEG_ID_MASK 0x00000ffffff00000ul
#define OFFSET_MASK 0x00000000000ffffful
#define TAG_BIT_SHIFT 52ul
#define FREQ_BIT_SHIFT 44ul
#define SEG_ID_BIT_SHIFT 20ul
#define OFFSET_UNIT_IN_BIT 3ul /* offset is in 8-byte unit */
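/*
 * item_info layout (64 bits):
 *   bits 63-52 tag (12 bits), bits 51-44 approximate frequency (8 bits),
 *   bits 43-20 segment id (24 bits), bits 19-0 offset in 8-byte units (20 bits)
 */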
/* this bit indicates whether the frequency has increased in the current sec */
#define FREQ_INC_INDICATOR_MASK 0x0008000000000000ul
#define CLEAR_FREQ_SMOOTH_MASK 0xfff7fffffffffffful
/* mask for bucket info */
#define LOCK_MASK 0xff00000000000000ul
#define BUCKET_CHAIN_LEN_MASK 0x00ff000000000000ul
#define TS_MASK 0x0000ffff00000000ul /* ts in bucket info */
#define CAS_MASK 0x00000000fffffffful
#define LOCK_BIT_SHIFT 56ul
#define BUCKET_CHAIN_LEN_BIT_SHIFT 48ul
#define TS_BIT_SHIFT 32ul
/* ts from proc ts, we only need 16-bit */
#define PROC_TS_MASK 0x000000000000fffful
/* a per-bucket spin lock */
#define LOCKED 0x0100000000000000ul
#define UNLOCKED 0x0000000000000000ul
extern seg_metrics_st *seg_metrics;
static struct hash_table hash_table;
static bool hash_table_initialized = false;
static __thread __uint128_t g_lehmer64_state = 1;
#define HASHSIZE(_n) (1ULL << (_n))
#define HASHMASK(_n) (HASHSIZE(_n) - 1)
#define CAL_HV(key, klen) _get_hv_xxhash(key, klen)
/* the tag is calculated in two places:
 * one is extracted from the hash value, the other from the item info;
 * we use the top 12 bits of the hash value as the tag and
 * store it in the top 12 bits of the item info,
 * note that we force the tag to start from 1, so when we calculate the true tag
 * we perform OR with 0x0010000000000000ul */
#define GET_TAG(item_info) ((item_info) & TAG_MASK)
#define GET_FREQ(item_info) (((item_info) & FREQ_MASK) >> FREQ_BIT_SHIFT)
#define GET_SEG_ID(item_info) (((item_info) & SEG_ID_MASK) >> SEG_ID_BIT_SHIFT)
#define GET_OFFSET(item_info) (((item_info) & OFFSET_MASK) << OFFSET_UNIT_IN_BIT)
#define CLEAR_FREQ(item_info) ((item_info) & (~FREQ_MASK))
#define CAL_TAG_FROM_HV(hv) (((hv) & TAG_MASK) | 0x0010000000000000ul)
#define GET_BUCKET(hv) (&hash_table.table[((hv) & (hash_table.hash_mask))])
#define GET_TS(bucket_ptr) (((*(bucket_ptr)) & TS_MASK) >> TS_BIT_SHIFT)
#define GET_CAS(bucket_ptr) ((*(bucket_ptr)) & CAS_MASK)
/* calculate the number of buckets in the bucket chain */
#define GET_BUCKET_CHAIN_LEN(bucket_ptr) \
((((*(bucket_ptr)) & BUCKET_CHAIN_LEN_MASK) >> BUCKET_CHAIN_LEN_BIT_SHIFT) + 1)
#define INCR_BUCKET_CHAIN_LEN(bucket_ptr) \
((*(bucket_ptr)) += 0x0001000000000000ul)
#define CAS_SLOT(slot_ptr, expect_ptr, new_val) \
__atomic_compare_exchange_n( \
(slot_ptr), (expect_ptr), (new_val), false, \
__ATOMIC_RELEASE, __ATOMIC_RELAXED \
)
#if defined HASHTABLE_DBG
#define SET_BUCKET_MAGIC(bucket_ptr) \
(*(bucket_ptr)) = ((*(bucket_ptr)) | 0x0000ffff00000000ul)
#define CHECK_BUCKET_MAGIC(bucket_ptr) \
ASSERT(((*(bucket_ptr)) & 0x0000ffff00000000ul) == 0x0000ffff00000000ul)
#undef GET_BUCKET
static uint64_t* GET_BUCKET(uint64_t hv)
{
uint64_t *bucket_ptr = (&hash_table.table[((hv) & hash_table.hash_mask)]);
CHECK_BUCKET_MAGIC(bucket_ptr);
return bucket_ptr;
}
#else
#define SET_BUCKET_MAGIC(bucket_ptr)
#define CHECK_BUCKET_MAGIC(bucket_ptr)
#endif
#define use_atomic_set
/* we assume little-endian here */
#define lock(bucket_ptr) \
do { \
uint8_t locked = 0; \
while (!CAS_SLOT(((uint8_t *)bucket_ptr + 7), &locked, 1)) { \
ASSERT(locked == 1); \
locked = 0; \
; \
} \
} while (0)
#define unlock(bucket_ptr) \
do { \
ASSERT(((*bucket_ptr) & LOCK_MASK) == LOCKED); \
*bucket_ptr ^= LOCKED; \
} while (0)
#define unlock_and_update_cas(bucket_ptr) \
do { \
ASSERT(((*bucket_ptr) & LOCK_MASK) != 0); \
*bucket_ptr = (*bucket_ptr + 1) ^ LOCKED; \
} while (0)
#ifdef use_atomic_set
#undef lock
#undef unlock
#undef unlock_and_update_cas
#define lock(bucket_ptr) \
do { \
while (__atomic_test_and_set( \
((uint8_t *)(bucket_ptr) + 7), __ATOMIC_ACQUIRE)) { \
; \
} \
} while (0)
#define unlock(bucket_ptr) \
do { \
__atomic_clear(((uint8_t *)(bucket_ptr) + 7), __ATOMIC_RELEASE); \
} while (0)
#define unlock_and_update_cas(bucket_ptr) \
do { \
*bucket_ptr += 1; \
__atomic_clear(((uint8_t *)(bucket_ptr) + 7), __ATOMIC_RELEASE); \
} while (0)
#endif
#ifdef no_lock
#undef lock
#undef unlock
#undef unlock_and_update_cas
#define lock(bucket_ptr)
#define unlock(bucket_ptr)
#define unlock_and_update_cas(bucket_ptr) ((*(bucket_ptr)) += 1)
#endif
/**
* this is placed here because it is called within bucket lock and it
* needs to parse item_info
*
*/
static inline struct item *
_info_to_item(uint64_t item_info)
{
uint64_t seg_id = GET_SEG_ID(item_info);
uint64_t offset = GET_OFFSET(item_info);
ASSERT(seg_id < heap.max_nseg);
ASSERT(offset < heap.seg_size);
return (struct item *) (heap.base + heap.seg_size * seg_id + offset);
}
static inline void
_item_free(uint64_t item_info, bool mark_tombstone)
{
struct item *it;
uint64_t seg_id = GET_SEG_ID(item_info);
uint64_t offset = GET_OFFSET(item_info);
it = (struct item *) (heap.base + heap.seg_size * seg_id + offset);
uint32_t sz = item_ntotal(it);
__atomic_fetch_sub(&heap.segs[seg_id].occupied_size, sz, __ATOMIC_RELAXED);
__atomic_fetch_sub(&heap.segs[seg_id].n_item, 1, __ATOMIC_RELAXED);
ASSERT(__atomic_load_n(&heap.segs[seg_id].n_item, __ATOMIC_RELAXED) >= 0);
ASSERT(__atomic_load_n(&heap.segs[seg_id].occupied_size, __ATOMIC_RELAXED)
>= 0);
if (mark_tombstone) {
it->deleted = true;
}
}
static inline bool
_same_item(const char *key, uint32_t klen, uint64_t item_info)
{
struct item *oit = _info_to_item(item_info);
return ((oit->klen == klen) && cc_memcmp(item_key(oit), key, klen) == 0);
}
static inline uint64_t
_build_item_info(uint64_t tag, uint64_t seg_id, uint64_t offset)
{
ASSERT(offset % 8 == 0);
uint64_t item_info = tag | (seg_id << 20u) | (offset >> 3u);
return item_info;
}
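/*
 * worked example (added for illustration): with tag = 0x0010000000000000ul,
 * seg_id = 3 and offset = 64, _build_item_info() returns 0x0010000000300008ul,
 * i.e. the tag occupies bits 63..52, seg id 3 sits in bits 43..20 and the
 * offset is stored as 64 / 8 = 8 in bits 19..0; GET_SEG_ID()/GET_OFFSET()
 * recover 3 and 64 from this value
 */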
#define SET_BIT(u64, pos) ((u64) | (1ul << (pos)))
#define GET_BIT(u64, pos) ((u64) & (1ul << (pos)))
#define CHECK_BIT(u64, pos) GET_BIT(u64, pos)
#define CLEAR_BIT(u64, pos) ((u64) & (~(1ul << (pos))))
static inline uint64_t
prand(void)
{
g_lehmer64_state *= 0xda942042e4dd58b5;
return (uint64_t) g_lehmer64_state;
}
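/*
 * note (added): prand() is a lehmer64-style generator with per-thread 128-bit
 * state; the cast returns the low 64 bits, which is sufficient here because it
 * is only used for the probabilistic frequency updates and needs no
 * cryptographic strength
 */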
static inline uint64_t
_get_hv_xxhash(const char *key, size_t klen)
{
return XXH3_64bits(key, klen);
}
/*
* we use an approximate and probabilistic frequency counter
* the freq counter has 8 bits, we set i-th bit if
* 1. all bits before i has been set
* 2. current time in sec mod (1 << i) == 0
*
* this avoids
* 1. counting temporal bursts in seconds
* 2. by counting only once per sec,
* the i-th bit is set with probability 1/(1<<i)
*
*/
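/*
 * note (added for clarity): the bit-based scheme described above survives only
 * in the commented-out _incr_freq() below; the live path in hashtable_get()
 * uses a 7-bit probabilistic counter instead - the counter always increments
 * while freq <= 16, afterwards with probability 1/freq, and the extra bit
 * (FREQ_INC_INDICATOR_MASK) records "already updated in the current second"
 */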
//#ifdef COMMENT
//static inline uint64_t
//_incr_freq(uint64_t item_info)
//{
//#define FREQ_BIT_START 44u
// if (last_sec_update != time_proc_sec()) {
// last_sec_update = time_proc_sec();
// cur_sec_freq_bit = 0;
// while (((last_sec_update >> (cur_sec_freq_bit)) & 1u) == 0 && cur_sec_freq_bit < 8) {
// cur_sec_freq_bit += 1;
// }
// cur_sec_freq_bit += 1;
// ASSERT(FREQ_BIT_START + cur_sec_freq_bit <= 63);
// }
//
//
// for (unsigned int i = 0; i < cur_sec_freq_bit; i++) {
// if (GET_BIT(item_info, FREQ_BIT_START + i) == 0) {
// /* current bit is not set */
// item_info = SET_BIT(item_info, FREQ_BIT_START + i);
// /* set one bit at a time */
// break;
// }
// }
//
// return item_info;
//}
//#endif
//static inline uint64_t
//_incr_freq(uint64_t item_info)
//{
//#define FREQ_BIT_START 44u
// uint64_t freq = GET_FREQ(item_info);
// if (freq == 0 || prand() % freq == 0) {
// freq = freq < 255 ? freq + 1 : 255;
// }
//
// item_info = (CLEAR_FREQ(item_info)) | (freq << FREQ_BIT_START);
// return item_info;
//}
/*
* Allocate table given size
*/
static inline uint64_t *
_hashtable_alloc(uint64_t n_slot)
{
uint64_t *table =
aligned_alloc(N_BYTE_PER_BUCKET, sizeof(uint64_t) * n_slot);
if (table == NULL) {
log_crit("cannot create hash table");
exit(EX_CONFIG);
}
cc_memset(table, 0, sizeof(uint64_t) * n_slot);
#ifdef MADV_HUGEPAGE
/* USE_HUGEPAGE */
madvise(table, sizeof(uint64_t) * n_slot, MADV_HUGEPAGE);
#endif
return table;
}
void
hashtable_setup(uint32_t hash_power)
{
ASSERT(hash_power > 0);
if (hash_table_initialized) {
log_warn("hash table has been initialized");
hashtable_teardown();
}
/* init members */
hash_table.hash_power = hash_power;
uint64_t n_slot = HASHSIZE(hash_power);
/* N_SLOT_PER_BUCKET slots are in one bucket, so hash_mask last
* N_SLOT_PER_BUCKET_LOG2 bits should be zero */
hash_table.hash_mask =
(n_slot - 1) & (0xfffffffffffffffful << N_SLOT_PER_BUCKET_LOG2);
/* alloc table */
hash_table.table = _hashtable_alloc(n_slot);
#if defined CC_ASSERT_PANIC || defined CC_ASSERT_LOG
for (uint64_t i = 0; i < n_slot / N_SLOT_PER_BUCKET; i++) {
SET_BUCKET_MAGIC(hash_table.table + i * N_SLOT_PER_BUCKET);
}
#endif
hash_table_initialized = true;
log_info("create hash table of %" PRIu64 " entries %" PRIu64 " buckets",
n_slot, n_slot >> N_SLOT_PER_BUCKET_LOG2);
}
void
hashtable_teardown(void)
{
if (!hash_table_initialized) {
log_warn("hash table is not initialized");
return;
}
cc_free(hash_table.table);
hash_table.table = NULL;
hash_table_initialized = false;
}
//static inline uint64_t
//_get_hv_murmur3(const char *key, size_t klen)
//{
// uint64_t hv[2];
//
// hash_murmur3_128_x64(key, klen, murmur3_iv, hv);
//
// return hv[0];
//}
/**
* insert an item into hash table
* insert has two steps, insert and possibly delete
* insert and delete must be completed in the same pass (atomic or locked),
* otherwise it cannot guarantee correctness
*
* procedure:
* scan through all slots of the head bucket,
 * 1. if we find the item, replace it with the new item
 * 2. if we find an empty slot first, we store the new item in the empty slot and
 * 2-1. remove the old item if the old item is in the head bucket,
 * 2-2. if we do not find it in the head bucket, we stop searching and
 *      leave the clean-up to eviction time
 * 3. if neither the old item nor an empty slot is found in the head bucket,
 * we continue to search
*/
void
hashtable_put(struct item *it, const uint64_t seg_id, const uint64_t offset)
{
const char *key = item_key(it);
const uint32_t klen = item_nkey(it);
uint64_t hv = CAL_HV(key, klen);
uint64_t tag = CAL_TAG_FROM_HV(hv);
uint64_t *head_bkt = GET_BUCKET(hv);
uint64_t *bkt = head_bkt;
INCR(seg_metrics, hash_insert);
/* 12-bit tag, 8-bit counter,
* 24-bit seg id, 20-bit offset (in the unit of 8-byte) */
uint64_t item_info, insert_item_info;
insert_item_info = _build_item_info(tag, seg_id, offset);
lock(head_bkt);
int bkt_chain_len = GET_BUCKET_CHAIN_LEN(head_bkt);
int n_item_slot;
do {
/* the last slot will be a pointer to the next
* bucket if there is next bucket */
n_item_slot = bkt_chain_len > 1 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (bkt == head_bkt && i == 0) {
                /* the first slot of the head bucket is bucket info */
continue;
}
item_info = bkt[i];
if (GET_TAG(item_info) != tag) {
if (insert_item_info != 0 && item_info == 0) {
/* store item info in the first empty slot */
bkt[i] = insert_item_info;
insert_item_info = 0;
}
continue;
}
/* a potential hit */
if (!_same_item(key, klen, item_info)) {
INCR(seg_metrics, hash_tag_collision);
continue;
}
            /* found the item, now atomically update (or delete if already inserted);
             * 8-byte reads and writes on x86 are always atomic */
bkt[i] = insert_item_info;
insert_item_info = 0;
/* now mark the old item as deleted, update stat */
_item_free(item_info, false);
goto finish;
}
if (insert_item_info == 0) {
/* item has been inserted, do not check next bucket to delete
* old item, the info will be gc when item is evicted */
goto finish;
}
/* if there are overflown buckets, we continue to check */
bkt_chain_len -= 1;
if (bkt_chain_len > 0) {
bkt = (uint64_t *) (bkt[N_SLOT_PER_BUCKET - 1]);
}
} while (bkt_chain_len > 0);
    /* we have searched every bucket, but have neither found the old item
     * nor inserted the new item - so we need to allocate a new bucket,
     * this is very rare */
INCR(seg_metrics, hash_bucket_alloc);
uint64_t *new_bkt = cc_zalloc(sizeof(uint64_t) * N_SLOT_PER_BUCKET);
/* move the last item from last bucket to new bucket */
new_bkt[0] = bkt[N_SLOT_PER_BUCKET - 1];
new_bkt[1] = insert_item_info;
insert_item_info = 0;
    /* Q(juncheng): what is the purpose of the fence? */
__atomic_thread_fence(__ATOMIC_RELEASE);
bkt[N_SLOT_PER_BUCKET - 1] = (uint64_t) new_bkt;
INCR_BUCKET_CHAIN_LEN(head_bkt);
log_verb("increase bucket chain to len %d", GET_BUCKET_CHAIN_LEN(head_bkt));
    /* this is for debugging, chain length in production should not be so large */
ASSERT(GET_BUCKET_CHAIN_LEN(head_bkt) <= 16);
finish:
ASSERT(insert_item_info == 0);
unlock_and_update_cas(head_bkt);
}
bool
hashtable_delete(const struct bstring *key)
{
INCR(seg_metrics, hash_remove);
bool deleted = false;
uint64_t item_info;
uint64_t hv = CAL_HV(key->data, key->len);
uint64_t tag = CAL_TAG_FROM_HV(hv);
uint64_t *head_bkt = GET_BUCKET(hv);
uint64_t *bkt = head_bkt;
lock(head_bkt);
int bkt_chain_len = GET_BUCKET_CHAIN_LEN(head_bkt) - 1;
int n_item_slot;
do {
n_item_slot =
bkt_chain_len > 0 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (bkt == head_bkt && i == 0) {
continue;
}
item_info = bkt[i];
if (GET_TAG(item_info) != tag) {
continue;
}
/* a potential hit */
if (!_same_item(key->data, key->len, item_info)) {
INCR(seg_metrics, hash_tag_collision);
continue;
}
/* found the item, now delete */
/* if this is the first and most up-to-date hash table entry
* we need to mark tombstone, this is for recovery */
_item_free(item_info, !deleted);
bkt[i] = 0;
deleted = true;
}
bkt_chain_len -= 1;
bkt = (uint64_t *) (bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len >= 0);
unlock(head_bkt);
return deleted;
}
/*
 * the difference between delete and evict is that
 * delete needs to mark a tombstone on the most recent object,
 * while evict depends on the item being evicted:
 * if it is the most recent version (not updated),
 * evict needs to mark a tombstone on the second most recent object;
 * if it is not the up-to-date version,
 * evict does not need to mark a tombstone
 *
 * The decision on tombstones is used for recovery; normal usage does not need
 * to mark tombstones, the tombstone is only used to find out which object is
 * the up-to-date one
*
*/
bool
hashtable_evict(const char *oit_key, const uint32_t oit_klen,
const uint64_t seg_id, const uint64_t offset)
{
INCR(seg_metrics, hash_evict);
uint64_t hv = CAL_HV(oit_key, oit_klen);
uint64_t tag = CAL_TAG_FROM_HV(hv);
uint64_t *first_bkt = GET_BUCKET(hv);
uint64_t *bkt = first_bkt;
uint64_t item_info;
uint64_t oit_info = _build_item_info(tag, seg_id, offset);
    bool first_match = true, item_outdated = true, found_oit = false;
lock(first_bkt);
int bkt_chain_len = GET_BUCKET_CHAIN_LEN(first_bkt) - 1;
int n_item_slot;
do {
n_item_slot =
bkt_chain_len > 0 ? N_SLOT_PER_BUCKET - 1 : N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (bkt == first_bkt && i == 0) {
continue;
}
item_info = CLEAR_FREQ(bkt[i]);
if (GET_TAG(item_info) != tag) {
continue;
}
/* a potential hit */
if (!_same_item(oit_key, oit_klen, item_info)) {
INCR(seg_metrics, hash_tag_collision);
continue;
}
if (first_match) {
if (oit_info == item_info) {
_item_free(item_info, false);
bkt[i] = 0;
item_outdated = false;
                    found_oit = true;
}
first_match = false;
continue;
}
else {
/* not first match, delete hash table entry,
* mark tombstone only when oit is the most up-to-date entry */
                if (!found_oit && item_info == oit_info) {
                    found_oit = true;
}
_item_free(bkt[i], !item_outdated);
bkt[i] = 0;
}
}
bkt_chain_len -= 1;
bkt = (uint64_t *) (bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len >= 0);
unlock(first_bkt);
    return found_oit;
}
/**
* delete a specific item from the hash table
* this is needed because an updated object can have its old version
* in object store and hash table, hashtable_delete_it deletes the specific
* object, for example, delete an outdated object while not affecting the
* up-to-date object
*
*/
bool
hashtable_delete_it(struct item *it,
const uint64_t seg_id, const uint64_t offset)
{
INCR(seg_metrics, hash_remove_it);
uint64_t hv = CAL_HV(item_key(it), it->klen);
uint64_t tag = CAL_TAG_FROM_HV(hv);
uint64_t *first_bkt = GET_BUCKET(hv);
uint64_t *bkt = first_bkt;
uint64_t item_info;
uint64_t oit_info = _build_item_info(tag, seg_id, offset);
bool found_oit = false;
lock(first_bkt);
int bkt_chain_len = GET_BUCKET_CHAIN_LEN(first_bkt) - 1;
int n_item_slot;
do {
n_item_slot =
bkt_chain_len > 0 ? N_SLOT_PER_BUCKET - 1 : N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
item_info = CLEAR_FREQ(bkt[i]);
if (oit_info == CLEAR_FREQ(bkt[i])) {
_item_free(item_info, false);
bkt[i] = 0;
found_oit = true;
break;
}
}
bkt_chain_len -= 1;
bkt = (uint64_t *) (bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len >= 0);
unlock(first_bkt);
return found_oit;
}
struct item *
hashtable_get(const char *key, const uint32_t klen,
int32_t *seg_id,
uint64_t *cas)
{
INCR(seg_metrics, hash_lookup);
uint64_t hv = CAL_HV(key, klen);
uint64_t tag = CAL_TAG_FROM_HV(hv);
uint64_t *first_bkt = GET_BUCKET(hv);
uint64_t *bkt = first_bkt;
uint64_t offset;
struct item *it;
uint64_t item_info;
int bkt_chain_len = GET_BUCKET_CHAIN_LEN(first_bkt) - 1;
int n_item_slot;
// uint64_t curr_ts = (uint64_t) time_proc_sec() & 0xfffful;
uint64_t curr_ts = ((uint64_t) time_proc_sec()) & PROC_TS_MASK;
if (curr_ts != GET_TS(first_bkt)) {
        /* clear, for all items in the bucket, the indicator that
         * the frequency has already been increased in the current sec */
lock(first_bkt);
if (curr_ts != GET_TS(first_bkt)) {
*first_bkt =
((*first_bkt) & (~TS_MASK)) | (curr_ts << TS_BIT_SHIFT);
do {
n_item_slot = bkt_chain_len > 0 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (bkt == first_bkt && i == 0) {
continue;
}
/* clear the bit */
bkt[i] = bkt[i] & CLEAR_FREQ_SMOOTH_MASK;
}
bkt_chain_len -= 1;
bkt = (uint64_t *) (bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len >= 0);
}
unlock(first_bkt);
/* reset bucket and bucket chain length */
bkt = first_bkt;
bkt_chain_len = GET_BUCKET_CHAIN_LEN(first_bkt) - 1;
}
/* try to find the item in the hash table */
do {
n_item_slot = bkt_chain_len > 0 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (bkt == first_bkt && i == 0) {
continue;
}
item_info = bkt[i];
if (GET_TAG(item_info) != tag) {
continue;
}
/* a potential hit */
if (!_same_item(key, klen, item_info)) {
INCR(seg_metrics, hash_tag_collision);
continue;
}
if (cas) {
*cas = GET_CAS(first_bkt);
}
*seg_id = GET_SEG_ID(item_info);
offset = GET_OFFSET(item_info);
it = (struct item *) (heap.base + heap.seg_size * *seg_id + offset);
/* item found, try to update the frequency */
uint64_t freq = GET_FREQ(item_info);
if (freq < 127) {
/* counter caps at 127 */
if (freq <= 16 || prand() % freq == 0) {
/* increase frequency by 1
* if freq <= 16 or with prob 1/freq */
freq = ((freq + 1) | 0x80ul) << FREQ_BIT_SHIFT;
}
else {
/* we do not increase frequency, but mark that
* we have already tried at current sec */
freq = (freq | 0x80ul) << FREQ_BIT_SHIFT;
}
                /* there can be a benign data race where other items in the same
                 * hash bucket increase their frequency, but it is OK */
lock(first_bkt);
if (bkt[i] == item_info) {
/* make sure it is not updated by other threads */
bkt[i] = (item_info & (~FREQ_MASK)) | freq;
}
unlock(first_bkt);
}
/* done frequency update section */
return it;
}
bkt_chain_len -= 1;
bkt = (uint64_t *) (bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len >= 0);
return NULL;
}
/**
* get but not increase item frequency
*
**/
struct item *
hashtable_get_no_freq_incr(const char *key, const uint32_t klen,
int32_t *seg_id,
uint64_t *cas)
{
uint64_t hv = CAL_HV(key, klen);
uint64_t tag = CAL_TAG_FROM_HV(hv);
uint64_t *first_bkt = GET_BUCKET(hv);
uint64_t *bkt = first_bkt;
uint64_t offset;
struct item *it;
    /* 12-bit tag, 8-bit freq, 24-bit seg id, 20-bit offset (in the unit of 8-byte) */
uint64_t item_info;
int bkt_chain_len = GET_BUCKET_CHAIN_LEN(first_bkt) - 1;
int n_item_slot;
do {
n_item_slot = bkt_chain_len > 0 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (bkt == first_bkt && i == 0) {
continue;
}
item_info = bkt[i];
if (GET_TAG(item_info) != tag) {
continue;
}
/* a potential hit */
if (!_same_item(key, klen, item_info)) {
INCR(seg_metrics, hash_tag_collision);
continue;
}
if (cas) {
*cas = GET_CAS(first_bkt);
}
*seg_id = GET_SEG_ID(item_info);
offset = GET_OFFSET(item_info);
it = (struct item *) (heap.base + heap.seg_size * (*seg_id)
+ offset);
return it;
}
bkt_chain_len -= 1;
bkt = (uint64_t *) (bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len >= 0);
return NULL;
}
/**
* get item frequency
*
**/
int
hashtable_get_it_freq(const char *it_key, const uint32_t it_klen,
const uint64_t seg_id, const uint64_t offset)
{
uint64_t hv = CAL_HV(it_key, it_klen);
uint64_t tag = CAL_TAG_FROM_HV(hv);
uint64_t *first_bkt = GET_BUCKET(hv);
uint64_t *curr_bkt = first_bkt;
uint64_t curr_item_info;
uint64_t item_info_to_find = _build_item_info(tag, seg_id, offset);
int freq = 0;
int bkt_chain_len = GET_BUCKET_CHAIN_LEN(first_bkt) - 1;
int n_item_slot;
do {
n_item_slot = bkt_chain_len > 0 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (curr_bkt == first_bkt && i == 0) {
continue;
}
curr_item_info = CLEAR_FREQ(curr_bkt[i]);
if (GET_TAG(curr_item_info) != tag) {
continue;
}
if (curr_item_info == item_info_to_find) {
freq = GET_FREQ(curr_bkt[i]) & 0x7Ful;
return freq;
}
/* a potential hit */
if (!_same_item(it_key, it_klen, curr_item_info)) {
INCR(seg_metrics, hash_tag_collision);
continue;
}
/* the item to find is outdated */
return 0;
}
bkt_chain_len -= 1;
curr_bkt = (uint64_t *) (curr_bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len >= 0);
return 0;
}
/*
* relink is used when the item is moved from one segment to another
*
* a few caveats
* item being relinked could be outdated, in which case we should not relink
*
* TODO(jason): it might be better not clear those old entries?
*/
bool
hashtable_relink_it(const char *oit_key, const uint32_t oit_klen,
const uint64_t old_seg_id, const uint64_t old_offset,
const uint64_t new_seg_id, const uint64_t new_offset)
{
INCR(seg_metrics, hash_relink);
uint64_t hv = CAL_HV(oit_key, oit_klen);
uint64_t tag = CAL_TAG_FROM_HV(hv);
uint64_t *first_bkt = GET_BUCKET(hv);
uint64_t *curr_bkt = first_bkt;
uint64_t item_info;
bool item_outdated = true, first_match = true;
uint64_t oit_info = _build_item_info(tag, old_seg_id, old_offset);
uint64_t nit_info = _build_item_info(tag, new_seg_id, new_offset);
lock(first_bkt);
int bkt_chain_len = GET_BUCKET_CHAIN_LEN(first_bkt) - 1;
int n_item_slot;
do {
n_item_slot = bkt_chain_len > 0 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (curr_bkt == first_bkt && i == 0) {
continue;
}
item_info = CLEAR_FREQ(curr_bkt[i]);
if (GET_TAG(item_info) != tag) {
continue;
}
/* a potential hit */
if (!_same_item(oit_key, oit_klen, item_info)) {
INCR(seg_metrics, hash_tag_collision);
continue;
}
if (first_match) {
if (oit_info == item_info) {
/* item is not outdated */
curr_bkt[i] = nit_info;
item_outdated = false;
}
first_match = false;
continue;
}
/* not first match, delete */
_item_free(curr_bkt[i], false);
curr_bkt[i] = 0;
}
bkt_chain_len -= 1;
curr_bkt = (uint64_t *) (curr_bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len >= 0);
unlock(first_bkt);
return !item_outdated;
}
void
hashtable_stat(int *item_cnt_ptr, int *bucket_cnt_ptr)
{
#define BUCKET_HEAD(idx) (&hash_table.table[(idx) * N_SLOT_PER_BUCKET])
*item_cnt_ptr = 0;
*bucket_cnt_ptr = 0;
int n_item_slots; /* the number of used slot in current bucket */
int bkt_chain_len; /* the number of buckets in current bucket chain */
uint64_t item_info, *head_bkt, *curr_bkt;
for (uint64_t bucket_idx = 0;
bucket_idx < HASHSIZE(hash_table.hash_power - N_SLOT_PER_BUCKET_LOG2);
bucket_idx++) {
head_bkt = curr_bkt = BUCKET_HEAD(bucket_idx);
bkt_chain_len = GET_BUCKET_CHAIN_LEN(head_bkt);
*bucket_cnt_ptr += bkt_chain_len;
do {
n_item_slots = bkt_chain_len > 1 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slots; i++) {
/* this is bucket info */
if (curr_bkt == head_bkt && i == 0) {
continue;
}
item_info = curr_bkt[i];
if (item_info != 0) {
*item_cnt_ptr += 1;
}
}
bkt_chain_len -= 1;
if (bkt_chain_len > 0) {
curr_bkt = (uint64_t *) (curr_bkt[N_SLOT_PER_BUCKET - 1]);
}
} while (bkt_chain_len > 0);
}
log_info("hashtable %d items, %d buckets", *item_cnt_ptr, *bucket_cnt_ptr);
#undef BUCKET_HEAD
}
void
scan_hashtable_find_seg(int32_t target_seg_id)
{
#define BUCKET_HEAD(idx) (&hash_table.table[(idx) * N_SLOT_PER_BUCKET])
/* expensive debug */
log_warn("scan_hashtable_find_seg is expensive func");
int bkt_chain_len;
uint64_t item_info;
uint64_t *head_bkt, *curr_bkt;
int n_item_slot;
uint64_t seg_id;
uint64_t offset;
struct item *it;
int n_bkt_in_table =
HASHSIZE(hash_table.hash_power - N_SLOT_PER_BUCKET_LOG2);
for (uint64_t bucket_idx = 0; bucket_idx < n_bkt_in_table; bucket_idx++) {
curr_bkt = head_bkt = BUCKET_HEAD(bucket_idx);
bkt_chain_len = GET_BUCKET_CHAIN_LEN(head_bkt);
do {
n_item_slot = bkt_chain_len >= 1 ?
N_SLOT_PER_BUCKET - 1 :
N_SLOT_PER_BUCKET;
for (int i = 0; i < n_item_slot; i++) {
if (curr_bkt == head_bkt && i == 0) {
continue;
}
item_info = curr_bkt[i];
if (item_info == 0) {
continue;
}
seg_id = ((item_info & SEG_ID_MASK) >> SEG_ID_BIT_SHIFT);
if (target_seg_id == seg_id) {
offset = (item_info & OFFSET_MASK) << OFFSET_UNIT_IN_BIT;
it =
(struct item *) (heap.base + heap.seg_size * seg_id +
offset);
log_warn("find item (%.*s) klen %d on seg %d offset %d, "
"item_info %lu, slot %d, bkt_len %d, bkt_len left %d",
it->klen, item_key(it), it->klen, seg_id, offset,
item_info, i,
GET_BUCKET_CHAIN_LEN(head_bkt), bkt_chain_len);
ASSERT(0);
}
}
bkt_chain_len -= 1;
curr_bkt = (uint64_t *) (curr_bkt[N_SLOT_PER_BUCKET - 1]);
} while (bkt_chain_len > 0);
}
#undef BUCKET_HEAD
}
| 31.621525 | 95 | 0.545068 |
c8dba34d722d87837571134ff8bd0a5702a51f9b | 360 | sql | SQL | transform/snowflake-dbt/models/sources/gitlab_dotcom/gitlab_dotcom_project_import_data.sql | danieldiamond/gitlab-analytics | f99e02c95c3a964b01cb14617a43cd5f64ecd88d | [
"MIT"
] | 3 | 2021-07-22T06:44:31.000Z | 2022-01-29T05:35:12.000Z | transform/snowflake-dbt/models/sources/gitlab_dotcom/gitlab_dotcom_project_import_data.sql | danieldiamond/gitlab-analytics | f99e02c95c3a964b01cb14617a43cd5f64ecd88d | [
"MIT"
] | null | null | null | transform/snowflake-dbt/models/sources/gitlab_dotcom/gitlab_dotcom_project_import_data.sql | danieldiamond/gitlab-analytics | f99e02c95c3a964b01cb14617a43cd5f64ecd88d | [
"MIT"
] | null | null | null | WITH source AS (
SELECT *
FROM {{ source('gitlab_dotcom', 'project_import_data') }}
QUALIFY ROW_NUMBER() OVER (PARTITION BY id ORDER BY _uploaded_at DESC) = 1
), renamed AS (
SELECT
id::NUMBER AS project_import_relation_id,
project_id::NUMBER AS project_id
FROM source
)
SELECT *
FROM renamed
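-- Note (added for clarity): the QUALIFY clause in the source CTE keeps only the
-- most recently uploaded row per id, so this model emits one record per
-- project_import_relation_id.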
| 18 | 76 | 0.622222 |
0468f6b72bee8af35383be244ef09fe79311ffa5 | 206 | kt | Kotlin | idea/testData/quickfix/createFromUsage/createVariable/property/extensionPropertyOnTypeFromAnotherPackage.after.kt | rohankumardubey/kotlin | 99c5ec7e051f1f3ee01459ff9c9483e5e7107b4e | [
"Apache-2.0"
] | 1 | 2021-05-16T01:37:30.000Z | 2021-05-16T01:37:30.000Z | idea/testData/quickfix/createFromUsage/createVariable/property/extensionPropertyOnTypeFromAnotherPackage.after.kt | rohankumardubey/kotlin | 99c5ec7e051f1f3ee01459ff9c9483e5e7107b4e | [
"Apache-2.0"
] | null | null | null | idea/testData/quickfix/createFromUsage/createVariable/property/extensionPropertyOnTypeFromAnotherPackage.after.kt | rohankumardubey/kotlin | 99c5ec7e051f1f3ee01459ff9c9483e5e7107b4e | [
"Apache-2.0"
] | 1 | 2020-09-27T13:23:44.000Z | 2020-09-27T13:23:44.000Z | // "Create extension property 'foo'" "true"
// ERROR: Unresolved reference: foo
import package1.A
val package2.A.foo: Any
class X {
init {
val y = package2.A()
val foo = y.foo
}
} | 15.846154 | 43 | 0.597087 |
3f41e94e92e59a3b33a1fd0d5dd8196c07699f03 | 1,760 | rb | Ruby | spec/models/user_spec.rb | TwilioDevEd/anonymous-communications-rails | b2113cbc4466bc790f2d65a7f42618b50ce1f93c | [
"MIT"
] | 21 | 2015-08-24T19:45:12.000Z | 2021-11-15T11:11:50.000Z | spec/models/user_spec.rb | TwilioDevEd/anonymous-communications-rails | b2113cbc4466bc790f2d65a7f42618b50ce1f93c | [
"MIT"
] | 167 | 2015-07-29T20:38:50.000Z | 2022-03-30T16:30:33.000Z | spec/models/user_spec.rb | TwilioDevEd/anonymous-communications-rails | b2113cbc4466bc790f2d65a7f42618b50ce1f93c | [
"MIT"
] | 13 | 2015-07-31T21:40:27.000Z | 2021-09-18T22:25:24.000Z | require "test_helper"
RSpec.describe User, type: :model do
fixtures :users
context "without email" do
it "is not valid" do
expect(User.new(user_params(email: nil)).valid?).to be_falsey
end
end
context "with invalid email" do
it "is not valid" do
expect(User.new(user_params(email: "blah")).valid?).to be_falsey
end
end
context "with valid email" do
it "is valid" do
expect(User.new(user_params).valid?).to be_truthy
end
end
context "with duplicated email" do
it "is not valid" do
user = users(:one)
user2 = User.new(user_params(email: user.email))
expect(user2.valid?).to be_falsey
end
end
context "with duplicated phone number" do
it "is not valid" do
user = users(:one)
user2 = User.new(user_params(phone_number: user.phone_number))
expect(user2.valid?).to be_falsey
end
end
context "with invalid password" do
it "is not valid" do
expect(User.new(user_params(password: "root")).valid?).to be_falsey
end
end
context "with valid password" do
it "is valid" do
expect(User.new(user_params).valid?).to be_truthy
end
end
context "without password" do
it "is not valid" do
expect(User.new(user_params(password: nil)).valid?).to be_falsey
end
end
context "without name" do
it "is not valid" do
expect(User.new(user_params(name: nil)).valid?).to be_falsey
end
end
context "without country code" do
it "is not valid" do
expect(User.new(user_params(country_code: nil)).valid?).to be_falsey
end
end
context "without phone number" do
it "is not valid" do
expect(User.new(user_params(phone_number: nil)).valid?).to be_falsey
end
end
end
| 23.157895 | 74 | 0.660227 |
c8842951edbe6e653b3a9b340c334773b3ec3279 | 253 | css | CSS | src/css/templates/searchPage.css | tristanmcc/Instant-Box | cb592daf7c6fa361e42abd00da4279768998eef3 | [
"MIT"
] | null | null | null | src/css/templates/searchPage.css | tristanmcc/Instant-Box | cb592daf7c6fa361e42abd00da4279768998eef3 | [
"MIT"
] | 1 | 2020-11-06T21:04:11.000Z | 2020-11-08T21:40:26.000Z | src/css/templates/searchPage.css | tristanmcc/Instant-Box | cb592daf7c6fa361e42abd00da4279768998eef3 | [
"MIT"
] | null | null | null | .search-bar{
display: flex;
justify-content: center;
padding-bottom: 20px;
}
.search-title{
text-align: center;
padding-top: 20px;
}
.search-page{
display: flex;
flex-direction: column;
min-height: 100vh;
}
.site-content{
flex: 1;
} | 12.65 | 26 | 0.660079 |
b839d9cfbc690d1f364709c7be81aefdb8d06c80 | 1,326 | kt | Kotlin | app/src/main/java/chooongg/box/simple/App.kt | Chooongg/ChooonggBox | f2b4adac448036ef2e48ec2a3ac030c7f8d0fe2e | [
"Apache-2.0"
] | 4 | 2021-07-29T10:08:21.000Z | 2022-03-02T06:57:22.000Z | app/src/main/java/chooongg/box/simple/App.kt | Chooongg/ChooonggBox | f2b4adac448036ef2e48ec2a3ac030c7f8d0fe2e | [
"Apache-2.0"
] | null | null | null | app/src/main/java/chooongg/box/simple/App.kt | Chooongg/ChooonggBox | f2b4adac448036ef2e48ec2a3ac030c7f8d0fe2e | [
"Apache-2.0"
] | 1 | 2021-07-29T10:08:41.000Z | 2021-07-29T10:08:41.000Z | package chooongg.box.simple
import chooongg.box.BoxApplication
import com.umeng.analytics.MobclickAgent
import com.umeng.commonsdk.UMConfigure
import me.dkzwm.widget.srl.IRefreshViewCreator
import me.dkzwm.widget.srl.SmoothRefreshLayout
import me.dkzwm.widget.srl.extra.IRefreshView
import me.dkzwm.widget.srl.extra.footer.ClassicFooter
import me.dkzwm.widget.srl.extra.header.ClassicHeader
import me.dkzwm.widget.srl.indicator.IIndicator
class App : BoxApplication() {
companion object {
init {
SmoothRefreshLayout.setDefaultCreator(object : IRefreshViewCreator {
override fun createHeader(layout: SmoothRefreshLayout): IRefreshView<IIndicator> {
return ClassicHeader(layout.context)
}
override fun createFooter(layout: SmoothRefreshLayout): IRefreshView<IIndicator> {
return ClassicFooter(layout.context)
}
})
}
}
override fun onCreate() {
super.onCreate()
if (!isAppMainProcess()) return
        // Umeng
UMConfigure.setProcessEvent(true)
UMConfigure.setLogEnabled(BuildConfig.DEBUG)
UMConfigure.init(this, UMConfigure.DEVICE_TYPE_PHONE, null)
MobclickAgent.setPageCollectionMode(MobclickAgent.PageMode.AUTO)
}
} | 34 | 98 | 0.696078 |
385cfdd2a1d0b04d2a5e0207fcc658b90fdf944a | 464 | sql | SQL | openGaussBase/testcase/KEYWORDS/Escape/Opengauss_Function_Keyword_Escape_Case0027.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/KEYWORDS/Escape/Opengauss_Function_Keyword_Escape_Case0027.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | openGaussBase/testcase/KEYWORDS/Escape/Opengauss_Function_Keyword_Escape_Case0027.sql | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | [
"MulanPSL-1.0"
] | null | null | null | -- @testpoint: the opengauss keyword escape (non-reserved), used as a sequence name
-- keyword without quotes - succeeds
drop sequence if exists escape;
create sequence escape start 100 cache 50;
drop sequence escape;
-- keyword with double quotes - succeeds
drop sequence if exists "escape";
create sequence "escape" start 100 cache 50;
drop sequence "escape";
-- keyword with single quotes - error as expected
drop sequence if exists 'escape';
create sequence 'escape' start 100 cache 50;
-- keyword with backticks - error as expected
drop sequence if exists `escape`;
create sequence `escape` start 100 cache 50;
| 22.095238 | 44 | 0.765086 |
46df43e6aa46a4cc7d73b6bb53f9e7ad2d82553e | 2,722 | py | Python | tests/schema/test_schema_object.py | GSE-CCL/getting-unstuck-web | f05f2892725a02abd24a0b6a0aab0813a38cb4dc | [
"MIT"
] | 2 | 2021-04-29T14:52:59.000Z | 2021-12-23T00:12:57.000Z | tests/schema/test_schema_object.py | GSE-CCL/getting-unstuck-web | f05f2892725a02abd24a0b6a0aab0813a38cb4dc | [
"MIT"
] | 137 | 2020-06-04T02:07:13.000Z | 2020-08-05T23:13:55.000Z | tests/schema/test_schema_object.py | GSE-CCL/getting-unstuck-web | f05f2892725a02abd24a0b6a0aab0813a38cb4dc | [
"MIT"
] | null | null | null | import mongoengine as mongo
import os
import pytest
from lib import schema
def test_schema_object_defaults(credentials):
schema.connect_db(credentials)
challenge = schema.Challenge(
title="Test challenge - DEFAULTS",
description="This is a challenge created by test_schema_object.py",
min_blockify=schema.Blockify(),
text=schema.ResultText())
challenge.save()
assert len(schema.Challenge.objects(title="Test challenge - DEFAULTS")) == 1
def test_schema_object_correct(credentials):
schema.connect_db(credentials)
min_blockify = schema.Blockify(comments=3,
costumes=1,
sounds=0,
sprites=1,
variables=0)
text = schema.ResultText(explanation="explanation here")
challenge = schema.Challenge(
title="Test challenge - CORRECT",
description="This is a challenge created by test_schema_object.py",
min_instructions_length=50,
min_description_length=50,
min_comments_made=1,
min_blockify=min_blockify,
text=text,
required_text=[["hello",
"world"]],
required_block_categories={"motion": 1},
required_blocks=[{
"event_whenflagclicked": 1
}])
challenge.save()
assert len(schema.Challenge.objects(title="Test challenge - CORRECT")) == 1
def test_schema_object_blank(credentials):
schema.connect_db(credentials)
challenge = schema.Challenge()
with pytest.raises(mongo.ValidationError):
challenge.save()
def test_schema_object_incorrect_rt(credentials):
schema.connect_db(credentials)
challenge = schema.Challenge(min_blockify=schema.Blockify(),
text=schema.ResultText(),
required_text=[[["an array too far"]]])
with pytest.raises(mongo.ValidationError):
challenge.save()
def test_schema_object_incorrect_rc(credentials):
schema.connect_db(credentials)
challenge = schema.Challenge(min_blockify=schema.Blockify(),
text=schema.ResultText(),
required_block_categories=[])
with pytest.raises(mongo.ValidationError):
challenge.save()
def test_schema_object_incorrect_rb(credentials):
db = schema.connect_db(credentials)
challenge = schema.Challenge(
min_blockify=schema.Blockify(),
text=schema.ResultText(),
required_blocks={"name": "event_whenflagclicked"})
with pytest.raises(mongo.ValidationError):
challenge.save()
db.drop_database("test_db")
| 32.795181 | 80 | 0.634827 |
e9bf01a8321db5da5b098c9e57263352eea51a99 | 1,531 | swift | Swift | testdemo/Component/GenericTableView/PagingListDataFetcher.swift | Changzw/every-case-demo | 0ba08146fee2904bd2151fcea8041f03b8b86e71 | [
"Apache-2.0"
] | null | null | null | testdemo/Component/GenericTableView/PagingListDataFetcher.swift | Changzw/every-case-demo | 0ba08146fee2904bd2151fcea8041f03b8b86e71 | [
"Apache-2.0"
] | null | null | null | testdemo/Component/GenericTableView/PagingListDataFetcher.swift | Changzw/every-case-demo | 0ba08146fee2904bd2151fcea8041f03b8b86e71 | [
"Apache-2.0"
] | null | null | null | //
// PagingListDataFetcher.swift
// testdemo
//
// Created by 常仲伟 on 2021/10/26.
// Copyright © 2021 常仲伟. All rights reserved.
//
import Foundation
import Action
import RxSwift
final class PagingListDataFetcher<Item> {
private(set) var pageInfo = PageInfo(pageSize: 20)
@BehaviorRelayed(wrappedValue: [])
private(set) var items: [RecentItem]
private(set) lazy var reloadAllData = Action<Void, RecentItemRsp> {
self.pageInfo.reset()
return self.mockData
}
private(set) lazy var loadMoreData = Action<Void, RecentItemRsp> {
return self.mockData
}
private let bag = DisposeBag()
var errorInfo: ((String)->())?
var mockData: Observable<RecentItemRsp> {
Observable<RecentItemRsp>.create { sink in
DispatchQueue.main.asyncAfter(deadline: .now() + .seconds(1)) {
sink.onNext(recentItems)
sink.onCompleted()
}
return Disposables.create()
}
}
private let reloadAllObservable: Observable<Item>
private let loadMoreObservable: Observable<Item>
init(reloadAllObservable: Observable<Item>,
loadMoreObservable: Observable<Item>) {
self.reloadAllObservable = reloadAllObservable
self.loadMoreObservable = loadMoreObservable
reloadAllData.elements
.bind(to: $items)
.disposed(by: bag)
loadMoreData.elements
// .map{[weak self] in -> [Item]
// guard let self = self else { return []}
// return self.items + $0
// }
.bind(to: $items)
.disposed(by: bag)
}
}
| 25.098361 | 69 | 0.663619 |
8d746d15e7d7570d780afa9100b792c2481f5927 | 3,022 | js | JavaScript | PACKAGES/holarchy/lib/filters/cp-method-act-filter.js | Encapsule/holistic | b27834717b7fcb05a2da1e4a5d2d7c2d0250c7db | [
"MIT"
] | 2 | 2020-02-13T18:39:12.000Z | 2020-09-11T07:48:38.000Z | PACKAGES/holarchy/lib/filters/cp-method-act-filter.js | Encapsule/holistic | b27834717b7fcb05a2da1e4a5d2d7c2d0250c7db | [
"MIT"
] | 23 | 2020-02-17T17:42:07.000Z | 2020-11-18T21:13:27.000Z | PACKAGES/holarchy/lib/filters/cp-method-act-filter.js | Encapsule/holistic | b27834717b7fcb05a2da1e4a5d2d7c2d0250c7db | [
"MIT"
] | null | null | null | "use strict";
function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { _defineProperty(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
// cp-method-act-filter.js
var arccore = require("@encapsule/arccore");
var opcMethodActInputSpec = require("./iospecs/opc-method-act-input-spec");
var opcMethodActOutputSpec = require("./iospecs/opc-method-act-output-spec");
(function () {
var filterDeclaration = {
operationID: "izxx6c96QCu0g8jl6TjAlw",
operationName: "CellProcessor::act Filter",
operationDescription: "Executes a synchronous action request made by an external actor (some function/subsystem outside of CellProcessor).",
inputFilterSpec: _objectSpread(_objectSpread({}, opcMethodActInputSpec), {}, {
____label: "CellProcessor.act Method Request",
____description: "Defines the request format accepted by CellProcessor.act method."
}),
outputFilterSpec: _objectSpread(_objectSpread({}, opcMethodActOutputSpec), {}, {
____label: "CellProcessor.act Method Result",
____description: "Defines the result format returned by the CellProcessor.act method."
}),
bodyFunction: function bodyFunction(externalActorRequest_) {
var response = {
error: null
};
var errors = [];
var inBreakScope = false;
while (!inBreakScope) {
inBreakScope = true;
var actorName = "[".concat(this.filterDescriptor.operationID, "::").concat(this.filterDescriptor.operationName, "]");
var opcActResponse = externalActorRequest_.opcRef.act(externalActorRequest_);
var opcActResponse2 = null;
response = opcActResponse; // Change no external API behavior.
break;
} // while
if (errors.length) {
response.error = errors.join(" ");
}
return response;
} // bodyFunction
}; // filterDeclaration
var factoryResponse = arccore.filter.create(filterDeclaration);
if (factoryResponse.error) {
throw new Error(factoryResponse.error);
}
module.exports = factoryResponse.result;
})(); | 47.968254 | 534 | 0.704831 |
d39ff3080c6af05ee4665875ca2a9002f6ed09e5 | 623 | rb | Ruby | app/models/subscription.rb | kumabook/spread_beaver | 23b0c7126e59c3799b3e91fe5bffd480d8cb5b03 | [
"MIT"
] | 3 | 2016-10-25T18:42:41.000Z | 2017-05-04T06:21:27.000Z | app/models/subscription.rb | kumabook/spread_beaver | 23b0c7126e59c3799b3e91fe5bffd480d8cb5b03 | [
"MIT"
] | 128 | 2017-02-28T10:00:24.000Z | 2019-01-21T06:46:42.000Z | app/models/subscription.rb | kumabook/spread_beaver | 23b0c7126e59c3799b3e91fe5bffd480d8cb5b03 | [
"MIT"
] | null | null | null | # frozen_string_literal: true
class Subscription < ApplicationRecord
belongs_to :user
belongs_to :feed
has_many :subscription_categories, dependent: :destroy
has_many :categories , through: :subscription_categories
after_save :delete_cache_of_subscriptions
after_destroy :delete_cache_of_subscriptions
def self.of(user)
Rails.cache.fetch("subscriptions_of_user-#{user.id}") do
user.subscriptions.to_a
end
end
def delete_cache_of_subscriptions
Rails.cache.delete_matched("subscriptions_of_user-#{user.id}")
User.delete_cache_of_stream(user.stream_id)
end
end
| 27.086957 | 72 | 0.76244 |
95ae4e45880daff0e372916c467e85d045f1e53e | 3,800 | kt | Kotlin | app/src/main/java/com/hoc/weatherapp/koin/RetrofitModule.kt | huachuan/Weather_Rminder_kotlin | 608ff138b793cf73af2677756a623d72bc3de6ce | [
"MIT"
] | null | null | null | app/src/main/java/com/hoc/weatherapp/koin/RetrofitModule.kt | huachuan/Weather_Rminder_kotlin | 608ff138b793cf73af2677756a623d72bc3de6ce | [
"MIT"
] | null | null | null | app/src/main/java/com/hoc/weatherapp/koin/RetrofitModule.kt | huachuan/Weather_Rminder_kotlin | 608ff138b793cf73af2677756a623d72bc3de6ce | [
"MIT"
] | null | null | null | package com.hoc.weatherapp.koin
import com.hoc.weatherapp.BuildConfig
import com.hoc.weatherapp.data.models.TemperatureUnit
import com.hoc.weatherapp.data.remote.BASE_URL_TIMEZONE_DB
import com.hoc.weatherapp.data.remote.OPEN_WEATHER_MAP_APP_ID
import com.hoc.weatherapp.data.remote.OPEN_WEATHER_MAP_BASE_URL
import com.hoc.weatherapp.data.remote.OpenWeatherMapApiService
import com.hoc.weatherapp.data.remote.TIMEZONE_DB_API_KEY
import com.hoc.weatherapp.data.remote.TimezoneDbApiService
import com.squareup.moshi.Moshi
import com.squareup.moshi.kotlin.reflect.KotlinJsonAdapterFactory
import okhttp3.OkHttpClient
import okhttp3.logging.HttpLoggingInterceptor
import org.koin.dsl.context.ModuleDefinition
import org.koin.dsl.module.module
import retrofit2.Retrofit
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory
import retrofit2.converter.moshi.MoshiConverterFactory
const val OPEN_WEATHER_MAP_RETROFIT = "OPEN_WEATHER_MAP_RETROFIT"
const val TIMEZONE_DB_RETROFIT = "TIMEZONE_DB_RETROFIT"
val retrofitModule = module {
single { getOkHttpClient() }
single { getMoshi() }
single(name = OPEN_WEATHER_MAP_RETROFIT) { getOpenWeatherMapRetrofit() }
single { getWeatherApiService() }
single(name = TIMEZONE_DB_RETROFIT) { getTimezoneDbRetrofit() }
single { getTimezoneDbApiService() }
}
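// Illustrative usage (not part of the original file): assuming this module is
// included in the module list passed to startKoin(), a consumer can resolve the
// services with e.g. `val api: OpenWeatherMapApiService = get()` or `by inject()`.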
private fun ModuleDefinition.getTimezoneDbApiService(): TimezoneDbApiService {
return get<Retrofit>(name = TIMEZONE_DB_RETROFIT).create(TimezoneDbApiService::class.java)
}
private fun ModuleDefinition.getTimezoneDbRetrofit(): Retrofit {
return Retrofit.Builder()
.baseUrl(BASE_URL_TIMEZONE_DB)
.client(get())
.addConverterFactory(MoshiConverterFactory.create(get()))
.addCallAdapterFactory(RxJava2CallAdapterFactory.create())
.build()
}
private fun ModuleDefinition.getWeatherApiService(): OpenWeatherMapApiService {
return get<Retrofit>(name = OPEN_WEATHER_MAP_RETROFIT)
.create(OpenWeatherMapApiService::class.java)
}
private fun ModuleDefinition.getOpenWeatherMapRetrofit(): Retrofit {
return Retrofit.Builder()
.baseUrl(OPEN_WEATHER_MAP_BASE_URL)
.client(get())
.addConverterFactory(MoshiConverterFactory.create(get()))
.addCallAdapterFactory(RxJava2CallAdapterFactory.create())
.build()
}
private fun getMoshi(): Moshi {
return Moshi.Builder()
.add(KotlinJsonAdapterFactory())
.build()
}
private fun getOkHttpClient(): OkHttpClient {
return OkHttpClient.Builder()
.apply {
if (BuildConfig.DEBUG) {
HttpLoggingInterceptor()
.setLevel(HttpLoggingInterceptor.Level.BODY)
.let(::addInterceptor)
}
}
.addInterceptor { chain ->
val originalRequest = chain.request()
val host = originalRequest.url().host()
when {
"openweathermap" in host -> originalRequest
.newBuilder()
.url(
originalRequest.url()
.newBuilder()
.addQueryParameter("units", TemperatureUnit.KELVIN.toString())
.addQueryParameter("appid", OPEN_WEATHER_MAP_APP_ID)
.build()
)
"timezonedb" in host -> {
if ("get-time-zone" in originalRequest.url().encodedPath()) {
originalRequest
.newBuilder()
.url(
originalRequest.url()
.newBuilder()
.addQueryParameter("format", "json")
.addQueryParameter("key", TIMEZONE_DB_API_KEY)
.addQueryParameter("by", "position")
.build()
)
} else {
return@addInterceptor chain.proceed(originalRequest)
}
}
else -> return@addInterceptor chain.proceed(originalRequest)
}.build().let(chain::proceed)
}
.build()
} | 33.333333 | 92 | 0.707368 |
dbb02655578e53245cf4e6bb072df78061b3bc8a | 542 | swift | Swift | Sources/QiNiuSDK/Common/Constants.swift | Adrift001/QiNiuSDK | 1ccab5c1f3ac0643cff3ae7b804fedb8dc528b9c | [
"MIT"
] | 3 | 2020-06-11T03:13:49.000Z | 2020-06-20T09:12:54.000Z | Sources/QiNiuSDK/Common/Constants.swift | Adrift001/QiNiuSDK | 1ccab5c1f3ac0643cff3ae7b804fedb8dc528b9c | [
"MIT"
] | null | null | null | Sources/QiNiuSDK/Common/Constants.swift | Adrift001/QiNiuSDK | 1ccab5c1f3ac0643cff3ae7b804fedb8dc528b9c | [
"MIT"
] | null | null | null | //
// Constants.swift
// QiNiuSDK
//
// Created by 荆学涛 on 2019/5/8.
//
public final class Constants {
public static let VERSION = "1.0.0"
public static let BLOCK_SIZE = 4194304
public static let CONNECT_TIMEOUT = 10
public static let WRITE_TIMEOUT = 0
public static let READ_TIMEOUT = 30
public static let DISPATCHER_MAX_REQUESTS = 64
public static let DISPATCHER_MAX_REQUESTS_PER_HOST = 16
public static let CONNECTION_POOL_MAX_IDLE_COUNT = 32
public static let CONNECTION_POOL_MAX_IDLE_MINUTES = 5
}
| 28.526316 | 59 | 0.734317 |
203fd207eec62ac290079341e6dbf27a252d1a4e | 1,528 | ps1 | PowerShell | Resources/Functions/Edit.ps1 | haruki-taka8/Rows | 2066c87475f21d7934f2ed13e2497fb66912381a | [
"MIT"
] | 2 | 2021-10-19T13:57:28.000Z | 2022-01-04T11:16:24.000Z | Resources/Functions/Edit.ps1 | haruki-taka8/Rows | 2066c87475f21d7934f2ed13e2497fb66912381a | [
"MIT"
] | null | null | null | Resources/Functions/Edit.ps1 | haruki-taka8/Rows | 2066c87475f21d7934f2ed13e2497fb66912381a | [
"MIT"
] | 1 | 2021-12-03T02:27:43.000Z | 2021-12-03T02:27:43.000Z | # Wrapper to reinitialize grid
function Update-Grid {
$rows.Rows.Title = 'Rows - Unsaved changes'
$rows.Grid.ItemsSource = $csv
$rows.Grid.Items.Refresh()
}
# Do not allow Undo, Redo and Commit buttons if
# there are no undo steps or ReadWrite is off
function Get-CanEdit ($Stack, $ReadWrite) {
return ($Stack.Count -gt 0) -and $ReadWrite
}
function Add-Row ($At, $Count, $Header, $IsTemplate, $ColumnTemplate) {
# Prepare blank row template
$RowTemplate = [PSCustomObject] @{}
$Header.ForEach({$RowTemplate | Add-Member NoteProperty $_ ''})
# Insert rows
$Now = Get-Date
$ThisAt = $At
for ($i = 0; $i -lt $Count; $i++) {
# Expand <x> notation
$ThisRow = $RowTemplate.PSObject.Copy()
if ($IsTemplate) {
for ($j = 0; $j -lt $Header.Count; $j++) {
$ThisRow.($Header[$j]) = $ColumnTemplate[$j] -replace
'<D>', $Now.ToString('yyyyMMdd') -replace
'<T>', $Now.ToString('HHmmss') -replace
'<#>', $i
}
}
if ($csv) {
# Make IDs arranged in ascending order
$script:csv.Insert($ThisAt, $ThisRow)
$ThisAt++
} else {
[Collections.ArrayList] $script:csv = @($ThisRow)
}
$undo.Push(@{
Operation = 'Insert'
At = $At
OldRow = $ThisRow
})
$rows.Undo.IsEnabled = Get-CanEdit $undo $config.ReadWrite
}
}
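# Illustrative example (not in the original script): with $IsTemplate set and a
# column template entry of 'Row<#>_<D>', Add-Row fills the new rows with values
# like 'Row0_20211026' and 'Row1_20211026' - <D> expands to the current yyyyMMdd,
# <T> to HHmmss and <#> to the loop index of the row being inserted.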
| 29.960784 | 71 | 0.522906 |
0d6a3f5e18120a4b3e0ac590d99d267b1e7f9087 | 3,459 | c | C | bootloader/hal/TARGET_Freescale/TARGET_MK20DX/C90TFS/drvsrc/source/FlashGetSecurityState.c | bygreencn/CMSIS-DAP | a554e66afc29532c4e669735b1556625606add9f | [
"Apache-2.0"
] | 1 | 2019-05-07T15:01:19.000Z | 2019-05-07T15:01:19.000Z | interface/flash_algo_mdk/MKXXX/C90TFS/drvsrc/source/FlashGetSecurityState.c | bygreencn/CMSIS-DAP | a554e66afc29532c4e669735b1556625606add9f | [
"Apache-2.0"
] | null | null | null | interface/flash_algo_mdk/MKXXX/C90TFS/drvsrc/source/FlashGetSecurityState.c | bygreencn/CMSIS-DAP | a554e66afc29532c4e669735b1556625606add9f | [
"Apache-2.0"
] | null | null | null | /************************************************************************
* (c) Copyright Freescale Semiconductor, Inc 2011, All Rights Reserved *
*************************************************************************
*************************************************************************
* *
* Standard Software Flash Driver For FTFx *
* *
* FILE NAME : FlashGetSecurityState.c *
* DATE : January 28,2011 *
* *
* AUTHOR : FPT Team *
* E-mail : [email protected] *
* *
*************************************************************************/
/************************** CHANGES *************************************
0.0.1 06.09.2010 FPT Team Initial Version
0.1.0 06.11.2010 FPT Team Finalize to 0.1.0
0.1.3 09.16.2010 FPT Team Finalize to 0.1.3
0.2.0 09.27.2010 FPT Team Finalize to 0.2.0
0.2.1 01.28.2011 FPT Team Updated the preprocesor
condition to enter Debug state
*************************************************************************/
/* include the header files */
#include "SSD_Types.h"
#include "SSD_FTFx.h"
#include "SSD_FTFx_Internal.h"
/************************************************************************
*
* Function Name : FlashGetSecurityState.c
* Description : This function retrieves the current Flash security
* status, including the security enabling state and
* the backdoor key enabling state.
* Arguments : PFLASH_SSD_CONFIG, UINT8*
* Return Value : UINT32
*
*************************************************************************/
UINT32 FlashGetSecurityState(PFLASH_SSD_CONFIG PSSDConfig, UINT8* securityState)
{
/* store data read from flash register */
UINT8 registerValue;
/*Get flash security register value */
registerValue = REG_READ(PSSDConfig->ftfxRegBase + FTFx_SSD_FSEC_OFFSET);
/* check the status of the flash security bits in the security register */
if(FLASH_SECURITY_STATE_UNSECURED == (registerValue & FTFx_SSD_FSEC_SEC))
{
/* Flash in unsecured state */
*securityState = FLASH_NOT_SECURE;
}
else
{
/* Flash in secured state */
/* check for backdoor key security enable bit */
if(FLASH_SECURITY_STATE_KEYEN == (registerValue & FTFx_SSD_FSEC_KEYEN))
{
/* Backdoor key security enabled */
*securityState = FLASH_SECURE_BACKDOOR_ENABLED;
}
else
{
/* Backdoor key security disabled */
*securityState = FLASH_SECURE_BACKDOOR_DISABLED;
}
}
/* Enter Debug state if enabled */
if (TRUE == (PSSDConfig->DebugEnable))
{
#if ((CPU_CORE == ARM_CM4) && (COMPILER == IAR)) /* Kx Products */
asm
(
" BKPT #0 \n " /* enter Debug state */
);
#endif
}
return(FTFx_OK);
}
/* end of file */
| 40.694118 | 80 | 0.415149 |
490aa41a9f4da0e31ef78980cfe9ef53478a1fc1 | 1,525 | py | Python | _GTW/_OMP/_PAP/Legal_Entity.py | Tapyr/tapyr | 4235fba6dce169fe747cce4d17d88dcf4a3f9f1d | [
"BSD-3-Clause"
] | 6 | 2016-12-10T17:51:10.000Z | 2021-10-11T07:51:48.000Z | _GTW/_OMP/_PAP/Legal_Entity.py | Tapyr/tapyr | 4235fba6dce169fe747cce4d17d88dcf4a3f9f1d | [
"BSD-3-Clause"
] | null | null | null | _GTW/_OMP/_PAP/Legal_Entity.py | Tapyr/tapyr | 4235fba6dce169fe747cce4d17d88dcf4a3f9f1d | [
"BSD-3-Clause"
] | 3 | 2020-03-29T07:37:03.000Z | 2021-01-21T16:08:40.000Z | # -*- coding: utf-8 -*-
# Copyright (C) 2013 Mag. Christian Tanzer All rights reserved
# Glasauergasse 32, A--1130 Wien, Austria. [email protected]
# #*** <License> ************************************************************#
# This module is part of the package GTW.OMP.PAP.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
# #*** </License> ***********************************************************#
#
#++
# Name
# GTW.OMP.PAP.Legal_Entity
#
# Purpose
# Model a legal entity that isn't a natural person
#
# Revision Dates
# 4-Mar-2013 (CT) Creation
# 13-Jun-2014 (RS) `_Ancestor_Essence` is now `_PAP.Group`
# remove attributes inherited from ancestor
# ««revision-date»»···
#--
from _MOM.import_MOM import *
from _GTW._OMP._PAP.Attr_Type import *
from _GTW import GTW
from _GTW._OMP._PAP import PAP
from _TFL.I18N import _
import _GTW._OMP._PAP.Group
_Ancestor_Essence = PAP.Group
class _PAP_Legal_Entity_ (_Ancestor_Essence) :
"""Model a legal entity that isn't a natural person."""
_real_name = "Legal_Entity"
is_partial = True
class _Attributes (_Ancestor_Essence._Attributes) :
_Ancestor = _Ancestor_Essence._Attributes
# end class _Attributes
Legal_Entity = _PAP_Legal_Entity_ # end class
if __name__ != "__main__" :
GTW.OMP.PAP._Export ("*")
### __END__ GTW.OMP.PAP.Legal_Entity
| 28.240741 | 78 | 0.600656 |
3402d63b8320a4daca6f4e804c980e5b192ce1e6 | 1,154 | lua | Lua | flos/num/test.lua | siesta-project/siesta-sfl | bc657c7a4e1a48d513120c38c663edea44c6ea3a | [
"MIT"
] | 3 | 2017-02-02T08:05:55.000Z | 2018-04-01T05:00:59.000Z | flos/num/test.lua | siesta-project/siesta-sfl | bc657c7a4e1a48d513120c38c663edea44c6ea3a | [
"MIT"
] | 11 | 2017-03-14T14:14:02.000Z | 2021-08-01T19:28:01.000Z | flos/num/test.lua | siesta-project/siesta-sfl | bc657c7a4e1a48d513120c38c663edea44c6ea3a | [
"MIT"
] | 3 | 2017-05-26T15:30:44.000Z | 2019-06-15T11:58:33.000Z |
local array = require "flos.num"
v1 = array.Array( 6 )
print(#v1, v1:size())
for i = 1, #v1 do
v1[i] = i
end
v2 = array.Array.empty(6, 6)
k = 0
for i = 1, #v2 do
for j = 1, #v2[i] do
v2[i][j] = i * j + k
end
k = k + 1
end
print(#v2, v2:size())
print('Array1D dot Array1D')
print(v1:dot(v1))
print('Array2D dot Array1D')
print(v2:dot(v1))
print('Array1D dot Array2D')
print(v1:dot(v2))
print('Array2D ^ T')
print(v2 ^ "T")
local function sprint(v1, v2)
print(v1.shape, v2.shape)
end
print('Array1D: reshaping, explicit')
sprint(v1, v1:reshape(0))
print('Array1D: reshaping, implicit')
sprint(v1, v1:reshape())
print('Array1D: reshaping, other')
sprint(v1, v1:reshape(2, 0))
print('Array1D: reshaping, other')
sprint(v1, v1:reshape(0, 2))
print('Array2D: reshaping, explicit')
sprint(v2, v2:reshape(0))
print('Array2D: reshaping, implicit')
sprint(v2, v2:reshape())
print('Array2D: reshaping, other')
sprint(v2, v2:reshape(12, 0))
print('Array2D: reshaping, other')
sprint(v2, v2:reshape(0, 12))
print('Array1D: range')
print(array.Array.range(1, -34, -3))
print('Array1D: copy')
print(array.Array.ones(3, 4):copy())
| 17.753846 | 37 | 0.652513 |
cce1c3bd621eb91f804a47398410bc5669364974 | 89 | rb | Ruby | app/models/pathogen.rb | chanzuckerberg/idseq-web | fb6c841d07858b6b60e9e037adbc02d5cd3110d1 | [
"MIT"
] | 66 | 2017-09-29T20:15:12.000Z | 2021-12-08T22:28:02.000Z | app/models/pathogen.rb | chanzuckerberg/idseq-web | fb6c841d07858b6b60e9e037adbc02d5cd3110d1 | [
"MIT"
] | 1,405 | 2017-08-30T04:35:08.000Z | 2020-11-04T17:04:09.000Z | app/models/pathogen.rb | chanzuckerberg/czid-web | f87c3a53d0d12f977db589feaec71600d615b3dc | [
"MIT"
] | 23 | 2017-11-08T21:45:35.000Z | 2021-06-11T08:22:58.000Z | class Pathogen < ApplicationRecord
has_and_belongs_to_many :pathogen_list_versions
end
| 22.25 | 49 | 0.876404 |
1cf17c03683241b7d5b26275c156ed12954e6f09 | 2,037 | swift | Swift | JChat/JCTool.swift | MrLinTianbao/FuXin | 8922e9817be91b3d2ce22eef077fe2c664ca7a0e | [
"MIT"
] | 1 | 2019-11-20T11:09:13.000Z | 2019-11-20T11:09:13.000Z | JChat/JCTool.swift | MrLinTianbao/FXRedMakoda | 00df0c3d43cbebeae74237b6b3a18cdb7b6e6384 | [
"MIT"
] | null | null | null | JChat/JCTool.swift | MrLinTianbao/FXRedMakoda | 00df0c3d43cbebeae74237b6b3a18cdb7b6e6384 | [
"MIT"
] | null | null | null | //
// JCTool.swift
// JChat
//
// Created by DUONIU_MAC on 2019/6/10.
// Copyright © 2019年 HXHG. All rights reserved.
//
import UIKit
class JCTool: NSObject {
    //MARK: base64 string to image
static func base64ToImage(base : String) -> UIImage {
if base == "" {
return UIImage()
}
let imageData = Data.init(base64Encoded: base)
let image = UIImage.init(data: imageData!)
return image!
}
    //MARK: image to base64
static func imageToBase64(image : UIImage) -> String {
let imageData = image.pngData()
let imageBase64String = imageData?.base64EncodedString()
return imageBase64String!
}
    //MARK: Show a custom toast-style alert
static func showText(_text:String) {
let windwow = UIApplication.shared.keyWindow
let showView = UIView()
showView.backgroundColor = UIColor.black
showView.frame = CGRect.init(x: 1, y: 1, width: 1, height: 1)
showView.layer.cornerRadius = 5.0
showView.layer.masksToBounds = true
windwow?.addSubview(showView)
let label = UILabel()
let size = _text.size(font: RATIO(maxNum: 14), width: RATIO(maxNum: 300))
label.frame = .init(x: 0, y: 0, width: size.width, height: size.height)
label.text = _text
label.textColor = UIColor.white
label.textAlignment = .center
label.backgroundColor = UIColor.clear
label.font = UIFont.systemFont(ofSize: RATIO(maxNum: 14))
label.numberOfLines = 0
showView.addSubview(label)
showView.frame = .init(x: (ScreenW-size.width)/2, y: ScreenH - size.height - 20, width: size.width, height: size.height)
UIView.animate(withDuration: 0.2, delay: 1, options: .curveLinear, animations: {
showView.alpha = 0
        }) { _ in
showView.removeFromSuperview()
}
}
}
| 26.802632 | 128 | 0.566028 |
0df33303049c9af1e3f47dcf8289c2ea5d06c5f4 | 478 | cs | C# | Scripts/Math.cs | evilantAU/godotfps | d8ede18fba9d3005eef0b62e990f00b25676c59e | [
"MIT"
] | null | null | null | Scripts/Math.cs | evilantAU/godotfps | d8ede18fba9d3005eef0b62e990f00b25676c59e | [
"MIT"
] | null | null | null | Scripts/Math.cs | evilantAU/godotfps | d8ede18fba9d3005eef0b62e990f00b25676c59e | [
"MIT"
] | null | null | null | using Godot;
using System;
using static Godot.Mathf;
public static class FMath
{
public static Vector3 Flattened(this Vector3 Self)
{
return new Vector3(Self.x, 0, Self.z);
}
public static Vector3 ClampVec3(Vector3 Vec, float Min, float Max)
{
return Vec.Normalized() * Mathf.Clamp(Vec.Length(), Min, Max);
}
public static float LoopRotation(float Rot)
{
Rot = Rot % 360;
if(Rot < 0)
Rot += 360;
if(Rot == 360)
Rot = 0;
return Rot;
}
} | 15.933333 | 70 | 0.650628 |
e4c1995eec2ad5f024ef2391ea486032572a9577 | 775 | sql | SQL | conf/evolutions/default/1.sql | serdar-/navprot_web | c5c3b4a3da1edd620170df8370caba5e6b8adfc0 | [
"Apache-2.0"
] | null | null | null | conf/evolutions/default/1.sql | serdar-/navprot_web | c5c3b4a3da1edd620170df8370caba5e6b8adfc0 | [
"Apache-2.0"
] | null | null | null | conf/evolutions/default/1.sql | serdar-/navprot_web | c5c3b4a3da1edd620170df8370caba5e6b8adfc0 | [
"Apache-2.0"
] | null | null | null | # --- Created by Ebean DDL
# To stop Ebean DDL generation, remove this comment and start using Evolutions
# --- !Ups
create table task (
id varchar(255) primary key,
email varchar(255),
initial_pdb varchar(255),
initial_chain varchar(255),
final_pdb varchar(255),
final_chain varchar(255),
rmsd varchar(255),
is_done integer(1),
is_running integer(1),
is_cancelled integer(1),
upload_date timestamp,
start_time timestamp,
end_time timestamp)
;
# --- !Downs
PRAGMA foreign_keys = OFF;
drop table task;
PRAGMA foreign_keys = ON;
| 23.484848 | 78 | 0.51871 |
143b243c5c1b36d1a0538f193e479d7157947347 | 740 | tsx | TypeScript | src/App.tsx | NastyZ98/aws-cognito-amplify-video | d3622eb2ac7763bc2c410eab11267695c497c40e | [
"MIT"
] | null | null | null | src/App.tsx | NastyZ98/aws-cognito-amplify-video | d3622eb2ac7763bc2c410eab11267695c497c40e | [
"MIT"
] | null | null | null | src/App.tsx | NastyZ98/aws-cognito-amplify-video | d3622eb2ac7763bc2c410eab11267695c497c40e | [
"MIT"
] | null | null | null | import React from "react";
import styled from "styled-components";
import { Container, Logo } from "shared/components/Auth/common";
const AppContainer = styled(Container)`
flex: 1;
justify-content: center;
align-items: center;
`;
const App = () => (
<AppContainer>
<Logo
style={{ width: "20%" }}
src="https://camo.githubusercontent.com/a1c4be4671e634c1461fe578cca2c97c7b11e486/68747470733a2f2f73332d75732d776573742d322e616d617a6f6e6177732e636f6d2f747261636b69742d7075626c69632d6172746966616374732f6769746875622d706167652f6c6f676f2e706e67"
alt="logo"
/>
<h1 style={{ textAlign: "center" }}>
Sorry, you are not allowed to access this broadcast
</h1>
</AppContainer>
);
export default App;
| 29.6 | 248 | 0.733784 |
0d2b36ae50eb20adca31cfddc7cd6e13b223ccd6 | 348 | c | C | lib/my/my_compute_square_root.c | iElden/CPE_matchstick | a0744b6e104625a526a330239b54e627287f7fa1 | [
"MIT"
] | null | null | null | lib/my/my_compute_square_root.c | iElden/CPE_matchstick | a0744b6e104625a526a330239b54e627287f7fa1 | [
"MIT"
] | null | null | null | lib/my/my_compute_square_root.c | iElden/CPE_matchstick | a0744b6e104625a526a330239b54e627287f7fa1 | [
"MIT"
] | null | null | null | /*
** EPITECH PROJECT, 2017
** my_compute_square_root
** File description:
** display the square root of a number
*/
/*
** returns i if i * i == nb (perfect square), otherwise 0
*/
int square_root(int nb)
{
    int i;
    i = nb / 2 + 1;
    while (i >= 1) {
        if (i * i == nb) {
            return (i);
        }
        i--;
    }
    return (0);
}
int my_compute_square_root(int nb)
{
int result;
result = square_root(nb);
return (result);
}
| 12 | 38 | 0.600575 |
b2cbb7e4e5b9fb8dd33a2d6467ee4a5df0a1aaef | 4,208 | sql | SQL | src/test/resources/data.sql | CyberCastle/specification-with-projection | 743222c71772db10ffd19b213fecaf80437779b1 | [
"MIT"
] | 133 | 2016-09-29T14:40:13.000Z | 2022-03-26T20:33:48.000Z | src/test/resources/data.sql | Vinh-Nguyen-E/specification-with-projection | e5118cb85d07541a49117628ef20a01057f5ccfb | [
"MIT"
] | 18 | 2017-07-14T18:55:39.000Z | 2022-01-02T16:01:38.000Z | src/test/resources/data.sql | Vinh-Nguyen-E/specification-with-projection | e5118cb85d07541a49117628ef20a01057f5ccfb | [
"MIT"
] | 46 | 2017-05-02T22:11:46.000Z | 2022-03-26T20:33:52.000Z | insert into form_type values('01', 'ก.01');
insert into form_type values('04', 'ก.04');
insert into form_type values('05', 'ก.05');
insert into form_type values('06', 'ก.06');
insert into form_type values('07', 'ก.07');
insert into form_type values('08', 'ก.08');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'descriptiontest', 'ก.01', 'ต้นฉบับ', 'false', '01');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description', 'ก.01', 'ต้นฉบับ', 'true', '01');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description2', 'ก.01', 'สำเนา', 'false', '01');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description3', 'ก.04', 'ทั้งหมด', 'false', '04');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description4', 'ก.08', 'สำเนา', 'false', '08');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description5', 'ก.07', 'ต้นฉบับ', 'false', '07');
insert into document (id, description, category, type, parent_id, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description6', 'ก.01', 'ต้นฉบับ', 2, 'false', '01');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description7', 'ก.05', 'ต้นฉบับ', 'false', '05');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description8', 'ก.06', 'ต้นฉบับ', 'false', '06');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description9', 'ก.06', 'สำเนา', 'false', '06');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description10', 'ก.05', 'ต้นฉบับ', 'false', '05');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description11', 'ก.06', 'ทั้งหมด', 'false', '06');
insert into document (id, description, category, type, FLAG_HAS_SUB, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, 'description12', 'ก.06', 'ต้นฉบับ', 'false', '06');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.01', 'ต้นฉบับ', 'false', 1, '01');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.01', 'สำเนา', 'false', 3, '01');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.04', 'ทั้งหมด', 'false', 4, '04');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.08', 'สำเนา', 'false', 5, '08');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.07', 'ต้นฉบับ', 'false', 6, '07');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.05', 'ต้นฉบับ', 'false', 8, '05');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.06', 'ต้นฉบับ', 'false', 9, '06');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.06', 'สำเนา', 'false', 10, '06');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.05', 'ต้นฉบับ', 'false', 11, '05');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.06', 'ทั้งหมด', 'false', 12, '06');
insert into document (id, description, category, type, FLAG_HAS_SUB, parent_id, FORM_TYPE_ID) values(DOCUMENT_SEQ.nextVal, '', 'ก.06', 'ต้นฉบับ', 'false', 13, '06'); | 135.741935 | 176 | 0.720532 |
936d11eb46bee9bf131ce01174acea2a0944cd1c | 308 | dart | Dart | lib/common/abstract_base.dart | comecsoftdev/public_flutter_flashsale_repo | 2d062963452ba9a2575865b3c17639a6df6c3829 | [
"MIT"
] | 4 | 2022-02-09T02:43:39.000Z | 2022-02-14T02:58:26.000Z | lib/common/abstract_base.dart | comecsoftdev/public_flutter_flashsale_repo | 2d062963452ba9a2575865b3c17639a6df6c3829 | [
"MIT"
] | null | null | null | lib/common/abstract_base.dart | comecsoftdev/public_flutter_flashsale_repo | 2d062963452ba9a2575865b3c17639a6df6c3829 | [
"MIT"
] | null | null | null | abstract class RepositoryResponse{
final int? statusCode;
final String? msg;
RepositoryResponse({this.statusCode, this.msg});
}
abstract class BlocEventBase {
const BlocEventBase();
List<Object?> get props;
}
abstract class BlocStateBase {
const BlocStateBase();
List<Object?> get props;
} | 17.111111 | 50 | 0.733766 |
606c17c40e7057c80518670d755dad20d3910ad3 | 24 | sql | SQL | migrations/000002_add_article_table.down.sql | polaris1119/testpkg | 2894f502043722255fc69c403d999961e0f136e6 | [
"Apache-2.0"
] | 3 | 2020-06-19T10:03:38.000Z | 2020-08-27T03:33:35.000Z | migrations/000002_add_article_table.down.sql | polaris1119/testpkg | 2894f502043722255fc69c403d999961e0f136e6 | [
"Apache-2.0"
] | null | null | null | migrations/000002_add_article_table.down.sql | polaris1119/testpkg | 2894f502043722255fc69c403d999961e0f136e6 | [
"Apache-2.0"
] | null | null | null | DROP TABLE
article;
| 8 | 12 | 0.666667 |
0a3ff63b180a8e24b8b2fb9a7d27e04ff99ea4f0 | 70,989 | h | C | wisdom/sentinel/x64/hasp_api.h | chenghuaiyu/caffe | b16e30408a8c26c1bb72b807ae04f81c7389869c | [
"BSD-2-Clause"
] | 1 | 2020-01-18T16:44:03.000Z | 2020-01-18T16:44:03.000Z | wisdom/sentinel/x64/hasp_api.h | chenghuaiyu/caffe | b16e30408a8c26c1bb72b807ae04f81c7389869c | [
"BSD-2-Clause"
] | null | null | null | wisdom/sentinel/x64/hasp_api.h | chenghuaiyu/caffe | b16e30408a8c26c1bb72b807ae04f81c7389869c | [
"BSD-2-Clause"
] | null | null | null | /**
* \mainpage Sentinel Licensing API
* \file hasp_api.h Sentinel Licensing API declarations
*
* Copyright (C) 2013, SafeNet, Inc. All rights reserved.
*/
#ifndef __HASP_API_H__
#define __HASP_API_H__
#if !defined(WITH_AKSTYPES) && !defined(WITH_OEFTYPES)
# if defined(_MSC_VER) || defined(__WATCOMC__) || defined(__BORLANDC__)
typedef unsigned __int64 hasp_u64_t;
typedef signed __int64 hasp_s64_t;
# else
typedef unsigned long long hasp_u64_t;
typedef signed long long hasp_s64_t;
# endif
# if defined(_MSC_VER)
typedef unsigned long hasp_u32_t;
typedef signed long hasp_s32_t;
# else
typedef unsigned int hasp_u32_t;
typedef signed int hasp_s32_t;
# endif
typedef unsigned short hasp_u16_t;
typedef signed short hasp_s16_t;
typedef signed char hasp_s8_t;
typedef unsigned char hasp_u8_t;
#endif
#if defined(_WIN32) || defined(WIN32) || defined(_MSC_VER) || defined(__BORLANDC__)
#if defined(__MINGW32__)
# define HASP_CALLCONV __declspec(dllimport) __stdcall
#else
# define HASP_CALLCONV __stdcall
#endif
#else
# define HASP_CALLCONV
#endif
#ifdef __cplusplus
extern "C" {
#endif
/**
* @defgroup hasp_feature_ids Legacy HASP HL Run-time API: Feature ID defines
*
* @{
*/
/**
* \brief Legacy HASP HL Run-time API: "Feature Type" mask
*
* AND-mask used to identify Feature type.
*/
#define HASP_FEATURETYPE_MASK 0xffff0000
/**
* \brief Legacy HASP HL Run-time API: "Program Number Feature" type
*
* After AND-ing with HASP_FEATURETYPE_MASK the Feature type contain this value.
*/
#define HASP_PROGNUM_FEATURETYPE 0xffff0000
/**
* \brief Legacy HASP HL Run-time API: "Program Number Mask"
*
* AND-mask used to extract the Program Number from a
* "prognum" Feature ID.
*/
#define HASP_PROGNUM_MASK 0x000000ff
/**
* \brief Legacy HASP HL Run-time API: "Program Number Options" mask
*
* AND-mask used to identify Program Number options:
* <ul>
* <li>HASP_PROGNUM_OPT_NO_LOCAL</li>
* <li>HASP_PROGNUM_OPT_NO_REMOTE</li>
* <li>HASP_PROGNUM_OPT_PROCESS</li>
* <li>HASP_PROGNUM_OPT_CLASSIC</li>
* <li>HASP_PROGNUM_OPT_TS</li>
* </ul>
*
* 3 bits of the mask are reserved for future extensions and currently unused.
* Initialize them with zero.
*/
#define HASP_PROGNUM_OPT_MASK 0x0000ff00
/**
* \brief Legacy HASP HL Run-time API: "Program Number" option
*
* Disables the search for local licenses.
*/
#define HASP_PROGNUM_OPT_NO_LOCAL 0x00008000
/**
* \brief Legacy HASP HL Run-time API: "Program Number" option
*
* Disables the search for network licenses.
*/
#define HASP_PROGNUM_OPT_NO_REMOTE 0x00004000
/**
* \brief Legacy HASP HL Run-time API: "Program Number" option
*
* Sets session count of network licenses to "per-process".
*/
#define HASP_PROGNUM_OPT_PROCESS 0x00002000
/**
* \brief Legacy HASP HL Run-time API: "Program Number" option
*
* Enables the API to access "classic" (HASP4 or earlier) keys.
*/
#define HASP_PROGNUM_OPT_CLASSIC 0x00001000
/**
* \brief Legacy HASP HL Run-time API: "Program Number" option
*
* Ignores the presence of terminal servers.
*/
#define HASP_PROGNUM_OPT_TS 0x00000800
/**
* \brief The Sentinel default Feature ID
*
* Available in every Sentinel key.
*/
#define HASP_DEFAULT_FID 0
/**
* \brief Legacy HASP HL Run-time API: The HASP default Feature ID
*
* Available in every legacy HASP hardware key.
*/
#define HASP_PROGNUM_DEFAULT_FID (HASP_DEFAULT_FID | HASP_PROGNUM_FEATURETYPE)
/**
* @}
*/
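/*
 * Usage sketch (illustrative only): how the Feature-type masks above are
 * typically combined to test for a legacy "prognum" Feature ID and to
 * extract its Program Number. The helper names is_prognum_feature() and
 * prognum_of() are hypothetical.
 *
 * \code
 * static int is_prognum_feature(hasp_feature_t fid)
 * {
 *     return (fid & HASP_FEATURETYPE_MASK) == HASP_PROGNUM_FEATURETYPE;
 * }
 *
 * static hasp_u32_t prognum_of(hasp_feature_t fid)
 * {
 *     return (hasp_u32_t)(fid & HASP_PROGNUM_MASK);
 * }
 * \endcode
 */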
/**
* @defgroup hasp_file_ids Memory File ID defines
*
* @{
*/
/**
* \brief Legacy HASP HL Run-time API: HASP4 memory file
*
* File ID for HASP4-compatible memory contents w/o FAS.
*/
#define HASP_FILEID_MAIN 0xfff0
/**
* \brief Legacy HASP HL Run-time API: HASP4 FAS memory file
*
* (Dummy) File ID for the license data area of memory contents.
*/
#define HASP_FILEID_LICENSE 0xfff2
/**
* \brief Sentinel secure writable memory file
*
* File ID for Sentinel secure writable memory.
*/
#define HASP_FILEID_RW 0xfff4
/**
* \brief Sentinel secure read only memory file
*
* File ID for Sentinel read only memory.
*/
#define HASP_FILEID_RO 0xfff5
/**
 * \brief Sentinel dynamic memory file ID available range
*
* File ID lower limit for Sentinel dynamic memory file.
*/
#define HASP_FILEID_DYNAMIC_FIRST 0x1
/**
 * \brief Sentinel dynamic memory file ID available range
*
* File ID upper limit for Sentinel dynamic memory file.
*/
#define HASP_FILEID_DYNAMIC_LAST 0xffbf
/**
* @}
*/
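/*
 * Usage sketch (illustrative only): distinguishing the fixed memory files
 * from the dynamic-memory File ID range defined above. The helper name
 * is_dynamic_fileid() is hypothetical.
 *
 * \code
 * static int is_dynamic_fileid(hasp_fileid_t fileid)
 * {
 *     return fileid >= HASP_FILEID_DYNAMIC_FIRST &&
 *            fileid <= HASP_FILEID_DYNAMIC_LAST;
 * }
 * \endcode
 */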
/**
* @defgroup hasp_error_codes Run-time API Status Codes
*
* @{
*/
enum hasp_error_codes
{
/** Request successfully completed */
HASP_STATUS_OK = 0,
/** Request exceeds memory range of a Sentinel file */
HASP_MEM_RANGE = 1,
/** Legacy HASP HL Run-time API: Unknown/Invalid Feature ID option */
HASP_INV_PROGNUM_OPT = 2,
/** System is out of memory */
HASP_INSUF_MEM = 3,
/** Too many open Features/login sessions */
HASP_TMOF = 4,
/** Access to Feature, Sentinel protection key or functionality denied */
HASP_ACCESS_DENIED = 5,
/** Legacy decryption function cannot work on Feature */
HASP_INCOMPAT_FEATURE = 6,
/** Sentinel protection key not available */
HASP_HASP_NOT_FOUND = 7,
/* Deprecated - use HASP_HASP_NOT_FOUND */
HASP_CONTAINER_NOT_FOUND = 7,
/** Encrypted/decrypted data length too short to execute function call */
HASP_TOO_SHORT = 8,
/** Invalid login handle passed to function */
HASP_INV_HND = 9,
/** Specified File ID not recognized by API */
HASP_INV_FILEID = 10,
/** Installed driver or daemon too old to execute function */
HASP_OLD_DRIVER = 11,
/** Real-time clock (rtc) not available */
HASP_NO_TIME = 12,
/** Generic error from host system call */
HASP_SYS_ERR = 13,
/** Required driver not installed */
HASP_NO_DRIVER = 14,
/** Invalid XML format */
HASP_INV_FORMAT = 15,
/** Unable to execute function in this context; the requested
* functionality is not implemented */
HASP_REQ_NOT_SUPP = 16,
/** Binary data passed to function does not contain valid update */
HASP_INV_UPDATE_OBJ = 17,
/** Sentinel protection key not found */
HASP_KEYID_NOT_FOUND = 18,
/** Required XML tags not found; Contents in binary data are missing
* or invalid */
HASP_INV_UPDATE_DATA = 19,
/** Update request not supported by Sentinel protection key */
HASP_INV_UPDATE_NOTSUPP = 20,
/** Update counter set incorrectly */
HASP_INV_UPDATE_CNTR = 21,
/** Invalid Vendor Code passed */
HASP_INV_VCODE = 22,
/** Sentinel protection key does not support encryption type */
HASP_ENC_NOT_SUPP = 23,
/** Passed time value outside supported value range */
HASP_INV_TIME = 24,
/** Real-time clock battery out of power */
HASP_NO_BATTERY_POWER = 25,
/** Acknowledge data requested by update, but ack_data parameter
* is NULL */
HASP_NO_ACK_SPACE = 26,
/** Program running on a terminal server */
HASP_TS_DETECTED = 27,
/** Program running on a Remote Desktop */
HASP_RDP_DETECTED = 27,
/** Requested Feature type not implemented */
HASP_FEATURE_TYPE_NOT_IMPL = 28,
/** Unknown algorithm used in H2R/V2C file */
HASP_UNKNOWN_ALG = 29,
/** Signature verification operation failed */
HASP_INV_SIG = 30,
/** Requested Feature not available */
HASP_FEATURE_NOT_FOUND = 31,
/** Access log not enabled */
HASP_NO_LOG = 32,
/** Communication error between API and local Sentinel License Manager */
HASP_LOCAL_COMM_ERR = 33,
/** Vendor Code not recognized by API */
HASP_UNKNOWN_VCODE = 34,
/** Invalid XML specification */
HASP_INV_SPEC = 35,
/** Invalid XML scope */
HASP_INV_SCOPE = 36,
/** Too many Sentinel protection keys match the scope */
HASP_TOO_MANY_KEYS = 37,
/** Too many concurrent user sessions currently connected */
HASP_TOO_MANY_USERS = 38,
/** Session been interrupted */
HASP_BROKEN_SESSION = 39,
/** Communication error between local and remote Sentinel License Managers */
HASP_REMOTE_COMM_ERR = 40,
/** Feature expired */
HASP_FEATURE_EXPIRED = 41,
/** Sentinel License Manager version too old */
HASP_OLD_LM = 42,
/** Input/Output error occurred in secure storage area of Sentinel SL key OR
* a USB error occurred when communicating with a Sentinel HL key */
HASP_DEVICE_ERR = 43,
/** Update installation not permitted; This update was already applied */
HASP_UPDATE_BLOCKED = 44,
/** System time has been tampered with */
HASP_TIME_ERR = 45,
/** Communication error occurred in secure channel */
HASP_SCHAN_ERR = 46,
/** Corrupt data exists in secure storage area of Sentinel SL protection key */
HASP_STORAGE_CORRUPT = 47,
/** Unable to find Vendor library */
HASP_NO_VLIB = 48,
/** Unable to load Vendor library */
HASP_INV_VLIB = 49,
/** Unable to locate any Feature matching scope */
HASP_SCOPE_RESULTS_EMPTY = 50,
/** Program running on a virtual machine */
HASP_VM_DETECTED = 51,
/** Sentinel SL key incompatible with machine hardware; Sentinel SL key is locked
* to different hardware. OR:
* In the case of a V2C file, conflict between Sentinel SL key data and machine
* hardware data; Sentinel SL key locked to different hardware */
HASP_HARDWARE_MODIFIED = 52,
/** Login denied because of user restrictions */
HASP_USER_DENIED = 53,
/** Trying to install a V2C file with an update counter that is out of
* sequence with the update counter on the Sentinel protection key.
* The update counter value in the V2C file is lower than the value in
* Sentinel protection key. */
HASP_UPDATE_TOO_OLD = 54,
/** Trying to install a V2C file with an update counter that is out of
* sequence with update counter in the Sentinel protection key. The
* first value in the V2C file is greater than the value in the
* Sentinel protection key. */
HASP_UPDATE_TOO_NEW = 55,
/** Vendor library version too old */
HASP_OLD_VLIB = 56,
/** Upload via ACC failed, e.g. because of illegal format */
HASP_UPLOAD_ERROR = 57,
/** Invalid XML "recipient" parameter */
HASP_INV_RECIPIENT = 58,
/** Invalid XML "action" parameter */
HASP_INV_ACTION = 59,
/* Deprecated - use HASP_INV_ACTION */
HASP_INV_DETACH_ACTION = 59,
/** Scope does not specify a unique Product */
HASP_TOO_MANY_PRODUCTS = 60,
/** Invalid Product information */
HASP_INV_PRODUCT = 61,
/** Unknown Recipient; update can only be applied to the
* Recipient specified in hasp_detach(), and not to this computer */
HASP_UNKNOWN_RECIPIENT = 62,
/** Invalid duration */
HASP_INV_DURATION = 63,
/** Cloned secure storage area detected */
HASP_CLONE_DETECTED = 64,
/** Specified V2C update already installed in the LLM */
HASP_UPDATE_ALREADY_ADDED = 65,
/** Specified Hasp Id is in Inactive state */
HASP_HASP_INACTIVE = 66,
/** No detachable feature exists */
HASP_NO_DETACHABLE_FEATURE = 67,
/** No detachable feature exists (typo kept for compatibility */
HASP_NO_DEATCHABLE_FEATURE = 67,
/** Scope does not specify a unique host */
HASP_TOO_MANY_HOSTS = 68,
/** Rehost is not allowed for any license */
HASP_REHOST_NOT_ALLOWED = 69,
/** License is rehosted to other machine */
HASP_LICENSE_REHOSTED = 70,
/** Old rehost license try to apply */
HASP_REHOST_ALREADY_APPLIED = 71,
/** File not found or access denied */
HASP_CANNOT_READ_FILE = 72,
/** Extension of license not allowed as number of detached
* licenses is greater than current concurrency count */
HASP_EXTENSION_NOT_ALLOWED = 73,
/** Detach of license not allowed as product
* contains VM disabled feature and host machine is a virtual machine */
HASP_DETACH_DISABLED = 74,
/** Rehost of license not allowed as container
* contains VM disabled feature and host machine is a virtual machine */
HASP_REHOST_DISABLED = 75,
/** Format SL-AdminMode or migrate SL-Legacy to SL-AdminMode not allowed
* as container has detached license */
HASP_DETACHED_LICENSE_FOUND = 76,
/** Recipient of the requested operation is older than expected */
HASP_RECIPIENT_OLD_LM = 77,
/** Secure storage ID mismatch */
HASP_SECURE_STORE_ID_MISMATCH = 78,
/** Duplicate Hostname found while key contains Hostname Fingerprinting */
HASP_DUPLICATE_HOSTNAME = 79,
/** The Sentinel License Manager is required for this operation */
HASP_MISSING_LM = 80,
/** Attempting to consume multiple executions during log in to a Feature.
* However, the license for the Feature does not contain enough remaining executions */
HASP_FEATURE_INSUFFICIENT_EXECUTION_COUNT = 81,
/** Attempting to perform an operation not compatible with target platform */
HASP_INCOMPATIBLE_PLATFORM = 82,
/** The key is disabled due to suspected tampering */
HASP_HASP_DISABLED = 83,
/** API dispatcher: API for this Vendor Code was not found */
HASP_NO_API_DYLIB = 400,
/** API dispatcher: Unable to load API; DLL possibly corrupt? */
HASP_INV_API_DYLIB = 401,
/** Invalid function parameter */
HASP_INVALID_PARAMETER = 501,
/** C++ API: Object incorrectly initialized */
HASP_INVALID_OBJECT = 500,
/** C++ API: Logging in twice to the same object */
HASP_ALREADY_LOGGED_IN = 502,
/** C++ API: Logging out twice of the same object */
HASP_ALREADY_LOGGED_OUT = 503,
/** .NET API: Incorrect use of system or platform */
HASP_OPERATION_FAILED = 525,
/* Internal use: no classic memory extension block available */
HASP_NO_EXTBLOCK = 600,
/* Internal use: invalid port type */
HASP_INV_PORT_TYPE = 650,
/* Internal use: invalid port value */
HASP_INV_PORT = 651,
/* Dot-Net DLL found broken */
HASP_NET_DLL_BROKEN = 652,
/** Requested function not implemented */
HASP_NOT_IMPL = 698,
/** Internal error occurred in API */
HASP_INT_ERR = 699,
/* Reserved for Sentinel helper libraries */
HASP_FIRST_HELPER = 2001,
/* Reserved for Sentinel Activation API */
HASP_FIRST_HASP_ACT = 3001,
HASP_NEXT_FREE_VALUES = 7001,
};
/**
* @}
*/
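/*
 * Usage sketch (illustrative only): applications often map the most common
 * status codes above to user-readable text. A minimal, deliberately
 * incomplete mapping might look like this; the helper name status_text()
 * is hypothetical.
 *
 * \code
 * static const char *status_text(hasp_status_t status)
 * {
 *     switch (status) {
 *     case HASP_STATUS_OK:         return "OK";
 *     case HASP_FEATURE_NOT_FOUND: return "Feature not found";
 *     case HASP_FEATURE_EXPIRED:   return "Feature expired";
 *     case HASP_HASP_NOT_FOUND:    return "Protection key not found";
 *     case HASP_TOO_MANY_USERS:    return "Too many users";
 *     default:                     return "Other error (see hasp_error_codes)";
 *     }
 * }
 * \endcode
 */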
/**
* @defgroup hasp_general Sentinel typedefs and macros
*
* @{
*/
/** A Sentinel status code */
typedef enum hasp_error_codes hasp_status_t;
/** Sentinel size type */
typedef hasp_u32_t hasp_size_t;
/** Sentinel connection handle */
typedef hasp_u32_t hasp_handle_t;
/** Sentinel Feature ID */
typedef hasp_u32_t hasp_feature_t;
/** Sentinel File ID */
typedef hasp_u32_t hasp_fileid_t;
/** Sentinel timestamp, representing elapsed seconds since
* Jan-01-1970 0:00:00 GMT */
typedef hasp_u64_t hasp_time_t;
/** Sentinel Vendor Code buffer */
typedef const void *hasp_vendor_code_t;
/** format to retrieve update info (C2V) */
#define HASP_UPDATEINFO "<haspformat format=\"updateinfo\"/>"
/* format to retrieve a small update info (C2V) */
#define HASP_FASTUPDATEINFO "<haspformat format=\"fastupdateinfo\"/>"
/** format to retrieve session info */
#define HASP_SESSIONINFO "<haspformat format=\"sessioninfo\"/>"
/** format to retrieve key/hardware info */
#define HASP_KEYINFO "<haspformat format=\"keyinfo\"/>"
/** format to retrieve host fingerprint info */
#define HASP_FINGERPRINT "<haspformat format=\"host_fingerprint\"/>"
/** format to retrieve recipient parameter for hasp_transfer */
#define HASP_RECIPIENT "<haspformat root=\"location\">" \
" <license_manager>" \
" <attribute name=\"id\" />" \
" <attribute name=\"time\" />" \
" <element name=\"hostname\" />" \
" <element name=\"version\" />" \
" <element name=\"host_fingerprint\" />" \
" </license_manager>" \
"</haspformat>"
/*
* Invalid handle value for hasp_login() and hasp_login_scope() functions.
*/
#define HASP_INVALID_HANDLE_VALUE 0
/**
* \brief Minimum block size for hasp_encrypt() and hasp_decrypt() functions.
*/
#define HASP_MIN_BLOCK_SIZE 16
/**
* \brief Minimum block size for hasp_legacy_encrypt()
* and hasp_legacy_decrypt() legacy functions.
*/
#define HASP_MIN_BLOCK_SIZE_LEGACY 8
/**
* @}
*/
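/*
 * Usage sketch (illustrative only): hasp_encrypt()/hasp_decrypt() below
 * require at least HASP_MIN_BLOCK_SIZE bytes, so buffers shorter than that
 * are commonly padded up to the minimum before encryption. The helper name
 * padded_length() is hypothetical.
 *
 * \code
 * static hasp_size_t padded_length(hasp_size_t length)
 * {
 *     if (length < HASP_MIN_BLOCK_SIZE)
 *         return HASP_MIN_BLOCK_SIZE;
 *     return length;
 * }
 * \endcode
 */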
/**
* @defgroup hasp_basic The Basic Sentinel Licensing API
*
* @{
*/
/**
* \brief Logs into a Feature and thereby establishes a session context.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* This function establishes a context to a Sentinel protection key
* containing a license for the requested Feature ID.
*
* The requisite Vendor Codes are stored in a VendorCodes folder in your
* system. Without the correct Vendor Code, the function call cannot succeed.
*
* You can open up to 512 simultaneous login sessions.
*
* <b>Legacy HASP Remarks</b><br>
* For local prognum Features, concurrency is not handled and each login
* performs a decrement if it is a counting license.
*
* Network "prognum" features continue to use the old HASP/Sentinel LM login logic,
* with its inherent limitations.
*
* There is only support for concurrent usage of <b>one</b> server (global
* server address).
*
* With "Program Number" features (see \ref HASP_FEATURETYPE_MASK), 8
* bits are reserved for legacy options (see \ref HASP_PROGNUM_OPT_MASK,
* currently 5 bits are used):
* <ul>
* <li>only local</li>
* <li>only remote</li>
* <li>login is counted per process ID</li>
* <li>disable terminal server check</li>
* <li>enable access to old (HASP3/HASP4) keys</li>
* </ul>
*
* \param feature_id Unique identifier for a specific Feature stored
* in a Sentinel protection key
* \param vendor_code Pointer to the Vendor Code
* \param handle Pointer to the resulting session handle
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_HASP_NOT_FOUND</li> Required Sentinel
* protection key not found
* <li>HASP_FEATURE_NOT_FOUND</li> Cannot find requested
* Feature
* <li>HASP_FEATURE_TYPE_NOT_IMPL</li> Requested Feature
* type not available
* <li>HASP_TMOF</li> Too many open login sessions
* <li>HASP_INSUF_MEM</li> Out of memory
* <li>HASP_INV_VCODE</li> Invalid Vendor Code
* <li>HASP_NO_DRIVER</li> Driver not installed
* <li>HASP_NO_VLIB</li> Vendor library cannot be found
* <li>HASP_INV_VLIB</li> Vendor library cannot be loaded
* <li>HASP_OLD_DRIVER</li> Driver too old
* <li>HASP_UNKNOWN_VCODE</li> Vendor Code not recognized
* <li>HASP_FEATURE_EXPIRED</li> Feature has expired
* <li>HASP_TOO_MANY_USERS</li> Too many users currently
* connected
* <li>HASP_OLD_LM</li> Sentinel License Manager version
* too old
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL/SL-AdminMode/SL-UserMode secure storage, OR in
* case of a Sentinel HL key, USB communication error
* <li>HASP_TIME_ERR</li> System time has been tampered
* with
* <li>HASP_HARDWARE_MODIFIED</li> Sentinel SL key
* incompatible with machine hardware; Sentinel SL key is
* locked to different hardware
* <li>HASP_TS_DETECTED</li> Program is running on a
* Terminal Server
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* <li>HASP_OLD_VLIB</li> Vendor Library version too old
* <li>HASP_CLONE_DETECTED</li> Cloned Sentinel SL storage
* detected. Feature unavailable
* </ul>
*
* \sa hasp_login_scope()
* \sa hasp_logout()
*
*/
hasp_status_t HASP_CALLCONV hasp_login(hasp_feature_t feature_id,
hasp_vendor_code_t vendor_code,
hasp_handle_t *handle);
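/*
 * Usage sketch (illustrative only): a minimal login / logout round trip
 * against the default Feature ID. The vendor_code initializer below is a
 * placeholder; real Vendor Code strings are vendor-specific and much longer.
 *
 * \code
 * #include <stdio.h>
 *
 * int main(void)
 * {
 *     hasp_vendor_code_t vendor_code = "place-your-vendor-code-here"; // placeholder
 *     hasp_handle_t handle = HASP_INVALID_HANDLE_VALUE;
 *     hasp_status_t status;
 *
 *     status = hasp_login(HASP_DEFAULT_FID, vendor_code, &handle);
 *     if (status != HASP_STATUS_OK) {
 *         printf("hasp_login failed: %d\n", (int)status);
 *         return 1;
 *     }
 *
 *     // ... protected functionality goes here ...
 *
 *     hasp_logout(handle);
 *     return 0;
 * }
 * \endcode
 */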
/**
* \brief Logs into a Feature to establish a session, according to
* predefined search parameters.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* This function is used to specify conditions that describe where login
* information is to searched for.
*
* The requisite Vendor Codes are stored in a VendorCodes folder in your
* system. Without the correct Vendor Code, the function call cannot succeed.
*
* You can open up to 512 simultaneous login sessions.
*
* This function does not work with legacy HASP Features.
*
* \param feature_id Unique identifier for a specific Feature stored in
* a Sentinel protection key
* \param scope Definition of the search parameters for
* this Feature ID. See the additional Sentinel API
* Reference documentation for more information
* about Scope XML Tags
* \param vendor_code Pointer to the vendor code
* \param handle Pointer to the resulting session handle
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_HASP_NOT_FOUND</li> Required Sentinel
* protection key not found
* <li>HASP_FEATURE_NOT_FOUND</li> Cannot find requested
* Feature
* <li>HASP_SCOPE_RESULTS_EMPTY</li> Unable to locate
* a Feature matching the scope
* <li>HASP_TMOF</li> Too many open login sessions
* <li>HASP_INSUF_MEM</li> Out of memory
* <li>HASP_INV_VCODE</li> Invalid Vendor Code
* <li>HASP_NO_DRIVER</li> Driver not installed
* <li>HASP_NO_VLIB</li> Vendor library cannot be found
* <li>HASP_INV_VLIB</li> Vendor library cannot be loaded
* <li>HASP_OLD_DRIVER</li> Driver too old
* <li>HASP_UNKNOWN_VCODE</li> Vendor Code not recognized
* <li>HASP_INVALID_PARAMETER</li> Scope string too
* long (max. length 32 kb)
* <li>HASP_FEATURE_EXPIRED</li> Feature has expired
* <li>HASP_TOO_MANY_USERS</li> Too many users currently
* connected
* <li>HASP_OLD_LM</li> Sentinel License Manager version
* too old
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL/SL-AdminMode/SL-UserMode secure storage, OR in
* case of a Sentinel HL key, USB communication error
* <li>HASP_TIME_ERR</li> System time has been tampered
* with
* <li>HASP_TS_DETECTED</li> Program is running on a
* Terminal Server
* <li>HASP_HARDWARE_MODIFIED</li> Sentinel SL key
* incompatible with machine hardware; Sentinel SL key is
* locked to different hardware
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* <li>HASP_OLD_VLIB</li> Vendor Library version too old
* <li>HASP_CLONE_DETECTED</li> Cloned Sentinel SL storage
* detected. Feature unavailable
* </ul>
*
* \sa hasp_login()
* \sa hasp_logout()
* \sa hasp_get_info()
*
*/
hasp_status_t HASP_CALLCONV hasp_login_scope(hasp_feature_t feature_id,
const char *scope,
hasp_vendor_code_t vendor_code,
hasp_handle_t *handle);
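/*
 * Usage sketch (illustrative only): logging in with a scope that restricts
 * the search to the local Sentinel License Manager. The scope string shown
 * is one common form; see the Sentinel documentation for the full scope
 * grammar. vendor_code is assumed to hold a valid Vendor Code.
 *
 * \code
 * const char *scope =
 *     "<haspscope>"
 *     "  <license_manager hostname=\"localhost\" />"
 *     "</haspscope>";
 * hasp_handle_t handle = HASP_INVALID_HANDLE_VALUE;
 * hasp_status_t status;
 *
 * status = hasp_login_scope(HASP_DEFAULT_FID, scope, vendor_code, &handle);
 * if (status == HASP_STATUS_OK)
 *     hasp_logout(handle);
 * \endcode
 */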
/**
* \brief Logs out from a context or session.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* Use this function to end a connection to an API session object. Once
* logged out from a session, all memory allocated for the session is released.
*
* \param handle Handle for the session being terminated
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* </ul>
*
* \sa hasp_login()
* \sa hasp_login_scope()
*
*/
hasp_status_t HASP_CALLCONV hasp_logout(hasp_handle_t handle);
/**
* \brief Encrypts a buffer.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* Encrypts data using the encryption engine in the Sentinel protection key.
* The specific session handle determines which Sentinel protection key and
* which Feature ID encrypts the data buffer. The encryption key remains
* in the Sentinel protection key. If the encryption fails, the buffer
* is not modified. To decrypt the data buffer, use the \ref hasp_decrypt
* function.
*
* \param handle Handle for the session
* \param buffer Pointer to the buffer to be encrypted
* \param length Size (in bytes) of the buffer to be encrypted
* (16 bytes minimum)
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL secure storage, OR in case of a Sentinel HL key,
* USB communication error
* <li>HASP_TIME_ERR</li> System time has been tampered
* with
* <li>HASP_TOO_SHORT</li> Encryption data length is
* too short
* <li>HASP_ENC_NOT_SUPP</li> Sentinel protection key
* does not support encryption type
* <li>HASP_SCHAN_ERR</li> Communication error occurred
* in secure channel OR Sentinel HL Firmware too old
* (update to 3.25 or later)
* <li>HASP_BROKEN_SESSION</li> Session has been
* interrupted
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* </ul>
*
* \sa hasp_decrypt()
*
*/
hasp_status_t HASP_CALLCONV hasp_encrypt(hasp_handle_t handle,
void *buffer,
hasp_size_t length);
/**
* \brief Decrypts a buffer.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* Decrypts data using the decryption engine in the Sentinel protection key
* The specific session handle determines which Sentinel protection key and
* which Feature ID decrypts the data buffer. The decryption key remains
* in the Sentinel protection key. If the decryption fails, the buffer
* is not modified. To encrypt the data buffer, use the \ref hasp_encrypt
* function.
*
* \param handle Handle for the session
* \param buffer Pointer to the buffer to be decrypted
* \param length Size (in bytes) of the buffer to be decrypted
* (16 bytes minimum)
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL/SL-AdminMode/SL-UserMode secure storage, OR in
* case of a Sentinel HL key, USB communication error
* <li>HASP_TIME_ERR</li> System time has been tampered
* with
* <li>HASP_TOO_SHORT</li> Decryption data length is
* too short
* <li>HASP_SCHAN_ERR</li> Communication error occurred
* in secure channel OR Sentinel HL Firmware too old
* (update to 3.25 or later)
* <li>HASP_ENC_NOT_SUPP</li> Sentinel protection key
* does not support encryption type
* <li>HASP_BROKEN_SESSION</li> Session has been
* interrupted
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* </ul>
*
* \sa hasp_encrypt()
*
*/
hasp_status_t HASP_CALLCONV hasp_decrypt(hasp_handle_t handle,
void *buffer,
hasp_size_t length);
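/*
 * Usage sketch (illustrative only): an encrypt / decrypt round trip on a
 * logged-in session. The buffer must be at least HASP_MIN_BLOCK_SIZE bytes;
 * handle is assumed to come from a successful hasp_login() call.
 *
 * \code
 * unsigned char buffer[HASP_MIN_BLOCK_SIZE] = "protect me";
 * hasp_status_t status;
 *
 * status = hasp_encrypt(handle, buffer, sizeof(buffer));
 * if (status == HASP_STATUS_OK)
 *     status = hasp_decrypt(handle, buffer, sizeof(buffer));
 * // on success, buffer again holds the original plaintext
 * \endcode
 */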
/**
* \brief Reads the memory of a Sentinel protection key.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* Valid File IDs are \ref HASP_FILEID_RW and \ref HASP_FILEID_RO.
* From Sentinel LDK 7.1, Sentinel HL Driverless keys support dynamic memory,
* the File IDs can be from HASP_FILEID_DYNAMIC_FIRST(0x1) to HASP_FILEID_DYNAMIC_LAST(0xFFBF).
*
* <b>Legacy HASP Remarks</b><br>
* Valid File IDs are \ref HASP_FILEID_LICENSE and \ref HASP_FILEID_MAIN.
*
* \param handle Handle for the session
* \param fileid Identifier for the file that is to be read
* \param offset Byte offset in the file
* \param length Number of bytes to be read from the file
* \param buffer Pointer to the retrieved data
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_INV_FILEID</li> Unrecognized file identifier
* <li>HASP_MEM_RANGE</li> Request exceeds the memory
* range of a Sentinel file
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL/SL-AdminMode/SL-UserMode secure storage, OR in
* case of a Sentinel HL key, USB communication error
* <li>HASP_SCHAN_ERR</li> Communication error occurred
* in secure channel OR Sentinel HL Firmware too old
* (update to 3.25 or later)
* <li>HASP_BROKEN_SESSION</li> Session has been
* interrupted
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* </ul>
*
* \remark
* Use the \ref hasp_get_size function to determine the size of the file you
* want to read.
*
* \sa hasp_write()
* \sa hasp_get_size()
*
*/
hasp_status_t HASP_CALLCONV hasp_read(hasp_handle_t handle,
hasp_fileid_t fileid,
hasp_size_t offset,
hasp_size_t length,
void *buffer);
/**
* \brief Writes to the memory of a Sentinel protection key.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* Valid File ID is \ref HASP_FILEID_RW.
* From Sentinel LDK 7.1, Sentinel HL Driverless keys support dynamic memory,
* the File IDs can be from HASP_FILEID_DYNAMIC_FIRST(0x1) to HASP_FILEID_DYNAMIC_LAST(0xFFBF).
*
* <b>Legacy HASP Remarks</b><br>
* Depending on the provided session handle (either logged into the
* default Feature or any other Feature), write access to the FAS memory
* (\ref HASP_FILEID_LICENSE) is not permitted.
*
* \param handle Handle for the session
* \param fileid Identifier for the file that is to be written
* \param offset Byte offset in the file
* \param length Number of bytes to be written to the file
* \param buffer Pointer to the data
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_INV_FILEID</li> Unrecognized file identifier
* <li>HASP_MEM_RANGE</li> Request exceeds the memory
* range of a Sentinel file
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL/SL-AdminMode/SL-UserMode secure storage, OR in
* case of a Sentinel HL key, USB communication error
* <li>HASP_SCHAN_ERR</li> Communication error occurred
* in secure channel OR Sentinel HL Firmware too old
* (update to 3.25 or later)
* <li>HASP_BROKEN_SESSION</li> Session has been
* interrupted
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* </ul>
*
* \remark
* Use the \ref hasp_get_size function to determine the size of the file you
* want to write.
*
* \sa hasp_read()
* \sa hasp_get_size()
*
*/
hasp_status_t HASP_CALLCONV hasp_write(hasp_handle_t handle,
hasp_fileid_t fileid,
hasp_size_t offset,
hasp_size_t length,
const void *buffer);
/**
* \brief Retrieves the byte size of a memory file from a Sentinel protection key.
*
* This function is used to determine the file size of a Sentinel
* memory file.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* From Sentinel LDK 7.1, Sentinel HL Driverless keys support dynamic memory,
* the file IDs can be from HASP_FILEID_DYNAMIC_FIRST(0x1) to HASP_FILEID_DYNAMIC_LAST(0xFFBF).
*
* \param handle Handle for the session
* \param fileid Identifier for the file that is to be queried.
* \param size Pointer to the resulting file size
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_INV_FILEID</li> Unrecognized file identifier
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL/SL-AdminMode/SL-UserMode secure storage, OR in
* case of a Sentinel HL key, USB communication error
* <li>HASP_BROKEN_SESSION</li> Session has been
* interrupted
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* </ul>
*
* \sa hasp_read()
* \sa hasp_write()
*
*/
hasp_status_t HASP_CALLCONV hasp_get_size(hasp_handle_t handle,
hasp_fileid_t fileid,
hasp_size_t *size);
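/*
 * Usage sketch (illustrative only): querying the size of the read/write
 * memory file and reading it completely, as suggested by the remarks for
 * hasp_read() above. handle is assumed to be a valid session handle; the
 * malloc()ed buffer is the caller's responsibility to free.
 *
 * \code
 * #include <stdlib.h>
 *
 * hasp_size_t size = 0;
 * unsigned char *data = NULL;
 * hasp_status_t status;
 *
 * status = hasp_get_size(handle, HASP_FILEID_RW, &size);
 * if (status == HASP_STATUS_OK && size > 0) {
 *     data = (unsigned char *)malloc(size);
 *     if (data != NULL)
 *         status = hasp_read(handle, HASP_FILEID_RW, 0, size, data);
 *     // ... use data, then free(data) ...
 * }
 * \endcode
 */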
/**
* \brief Reads the current time.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* Only Sentinel HL keys with a Real-time clock (rtc) and Sentinel SL keys can
* provide the current time.
*
* Primarily used to obtain reliable timestamps that are independent from
* the system clock.
*
* Time values are returned as the number of seconds that have elapsed
* since Jan-01-1970 0:00:00 UTC.
*
* <b>Legacy HASP Remarks</b><br>
* This request is only supported on locally accessed keys. Trying to
* get the time from a remotely accessed key will return HASP_NO_TIME.
*
* \param handle Handle for the session
* \param time Pointer to the current time
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_BROKEN_SESSION</li> Session has been
* interrupted
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* <li>HASP_NO_BATTERY_POWER</li> Real-time clock has
* run out of power
* <li>HASP_NO_TIME</li> Real-time clock is not
* available
* </ul>
*
* \sa hasp_datetime_to_hasptime()
* \sa hasp_hasptime_to_datetime()
*
*/
hasp_status_t HASP_CALLCONV hasp_get_rtc(hasp_handle_t handle,
hasp_time_t *time);
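/*
 * Usage sketch (illustrative only): reading the key's real-time clock and
 * comparing it against an application-defined timestamp, both counted in
 * seconds since Jan-01-1970 UTC. handle is assumed to be a valid session
 * handle on a clock-enabled key; stored_deadline is hypothetical.
 *
 * \code
 * hasp_time_t now = 0;
 *
 * if (hasp_get_rtc(handle, &now) == HASP_STATUS_OK && now > stored_deadline) {
 *     // trial period over, according to the tamper-resistant clock
 * }
 * \endcode
 */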
/**
* @}
*/
/**
* @defgroup hasp_classic Legacy HASP functionality for backward compatibility
*
* @{
*/
/**
* \brief Legacy HASP4 compatible encryption function.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* This function does not work with a Sentinel login handle.
*
* <b>Legacy HASP Remarks</b><br>
* The handle must have been obtained by calling \ref hasp_login() with
* a "prognum" Feature ID.
*
* \param handle Handle for the session
* \param buffer Pointer to the buffer to be encrypted
* \param length Size in bytes of the buffer (min. 8 bytes)
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_TOO_SHORT</li> Encryption data length is
* too short
* <li>HASP_ENC_NOT_SUPP</li> Sentinel protection key
* does not support encryption type
* <li>HASP_HASP_NOT_FOUND</li> Required Sentinel
* protection key not found
* </ul>
*
* \sa hasp_legacy_decrypt()
* \sa hasp_encrypt()
* \sa hasp_decrypt()
*
*/
hasp_status_t HASP_CALLCONV hasp_legacy_encrypt(hasp_handle_t handle,
void *buffer,
hasp_size_t length);
/**
* \brief Legacy HASP4 compatible decryption function.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* This function does not work with a Sentinel login handle.
*
* <b>Legacy HASP Remarks</b><br>
* The handle must have been obtained by calling \ref hasp_login() with
* a "prognum" Feature ID.
*
* \param handle Handle for the session
* \param buffer Pointer to the buffer to be decrypted
* \param length Size in bytes of the buffer (min. 8 bytes)
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_TOO_SHORT</li> Encryption data length is
* too short
* <li>HASP_ENC_NOT_SUPP</li> Sentinel protection key
* does not support encryption type
* <li>HASP_HASP_NOT_FOUND</li> Required Sentinel
* protection key not found
* </ul>
*
* \sa hasp_legacy_encrypt()
* \sa hasp_decrypt()
* \sa hasp_encrypt()
*
*/
hasp_status_t HASP_CALLCONV hasp_legacy_decrypt(hasp_handle_t handle,
void *buffer,
hasp_size_t length);
/**
* \brief Writes to HASP4-compatible real-time clock
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* This function does not work with a Sentinel login handle.
*
* <b>Legacy HASP Remarks</b><br>
* The handle must have been obtained by calling \ref hasp_login() with
* a "prognum" Feature ID.
*
* This request is only supported on locally accessed keys. Attempting to
* set the time on a remotely accessed key will return HASP_NO_TIME.
*
* \param handle Handle for the session
* \param new_time The new time value
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_HASP_NOT_FOUND</li> Required Sentinel
* protection key not found
* <li>HASP_NO_TIME</li> Real-time clock not available
* OR access remote
* </ul>
*
* \sa hasp_get_rtc()
* \sa hasp_datetime_to_hasptime()
* \sa hasp_hasptime_to_datetime()
*
*/
hasp_status_t HASP_CALLCONV hasp_legacy_set_rtc(hasp_handle_t handle,
hasp_time_t new_time);
/**
* \brief Set the LM idle time.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* This function does not work with a Sentinel login handle.
*
* <b>Legacy HASP Remarks</b><br>
* The handle must have been obtained by calling \ref hasp_login() with
* a "prognum" Feature ID.
*
* \param handle Handle for the session
* \param idle_time The idle time in minutes
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_HASP_NOT_FOUND</li> Required Sentinel
* protection key not found
* <li>HASP_REQ_NOT_SUPP</li> Tried to set the idle time
* for a local license
* </ul>
*
*/
hasp_status_t HASP_CALLCONV hasp_legacy_set_idletime(hasp_handle_t handle,
hasp_u16_t idle_time);
/**
* @}
*/
/**
* @defgroup hasp_extended Extended Sentinel Licensing API functionality
*
* The extended API consists of functions that provide extended
* functionality. This advanced functionality is sometimes necessary,
* and addresses the "advanced" user.
*
* @{
*/
/**
* \brief Retrieves information about system components, according to
* customizable search parameters, and presents it according to
* customizable formats.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* You do not need to be logged in to a Sentinel Feature in order to use
* this function.
*
* This function is used to specify conditions about where to search for
* information. In addition, it enables you to specify conditions about
* the format in which the retrieved information is presented. If
* retrieved information is appropriately formatted, it can be used as
* a template in the hasp_login_scope() function.
*
* The requisite Vendor Codes are stored in a VendorCodes folder in
* your system. Without the correct Vendor Code, the function call
* cannot succeed.
*
* This function allocates memory for the information it retrieves. To
* release allocated memory resources, use the \ref hasp_free function.
*
* This function cannot be used to retrieve legacy HASP Features.
*
* \param scope Definition of the data that is to be searched, in XML
* format. For more information, see the accompanying
* Sentinel Licensing API help documentation
* \param format Definition of the format in which the data is to be
* displayed, in XML format. For more information, see the
* accompanying Sentinel Licensing API help
* documentation
* \param vendor_code Pointer to the Vendor Code
* \param info Pointer to the information that is retrieved, in XML
* format
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_SCOPE_RESULTS_EMPTY</li> Unable to locate
* a Feature matching the scope
* <li>HASP_INSUF_MEM</li> Out of memory
* <li>HASP_INV_VCODE</li> Invalid Vendor Code
* <li>HASP_UNKNOWN_VCODE</li> Vendor Code not recognized
* <li>HASP_INVALID_PARAMETER</li> Scope or format string
* too long (max. length 32 kb)
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL/SL-AdminMode/SL-UserMode secure storage, OR in
* case of a Sentinel HL key, USB communication error
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* <li>HASP_INV_FORMAT</li> Unrecognized format string
* <li>HASP_INV_SCOPE</li> Unrecognized scope string
* <li>HASP_BROKEN_SESSION</li> Session has been
* interrupted
* <li>HASP_TOO_MANY_KEYS</li> In case of getting C2V:
* Too many Sentinel protection keys match the scope
* <li>HASP_TOO_MANY_HOSTS</li> In case of getting host
 *                                    fingerprint: Too many Sentinel License Managers
 *                                    match the scope
* <li>HASP_HASP_INACTIVE</li> In case of getting C2V:
* Inactive Sentinel SL-AdminMode/SL-UserMode key-id
* </ul>
*
* \sa hasp_get_sessioninfo()
* \sa hasp_free()
*
*/
hasp_status_t HASP_CALLCONV hasp_get_info(const char *scope,
const char *format,
hasp_vendor_code_t vendor_code,
char **info);
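/*
 * Usage sketch (illustrative only): retrieving key information with the
 * HASP_KEYINFO format defined earlier. The scope "<haspscope />" is commonly
 * used to mean "no restriction"; vendor_code is assumed to hold a valid
 * Vendor Code.
 *
 * \code
 * char *info = NULL;
 * hasp_status_t status;
 *
 * status = hasp_get_info("<haspscope />", HASP_KEYINFO, vendor_code, &info);
 * if (status == HASP_STATUS_OK) {
 *     // info points to XML text describing the matching keys
 *     hasp_free(info);
 * }
 * \endcode
 */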
/**
* \brief Retrieves information regarding a session context.
*
* Memory for the information is allocated by this function and has to
* be freed with the \ref hasp_free function.
*
* \param handle Handle for the session
* \param format Definition for the type of output data structure, in
* XML format.
* There are three format options:
* <ul>
* <li>HASP_KEYINFO</li> For retrieving information on
* the Sentinel protection key
* <li>HASP_SESSIONINFO</li> For retrieving information on
* the login session
* <li>HASP_UPDATEINFO</li> For retrieving information on
* a license update usually contained in a C2V file.
* The retrieved information includes the current
* state of the key, including update counters,
* license availability and memory images
* </ul>
* \param info Pointer to the information that is retrieved as XML text
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INSUF_MEM</li> Out of memory
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL secure storage, OR in case of a Sentinel HL key,
* USB communication error
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* <li>HASP_INV_FORMAT</li> Unrecognized format string
* <li>HASP_INV_HND</li> Invalid input handle
* <li>HASP_BROKEN_SESSION</li> Session has been
* interrupted
* <li>HASP_TIME_ERR</li> System time has been tampered
* with
* </ul>
*
* \sa hasp_free()
* \sa HASP_UPDATEINFO
* \sa HASP_SESSIONINFO
* \sa HASP_KEYINFO
*/
hasp_status_t HASP_CALLCONV hasp_get_sessioninfo(hasp_handle_t handle,
const char *format,
char **info);
/**
* \brief Frees memory resources utilized by other API functions.
*
* Used only in C code to release memory resources allocated to storing
* retrieved data from API calls.
*
* \param info Pointer to the memory resources to be released
*
* \sa hasp_get_info()
* \sa hasp_get_sessioninfo()
* \sa hasp_update()
*
*/
void HASP_CALLCONV hasp_free(char *info);
/**
* \brief Updates a Sentinel protection key.
*
* This function writes update information. Note that the Sentinel
* protection key must be locally connected.
*
* The update code contains all necessary data to perform the update on
* a deployed Sentinel protection key including:
* <ul>
* <li>The Sentinel protection key on which the updated information is
* to be written</li>
* <li>The necessary Vendor Code that is required to access the
* Sentinel key</li>
* <li>The actual update information</li>
* </ul>
*
* Depending on the update data, the function returns an acknowledgement
* code that is signed/encrypted by the key. The code is evidence that
* an update has been applied to a license. This function allocates memory
* for the acknowledge data. To release allocated memory resources, use
* \a hasp_free().
*
* \param update_data Pointer to the complete update data.
* \param ack_data Pointer to a buffer to retrieve the acknowledge data.
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_KEYID_NOT_FOUND</li> The Sentinel protection
* key was not found
* <li>HASP_INV_UPDATE_DATA</li> Required XML tags
* not found; Contents in binary data are missing
* or invalid
* <li>HASP_INV_UPDATE_NOTSUPP</li> Update request is
* not supported by Sentinel protection key
* <li>HASP_INV_UPDATE_CNTR</li> Update counter not
* set correctly
* <li>HASP_INSUF_MEM</li> Out of memory
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL secure storage, OR in case of a Sentinel HL key,
* USB communication error
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* <li>HASP_NO_ACK_SPACE</li> Acknowledge data
* requested by the update, but ack_data parameter
* is NULL
* <li>HASP_UNKNOWN_ALG</li> Unknown algorithm used
* in V2C file
* <li>HASP_INV_SIG</li> Signature verification failed
* <li>HASP_TOO_MANY_KEYS</li> Too many Sentinel
* protection keys match the scope
* <li>HASP_HARDWARE_MODIFIED</li> Conflict between
* Sentinel SL key data and machine hardware data;
* Sentinel SL key locked to different hardware
* <li>HASP_UPDATE_TOO_OLD</li> Trying to install a
* V2C file with an update counter that is out of
* sequence with the update counter in the Sentinel
* protection key. The values of the update counter
* in the file are lower than those in the Sentinel
* protection key
* <li>HASP_UPDATE_TOO_NEW</li> Trying to install a
* V2C file with an update counter that is out of
* sequence with the update counter in the Sentinel
* protection key. The first value in the file is more
* than 1 greater than the value in the Sentinel
* protection key
* <li>HASP_UNKNOWN_RECIPIENT</li> In case of an H2R:
* Update can only be applied to the Recipient
* specified in hasp_detach(), not to this machine
* <li>HASP_HASP_INACTIVE</li> Inactive Sentinel
* SL-AdminMode/SL-UserMode key-id
* <li>HASP_UPDATE_ALREADY_ADDED</li> Sentinel
* SL-AdminMode/SL-UserMode V2C update(s) already applied
* <li>HASP_REHOST_ALREADY_APPLIED</li> In case of SL-AdminMode
* /SL-UserMode H2H: Specified H2H already applied
* <li>HASP_LICENSE_REHOSTED</li> In case of SL-AdminMode
* /SL-UserMode V2C: Specified V2C already rehosted to another
* host
* </ul>
*
* \sa hasp_free()
*
*/
hasp_status_t HASP_CALLCONV hasp_update(const char *update_data,
char **ack_data);
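/*
 * Usage sketch (illustrative only): applying a V2C update that has already
 * been loaded into memory, and releasing the acknowledgement data with
 * hasp_free(). read_v2c_file() is a hypothetical helper that returns the
 * update as a NUL-terminated string.
 *
 * \code
 * char *v2c = read_v2c_file("update.v2c");   // hypothetical helper
 * char *ack = NULL;
 * hasp_status_t status;
 *
 * status = hasp_update(v2c, &ack);
 * if (status == HASP_STATUS_OK && ack != NULL)
 *     hasp_free(ack);
 * \endcode
 */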
/**
* \brief Detaches or cancels an attached license, according to customizable
* parameters.
*
* Starting from Sentinel LDK version 6.0, the "hasp_detach" API has been deprecated.
 * SafeNet recommends that users use the "hasp_transfer" API to perform
 * the detach/cancel actions. This API has been retained for backward compatibility.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* You do not need to be logged in to a Sentinel Feature in order to use
* this function.
*
* This function is used to detach a license for a Product (i.e. all Sentinel
* Features and Memory files which belong to this Product) from a Sentinel
* SL Protection key. The function returns a H2R file which must then be
* applied on the recipient machine using hasp_update() or the ACC.
*
* This function only works with Sentinel SL Protection Keys; Sentinel HL
* Protection Keys are ignored.
*
* This function can also be used on the recipient machine to cancel an
* attached license. In this case, the recipient parameter is ignored and
* should be set to NULL. For cancelling, the function returns a R2H file
* which must be applied on the host machine using hasp_update() or the
* ACC. If the detached Product is already expired, no R2H file will be
* returned.
*
* The required Vendor Codes are stored in a VendorCodes folder in
* your system. Without the correct Vendor Code, the function call
* cannot succeed.
*
* \param detach_action Parameters for the operation, in XML format. For
* more information, see the accompanying
* Sentinel Licensing API help documentation.
* \param scope Search parameters for the Product that is to be
* detached
* \param vc Pointer to the Vendor Code
* \param recipient Definition in XML format of the recipient computer,
* on which the detached Product will be installed.
* This information can be retrieved using either
* @a hasp_get_info or @a hasp_get_sessioninfo
* together with the format specifier
* @a HASP_RECIPIENT.
* Set to NULL if an attached protection key is cancelled.
* \param info Pointer to the information that is retrieved, in
* XML format. This information is a V2C, which can
* then be installed on the recipient computer
* via @a hasp_update. Use @a hasp_free to release this
* pointer after use.
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_DETACH_ACTION</li> Invalid XML
* "detach_action" parameter
* <li>HASP_INV_RECIPIENT</li> Invalid XML "recipient"
* parameter
* <li>HASP_TOO_MANY_PRODUCTS</li> Scope for hasp_detach
* does not specify a unique Parameter
* <li>HASP_TOO_MANY_USERS</li> Too many users currently
* connected, or: at least one detachable Feature
* does not have enough network seats available
* <li>HASP_ACCESS_DENIED</li> Request cannot be
* processed due to ACC restrictions
* <li>HASP_FEATURE_EXPIRED</li> All detachable
* Features are expired
* <li>HASP_INV_PRODUCT</li> Invalid Product information
* <li>HASP_INV_DURATION</li> In the case of a new
* detachable license, duration exceeds maximum allowed
* OR, in the case of a detachable license extension,
* expiration date earlier than original date
* or too short (if an existing detached Product is
* extended, and the new expiration date is earlier
* than the original expiration date)
* <li>HASP_INSUF_MEM</li> Out of memory
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL secure storage, OR in case of a Sentinel HL key,
* USB communication error
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_REMOTE_COMM_ERR</li> Communication error
* between local and remote Sentinel License Manager
* </ul>
*
* \sa hasp_get_info()
* \sa hasp_update()
* \sa hasp_free()
*/
#if (_MSC_VER >= 1400)
__declspec(deprecated)hasp_status_t HASP_CALLCONV hasp_detach(const char *detach_action,
const char *scope,
hasp_vendor_code_t vc,
const char *recipient,
char **info);
#elif defined(__GNUC__)
hasp_status_t HASP_CALLCONV hasp_detach(const char *detach_action,
const char *scope,
hasp_vendor_code_t vc,
const char *recipient,
char **info)__attribute__ ((deprecated));
#else
hasp_status_t HASP_CALLCONV hasp_detach(const char *detach_action,
const char *scope,
hasp_vendor_code_t vc,
const char *recipient,
char **info);
#endif
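/**
 * A minimal usage sketch for the deprecated hasp_detach(), showing the
 * "cancel" case on the recipient machine, where the recipient parameter is
 * set to NULL as described above. \a cancel_action_xml, \a scope_xml and
 * \a vendor_code are placeholders; their contents must be composed as
 * described in the Sentinel Licensing API help documentation.
 *
 * \code
 *     char *r2h = NULL;
 *     hasp_status_t status = hasp_detach(cancel_action_xml, scope_xml,
 *                                        vendor_code, NULL, &r2h);
 *     if (status == HASP_STATUS_OK && r2h != NULL)
 *     {
 *         // apply the returned R2H data on the host machine using
 *         // hasp_update() or the ACC, then release the buffer
 *         hasp_free(r2h);
 *     }
 * \endcode
 */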
/**
 * \brief Replaces the deprecated "hasp_detach()" API above. This API performs
 * the same functionality as "hasp_detach()". In addition, the
 * "hasp_transfer()" API is used to rehost the SL-AdminMode/SL-UserMode
 * V2C from one machine to another machine.
*
* <b>Sentinel Licensing API Usage Notes</b><br>
* You do not need to be logged in to a Sentinel Feature in
* order to use this function.
*
* This function is used to perform the following task as per its
* "action" parameter.
* for "detach" action: detach a license for a Product (i.e. all
* Sentinel Features and Memory files which
* belong to this Product) from a Sentinel
* SL/SL-AdminMode/SL-UserMode key. The function
* returns a bufferwhich should be saved as H2R file.
*
* for "cancel" action: This action runs on the recipient machine to
* cancel an attached license. In this case, the
* recipient parameter is ignored and should be
* set to NULL. For cancelling, the function returns
* a buffer which must be applied on the host machine
* using hasp_update() or ACC If the detached Product
* is already expired, no buffer will be returned.
*
* for "rehost" action: create a tranferable a license for given container
* (i.e. all Sentinel Features and Memory files
* which belong to this container) from SL-AdminMode/
* SL-UserMode Protection key. The function returns
* buffer on success which must be saved as V2C file.
* hasp_update() or ACC is used to apply this on
* destination machine.
*
* This function only works with Sentinel SL/SL-AdminMode/SL-UserMode
* Protection Keys; Sentinel HL Protection Keys are ignored.
*
* The required Vendor Codes are stored in a VendorCodes folder in
* your system. Without the correct Vendor Code, the function call
* cannot succeed.
*
* \param action Parameters for the operation, in XML format. For
* more information, see the accompanying
* Sentinel Licensing API help documentation.
 * \param scope          Search parameters for the container-id that is to be
* re-hosted. For more information, see the accompanying
* Sentinel Licensing API help documentation.
* \param vc Pointer to the Vendor Code
* \param recipient Definition in XML format of the recipient computer,
* on which the detached Product will be installed.
* This information can be retrieved using either
* @a hasp_get_info or @a hasp_get_sessioninfo
* together with the format specifier
* @a HASP_RECIPIENT.
* \param info Pointer to the information that is retrieved, in
* XML format. This information is a V2C, which can
* then be installed on the destination computer
* via @a hasp_update. Use @a hasp_free to release this
* pointer after use.
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_ACTION</li> Invalid XML
* "action" parameter
* <li>HASP_INV_RECIPIENT</li> Invalid XML "recipient"
* parameter
* <li>HASP_TOO_MANY_PRODUCTS</li> Scope for hasp_transfer
* for detach action does not specify a unique Parameter
* <li>HASP_TOO_MANY_USERS</li> Too many users currently
* connected, or: at least one detachable Feature
* does not have enough network seats available
* <li>HASP_ACCESS_DENIED</li> Request cannot be
* processed due to ACC restrictions
* <li>HASP_FEATURE_EXPIRED</li> All detachable
* Features are expired
* <li>HASP_INV_PRODUCT</li> Invalid Product information
* <li>HASP_INV_DURATION</li> In the case of a new
* detachable license, duration exceeds maximum allowed
* OR, in the case of a detachable license extension,
* expiration date earlier than original date
* or too short (if an existing detached Product is
* extended, and the new expiration date is earlier
* than the original expiration date)
* <li>HASP_TOO_MANY_KEYS</li> Scope for hasp_transfer
* does not specify a unique Parameter
* <li>HASP_ACCESS_DENIED</li> Request cannot be
* processed due to ACC restrictions
* <li>HASP_INSUF_MEM</li> Out of memory
* <li>HASP_DEVICE_ERR</li> Input/Output error in Sentinel
* SL/SL-AdminMode/SL-UserMode secure storage, OR in
* case of a Sentinel HL key, USB communication error
* <li>HASP_LOCAL_COMM_ERR</li> Communication error
* between API and local Sentinel License Manager
* <li>HASP_NO_DEATCHABLE_FEATURE</li> In case of H2R:
* No detachable feature found in specified product
 *       <li>HASP_OLD_LM</li> Sentinel License Manager does
 *        not support SL-AdminMode/SL-UserMode
 *       <li>HASP_HASP_INACTIVE</li> SL-AdminMode/
 *        SL-UserMode container is inactive
 *       <li>HASP_REHOST_NOT_ALLOWED</li> Specified
 *        SL-AdminMode/SL-UserMode container is not
 *        allowed for rehost
* </ul>
*
* \sa hasp_get_info()
* \sa hasp_update()
* \sa hasp_free()
*/
hasp_status_t HASP_CALLCONV hasp_transfer(const char *action,
const char *scope,
hasp_vendor_code_t vc,
const char *recipient,
char **info);
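/**
 * A minimal usage sketch for hasp_transfer() with the "rehost" action.
 * \a rehost_action_xml, \a scope_xml, \a recipient_xml and \a vendor_code are
 * placeholders whose contents are described in the Sentinel Licensing API
 * help documentation; only the call pattern is shown here.
 *
 * \code
 *     char *v2c = NULL;
 *     hasp_status_t status = hasp_transfer(rehost_action_xml, scope_xml,
 *                                          vendor_code, recipient_xml, &v2c);
 *     if (status == HASP_STATUS_OK && v2c != NULL)
 *     {
 *         // save the buffer as a V2C file and apply it on the destination
 *         // machine with hasp_update() or the ACC, then release it
 *         hasp_free(v2c);
 *     }
 * \endcode
 */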
/**
* @}
*/
/**
* @defgroup hasp_util Utility functions
*
* @{
*/
/**
* \brief Retrieves version and build number of the Sentinel library
*
* \param major_version Pointer to retrieve the major version number
* \param minor_version Pointer to retrieve the minor version number
* \param build_server Pointer to retrieve the build server id
* \param build_number Pointer to retrieve the build number
* \param vendor_code Pointer to the Vendor Code
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* </ul>
*
* \remark Any pointer other than the vendor_code can be NULL if its
* information is not required.
*
*/
hasp_status_t HASP_CALLCONV hasp_get_version(unsigned int *major_version,
unsigned int *minor_version,
unsigned int *build_server,
unsigned int *build_number,
hasp_vendor_code_t vendor_code);
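/**
 * A minimal usage sketch for hasp_get_version(). Only the major and minor
 * numbers are requested; as noted in the remark above, the other pointers may
 * be NULL. \a vendor_code is a placeholder for your Vendor Code.
 *
 * \code
 *     unsigned int major = 0, minor = 0;
 *     if (hasp_get_version(&major, &minor, NULL, NULL, vendor_code) == HASP_STATUS_OK)
 *     {
 *         // major and minor now hold the library version, e.g. for logging
 *     }
 * \endcode
 */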
/**
* \brief Converts a date and time value to hasptime (the number of
* elapsed seconds since January 1 1970).
*
* \param day Input for day value (range 1-31)
* \param month Input for month value (range 1-12)
* \param year Input for year value (range 1970+)
* \param hour Input for hour value (range 0-23)
* \param minute Input for minute value (range 0-59)
* \param second Input for second value (range 0-59)
* \param time Pointer to the resulting time value
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_TIME</li> Passed time value is outside
* the supported value range
* </ul>
*
* \remark Time values are in UTC.
*
* \sa hasp_hasptime_to_datetime()
*
*/
hasp_status_t HASP_CALLCONV hasp_datetime_to_hasptime(unsigned int day,
unsigned int month,
unsigned int year,
unsigned int hour,
unsigned int minute,
unsigned int second,
hasp_time_t *time);
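/**
 * A minimal usage sketch for hasp_datetime_to_hasptime(), converting an
 * arbitrary UTC date and time into a hasp_time_t value.
 *
 * \code
 *     hasp_time_t t = 0;
 *     if (hasp_datetime_to_hasptime(1, 6, 2020, 12, 30, 0, &t) == HASP_STATUS_OK)
 *     {
 *         // t now holds the number of seconds elapsed since January 1 1970 (UTC)
 *     }
 * \endcode
 */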
/**
* \brief Converts a time value (elapsed seconds since January 1 1970)
* into a date and time
*
* \param time The input time value
* \param day Pointer to the day value
* \param month Pointer to the month value
* \param year Pointer to the year value
* \param hour Pointer to the hour value
* \param minute Pointer to the minute value
* \param second Pointer to the second value
*
* \return <ul>
* <li>HASP_STATUS_OK</li> Request was successfully
* completed
* <li>HASP_INV_TIME</li> Passed time value outside
* of supported value range
* </ul>
*
* \remark Time values are in UTC.
*
* \sa hasp_datetime_to_hasptime()
*
*/
hasp_status_t HASP_CALLCONV hasp_hasptime_to_datetime(hasp_time_t time,
unsigned int *day,
unsigned int *month,
unsigned int *year,
unsigned int *hour,
unsigned int *minute,
unsigned int *second);
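/**
 * A minimal usage sketch for hasp_hasptime_to_datetime(), converting a
 * hasp_time_t value back into its UTC date and time components. Here t is
 * simply initialised to 0 (January 1 1970); typically it would come from
 * hasp_datetime_to_hasptime() or from licensing data.
 *
 * \code
 *     hasp_time_t t = 0;
 *     unsigned int day, month, year, hour, minute, second;
 *     if (hasp_hasptime_to_datetime(t, &day, &month, &year,
 *                                   &hour, &minute, &second) == HASP_STATUS_OK)
 *     {
 *         // day/month/year and hour/minute/second now describe t in UTC
 *     }
 * \endcode
 */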
/**
* @}
*/
#ifdef __cplusplus
} // extern "C"
#endif
#endif /* __HASP_API_H__ */
| 38.919408 | 95 | 0.600544 |
42175054db6a46708db0b415386677cd7ad1348c | 5,032 | cs | C# | Assets/Scripts/GameController.cs | didii/PacMan | 81abb9a6d2db37399992718cc3fe997bfde87005 | [
"Unlicense"
] | 2 | 2017-05-28T13:01:23.000Z | 2020-11-06T21:22:54.000Z | Assets/Scripts/GameController.cs | didii/PacMan | 81abb9a6d2db37399992718cc3fe997bfde87005 | [
"Unlicense"
] | null | null | null | Assets/Scripts/GameController.cs | didii/PacMan | 81abb9a6d2db37399992718cc3fe997bfde87005 | [
"Unlicense"
] | null | null | null | using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
public class GameController : MonoBehaviour {
#region Fields
/// <summary>
/// Counts the number of fixed frames that have passed. Used to ensure collisions aren't triggered twice
/// </summary>
public static int FixedFrameCount;
/// <summary>
/// Amount of lives the player still has
/// </summary>
[Header("Player"), Range(0, 3)]
public int PlayerLives;
/// <summary>
/// Bottom part of the UI
/// </summary>
[Header("UI")]
public GameObject BottomUI;
/// <summary>
/// The amount of lives shown in the left-lower corner
/// </summary>
public GameObject LivesIndicatorPrefab;
/// <summary>
/// How wide the lives indicator should be
/// </summary>
[Range(0, 100)]
public float LivesIndicatorWidth;
/// <summary>
/// A reference to the fruit indicator prefab
/// </summary>
public GameObject FruitIndicatorPrefab;
/// <summary>
/// How wide the fruit indicator should be
/// </summary>
[Range(0, 100)]
public float FruitIndicatorWidth;
/// <summary>
/// The texture pack of the fruit
/// </summary>
public Texture2D FruitTexture;
/// <summary>
/// All of the individual fruit sprites, taken from <see cref="FruitTexture"/>
/// </summary>
private Sprite[] _fruitSprites;
/// <summary>
/// Points to all the life indicator objects
/// </summary>
private List<GameObject> _livesIndicators;
/// <summary>
/// Points to all the fruit indicator objects
/// </summary>
private List<GameObject> _fruitIndicators;
#endregion
/// <summary>
/// Use this for initialization
/// </summary>
void Start() {
//_fruitSprites = Resources.LoadAll<Sprite>(FruitTexture.name);
if (_livesIndicators == null)
_livesIndicators = new List<GameObject>();
if (_fruitIndicators == null)
_fruitIndicators = new List<GameObject>();
_livesIndicators.AddRange(BottomUI.transform
.GetAllChildren(child => child.tag == "LifeIndicator" &&
!_livesIndicators.Contains(child.gameObject))
.Select(child => child.gameObject));
SetLivesIndicator();
}
/// <summary>
/// FixedUpdate is called once every physics update
/// </summary>
void FixedUpdate() {
FixedFrameCount++;
}
#region Methods
/// <summary>
/// Sets the correct amount of lives based on <see cref="PlayerLives"/>.
/// </summary>
public void SetLivesIndicator() {
#if UNITY_EDITOR
if (_livesIndicators == null)
_livesIndicators = new List<GameObject>();
// Make sure no life indicators already exist not included in the list
_livesIndicators.AddRange(BottomUI.transform
.GetAllChildren(child => child.tag == "LifeIndicator" &&
!_livesIndicators.Contains(child.gameObject))
.Select(child => child.gameObject));
#endif
// Remove all life indicators
while (_livesIndicators.Count > 0) {
#if UNITY_EDITOR
DestroyImmediate(_livesIndicators.Last());
#else
Destroy(_livesIndicators.Last());
#endif
_livesIndicators.RemoveLast();
}
// Add them all back
while (PlayerLives > _livesIndicators.Count) {
_livesIndicators.Add(Instantiate(LivesIndicatorPrefab));
_livesIndicators.Last().transform.SetParent(BottomUI.transform, false);
_livesIndicators.Last().GetComponent<RectTransform>().anchoredPosition =
new Vector2((_livesIndicators.Count - 1) * LivesIndicatorWidth, 0);
}
}
/// <summary>
/// Does some stuff I need right now
/// </summary>
public void DoRandomStuff() {
var line1 = new LineSegment2D(new Vector2(0, 0), Vector2.right);
var line2 = new LineSegment2D(Vector2.zero, 2 * Vector2.right);
var line3 = new LineSegment2D(Vector2.zero, 2 * Vector2.right);
var line4 = new LineSegment2D(Vector2.zero, 3 * Vector2.up);
var lines = new List<LineSegment2D> {line2, line4, line1, line3};
Debug.Log("(" + lines[0].Length + "," + lines[1].Length + "," + lines[2].Length + "," + lines[3].Length + ")");
var lines2 = lines.OrderBy(line => line.End.magnitude).ToArray();
Debug.Log("(" +
lines2[0].Length +
"," +
lines2[1].Length +
"," +
lines2[2].Length +
"," +
lines2[3].Length +
")");
}
#endregion
}
| 33.771812 | 119 | 0.564587 |
a98932eb4afa929afda4bb1f3e06edc4b19eee30 | 3,324 | go | Go | main.go | urvil38/git-push | 14c126ad9bde7386137d31587c193ebb25a8a8d9 | [
"MIT"
] | 4 | 2018-01-26T06:10:15.000Z | 2022-01-21T13:27:16.000Z | main.go | urvil38/git-push | 14c126ad9bde7386137d31587c193ebb25a8a8d9 | [
"MIT"
] | null | null | null | main.go | urvil38/git-push | 14c126ad9bde7386137d31587c193ebb25a8a8d9 | [
"MIT"
] | null | null | null | package main
import (
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"strings"
"github.com/fatih/color"
"github.com/urvil38/git-push/questions"
"github.com/urvil38/git-push/types"
"github.com/urvil38/git-push/utils"
"gopkg.in/AlecAivazis/survey.v1"
)
func init() {
red = color.New(color.FgRed, color.Bold).SprintFunc()
yellow = color.New(color.FgYellow, color.Bold).SprintFunc()
green = color.New(color.FgGreen, color.Bold).SprintFunc()
reset := flag.String("reset", "", "Use for Resetting Account, Equivalent to Logout\nExample: git-push -reset [github | bitbucket | gitlab | all]")
flag.Parse()
if *reset != "" {
reset := strings.ToLower(*reset)
err := utils.ResetAccount(reset)
if err != nil {
_, ok := err.(*os.PathError)
if ok {
fmt.Printf("%s\n", red("Cound't Reset Account.You are not Logged in to "+reset+" Account"))
}
os.Exit(0)
}
fmt.Printf("%s\n", green("Successfully Reset "+reset+" Account"))
os.Exit(0)
}
userConfigFile = utils.GetUserConfigFilePath()
configFolder = utils.GetConfigFolderPath()
err := utils.CreateDir(configFolder)
if err != nil {
log.Fatal(err)
}
checkUserInfo()
remoteExists, _ = utils.CheckRemoteRepo()
if remoteExists {
fmt.Printf("%s%s%s\n",
red("Sorry, this tool will not help you because working repository is already on github or bitbucket or gitlab!\nℹ You can use "),
yellow("$ git push origin master"),
red(" to push changes."))
os.Exit(0)
}
}
func checkUserInfo() {
b, err := ioutil.ReadFile(userConfigFile)
if err != nil {
return
}
userInfo := strings.Split(string(b), "\n")
basicUserInfo.Name = userInfo[0]
basicUserInfo.Email = userInfo[1]
}
var (
basicUserInfo types.BasicUserInfo
remoteExists bool
err error
home string
userConfigFile string
configFolder string
version string
red func(...interface{}) string
yellow func(...interface{}) string
green func(...interface{}) string
)
const (
banner = `
________ .__ __ __________ .__
/ _____/ |__|_/ |_ \______ \ __ __ ______| |__
/ \ ___ | |\ __\ ______ | ___/| | \ / ___/| | \
\ \_\ \| | | | /_____/ | | | | / \___ \ | Y \
\______ /|__| |__| |____| |____/ /____ >|___| /
\/ \/ \/
# Author : Urvil Patel
# Version : %s
# Twitter : @UrvilPatel12
# Github : https://github.com/urvil38
`
)
func main() {
fmt.Printf("%s\n", yellow(fmt.Sprintf(banner, version)))
if basicUserInfo.Email == "" || basicUserInfo.Name == "" {
err := survey.Ask(questions.UserInfo, &basicUserInfo)
if err != nil {
fmt.Println(err)
return
}
err = ioutil.WriteFile(userConfigFile, []byte(basicUserInfo.Name+"\n"+basicUserInfo.Email), 0555)
if err != nil {
return
}
}
var serviceName string
err = survey.Ask(questions.ServiceName, &serviceName)
if err != nil {
fmt.Println(err)
return
}
var repo types.Repo
if !remoteExists {
err = survey.Ask(questions.GithubRepoInfo, &repo)
if err != nil {
fmt.Println(err)
return
}
}
err = invokeService(serviceName, repo)
if err != nil {
fmt.Printf("%s\n", red("=> "+err.Error()))
os.Exit(0)
}
}
| 24.262774 | 147 | 0.600782 |
65ccbe388ec786cd6b7a6cebf05d60c5f2a263b9 | 5,917 | lua | Lua | engine/entity_ai/drivers.lua | AntumMT/mtmod-unimobiles | e5f998fad0411f823ac7f5bc5b8f488282162656 | [
"MIT"
] | 1 | 2017-05-29T14:56:51.000Z | 2017-05-29T14:56:51.000Z | engine/entity_ai/drivers.lua | AntumMT/mtmod-unimobiles | e5f998fad0411f823ac7f5bc5b8f488282162656 | [
"MIT"
] | 1 | 2017-05-29T05:47:19.000Z | 2017-05-30T06:03:06.000Z | engine/entity_ai/drivers.lua | AntumMT/mtmod-unimobiles | e5f998fad0411f823ac7f5bc5b8f488282162656 | [
"MIT"
] | null | null | null |
--[[
Copyright (c) 2016 - Auke Kok <[email protected]>
* entity_ai is licensed as follows:
- All code is: GNU Affero General Public License, Version 3.0 (AGPL-3.0)
- All artwork is: CC-BY-ND-4.0
A Contributor License Agreement exists, please read:
- https://github.com/sofar/entity_ai/readme.md.
--]]
entity_ai.register_driver("roam", {
start = function(self)
-- start with idle animation unless we get a path
self.driver:animation("idle")
local state = self.entity_ai_state
state.roam_ttl = math.random(3, 9)
self.path = Path(self)
if not self.path:find() then
--print("Unable to calculate path")
self.driver:switch("idle")
return
end
-- done, roaming mode good!
self.driver:animation("move")
end,
step = function(self, dtime)
-- handle movement stuff
local state = self.entity_ai_state
if state.roam_ttl and state.roam_ttl <= 0 then
self.driver:switch("idle")
return
end
state.roam_ttl = state.roam_ttl - dtime
-- do path movement
if not self.path or self.path:distance() < 0.7 or
not self.path:step(dtime) then
self.driver:switch("idle")
return
end
end,
stop = function(self)
local state = self.entity_ai_state
state.roam_ttl = nil
end,
})
entity_ai.register_driver("idle", {
start = function(self)
self.driver:animation("idle")
self.object:setvelocity(vector.new())
local state = self.entity_ai_state
state.idle_ttl = math.random(2, 20)
-- sanity checks
check_trapped_and_escape(self)
end,
step = function(self, dtime)
local state = self.entity_ai_state
state.idle_ttl = state.idle_ttl - dtime
if state.idle_ttl <= 0 then
self.driver:switch("roam")
return
end
end,
stop = function(self)
local state = self.entity_ai_state
state.idle_ttl = nil
end,
})
entity_ai.register_driver("startle", {
start = function(self, factordata)
-- startle animation
self.driver:animation("startle")
self.object:setvelocity(vector.new())
-- collect info we want to use in this driver
local state = self.entity_ai_state
if factordata and factordata["got_hit"] then
state.attacker = factordata["got_hit"][1]
state.attacked_at = factordata["got_hit"][5]
end
end,
step = function(self, dtime)
end,
stop = function(self)
-- play out remaining animations
end,
})
entity_ai.register_driver("eat", {
start = function(self, factordata)
self.driver:animation("eat")
self.object:setvelocity(vector.new())
-- collect info we want to use in this driver
local state = self.entity_ai_state
state.eat_ttl = math.random(30, 60)
if factordata and factordata.near_foodnode then
state.food = factordata.near_foodnode
end
end,
step = function(self, dtime)
local state = self.entity_ai_state
if state.eat_ttl > 0 then
state.eat_ttl = state.eat_ttl - dtime
return
end
state.ate_enough = math.random(200, 300)
self.driver:switch("eat_end")
end,
stop = function(self)
local state = self.entity_ai_state
state.eat_ttl = nil
-- increase HP
local hp = self.object:get_hp()
if hp < self.driver:get_property("hp_max") then
self.object:set_hp(hp + 1)
end
-- eat foodnode
local food = state.food
if not food then
return
end
local node = minetest.get_node(food)
minetest.sound_play(minetest.registered_nodes[node.name].sounds.dug, {pos = food, max_hear_distance = 18})
if node.name == "default:dirt_with_grass" or node.name == "default:dirt_with_dry_grass" then
minetest.set_node(food, {name = "default:dirt"})
--elseif node.name == "default:grass_1" or node.name == "default:dry_grass_1" then
-- minetest.remove_node(food)
elseif node.name == "default:grass_2" then
minetest.set_node(food, {name = "default:grass_1"})
elseif node.name == "default:grass_3" then
minetest.set_node(food, {name = "default:grass_2"})
elseif node.name == "default:grass_4" then
minetest.set_node(food, {name = "default:grass_3"})
elseif node.name == "default:grass_5" then
minetest.set_node(food, {name = "default:grass_4"})
elseif node.name == "default:dry_grass_2" then
minetest.set_node(food, {name = "default:dry_grass_1"})
elseif node.name == "default:dry_grass_3" then
minetest.set_node(food, {name = "default:dry_grass_2"})
elseif node.name == "default:dry_grass_4" then
minetest.set_node(food, {name = "default:dry_grass_3"})
elseif node.name == "default:dry_grass_5" then
minetest.set_node(food, {name = "default:dry_grass_4"})
end
state.food = nil
end,
})
entity_ai.register_driver("eat_end", {
start = function(self)
self.driver:animation("eat")
self.object:setvelocity(vector.new())
end,
step = function(self, dtime)
end,
stop = function(self)
end,
})
entity_ai.register_driver("flee", {
start = function(self)
self.driver:animation("move")
local state = self.entity_ai_state
state.flee_start = minetest.get_us_time()
end,
step = function(self, dtime)
-- check timer ourselves
local state = self.entity_ai_state
if (minetest.get_us_time() - state.flee_start) > (15 * 1000000) then
state.flee_start = nil
self.driver:switch("roam")
return
end
-- are we fleeing yet?
if self.path and self.path.distance then
-- stop fleeing if we're at a safe distance
-- execute flee path
if self.path:distance() < 2.0 then
-- get a new flee path
self.path = {}
else
-- follow path
if not self.path:step() then
self.path = {}
end
end
else
self.path = Path(self)
if not self.path:find() then
--print("Unable to calculate path")
return
end
-- done, flee path good!
self.driver:animation("move")
end
end,
stop = function(self)
-- play out remaining animations
end,
})
entity_ai.register_driver("death", {
start = function(self)
-- start with moving animation
self.driver:animation("idle")
end,
step = function(self, dtime)
end,
stop = function(self)
-- play out remaining animations
end,
})
| 26.066079 | 108 | 0.699003 |
7adc6eb1325ae242d8c6eb19265712d1d181c342 | 8,977 | cs | C# | PARAM/PARAM_C1G2TargetTag.cs | mksmbrtsh/llrp | 586982af0c82d06d04dd210e8187d01e9ea08fdd | [
"Apache-2.0"
] | null | null | null | PARAM/PARAM_C1G2TargetTag.cs | mksmbrtsh/llrp | 586982af0c82d06d04dd210e8187d01e9ea08fdd | [
"Apache-2.0"
] | null | null | null | PARAM/PARAM_C1G2TargetTag.cs | mksmbrtsh/llrp | 586982af0c82d06d04dd210e8187d01e9ea08fdd | [
"Apache-2.0"
] | null | null | null | // Decompiled with JetBrains decompiler
// Type: Org.LLRP.LTK.LLRPV1.PARAM_C1G2TargetTag
// Assembly: LLRP, Version=1.0.0.6, Culture=neutral, PublicKeyToken=null
// MVID: B7459025-14F1-49D6-9002-DE41A125DA72
// Assembly location: C:\gosniias\NARA\multi_llrp_uhf_rdr\multi_llrp_uhf_rdr\bin\Debug\LLRP.dll
using Org.LLRP.LTK.LLRPV1.DataType;
using System;
using System.Collections;
using System.Xml;
namespace Org.LLRP.LTK.LLRPV1
{
public class PARAM_C1G2TargetTag : Parameter
{
private const ushort param_reserved_len4 = 5;
public TwoBits MB = new TwoBits((ushort) 0);
private short MB_len;
public bool Match;
private short Match_len;
public ushort Pointer;
private short Pointer_len;
public LLRPBitArray TagMask = new LLRPBitArray();
private short TagMask_len;
public LLRPBitArray TagData = new LLRPBitArray();
private short TagData_len;
public PARAM_C1G2TargetTag() => this.typeID = (ushort) 339;
public static PARAM_C1G2TargetTag FromBitArray(
ref BitArray bit_array,
ref int cursor,
int length)
{
if (cursor >= length)
return (PARAM_C1G2TargetTag) null;
int num1 = cursor;
int num2 = length;
ArrayList arrayList = new ArrayList();
PARAM_C1G2TargetTag paramC1G2TargetTag = new PARAM_C1G2TargetTag();
paramC1G2TargetTag.tvCoding = bit_array[cursor];
int val;
if (paramC1G2TargetTag.tvCoding)
{
++cursor;
val = (int) (ulong) Util.CalculateVal(ref bit_array, ref cursor, 7);
}
else
{
cursor += 6;
val = (int) (ulong) Util.CalculateVal(ref bit_array, ref cursor, 10);
paramC1G2TargetTag.length = (ushort) Util.DetermineFieldLength(ref bit_array, ref cursor);
num2 = num1 + (int) paramC1G2TargetTag.length * 8;
}
if (val != (int) paramC1G2TargetTag.TypeID)
{
cursor = num1;
return (PARAM_C1G2TargetTag) null;
}
if (cursor > length || cursor > num2)
throw new Exception("Input data is not a complete LLRP message");
int field_len1 = 2;
object obj;
Util.ConvertBitArrayToObj(ref bit_array, ref cursor, out obj, typeof (TwoBits), field_len1);
paramC1G2TargetTag.MB = (TwoBits) obj;
if (cursor > length || cursor > num2)
throw new Exception("Input data is not a complete LLRP message");
int field_len2 = 1;
Util.ConvertBitArrayToObj(ref bit_array, ref cursor, out obj, typeof (bool), field_len2);
paramC1G2TargetTag.Match = (bool) obj;
cursor += 5;
if (cursor > length || cursor > num2)
throw new Exception("Input data is not a complete LLRP message");
int field_len3 = 16;
Util.ConvertBitArrayToObj(ref bit_array, ref cursor, out obj, typeof (ushort), field_len3);
paramC1G2TargetTag.Pointer = (ushort) obj;
if (cursor > length || cursor > num2)
throw new Exception("Input data is not a complete LLRP message");
int fieldLength1 = Util.DetermineFieldLength(ref bit_array, ref cursor);
Util.ConvertBitArrayToObj(ref bit_array, ref cursor, out obj, typeof (LLRPBitArray), fieldLength1);
paramC1G2TargetTag.TagMask = (LLRPBitArray) obj;
if (cursor > length || cursor > num2)
throw new Exception("Input data is not a complete LLRP message");
int fieldLength2 = Util.DetermineFieldLength(ref bit_array, ref cursor);
Util.ConvertBitArrayToObj(ref bit_array, ref cursor, out obj, typeof (LLRPBitArray), fieldLength2);
paramC1G2TargetTag.TagData = (LLRPBitArray) obj;
return paramC1G2TargetTag;
}
public override string ToString()
{
string str = "<C1G2TargetTag>" + "\r\n";
if (this.MB != null)
{
try
{
str = str + " <MB>" + this.MB.ToString() + "</MB>";
str += "\r\n";
}
catch
{
}
}
try
{
str = str + " <Match>" + Util.ConvertValueTypeToString((object) this.Match, "u1", "") + "</Match>";
str += "\r\n";
}
catch
{
}
try
{
str = str + " <Pointer>" + Util.ConvertValueTypeToString((object) this.Pointer, "u16", "") + "</Pointer>";
str += "\r\n";
}
catch
{
}
if (this.TagMask != null)
{
try
{
str = str + " <TagMask Count=\"" + (object) this.TagMask.Count + "\">" + Util.ConvertArrayTypeToString((object) this.TagMask, "u1v", "Hex") + "</TagMask>";
str += "\r\n";
}
catch
{
}
}
if (this.TagData != null)
{
try
{
str = str + " <TagData Count=\"" + (object) this.TagData.Count + "\">" + Util.ConvertArrayTypeToString((object) this.TagData, "u1v", "Hex") + "</TagData>";
str += "\r\n";
}
catch
{
}
}
return str + "</C1G2TargetTag>" + "\r\n";
}
public static PARAM_C1G2TargetTag FromXmlNode(XmlNode node)
{
ArrayList arrayList = new ArrayList();
XmlNamespaceManager namespaceManager = new XmlNamespaceManager(node.OwnerDocument.NameTable);
namespaceManager.AddNamespace("", "http://www.llrp.org/ltk/schema/core/encoding/xml/1.0");
namespaceManager.AddNamespace("llrp", "http://www.llrp.org/ltk/schema/core/encoding/xml/1.0");
PARAM_C1G2TargetTag paramC1G2TargetTag = new PARAM_C1G2TargetTag();
string nodeValue1 = XmlUtil.GetNodeValue(node, "MB");
paramC1G2TargetTag.MB = TwoBits.FromString(nodeValue1);
string nodeValue2 = XmlUtil.GetNodeValue(node, "Match");
paramC1G2TargetTag.Match = (bool) Util.ParseValueTypeFromString(nodeValue2, "u1", "");
string nodeValue3 = XmlUtil.GetNodeValue(node, "Pointer");
paramC1G2TargetTag.Pointer = (ushort) Util.ParseValueTypeFromString(nodeValue3, "u16", "");
string nodeValue4 = XmlUtil.GetNodeValue(node, "TagMask");
paramC1G2TargetTag.TagMask = (LLRPBitArray) Util.ParseArrayTypeFromString(nodeValue4, "u1v", "Hex");
string nodeAttribute1 = XmlUtil.GetNodeAttribute(node, "TagMask", "Count");
if (nodeAttribute1 != string.Empty)
paramC1G2TargetTag.TagMask.Count = Convert.ToInt32(nodeAttribute1);
string nodeValue5 = XmlUtil.GetNodeValue(node, "TagData");
paramC1G2TargetTag.TagData = (LLRPBitArray) Util.ParseArrayTypeFromString(nodeValue5, "u1v", "Hex");
string nodeAttribute2 = XmlUtil.GetNodeAttribute(node, "TagData", "Count");
if (nodeAttribute2 != string.Empty)
paramC1G2TargetTag.TagData.Count = Convert.ToInt32(nodeAttribute2);
return paramC1G2TargetTag;
}
public override void ToBitArray(ref bool[] bit_array, ref int cursor)
{
int num = cursor;
if (this.tvCoding)
{
bit_array[cursor] = true;
++cursor;
Util.ConvertIntToBitArray((uint) this.typeID, 7).CopyTo((Array) bit_array, cursor);
cursor += 7;
}
else
{
cursor += 6;
Util.ConvertIntToBitArray((uint) this.typeID, 10).CopyTo((Array) bit_array, cursor);
cursor += 10;
cursor += 16;
}
if (this.MB != null)
{
try
{
BitArray bitArray = Util.ConvertObjToBitArray((object) this.MB, (int) this.MB_len);
bitArray.CopyTo((Array) bit_array, cursor);
cursor += bitArray.Length;
}
catch
{
}
}
try
{
BitArray bitArray = Util.ConvertObjToBitArray((object) this.Match, (int) this.Match_len);
bitArray.CopyTo((Array) bit_array, cursor);
cursor += bitArray.Length;
}
catch
{
}
cursor += 5;
try
{
BitArray bitArray = Util.ConvertObjToBitArray((object) this.Pointer, (int) this.Pointer_len);
bitArray.CopyTo((Array) bit_array, cursor);
cursor += bitArray.Length;
}
catch
{
}
if (this.TagMask != null)
{
try
{
Util.ConvertIntToBitArray((uint) this.TagMask.Count, 16).CopyTo((Array) bit_array, cursor);
cursor += 16;
BitArray bitArray = Util.ConvertObjToBitArray((object) this.TagMask, (int) this.TagMask_len);
bitArray.CopyTo((Array) bit_array, cursor);
cursor += bitArray.Length;
}
catch
{
}
}
if (this.TagData != null)
{
try
{
Util.ConvertIntToBitArray((uint) this.TagData.Count, 16).CopyTo((Array) bit_array, cursor);
cursor += 16;
BitArray bitArray = Util.ConvertObjToBitArray((object) this.TagData, (int) this.TagData_len);
bitArray.CopyTo((Array) bit_array, cursor);
cursor += bitArray.Length;
}
catch
{
}
}
if (this.tvCoding)
return;
Util.ConvertIntToBitArray((uint) (cursor - num) / 8U, 16).CopyTo((Array) bit_array, num + 16);
}
}
}
| 35.482213 | 166 | 0.611786 |
386714ac73234549656a600966d382604acd80e4 | 442 | php | PHP | _src/Section01/functional_02.php | paullewallencom/php-978-1-7883-9198-6 | 35f2f7903bad0e0dc240d85ca0a3e8c789316d37 | [
"Apache-2.0"
] | 10 | 2017-06-08T12:14:36.000Z | 2021-11-08T13:11:12.000Z | _src/Section01/functional_02.php | paullewallencom/php-978-1-7883-9198-6 | 35f2f7903bad0e0dc240d85ca0a3e8c789316d37 | [
"Apache-2.0"
] | null | null | null | _src/Section01/functional_02.php | paullewallencom/php-978-1-7883-9198-6 | 35f2f7903bad0e0dc240d85ca0a3e8c789316d37 | [
"Apache-2.0"
] | 5 | 2017-08-01T14:01:40.000Z | 2020-07-02T05:13:32.000Z | <?php
include __DIR__ . '/../vendor/autoload.php';
use function Functional\map;
use function Functional\filter;
use function Functional\reduce_left;
$input = ['apple', 'banana', 'orange', 'raspberry'];
$sum = reduce_left(filter(map($input, function($fruit) {
return strlen($fruit);
}), function($length) {
return $length > 5;
}), function($val, $i, $col, $reduction) {
return $val + $reduction;
});
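// strlen: apple=5 (filtered out), banana=6, orange=6, raspberry=9, so this should print "sum: 21"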
printf("sum: %d\n", $sum); | 23.263158 | 56 | 0.653846 |
6b70eef1906bb506d1d181e59ff5a903ea4281a0 | 745 | js | JavaScript | eq-author-api/tests/utils/contextBuilder/importing/importQuestions.js | ONSdigital/eq-author-app | 9d6b9d85a5b7629d9eaa05f2a0857a5969a6a079 | [
"MIT"
] | 9 | 2018-11-19T16:57:19.000Z | 2021-07-06T08:31:20.000Z | eq-author-api/tests/utils/contextBuilder/importing/importQuestions.js | ONSdigital/eq-author-app | 9d6b9d85a5b7629d9eaa05f2a0857a5969a6a079 | [
"MIT"
] | 1,686 | 2018-11-14T12:54:21.000Z | 2022-03-28T22:20:32.000Z | eq-author-api/tests/utils/contextBuilder/importing/importQuestions.js | ONSdigital/eq-author-app | 9d6b9d85a5b7629d9eaa05f2a0857a5969a6a079 | [
"MIT"
] | 10 | 2019-03-15T15:33:24.000Z | 2021-09-09T07:55:06.000Z | const executeQuery = require("../../executeQuery");
const { filter } = require("graphql-anywhere");
const gql = require("graphql-tag");
const mutation = `
mutation ImportQuestions($input: ImportQuestionsInput!) {
importQuestions(input: $input) {
id
}
}
`;
const importQuestions = async (ctx, input) => {
const result = await executeQuery(
mutation,
{
input: filter(
gql`
{
questionnaireId
questionIds
position {
folderId
sectionId
index
}
}
`,
input
),
},
ctx
);
return result.data.importQuestions;
};
module.exports = {
mutation,
importQuestions,
};
| 17.738095 | 59 | 0.531544 |
e6b25f4bbce28c939b751a13975b8e0282844829 | 806 | asm | Assembly | oeis/180/A180762.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 11 | 2021-08-22T19:44:55.000Z | 2022-03-20T16:47:57.000Z | oeis/180/A180762.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 9 | 2021-08-29T13:15:54.000Z | 2022-03-09T19:52:31.000Z | oeis/180/A180762.asm | neoneye/loda-programs | 84790877f8e6c2e821b183d2e334d612045d29c0 | [
"Apache-2.0"
] | 3 | 2021-08-22T20:56:47.000Z | 2021-09-29T06:26:12.000Z | ; A180762: Half the number of nX2 binary arrays with each element equal to at least one neighbor
; Submitted by Christian Krause
; 1,3,11,34,111,361,1172,3809,12377,40218,130687,424661,1379916,4483973,14570461,47346034,153848731,499924281,1624480652,5278674169,17152805697,55737242698,181115572487,588526611821,1912389796556,6214221516093,20192823199861,65615637892034,213214957282851,692832066708401,2251325511008332,7315577323949329,23771627568290617,77244795895404378,251003364232487087,815623733944966981,2650331310930806796,8612128074946578613,27984708807305568461,90935007028973350034,295489067272364004971
add $0,1
lpb $0
sub $0,1
mov $2,$1
add $1,1
add $1,$3
mov $3,$1
sub $3,$4
mov $4,$2
mov $2,$3
dif $5,$1
add $5,$4
mov $3,$5
add $5,$2
mul $5,2
lpe
mov $0,$3
add $0,1
| 35.043478 | 483 | 0.782878 |
b21827ee07ff74699583da4532113d260fec6f07 | 1,315 | rb | Ruby | lib/hcloud/concerns/creatable.rb | floriandejonckheere/hcloud | 021505304dbcc32288622b70e83657ce5d55ae86 | [
"MIT"
] | null | null | null | lib/hcloud/concerns/creatable.rb | floriandejonckheere/hcloud | 021505304dbcc32288622b70e83657ce5d55ae86 | [
"MIT"
] | 9 | 2022-01-06T17:35:11.000Z | 2022-02-03T18:52:23.000Z | lib/hcloud/concerns/creatable.rb | floriandejonckheere/hcloud | 021505304dbcc32288622b70e83657ce5d55ae86 | [
"MIT"
] | null | null | null | # frozen_string_literal: true
module HCloud
module Creatable
extend ActiveSupport::Concern
included do
attribute :created, :datetime
def create
assign_attributes client
.post("/#{resource_name.pluralize}", creatable_params)
.fetch(resource_name.to_sym)
end
def created?
created.present?
end
def creatable_attributes
[]
end
# Convert creatable_attributes into a key-value list
# rubocop:disable Metrics/AbcSize,Metrics/CyclomaticComplexity,Metrics/PerceivedComplexity
def creatable_params
# Split simple and nested attributes
nested_attributes, simple_attributes = creatable_attributes.partition { |a| a.respond_to? :each }
attributes
.slice(*simple_attributes.map(&:to_s))
.transform_values { |v| v&.send_wrap(:try, :to_h) || v&.send_wrap(:to_s) }
.merge(nested_attributes.reduce(&:merge)&.map { |k, v| [k.to_s, Array(v).filter_map { |w| send(k)&.send_wrap(w) }.first] }.to_h)
.compact
end
# rubocop:enable Metrics/AbcSize,Metrics/CyclomaticComplexity,Metrics/PerceivedComplexity
end
class_methods do
def create(**attributes)
new(attributes)
.tap(&:create)
end
end
end
end
| 27.978723 | 138 | 0.647148 |
5c812e801666c3c9dc67b375c7282bdf158c26e4 | 516 | lua | Lua | rbxmk/testdata/args.lua | Anaminus/rbxmk | 25317d8f688a802cc20b87c12b154e4b9bfc83ea | [
"MIT"
] | 60 | 2017-06-05T21:11:10.000Z | 2022-03-30T17:19:39.000Z | rbxmk/testdata/args.lua | Anaminus/rbxmk | 25317d8f688a802cc20b87c12b154e4b9bfc83ea | [
"MIT"
] | 61 | 2017-02-14T17:19:59.000Z | 2022-03-09T03:14:34.000Z | rbxmk/testdata/args.lua | Anaminus/rbxmk | 25317d8f688a802cc20b87c12b154e4b9bfc83ea | [
"MIT"
] | 8 | 2017-03-22T03:38:03.000Z | 2022-03-13T04:20:08.000Z | -- Test arguments
T.Pass(select(1, ...) == true , "arg 1 is true")
T.Pass(select(2, ...) == false , "arg 2 is false")
T.Pass(select(3, ...) == nil , "arg 3 is nil")
T.Pass(select(4, ...) == 42 , "arg 4 is 42")
T.Pass(select(5, ...) == 3.141592653589793, "arg 5 is pi")
T.Pass(select(6, ...) == -1e-8 , "arg 6 is -1e-8")
T.Pass(select(7, ...) == "hello, world!" , "arg 7 is string")
T.Pass(select(8, ...) == "hello\0world!" , "arg 8 is string with NUL")
| 51.6 | 71 | 0.482558 |
bb7e9cf7baaf15750de3ba4992204349742aa442 | 441 | cs | C# | exercises/ex030/ex030/Program.cs | gpe0/csharp-exercises | e089dc900c545b56c53c3b7675e1bcff32c5022f | [
"MIT"
] | 5 | 2020-04-28T13:26:19.000Z | 2021-12-18T20:31:49.000Z | exercises/ex030/ex030/Program.cs | gpe0/csharp-exercises | e089dc900c545b56c53c3b7675e1bcff32c5022f | [
"MIT"
] | null | null | null | exercises/ex030/ex030/Program.cs | gpe0/csharp-exercises | e089dc900c545b56c53c3b7675e1bcff32c5022f | [
"MIT"
] | null | null | null | using System;
namespace ex030
{
class Program
{
static void Main(string[] args)
{
//Jagged arrays | Array of arrays
int[][] a = new int[][]
{
new int[] {4, 3, 2},
new int[] {2, 9, 6},
new int[] {10, 459, 67} // I created an array of 3 arrays
};
Console.WriteLine(a[2][1]); // Outputs 459
}
}
}
| 22.05 | 73 | 0.408163 |
e40e6b1407601ae34bfc0103ddceab29a83f7bbf | 147 | cs | C# | src/Core/EasyOC.Core/Models/PostInputModel.cs | giannik/EasyOC | b3031456c84ec89b962875669abf36dae87be84a | [
"Apache-2.0"
] | 19 | 2021-12-23T02:29:08.000Z | 2022-03-31T10:26:39.000Z | src/Core/EasyOC.Core/Models/PostInputModel.cs | giannik/EasyOC | b3031456c84ec89b962875669abf36dae87be84a | [
"Apache-2.0"
] | 7 | 2022-01-20T06:41:56.000Z | 2022-02-13T08:41:09.000Z | src/Core/EasyOC.Core/Models/PostInputModel.cs | giannik/EasyOC | b3031456c84ec89b962875669abf36dae87be84a | [
"Apache-2.0"
] | 5 | 2022-01-21T16:32:59.000Z | 2022-03-28T08:30:53.000Z | namespace EasyOC.Core.DtoModels
{
public class PostInputModel
{
public string __RequestVerificationToken { get; set; }
}
}
| 13.363636 | 62 | 0.659864 |
a9e1bfaf6d1923f3f9a6b986e54fe06427b1d8c8 | 245 | php | PHP | src/Contracts/Service/Reader.php | lynxcat/laravel-route-annotation | 8de001e683512c09c533637f18b0aff3cb1e7b30 | [
"MIT"
] | null | null | null | src/Contracts/Service/Reader.php | lynxcat/laravel-route-annotation | 8de001e683512c09c533637f18b0aff3cb1e7b30 | [
"MIT"
] | null | null | null | src/Contracts/Service/Reader.php | lynxcat/laravel-route-annotation | 8de001e683512c09c533637f18b0aff3cb1e7b30 | [
"MIT"
] | null | null | null | <?php
namespace Lynxcat\Annotation\Contracts\Service;
use Lynxcat\Annotation\Contracts\Model\AnnotationsClass;
interface Reader
{
public function parse(\ReflectionClass $ref): AnnotationsClass;
public function getType(): string;
}
| 16.333333 | 67 | 0.77551 |
ac213cc302e48478ee59cadc5b397e3c0d065949 | 8,343 | lua | Lua | Tools/BlockTracking.lua | yating1901/Sychronize-construction-of-Builderbots | 20af410f435095d33ecd6f64668e521d88440528 | [
"MIT"
] | 1 | 2020-11-08T15:23:55.000Z | 2020-11-08T15:23:55.000Z | Tools/BlockTracking.lua | yating1901/Sychronize-construction-of-Builderbots | 20af410f435095d33ecd6f64668e521d88440528 | [
"MIT"
] | null | null | null | Tools/BlockTracking.lua | yating1901/Sychronize-construction-of-Builderbots | 20af410f435095d33ecd6f64668e521d88440528 | [
"MIT"
] | null | null | null | ----------------------------------------------------
-- Block tracking of BuilderBot
--
-- Author
-- Weixu Zhu, Tutti mi chiamano Harry
-- [email protected]
--
----------------------------------------------------
local BLOCKLENGTH = 0.055
local Hungarian = require("Hungarian")
local function FindBlockXYZ(position, orientation) -- for camera
-- this function finds axis of a block :
-- |Z Z| /Y the one pointing up is z
-- |__ Y |/ the nearest one pointing towards the camera is x
-- / \ and then y follows right hand coordinate system
-- X/ \X
-- All vector in the system of the camera
-- /z
-- /
-- ------- x
-- |
-- |y in the camera's eye
local X, Y, Z -- vectors of XYZ axis of a block (in camera's coor system)
-- all the 6 dirs of a block
local dirs = {}
dirs[1] = vector3(1,0,0)
dirs[2] = vector3(0,1,0)
dirs[3] = vector3(0,0,1)
dirs[1]:rotate(orientation)
dirs[2]:rotate(orientation)
dirs[3]:rotate(orientation)
dirs[4] = -dirs[1]
dirs[5] = -dirs[2]
dirs[6] = -dirs[3]
-- clear out 3 pointing far away
for i, v in pairs(dirs) do
if v.z > 0 then dirs[i] = nil end
end
-- choose the one pointing highest(min y) as Z
local highestI
local highestY = 0
for i, v in pairs(dirs) do
if v.y < highestY then highestY = v.y highestI = i end
end
Z = dirs[highestI]
dirs[highestI] = nil
-- choose the one pointing nearest(min z) as X
local nearestI
local nearestZ = 99999999999
for i, v in pairs(dirs) do
if (position + v):length() < nearestZ then nearestZ = (position + v):length(); nearestI = i end
end
X = dirs[nearestI]
dirs[nearestI] = nil
Y = vector3(Z):cross(X) -- stupid argos way of saying Y = Z * X
return X, Y, Z -- unit vectors
end
local function XYtoQuaternion(_orientation, _X, _Y)
-- assume Z match
-- from the XY to calculate the right quaternion
local orientation = _orientation
local x = vector3(1,0,0)
x:rotate(orientation)
if (x - _X):length() < 0.2 then
-- x match
return orientation
elseif (x - _Y):length() < 0.2 then
-- x matches Y, rotate 90 clockwise
return orientation * quaternion(-math.pi/2, vector3(0,0,1))
elseif (x + _X):length() < 0.2 then
-- x matches -X, rotate 180 clockwise
return orientation * quaternion(math.pi, vector3(0,0,1))
elseif (x + _Y):length() < 0.2 then
-- x matches -Y, rotate 90 anti-clockwise
return orientation * quaternion(math.pi/2, vector3(0,0,1))
end
end
local function XYZtoQuaternion(_orientation, _X, _Y, _Z)
-- from the XYZ to calculate the right quaternion
local orientation = _orientation
local x = vector3(1,0,0)
local y = vector3(0,1,0)
local z = vector3(0,0,1)
x:rotate(orientation)
y:rotate(orientation)
z:rotate(orientation)
if (z - _Z):length() < 0.2 then
-- z is up
return XYtoQuaternion(orientation, _X, _Y)
elseif (-z - _Z):length() < 0.2 then
-- -z is up, rotate 180 along x
orientation = orientation * quaternion(math.pi, vector3(1,0,0))
return XYtoQuaternion(orientation, _X, _Y)
elseif (x - _Z):length() < 0.2 then
-- x is up, rotate a-clock 90 along y
orientation = orientation * quaternion(math.pi/2, vector3(0,1,0))
return XYtoQuaternion(orientation, _X, _Y)
elseif (-x - _Z):length() < 0.2 then
-- -x is up, rotate clock 90 along y
orientation = orientation * quaternion(-math.pi/2, vector3(0,1,0))
return XYtoQuaternion(orientation, _X, _Y)
elseif (y - _Z):length() < 0.2 then
-- y is up, rotate clock 90 along x
orientation = orientation * quaternion(-math.pi/2, vector3(1,0,0))
return XYtoQuaternion(orientation, _X, _Y)
elseif (-y - _Z):length() < 0.2 then
-- y is up, rotate a-clock 90 along x
orientation = orientation * quaternion(math.pi/2, vector3(1,0,0))
return XYtoQuaternion(orientation, _X, _Y)
end
end
local function UpdateBlock(oldBlock, newBlock)
oldBlock.position = newBlock.position
oldBlock.orientation = newBlock.orientation
oldBlock.X = newBlock.X
oldBlock.Y = newBlock.Y
oldBlock.Z = newBlock.Z
oldBlock.tags = newBlock.tags
end
local function HungarianMatch(_oldBlocks, _newBlocks)
    -- the indices of _oldBlocks may not be consecutive, e.g. 1, 2, 4, 6
    -- repack them into oldBlocksArray, indexed 1,2,3,4
local oldBlocksArray = {}
local count = 0
for i, block in pairs(_oldBlocks) do
count = count + 1
oldBlocksArray[count] = block
oldBlocksArray[count].index = i
end
-- max size
local n = #oldBlocksArray
if #_newBlocks > n then n = #_newBlocks end
-- set penalty matrix
-- fill n * n with 0
local penaltyMatrix = {}
for i = 1, n do
penaltyMatrix[i] = {}
for j = 1,n do
penaltyMatrix[i][j] = 0
end
end
-- new blocks
-- * * * * * * * *
-- old blocks * *
-- * * * * * * * *
for i, oldB in ipairs(oldBlocksArray) do
for j, newB in ipairs(_newBlocks) do
local dis = (oldB.position - newB.position):length()
penaltyMatrix[i][j] = dis + 0.1 -- 0.1 to make it not 0
end
end
local hun = Hungarian:create{costMat = penaltyMatrix, MAXorMIN = "MIN"}
hun:aug()
-- hun.match_of_X[i] is the index of match for oldBlocksArray[i]
for i, oldB in ipairs(oldBlocksArray) do
if penaltyMatrix[i][hun.match_of_X[i]] == 0 then
-- lost
local index = oldB.index
_oldBlocks[index] = nil
else
-- tracking
local index = oldB.index
--_oldBlocks[index] = _newBlocks[hun.match_of_X[i]]
UpdateBlock(_oldBlocks[index], _newBlocks[hun.match_of_X[i]])
end
end
local index = 1
for j, newB in ipairs(_newBlocks) do
if penaltyMatrix[hun.match_of_Y[j]][j] == 0 then
-- new blocks
while _oldBlocks[index] ~= nil do index = index + 1 end
_oldBlocks[index] = newB
_oldBlocks[index].id = index
end
end
end
function CheckTagDirection(block)
for i, tag in ipairs(block.tags) do
local dif = (tag.position - block.position) * (1/BLOCKLENGTH) * 2
if (block.X - dif):length() < 0.5 then
block.tags.front = tag
elseif (block.Z - dif):length() < 0.5 then
block.tags.up = tag
elseif (block.Y - dif):length() < 0.5 then
block.tags.right = tag
elseif (-block.Y - dif):length() < 0.5 then
block.tags.left = tag
end
end
for i, tag in ipairs(block.tags) do
block.tags[i] = nil
end
end
function BlockTracking(_blocks, _tags)
local blocks = {}
-- cluster tags into blocks
local p = vector3(0, 0, -BLOCKLENGTH/2)
for i, tag in ipairs(_tags) do
local middlePointV3 = vector3(p):rotate(tag.orientation) + tag.position
-- find which block it belongs
local flag = 0
for j, block in ipairs(blocks) do
if (middlePointV3 - block.position):length() < BLOCKLENGTH/3 then
flag = 1
block.tags[#block.tags + 1] = tag
block.positionSum = block.positionSum + middlePointV3
break
end
end
if flag == 0 then
blocks[#blocks + 1] = {position = middlePointV3,
positionSum = middlePointV3,
orientation = tag.orientation,
tags = {tag},
}
end
end
-- average block position
for i, block in ipairs(blocks) do
block.position = block.positionSum * (1/#block.tags)
block.positionSum = nil
end
-- adjust block orientation
for i, block in ipairs(blocks) do
block.X, block.Y, block.Z = FindBlockXYZ(block.position, block.orientation)
-- X,Y,Z are unit vectors
block.orientation = XYZtoQuaternion(block.orientation, block.X, block.Y, block.Z)
-- to make orientation matches X,Y,Z
CheckTagDirection(block)
end
HungarianMatch(_blocks, blocks)
end
| 32.088462 | 101 | 0.583843 |
1a794bddbaea8af7101dc296fabe0629d83b26b5 | 583 | sql | SQL | test/JDBC/input/BABEL-2218.sql | faizol/babelfish_extensions | 3dab47c2a27a784906426c9401fc99c9519906d1 | [
"PostgreSQL",
"Apache-2.0",
"BSD-3-Clause"
] | 115 | 2021-10-29T18:17:24.000Z | 2022-03-28T01:05:20.000Z | test/JDBC/input/BABEL-2218.sql | faizol/babelfish_extensions | 3dab47c2a27a784906426c9401fc99c9519906d1 | [
"PostgreSQL",
"Apache-2.0",
"BSD-3-Clause"
] | 81 | 2021-10-29T19:22:51.000Z | 2022-03-31T19:31:12.000Z | test/JDBC/input/BABEL-2218.sql | faizol/babelfish_extensions | 3dab47c2a27a784906426c9401fc99c9519906d1 | [
"PostgreSQL",
"Apache-2.0",
"BSD-3-Clause"
] | 53 | 2021-10-30T01:26:50.000Z | 2022-03-22T00:12:47.000Z | use master;
go
CREATE TABLE t2218(c1 INT)
INSERT INTO t2218 VALUES (2218);
GO
-- should throw an error
CREATE FUNCTION f2218()
RETURNS INT AS
BEGIN
DECLARE @return INT
SET @return = 0
SELECT * from t2218
RETURN @return
END
GO
-- if select statement has a destination, no error
CREATE FUNCTION f2218()
RETURNS INT AS
BEGIN
DECLARE @return INT
SET @return = 0
SELECT @return=c1 from t2218
RETURN @return
END
GO
-- we have an issue. see BABEL-2655
--SELECT f2218();
--GO
DECLARE @ret INT;
SET @ret = f2218();
SELECT @ret;
DROP FUNCTION f2218;
DROP TABLE t2218;
GO
| 14.219512 | 50 | 0.713551 |
df5e5e67e8b79c97cb7eb4089d9d1f607d9504ac | 418 | cs | C# | ManageCourse.Core/Model/Args/UpdateGradeNormalArgs.cs | hdh-se/classroom-api | ef3873cd19c0909954daf6f15c46f6298048894c | [
"MIT"
] | null | null | null | ManageCourse.Core/Model/Args/UpdateGradeNormalArgs.cs | hdh-se/classroom-api | ef3873cd19c0909954daf6f15c46f6298048894c | [
"MIT"
] | null | null | null | ManageCourse.Core/Model/Args/UpdateGradeNormalArgs.cs | hdh-se/classroom-api | ef3873cd19c0909954daf6f15c46f6298048894c | [
"MIT"
] | 1 | 2022-03-01T11:18:54.000Z | 2022-03-01T11:18:54.000Z | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ManageCourse.Core.Model.Args
{
public class UpdateGradeNormalArgs
{
public List<UpdateGradeSpecificArgsBase> Grades { get; set; }
public int CourseId { get; set; }
public int AssignmentId { get; set; }
public string CurrentUser { get; set; }
}
}
| 24.588235 | 69 | 0.691388 |
05ff80d548041d33b579375f2e8a0e5f04805256 | 566 | py | Python | code/python/get_all_file_name.py | Zousiyu/code_snippet | 2191b57c26082d7e592481c6953c712d60261782 | [
"MIT"
] | 1 | 2018-05-17T08:32:51.000Z | 2018-05-17T08:32:51.000Z | code/python/get_all_file_name.py | Zousiyu/code_snippet | 2191b57c26082d7e592481c6953c712d60261782 | [
"MIT"
] | null | null | null | code/python/get_all_file_name.py | Zousiyu/code_snippet | 2191b57c26082d7e592481c6953c712d60261782 | [
"MIT"
] | null | null | null | from os import listdir, walk
from os.path import isfile, isdir, join
def get_all_file_name(path):
file_list = [join(path, f) for f in listdir(path) if isfile(join(path, f))]
sub_dir = [join(path, d) for d in listdir(path) if isdir(join(path, d))]
for i in range(len(sub_dir)):
next_dir = sub_dir[i]
file_list.extend(get_all_file_name(next_dir))
return file_list
def get_all_file_name_generator(path):
for folder, subdirs, files in walk(path):
for f in files:
yield join(folder, f)
| 25.727273 | 80 | 0.641343 |
d30312c16f8e5421a457899030662ab5ea8f0e60 | 404 | cs | C# | Kore.Web.Common/Constants.cs | widecommerce/kore-cms | b41bc24454024c545d5e5ef8bfa61948631eb475 | [
"MIT"
] | 2 | 2018-06-17T07:36:40.000Z | 2018-10-09T18:43:39.000Z | Kore.Web.Common/Constants.cs | widecommerce/kore-cms | b41bc24454024c545d5e5ef8bfa61948631eb475 | [
"MIT"
] | 2 | 2021-05-16T03:53:07.000Z | 2021-11-11T00:25:06.000Z | Kore.Web.Common/Constants.cs | widecommerce/kore-cms | b41bc24454024c545d5e5ef8bfa61948631eb475 | [
"MIT"
] | 4 | 2018-08-03T16:49:14.000Z | 2021-06-05T10:41:22.000Z | namespace Kore.Web.Common
{
public static class Constants
{
public static class Areas
{
public const string Regions = "Admin/Regions";
}
public static class Tables
{
public const string Regions = "Kore_Common_Regions";
public const string RegionSettings = "Kore_Common_RegionSettings";
}
}
} | 25.25 | 79 | 0.566832 |
c6a27067612f7bdb1989b48c74eab95e2f247135 | 147 | rs | Rust | src/bin/rust-example.rs | zhangchunzhong/rust-project-template | 45a38e19143b7a8dc7553fadfd3ac2293a0bae3e | [
"Apache-2.0"
] | null | null | null | src/bin/rust-example.rs | zhangchunzhong/rust-project-template | 45a38e19143b7a8dc7553fadfd3ac2293a0bae3e | [
"Apache-2.0"
] | 1 | 2021-01-07T00:26:42.000Z | 2021-01-07T00:26:42.000Z | src/bin/rust-example.rs | zhangchunzhong/rust-project-template | 45a38e19143b7a8dc7553fadfd3ac2293a0bae3e | [
"Apache-2.0"
] | null | null | null | // Copyright 2020 The RustExample Authors.
//
// Code is licensed under Apache License, Version 2.0.
fn main() {
println!("Hello, world!");
}
| 18.375 | 54 | 0.666667 |
39452c15347a2d485a18a9835291cb7210b710c4 | 2,740 | py | Python | pytube/downloader_gui.py | jjisnow/pytube | 0fa30f6d92dd80f8b7873628174012258070c510 | [
"Unlicense"
] | null | null | null | pytube/downloader_gui.py | jjisnow/pytube | 0fa30f6d92dd80f8b7873628174012258070c510 | [
"Unlicense"
] | null | null | null | pytube/downloader_gui.py | jjisnow/pytube | 0fa30f6d92dd80f8b7873628174012258070c510 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
import os
from PyQt5 import QtGui
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import *
from pytube import downloader
class MagicWizard(QWizard):
def __init__(self, parent=None):
super(MagicWizard, self).__init__(parent)
self.addPage(url_page(self))
self.addPage(itag_page(self))
self.addPage(final_path_page(self))
self.setWindowTitle("Pytube GUI Downloader")
self.setWindowIcon(
QtGui.QIcon(
os.path.join("..", "images", "rooster.png")
))
# self.resize(640,480)
class url_page(QWizardPage):
def __init__(self, parent=None):
super(url_page, self).__init__(parent)
self.setTitle("Choose video link")
self.setSubTitle("Please input a URL to download")
layout = QVBoxLayout()
self.myTextBox = QLineEdit(self)
self.myTextBox.setAlignment(Qt.AlignLeft)
self.registerField("TextBox", self.myTextBox)
layout.addWidget(self.myTextBox)
self.setLayout(layout)
class itag_page(QWizardPage):
def __init__(self, parent=None):
super(itag_page, self).__init__(parent)
self.setTitle("Choose itag")
self.setSubTitle("Choose an itag corresponding to a video or audio stream")
layout = QVBoxLayout()
self.label1 = QLabel()
layout.addWidget(self.label1)
hbox = QHBoxLayout()
layout.addLayout(hbox)
self.label2 = QLabel()
hbox.addWidget(self.label2, alignment=Qt.AlignLeft)
self.itag_box = QLineEdit()
self.registerField("iTag*", self.itag_box)
hbox.addWidget(self.itag_box)
self.setLayout(layout)
def initializePage(self):
tb = self.field("TextBox")
self.label2.setText("itag: ")
itag_descr = downloader.downloader(tb, "--list")
font = self.label1.font()
self.label1.setFont(QtGui.QFont("Courier", 6, QtGui.QFont.Medium))
self.label1.setText(itag_descr)
class final_path_page(QWizardPage):
def __init__(self, parent=None):
super(final_path_page, self).__init__(parent)
layout = QVBoxLayout()
self.label1 = QLabel()
layout.addWidget(self.label1)
self.setLayout(layout)
def initializePage(self):
tb = self.field("TextBox")
itag = self.field("iTag")
final_path = downloader.downloader(tb, "--itag", itag, "-v")
self.label1.setTextInteractionFlags(Qt.TextSelectableByMouse)
self.label1.setText(f"Final output file: \'{final_path}\'")
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
wizard = MagicWizard()
wizard.show()
sys.exit(app.exec_())
| 28.247423 | 83 | 0.640511 |
e00f0d21a1b1cd1fd438604295d296f7fa24abbf | 135 | h | C | Specs/Support/SpecHelper.h | yodle/BloodMagic | 4e1643307b3f6f9f9543144e8fbe835413966c57 | [
"MIT"
] | 165 | 2015-01-08T01:33:35.000Z | 2021-11-14T20:19:43.000Z | Specs/Support/SpecHelper.h | yodle/BloodMagic | 4e1643307b3f6f9f9543144e8fbe835413966c57 | [
"MIT"
] | 16 | 2015-07-07T14:18:32.000Z | 2020-02-16T21:48:58.000Z | Specs/Support/SpecHelper.h | yodle/BloodMagic | 4e1643307b3f6f9f9543144e8fbe835413966c57 | [
"MIT"
] | 24 | 2015-01-17T17:18:20.000Z | 2020-09-09T09:12:56.000Z | //
// Created by Alex Denisov on 12.07.13.
// Copyright (c) 2013 railsware. All rights reserved.
//
#import <Cedar-iOS/SpecHelper.h>
| 16.875 | 53 | 0.688889 |
0a60a4e505300a82df3e2e46d3d7658b49f2b72e | 1,030 | cs | C# | CSharp OOP Advanced/Enumerations and Attributes/5. Card CompareTo()/Startup.cs | alexandrateneva/CSharp-Fundamentals-SoftUni | f56bbae2e05cba3adcf56df7b5b53239eabdf073 | [
"MIT"
] | null | null | null | CSharp OOP Advanced/Enumerations and Attributes/5. Card CompareTo()/Startup.cs | alexandrateneva/CSharp-Fundamentals-SoftUni | f56bbae2e05cba3adcf56df7b5b53239eabdf073 | [
"MIT"
] | null | null | null | CSharp OOP Advanced/Enumerations and Attributes/5. Card CompareTo()/Startup.cs | alexandrateneva/CSharp-Fundamentals-SoftUni | f56bbae2e05cba3adcf56df7b5b53239eabdf073 | [
"MIT"
] | null | null | null | namespace _5.Card_CompareTo__
{
using System;
using _5.Card_CompareTo__.Enums;
using _5.Card_CompareTo__.Models;
public class Startup
{
public static void Main()
{
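            // Read rank and suit for two cards, compare them and print the greater one.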
var rankOfFirstCard = (Rank)Enum.Parse(typeof(Rank), Console.ReadLine());
var suitOfFirstCard = (Suit)Enum.Parse(typeof(Suit), Console.ReadLine());
var firstCard = new Card(rankOfFirstCard, suitOfFirstCard);
var rankOfSecondCard = (Rank)Enum.Parse(typeof(Rank), Console.ReadLine());
var suitOfSecondCard = (Suit)Enum.Parse(typeof(Suit), Console.ReadLine());
            var secondCard = new Card(rankOfSecondCard, suitOfSecondCard);
            Card greaterCard;
            var compare = firstCard.CompareTo(secondCard);
            if (compare > 0)
            {
                greaterCard = firstCard;
            }
            else
            {
                greaterCard = secondCard;
            }
}
Console.WriteLine(greaterCard);
}
}
}
| 31.212121 | 86 | 0.574757 |
5dae827d0c916f3116817b0609f462cbb578064b | 41,315 | cpp | C++ | c/common/tests/crtabstractions_unittests/crtabstractions_unittests.cpp | josesimoes/azure-iot-sdks | 75107fa7b0e614a83dfcd81aff4727541d81fa28 | [
"MIT"
] | 4 | 2017-08-15T10:02:59.000Z | 2021-12-20T10:48:25.000Z | c/common/tests/crtabstractions_unittests/crtabstractions_unittests.cpp | josesimoes/azure-iot-sdks | 75107fa7b0e614a83dfcd81aff4727541d81fa28 | [
"MIT"
] | null | null | null | c/common/tests/crtabstractions_unittests/crtabstractions_unittests.cpp | josesimoes/azure-iot-sdks | 75107fa7b0e614a83dfcd81aff4727541d81fa28 | [
"MIT"
] | 9 | 2016-10-08T12:33:33.000Z | 2021-12-23T09:46:31.000Z | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#include <cstdlib>
#ifdef _CRTDBG_MAP_ALLOC
#include <crtdbg.h>
#endif
#include "testrunnerswitcher.h"
#include "crt_abstractions.h"
#include "errno.h"
#include <climits>
#include "micromock.h"
#ifdef _MSC_VER
#pragma warning(disable:4505)
#endif
#if defined _MSC_VER
#include "crtdbg.h"
static _invalid_parameter_handler oldInvalidParameterHandler;
static int oldReportType;
static void my_invalid_parameter_handler(
const wchar_t * expression,
const wchar_t * function,
const wchar_t * file,
unsigned int line,
uintptr_t pReserved
)
{
(void)expression;
(void)function;
(void)file;
(void)line;
(void)pReserved;
/*do nothing*/
}
/* The below defines are needed because on the Windows platform, the secure version of the CRT functions will invoke WATSON if no invalid parameter handler is provided by the user */
#define HOOK_INVALID_PARAMETER_HANDLER() {oldInvalidParameterHandler = _set_invalid_parameter_handler(my_invalid_parameter_handler);oldReportType=_CrtSetReportMode(_CRT_ASSERT, 0);}
#define UNHOOK_INVALID_PARAMETER_HANDLER() {(void)_CrtSetReportMode(_CRT_ASSERT, oldReportType); (void)_set_invalid_parameter_handler(oldInvalidParameterHandler);}
#else /* _MSC_VER */
#define HOOK_INVALID_PARAMETER_HANDLER() do{}while(0)
#define UNHOOK_INVALID_PARAMETER_HANDLER() do{}while(0)
#endif /* _MSC_VER */
static const unsigned int interestingUnsignedIntNumbersToBeConverted[] =
{
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
21,
32,
43,
54,
65,
76,
87,
98,
123,
1234,
12341,
UINT_MAX / 2,
UINT_MAX -1,
UINT_MAX,
42,
0x42
};
#ifndef SIZE_MAX
#define SIZE_MAX ((size_t)~(size_t)0)
#endif
static const size_t interestingSize_tNumbersToBeConverted[] =
{
0,
1,
2,
3,
4,
5,
6,
7,
8,
9,
10,
21,
32,
43,
54,
65,
76,
87,
98,
123,
1234,
12341,
SIZE_MAX / 2,
SIZE_MAX -1,
SIZE_MAX,
42,
0x42
};
static MICROMOCK_GLOBAL_SEMAPHORE_HANDLE g_dllByDll;
BEGIN_TEST_SUITE(CRTAbstractions_UnitTests)
TEST_SUITE_INITIALIZE(a)
{
INITIALIZE_MEMORY_DEBUG(g_dllByDll);
}
TEST_SUITE_CLEANUP(b)
{
DEINITIALIZE_MEMORY_DEBUG(g_dllByDll);
}
/* strcat_s */
// Tests_SRS_CRT_ABSTRACTIONS_99_008: [strcat_s shall append the src to dst and terminates the resulting string with a null character.]
// Tests_SRS_CRT_ABSTRACTIONS_99_009: [The initial character of src shall overwrite the terminating null character of dst.]
// Tests_SRS_CRT_ABSTRACTIONS_99_003: [strcat_s shall return Zero upon success.]
TEST_FUNCTION(strcat_s_Appends_Source_To_Destination)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result;
// act
result = strcat_s(dstString, dstSizeInBytes, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "DestinationSource", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
TEST_FUNCTION(strcat_s_Appends_Empty_Source_To_Destination)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "";
int result;
// act
result = strcat_s(dstString, dstSizeInBytes, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Destination", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
TEST_FUNCTION(strcat_s_Appends_Source_To_Empty_Destination)
{
// arrange
char dstString[128] = "";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result;
// act
result = strcat_s(dstString, dstSizeInBytes, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Source", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
TEST_FUNCTION(strcat_s_Appends_Empty_Source_To_Empty_Destination)
{
// arrange
char dstString[128] = "";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "";
int result;
// act
result = strcat_s(dstString, dstSizeInBytes, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_004: [If dst is NULL or unterminated, the error code returned shall be EINVAL & dst shall not be modified.]
TEST_FUNCTION(strcat_s_With_NULL_Destination_Fails)
{
// arrange
char* dstString = NULL;
size_t sizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
#ifdef _MSC_VER
#pragma warning(suppress: 6387) /* This is test code, explicitly calling with NULL argument */
#endif
result = strcat_s(dstString, sizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_IS_NULL(dstString);
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
TEST_FUNCTION(strcat_s_With_Unterminated_Destination_Fails)
{
// arrange
char dstString[128];
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result;
for (size_t i = 0; i < dstSizeInBytes; i++)
{
dstString[i] = 'z';
}
// act
HOOK_INVALID_PARAMETER_HANDLER();
result = strcat_s(dstString, dstSizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
#ifndef _MSC_VER /* MSDN claims that content of destination buffer is not modified, but that is not true. Filing bug. */
for (size_t i = 0; i < dstSizeInBytes; i++)
{
ASSERT_ARE_EQUAL(char, 'z', dstString[i]);
}
#endif
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_005: [If src is NULL, the error code returned shall be EINVAL and dst[0] shall be set to 0.]
TEST_FUNCTION(strcat_s_With_NULL_Source_Fails)
{
// arrange
char dstString[128] = "Source";
size_t dstSizeInBytes = sizeof(dstString);
char* srcString = NULL;
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
#ifdef _MSC_VER
#pragma warning(suppress: 6387) /* This is test code, explicitly calling with NULL argument */
#endif
result = strcat_s(dstString, dstSizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char,'\0', dstString[0]);
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_006: [If the dstSizeInBytes is 0 or smaller than the required size for dst & src, the error code returned shall be ERANGE & dst[0] set to 0.]
TEST_FUNCTION(strcat_s_With_dstSizeInBytes_Equals_Zero_Fails)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result;
// act
dstSizeInBytes = 0;
HOOK_INVALID_PARAMETER_HANDLER();
result = strcat_s(dstString, dstSizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
#ifdef _MSC_VER /*MSDN Claims that destination buffer would be set to empty & ERANGE is the error, but not the case. Filing bug.*/
ASSERT_ARE_EQUAL(char_ptr, "Destination", dstString);
ASSERT_ARE_EQUAL(int, EINVAL, result);
#else
ASSERT_ARE_EQUAL(char, '\0', dstString[0]);
ASSERT_ARE_EQUAL(int, ERANGE, result);
#endif
}
TEST_FUNCTION(strcat_s_With_dstSizeInBytes_Smaller_Than_dst_and_src_Fails)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
dstSizeInBytes = strlen(dstString) + (strlen(srcString) - 3);
result = strcat_s(dstString, dstSizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char,'\0', dstString[0]);
ASSERT_ARE_EQUAL(int, ERANGE, result);
}
/* strcpy_s */
// Tests_SRS_CRT_ABSTRACTIONS_99_016: [strcpy_s shall copy the contents in the address of src, including the terminating null character, to the location that's specified by dst.]
// Tests_SRS_CRT_ABSTRACTIONS_99_011 : [strcpy_s shall return Zero upon success]
TEST_FUNCTION(strcpy_s_copies_Source_into_Destination)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result = 0;
// act
result = strcpy_s(dstString, dstSizeInBytes, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Source", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
TEST_FUNCTION(strcpy_s_copies_Empty_Source_into_Destination)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "";
int result = 0;
// act
result = strcpy_s(dstString, dstSizeInBytes, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
TEST_FUNCTION(strcpy_s_copies_Source_into_Empty_Destination)
{
// arrange
char dstString[128] = "";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result = 0;
// act
result = strcpy_s(dstString, dstSizeInBytes, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Source", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
TEST_FUNCTION(strcpy_s_copies_Empty_Source_into_Empty_Destination)
{
// arrange
char dstString[128] = "";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "";
int result = 0;
// act
result = strcpy_s(dstString, dstSizeInBytes, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_012 : [If dst is NULL, the error code returned shall be EINVAL & dst shall not be modified.]
TEST_FUNCTION(strcpy_s_With_NULL_Destination_Fails)
{
// arrange
char* dstString = NULL;
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result = 0;
// act
HOOK_INVALID_PARAMETER_HANDLER();
#ifdef _MSC_VER
#pragma warning(suppress: 6387) /* This is test code, explicitly calling with NULL argument */
#endif
result = strcpy_s(dstString, dstSizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char_ptr, NULL, dstString);
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_013 : [If src is NULL, the error code returned shall be EINVAL and dst[0] shall be set to 0.]
TEST_FUNCTION(strcpy_s_With_NULL_Source_Fails)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char* srcString = NULL;
int result = 0;
// act
HOOK_INVALID_PARAMETER_HANDLER();
#ifdef _MSC_VER
#pragma warning(suppress: 6387) /* This is test code, explicitly calling with NULL argument */
#endif
result = strcpy_s(dstString, dstSizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char,'\0', dstString[0]);
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_014 : [If the dstSizeInBytes is 0 or smaller than the required size for the src string, the error code returned shall be ERANGE & dst[0] set to 0.]
TEST_FUNCTION(strcpy_s_With_dstSizeInBytes_Equals_Zero_Fails)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result;
// act
dstSizeInBytes = 0;
HOOK_INVALID_PARAMETER_HANDLER();
result = strcpy_s(dstString, dstSizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
#ifdef _MSC_VER /*MSDN Claims that destination buffer would be set to empty & ERANGE is the error, but not the case. Filing bug.*/
ASSERT_ARE_EQUAL(char_ptr, "Destination", dstString);
ASSERT_ARE_EQUAL(int, EINVAL, result);
#else
ASSERT_ARE_EQUAL(char, '\0', dstString[0]);
ASSERT_ARE_EQUAL(int, ERANGE, result);
#endif
}
TEST_FUNCTION(strcpy_s_With_dstSizeInBytes_Smaller_Than_source_Fails)
{
// arrange
char dstString[128] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
dstSizeInBytes = sizeof(srcString) - 2;
result = strcpy_s(dstString, dstSizeInBytes, srcString);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char,'\0', dstString[0]);
ASSERT_ARE_EQUAL(int, ERANGE, result);
}
/* strncpy_s */
// Tests_SRS_CRT_ABSTRACTIONS_99_025 : [strncpy_s shall copy the first N characters of src to dst, where N is the lesser of MaxCount and the length of src.]
// Tests_SRS_CRT_ABSTRACTIONS_99_041 : [If those N characters will fit within dst(whose size is given as dstSizeInBytes) and still leave room for a null terminator, then those characters shall be copied and a terminating null is appended; otherwise, strDest[0] is set to the null character and ERANGE error code returned per requirement below.]
// Tests_SRS_CRT_ABSTRACTIONS_99_018: [strncpy_s shall return Zero upon success]
TEST_FUNCTION(strncpy_s_copies_N_chars_of_source_to_destination_where_maxCount_equals_source_Length)
{
// arrange
char dstString[] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
size_t maxCount = sizeof(srcString);
int result;
// act
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Source", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
TEST_FUNCTION(strncpy_s_copies_N_chars_of_source_to_destination_where_maxCount_is_larger_than_Source_Length)
{
// arrange
char dstString[] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
size_t maxCount = sizeof(srcString);
int result;
// act
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount+5);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Source", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
TEST_FUNCTION(strncpy_s_copies_N_chars_of_source_to_destination_where_maxCount_is_less_than_source_length)
{
// arrange
char dstString[] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
size_t maxCount = sizeof(srcString);
int result;
// act
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount - 3);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Sour", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_026 : [If MaxCount is _TRUNCATE(defined as - 1), then as much of src as will fit into dst shall be copied while still leaving room for the terminating null to be appended.]
TEST_FUNCTION(strncpy_s_with_maxCount_set_to_TRUNCATE_and_destination_fits_source)
{
// arrange
char dstString[] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
size_t maxCount = sizeof(srcString);
int result;
// act
maxCount = _TRUNCATE;
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Source", dstString);
ASSERT_ARE_EQUAL(int, 0, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_026 : [If MaxCount is _TRUNCATE(defined as - 1), then as much of src as will fit into dst shall be copied while still leaving room for the terminating null to be appended.]
// Tests_SRS_CRT_ABSTRACTIONS_99_019: [If truncation occurred as a result of the copy, the error code returned shall be STRUNCATE .]
TEST_FUNCTION(strncpy_s_with_maxCount_set_to_TRUNCATE_and_destination_is_smaller_than_source)
{
// arrange
char dstString[] = "Dest";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
size_t maxCount = sizeof(srcString);
int result;
// act
maxCount = _TRUNCATE;
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Sour", dstString);
ASSERT_ARE_EQUAL(int, STRUNCATE, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_020 : [If dst is NULL, the error code returned shall be EINVAL and dst shall not be modified.]
TEST_FUNCTION(strncpy_s_fails_with_destination_set_to_NULL)
{
// arrange
char* dstString = NULL;
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
size_t maxCount = sizeof(srcString);
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
#ifdef _MSC_VER
#pragma warning(suppress: 6387) /* This is test code, explicitly calling with NULL argument */
#endif
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_IS_NULL(dstString);
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_021: [If src is NULL, the error code returned shall be EINVAL and dst[0] shall be set to 0.]
TEST_FUNCTION(strncpy_s_fails_with_source_set_to_NULL)
{
// arrange
char dstString[] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char* srcString = NULL;
size_t maxCount = sizeof(srcString);
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
#ifdef _MSC_VER
#pragma warning(suppress: 6387) /* This is test code, explicitly calling with NULL argument */
#endif
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char,'\0', dstString[0]);
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_022: [If the dstSizeInBytes is 0, the error code returned shall be EINVAL and dst shall not be modified.]
TEST_FUNCTION(strncpy_s_fails_with_dstSizeInBytes_set_to_Zero)
{
// arrange
char dstString[] = "Destination";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
size_t maxCount = sizeof(srcString);
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
dstSizeInBytes = 0;
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char_ptr, "Destination", dstString);
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_023 : [If dst is not NULL & dstSizeInBytes is smaller than the required size for the src string, the error code returned shall be ERANGE and dst[0] shall be set to 0.]
TEST_FUNCTION(strncpy_s_dstSizeInBytes_is_smaller_than_the_required_size_for_source)
{
// arrange
char dstString[] = "Dest";
size_t dstSizeInBytes = sizeof(dstString);
char srcString[] = "Source";
size_t maxCount = sizeof(srcString);
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
result = strncpy_s(dstString, dstSizeInBytes, srcString, maxCount);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char,'\0', dstString[0]);
ASSERT_ARE_EQUAL(int, ERANGE, result);
}
/* sprintf_s */
// Tests_SRS_CRT_ABSTRACTIONS_99_029: [The sprintf_s function shall format and store series of characters and values in dst.Each argument(if any) is converted and output according to the corresponding Format Specification in the format variable.]
// Tests_SRS_CRT_ABSTRACTIONS_99_031: [A null character is appended after the last character written.]
// Tests_SRS_CRT_ABSTRACTIONS_99_027: [sprintf_s shall return the number of characters stored in dst upon success. This number shall not include the terminating null character.]
TEST_FUNCTION(sprintf_s_formats_and_stores_chars_and_values_in_destination)
{
// arrange
char dstString[1024];
size_t dstSizeInBytes = sizeof(dstString);
char expectedString[] = "sprintf_s: 123, hello, Z, 1.5";
int expectedStringSize = (int)(sizeof(expectedString));
int result;
// act
result = sprintf_s(dstString, dstSizeInBytes, "sprintf_s: %d, %s, %c, %3.1f", 123, "hello", 'Z', 1.5f);
// assert
ASSERT_ARE_EQUAL(char_ptr, expectedString, dstString);
ASSERT_ARE_EQUAL(int, expectedStringSize-1, result);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_028: [If dst or format is a null pointer, sprintf_s shall return -1.]
TEST_FUNCTION(sprintf_s_fails_with_dst_set_to_null)
{
// arrange
char* dstString = NULL;
size_t dstSizeInBytes = sizeof(dstString);
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
#ifdef _MSC_VER
#pragma warning(suppress: 6387) /* This is test code, explicitly calling with NULL argument */
#endif
result = sprintf_s(dstString, dstSizeInBytes, "sprintf_s: %d, %s, %c, %3.1f", 123, "hello", 'Z', 1.5f);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(int, -1, result);
ASSERT_ARE_EQUAL(int, EINVAL, errno);
}
TEST_FUNCTION(sprintf_s_fails_with_format_set_to_null)
{
// arrange
char dstString[1024];
size_t dstSizeInBytes = sizeof(dstString);
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
#ifdef _MSC_VER
#pragma warning(suppress: 6387) /* This is test code, explicitly calling with NULL argument */
#endif
result = sprintf_s(dstString, dstSizeInBytes, NULL);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(int, -1, result);
ASSERT_ARE_EQUAL(int, EINVAL, errno);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_034 : [If the dst buffer is too small for the text being printed, then dst is set to an empty string and the function shall return -1.]
TEST_FUNCTION(sprintf_s_fails_with_dst_too_small)
{
// arrange
char dstString[5];
size_t dstSizeInBytes = sizeof(dstString);
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
result = sprintf_s(dstString, dstSizeInBytes, "sprintf_s: %d, %s, %c, %3.1f", 123, "hello", 'Z', 1.5f);
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char_ptr, "", dstString);
ASSERT_ARE_EQUAL(int, -1, result);
}
TEST_FUNCTION(sprintf_s_fails_with_dst_buffer_size_not_fitting_null_char)
{
// arrange
char dstString[5];
size_t dstSizeInBytes = sizeof(dstString);
int result;
// act
HOOK_INVALID_PARAMETER_HANDLER();
result = sprintf_s(dstString, dstSizeInBytes, "12345");
UNHOOK_INVALID_PARAMETER_HANDLER();
// assert
ASSERT_ARE_EQUAL(char_ptr, "", dstString);
ASSERT_ARE_EQUAL(int, -1, result);
}
/* mallocAndStrcpy_s */
// Tests_SRS_CRT_ABSTRACTIONS_99_038 : [mallocAndstrcpy_s shall allocate memory for destination buffer to fit the string in the source parameter.]
// Tests_SRS_CRT_ABSTRACTIONS_99_039 : [mallocAndstrcpy_s shall copy the contents in the address source, including the terminating null character into location specified by the destination pointer after the memory allocation.]
// Tests_SRS_CRT_ABSTRACTIONS_99_035: [mallocAndstrcpy_s shall return Zero upon success]
TEST_FUNCTION(mallocAndStrcpy_s_copies_source_string_into_allocated_memory)
{
// arrange
char* destString = NULL;
char srcString[] = "Source";
int result;
// act
result = mallocAndStrcpy_s(&destString, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, destString, srcString);
ASSERT_ARE_EQUAL(int, 0, result);
///cleanup
free(destString);
}
// Tests_SRS_CRT_ABSTRACTIONS_99_036: [destination parameter or source parameter is NULL, the error code returned shall be EINVAL and destination shall not be modified.]
TEST_FUNCTION(mallocAndStrcpy_s_fails_with_destination_pointer_set_to_null)
{
// arrange
char** destPointer = NULL;
char srcString[] = "Source";
int result;
// act
result = mallocAndStrcpy_s(destPointer, srcString);
// assert
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
TEST_FUNCTION(mallocAndStrcpy_s_fails_with_source_set_to_null)
{
// arrange
char* destString = (char*)("Destination");
char* srcString = NULL;
int result;
// act
result = mallocAndStrcpy_s(&destString, srcString);
// assert
ASSERT_ARE_EQUAL(char_ptr, "Destination", destString);
ASSERT_ARE_EQUAL(int, EINVAL, result);
}
/*http://vstfrd:8080/Azure/RD/_workitems/edit/3216760*/
#if 0
// Tests_SRS_CRT_ABSTRACTIONS_99_037: [Upon failure to allocate memory for the destination, the function will return ENOMEM.]
TEST_FUNCTION(mallocAndStrcpy_s_fails_upon_failure_to_allocate_memory)
{
// arrange
char* destString = NULL;
char* srcString = "Source";
int result;
mallocSize = 0;
// act
#ifdef _CRTDBG_MAP_ALLOC
HOOK_FUNCTION(_malloc_dbg, malloc_null);
result = mallocAndStrcpy_s(&destString, srcString);
UNHOOK_FUNCTION(_malloc_dbg, malloc_null);
#else
HOOK_FUNCTION(malloc, malloc_null);
result = mallocAndStrcpy_s(&destString, srcString);
UNHOOK_FUNCTION(malloc, malloc_null);
#endif
// assert
ASSERT_ARE_EQUAL(int, ENOMEM, result);
ASSERT_ARE_EQUAL(size_t,strlen(srcString)+1, mallocSize);
}
#endif
/*Tests_SRS_CRT_ABSTRACTIONS_02_003: [If destination is NULL then unsignedIntToString shall fail.] */
TEST_FUNCTION(unsignedIntToString_fails_when_destination_is_NULL)
{
// arrange
// act
auto result = unsignedIntToString(NULL, 100, 43);
// assert
ASSERT_ARE_NOT_EQUAL(int, 0, result);
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_002: [If the conversion fails for any reason (for example, insufficient buffer space), a non-zero return value shall be supplied and unsignedIntToString shall fail.] */
TEST_FUNCTION(unsignedIntToString_fails_when_destination_is_not_sufficient_for_1_digit)
{
// arrange
char destination[1000];
unsigned int toBeConverted = 1;
size_t destinationSize = 1;
///act
int result = unsignedIntToString(destination, destinationSize, toBeConverted);
///assert
ASSERT_ARE_NOT_EQUAL(int, 0, result);
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_002: [If the conversion fails for any reason (for example, insufficient buffer space), a non-zero return value shall be supplied and unsignedIntToString shall fail.] */
TEST_FUNCTION(unsignedIntToString_fails_when_destination_is_not_sufficient_for_more_than_1_digit)
{
// arrange
char destination[1000];
unsigned int toBeConverted = 1; /*7 would not be a right starting digit*/
size_t destinationSize = 1;
while (toBeConverted <= (UINT_MAX / 10))
{
///arrange
destinationSize++;
toBeConverted *= 10;
///act
int result = unsignedIntToString(destination, destinationSize, toBeConverted);
///assert
ASSERT_ARE_NOT_EQUAL(int, 0, result);
}
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_001: [unsignedIntToString shall convert the parameter value to its decimal representation as a string in the buffer indicated by parameter destination having the size indicated by parameter destinationSize.] */
TEST_FUNCTION(unsignedIntToString_succeeds_1_digit)
{
// arrange
char destination[1000];
unsigned int toBeConverted = 2;
size_t destinationSize = 2;
///act
int result = unsignedIntToString(destination, destinationSize, toBeConverted);
///assert
ASSERT_ARE_EQUAL(int, 0, result);
ASSERT_ARE_EQUAL(char_ptr, "2", destination);
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_001: [unsignedIntToString shall convert the parameter value to its decimal representation as a string in the buffer indicated by parameter destination having the size indicated by parameter destinationSize.] */
/*Tests_SRS_CRT_ABSTRACTIONS_02_004: [If the conversion has been successfull then unsignedIntToString shall return 0.] */
TEST_FUNCTION(unsignedIntToString_succeeds_for_interesting_numbers)
{
// arrange
char destination[1000];
size_t i;
for (i = 0; i<sizeof(interestingUnsignedIntNumbersToBeConverted) / sizeof(interestingUnsignedIntNumbersToBeConverted[0]); i++)
{
///act
int result = unsignedIntToString(destination, 1000, interestingUnsignedIntNumbersToBeConverted[i]);
///assert
ASSERT_ARE_EQUAL(int, 0, result);
unsigned int valueFromString = 0;
size_t pos = 0;
while (destination[pos] != '\0')
{
if (valueFromString > (UINT_MAX / 10))
{
ASSERT_FAIL("string produced was too big... ");
}
else
{
valueFromString *= 10;
valueFromString += (destination[pos] - '0');
}
pos++;
}
if (interestingUnsignedIntNumbersToBeConverted[i] != valueFromString)
{
ASSERT_FAIL("unexpected value");
}
}
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_001: [unsignedIntToString shall convert the parameter value to its decimal representation as a string in the buffer indicated by parameter destination having the size indicated by parameter destinationSize.] */
/*Tests_SRS_CRT_ABSTRACTIONS_02_004: [If the conversion has been successfull then unsignedIntToString shall return 0.] */
TEST_FUNCTION(unsignedIntToString_succeeds_for_space_just_about_right)
{
// arrange
char destination[1000];
unsigned int toBeConverted = 1; /*7 would not be a right starting digit*/
size_t destinationSize = 2;
while (toBeConverted <= (UINT_MAX / 10))
{
///arrange
destinationSize++;
toBeConverted *= 10;
///act
int result = unsignedIntToString(destination, destinationSize, toBeConverted);
///assert
ASSERT_ARE_EQUAL(int, 0, result);
unsigned int valueFromString = 0;
size_t pos = 0;
while (destination[pos] != '\0')
{
if (valueFromString > (UINT_MAX / 10))
{
ASSERT_FAIL("string produced was too big... ");
}
else
{
valueFromString *= 10;
valueFromString += (destination[pos] - '0');
}
pos++;
}
if (toBeConverted != valueFromString)
{
ASSERT_FAIL("unexpected value");
}
}
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_007: [If destination is NULL then size_tToString shall fail.] */
TEST_FUNCTION(size_tToString_fails_when_destination_is_NULL)
{
// arrange
// act
auto result = size_tToString(NULL, 100, 43);
// assert
ASSERT_ARE_NOT_EQUAL(int, 0, result);
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_006: [If the conversion fails for any reason (for example, insufficient buffer space), a non-zero return value shall be supplied and size_tToString shall fail.] */
TEST_FUNCTION(size_tToString_fails_when_destination_is_not_sufficient_for_1_digit)
{
// arrange
char destination[1000];
size_t toBeConverted = 1;
size_t destinationSize = 1;
///act
int result = size_tToString(destination, destinationSize, toBeConverted);
///assert
ASSERT_ARE_NOT_EQUAL(int, 0, result);
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_006: [If the conversion fails for any reason (for example, insufficient buffer space), a non-zero return value shall be supplied and size_tToString shall fail.] */
TEST_FUNCTION(size_tToString_fails_when_destination_is_not_sufficient_for_more_than_1_digit)
{
// arrange
char destination[1000];
size_t toBeConverted = 1; /*7 would not be a right starting digit*/
size_t destinationSize = 1;
while (toBeConverted <= (UINT_MAX / 10))
{
///arrange
destinationSize++;
toBeConverted *= 10;
///act
int result = size_tToString(destination, destinationSize, toBeConverted);
///assert
ASSERT_ARE_NOT_EQUAL(int, 0, result);
}
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_001: [size_tToString shall convert the parameter value to its decimal representation as a string in the buffer indicated by parameter destination having the size indicated by parameter destinationSize.] */
TEST_FUNCTION(size_tToString_succeeds_1_digit)
{
// arrange
char destination[1000];
size_t toBeConverted = 2;
size_t destinationSize = 2;
///act
int result = size_tToString(destination, destinationSize, toBeConverted);
///assert
ASSERT_ARE_EQUAL(int, 0, result);
ASSERT_ARE_EQUAL(char_ptr, "2", destination);
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_001: [size_tToString shall convert the parameter value to its decimal representation as a string in the buffer indicated by parameter destination having the size indicated by parameter destinationSize.] */
/*Tests_SRS_CRT_ABSTRACTIONS_02_004: [If the conversion has been successfull then size_tToString shall return 0.] */
TEST_FUNCTION(size_tToString_succeeds_for_interesting_numbers)
{
// arrange
char destination[1000];
size_t i;
for (i = 0; i<sizeof(interestingSize_tNumbersToBeConverted) / sizeof(interestingSize_tNumbersToBeConverted[0]); i++)
{
///act
int result = size_tToString(destination, 1000, interestingSize_tNumbersToBeConverted[i]);
///assert
ASSERT_ARE_EQUAL(int, 0, result);
size_t valueFromString = 0;
size_t pos = 0;
while (destination[pos] != '\0')
{
if (valueFromString > (SIZE_MAX / 10))
{
ASSERT_FAIL("string produced was too big... ");
}
else
{
valueFromString *= 10;
valueFromString += (destination[pos] - '0');
}
pos++;
}
if (interestingSize_tNumbersToBeConverted[i] != valueFromString)
{
ASSERT_FAIL("unexpected value");
}
}
}
/*Tests_SRS_CRT_ABSTRACTIONS_02_001: [size_tToString shall convert the parameter value to its decimal representation as a string in the buffer indicated by parameter destination having the size indicated by parameter destinationSize.] */
/*Tests_SRS_CRT_ABSTRACTIONS_02_004: [If the conversion has been successfull then size_tToString shall return 0.] */
TEST_FUNCTION(size_tToString_succeeds_for_space_just_about_right)
{
// arrange
char destination[1000];
size_t toBeConverted = 1; /*7 would not be a right starting digit*/
size_t destinationSize = 2;
while (toBeConverted <= (SIZE_MAX / 10))
{
///arrange
destinationSize++;
toBeConverted *= 10;
///act
int result = size_tToString(destination, destinationSize, toBeConverted);
///assert
ASSERT_ARE_EQUAL(int, 0, result);
size_t valueFromString = 0;
size_t pos = 0;
while (destination[pos] != '\0')
{
if (valueFromString > (SIZE_MAX / 10))
{
ASSERT_FAIL("string produced was too big... ");
}
else
{
valueFromString *= 10;
valueFromString += (destination[pos] - '0');
}
pos++;
}
if (toBeConverted != valueFromString)
{
ASSERT_FAIL("unexpected value");
}
}
}
END_TEST_SUITE(CRTAbstractions_UnitTests)
| 36.789849 | 352 | 0.600557 |
7f6e382acb08e9f591eb53116543221d27413df0 | 616 | php | PHP | modules/user/models/CommentForm.php | Pavel-Tsymbal/socialnetwork | 82a427f676c778282e9048d18cddb0c92a8b8006 | [
"BSD-3-Clause"
] | null | null | null | modules/user/models/CommentForm.php | Pavel-Tsymbal/socialnetwork | 82a427f676c778282e9048d18cddb0c92a8b8006 | [
"BSD-3-Clause"
] | null | null | null | modules/user/models/CommentForm.php | Pavel-Tsymbal/socialnetwork | 82a427f676c778282e9048d18cddb0c92a8b8006 | [
"BSD-3-Clause"
] | null | null | null | <?php
/**
* Created by PhpStorm.
* User: pavel
* Date: 19.09.18
* Time: 10:16
*/
namespace app\modules\user\models;
use yii\base\Model;
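/**
 * Form model for a user comment: parent comment id, author details and the message body.
 */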
class CommentForm extends Model
{
public $parent_id;
public $author_id;
public $author_name;
public $author_avatar;
public $message;
/**
* @return array the validation rules.
*/
public function rules()
{
return [
['parent_id','integer'],
['author_id','integer'],
['author_name', 'string'],
['author_avatar', 'string'],
['message', 'string'],
];
}
} | 17.6 | 42 | 0.540584 |
1bff5898e6ac4c81c427d8cfbca2109862a601b3 | 2,359 | sql | SQL | data/queries/women_in_voting_center.sql | kuralabs/mivotico-tse2sql | 51c5d7695a930d6a38e603cb6a12f09f5e1e563b | [
"Apache-2.0"
] | null | null | null | data/queries/women_in_voting_center.sql | kuralabs/mivotico-tse2sql | 51c5d7695a930d6a38e603cb6a12f09f5e1e563b | [
"Apache-2.0"
] | 8 | 2016-02-13T03:57:13.000Z | 2016-02-18T11:49:33.000Z | data/queries/women_in_voting_center.sql | kuralabs/mivotico-tse2sql | 51c5d7695a930d6a38e603cb6a12f09f5e1e563b | [
"Apache-2.0"
] | null | null | null | # To find the voting centers of a district, looked up by a person id
SELECT id_voting_center, voting_center.name, voting_center.district_id_district
FROM voting_center
JOIN voter ON voter.district_id_district = voting_center.district_id_district
WHERE id_voter = <ID_PERSON>;
/*
+------------------+-------------------------------------------------+----------------------+
| id_voting_center | name | district_id_district |
+------------------+-------------------------------------------------+----------------------+
| 111005001 | Escuela Estado De Israel | 111005 |
| 111005002 | Colegio Tecnico Profesional Vazquez De Coronado | 111005 |
+------------------+-------------------------------------------------+----------------------+
*/
# To find women in voting centers by voting center id
SELECT COUNT(*) AS Mujeres, voting_center.name AS `Centro de Votacion`
FROM voter
JOIN site_per_voting_center ON voter.id_site = site_per_voting_center.id_site
JOIN voting_center ON site_per_voting_center.voting_center_id_voting_center = voting_center.id_voting_center
WHERE voting_center.id_voting_center = 111005001 AND voter.sex = 2
GROUP BY voting_center.name
ORDER BY Mujeres DESC;
/*
+---------+--------------------------+
| Mujeres | Centro de Votacion |
+---------+--------------------------+
| 6321 | Escuela Estado De Israel |
+---------+--------------------------+
1 row in set (0.11 sec)
*/
# To find women in voting centers by district id
SELECT COUNT(*) AS Mujeres, voting_center.name AS `Centro de Votacion`
FROM voter
JOIN site_per_voting_center ON voter.id_site = site_per_voting_center.id_site
JOIN voting_center ON site_per_voting_center.voting_center_id_voting_center = voting_center.id_voting_center
WHERE voting_center.district_id_district = 111005 AND voter.sex = 2
GROUP BY voting_center.name
ORDER BY Mujeres DESC;
/*
+---------+-------------------------------------------------+
| Mujeres | Centro de Votacion |
+---------+-------------------------------------------------+
| 6321 | Escuela Estado De Israel |
| 2534 | Colegio Tecnico Profesional Vazquez De Coronado |
+---------+-------------------------------------------------+
2 rows in set (0.11 sec)
*/
| 46.254902 | 109 | 0.543027 |
40f0208e5857b75590e7d40d695974348866b1e6 | 298 | rb | Ruby | test/integration/user_flows_test.rb | nTraum/tf2movies | 3dae7f051036d611e6b59b8704ec43d5a58d66be | [
"MIT"
] | 3 | 2017-03-25T17:31:41.000Z | 2021-05-28T13:03:30.000Z | test/integration/user_flows_test.rb | nTraum/tf2movies | 3dae7f051036d611e6b59b8704ec43d5a58d66be | [
"MIT"
] | 1 | 2015-03-01T16:37:36.000Z | 2015-05-18T20:06:17.000Z | test/integration/user_flows_test.rb | nTraum/tf2movies | 3dae7f051036d611e6b59b8704ec43d5a58d66be | [
"MIT"
] | null | null | null | require 'test_helper'
# To be handled correctly this spec must end with "Integration Test"
describe 'UserFlows Integration Test' do
include SharedIntegrationSteps
it 'must be able to login for the first time' do
login
end
it 'must be able to logout' do
login
logout
end
end
| 19.866667 | 68 | 0.731544 |
efb02f49771502bcb4801243419e05e9489889b5 | 454 | lua | Lua | lua/plugins/dashboard.lua | coolabhays/neovim-lua | 42c4254278e31fd966449bb8709d47cb296c0b71 | [
"MIT"
] | null | null | null | lua/plugins/dashboard.lua | coolabhays/neovim-lua | 42c4254278e31fd966449bb8709d47cb296c0b71 | [
"MIT"
] | null | null | null | lua/plugins/dashboard.lua | coolabhays/neovim-lua | 42c4254278e31fd966449bb8709d47cb296c0b71 | [
"MIT"
] | null | null | null | vim.g.dashboard_default_executive ='telescope' -- vim-clap, fzf, telescope
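-- Dashboard shortcut entries: each maps a key to a description and the command it runs.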
vim.g.dashboard_custom_section = {
a = {description = {' Find File '}, command = 'Telescope find_files'},
d = {description = {' Search Text '}, command = 'Telescope live_grep'},
b = {description = {' Recent Files '}, command = 'Telescope oldfiles'},
e = {description = {' Config '}, command = 'edit ~/.config/nvim/init.lua'}
}
| 50.444444 | 90 | 0.601322 |
0a537c304d3f58112f4b7f82a8ee4c832e1176fa | 1,413 | cs | C# | Plugins/Aspose.Cells Vs OpenXML Spreadsheets/OpenXML Missing Features/Set External Links in Formula/Program.cs | sergioruizdeveloper/Aspose.Cells-for-.NET | e0a946a5a1fd60164268f41546e424270ac7cbd9 | [
"MIT"
] | 157 | 2016-05-08T05:12:41.000Z | 2022-03-29T11:37:26.000Z | Plugins/Aspose.Cells Vs OpenXML Spreadsheets/OpenXML Missing Features/Set External Links in Formula/Program.cs | sergioruizdeveloper/Aspose.Cells-for-.NET | e0a946a5a1fd60164268f41546e424270ac7cbd9 | [
"MIT"
] | 13 | 2016-06-24T12:04:29.000Z | 2022-03-15T02:37:15.000Z | Plugins/Aspose.Cells Vs OpenXML Spreadsheets/OpenXML Missing Features/Set External Links in Formula/Program.cs | sergioruizdeveloper/Aspose.Cells-for-.NET | e0a946a5a1fd60164268f41546e424270ac7cbd9 | [
"MIT"
] | 71 | 2016-04-09T07:15:53.000Z | 2022-03-29T07:38:07.000Z | using Aspose.Cells;
/*
This project uses Automatic Package Restore feature of NuGet to resolve Aspose.Cells for .NET API reference when the project is build. Please check https://docs.nuget.org/consume/nuget-faq for more information. If you do not wish to use NuGet, you can manually download Aspose.Cells for .NET API from http://www.aspose.com/downloads, install it and then add its reference to this project. For any issues, questions or suggestions please feel free to contact us using http://www.aspose.com/community/forums/default.aspx
*/
namespace Aspose.Plugins.AsposeVSOpenXML
{
class Program
{
static void Main(string[] args)
{
string FilePath = @"..\..\..\Sample Files\";
string FileName = FilePath + "Set External Links in Formula.xlsx";
//Instantiate a new Workbook.
Workbook workbook = new Workbook();
//Get first Worksheet
Worksheet sheet = workbook.Worksheets[0];
//Get Cells collection
Aspose.Cells.Cells cells = sheet.Cells;
//Set formula with external links
cells["A1"].Formula = "=SUM('[book1.xls]Sheet1'!A2, '[book1.xls]Sheet1'!A4)";
//Set formula with external links
cells["A2"].Formula = "='[book1.xls]Sheet1'!A8";
//Save the workbook
workbook.Save(FileName);
}
}
}
| 39.25 | 518 | 0.636235 |
25cb282c5eba8fe1f7a7c3703958032819e036fe | 140 | cs | C# | Working with Abstraction/Exercise/P03_JediGalaxy/Player.cs | MiroslavPeychev/C-Sharp-OOP | 00b0ec1c064d78f3f3839a75c329d0a528a4b013 | [
"MIT"
] | 1 | 2021-12-12T20:11:58.000Z | 2021-12-12T20:11:58.000Z | Working with Abstraction/Exercise/P03_JediGalaxy/Player.cs | MiroslavPeychev/C-Sharp-OOP | 00b0ec1c064d78f3f3839a75c329d0a528a4b013 | [
"MIT"
] | null | null | null | Working with Abstraction/Exercise/P03_JediGalaxy/Player.cs | MiroslavPeychev/C-Sharp-OOP | 00b0ec1c064d78f3f3839a75c329d0a528a4b013 | [
"MIT"
] | null | null | null | namespace P03_JediGalaxy
{
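    // Simple holder for a row/column position.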
public class Player
{
public int Row { get; set; }
public int Col { get; set; }
}
} | 15.555556 | 36 | 0.542857 |
2585c30791643a9d85047ee741242e601914e418 | 344 | cs | C# | Command.cs | 1kevgriff/ProjectDover | 36e615255cb93f32caccd038ca6470b2de7fe34b | [
"MIT"
] | null | null | null | Command.cs | 1kevgriff/ProjectDover | 36e615255cb93f32caccd038ca6470b2de7fe34b | [
"MIT"
] | 1 | 2018-10-04T12:13:40.000Z | 2018-10-04T12:13:40.000Z | Command.cs | 1kevgriff/ProjectDover | 36e615255cb93f32caccd038ca6470b2de7fe34b | [
"MIT"
] | 2 | 2018-09-11T20:12:26.000Z | 2018-10-04T11:16:39.000Z | namespace ProjectDover
{
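    // Commands recognized by the game's input parser: movement, look/take, inventory, summary, save, quit, plus parser status values.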
public enum Command
{
UNKNOWN = -1,
COMMAND_HANDLED,
COMMAND_QUIT,
COMMAND_NORTH,
COMMAND_SOUTH,
COMMAND_EAST,
COMMAND_WEST,
COMMAND_LOOK,
COMMAND_TAKE,
COMMAND_INVENTORY,
COMMAND_SUMMARY,
COMMAND_SAVE
}
}
| 17.2 | 26 | 0.555233 |
45c6b2d8d58dc82c9c66e5024f542ea6b7c92d12 | 1,293 | py | Python | app/handlers/callback/select_all_product_for_recipe.py | Katel212/MyPersonalKitchenBot | 03de0beeaf2665e8b3ddd1709da3d4edcd422b80 | [
"MIT"
] | null | null | null | app/handlers/callback/select_all_product_for_recipe.py | Katel212/MyPersonalKitchenBot | 03de0beeaf2665e8b3ddd1709da3d4edcd422b80 | [
"MIT"
] | 5 | 2020-12-22T17:53:05.000Z | 2021-04-07T20:00:47.000Z | app/handlers/callback/select_all_product_for_recipe.py | Katel212/MyPersonalKitchenBot | 03de0beeaf2665e8b3ddd1709da3d4edcd422b80 | [
"MIT"
] | null | null | null | import re
from aiogram import types
from aiogram.dispatcher import filters, FSMContext
from app.models import Product, ShoppingList, ShoppingListToProduct
from app.misc import dp
from app.states.state_ingredients import IngredientsForRecipe
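# "Select all" callback: add every product not already on one of the user's shopping lists to the recipe ingredients.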
@dp.callback_query_handler(filters.Regexp(r'select_all_product_for_recipe'), state=IngredientsForRecipe.ingredients)
async def select_all_product_for_recipe(query: types.CallbackQuery, state: FSMContext):
async with state.proxy() as st:
name = re.match(r'select_all_product_for_recipe', query.data).groups()
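        # Fetch the user's products and the shopping-list rows already linking products to the user's lists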
products = await Product.query.where(Product.user_id == query.from_user.id).gino.all()
shopping_list_product_connections = await ShoppingList \
.join(ShoppingListToProduct, ShoppingListToProduct.shopping_list_id == ShoppingList.id) \
.select(ShoppingList.user_id == query.from_user.id) \
.gino.all()
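        # Keep only products that are not already attached to a shopping list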
shopping_list_product_ids = [data[3] for data in shopping_list_product_connections]
products = list(filter(lambda item: item.id not in shopping_list_product_ids, products))
if len(st.get("ingredients", [])) == 0:
st["ingredients"] = []
for prod in products:
st["ingredients"].append(prod.name)
await query.answer()
| 46.178571 | 116 | 0.730858 |