Dataset schema (one record per source file; each data row below lists these columns in order, ending with the file content and its per-file line statistics):

| # | column | type | values |
|---|--------|------|--------|
| 1 | hexsha | string | length 40 |
| 2 | size | int64 | 5 to 1.05M |
| 3 | ext | string | 98 classes |
| 4 | lang | string | 21 classes |
| 5 | max_stars_repo_path | string | lengths 3 to 945 |
| 6 | max_stars_repo_name | string | lengths 4 to 118 |
| 7 | max_stars_repo_head_hexsha | string | lengths 40 to 78 |
| 8 | max_stars_repo_licenses | sequence | lengths 1 to 10 |
| 9 | max_stars_count | int64 | 1 to 368k, nullable |
| 10 | max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| 11 | max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| 12 | max_issues_repo_path | string | lengths 3 to 945 |
| 13 | max_issues_repo_name | string | lengths 4 to 118 |
| 14 | max_issues_repo_head_hexsha | string | lengths 40 to 78 |
| 15 | max_issues_repo_licenses | sequence | lengths 1 to 10 |
| 16 | max_issues_count | int64 | 1 to 134k, nullable |
| 17 | max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| 18 | max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| 19 | max_forks_repo_path | string | lengths 3 to 945 |
| 20 | max_forks_repo_name | string | lengths 4 to 135 |
| 21 | max_forks_repo_head_hexsha | string | lengths 40 to 78 |
| 22 | max_forks_repo_licenses | sequence | lengths 1 to 10 |
| 23 | max_forks_count | int64 | 1 to 105k, nullable |
| 24 | max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| 25 | max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| 26 | content | string | lengths 5 to 1.05M |
| 27 | avg_line_length | float64 | 1 to 1.03M |
| 28 | max_line_length | int64 | 2 to 1.03M |
| 29 | alphanum_fraction | float64 | 0 to 1 |
07400126f265a84bfe7f3ebd9c5ddc72fffda1cf | 1,577 | css | CSS | app/styles/main.css | patelpriyam203/FE_Bloctime | 1b95ad1092a1d7b4fe7a7191dc69b03624881f9e | ["Apache-2.0"] | null | null | null | app/styles/main.css | patelpriyam203/FE_Bloctime | 1b95ad1092a1d7b4fe7a7191dc69b03624881f9e | ["Apache-2.0"] | null | null | null | app/styles/main.css | patelpriyam203/FE_Bloctime | 1b95ad1092a1d7b4fe7a7191dc69b03624881f9e | ["Apache-2.0"] | null | null | null |
* {
margin: 0;
padding: 0;
box-sizing: border-box;
}
html,
body {
background-color: #FFFFFF;
font-family: 'Lato', sans-serif;
font-weight: 300;
font-size: 25px;
text-rendering: optimizeLegibility;
}
.row {
max-width: 1140px;
margin: 0 auto;
width: auto;
height: auto;
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
/* border: 1px solid black; */
border-radius: 50px 50px;
}
.break {
/* margin: 30px 30px; */
padding-top: 25px;
font-size: 40px;
color: #fff;
font-weight: 250;
text-align: center;
width: 300px;
height: 100px;
background: #979797;
border: 1px solid #979797;
border-radius: 50px 50px;
/* border: 1px solid black; */
}
.clock {
margin-top: 10px;
margin-bottom: 20px;
font-size: 50px;
text-align: center;
padding: 25px 0;
color: #CA3939;
/* border: 1px solid black; */
}
.ion-play {
margin-left: 95px;
padding: 10px 15px 10px 17px;
font-size: 30px;
color: #979797;
border: 2px solid #979797;
border-radius: 50%;
/* border: 1px solid black; */
}
.ion-stop {
margin-left: 95px;
padding: 10px 15px;
font-size: 30px;
color: #979797;
border: 2px solid #979797;
border-radius: 50%;
/* border: 1px solid black; */
}
.ion-skip-forward {
padding: 10px 15px;
font-size: 30px;
color: #979797;
border: 2px solid #979797;
border-radius: 50%;
/* border: 1px solid black; */
}
.breakCounter {
font-size: 20px;
margin-bottom: 30px;
padding-top: 50px;
color: #979797;
text-align: center;
/* border: 1px solid black; */
}
| 17.32967 | 37 | 0.629042 |
c4fef3501d1e79063d25b709e2a8c0c0df6017e9 | 289 | cpp | C++ | Dataset/Leetcode/test/112/795.cpp | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | ["MIT"] | null | null | null | Dataset/Leetcode/test/112/795.cpp | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | ["MIT"] | null | null | null | Dataset/Leetcode/test/112/795.cpp | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | ["MIT"] | null | null | null |
class Solution {
public:
bool XXX(TreeNode* root, int sum) {
        if(root==nullptr) return false; // empty tree
sum-=root->val;
        if(root->left==nullptr&&root->right==nullptr) // leaf node
return sum==0;
return XXX(root->left,sum)||XXX(root->right,sum);
}
};
| 24.083333 | 59 | 0.557093 |
3f48d92a98b96e432692991fb3cf0b0ccf030344 | 589 | rb | Ruby | tools/generate_ticket_for_webpage.rb | nhoriguchi/ticket_cli | 040bab5d5f5549a62c42c579b9154f8770c2269d | ["Apache-2.0"] | null | null | null | tools/generate_ticket_for_webpage.rb | nhoriguchi/ticket_cli | 040bab5d5f5549a62c42c579b9154f8770c2269d | ["Apache-2.0"] | null | null | null | tools/generate_ticket_for_webpage.rb | nhoriguchi/ticket_cli | 040bab5d5f5549a62c42c579b9154f8770c2269d | ["Apache-2.0"] | null | null | null |
require 'open-uri'
require 'nokogiri'
class GenerateTicketForWebsite
def self.run url
page_content = open(url).read
doc = Nokogiri::HTML(page_content)
return doc.at_css('title').text
end
end
url = ARGV[0]
proj = ARGV[1]
title = GenerateTicketForWebsite.run url
raise "project name (ARGV[1]) is required" if proj.nil? || proj.empty?
template = "
---
Project: #{proj}
Subject: #{title}
Status:
Type:
EstimatedTime: 1
StartDate:
DueDate:
Parent: null
Assigned: null
Duration:
Progress: 0
---
#{url}
"
require_relative '../main.rb'
File.write("/tmp/.upload.md", template)
MainCommand.cmd ["new", "-f", "/tmp/.upload.md"]
| 15.5 | 48 | 0.689304 |
2c5d7fde761a09651f0f0812d45e53fdce8f2ceb | 8,257 | py | Python | discovery-provider/alembic/versions/5bcbe23f6c70_user_track_collection_mat_views.py | ppak10/audius-protocol | 4dd9df787cbd39f86c5623ce7899b3855b7b314e | ["Apache-2.0"] | null | null | null | discovery-provider/alembic/versions/5bcbe23f6c70_user_track_collection_mat_views.py | ppak10/audius-protocol | 4dd9df787cbd39f86c5623ce7899b3855b7b314e | ["Apache-2.0"] | null | null | null | discovery-provider/alembic/versions/5bcbe23f6c70_user_track_collection_mat_views.py | ppak10/audius-protocol | 4dd9df787cbd39f86c5623ce7899b3855b7b314e | ["Apache-2.0"] | null | null | null |
"""user-track-collection-mat-views
Revision ID: 5bcbe23f6c70
Revises: 2ff46a8686fa
Create Date: 2021-04-12 20:01:40.395480
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5bcbe23f6c70'
down_revision = '2ff46a8686fa'
branch_labels = None
depends_on = None
def upgrade():
connection = op.get_bind()
connection.execute('''
begin;
--- ======================= AGGREGATE USER =======================
DROP MATERIALIZED VIEW IF EXISTS aggregate_user;
DROP INDEX IF EXISTS aggregate_user_idx;
CREATE MATERIALIZED VIEW aggregate_user as
SELECT
distinct(u.user_id),
COALESCE (user_track.track_count, 0) as track_count,
COALESCE (user_playlist.playlist_count, 0) as playlist_count,
COALESCE (user_album.album_count, 0) as album_count,
COALESCE (user_follower.follower_count, 0) as follower_count,
COALESCE (user_followee.followee_count, 0) as following_count,
COALESCE (user_repost.repost_count, 0) as repost_count,
COALESCE (user_track_save.save_count, 0) as track_save_count
FROM
users u
-- join on subquery for tracks created
LEFT OUTER JOIN (
SELECT
t.owner_id as owner_id,
count(t.owner_id) as track_count
FROM
tracks t
WHERE
t.is_current is True AND
t.is_delete is False AND
t.is_unlisted is False AND
t.stem_of is Null
GROUP BY t.owner_id
) as user_track ON user_track.owner_id = u.user_id
-- join on subquery for playlists created
LEFT OUTER JOIN (
SELECT
p.playlist_owner_id as owner_id,
count(p.playlist_owner_id) as playlist_count
FROM
playlists p
WHERE
p.is_album is False AND
p.is_current is True AND
p.is_delete is False AND
p.is_private is False
GROUP BY p.playlist_owner_id
) as user_playlist ON user_playlist.owner_id = u.user_id
-- join on subquery for albums created
LEFT OUTER JOIN (
SELECT
p.playlist_owner_id as owner_id,
count(p.playlist_owner_id) as album_count
FROM
playlists p
WHERE
p.is_album is True AND
p.is_current is True AND
p.is_delete is False AND
p.is_private is False
GROUP BY p.playlist_owner_id
) user_album ON user_album.owner_id = u.user_id
-- join on subquery for followers
LEFT OUTER JOIN (
SELECT
f.followee_user_id as followee_user_id,
count(f.followee_user_id) as follower_count
FROM
follows f
WHERE
f.is_current is True AND
f.is_delete is False
GROUP BY f.followee_user_id
) user_follower ON user_follower.followee_user_id = u.user_id
-- join on subquery for followee
LEFT OUTER JOIN (
SELECT
f.follower_user_id as follower_user_id,
count(f.follower_user_id) as followee_count
FROM
follows f
WHERE
f.is_current is True AND
f.is_delete is False
GROUP BY f.follower_user_id
) user_followee ON user_followee.follower_user_id = u.user_id
-- join on subquery for reposts
LEFT OUTER JOIN (
SELECT
r.user_id as user_id,
count(r.user_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
r.is_delete is False
GROUP BY r.user_id
) user_repost ON user_repost.user_id = u.user_id
-- join on subquery for track saves
LEFT OUTER JOIN (
SELECT
s.user_id as user_id,
count(s.user_id) as save_count
FROM
saves s
WHERE
s.is_current is True AND
s.save_type = 'track' AND
s.is_delete is False
GROUP BY s.user_id
) user_track_save ON user_track_save.user_id = u.user_id
WHERE
u.is_current is True;
CREATE UNIQUE INDEX aggregate_user_idx ON aggregate_user (user_id);
--- ======================= AGGREGATE TRACK =======================
DROP MATERIALIZED VIEW IF EXISTS aggregate_track;
DROP INDEX IF EXISTS aggregate_track_idx;
CREATE MATERIALIZED VIEW aggregate_track as
SELECT
t.track_id,
COALESCE (track_repost.repost_count, 0) as repost_count,
COALESCE (track_save.save_count, 0) as save_count
FROM
tracks t
-- inner join on subquery for reposts
LEFT OUTER JOIN (
SELECT
r.repost_item_id as track_id,
count(r.repost_item_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
r.repost_type = 'track' AND
r.is_delete is False
GROUP BY r.repost_item_id
) track_repost ON track_repost.track_id = t.track_id
-- inner join on subquery for track saves
LEFT OUTER JOIN (
SELECT
s.save_item_id as track_id,
count(s.save_item_id) as save_count
FROM
saves s
WHERE
s.is_current is True AND
s.save_type = 'track' AND
s.is_delete is False
GROUP BY s.save_item_id
) track_save ON track_save.track_id = t.track_id
WHERE
t.is_current is True AND
t.is_delete is False;
CREATE UNIQUE INDEX aggregate_track_idx ON aggregate_track (track_id);
--- ======================= AGGREGATE PLAYLIST =======================
DROP MATERIALIZED VIEW IF EXISTS aggregate_playlist;
DROP INDEX IF EXISTS aggregate_playlist_idx;
CREATE MATERIALIZED VIEW aggregate_playlist as
SELECT
p.playlist_id,
p.is_album,
COALESCE (playlist_repost.repost_count, 0) as repost_count,
COALESCE (playlist_save.save_count, 0) as save_count
FROM
playlists p
-- inner join on subquery for reposts
LEFT OUTER JOIN (
SELECT
r.repost_item_id as playlist_id,
count(r.repost_item_id) as repost_count
FROM
reposts r
WHERE
r.is_current is True AND
(r.repost_type = 'playlist' OR r.repost_type = 'album') AND
r.is_delete is False
GROUP BY r.repost_item_id
) playlist_repost ON playlist_repost.playlist_id = p.playlist_id
-- inner join on subquery for track saves
LEFT OUTER JOIN (
SELECT
s.save_item_id as playlist_id,
count(s.save_item_id) as save_count
FROM
saves s
WHERE
s.is_current is True AND
(s.save_type = 'playlist' OR s.save_type = 'album') AND
s.is_delete is False
GROUP BY s.save_item_id
) playlist_save ON playlist_save.playlist_id = p.playlist_id
WHERE
p.is_current is True AND
p.is_delete is False;
CREATE UNIQUE INDEX aggregate_playlist_idx ON aggregate_playlist (playlist_id);
commit;
''')
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
# ### end Alembic commands ###
connection = op.get_bind()
connection.execute('''
begin;
DROP INDEX IF EXISTS aggregate_user_idx;
DROP INDEX IF EXISTS aggregate_track_idx;
DROP INDEX IF EXISTS aggregate_playlist_idx;
DROP MATERIALIZED VIEW aggregate_user;
DROP MATERIALIZED VIEW aggregate_track;
DROP MATERIALIZED VIEW aggregate_playlist;
commit;
''')
| 34.548117 | 87 | 0.569335 |
316b73efd0d17de0c413c387c8038cfd237dbdba | 780 | ps1 | PowerShell | day-08/part-1.ps1 | martinkonopka/AdventOfCode2019 | a6b799b07881ecd48f1a4cf295c897ddd057c884 | ["MIT"] | 1 | 2019-12-01T19:07:36.000Z | 2019-12-01T19:07:36.000Z | day-08/part-1.ps1 | martinkonopka/AdventOfCode2019 | a6b799b07881ecd48f1a4cf295c897ddd057c884 | ["MIT"] | null | null | null | day-08/part-1.ps1 | martinkonopka/AdventOfCode2019 | a6b799b07881ecd48f1a4cf295c897ddd057c884 | ["MIT"] | null | null | null |
param(
[string]$InputPath = ".\input.txt"
, [int]$LayerWidth = 25
, [int]$LayerHeight = 6
)
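
# For context: based on the pipeline below, this script splits the input digit stream into
# layers of $LayerWidth x $LayerHeight pixels, selects the layer containing the fewest '0'
# digits, and outputs that layer's count of '1' digits multiplied by its count of '2' digits
# (the Advent of Code 2019 day 8, part 1 checksum).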
Get-Content -Path $InputPath `
| % { $_.ToCharArray() } `
| Group-Object -Property { [Math]::Floor($script:counter++ / ($script:LayerWidth * $script:LayerHeight)) } `
| % { [string]::new($_.Group) } `
| Sort-Object { ($_ | Select-String '0' -AllMatches | Select-Object -ExpandProperty "Matches" | Measure-Object | Select-Object -ExpandProperty Count) } `
| Select-Object -First 1 `
| % {
($_ | Select-String '1' -AllMatches | Select-Object -ExpandProperty "Matches" | Measure-Object | Select-Object -ExpandProperty Count) `
* ($_ | Select-String '2' -AllMatches | Select-Object -ExpandProperty "Matches" | Measure-Object | Select-Object -ExpandProperty Count)
}
| 43.333333 | 153 | 0.657692 |
c9e159f59a305214df41231cca74fb1287d0364a | 960 | tsx | TypeScript | src/layouts/index.tsx | dnjstrom/dnjstrom | 275f145292a1b9f49ad2b473d204d324fb22af6e | ["MIT"] | null | null | null | src/layouts/index.tsx | dnjstrom/dnjstrom | 275f145292a1b9f49ad2b473d204d324fb22af6e | ["MIT"] | null | null | null | src/layouts/index.tsx | dnjstrom/dnjstrom | 275f145292a1b9f49ad2b473d204d324fb22af6e | ["MIT"] | null | null | null |
import React from "react"
import Helmet from "react-helmet"
import styled from "styled-components"
import "./main.css"
const App = styled.main`
max-width: 700px;
padding: 1.5rem;
margin: 0 auto;
a {
color: #2d9cdb;
text-decoration: none;
word-break: break-word;
&:hover {
text-decoration: underline;
}
}
`
const Layout: React.SFC<ILayoutProps> = ({ children }) => (
<div>
<Helmet>
<html lang="en" />
<title>Daniel Ström</title>
<meta
name="description"
content="I'm Daniel, a web developer, and this is where I write about thoughts I've had or things I've done."
/>
<meta
name="keywords"
content="blog, tech, programming, software development, culture, leadership, coaching"
/>
</Helmet>
<App>
<section>{children()}</section>
</App>
</div>
)
export default Layout
interface ILayoutProps {
children: () => JSX.Element
}
| 19.2 | 117 | 0.607292 |
39fdbe3f49cdd69fe13872bd21b6441657a6f1c4 | 436 | swift | Swift | Shifu/Classes/Extensions/ArrayExtension.swift | horidream/shifu | 1764db27dbefb4d1bd061ca1702a4ddf2382489e | ["MIT"] | null | null | null | Shifu/Classes/Extensions/ArrayExtension.swift | horidream/shifu | 1764db27dbefb4d1bd061ca1702a4ddf2382489e | ["MIT"] | null | null | null | Shifu/Classes/Extensions/ArrayExtension.swift | horidream/shifu | 1764db27dbefb4d1bd061ca1702a4ddf2382489e | ["MIT"] | null | null | null |
//
// ArrayExtension.swift
// aSong
//
// Created by Baoli Zhai on 2019/4/19.
// Copyright © 2019 Baoli Zhai. All rights reserved.
//
import Foundation
extension Array {
public func get( _ index: Int, _ defaultValue:Element? = nil) -> Element? {
var index = index;
if index < 0{
index = self.count + index;
}
return index < count && index >= 0 ? self[index] : defaultValue
}
}
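
// Illustrative usage (hypothetical values, shown here only to document the behaviour of
// `get`): negative indices are resolved from the end of the array, and out-of-range
// indices fall back to the supplied default (nil by default).
//
//   let items = [10, 20, 30]
//   items.get(1)      // Optional(20)
//   items.get(-1)     // Optional(30), i.e. items[count - 1]
//   items.get(5)      // nil
//   items.get(5, 0)   // Optional(0), the supplied default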
| 20.761905 | 79 | 0.584862 |
af1a32fba33d48fe9bf2f576a88c2ce3de8a8008 | 207 | py | Python | django_rest_auth_embedded/tests/urls/__init__.py | Volkova-Natalia/django_rest_auth_embedded | 43fe1d23f59332a7794365348989599cde44af6e | ["MIT"] | null | null | null | django_rest_auth_embedded/tests/urls/__init__.py | Volkova-Natalia/django_rest_auth_embedded | 43fe1d23f59332a7794365348989599cde44af6e | ["MIT"] | 1 | 2021-02-26T16:56:31.000Z | 2021-03-24T09:47:43.000Z | django_rest_auth_embedded/tests/urls/__init__.py | Volkova-Natalia/django_rest_auth_embedded | 43fe1d23f59332a7794365348989599cde44af6e | ["MIT"] | null | null | null |
from .base import BaseUrlsTestCase
from .registration import RegistrationUrlsTestCase
from .login import LoginUrlsTestCase
from .logout import LogoutUrlsTestCase
from .auth_info import AuthInfoUrlsTestCase
| 29.571429 | 50 | 0.874396 |
05cadb5253d9cd0e235a0b2d81ea938d88b7e606 | 345 | sql | SQL | migrations/2021-05-29-114935_get_gurls/up.sql | ojhermann/guardian_backend | fa89dd96c02620fc06cd51886e47bddcd9493065 | ["MIT"] | null | null | null | migrations/2021-05-29-114935_get_gurls/up.sql | ojhermann/guardian_backend | fa89dd96c02620fc06cd51886e47bddcd9493065 | ["MIT"] | 2 | 2021-05-24T14:07:28.000Z | 2021-07-12T13:18:58.000Z | migrations/2021-05-29-114935_get_gurls/up.sql | ojhermann/guardian_backend | fa89dd96c02620fc06cd51886e47bddcd9493065 | ["MIT"] | null | null | null |
-- Your SQL goes here
CREATE OR REPLACE FUNCTION public.get_gurls(start_id INTEGER, end_id INTEGER)
RETURNS SETOF gurl_response
AS $$
BEGIN
RETURN QUERY
SELECT g.id,
g.url,
g.created_at,
g.liked
FROM public.gurls as g
WHERE g.id >= start_id
AND g.id < end_id;
END;
$$
LANGUAGE plpgsql;
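
-- Illustrative call (hypothetical id range, chosen only for the example): the function
-- returns gurls whose id falls in the half-open range [start_id, end_id).
-- SELECT * FROM public.get_gurls(1, 51);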
| 20.294118 | 77 | 0.631884 |
5bc9dffa7688d6b3635972a3e0a854f49dc37227 | 295 | css | CSS | src/components/Container/Container.css | mustang-roy/pokedex | 612e96ae3d7729a1d6ce532196863f0a6f8b74d0 | ["Apache-2.0"] | 1 | 2020-10-22T17:17:25.000Z | 2020-10-22T17:17:25.000Z | src/components/Container/Container.css | mustang-roy/pokedex | 612e96ae3d7729a1d6ce532196863f0a6f8b74d0 | ["Apache-2.0"] | 24 | 2020-10-21T15:03:15.000Z | 2020-10-24T10:58:47.000Z | src/components/Container/Container.css | mustang-roy/pokedex | 612e96ae3d7729a1d6ce532196863f0a6f8b74d0 | ["Apache-2.0"] | null | null | null |
.container {
background-color: #dc0a2d;
display: flex;
flex-direction: column;
height: 100vh;
justify-content: space-between;
width: 100vw;
}
.main {
display: flex;
justify-content: center;
}
.main-screen {
display: flex;
}
.search-list {
display: flex;
width: 500px;
}
| 12.826087 | 33 | 0.657627 |
25bcc4d6cec2a0e178619bb9ddbe25ffe2ff4cca | 2,084 | cs | C# | GrimSearch.FileUtils/CharacterFiles/CharacterInfo.cs | hallgeirl/gd-item-search | 76359496f4a5e5a8b982bfe4254e8bc68e33ad58 | ["MIT"] | 7 | 2018-07-10T07:56:59.000Z | 2020-08-30T18:05:54.000Z | GrimSearch.FileUtils/CharacterFiles/CharacterInfo.cs | hallgeirl/gd-item-search | 76359496f4a5e5a8b982bfe4254e8bc68e33ad58 | ["MIT"] | 15 | 2018-05-21T06:00:38.000Z | 2019-07-26T11:54:24.000Z | GrimSearch.FileUtils/CharacterFiles/CharacterInfo.cs | hallgeirl/gd-item-search | 76359496f4a5e5a8b982bfe4254e8bc68e33ad58 | ["MIT"] | 1 | 2020-12-06T17:30:57.000Z | 2020-12-06T17:30:57.000Z |
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace GrimSearch.Utils.CharacterFiles
{
public class CharacterInfo
{
public string texture;
public UInt32 money;
public UInt32 lootMode;
public UInt32 currentTribute;
public byte isInMainQuest;
public byte hasBeenInGame;
public byte difficulty;
public byte greatestDifficulty;
public byte greatestSurvivalDifficulty;
public byte compassState;
public byte skillWindowShowHelp;
public byte alternateConfig;
public byte alternateConfigEnabled;
public void Read(GDFileReader file)
{
Block b = new Block();
            if (file.ReadBlockStart(b) != 1)
                throw new Exception("Unexpected block while reading character info");
var version = file.ReadInt();
if (version < 3 || version > 5) // version
throw new Exception("Invalid character info version");
isInMainQuest = file.ReadByte();
hasBeenInGame = file.ReadByte();
difficulty = file.ReadByte();
greatestDifficulty = file.ReadByte();
money = file.ReadInt();
if (version >= 4)
{
greatestSurvivalDifficulty = file.ReadByte();
currentTribute = file.ReadInt();
}
compassState = file.ReadByte();
if (version >= 2 && version <= 4)
lootMode = file.ReadInt();
skillWindowShowHelp = file.ReadByte();
alternateConfig = file.ReadByte();
alternateConfigEnabled = file.ReadByte();
texture = GDString.Read(file);
if (version >= 5)
{
uint size = file.ReadInt(true);
var lootFilters = new byte[size];
                for (int i = 0; i < lootFilters.Length; i++)
{
lootFilters[i] = file.ReadByte();
}
}
file.ReadBlockEnd(b);
}
}
}
| 29.771429 | 70 | 0.535509 |
a42a57d4d019c7e7e06fd3be83c07d9408767579 | 4,255 | php | PHP | language_examples/delphix_curl.php | adeelmalik78/dxapikit | 11714f3f9ad0abb7308dd3aa4dad981b05f6ba74 | ["Apache-2.0"] | 2 | 2017-09-21T15:36:16.000Z | 2020-09-15T03:28:55.000Z | language_examples/delphix_curl.php | adeelmalik78/dxapikit | 11714f3f9ad0abb7308dd3aa4dad981b05f6ba74 | ["Apache-2.0"] | 1 | 2017-08-21T15:45:24.000Z | 2017-08-21T15:47:26.000Z | language_examples/delphix_curl.php | adeelmalik78/dxapikit | 11714f3f9ad0abb7308dd3aa4dad981b05f6ba74 | ["Apache-2.0"] | 6 | 2018-03-05T14:32:00.000Z | 2021-07-14T13:13:16.000Z |
<?php
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Copyright (c) 2017 by Delphix. All rights reserved.
//
// Program Name : delphix_curl.php
// Description : Delphix API Example for PHP
// Author : Alan Bitterman
// Created : 2017-08-09
// Version : v1.0.0
//
// Requirements :
// 1.) curl command line libraries
// 2.) Change values below as required
//
// Usage: php delphix_curl.php
//
///////////////////////////////////////////////////////////
// DELPHIX CORP //
// Please make changes to the parameters below as req'd! //
///////////////////////////////////////////////////////////
//
// Variables ...
//
$BaseURL = "http://172.16.160.195/resources/json/delphix";
$username = "delphix_admin";
$password = "delphix";
///////////////////////////////////////////////////////////
// NO CHANGES REQUIRED BELOW THIS POINT //
///////////////////////////////////////////////////////////
//
// Session ...
//
$data = array("type" => "APISession"
, "version" => array( "type" => "APIVersion", "major" => 1, "minor" => 7, "micro" => 0)
);
//print_r($data);
$data_string = json_encode($data);
echo "Session json> $data_string \n";
$ch = curl_init("$BaseURL/session");
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "POST"); // POST
curl_setopt($ch, CURLOPT_POSTFIELDS, $data_string);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_COOKIESESSION, true); // Use Cookie Session for API Authentication ...
curl_setopt($ch, CURLOPT_COOKIEJAR, 'cookie.txt');
curl_setopt($ch, CURLOPT_HTTPHEADER, array(
'Content-Type: application/json',
'Content-Length: ' . strlen($data_string))
);
$result = curl_exec($ch);
echo "Session Results> $result \n";
//
// Login ...
//
$data = array(
"type" => "LoginRequest"
, "username" => "$username"
, "password" => "$password"
);
//print_r($data);
$data_string = json_encode($data);
echo "Login json> $data_string \n";
curl_setopt($ch, CURLOPT_URL,"$BaseURL/login");
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "POST"); // POST
curl_setopt($ch, CURLOPT_POSTFIELDS, $data_string);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
///curl_setopt($ch, CURLOPT_COOKIESESSION, true); // use in session only
curl_setopt($ch, CURLOPT_COOKIEJAR, 'cookie.txt');
curl_setopt($ch, CURLOPT_HTTPHEADER, array(
'Content-Type: application/json',
'Content-Length: ' . strlen($data_string))
);
$result = curl_exec($ch);
echo "Login Results> $result \n";
/////////////////////////////////////////////////////////////
//
// Add Logic as Required ...
//
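//
// Illustrative placeholder (commented out; "system" is only an example endpoint path,
// adjust it to whatever engine API you need): any additional call can reuse the same
// cookie-authenticated GET pattern shown below for the About API.
//
// curl_setopt($ch, CURLOPT_URL,"$BaseURL/system");
// curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "GET");
// curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
// curl_setopt($ch, CURLOPT_COOKIEJAR, 'cookie.txt');
// curl_setopt($ch, CURLOPT_HTTPHEADER, array( 'Content-Type: application/json' ));
// $system = json_decode(curl_exec($ch));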
//
// Delphix Engine About API ...
//
echo "Calling About API ...\n";
curl_setopt($ch, CURLOPT_URL,"$BaseURL/about");
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "GET"); // GET
///curl_setopt($ch, CURLOPT_POSTFIELDS, $data_string); // using GET, no POST data required ...
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
///curl_setopt($ch, CURLOPT_COOKIESESSION, true); // use in session only
curl_setopt($ch, CURLOPT_COOKIEJAR, 'cookie.txt');
curl_setopt($ch, CURLOPT_HTTPHEADER, array( 'Content-Type: application/json' ));
$result = curl_exec($ch);
echo "About Results> $result \n";
echo "Converting json string to a PHP Array \n";
$arr = json_decode($result);
print_r($arr);
/////////////////////////////////////////////////////////////
//
// The End ...
//
curl_close($ch); // close curl session (required to keep open for authentication)
?>
| 34.04 | 117 | 0.572738 |
da8fac638c3b077538919c0dd0f21c4adccf4320 | 14,702 | php | PHP | templates/fonts/MiddleSaxonyText.php | briceparent/Shopsailors | c9a0a968521266fa073a4e444392d79d6d706d38 | ["MIT"] | 3 | 2017-11-24T02:29:53.000Z | 2021-02-12T07:35:35.000Z | templates/fonts/MiddleSaxonyText.php | briceparent/Shopsailors | c9a0a968521266fa073a4e444392d79d6d706d38 | ["MIT"] | null | null | null | templates/fonts/MiddleSaxonyText.php | briceparent/Shopsailors | c9a0a968521266fa073a4e444392d79d6d706d38 | ["MIT"] | 1 | 2017-03-19T18:57:08.000Z | 2017-03-19T18:57:08.000Z |
<?php
/**
* Copyright Shopsailors (2009)
*
* [email protected]
*
* This file is a part of a computer program whose purpose is to create,
* administrate and use a shop over the web.
*
* This software is governed by the CeCILL license under French law and
* abiding by the rules of distribution of free software. You can use,
* modify and/ or redistribute the software under the terms of the CeCILL
* license as circulated by CEA, CNRS and INRIA at the following URL
* "http://www.cecill.info".
*
* As a counterpart to the access to the source code and rights to copy,
* modify and redistribute granted by the license, users are provided only
* with a limited warranty and the software's author, the holder of the
* economic rights, and the successive licensors have only limited
* liability.
*
* In this respect, the user's attention is drawn to the risks associated
* with loading, using, modifying and/or developing or reproducing the
* software by the user in light of its specific status of free software,
* that may mean that it is complicated to manipulate, and that also
* therefore means that it is reserved for developers and experienced
* professionals having in-depth computer knowledge. Users are therefore
* encouraged to load and test the software's suitability as regards their
* requirements in conditions enabling the security of their systems and/or
* data to be ensured and, more generally, to use and operate it in the
* same conditions as regards security.
*
* The fact that you are presently reading this means that you have had
* knowledge of the CeCILL license and that you accept its terms.
**/
/**
* This file was generated automatically by the Shopsailors engine.
**/
if(!defined('SH_MARKER')){
header('location: directCallForbidden.php');
}
$boxes = array (
6 =>
array (
'fontSize' => 3,
'top' => 5,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 1,
2 => 195,
3 => 1,
4 => 195,
5 => -5,
6 => -2,
7 => -5,
),
),
7 =>
array (
'fontSize' => 4,
'top' => 6,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 1,
2 => 255,
3 => 1,
4 => 255,
5 => -6,
6 => -2,
7 => -6,
),
),
8 =>
array (
'fontSize' => 4,
'top' => 6,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 1,
2 => 255,
3 => 1,
4 => 255,
5 => -6,
6 => -2,
7 => -6,
),
),
9 =>
array (
'fontSize' => 5,
'top' => 7,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 2,
2 => 318,
3 => 2,
4 => 318,
5 => -7,
6 => -2,
7 => -7,
),
),
10 =>
array (
'fontSize' => 6,
'top' => 8,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 2,
2 => 379,
3 => 2,
4 => 379,
5 => -8,
6 => -2,
7 => -8,
),
),
11 =>
array (
'fontSize' => 7,
'top' => 9,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 2,
2 => 440,
3 => 2,
4 => 440,
5 => -9,
6 => -2,
7 => -9,
),
),
12 =>
array (
'fontSize' => 7,
'top' => 9,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 2,
2 => 440,
3 => 2,
4 => 440,
5 => -9,
6 => -2,
7 => -9,
),
),
13 =>
array (
'fontSize' => 8,
'top' => 10,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 3,
2 => 505,
3 => 3,
4 => 505,
5 => -10,
6 => -2,
7 => -10,
),
),
14 =>
array (
'fontSize' => 9,
'top' => 11,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 3,
2 => 566,
3 => 3,
4 => 566,
5 => -11,
6 => -2,
7 => -11,
),
),
15 =>
array (
'fontSize' => 10,
'top' => 12,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 3,
2 => 630,
3 => 3,
4 => 630,
5 => -12,
6 => -2,
7 => -12,
),
),
16 =>
array (
'fontSize' => 10,
'top' => 12,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 3,
2 => 630,
3 => 3,
4 => 630,
5 => -12,
6 => -2,
7 => -12,
),
),
17 =>
array (
'fontSize' => 11,
'top' => 13,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 4,
2 => 693,
3 => 4,
4 => 693,
5 => -13,
6 => -2,
7 => -13,
),
),
18 =>
array (
'fontSize' => 12,
'top' => 14,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 4,
2 => 756,
3 => 4,
4 => 756,
5 => -14,
6 => -2,
7 => -14,
),
),
19 =>
array (
'fontSize' => 14,
'top' => 15,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 4,
2 => 883,
3 => 4,
4 => 883,
5 => -15,
6 => -2,
7 => -15,
),
),
20 =>
array (
'fontSize' => 14,
'top' => 15,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 4,
2 => 883,
3 => 4,
4 => 883,
5 => -15,
6 => -2,
7 => -15,
),
),
21 =>
array (
'fontSize' => 15,
'top' => 16,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 5,
2 => 946,
3 => 5,
4 => 946,
5 => -16,
6 => -2,
7 => -16,
),
),
22 =>
array (
'fontSize' => 16,
'top' => 17,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 5,
2 => 1008,
3 => 5,
4 => 1008,
5 => -17,
6 => -2,
7 => -17,
),
),
23 =>
array (
'fontSize' => 17,
'top' => 18,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 5,
2 => 1069,
3 => 5,
4 => 1069,
5 => -18,
6 => -2,
7 => -18,
),
),
24 =>
array (
'fontSize' => 17,
'top' => 18,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 5,
2 => 1069,
3 => 5,
4 => 1069,
5 => -18,
6 => -2,
7 => -18,
),
),
25 =>
array (
'fontSize' => 18,
'top' => 19,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 6,
2 => 1136,
3 => 6,
4 => 1136,
5 => -19,
6 => -2,
7 => -19,
),
),
26 =>
array (
'fontSize' => 19,
'top' => 20,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 6,
2 => 1198,
3 => 6,
4 => 1198,
5 => -20,
6 => -2,
7 => -20,
),
),
27 =>
array (
'fontSize' => 20,
'top' => 21,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 6,
2 => 1262,
3 => 6,
4 => 1262,
5 => -21,
6 => -2,
7 => -21,
),
),
28 =>
array (
'fontSize' => 20,
'top' => 21,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 6,
2 => 1262,
3 => 6,
4 => 1262,
5 => -21,
6 => -2,
7 => -21,
),
),
29 =>
array (
'fontSize' => 21,
'top' => 22,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 7,
2 => 1325,
3 => 7,
4 => 1325,
5 => -22,
6 => -2,
7 => -22,
),
),
30 =>
array (
'fontSize' => 22,
'top' => 23,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 7,
2 => 1387,
3 => 7,
4 => 1387,
5 => -23,
6 => -2,
7 => -23,
),
),
32 =>
array (
'fontSize' => 23,
'top' => 24,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 7,
2 => 1446,
3 => 7,
4 => 1446,
5 => -24,
6 => -2,
7 => -24,
),
),
34 =>
array (
'fontSize' => 25,
'top' => 26,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 8,
2 => 1576,
3 => 8,
4 => 1576,
5 => -26,
6 => -2,
7 => -26,
),
),
36 =>
array (
'fontSize' => 26,
'top' => 27,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 8,
2 => 1639,
3 => 8,
4 => 1639,
5 => -27,
6 => -2,
7 => -27,
),
),
38 =>
array (
'fontSize' => 28,
'top' => 29,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 9,
2 => 1764,
3 => 9,
4 => 1764,
5 => -29,
6 => -2,
7 => -29,
),
),
40 =>
array (
'fontSize' => 29,
'top' => 30,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 9,
2 => 1829,
3 => 9,
4 => 1829,
5 => -30,
6 => -2,
7 => -30,
),
),
42 =>
array (
'fontSize' => 31,
'top' => 32,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 10,
2 => 1952,
3 => 10,
4 => 1952,
5 => -32,
6 => -2,
7 => -32,
),
),
44 =>
array (
'fontSize' => 32,
'top' => 33,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 10,
2 => 2015,
3 => 10,
4 => 2015,
5 => -33,
6 => -2,
7 => -33,
),
),
46 =>
array (
'fontSize' => 34,
'top' => 35,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 11,
2 => 2143,
3 => 11,
4 => 2143,
5 => -35,
6 => -2,
7 => -35,
),
),
48 =>
array (
'fontSize' => 35,
'top' => 36,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 11,
2 => 2205,
3 => 11,
4 => 2205,
5 => -36,
6 => -2,
7 => -36,
),
),
50 =>
array (
'fontSize' => 38,
'top' => 38,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 12,
2 => 2392,
3 => 12,
4 => 2392,
5 => -38,
6 => -2,
7 => -38,
),
),
55 =>
array (
'fontSize' => 42,
'top' => 42,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 13,
2 => 2646,
3 => 13,
4 => 2646,
5 => -42,
6 => -2,
7 => -42,
),
),
60 =>
array (
'fontSize' => 45,
'top' => 45,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 14,
2 => 2835,
3 => 14,
4 => 2835,
5 => -45,
6 => -2,
7 => -45,
),
),
65 =>
array (
'fontSize' => 49,
'top' => 49,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 16,
2 => 3087,
3 => 16,
4 => 3087,
5 => -49,
6 => -2,
7 => -49,
),
),
70 =>
array (
'fontSize' => 53,
'top' => 53,
'left' => 0,
'box' =>
array (
0 => -2,
1 => 17,
2 => 3339,
3 => 17,
4 => 3339,
5 => -53,
6 => -2,
7 => -53,
),
),
75 =>
array (
'fontSize' => 57,
'top' => 56,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 18,
2 => 3591,
3 => 18,
4 => 3591,
5 => -56,
6 => -3,
7 => -56,
),
),
80 =>
array (
'fontSize' => 60,
'top' => 59,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 19,
2 => 3783,
3 => 19,
4 => 3783,
5 => -59,
6 => -3,
7 => -59,
),
),
85 =>
array (
'fontSize' => 65,
'top' => 64,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 21,
2 => 4092,
3 => 21,
4 => 4092,
5 => -64,
6 => -3,
7 => -64,
),
),
90 =>
array (
'fontSize' => 69,
'top' => 68,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 22,
2 => 4345,
3 => 22,
4 => 4345,
5 => -68,
6 => -3,
7 => -68,
),
),
100 =>
array (
'fontSize' => 77,
'top' => 75,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 25,
2 => 4850,
3 => 25,
4 => 4850,
5 => -75,
6 => -3,
7 => -75,
),
),
110 =>
array (
'fontSize' => 85,
'top' => 83,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 27,
2 => 5352,
3 => 27,
4 => 5352,
5 => -83,
6 => -3,
7 => -83,
),
),
120 =>
array (
'fontSize' => 92,
'top' => 90,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 29,
2 => 5793,
3 => 29,
4 => 5793,
5 => -90,
6 => -3,
7 => -90,
),
),
130 =>
array (
'fontSize' => 101,
'top' => 98,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 32,
2 => 6361,
3 => 32,
4 => 6361,
5 => -98,
6 => -3,
7 => -98,
),
),
140 =>
array (
'fontSize' => 108,
'top' => 105,
'left' => 1,
'box' =>
array (
0 => -3,
1 => 35,
2 => 6801,
3 => 35,
4 => 6801,
5 => -105,
6 => -3,
7 => -105,
),
),
150 =>
array (
'fontSize' => 117,
'top' => 113,
'left' => 2,
'box' =>
array (
0 => -4,
1 => 37,
2 => 7369,
3 => 37,
4 => 7369,
5 => -113,
6 => -4,
7 => -113,
),
),
160 =>
array (
'fontSize' => 124,
'top' => 120,
'left' => 2,
'box' =>
array (
0 => -4,
1 => 40,
2 => 7813,
3 => 40,
4 => 7813,
5 => -120,
6 => -4,
7 => -120,
),
),
170 =>
array (
'fontSize' => 131,
'top' => 127,
'left' => 2,
'box' =>
array (
0 => -4,
1 => 42,
2 => 8252,
3 => 42,
4 => 8252,
5 => -127,
6 => -4,
7 => -127,
),
),
180 =>
array (
'fontSize' => 140,
'top' => 135,
'left' => 2,
'box' =>
array (
0 => -4,
1 => 45,
2 => 8820,
3 => 45,
4 => 8820,
5 => -135,
6 => -4,
7 => -135,
),
),
190 =>
array (
'fontSize' => 147,
'top' => 142,
'left' => 2,
'box' =>
array (
0 => -4,
1 => 47,
2 => 9259,
3 => 47,
4 => 9259,
5 => -142,
6 => -4,
7 => -142,
),
),
200 =>
array (
'fontSize' => 155,
'top' => 150,
'left' => 2,
'box' =>
array (
0 => -4,
1 => 50,
2 => 9767,
3 => 50,
4 => 9767,
5 => -150,
6 => -4,
7 => -150,
),
),
);
| 15.251037 | 76 | 0.311794 |
c94db5f4c87d78b4b9a522bff8f703e3f04794cf | 1,554 | ts | TypeScript | projects/helpers/pipes/test/br-phone.pipe.spec.ts | gdoor-sistemas/sak | eec871a6d9fd5ecebfa7d26d41747f975e6577b6 | ["MIT"] | null | null | null | projects/helpers/pipes/test/br-phone.pipe.spec.ts | gdoor-sistemas/sak | eec871a6d9fd5ecebfa7d26d41747f975e6577b6 | ["MIT"] | null | null | null | projects/helpers/pipes/test/br-phone.pipe.spec.ts | gdoor-sistemas/sak | eec871a6d9fd5ecebfa7d26d41747f975e6577b6 | ["MIT"] | null | null | null |
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { Component } from '@angular/core';
import { BrPhonePipe } from '../src/br-phone.pipe';
import { Format } from '@gdoor/helpers';
@Component({template: `<span [innerHTML]="value | brPhone:link"></span>`})
class TestHostComponent {
public value: string | number;
public link = false;
}
describe('PhonePipe', () => {
let component: TestHostComponent;
let fixture: ComponentFixture<TestHostComponent>;
beforeEach(async () => {
await TestBed.configureTestingModule({
declarations: [
TestHostComponent,
BrPhonePipe,
],
}).compileComponents();
fixture = TestBed.createComponent(TestHostComponent);
component = fixture.componentInstance;
});
it('should compile', () => {
expect(component).toBeTruthy();
});
it('should render piped value without link', () => {
component.value = '(0xx49) 3441 - 3100';
fixture.detectChanges();
expect(fixture.nativeElement.innerText.trim()).toBe(Format.phone(component.value));
expect(fixture.nativeElement.querySelector('a.link')).toBeFalsy();
});
it('should render piped value with link', () => {
component.value = '(0xx49) 3441 - 3100';
component.link = true;
fixture.detectChanges();
expect(fixture.nativeElement.querySelector('a.link')).toBeTruthy();
});
it('should format a number', () => {
component.value = 34413100;
fixture.detectChanges();
expect(fixture.nativeElement.innerText.trim()).toMatch(/3441.*3100/);
});
});
| 29.884615 | 87 | 0.667954 |
b2e439f950ee724e944b2b3fbf4083c3ab6f422f | 1,182 | css | CSS | css/unslider.css | DemonHe/Exercise | 5da3cc4106dc5401ec5eb7f9650d535001628b11 | ["MIT"] | null | null | null | css/unslider.css | DemonHe/Exercise | 5da3cc4106dc5401ec5eb7f9650d535001628b11 | ["MIT"] | null | null | null | css/unslider.css | DemonHe/Exercise | 5da3cc4106dc5401ec5eb7f9650d535001628b11 | ["MIT"] | null | null | null |
/**
* Here's where everything gets included. You don't need
* to change anything here, and doing so might break
* stuff. Here be dragons and all that.
*/
/**
* Default variables
*
* While these can be set with JavaScript, it's probably
* better and faster to just set them here, compile to
* CSS and include that instead to use some of that
* hardware-accelerated goodness.
*/
.unslider {
overflow: auto;
margin: 0;
padding: 0;
border-radius: 10px;
}
.unslider-wrap {
position: relative;
}
.unslider-wrap.unslider-carousel > li {
float: left;
}
.unslider-vertical > ul {
height: 100%;
}
.unslider-vertical li {
float: none;
width: 100%;
}
.unslider-fade {
position: relative;
}
.unslider-fade .unslider-wrap li {
position: absolute;
left: 0;
top: 0;
right: 0;
z-index: 8;
}
.unslider-fade .unslider-wrap li.unslider-active {
z-index: 10;
}
.unslider ul,
.unslider ol,
.unslider li {
list-style: none;
/* Reset any weird spacing */
margin: 0;
padding: 0;
border: none;
}
.unslider-arrow {
position: absolute;
left: 20px;
z-index: 2;
cursor: pointer;
}
.unslider-arrow.next {
left: auto;
right: 20px;
}
| 18.184615 | 58 | 0.656514 |
dac0b07369d14ae193a6ca370fa1b347567a6c50 | 292 | rb | Ruby | ObjectOriented/Objective.rb | GuilhermeHaetinger/RISKyBusiness | a7fcbc6396bd65eaee03c8b4decc79b1ae20ae6d | ["MIT"] | null | null | null | ObjectOriented/Objective.rb | GuilhermeHaetinger/RISKyBusiness | a7fcbc6396bd65eaee03c8b4decc79b1ae20ae6d | ["MIT"] | null | null | null | ObjectOriented/Objective.rb | GuilhermeHaetinger/RISKyBusiness | a7fcbc6396bd65eaee03c8b4decc79b1ae20ae6d | ["MIT"] | null | null | null |
$LOAD_PATH << '.'
require 'singleton'
require 'Objective'
require './modules/Interface'
class Objective
include Interface
def initialize(name)
@name = name
end
def isObjectiveFulfilled(playerId)
Objective.api_not_implemented(self)
end
def getName()
@name
end
end
| 14.6 | 39 | 0.712329 |
37d3677e91f4368f082649c4480f289f3be8c85f | 580 | dart | Dart | lib/src/client/client_options.dart | QiXi/game_socket | 17d975d05962be5e65f78d0d3977701ef673d8a1 | ["MIT"] | 8 | 2021-07-08T14:34:08.000Z | 2022-02-05T03:21:29.000Z | lib/src/client/client_options.dart | QiXi/game_socket | 17d975d05962be5e65f78d0d3977701ef673d8a1 | ["MIT"] | 1 | 2022-02-16T20:14:15.000Z | 2022-02-16T20:14:15.000Z | lib/src/client/client_options.dart | QiXi/game_socket | 17d975d05962be5e65f78d0d3977701ef673d8a1 | ["MIT"] | 4 | 2021-07-16T07:04:46.000Z | 2022-02-11T17:28:00.000Z |
import '../../protocol.dart';
class ClientOptions {
final List<Schema?> _schemas = List.filled(256, null);
bool supportRawData;
int reconnectInterval = 1;
int maxReconnectAttempts = 3;
Duration? connectTimeout = Duration(seconds: 20);
bool disconnectOnHighPing = true;
int limitHighPing = 500;
ClientOptions() : supportRawData = false {
addSchema(GameSocketSchema());
addSchema(RoomSchema());
}
  Schema? addSchema(Schema schema) {
    _schemas[schema.code] = schema;
    return schema;
  }
Schema? getSchema(int code, int version) {
return _schemas[code];
}
}
| 23.2 | 56 | 0.693103 |
7e5e0f763a67b55ffbcb79e1588c9e20b08d2c84 | 1,184 | rb | Ruby | lib/adapters/wallaby/active_record/model_service_provider/validator.rb | wallaby-rails/wallaby-activerecord | 128eabf40182882122f41504823c32ccebbc0f24 | ["MIT"] | 2 | 2020-02-06T16:33:17.000Z | 2022-02-01T22:02:52.000Z | lib/adapters/wallaby/active_record/model_service_provider/validator.rb | wallaby-rails/wallaby-activerecord | 128eabf40182882122f41504823c32ccebbc0f24 | ["MIT"] | 1 | 2020-04-09T11:35:54.000Z | 2020-04-09T11:36:23.000Z | lib/adapters/wallaby/active_record/model_service_provider/validator.rb | wallaby-rails/wallaby-activerecord | 128eabf40182882122f41504823c32ccebbc0f24 | ["MIT"] | 1 | 2020-02-06T16:33:19.000Z | 2020-02-06T16:33:19.000Z |
# frozen_string_literal: true
module Wallaby
class ActiveRecord
class ModelServiceProvider
# Validate values for record create / update
class Validator
# @param model_decorator [Wallaby::ModelDecorator]
def initialize(model_decorator)
@model_decorator = model_decorator
end
# @param resource [Object] resource object
# @return [true] if the resource object is valid
# @return [false] otherwise
def valid?(resource)
resource.attributes.each do |field_name, values|
metadata = @model_decorator.fields[field_name]
next if valid_range_type? values, metadata
resource.errors.add field_name, 'required for range data'
end
resource.errors.blank?
end
protected
# @param values [Array]
# @return [true] if the values are valid range values
# @return [false] otherwise
def valid_range_type?(values, metadata)
!metadata \
|| !%w(daterange tsrange tstzrange).include?(metadata[:type]) \
|| !values.try(:any?, &:blank?)
end
end
end
end
end
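
# Illustrative note (hypothetical values, used only to document the check above): for a
# field whose decorated metadata is { type: 'daterange' }, a value such as
# [Date.today, nil] fails #valid_range_type? because one end is blank, so #valid? adds
# the "required for range data" error to that field.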
| 29.6 | 75 | 0.613176 |
45b54bbd9600917663b68af3a511dd2a5dd1390f | 11,380 | py | Python | buildscripts/linter/git.py | MartinNeupauer/mongo | 6cc2dfe7edd312b8596355edef454e15988e350e | ["Apache-2.0"] | 1 | 2019-05-15T03:41:50.000Z | 2019-05-15T03:41:50.000Z | buildscripts/linter/git.py | MartinNeupauer/mongo | 6cc2dfe7edd312b8596355edef454e15988e350e | ["Apache-2.0"] | 2 | 2021-03-26T00:01:11.000Z | 2021-03-26T00:02:19.000Z | buildscripts/linter/git.py | MartinNeupauer/mongo | 6cc2dfe7edd312b8596355edef454e15988e350e | ["Apache-2.0"] | null | null | null |
"""Git Utility functions."""
from __future__ import absolute_import
from __future__ import print_function
import itertools
import os
import re
import subprocess
from typing import Any, Callable, List, Tuple
from buildscripts import moduleconfig
from buildscripts.resmokelib.utils import globstar
# Path to the modules in the mongodb source tree
# Has to match the string in SConstruct
MODULE_DIR = "src/mongo/db/modules"
def get_base_dir():
# type: () -> str
"""
Get the base directory for mongo repo.
This script assumes that it is running in buildscripts/, and uses
that to find the base directory.
"""
try:
return subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).rstrip()
except subprocess.CalledProcessError:
# We are not in a valid git directory. Use the script path instead.
return os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
def get_repos():
# type: () -> List[Repo]
"""Get a list of Repos to check linters for."""
base_dir = get_base_dir()
# Get a list of modules
# TODO: how do we filter rocks, does it matter?
mongo_modules = moduleconfig.discover_module_directories(
os.path.join(base_dir, MODULE_DIR), None)
paths = [os.path.join(base_dir, MODULE_DIR, m) for m in mongo_modules]
paths.append(base_dir)
return [Repo(p) for p in paths]
class Repo(object):
"""Class encapsulates all knowledge about a git repository, and its metadata to run linters."""
def __init__(self, path):
# type: (str) -> None
"""Construct a repo object."""
self.path = path
def _callgito(self, args):
# type: (List[str]) -> str
"""Call git for this repository, and return the captured output."""
# These two flags are the equivalent of -C in newer versions of Git
# but we use these to support versions pre 1.8.5 but it depends on the command
# and what the current directory is
if "ls-files" in args:
# This command depends on the current directory and works better if not run with
# work-tree
return subprocess.check_output(['git', '--git-dir', os.path.join(self.path, ".git")] +
args)
else:
return subprocess.check_output([
'git', '--git-dir', os.path.join(self.path, ".git"), '--work-tree', self.path
] + args)
def _callgit(self, args):
# type: (List[str]) -> int
"""
Call git for this repository without capturing output.
This is designed to be used when git returns non-zero exit codes.
"""
# These two flags are the equivalent of -C in newer versions of Git
# but we use these to support versions pre 1.8.5 but it depends on the command
# and what the current directory is
return subprocess.call([
'git',
'--git-dir',
os.path.join(self.path, ".git"),
] + args)
def _get_local_dir(self, path):
# type: (str) -> str
"""Get a directory path relative to the git root directory."""
if os.path.isabs(path):
path = os.path.relpath(path, self.path)
# Normalize Windows style paths to Unix style which git uses on all platforms
path = path.replace("\\", "/")
return path
def get_candidates(self, candidates, filter_function):
# type: (List[str], Callable[[str], bool]) -> List[str]
"""
Get the set of candidate files to check by querying the repository.
Returns the full path to the file for clang-format to consume.
"""
if candidates is not None and len(candidates) > 0:
candidates = [self._get_local_dir(f) for f in candidates]
valid_files = list(
set(candidates).intersection(self.get_candidate_files(filter_function)))
else:
valid_files = list(self.get_candidate_files(filter_function))
# Get the full file name here
valid_files = [os.path.normpath(os.path.join(self.path, f)) for f in valid_files]
return valid_files
def _git_ls_files(self, cmd, filter_function):
# type: (List[str], Callable[[str], bool]) -> List[str]
"""Run git-ls-files and filter the list of files to a valid candidate list."""
gito = self._callgito(cmd)
# This allows us to pick all the interesting files
# in the mongo and mongo-enterprise repos
file_list = [line.rstrip() for line in gito.splitlines() if filter_function(line.rstrip())]
return file_list
def get_candidate_files(self, filter_function):
# type: (Callable[[str], bool]) -> List[str]
"""Query git to get a list of all files in the repo to consider for analysis."""
return self._git_ls_files(["ls-files", "--cached"], filter_function)
def get_working_tree_candidate_files(self, filter_function):
# type: (Callable[[str], bool]) -> List[str]
# pylint: disable=invalid-name
"""Query git to get a list of all files in the working tree to consider for analysis."""
return self._git_ls_files(["ls-files", "--cached", "--others"], filter_function)
def get_working_tree_candidates(self, filter_function):
# type: (Callable[[str], bool]) -> List[str]
"""
Get the set of candidate files to check by querying the repository.
Returns the full path to the file for clang-format to consume.
"""
valid_files = list(self.get_working_tree_candidate_files(filter_function))
# Get the full file name here
valid_files = [os.path.normpath(os.path.join(self.path, f)) for f in valid_files]
# Filter out files that git thinks exist but were removed.
valid_files = [f for f in valid_files if os.path.exists(f)]
return valid_files
def is_detached(self):
# type: () -> bool
"""Return true if the current working tree in a detached HEAD state."""
# symbolic-ref returns 1 if the repo is in a detached HEAD state
return self._callgit(["symbolic-ref", "--quiet", "HEAD"]) == 1
def is_ancestor(self, parent, child):
# type: (str, str) -> bool
"""Return true if the specified parent hash an ancestor of child hash."""
# merge base returns 0 if parent is an ancestor of child
return not self._callgit(["merge-base", "--is-ancestor", parent, child])
def is_commit(self, sha1):
# type: (str) -> bool
"""Return true if the specified hash is a valid git commit."""
# cat-file -e returns 0 if it is a valid hash
return not self._callgit(["cat-file", "-e", "%s^{commit}" % sha1])
def is_working_tree_dirty(self):
# type: () -> bool
"""Return true the current working tree have changes."""
# diff returns 1 if the working tree has local changes
return self._callgit(["diff", "--quiet"]) == 1
def does_branch_exist(self, branch):
# type: (str) -> bool
"""Return true if the branch exists."""
# rev-parse returns 0 if the branch exists
return not self._callgit(["rev-parse", "--verify", branch])
def get_merge_base(self, commit):
# type: (str) -> str
"""Get the merge base between 'commit' and HEAD."""
return self._callgito(["merge-base", "HEAD", commit]).rstrip()
def get_branch_name(self):
# type: () -> str
"""
Get the current branch name, short form.
        This returns "master", not "refs/heads/master".
Will not work if the current branch is detached.
"""
branch = self.rev_parse(["--abbrev-ref", "HEAD"])
if branch == "HEAD":
raise ValueError("Branch is currently detached")
return branch
def add(self, command):
# type: (List[str]) -> str
"""Git add wrapper."""
return self._callgito(["add"] + command)
def checkout(self, command):
# type: (List[str]) -> str
"""Git checkout wrapper."""
return self._callgito(["checkout"] + command)
def commit(self, command):
# type: (List[str]) -> str
"""Git commit wrapper."""
return self._callgito(["commit"] + command)
def diff(self, command):
# type: (List[str]) -> str
"""Git diff wrapper."""
return self._callgito(["diff"] + command)
def log(self, command):
# type: (List[str]) -> str
"""Git log wrapper."""
return self._callgito(["log"] + command)
def rev_parse(self, command):
# type: (List[str]) -> str
"""Git rev-parse wrapper."""
return self._callgito(["rev-parse"] + command).rstrip()
def rm(self, command):
# type: (List[str]) -> str
# pylint: disable=invalid-name
"""Git rm wrapper."""
return self._callgito(["rm"] + command)
def show(self, command):
# type: (List[str]) -> str
"""Git show wrapper."""
return self._callgito(["show"] + command)
def expand_file_string(glob_pattern):
# type: (str) -> List[str]
"""Expand a string that represents a set of files."""
return [os.path.abspath(f) for f in globstar.iglob(glob_pattern)]
def get_files_to_check_working_tree(filter_function):
# type: (Callable[[str], bool]) -> List[str]
"""
Get a list of files to check from the working tree.
This will pick up files not managed by git.
"""
repos = get_repos()
valid_files = list(
itertools.chain.from_iterable(
[r.get_working_tree_candidates(filter_function) for r in repos]))
return valid_files
def get_files_to_check(files, filter_function):
# type: (List[str], Callable[[str], bool]) -> List[str]
"""Get a list of files that need to be checked based on which files are managed by git."""
# Get a list of candidate_files
candidates_nested = [expand_file_string(f) for f in files]
candidates = list(itertools.chain.from_iterable(candidates_nested))
if len(files) > 0 and len(candidates) == 0:
raise ValueError("Globs '%s' did not find any files with glob." % (files))
repos = get_repos()
valid_files = list(
itertools.chain.from_iterable(
[r.get_candidates(candidates, filter_function) for r in repos]))
if len(files) > 0 and len(valid_files) == 0:
raise ValueError("Globs '%s' did not find any files with glob in git." % (files))
return valid_files
def get_files_to_check_from_patch(patches, filter_function):
# type: (List[str], Callable[[str], bool]) -> List[str]
"""Take a patch file generated by git diff, and scan the patch for a list of files to check."""
candidates = [] # type: List[str]
# Get a list of candidate_files
check = re.compile(r"^diff --git a\/([\w\/\.\-]+) b\/[\w\/\.\-]+")
lines = [] # type: List[str]
for patch in patches:
with open(patch, "rb") as infile:
lines += infile.readlines()
candidates = [check.match(line).group(1) for line in lines if check.match(line)]
repos = get_repos()
valid_files = list(
itertools.chain.from_iterable(
[r.get_candidates(candidates, filter_function) for r in repos]))
return valid_files
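

# Illustrative usage sketch: a minimal example of how the helpers above compose, assuming
# git is available on the host. The ".py" filter and the printed summary are arbitrary
# choices made for illustration only.
if __name__ == "__main__":

    def is_python_file(path):
        # type: (str) -> bool
        """Filter callback: keep only Python sources."""
        return path.endswith(".py")

    # All tracked Python files across the repo and its modules.
    tracked = get_files_to_check([], is_python_file)

    # Tracked and untracked Python files in the working tree.
    working = get_files_to_check_working_tree(is_python_file)

    print("%d tracked, %d in working tree" % (len(tracked), len(working)))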
| 35.899054 | 99 | 0.618366 |
fdfb72295221706f4f4b5c3ffce58eb9ca67efa5 | 13,606 | ps1 | PowerShell | WS2012R2/lisa/setupscripts/StartVM.ps1 | leifei87/lis-test | 5fa93406f372902d8025efcd94442b9e3af8a1a4 | ["Apache-2.0"] | 1 | 2021-07-31T11:16:07.000Z | 2021-07-31T11:16:07.000Z | WS2012R2/lisa/setupscripts/StartVM.ps1 | leifei87/lis-test | 5fa93406f372902d8025efcd94442b9e3af8a1a4 | ["Apache-2.0"] | null | null | null | WS2012R2/lisa/setupscripts/StartVM.ps1 | leifei87/lis-test | 5fa93406f372902d8025efcd94442b9e3af8a1a4 | ["Apache-2.0"] | 1 | 2018-04-28T07:00:36.000Z | 2018-04-28T07:00:36.000Z |
#####################################################################
#
# Linux on Hyper-V and Azure Test Code, ver. 1.0.0
# Copyright (c) Microsoft Corporation
#
# All rights reserved.
# Licensed under the Apache License, Version 2.0 (the ""License"");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
# ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR
# PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT.
#
# See the Apache Version 2.0 License for specific language governing
# permissions and limitations under the License.
#
#####################################################################
<#
.Synopsis
Run the StartVM test.
Description:
This script sets up additional Network Adapters for a second (dependency) VM, starts it first and configures the interface files
in the OS. Afterwards the main test is started together with the main VM.
It can be used with the main Linux distributions. For the time being it is customized for use with the Networking tests.
The following testParams are mandatory:
NIC=NIC type, Network Type, Network Name, MAC Address
NIC Type can be one of the following:
NetworkAdapter
LegacyNetworkAdapter
Network Type can be one of the following:
External
Internal
Private
    Network Name is the name of an existing network.
Only the Network Name parameter is used by this script, but the others are still necessary, in order to have the same
parameters as the NET_ADD_NIC_MAC script.
    The following is an example of the NIC test parameter
"NIC=NetworkAdapter,Internal,InternalNet,001600112200"
VM2NAME=name_of_second_VM
this is the name of the second VM. It will not be managed by the LIS framework, but by this script.
The following testParams are optional:
STATIC_IP=xx.xx.xx.xx
xx.xx.xx.xx is a valid IPv4 Address. If not specified, a default value of 10.10.10.1 will be used.
This will be assigned to VM1's test NIC.
STATIC_IP2=xx.xx.xx.xx
xx.xx.xx.xx is a valid IPv4 Address. If not specified, an IP Address from the same subnet as VM1's STATIC_IP
will be computed (usually the first address != STATIC_IP in the subnet).This will be assigned as VM2's test NIC.
NETMASK=yy.yy.yy.yy
yy.yy.yy.yy is a valid netmask (the subnet to which the tested netAdapters belong). If not specified, a default value of 255.255.255.0 will be used.
LEAVE_TRAIL=yes/no
if set to yes and the NET_ADD_NIC_MAC.ps1 script was called from within this script for VM2, then it will not be removed
at the end of the script. Also temporary bash scripts generated during the test will not be deleted.
All test scripts must return a boolean ($true or $false)
to indicate if the script completed successfully or not.
.Parameter vmName
Name of the first VM implicated in the test .
.Parameter hvServer
Name of the Hyper-V server hosting the VM.
.Parameter testParams
Test data for this test case
.Example
StartVM -vmName myVM -hvServer localhost -testParams "NIC=NetworkAdapter,Private,Private,001600112200;VM2NAME=vm2Name"
#>
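
# Illustrative invocation combining the mandatory and optional testParams described above.
# The VM names, switch name, MAC address, SSH key file and root directory below are
# placeholders, not values taken from this repository:
#
# .\setupscripts\StartVM.ps1 -vmName "VM1" -hvServer "localhost" `
#   -testParams "NIC=NetworkAdapter,Private,PrivateNet,001600112200;VM2NAME=VM2;STATIC_IP=10.10.10.1;STATIC_IP2=10.10.10.2;NETMASK=255.255.255.0;SshKey=lisa_id_rsa.ppk;RootDir=C:\lisa"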
param([string] $vmName, [string] $hvServer, [string] $testParams)
Set-PSDebug -Strict
#######################################################################
#
# Main script body
#
#######################################################################
# Check input arguments
if ($hvServer -eq $null)
{
"Error: hvServer is null"
return $False
}
if ($testParams -eq $null)
{
"Error: testParams is null"
return $False
}
# Write out test Params
$testParams
# sshKey used to authenticate ssh connection and send commands
$sshKey = $null
# IP Address of first VM
$ipv4 = $null
# IP Address of second VM
$ipv4VM2 = $null
# Name of second VM
$vm2Name = $null
# VM1 IPv4 Address
$vm1StaticIP = $null
# VM2 IPv4 Address
$vm2StaticIP = $null
# Netmask used by both VMs
$netmask = $null
# boolean to leave a trail
$leaveTrail = $null
# switch name
$networkName = $null
#Snapshot name
$snapshotParam = $null
#Test IPv6
$Test_IPv6 = $null
#Test IPv6
$vm2MacAddress = $null
#ifcfg bootproto
$bootproto = $null
# change working directory to root dir
$testParams -match "RootDir=([^;]+)"
if (-not $?)
{
"Mandatory param RootDir=Path; not found!"
return $false
}
$rootDir = $Matches[1]
if (Test-Path $rootDir)
{
Set-Location -Path $rootDir
if (-not $?)
{
"Error: Could not change directory to $rootDir !"
return $false
}
"Changed working directory to $rootDir"
}
else
{
"Error: RootDir = $rootDir is not a valid path"
return $false
}
# Source TCUtils.ps1 for GetIPv4 and other functions
if (Test-Path ".\setupScripts\TCUtils.ps1")
{
. .\setupScripts\TCUtils.ps1
}
else
{
"Error: Could not find setupScripts\TCUtils.ps1"
return $false
}
# Source NET_UTILS.ps1 for network functions
if (Test-Path ".\setupScripts\NET_UTILS.ps1")
{
. .\setupScripts\NET_UTILS.ps1
}
else
{
"Error: Could not find setupScripts\NET_Utils.ps1"
return $false
}
$isDynamic = $false
$params = $testParams.Split(';')
foreach ($p in $params) {
$fields = $p.Split("=")
switch ($fields[0].Trim()) {
"NIC"
{
$nicArgs = $fields[1].Split(',')
if ($nicArgs.Length -eq 3) {
$CurrentDir= "$pwd\"
$testfile = "macAddress.file"
$pathToFile="$CurrentDir"+"$testfile"
$isDynamic = $true
}
}
}
}
if ($isDynamic -eq $true) {
$streamReader = [System.IO.StreamReader] $pathToFile
$vm1MacAddress = $null
}
foreach ($p in $params)
{
$fields = $p.Split("=")
switch ($fields[0].Trim())
{
"VM2NAME" { $vm2Name = $fields[1].Trim() }
"SshKey" { $sshKey = $fields[1].Trim() }
"ipv4" { $ipv4 = $fields[1].Trim() }
"STATIC_IP" { $vm1StaticIP = $fields[1].Trim() }
"STATIC_IP2" { $vm2StaticIP = $fields[1].Trim() }
"Test_IPv6" { $Test_IPv6 = $fields[1].Trim() }
"NETMASK" { $netmask = $fields[1].Trim() }
"MAC" { $vm2MacAddress = $fields[1].Trim() }
"LEAVE_TRAIL" { $leaveTrail = $fields[1].Trim() }
"SnapshotName" { $SnapshotName = $fields[1].Trim() }
"NIC"
{
$nicArgs = $fields[1].Split(',')
if ($nicArgs.Length -lt 3)
{
"Error: Incorrect number of arguments for NIC test parameter: $p"
return $false
}
$nicType = $nicArgs[0].Trim()
$networkType = $nicArgs[1].Trim()
$networkName = $nicArgs[2].Trim()
if ($nicArgs.Length -eq 4) {
$vm1MacAddress = $nicArgs[3].Trim()
}
$legacy = $false
#
# Validate the network adapter type
#
if ("NetworkAdapter" -notcontains $nicType)
{
"Error: Invalid NIC type: $nicType . Must be 'NetworkAdapter'"
return $false
}
#
# Validate the Network type
#
            if (@("External", "Internal", "Private") -notcontains $networkType)
            {
                "Error: Invalid network type: $networkType . Network type must be either: External, Internal, Private"
return $false
}
#
# Make sure the network exists
#
$vmSwitch = Get-VMSwitch -Name $networkName -ComputerName $hvServer
if (-not $vmSwitch)
{
"Error: Invalid network name: $networkName . The network does not exist."
return $false
}
if ($isDynamic -eq $true){
$vm1MacAddress = $streamReader.ReadLine()
}
else {
$retVal = isValidMAC $vm1MacAddress
if (-not $retVal)
{
"Invalid Mac Address $vm1MacAddress"
return $false
}
}
#
# Get Nic with given MAC Address
#
$vm1nic = Get-VMNetworkAdapter -VMName $vmName -ComputerName $hvServer -IsLegacy:$false | where {$_.MacAddress -eq $vm1MacAddress }
if ($vm1nic)
{
"$vmName found NIC with MAC $vm1MacAddress ."
}
else
{
"Error: $vmName - No NIC found with MAC $vm1MacAddress ."
return $false
}
}
default {} # unknown param - just ignore it
}
}
if ($isDynamic -eq $true)
{
$streamReader.close()
}
if (-not $vm2Name)
{
"Error: test parameter vm2Name was not specified"
return $False
}
# make sure vm2 is not the same as vm1
if ("$vm2Name" -like "$vmName")
{
"Error: vm2 must be different from the test VM."
return $false
}
if (-not $sshKey)
{
"Error: test parameter sshKey was not specified"
return $False
}
if (-not $netmask)
{
    $netmask = "255.255.255.0"
}
if (-not $vm2StaticIP)
{
$bootproto = "dhcp"
}
else
{
$bootproto = "static"
}
#set the parameter for the snapshot
$snapshotParam = "SnapshotName = ${SnapshotName}"
#revert VM2
.\setupScripts\RevertSnapshot.ps1 -vmName $vm2Name -hvServer $hvServer -testParams $snapshotParam
Start-sleep -s 5
#
# Verify the VMs exists
#
$vm2 = Get-VM -Name $vm2Name -ComputerName $hvServer -ErrorAction SilentlyContinue
if (-not $vm2)
{
"Error: VM ${vm2Name} does not exist"
return $False
}
# hold testParam data for NET_ADD_NIC_MAC script
$vm2testParam = $null
# remember if we added the NIC or it was already there.
$scriptAddedNIC = $false
# Check for a NIC of the given network type on VM2
$vm2nic = $null
$nic2 = Get-VMNetworkAdapter -VMName $vm2Name -ComputerName $hvServer -IsLegacy:$false | where { $_.SwitchName -like "$networkName" }
#Generate a Mac address for the VM's test nic, if this is not a specified parameter
if (-not $vm2MacAddress) {
$vm2MacAddress = getRandUnusedMAC $hvServer
$CurrentDir= "$pwd\"
$testfile = "macAddressDependency.file"
$pathToFile="$CurrentDir"+"$testfile"
$streamWrite = [System.IO.StreamWriter] $pathToFile
$streamWrite.WriteLine($vm2MacAddress)
$streamWrite.close()
}
#construct NET_ADD_NIC_MAC Parameter
$vm2testParam = "NIC=NetworkAdapter,$networkType,$networkName,$vm2MacAddress"
if ( Test-Path ".\setupscripts\NET_ADD_NIC_MAC.ps1")
{
# Make sure VM2 is shutdown
if (Get-VM -Name $vm2Name -ComputerName $hvServer | Where { $_.State -like "Running" })
{
Stop-VM $vm2Name -force
if (-not $?)
{
"Error: Unable to shut $vm2Name down (in order to add a new network Adapter)"
return $false
}
# wait for VM to finish shutting down
$timeout = 60
while (Get-VM -Name $vm2Name -ComputerName $hvServer | Where { $_.State -notlike "Off" })
{
if ($timeout -le 0)
{
"Error: Unable to shutdown $vm2Name"
return $false
}
start-sleep -s 5
$timeout = $timeout - 5
}
}
.\setupscripts\NET_ADD_NIC_MAC.ps1 -vmName $vm2Name -hvServer $hvServer -testParams $vm2testParam
}
else
{
"Error: Could not find setupScripts\NET_ADD_NIC_MAC.ps1 ."
return $false
}
if (-Not $?)
{
"Error: Cannot add new NIC to $vm2Name"
return $false
}
# get the newly added NIC
$vm2nic = Get-VMNetworkAdapter -VMName $vm2Name -ComputerName $hvServer -IsLegacy:$false | where { $_.MacAddress -like "$vm2MacAddress" }
if (-not $vm2nic)
{
"Error: Could not retrieve the newly added NIC to VM2"
return $false
}
$scriptAddedNIC = $true
#
# Start VM2
#
if (Get-VM -Name $vm2Name -ComputerName $hvServer | Where { $_.State -notlike "Running" })
{
Start-VM -Name $vm2Name -ComputerName $hvServer
if (-not $?)
{
"Error: Unable to start VM ${vm2Name}"
$error[0].Exception
return $False
}
}
$timeout = 400 # seconds
if (-not (WaitForVMToStartKVP $vm2Name $hvServer $timeout))
{
"Warning: $vm2Name never started KVP"
}
# get vm2 ipv4
$vm2ipv4 = GetIPv4 $vm2Name $hvServer
# wait for ssh to start
$timeout = 200 #seconds
if (-not (WaitForVMToStartSSH $vm2ipv4 $timeout))
{
"Error: VM ${vm2Name} never started"
Stop-VM $vm2Name -ComputerName $hvServer -force | out-null
return $False
}
# send utils.sh to VM2
if (-not (Test-Path ".\remote-scripts\ica\utils.sh"))
{
"Error: Unable to find remote-scripts\ica\utils.sh "
return $false
}
"Sending .\remote-scripts\ica\utils.sh to $vm2ipv4 , authenticating with $sshKey"
$retVal = SendFileToVM "$vm2ipv4" "$sshKey" ".\remote-scripts\ica\utils.sh" "/root/utils.sh"
if (-not $retVal)
{
"Failed sending file to VM!"
return $False
}
"Successfully sent utils.sh"
"Configuring test interface (${vm2MacAddress}) on $vm2Name (${vm2ipv4}) "
$retVal = CreateInterfaceConfig $vm2ipv4 $sshKey $bootproto $vm2MacAddress $vm2StaticIP $netmask
if (-not $retVal)
{
"Failed to create Interface on vm $vm2ipv4 for interface with mac $vm2MacAddress, by setting a static IP of $vm2StaticIP netmask $netmask"
return $false
}
#get the ipv4 of the test adapter allocated by DHCP
"vm2 Name = ${vm2Name}"
"vm2 ipv4 = ${vm2ipv4}"
"vm2 MAC = ${vm2MacAddress}"
return $true
| 26.419417 | 160 | 0.619947 |
84b86d3228a6f3d3fa037cd59a6387faac75e130 | 400 | kt | Kotlin | compiler/testData/diagnostics/tests/resolve/overloadConflicts/genericClash.kt | jdemeulenaere/kotlin | bbdbc2896bc780b96ce43d9707d3735635e7dbad | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | compiler/testData/diagnostics/tests/resolve/overloadConflicts/genericClash.kt | jdemeulenaere/kotlin | bbdbc2896bc780b96ce43d9707d3735635e7dbad | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | compiler/testData/diagnostics/tests/resolve/overloadConflicts/genericClash.kt | jdemeulenaere/kotlin | bbdbc2896bc780b96ce43d9707d3735635e7dbad | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | // !DIAGNOSTICS: -UNUSED_PARAMETER
interface A<T> {
fun foo(x: T)
fun foo(x: String)
fun <E> baz(x: E, y: String)
fun <E> baz(x: String, y: E)
}
fun <E> baz(x: E, y: String) {}
fun <E> baz(x: String, y: E) {}
fun bar(x: A<String>) {
x.<!OVERLOAD_RESOLUTION_AMBIGUITY!>foo<!>("")
x.<!CANNOT_COMPLETE_RESOLVE!>baz<!>("", "")
<!CANNOT_COMPLETE_RESOLVE!>baz<!>("", "")
}
| 20 | 49 | 0.5575 |
79dce13cf4e07d3080acfa107dfa926203ec3761 | 81 | php | PHP | app/Admin/bootstrap.php | 17634062908/laravel-web | c573a9b17cfb24790e146db53ad58f2f4142733c | [
"MIT"
] | null | null | null | app/Admin/bootstrap.php | 17634062908/laravel-web | c573a9b17cfb24790e146db53ad58f2f4142733c | [
"MIT"
] | 1 | 2021-02-02T19:14:37.000Z | 2021-02-02T19:14:37.000Z | app/Admin/bootstrap.php | weiqiqaq/vue-admin | 811a6677f6b1688086c77d6b48c3980e7642d254 | [
"MIT"
] | null | null | null | <?php
//Admin::css(["https://unpkg.com/element-ui/lib/theme-chalk/index.css"]);
| 20.25 | 73 | 0.666667 |
dd7db3ac24fa956a04612db4a2291a7ca196b8c1 | 2,003 | java | Java | kmeans-computation/src/main/java/com/cloudera/oryx/kmeans/computation/evaluate/WeightVectorsFn.java | snavjot/oryx | 770b3c29289c622195c818522ee82ebffcdcda69 | [
"BSD-3-Clause"
] | 1 | 2019-06-27T11:34:14.000Z | 2019-06-27T11:34:14.000Z | kmeans-computation/src/main/java/com/cloudera/oryx/kmeans/computation/evaluate/WeightVectorsFn.java | BenediktSchackenberg/oryx | f358cdbb700f39c9870d84b2ad0e8ea2edc98692 | [
"BSD-3-Clause"
] | null | null | null | kmeans-computation/src/main/java/com/cloudera/oryx/kmeans/computation/evaluate/WeightVectorsFn.java | BenediktSchackenberg/oryx | f358cdbb700f39c9870d84b2ad0e8ea2edc98692 | [
"BSD-3-Clause"
] | 1 | 2021-03-24T12:40:39.000Z | 2021-03-24T12:40:39.000Z | /*
* Copyright (c) 2013, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.oryx.kmeans.computation.evaluate;
import com.cloudera.oryx.computation.common.fn.OryxDoFn;
import com.cloudera.oryx.kmeans.computation.AvroUtils;
import com.cloudera.oryx.kmeans.common.WeightedRealVector;
import com.cloudera.oryx.kmeans.computation.cluster.KSketchIndex;
import org.apache.crunch.CrunchRuntimeException;
import org.apache.crunch.Emitter;
import org.apache.crunch.Pair;
import java.util.List;
public final class WeightVectorsFn extends OryxDoFn<ClosestSketchVectorData, Pair<Integer, WeightedRealVector>> {
private KSketchIndex index;
private final String indexKey;
public WeightVectorsFn(String indexKey) {
this.indexKey = indexKey;
}
public WeightVectorsFn(KSketchIndex index) {
this.index = index;
this.indexKey = null;
}
@Override
public void initialize() {
super.initialize();
if (index == null) {
try {
index = AvroUtils.readSerialized(indexKey, getConfiguration());
} catch (Exception e) {
throw new CrunchRuntimeException(e);
}
}
}
@Override
public void process(ClosestSketchVectorData input, Emitter<Pair<Integer, WeightedRealVector>> emitter) {
List<List<WeightedRealVector>> data = index.getWeightedVectors(input);
for (int foldId = 0; foldId < data.size(); foldId++) {
for (WeightedRealVector wrv : data.get(foldId)) {
emitter.emit(Pair.of(foldId, wrv));
}
}
}
}
| 31.296875 | 113 | 0.724913 |
436e821a1cb5e3c09f6b78d1f70f55ff4861e61d | 382 | ts | TypeScript | deno_runtime/_global/deno_event.ts | guobin211/design-patterns | 33f4e9661b91bc0e163b9a8e42a3321f88053956 | [
"MIT"
] | null | null | null | deno_runtime/_global/deno_event.ts | guobin211/design-patterns | 33f4e9661b91bc0e163b9a8e42a3321f88053956 | [
"MIT"
] | null | null | null | deno_runtime/_global/deno_event.ts | guobin211/design-patterns | 33f4e9661b91bc0e163b9a8e42a3321f88053956 | [
"MIT"
] | 1 | 2020-07-21T09:56:34.000Z | 2020-07-21T09:56:34.000Z | class ReactivePerson implements EventListenerObject {
constructor(public name = "jack", public age = 22) {}
handleEvent(evt: Event): void | Promise<void> {
return new Promise((resolve) => {
console.log(evt);
resolve();
});
}
}
const jack = new ReactivePerson();
addEventListener("click", (e) => jack.handleEvent(e));
dispatchEvent(new Event("click"));
| 22.470588 | 55 | 0.657068 |
033137ef34982aecff103599add74082c233e9e7 | 111 | sql | SQL | 09. Exam Prep/02. Exam - 24 April 2017/03. Update/Update.sql | pirocorp/Databases-Basics-MS-SQL-Server | 2049499b2b8f7d011be79abc0b326486258e4d0a | [
"MIT"
] | null | null | null | 09. Exam Prep/02. Exam - 24 April 2017/03. Update/Update.sql | pirocorp/Databases-Basics-MS-SQL-Server | 2049499b2b8f7d011be79abc0b326486258e4d0a | [
"MIT"
] | null | null | null | 09. Exam Prep/02. Exam - 24 April 2017/03. Update/Update.sql | pirocorp/Databases-Basics-MS-SQL-Server | 2049499b2b8f7d011be79abc0b326486258e4d0a | [
"MIT"
] | null | null | null | SELECT *
FROM Jobs
UPDATE Jobs
SET MechanicId = 3, [Status] = 'In Progress'
WHERE [Status] = 'Pending' | 15.857143 | 47 | 0.648649 |
1215a1e867eed82e8ef6ce9ed1073f13cb2b1188 | 412 | swift | Swift | Carthage/Checkouts/Spatial/SpatialExample/view/main/card/content/middle/MiddleContent.swift | eonist/ProgressIndicator | 9ea95314f497527b893a6b5449db7781d47ecf30 | [
"MIT"
] | 3 | 2018-11-25T12:00:44.000Z | 2022-01-23T04:40:26.000Z | Carthage/Checkouts/Spatial/SpatialExample/view/main/card/content/middle/MiddleContent.swift | eonist/ProgressIndicator | 9ea95314f497527b893a6b5449db7781d47ecf30 | [
"MIT"
] | null | null | null | Carthage/Checkouts/Spatial/SpatialExample/view/main/card/content/middle/MiddleContent.swift | eonist/ProgressIndicator | 9ea95314f497527b893a6b5449db7781d47ecf30 | [
"MIT"
] | null | null | null | import UIKit
import Spatial
class MiddleContent: UIView {
lazy var itemViews: [ItemView] = createItemViews()
override init(frame: CGRect) {
super.init(frame: frame)
self.backgroundColor = .yellow
_ = itemViews
}
/**
* Boilerplate
*/
@available(*, unavailable)
required init?(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
}
| 21.684211 | 57 | 0.648058 |
0bd8045f2dbcf54caf11bd55a7c98fc6696f4f23 | 8,958 | cpp | C++ | TestRequirements/src/TestRequirements.cpp | parakhnr/TestHarness | d74c3e4b7f47f73dcc0e2dbfc152c05a7d8a22c7 | [
"MIT"
] | null | null | null | TestRequirements/src/TestRequirements.cpp | parakhnr/TestHarness | d74c3e4b7f47f73dcc0e2dbfc152c05a7d8a22c7 | [
"MIT"
] | null | null | null | TestRequirements/src/TestRequirements.cpp | parakhnr/TestHarness | d74c3e4b7f47f73dcc0e2dbfc152c05a7d8a22c7 | [
"MIT"
] | null | null | null |
/////////////////////////////////////////////////////////////////////////////
// TestRequirements.cpp - Tests for all the requirements in project 3 //
// ----------------------------------------------------------------------- //
// Language: Visual C++, Visual Studio 2017 //
// Platform: Dell Inspiron 7378, Core i5, Windows 10 //
// Application: Graduate coursework, Fall - 2018 //
// Author: Naman Parakh, Syracuse University //
// [email protected] //
/////////////////////////////////////////////////////////////////////////////
/*
* Module Operations:
* -------------------
 * This module provides the tests for all the requirements of Project 3.
*
* Required Files:
* ---------------
* ILogger.h FileUtilities.h ITest.h
*
 * Maintenance History:
* ---------------------
* Version 1.0 : 8th November 2017
* - Initial release
*/
#include "../../TestHarness/src/testutils/ILogger.h"
#include "../../TestHarness/src/testutils/ITest.h"
#include <iostream>
#include <sstream>
#include "../../CppCommWithFileXfer/Utilities/FileUtilities/FileUtilities.h"
using namespace Utilities;
//////////////////////////////////////////////////////////////////////////////
// Requirement 1
// - Demonstrate use of C++
class DLL_DECL Requirement1 : public ITest {
public:
bool execute() {
std::ostringstream stream;
std::string msg = "Requirement 1 - Use of C++ files\n";
Patterns patterns;
patterns.push_back("*.cpp");
patterns.push_back("*.h");
showDirContents("../../../TestHarness/src", msg, patterns, stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 1";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement2
// - Demonstrate use of WPF for user display
class DLL_DECL Requirement2 : public ITest {
public:
bool execute() {
std::string msg = "Requirement 2 - Use of WPF";
std::ostringstream stream;
Patterns patterns;
showDirContents("../../../GUI/", msg, patterns, stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 2";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement3
// -Demonstrate asynchronous client
class DLL_DECL Requirement3 : public ITest {
public:
bool execute() {
		std::string msg = "Requirement 3 - Demonstrate client doesn't wait after sending request to the server for receiving the reply";
std::ostringstream stream;
showFileLines("../../../GUI/MainWindow.xaml.cs", 202, 220, msg, stream);
showFileLines("../../../GUI/MainWindow.xaml.cs", 359, 377, msg, stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 3";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement4
// - Demonstrate display of GUI
class DLL_DECL Requirement4 : public ITest {
public:
bool execute() {
std::string msg = "Requirement 4 - Display the GUI";
std::ostringstream stream;
displayFileContents(msg, "../../../GUI/MainWindow.xaml", stream);
displayFileContents(msg, "../../../GUI/PopupWindow.xaml", stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 4";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement5
// - Demonstrate different messages as per requirement
class DLL_DECL Requirement5 : public ITest {
public:
bool execute() {
std::string msg = "Requirement 5 - Demonstration of messages as per requirement";
std::ostringstream stream;
stream << "\n Get directories and Get files message format";
showFileLines("../../../GUI/MainWindow.xaml.cs", 289, 398, msg, stream);
stream << "\n Test request message format";
showFileLines("../../../GUI/MainWindow.xaml.cs", 371, 376, msg, stream);
stream << "\n Test request result message format";
showFileLines("../../../TestHarness/src/TestHarness.cpp", 67, 72, msg, stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 5";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement6
// - Demonstrate sending test request from client to server
class DLL_DECL Requirement6 : public ITest {
public:
bool execute() {
std::string msg = "Requirement 6 - Sending Test request from client to server";
std::ostringstream stream;
showFileLines("../../../GUI/MainWindow.xaml.cs", 359, 377, msg, stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 6";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement7
// - Demonstrate executing tests as series of test requests in child process
class DLL_DECL Requirement7 : public ITest {
public:
bool execute() {
std::string msg = "Requirement 7 - Executing tests as series of test requests in child process";
std::ostringstream stream;
stream << "\n Listening for test request and forwarding it to child";
showFileLines("../../../TestHarnessCore/src/TestHarnessCore.cpp", 125, 157, msg, stream);
stream << "\n Child processing the test request";
showFileLines("../../../TestHarness/src/TestHarness.cpp", 112, 133, msg, stream);
showFileLines("../../../TestHarness/src/TestHarness.cpp", 47, 77, msg, stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 7";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement8
// - Demonstrate sending result messages post testing the dll
class DLL_DECL Requirement8 : public ITest {
public:
bool execute() {
std::string msg = "Requirement 8 - Sending result messages post testing the dll";
std::ostringstream stream;
showFileLines("../../../TestHarness/src/TestHarness.cpp", 47, 77, msg, stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 8";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement9
// - Demonstrate concurrent running of two or more clients.
class DLL_DECL Requirement9 : public ITest {
public:
bool execute() {
std::string msg = "Requirement 9 - Concurrent running of two or more client";
std::ostringstream stream;
showFileLines("../../../TestHarness/src/TestHarness.cpp", 47, 77, msg, stream);
logger->write(stream.str());
return true;
}
Author author() {
return "Jim Moriarity";
}
Name name() {
return "Requirement 9";
}
void setHostedLogger(IHostedLogger* hostedLogger) {
logger = hostedLogger;
}
private:
IHostedLogger* logger;
};
//////////////////////////////////////////////////////////////////////////////
// Requirement 10
// - Container for the all the tests to performed for the testing the project3
// requirements
class DLL_DECL TestRequirements : public ITests {
public:
std::vector<ITest*> getTests() {
std::vector<ITest*> _tests;
_tests.push_back(new Requirement1());
_tests.push_back(new Requirement2());
_tests.push_back(new Requirement3());
_tests.push_back(new Requirement4());
_tests.push_back(new Requirement5());
_tests.push_back(new Requirement6());
_tests.push_back(new Requirement7());
_tests.push_back(new Requirement8());
_tests.push_back(new Requirement9());
return _tests;
}
};
//------< utility function to get the reference to test container >-------------
DLL_DECL ITests* getITests() {
return new TestRequirements();
} | 26.581602 | 133 | 0.598683 |
5bc1845522ba697bb87f6d07d928ccb900534e32 | 4,377 | css | CSS | packages/vue/src/main.css | LayreBoi/Client | 9d9ebc85bbfd25d35c7467c3a7efa901b16d8959 | [
"WTFPL"
] | null | null | null | packages/vue/src/main.css | LayreBoi/Client | 9d9ebc85bbfd25d35c7467c3a7efa901b16d8959 | [
"WTFPL"
] | null | null | null | packages/vue/src/main.css | LayreBoi/Client | 9d9ebc85bbfd25d35c7467c3a7efa901b16d8959 | [
"WTFPL"
] | null | null | null | .theme-dark {
--color-primary-300: 211,167,255; /* #D3A7FF */ /* Main Color */
--color-primary-400: 180,105,255; /* #B469FF */
--color-primary-500: 129,43,215; /* #812BD7 */
--color-background-200: 1,4,9; /* #010409 */
--color-background-300: 15,19,25; /* #0F1319 */
--color-background-400: 24,29,37; /* #181D25 */
--color-background-500: 35,41,49; /* #232931 */
--color-background-600: 42,48,56; /* #2A3038 */
--color-tooltip-background: 255, 255, 255; /* #ffffff */
--color-tooltip-text: 1,4,9; /* #010409 */
--color-text: 255, 255, 255; /* #ffffff */
--color-caution: 255, 0, 122; /* #ff007a */
--color-warning: 255,168,0; /* #FFA800 */
--color-active: 0,255,103; /* #00ff67 */
--color-100mbps: var(--color-warning);
--color-1000mbps: var(--color-active);
--color-10000mbps: 0,255,238; /* #00ffee */
--color-100000mbps: 187,255,255; /* #bbffff */
}
.theme-nord {
--color-primary-300: 129,161,193; /* #81a1c1 */ /* Main Color */
--color-primary-400: 129,161,193; /* #81A1C1 */
--color-primary-500: 94,129,172; /* #5E81AC */
--color-background-200: 46,52,64; /* #2E3440 */
--color-background-300: 59,66,82; /* #3B4252 */
--color-background-400: 67,76,94; /* #434C5E */
--color-background-500: 31,35,43; /* #1f232b */
--color-background-600: 46,52,64; /* #2E3440 */
--color-tooltip-background: 73,83,102; /* #495366 */
--color-tooltip-text: 129,161,193; /* #81A1C1 */
--color-text: 136,192,208; /* #88C0D0 */
--color-caution: 191,97,106; /* #BF616A */
--color-active: 163,190,140; /* #A3BE8C */
--color-warning: 235,203,139; /* #EBCB8B */
--color-100mbps: var(--color-warning);
--color-1000mbps: var(--color-active);
--color-10000mbps: var(--color-text);
--color-100000mbps: 236,239,244; /* #ECEFF4 */
}
.theme-opera {
--color-primary-300: 194,153,255; /* #c299ff */ /* Main Color */
--color-primary-400: 183,135,255; /* #b787ff */
--color-primary-500: 169,112,255; /* #A970FF */
--color-background-200: 14,12,29; /* #0E0C1D */
--color-background-300: 22,18,43; /* #16122B */
--color-background-400: 36,31,71; /* #241F47 */
--color-background-500: 51,35,82; /* #332352 */
--color-background-600: 57,48,112; /* #393070 */
--color-tooltip-background: 255,255,255; /* #ffffff */
--color-tooltip-text: 7,6,14; /* #07060E */
--color-text: 211,181,255; /* #d3b5ff */
--color-caution: 255,0,179; /* #ff00b3 */
--color-warning: 255,0,72; /* #ff0048 */
--color-active: 4,194,156; /* #04c29c */
--color-100mbps: var(--color-warning);
--color-1000mbps: var(--color-active);
--color-10000mbps: 0,255,238; /* #00ffee */
--color-100000mbps: 187,255,255; /* #bbffff */
}
.theme-galaxy {
--color-primary-300: 111,0,255; /* #6f00ff */ /* Main Color */
--color-primary-400: 129,79,255; /* #814fff */
--color-primary-500: 25,2,55; /* #190237 */
--color-background-200: 0,0,43; /* #00002b */
--color-background-300: 25,2,55; /* #190237 */
--color-background-400: 37,0,107; /* #25006b */
--color-background-500: 25,2,55; /* #190237 */
--color-background-600: 29,77,228; /* #1d4de4 */
--color-tooltip-background: 151,64,220; /* #9740dc */
--color-tooltip-text: 255,255,255; /* #ffffff */
--color-text: 255,255,255; /* #ffffff */
--color-caution: 225,0,216; /* #e100d8 */
--color-warning: 59,77,228; /* #3b4de4 */
--color-active: 39,229,254; /* #27e5fe */
--color-100mbps: var(--color-warning);
--color-1000mbps: var(--color-active);
--color-10000mbps: 190,50,150; /* #be3296 */
--color-100000mbps: 255,0,216; /* #ff00d8 */
}
.drag {
-webkit-app-region: drag;
}
.no-drag {
-webkit-app-region: no-drag;
} | 40.906542 | 82 | 0.495316 |
85cc5d0bed101ad52bd521e4dac794d6b0141881 | 1,842 | sql | SQL | Sql/Schema/Stored Procedures/Articles/Article_Add.sql | Datasilk/Saber-Collector | e66b4cc489f5a649d62489b2a38155fe1822505c | [
"Apache-2.0"
] | null | null | null | Sql/Schema/Stored Procedures/Articles/Article_Add.sql | Datasilk/Saber-Collector | e66b4cc489f5a649d62489b2a38155fe1822505c | [
"Apache-2.0"
] | null | null | null | Sql/Schema/Stored Procedures/Articles/Article_Add.sql | Datasilk/Saber-Collector | e66b4cc489f5a649d62489b2a38155fe1822505c | [
"Apache-2.0"
] | null | null | null | IF EXISTS (SELECT * FROM sys.objects WHERE type = 'P' AND name = 'Article_Add')
DROP PROCEDURE [dbo].[Article_Add]
GO
CREATE PROCEDURE [dbo].[Article_Add]
@feedId int = 0,
@subjects int = 0,
@subjectId int = 0,
@score smallint = 0,
@domain nvarchar(50),
@url nvarchar(250),
@title nvarchar(250),
@summary nvarchar(250),
@filesize float = 0,
@linkcount int = 0,
@linkwordcount int = 0,
@wordcount int = 0,
@sentencecount smallint = 0,
@paragraphcount smallint = 0,
@importantcount smallint = 0,
@yearstart smallint = 0,
@yearend smallint = 0,
@years nvarchar(50),
@images smallint = 0,
@datepublished datetime,
@relavance smallint = 1,
@importance smallint = 1,
@fiction smallint = 1,
@analyzed float = 0.1,
@active bit = 1
AS
DECLARE @articleId int = NEXT VALUE FOR SequenceArticles
DECLARE @domainId int = 0
SELECT @domainId = domainId FROM Domains WHERE domain=@domain
INSERT INTO Articles
(articleId, feedId, subjects, subjectId, domainId, score, domain, url, title, summary, filesize, linkcount, linkwordcount, wordcount, sentencecount, paragraphcount, importantcount, analyzecount,
yearstart, yearend, years, images, datecreated, datepublished, relavance, importance, fiction, analyzed, active)
VALUES
(@articleId, @feedId, @subjects, @subjectId, @domainId, @score, @domain, @url, @title, @summary, @filesize, @linkcount, @linkwordcount, @wordcount, @sentencecount, @paragraphcount, @importantcount, 1,
@yearstart, @yearend, @years, @images, GETDATE(), @datepublished, @relavance, @importance, @fiction, @analyzed, @active)
--move related Download Queue record into Downloads table
INSERT INTO Downloads ([id], [feedId], [domainId], [status], [tries], [url], [path], [datecreated])
SELECT * FROM DownloadQueue WHERE [url]=@url
DELETE FROM DownloadQueue WHERE [url] = @url
SELECT @articleId
| 38.375 | 201 | 0.724756 |
c6db7ea08d8a0eb8166bf1d9880432fa2239fd18 | 302 | lua | Lua | plugins/global_access_control/config.lua | lh83mail/ngr | 20821b956955714608b89bfce355299e11377800 | [
"Apache-2.0"
] | 97 | 2019-11-09T02:01:15.000Z | 2021-11-08T10:09:49.000Z | plugins/global_access_control/config.lua | lh83mail/ngr | 20821b956955714608b89bfce355299e11377800 | [
"Apache-2.0"
] | 21 | 2019-08-03T13:06:14.000Z | 2020-02-04T16:22:26.000Z | plugins/global_access_control/config.lua | lh83mail/ngr | 20821b956955714608b89bfce355299e11377800 | [
"Apache-2.0"
] | 33 | 2019-11-29T10:41:42.000Z | 2022-01-07T09:33:12.000Z | ---
--- Configuration
--- Created by Go Go Easy Team.
--- DateTime: 2018/5/16 6:04 PM
---
local _M = {}
_M.name = "global_access_control"
_M.small_error_types = {
-- see error_utils
gac = {
type_service_not_found = "ctl.no_service",
type_host_error = "ctl.host_error"
}
}
return _M
| 15.1 | 50 | 0.609272 |
e3df31fd6b1f7881ae450faabcc5eb46d3b87199 | 635 | swift | Swift | ChLayout/Extensions/UITableView+ChLayount.swift | sionyx/ChLayout | a24a93918141f9475edb0e0efada5b44e74e3f30 | [
"MIT"
] | 5 | 2020-02-04T15:39:13.000Z | 2021-04-17T20:32:51.000Z | ChLayout/Extensions/UITableView+ChLayount.swift | sionyx/ChLayout | a24a93918141f9475edb0e0efada5b44e74e3f30 | [
"MIT"
] | null | null | null | ChLayout/Extensions/UITableView+ChLayount.swift | sionyx/ChLayout | a24a93918141f9475edb0e0efada5b44e74e3f30 | [
"MIT"
] | null | null | null | //
// UITableView+ChLayount.swift
// ChLayout
//
// Created by Вадим Балашов on 05.05.2020.
// Copyright © 2020 sionyx.ru. All rights reserved.
//
import UIKit
extension UITableView {
public func allowsSelection(_ allowsSelection: Bool) -> Self {
self.allowsSelection = allowsSelection
return self
}
public func separatorStyle(_ separatorStyle: UITableViewCell.SeparatorStyle) -> Self {
self.separatorStyle = separatorStyle
return self
}
public func separatorColor(_ separatorColor: UIColor) -> Self {
self.separatorColor = separatorColor
return self
}
}
| 23.518519 | 90 | 0.680315 |
0d7ac01aeb02f6debe83a757cad5f57a6a7ebb1e | 735 | rb | Ruby | wsapi-with-faraday/rally-post-apikey-auth.rb | CustomAgile/ruby-rest-apps | a54668824376da7dd6893e4fa30f24f345a7e32f | [
"MIT"
] | null | null | null | wsapi-with-faraday/rally-post-apikey-auth.rb | CustomAgile/ruby-rest-apps | a54668824376da7dd6893e4fa30f24f345a7e32f | [
"MIT"
] | null | null | null | wsapi-with-faraday/rally-post-apikey-auth.rb | CustomAgile/ruby-rest-apps | a54668824376da7dd6893e4fa30f24f345a7e32f | [
"MIT"
] | null | null | null | require 'faraday'
require 'yaml'
require 'base64'
require 'json'
require 'pp'
config = YAML.load_file('./configs/rally.yml')
apikey = config['apikey']
workspace = config['workspace'].to_s
conn = Faraday.new(:url => 'https://rally1.rallydev.com/slm/webservice/v2.0') do |faraday|
faraday.request :url_encoded
faraday.response :logger
faraday.adapter Faraday.default_adapter
end
res = conn.post do |req|
req.url 'defect/create?workspace=/workspace/' + workspace
req.headers['Content-Type'] = 'application/json'
req.headers['zsessionid'] = apikey
req.body = '{ "defect":{"name": "bad defect F"} }'
end
parsed = JSON(res.body)
oid = parsed["CreateResult"]["Object"]["ObjectID"]
p oid
| 25.344828 | 90 | 0.67483 |
da47f2cd93c320630c68087103820467e57002f8 | 1,375 | php | PHP | src/Message/RecurringProfileAddResponse.php | joomdonation/omnipay-payflow-extended | 97464fa53bbaf945d16cbe5320d9530e6522b8fa | [
"MIT"
] | 1 | 2016-10-09T04:01:36.000Z | 2016-10-09T04:01:36.000Z | src/Message/RecurringProfileAddResponse.php | joomdonation/omnipay-payflow-extended | 97464fa53bbaf945d16cbe5320d9530e6522b8fa | [
"MIT"
] | null | null | null | src/Message/RecurringProfileAddResponse.php | joomdonation/omnipay-payflow-extended | 97464fa53bbaf945d16cbe5320d9530e6522b8fa | [
"MIT"
] | 6 | 2016-05-06T04:29:29.000Z | 2021-09-16T17:05:55.000Z | <?php
namespace Omnipay\PayflowExtended\Message;
use Omnipay\Payflow\Message\Response;
class RecurringProfileAddResponse extends Response
{
// this is the optional sale or authorization transaction
public function isSuccessful()
{
return isset($this->data['TRXRESULT']) && '0' === $this->data['TRXRESULT'];
}
// this is the optional sale or authorization transaction
public function getTransactionReference()
{
return isset($this->data['TRXPNREF']) ? $this->data['TRXPNREF'] : null;
}
// this is the optional sale or authorization transaction
public function getMessage()
{
return isset($this->data['RESPMSG']) ? $this->data['RESPMSG'] : null;
}
// this is for the profile action
public function isProfileActionSuccessful()
{
return isset($this->data['RESULT']) && '0' === $this->data['RESULT'];
}
// this is for the profile action
public function getProfileReference()
{
return isset($this->data['RPREF']) ? $this->data['RPREF'] : null;
}
// this is for the profile action
public function getProfileID()
{
return isset($this->data['PROFILEID']) ? $this->data['PROFILEID'] : null;
}
}
| 30.555556 | 87 | 0.573818 |
da87d386ad5baaf0e09ebbdb6c81fb7672f1658b | 6,037 | php | PHP | Server/htdocs/AppController/commands_RSM/shared/classLbxConcepts_init.php | Redsauce/RSM-Core | 9fd0c674dc81d3d344a3d86e11c782f8b2d2f34c | [
"MIT"
] | 2 | 2017-12-14T17:21:58.000Z | 2020-09-27T09:31:13.000Z | Server/htdocs/AppController/commands_RSM/shared/classLbxConcepts_init.php | Redsauce/RSM-Core | 9fd0c674dc81d3d344a3d86e11c782f8b2d2f34c | [
"MIT"
] | null | null | null | Server/htdocs/AppController/commands_RSM/shared/classLbxConcepts_init.php | Redsauce/RSM-Core | 9fd0c674dc81d3d344a3d86e11c782f8b2d2f34c | [
"MIT"
] | null | null | null | <?php
// Database connection startup
require_once "../utilities/RSdatabase.php";
require_once "../utilities/RSMitemsManagement.php";
require_once "../utilities/RSMlistsManagement.php";
// definitions
$clientID = $GLOBALS['RS_POST']['clientID'];
$userID = $GLOBALS['RS_POST']['userID'];
$operationID = $GLOBALS['RS_POST']['operationID'];
$subAccountID = $GLOBALS['RS_POST']['subAccountID'];
// get concepts item type
$itemTypeID = getClientItemTypeID_RelatedWith_byName($definitions['concepts'], $clientID);
// get concepts properties IDs
$conceptNamePropertyID = getClientPropertyID_RelatedWith_byName($definitions['conceptName'], $clientID);
$conceptProjectPropertyID = getClientPropertyID_RelatedWith_byName($definitions['conceptProjectID'], $clientID);
$conceptUnitsPropertyID = getClientPropertyID_RelatedWith_byName($definitions['conceptUnits'], $clientID);
$conceptIVAPropertyID = getClientPropertyID_RelatedWith_byName($definitions['conceptIVA'], $clientID);
$conceptPricePropertyID = getClientPropertyID_RelatedWith_byName($definitions['conceptPrice'], $clientID);
$conceptDeductionPropertyID = getClientPropertyID_RelatedWith_byName($definitions['conceptDeduction'], $clientID);
$conceptStockItemPropertyID = getClientPropertyID_RelatedWith_byName($definitions['conceptStockItemID'], $clientID);
// get concepts properties allowed
$propertiesAllowed = getVisibleProperties($itemTypeID, $clientID, $userID);
// --- FIRST PART RESULTS ---
// get properties names (they will be assigned to the list columns)
if (in_array($conceptNamePropertyID, $propertiesAllowed)) { $nameAllowed = '1'; } else { $nameAllowed = '0'; }
if (in_array($conceptProjectPropertyID, $propertiesAllowed)) { $projAllowed = '1'; } else { $projAllowed = '0'; }
if (in_array($conceptUnitsPropertyID, $propertiesAllowed)) { $unitsAllowed = '1'; } else { $unitsAllowed = '0'; }
if (in_array($conceptIVAPropertyID, $propertiesAllowed)) { $IVAAllowed = '1'; } else { $IVAAllowed = '0'; }
if (in_array($conceptPricePropertyID, $propertiesAllowed)) { $priceAllowed = '1'; } else { $priceAllowed = '0'; }
if (in_array($conceptDeductionPropertyID, $propertiesAllowed)) { $deductionAllowed = '1'; } else { $deductionAllowed = '0'; }
if (in_array($conceptStockItemPropertyID, $propertiesAllowed)) { $stockItemIDAllowed = '1'; } else { $stockItemIDAllowed = '0'; }
$results[0]['concepts'] = getClientItemTypeName($itemTypeID, $clientID);
$results[0]['name' ] = getClientPropertyName($conceptNamePropertyID , $clientID).'::'.$nameAllowed; // fix me: separator used -> ::
$results[0]['project' ] = getClientPropertyName($conceptProjectPropertyID , $clientID).'::'.$projAllowed;
$results[0]['units' ] = getClientPropertyName($conceptUnitsPropertyID , $clientID).'::'.$unitsAllowed;
$results[0]['VAT' ] = getClientPropertyName($conceptIVAPropertyID , $clientID).'::'.$IVAAllowed;
$results[0]['price' ] = getClientPropertyName($conceptPricePropertyID , $clientID).'::'.$priceAllowed;
$results[0]['deduction' ] = getClientPropertyName($conceptDeductionPropertyID , $clientID).'::'.$deductionAllowed;
$results[0]['stockItemID' ] = getClientPropertyName($conceptStockItemPropertyID , $clientID).'::'.$stockItemIDAllowed;
// get properties default values
$results[0]['nameDefValue' ] = getClientPropertyDefaultValue($conceptNamePropertyID , $clientID);
$results[0]['projectIDDefValue' ] = getClientPropertyDefaultValue($conceptProjectPropertyID , $clientID);
$results[0]['unitsDefValue' ] = getClientPropertyDefaultValue($conceptUnitsPropertyID , $clientID);
$results[0]['VATDefValue' ] = getClientPropertyDefaultValue($conceptIVAPropertyID , $clientID);
$results[0]['priceDefValue' ] = getClientPropertyDefaultValue($conceptPricePropertyID , $clientID);
$results[0]['deductionDefValue' ] = getClientPropertyDefaultValue($conceptDeductionPropertyID , $clientID);
$results[0]['projectDefValue' ] = translateSingleIdentifier ($conceptProjectPropertyID, $results[0]['projectIDDefValue'], $clientID);
$results[0]['stockItemDefValue' ] = getClientPropertyDefaultValue($conceptStockItemPropertyID , $clientID);
// get operations and projects item types
$operationsItemTypeID = getClientItemTypeID_RelatedWith_byName($definitions['operations'], $clientID);
$projectsItemTypeID = getClientItemTypeID_RelatedWith_byName($definitions['projects'], $clientID);
$subAccountsItemTypeID = getClientItemTypeID_RelatedWith_byName($definitions['subAccounts'], $clientID);
// get operation subAccountID
if($subAccountID==""){
$subAccountID = getPropertyValue($definitions['operationSubAccountID'], $operationsItemTypeID, $operationID, $clientID);
}
// get operation subAccountName
$results[0]['subAccountName'] = getMainPropertyValue($subAccountsItemTypeID, $subAccountID, $clientID);
// build filter properties array
$filterProperties = array();
$filterProperties[] = array('ID' => getClientPropertyID_RelatedWith_byName($definitions['projectClient'], $clientID), 'value' => $subAccountID, 'mode' => 'IN');
// build return properties array
$returnProperties = array();
$returnProperties[] = array('ID' => getMainPropertyID($projectsItemTypeID, $clientID), 'name' => 'mainValue');
// get projects
$projectsQueryResults = IQ_getFilteredItemsIDs($projectsItemTypeID, $clientID, $filterProperties, $returnProperties, 'mainValue');
if ($projectsQueryResults->num_rows == 0) {
// If there were no results returned, we will list all the open projects
$openStatus = getValue(getClientListValueID_RelatedWith(getAppListValueID('projectStatusOpen'), $clientID), $clientID);
$filterProperties = array();
$filterProperties[] = array('ID' => getClientPropertyID_RelatedWith_byName($definitions['projectStatus'], $clientID), 'value' => $openStatus);
$projectsQueryResults = IQ_getFilteredItemsIDs($projectsItemTypeID, $clientID, $filterProperties, $returnProperties, 'mainValue');
}
// --- SECOND PART RESULTS ---
while ($row = $projectsQueryResults->fetch_assoc()) {
$results[] = $row;
}
// And return XML results
RSReturnArrayQueryResults($results);
?>
| 59.772277 | 160 | 0.763956 |
23e0b37666294d49c13fc4ed06d6222ec1446fed | 1,093 | js | JavaScript | flow-admin/src/main/resources/static/assets/DeptTree.935e5a2c.js | hcxiaoliu/flow | 444a347e83e46708850f936cde89d279c5b376fb | [
"Apache-2.0"
] | null | null | null | flow-admin/src/main/resources/static/assets/DeptTree.935e5a2c.js | hcxiaoliu/flow | 444a347e83e46708850f936cde89d279c5b376fb | [
"Apache-2.0"
] | null | null | null | flow-admin/src/main/resources/static/assets/DeptTree.935e5a2c.js | hcxiaoliu/flow | 444a347e83e46708850f936cde89d279c5b376fb | [
"Apache-2.0"
] | null | null | null | import{_ as e}from"./Tree.vue_vue&type=style&index=0&lang.e8be757a.js";import{a as t}from"./system.79f5df33.js";import{k as r,r as a,I as s,K as n,o,n as i,q as l}from"./vendor.56d2c57f.js";import"./index.407e4786.js";import"./useContextMenu.553121b5.js";/* empty css */import"./useExpose.46777d04.js";var c=r({name:"DeptTree",components:{BasicTree:e},emits:["select"],setup(e,{emit:r}){const n=a([]);function o(){return e=this,r=null,a=function*(){n.value=yield t()},new Promise(((t,s)=>{var n=e=>{try{i(a.next(e))}catch(t){s(t)}},o=e=>{try{i(a.throw(e))}catch(t){s(t)}},i=e=>e.done?t(e.value):Promise.resolve(e.value).then(n,o);i((a=a.apply(e,r)).next())}));var e,r,a}return s((()=>{o()})),{treeData:n,handleSelect:function(e,t){r("select",e[0])}}}});const d={class:"bg-white m-4 mr-0 overflow-hidden"};c.render=function(e,t,r,a,s,c){const m=n("BasicTree");return o(),i("div",d,[l(m,{title:"部门列表",toolbar:"",search:"",clickRowToExpand:!1,treeData:e.treeData,replaceFields:{key:"id",title:"deptName"},onSelect:e.handleSelect},null,8,["treeData","onSelect"])])};export default c;
| 546.5 | 1,092 | 0.664227 |
964a5fb47c86e0aecd16630f65ac399191d273f5 | 614 | js | JavaScript | app/src/js/controllers/taskController.js | lightwish01/test-electron | 2fd7af23d48c458e0a90d85d36d90c7ae4d3d776 | [
"MIT"
] | null | null | null | app/src/js/controllers/taskController.js | lightwish01/test-electron | 2fd7af23d48c458e0a90d85d36d90c7ae4d3d776 | [
"MIT"
] | null | null | null | app/src/js/controllers/taskController.js | lightwish01/test-electron | 2fd7af23d48c458e0a90d85d36d90c7ae4d3d776 | [
"MIT"
] | null | null | null | 'use strict'
const db = require("../db/connection")
const taskModel = require("../db/tasks")
app.controller('taskController', function ($scope, $http, $timeout, $window) {
$scope.tasks = []
taskModel.createTable()
$scope.save = function(task) {
delete $scope.teste
taskModel.add({ task: task, callback: function() {
$scope.tasks.push(task)
}})
}
$scope.load = function() {
taskModel.load(function(results) {
console.log("angular:" + JSON.stringify(results))
$scope.tasks = results
})
}
$scope.load()
})
| 21.928571 | 78 | 0.568404 |
bf85580bdedb5c99327c7d6556fa101680257790 | 5,986 | swift | Swift | DKMacLibrary/Classes/ExCalendars.swift | chorokichi/DKMacLibrary | d2eb4fb8b9c54f502d507d4dbebe10a06237e186 | [
"MIT"
] | null | null | null | DKMacLibrary/Classes/ExCalendars.swift | chorokichi/DKMacLibrary | d2eb4fb8b9c54f502d507d4dbebe10a06237e186 | [
"MIT"
] | null | null | null | DKMacLibrary/Classes/ExCalendars.swift | chorokichi/DKMacLibrary | d2eb4fb8b9c54f502d507d4dbebe10a06237e186 | [
"MIT"
] | null | null | null | //
// PTCalendars.swift
// PeopleTable
//
// Created by yuya on 2017/06/01.
//  Copyright © 2017 yuya. All rights reserved.
//
import Foundation
public protocol ExCalendarsProtocol {
associatedtype T
func getMarkedDay() -> RichDate<T>
func getFirstDay() -> RichDate<T>
func getLastDay() -> RichDate<T>
func getDays() -> [RichDate<T>]
func getDay(_ year: Int, _ month: Int, _ day: Int) -> RichDate<T>?
func update(_ newDate: Date)
func updateContent(_ index: Int, new: T)
func updateAllContents(by newList: [T])
func initAllContents(by new: T)
}
/// A month calendar.
/// Usage:
/// 1. The initializer takes a Date, the calendar size (number of weeks) and the starting weekday. The day of the Date passed here is highlighted and that month's calendar is displayed.
/// 2. Updating the date with `update` switches the displayed month as well.
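///
/// A minimal usage sketch (the `String` payload and the six-week size are illustrative choices, not requirements):
///
///     let calendar = ExCalendars<String>(markedDate: Date(), numOfWeeks: 6, startWeek: .Sun)
///     calendar.initAllContents(by: "")   // seed every cell with an empty payload
///     calendar.printCalendarAsGrid()     // prints the month grid around the marked day
///     calendar.update(Date())            // moving the marked date switches the month too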
public class ExCalendars<T> {
private let numOfWeeks: Int
public let startWeek: WeekDay
    // The date currently marked (highlighted) on the calendar
private var markedDate: RichDate<T>
private var days: [RichDate<T>] = []
public init(markedDate: Date = Date(), numOfWeeks: Int, startWeek: WeekDay = .Sun) {
let date: RichDate<T> = RichDate(from: markedDate)
self.markedDate = date
self.numOfWeeks = numOfWeeks
        guard numOfWeeks > 0 else {fatalError("The number of weeks must be greater than 0")}
self.startWeek = startWeek
self.construct()
}
public func printCalendarAsGrid() {
ExLog.log("################################", format: .NoPostFix)
var weekEnd = startWeek
var log = ""
        // Print the weekday header row
(0...5).forEach { (_) in
log = "\(log)\(weekEnd.getKey()) "
weekEnd.next()
}
log = "\(log)\(weekEnd.getKey()) "
ExLog.log(log, format: .NoPostFix)
log.removeAll()
// 日付表示
var months: [String] = []
days.forEach { (richDate: RichDate<T>) in
let yearAndMonth = "\(richDate.year)/\(richDate.month)"
if !months.contains(yearAndMonth) {
months.append(yearAndMonth)
}
let dayStr = "\(richDate.day)"
let prefix: String
let remark = ((richDate == self.markedDate) ? "*" : " ")
if dayStr.count == 1 {
prefix = " \(remark)"
} else {
prefix = "\(remark)"
}
log = "\(log)\(prefix)\(dayStr) "
if richDate.weekDay == weekEnd {
months.forEach { (subInfo) in
log = "\(log)\(subInfo) "
}
ExLog.log(log, format: .NoPostFix)
log.removeAll()
months.removeAll()
}
}
ExLog.log("################################", format: .NoPostFix)
}
    /// Prints the dates to the console as a list, using the debugDescription of each RichDate's content (`data`).
    /// To customize what gets printed, make the payload type T conform to CustomDebugStringConvertible
    /// and define debugDescription yourself.
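    /// A sketch of such a payload type (the `Shift` type below is hypothetical, purely for illustration):
    ///
    ///     struct Shift: CustomDebugStringConvertible {
    ///         let title: String
    ///         var debugDescription: String { title }
    ///     }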
public func printCalendarAsList() {
ExLog.log("################################", format: .NoPostFix)
days.forEach { (richDate: RichDate<T>) in
let header = String(format: "%04d/%02d/%02d(%@): ", richDate.year, richDate.month, richDate.day, richDate.weekDay.getKey())
let content = richDate.data.debugDescription
ExLog.log("\(header)\(content)", format: .NoPostFix)
}
ExLog.log("################################", format: .NoPostFix)
}
private func construct() {
        // The first date shown on the calendar, e.g.:
        // April, 2019
        // Sun Mon Tue Wed Thu Fri Sat
// 31 1 2 3 4 5 6
// 7 8 9 10 11 12 13
// 14 15 16 17 18 19 20
let firstDayInWeek = self.markedDate.createFirstDayInThisWeek(startWeekDay: self.startWeek)
let firstDayOnCalendar = self.getFirstDayOnCalendar(firstDayInWeek: firstDayInWeek)
self.days = []
for i in 0 ..< numOfWeeks*7 {
let date = ExDate.dateByAddingDay(value: i, date: firstDayOnCalendar.date)
let lDate: RichDate<T> = RichDate(from: date)
self.days.append(lDate)
}
}
private func getFirstDayOnCalendar(firstDayInWeek: RichDate<T>) -> RichDate<T> {
var firstDayOnCalendar = firstDayInWeek
var i = numOfWeeks
while firstDayOnCalendar.month == self.markedDate.month && i > 1 {
i = i - 1
firstDayOnCalendar = firstDayOnCalendar.createRichDate(added: -7)
}
return firstDayOnCalendar
}
}
extension ExCalendars: ExCalendarsProtocol {
public func getMarkedDay() -> RichDate<T> {
return self.markedDate
}
public func getFirstDay() -> RichDate<T> {
return days.first!
}
public func getLastDay() -> RichDate<T> {
return days.last!
}
public func getDays() -> [RichDate<T>] {
return days
}
public func getDay(_ year: Int, _ month: Int, _ day: Int) -> RichDate<T>? {
let filteredDays = days.filter {($0.year == year && $0.month == month && $0.day == day)}
guard filteredDays.count < 2 else {
ExLog.fatalError("A date should be only one. \(filteredDays)")
return nil
}
if filteredDays.count == 0 {
return nil
} else {
return filteredDays[0]
}
}
public func update(_ newDate: Date) {
let date: RichDate<T> = RichDate(from: newDate)
self.markedDate = date
self.construct()
}
public func updateContent(_ index: Int, new: T) {
self.days[index].data = new
}
public func updateAllContents(by newList: [T]) {
assert(self.days.count == newList.count)
for index in 0 ..< self.days.count {
self.days[index].data = newList[index]
}
}
public func initAllContents(by new: T) {
for index in 0 ..< self.days.count {
self.days[index].data = new
}
}
}
| 32.010695 | 135 | 0.542432 |
df28c74a6c9312c1eaa17708998d4ec8aea4239d | 2,915 | rb | Ruby | app/models/copy_staging/serialize_work.rb | sciencehistory/scihist_digicoll | 2943d5eb5f261eae96ebe8c1253294bd4a8fefa2 | [
"Apache-2.0"
] | 4 | 2019-08-29T17:37:29.000Z | 2022-02-21T20:39:20.000Z | app/models/copy_staging/serialize_work.rb | sciencehistory/scihist_digicoll | 2943d5eb5f261eae96ebe8c1253294bd4a8fefa2 | [
"Apache-2.0"
] | 1,103 | 2018-11-28T18:53:17.000Z | 2022-03-31T15:13:27.000Z | app/models/copy_staging/serialize_work.rb | sciencehistory/scihist_digicoll | 2943d5eb5f261eae96ebe8c1253294bd4a8fefa2 | [
"Apache-2.0"
] | 2 | 2021-01-11T17:25:11.000Z | 2021-04-21T20:41:40.000Z | module CopyStaging
# Serializes a work as JSON, along with info on storage buckets files are stored in.
# Used by the scihist:copy_staging_work:serialize_work rake task, which is used by
# the `heroku:copy_data` rake task. This is normally run on a staging server.
#
# All of this is to copy works from staging to a local dev instance.
#
# See also corresponding RestoreWork model. The JSON generated by this model
# is consumed by that one, so must match.
#
# SerializeWork.new(work).as_json #=> Hash
# SerializeWork.new(work).to_json #=> serialized string
#
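  # The emitted hash has roughly this top-level shape (values abbreviated; bucket
  # details depend on the staging Shrine configuration):
  #
  #     { "models" => [...], "shrine_s3_storage_staging" => { "store" => {...}, "kithe_derivatives" => {...} } }
  #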
class SerializeWork
attr_accessor :work
def initialize(work)
@work = work
end
def as_json
{
"models" => serialized_models,
"shrine_s3_storage_staging" => {
"store" => shrine_config(:store),
"kithe_derivatives" => shrine_config(:kithe_derivatives)
}
}
end
def to_json
as_json.to_json
end
private
# * The main work for this serializer
    # * all its children (recursively, for multi-level nesting)
# * Any OralHistoryContent "sidecar" for main work (we don't bother checking children, cause
# we don't do that with our data right now)
#
# They are all serialized as a one item hash, with model name as key, and
# attributes as value.
#
# We try to serialize in order, so restoring in order will not violate any foreign
# key referential integrity. But circular foreign key referential integrity on parent/representative
# for children makes that not entirely possible.
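    # As a rough illustration, the returned value is a flat array of one-item hashes, e.g.
    # (attribute hashes abbreviated; the class names depend on the actual records):
    #   [ { "Work" => {...} }, { "Asset" => {...} }, { "OralHistoryContent" => {...} } ]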
def serialized_models
serialize_model(work)
end
# A method called recursively, initially by #serialized_models, to get all children
def serialize_model(model)
model_attributes = model.attributes
oral_history_content = []
if model.kind_of?(Kithe::Asset)
# hacky workaround
# https://github.com/sciencehistory/kithe/pull/75
model_attributes.delete("representative_id")
model_attributes.delete("leaf_representative_id")
elsif model.kind_of?(Work) && model.oral_history_content
oral_history_content << { model.oral_history_content.class.name => model.oral_history_content.attributes.except("id") }
end
mine = [{ model.class.name => model_attributes }]
children = model.members.flat_map do |member|
serialize_model(member)
end
mine + children + oral_history_content
end
def shrine_config(shrine_storage_key)
storage = Shrine.storages[shrine_storage_key.to_sym]
unless storage.kind_of?(Shrine::Storage::S3)
raise ArgumentError, "We only know how to work with S3 storage for Shrine.storages[:store], not #{storage.class.name}"
end
{
"bucket_name" => storage.bucket.name,
"prefix" => storage.prefix
}
end
end
end
| 32.388889 | 127 | 0.678559 |
6d35fc89c330567a2d93dd7b41615be9841e62b1 | 2,287 | rs | Rust | proz_pooldance/src/resource.rs | maciejczyzewski/sem6 | a1ef556afef8c59b7148f0e6c2378ed0afc6910f | [
"MIT"
] | null | null | null | proz_pooldance/src/resource.rs | maciejczyzewski/sem6 | a1ef556afef8c59b7148f0e6c2378ed0afc6910f | [
"MIT"
] | 8 | 2021-06-08T21:07:23.000Z | 2022-03-12T00:20:10.000Z | proz_pooldance/src/resource.rs | maciejczyzewski/sem6 | a1ef556afef8c59b7148f0e6c2378ed0afc6910f | [
"MIT"
] | 1 | 2020-03-29T11:33:05.000Z | 2020-03-29T11:33:05.000Z | use crate::node::{broadcast, Message, Node};
use rand::Rng;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Resource {
pub code: String,
pub token: i32,
pub owner: i32,
pub status: String,
pub transfer_with: i32,
pub transfer_luck: i32,
pub time: i32,
pub epoch: i32,
// time out if not resting
// stilling if timeout
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct PairMessage {
pub owner_sex: i32,
pub owner: i32,
pub collecting_resources: HashMap<String, i32>,
}
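/// Seeds `resources` for node `nr` with a few starter items under randomly generated
/// keys: two entries with code "masc" and five with code "warzywa 1kg", all owned by `nr`.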
pub fn init(
nr: i32,
resources: &mut HashMap<String, Resource>,
transfer_luck: i32,
) {
println!("HELLO FROM INIT!");
for _ in 0..2 {
let idx: i32 =
(nr * 100_000) + (rand::thread_rng().gen::<i32>() % 1000).abs();
resources.insert(
idx.to_string(),
Resource {
token: idx,
owner: nr,
code: "masc".to_string(),
status: "green".to_string(),
                transfer_with: -1, // FIXME: node values should not be negative
transfer_luck,
time: 0,
epoch: 0,
},
);
}
for _ in 0..5 {
let idx: i32 =
(nr * 100_000) + (rand::thread_rng().gen::<i32>() % 1000).abs();
resources.insert(
idx.to_string(),
Resource {
token: idx,
owner: nr,
code: "warzywa 1kg".to_string(),
status: "green".to_string(),
                transfer_with: -1, // FIXME: node values should not be negative
transfer_luck,
time: 0,
epoch: 0,
},
);
}
}
pub fn spread(node: &Node) {
for (_key, val) in node.resources.iter() {
/*if val.owner != node.id.nr {
continue;
}*/
let val2 = val.clone();
let resource_r = serde_json::to_string(&val2).unwrap();
broadcast(
node,
Message {
addr: node.id.nr,
code: "resource-info".to_string(),
data: resource_r.to_string(),
},
);
}
}
| 25.988636 | 76 | 0.491911 |
2da9cd2bf4e50fba2ecf3ad663f87727a9e91bd1 | 145 | sql | SQL | Ren.CMS.Net/Source/Ren.CMS.Net-Modules/Ren.CMS.Net.Installer/InstallModule/Areas/Installer/Schema/nfcms_Permissionkeys.Table.sql | nfMalde/Ren.CMS.NET | 47173a06dee294dd4b17966b8b561f0b962f55e3 | [
"MIT"
] | null | null | null | Ren.CMS.Net/Source/Ren.CMS.Net-Modules/Ren.CMS.Net.Installer/InstallModule/Areas/Installer/Schema/nfcms_Permissionkeys.Table.sql | nfMalde/Ren.CMS.NET | 47173a06dee294dd4b17966b8b561f0b962f55e3 | [
"MIT"
] | null | null | null | Ren.CMS.Net/Source/Ren.CMS.Net-Modules/Ren.CMS.Net.Installer/InstallModule/Areas/Installer/Schema/nfcms_Permissionkeys.Table.sql | nfMalde/Ren.CMS.NET | 47173a06dee294dd4b17966b8b561f0b962f55e3 | [
"MIT"
] | null | null | null | CREATE TABLE ren_cms_Permissionkeys (
defaultVal VARCHAR (50) NOT NULL
, langLine VARCHAR (50) NOT NULL
, pkey VARCHAR (150) NOT NULL
)
| 24.166667 | 37 | 0.717241 |
cd726fc45f7d425ad7fa071af38093cc41c714cc | 965 | cs | C# | src/Web/Masa.Mc.Web.Admin/Store/NoticeState.cs | masastack/MASA.MC | 3b15e43a70b028e41a946bac767bed5d575b57c9 | [
"Apache-2.0"
] | null | null | null | src/Web/Masa.Mc.Web.Admin/Store/NoticeState.cs | masastack/MASA.MC | 3b15e43a70b028e41a946bac767bed5d575b57c9 | [
"Apache-2.0"
] | null | null | null | src/Web/Masa.Mc.Web.Admin/Store/NoticeState.cs | masastack/MASA.MC | 3b15e43a70b028e41a946bac767bed5d575b57c9 | [
"Apache-2.0"
] | null | null | null | // Copyright (c) MASA Stack All rights reserved.
// Licensed under the Apache License. See LICENSE.txt in the project root for license information.
namespace Masa.Mc.Web.Admin.Store;
public class NoticeState
{
public bool IsRead => !Notices.Any(x => !x.IsRead);
public List<WebsiteMessageDto> Notices
{
get => _notices;
set
{
if (_notices != value)
{
_notices = value;
OnNoticeChanged?.Invoke();
}
}
}
public delegate Task NoticeChanged();
public event NoticeChanged? OnNoticeChanged;
private List<WebsiteMessageDto> _notices = new();
public void SetNotices(List<WebsiteMessageDto> notices)
{
Notices = notices;
}
public void SetAllRead()
{
var notices = Notices.Select(x =>
{
x.IsRead = true;
return x;
});
Notices = notices.ToList();
}
}
| 21.931818 | 98 | 0.568912 |
e2c5cc7c68696fcfd221d9ba74078c32e1c0b2ad | 2,311 | py | Python | scripts/filter_toptags.py | MTG/jamendo-dataset | 472a14524bc8f257c9ad921307180a80cc5fd1f3 | [
"Apache-2.0"
] | 2 | 2019-06-06T03:48:20.000Z | 2019-06-11T08:19:28.000Z | scripts/filter_toptags.py | MTG/jamendo-dataset | 472a14524bc8f257c9ad921307180a80cc5fd1f3 | [
"Apache-2.0"
] | 6 | 2019-05-15T13:20:21.000Z | 2019-06-06T17:06:02.000Z | scripts/filter_toptags.py | MTG/jamendo-dataset | 472a14524bc8f257c9ad921307180a80cc5fd1f3 | [
"Apache-2.0"
] | null | null | null | import argparse
import commons
import get_statistics
import util
def filter_tags(tracks, tags, tag_threshold, directory=None, tags_file=None):
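    # Keep only the `tag_threshold` most frequent tags (by track count), write the
    # optional statistics / tag-list outputs, then strip all other tags from every
    # track and drop tracks that are left with no tags at all.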
if directory is not None:
util.mkdir_p(directory)
# TODO: refactor to properly handle and not disconnect category+tag
tags_merged = {}
tags_with_prefix = {}
for category, category_tags in tags.items():
tags_merged.update(category_tags)
if tags_file is not None:
tags_with_prefix.update({tag: category + commons.TAG_HYPHEN + tag for tag in category_tags})
stats, total = get_statistics.get_statistics('all', tracks, {'all': tags_merged})
stats = stats.sort_values(by='tracks', ascending=False)
stats_filtered = stats[:tag_threshold]
if directory is not None:
get_statistics.write_statistics('all', stats_filtered, directory)
if tags_file is not None:
tag_list = stats_filtered['tag'].replace(tags_with_prefix).sort_values()
tag_list.to_csv(tags_file, sep='\t', index=False, header=False)
tags_top = set(stats_filtered['tag'])
tracks_to_delete = []
for track_id, track in tracks.items():
total_tags = 0
for category in commons.CATEGORIES:
track[category] &= tags_top
total_tags += len(track[category])
if total_tags == 0:
tracks_to_delete.append(track_id)
for track in tracks_to_delete:
tracks.pop(track)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Filters out less frequent tags according to the number of tracks')
parser.add_argument('tsv_file', help=commons.METADATA_DESCRIPTION)
parser.add_argument('tag_threshold', type=int, help='threshold number of tags')
parser.add_argument('output_file', help='output tsv file')
parser.add_argument('--stats-directory', default=None,
help='if this argument is set, statistics will be recomputed and written to this directory')
parser.add_argument('--tag-list', default=None, help='text file with filtered tags (top-n)')
args = parser.parse_args()
tracks, tags, extra = commons.read_file(args.tsv_file)
filter_tags(tracks, tags, args.tag_threshold, args.stats_directory, args.tag_list)
commons.write_file(tracks, args.output_file, extra)
| 39.169492 | 116 | 0.701428 |
e4636646189ee9a680737b2e169b49f579b29e60 | 2,414 | cc | C++ | hash.djb.native/binding.cc | bigeasy/hash | 48d2ca9f6dc9b37e1f5245e8849c230db01c4e68 | [
"MIT"
] | 10 | 2015-06-22T15:30:56.000Z | 2018-01-02T19:14:47.000Z | hash.djb.native/binding.cc | bigeasy/hash | 48d2ca9f6dc9b37e1f5245e8849c230db01c4e68 | [
"MIT"
] | 34 | 2015-05-12T21:29:46.000Z | 2022-02-07T06:27:24.000Z | hash.djb.native/binding.cc | bigeasy/hash | 48d2ca9f6dc9b37e1f5245e8849c230db01c4e68 | [
"MIT"
] | 1 | 2015-09-17T21:39:23.000Z | 2015-09-17T21:39:23.000Z | #include <node.h>
#include <node_buffer.h>
// #include <string_bytes.h>
#include <node_object_wrap.h>
#include <v8.h>
#include <stdio.h>
extern "C" {
typedef union {
void* buffer;
uint32_t number;
} hash_t;
extern hash_t hash_allocate (uint32_t seed);
extern void hash_free (hash_t hash);
extern int hash_block_size();
extern void hash_update(hash_t* hash, void* key, int len);
extern void hash_remainder(hash_t* hash, void* key, int len, void* out);
}
using namespace v8;
using namespace node;
class Hash : public ObjectWrap {
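  // Native Node.js addon (old V8 `Arguments` API) wrapping the C hash functions
  // declared above: block() feeds Buffer data into hash_update(), and
  // remainder() finalizes via hash_remainder() and returns the 32-bit digest.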
public:
static void Initialize(Handle<Object> target);
protected:
static Handle<Value> New(const Arguments& args);
static Handle<Value> HashBlock(const Arguments& args);
static Handle<Value> HashRemainder (const Arguments& args);
Hash () {
hash_.number = 0;
}
~Hash () {
}
private:
hash_t hash_;
};
void Hash::Initialize (Handle<Object> target) {
HandleScope scope;
Local<FunctionTemplate> t = FunctionTemplate::New(New);
t->InstanceTemplate()->SetInternalFieldCount(1);
NODE_SET_PROTOTYPE_METHOD(t, "block", HashBlock);
NODE_SET_PROTOTYPE_METHOD(t, "remainder", HashRemainder);
target->Set(String::New("Hash"), t->GetFunction());
}
Handle<Value> Hash::New (const Arguments& args) {
HandleScope scope;
uint32_t seed;
if (args.Length() == 0 || !args[0]->IsNumber()) {
seed = 0;
} else {
seed = args[0]->Uint32Value();
}
Hash* hash = new Hash();
hash->hash_ = hash_allocate(seed);
hash->Wrap(args.This());
return args.This();
}
Handle<Value> Hash::HashBlock (const Arguments& args) {
HandleScope scope;
Hash *hash = ObjectWrap::Unwrap<Hash>(args.This());
if (!Buffer::HasInstance(args[0])) {
return ThrowException(Exception::TypeError(String::New("Not a buffer")));
}
hash_update(&hash->hash_, Buffer::Data(args[0]), Buffer::Length(args[0]));
return args.This();
}
Handle<Value> Hash::HashRemainder (const Arguments& args) {
HandleScope scope;
uint32_t value;
Hash *hash = ObjectWrap::Unwrap<Hash>(args.This());
hash_remainder(&hash->hash_, Buffer::Data(args[0]), args[1]->Uint32Value(), &value);
return scope.Close(StringBytes::Encode(reinterpret_cast<const char*>(&value), 4, BUFFER));
}
void init(Handle<Object> target) {
Hash::Initialize(target);
}
NODE_MODULE(djb, init);
| 23.436893 | 94 | 0.6657 |
5fa3e3933f9d4c24b8d3df52b5e3a53fa44b3e6e | 1,644 | rb | Ruby | spec/support/lib/spec_shell.rb | sotownsend/plumbus | a608f4c637a9e519ea22f648267d3472a12997a7 | [
"MIT"
] | 1 | 2016-10-04T16:44:32.000Z | 2016-10-04T16:44:32.000Z | spec/support/lib/spec_shell.rb | sotownsend/plumbus | a608f4c637a9e519ea22f648267d3472a12997a7 | [
"MIT"
] | null | null | null | spec/support/lib/spec_shell.rb | sotownsend/plumbus | a608f4c637a9e519ea22f648267d3472a12997a7 | [
"MIT"
] | null | null | null | require 'open3'
class SpecShell
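  # Runs the given command via Open3.popen3, captures stdout/stderr (IO.select
  # plus read) under Timeout::timeout, and records either the exit status or the
  # fact that the command timed out; cleanup kills the child process if needed.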
attr_accessor :stdout
attr_accessor :stderr
#Will execute right away
def initialize *args, timeout: 0.5
@timeout = timeout
@args = args
@stdout = []
@stderr = []
@exit_status = nil
@did_timeout = false
execute
end
def execute
Open3.popen3 *@args do |_stdin, _stdout, _stderr, wt|
@pid = wt[:pid] #Capture pid now as it will be unavailable later
begin
Timeout::timeout(@timeout) do
res = select [_stdout, _stderr], [], []
res.flatten.each do |p|
case p
when _stdout
@stdout << p.read
when _stderr
@stderr << p.read
end
end
end
rescue Timeout::Error
@did_timeout = true
rescue => e
ensure
unless @did_timeout
@exit_status = wt.value
end
cleanup
end
end
end
def timed_out?
return @did_timeout
end
def exit_value
return @exit_status.exitstatus if @exit_status
return nil
end
def succeeded?
exit_value == 0
end
def failed?
exit_value != 0 && exit_value != nil && @did_timeout == false
end
def cmd
return @args[0]
end
def inspect
stdout = @stdout.map{|e| "[stdout]: #{e}"}.join("\n")
stderr = @stderr.map{|e| "[stderr]: #{e}"}.join("\n")
str = <<HERE
#{"-"*100}
shell>#{@args.join(" ")}
#{stdout}
#{stderr}
#{"-"*100}
HERE
"\n" + str.split("\n").select{|e| e != ""}.join("\n") + "\n"
end
private
def cleanup
Process.kill :KILL, @pid if @pid
rescue Errno::ESRCH
rescue
end
end
| 18.266667 | 70 | 0.549878 |
e4049e064a6cfb9dbdf7cfbccf993857af77f8dc | 944 | cs | C# | C# OOP/Exams/CSharpOOPRetakeExam-18April2021/Easter/Repositories/EggRepository.cs | dimitar-yo-dimitrov/SoftUni-Software-Engineering | de1ea2871ec5b3853eaa1a93ccd0686d9dda0416 | [
"MIT"
] | null | null | null | C# OOP/Exams/CSharpOOPRetakeExam-18April2021/Easter/Repositories/EggRepository.cs | dimitar-yo-dimitrov/SoftUni-Software-Engineering | de1ea2871ec5b3853eaa1a93ccd0686d9dda0416 | [
"MIT"
] | null | null | null | C# OOP/Exams/CSharpOOPRetakeExam-18April2021/Easter/Repositories/EggRepository.cs | dimitar-yo-dimitrov/SoftUni-Software-Engineering | de1ea2871ec5b3853eaa1a93ccd0686d9dda0416 | [
"MIT"
] | null | null | null | using System.Collections.Generic;
using System.Linq;
using Easter.Models.Eggs.Contracts;
using Easter.Repositories.Contracts;
namespace Easter.Repositories
{
public class EggRepository : IRepository<IEgg>
{
private readonly List<IEgg> eggs;
public EggRepository()
{
this.eggs = new List<IEgg>();
}
public IReadOnlyCollection<IEgg> Models { get; private set; }
= new List<IEgg>();
public void Add(IEgg model)
{
this.eggs.Add(model);
Models = eggs;
}
public bool Remove(IEgg model)
{
if (eggs.Contains(model))
{
eggs.Remove(model);
Models = eggs;
return true;
}
return false;
}
public IEgg FindByName(string name)
=> this.eggs.FirstOrDefault(e => e.Name == name);
}
}
| 20.521739 | 70 | 0.525424 |
c4b421f980c2a384fcb3a9806084e3725979098a | 442 | cc | C++ | caffe2/opt/distributed_converter.cc | brooks-anderson/pytorch | dd928097938b6368fc7e2dc67721550d50ab08ea | [
"Intel"
] | 7 | 2021-05-29T16:31:51.000Z | 2022-02-21T18:52:25.000Z | caffe2/opt/distributed_converter.cc | stas00/pytorch | 6a085648d81ce88ff59d6d1438fdb3707a0d6fb7 | [
"Intel"
] | 1 | 2021-05-10T01:18:33.000Z | 2021-05-10T01:18:33.000Z | caffe2/opt/distributed_converter.cc | stas00/pytorch | 6a085648d81ce88ff59d6d1438fdb3707a0d6fb7 | [
"Intel"
] | 1 | 2021-12-26T23:20:06.000Z | 2021-12-26T23:20:06.000Z | #include "caffe2/opt/converter.h"
namespace caffe2 {
namespace {
using namespace nom;
using namespace nom::repr;
TRIVIAL_CONVERTER(Declare);
// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)
REGISTER_CONVERTER(Declare, DeclareConverter);
TRIVIAL_CONVERTER(Export);
// NOLINTNEXTLINE(cppcoreguidelines-avoid-non-const-global-variables)
REGISTER_CONVERTER(Export, ExportConverter);
} // namespace
} // namespace caffe2
| 23.263158 | 69 | 0.807692 |
43a2eefd7eeb3f7070673487b70f2520d303ad30 | 39 | ts | TypeScript | app/@esri/calcite-ui-icons/js/caretSquareUp24F.d.ts | lizeidsness/minimalist | 57657f7b37d2251b95dafe92e1d3776301a3a1c0 | [
"Apache-2.0"
] | 3 | 2020-07-17T21:04:01.000Z | 2021-11-30T15:14:45.000Z | app/@esri/calcite-ui-icons/js/caretSquareUp24F.d.ts | lizeidsness/minimalist | 57657f7b37d2251b95dafe92e1d3776301a3a1c0 | [
"Apache-2.0"
] | 4 | 2020-04-16T19:24:25.000Z | 2021-11-29T20:15:32.000Z | app/@esri/calcite-ui-icons/js/caretSquareUp24F.d.ts | lizeidsness/minimalist | 57657f7b37d2251b95dafe92e1d3776301a3a1c0 | [
"Apache-2.0"
] | 5 | 2020-06-11T20:51:53.000Z | 2021-11-30T15:22:13.000Z | export const caretSquareUp24F: string;
| 19.5 | 38 | 0.846154 |
bdfeb0ceb81d3ab145ddc50a2fff9b6ca6bc29cf | 2,787 | swift | Swift | Mia/UI/WebView/WebViewModal.swift | fossabot/Mia | e67fd12abe1452eb2dafcfae3ec5609be413a3e3 | [
"MIT"
] | null | null | null | Mia/UI/WebView/WebViewModal.swift | fossabot/Mia | e67fd12abe1452eb2dafcfae3ec5609be413a3e3 | [
"MIT"
] | null | null | null | Mia/UI/WebView/WebViewModal.swift | fossabot/Mia | e67fd12abe1452eb2dafcfae3ec5609be413a3e3 | [
"MIT"
] | null | null | null | import UIKit
public class WebViewModal: UINavigationController {
public enum Theme {
case light
case dark
case color(UIColor, UIColor) // tint, buttons
}
weak var webViewDelegate: UIWebViewDelegate? = nil
public convenience init(urlString: String) {
var urlString = urlString
if !urlString.hasPrefix("https://") && !urlString.hasPrefix("http://") {
urlString = "https://" + urlString
}
self.init(pageURL: URL(string: urlString)!)
}
public convenience init(urlString: String, theme: Theme) {
self.init(pageURL: URL(string: urlString)!, theme: theme)
}
public convenience init(pageURL: URL) {
self.init(request: URLRequest(url: pageURL))
}
public convenience init(pageURL: URL, theme: Theme) {
self.init(request: URLRequest(url: pageURL), theme: theme)
}
public init(request: URLRequest, theme: Theme = .light) {
let webViewController = WebView(aRequest: request)
webViewController.storedStatusColor = UINavigationBar.appearance().barStyle
let doneButton = UIBarButtonItem(image: Icon.WebView.dismiss, style: UIBarButtonItemStyle.plain, target: webViewController, action: #selector(WebView.doneButtonTapped))
switch theme {
case .light:
doneButton.tintColor = .darkGray
webViewController.buttonColor = .darkGray
webViewController.titleColor = .black
UINavigationBar.appearance().barStyle = .default
case .dark:
doneButton.tintColor = .white
webViewController.buttonColor = .white
webViewController.titleColor = .groupTableViewBackground
UINavigationBar.appearance().barStyle = .black
case .color(let tint, let buttoncolor):
doneButton.tintColor = tint
webViewController.buttonColor = buttoncolor
webViewController.titleColor = tint
}
if (UIDevice.current.userInterfaceIdiom == .pad) {
webViewController.navigationItem.leftBarButtonItem = doneButton
} else {
webViewController.navigationItem.rightBarButtonItem = doneButton
}
super.init(rootViewController: webViewController)
}
override init(nibName nibNameOrNil: String?, bundle nibBundleOrNil: Bundle?) {
super.init(nibName: nibNameOrNil, bundle: nibBundleOrNil)
}
required public init(coder aDecoder: NSCoder) {
fatalError("init(coder:) has not been implemented")
}
override public func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(false)
}
}
| 31.314607 | 176 | 0.627915 |
ef4695cf17e6cd7d56bf62e7540bbfcd315358de | 88 | php | PHP | resources/views/home.blade.php | shiivaa68/shop-admin | 107505b614dc8024451bcbe8e2889db54e525ad9 | [
"MIT"
] | null | null | null | resources/views/home.blade.php | shiivaa68/shop-admin | 107505b614dc8024451bcbe8e2889db54e525ad9 | [
"MIT"
] | null | null | null | resources/views/home.blade.php | shiivaa68/shop-admin | 107505b614dc8024451bcbe8e2889db54e525ad9 | [
"MIT"
] | null | null | null | @extends('layouts.admin')
@section('content')
<h3>
Admin Panel
</h3>
@endsection
| 8 | 25 | 0.636364 |
fa3b963e8bff7662eee819b9d93b379986e5926b | 3,241 | cpp | C++ | tests/utils_test.cpp | louiz/batajelo | 4d8edce8da9d3b17dbad68eb4881d7f6fee2f76e | [
"BSL-1.0",
"BSD-2-Clause",
"Zlib",
"MIT"
] | 7 | 2015-01-28T09:17:08.000Z | 2020-04-21T13:51:16.000Z | tests/utils_test.cpp | louiz/batajelo | 4d8edce8da9d3b17dbad68eb4881d7f6fee2f76e | [
"BSL-1.0",
"BSD-2-Clause",
"Zlib",
"MIT"
] | null | null | null | tests/utils_test.cpp | louiz/batajelo | 4d8edce8da9d3b17dbad68eb4881d7f6fee2f76e | [
"BSL-1.0",
"BSD-2-Clause",
"Zlib",
"MIT"
] | 1 | 2020-07-11T09:20:25.000Z | 2020-07-11T09:20:25.000Z | #include <utils/base64.hpp>
#include <utils/string.hpp>
#include <utils/scopeguard.hpp>
#include <utils/zlib.hpp>
#include <utils/time.hpp>
#include "catch.hpp"
TEST_CASE("Both", "[base64]")
{
std::string hello = "Coucou ici";
CHECK(base64_decode(base64_encode(reinterpret_cast<const unsigned char*>(hello.c_str()), hello.size())) ==
"Coucou ici");
}
TEST_CASE("encode", "[base64]")
{
std::string encoded("TWFuIGlzIGRpc3Rpbmd1aXNoZWQsIG5vdCBvbmx5IGJ5IGhpcyByZWFzb24sIGJ1dCBieSB0aGlzIHNpbmd1bGFyIHBhc3Npb24gZnJvbSBvdGhlciBhbmltYWxzLCB3aGljaCBpcyBhIGx1c3Qgb2YgdGhlIG1pbmQsIHRoYXQgYnkgYSBwZXJzZXZlcmFuY2Ugb2YgZGVsaWdodCBpbiB0aGUgY29udGludWVkIGFuZCBpbmRlZmF0aWdhYmxlIGdlbmVyYXRpb24gb2Yga25vd2xlZGdlLCBleGNlZWRzIHRoZSBzaG9ydCB2ZWhlbWVuY2Ugb2YgYW55IGNhcm5hbCBwbGVhc3VyZS4=");
CHECK(base64_decode(encoded) == "Man is distinguished, not only by his reason, but by this singular passion from other animals, which is a lust of the mind, that by a perseverance of delight in the continued and indefatigable generation of knowledge, exceeds the short vehemence of any carnal pleasure.");
}
TEST_CASE("decompressAndCompress", "[zlib]")
{
std::string original("coucou coucou");
CHECK(zlib_decompress(zlib_compress(original)) == "coucou coucou");
}
TEST_CASE("trim", "[strings]")
{
std::string original(" coucou\n");
utils::trim(original);
CHECK(original == "coucou");
original = " coucou";
utils::trim(original);
CHECK(original == "coucou");
original = "coucou ";
utils::trim(original);
CHECK(original == "coucou");
original = "coucou";
utils::trim(original);
CHECK(original == "coucou");
original = "\n\ncoucou \r\n ";
utils::trim(original);
CHECK(original == "coucou");
}
TEST_CASE("Scopeguard")
{
int i = 5;
{
CHECK(i == 5);
utils::ScopeGuard guard([&i]() {--i;});
CHECK(i == 5);
}
CHECK(i == 4);
{
CHECK(i == 4);
utils::ScopeGuard guard;
guard.add_callback([&i]() {--i;});
guard.add_callback([&i]() {--i;});
CHECK(i == 4);
}
CHECK(i == 2);
{
CHECK(i == 2);
utils::ScopeGuard guard;
guard.add_callback([&i]() {--i;});
CHECK(i == 2);
guard.disable();
}
CHECK(i == 2);
}
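// The tick tests below are consistent with a fixed tick length of 20 ms
// (1 s -> 50 ticks, 500 ms -> 25 ticks, 420 ms -> 21 ticks); get_number_of_ticks
// also consumes the converted time from `dt`, which is why `dt` ends up at zero.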
TEST_CASE("BasicTick", "[time]")
{
using namespace std::chrono_literals;
utils::Duration dt = 1s;
auto ticks = utils::get_number_of_ticks(dt);
CHECK(ticks == 50);
CHECK(dt == 0s);
}
TEST_CASE("BasicTick2", "[time]")
{
using namespace std::chrono_literals;
utils::Duration dt = 500000us;
auto ticks = utils::get_number_of_ticks(dt);
CHECK(ticks == 25);
CHECK(dt == 0s);
}
TEST_CASE("Ticks", "[time]")
{
using namespace std::chrono_literals;
utils::Duration dt = 420000us;
auto ticks = utils::get_number_of_ticks(dt);
CHECK(ticks == 21);
CHECK(dt == 0us);
}
TEST_CASE("ConvertToFloatingSeconds", "[time]")
{
using namespace std::chrono_literals;
utils::Duration dt = 8420000us;
auto secs = utils::sec(dt);
CHECK(secs == 8.42s);
dt = -8420000us;
secs = utils::sec(dt);
CHECK(secs == -8.42s);
}
TEST_CASE("NullTime", "[time]")
{
using namespace std::chrono_literals;
utils::Duration dt = 0h;
auto ticks = utils::get_number_of_ticks(dt);
CHECK(ticks == 0);
CHECK(dt == 0us);
}
| 24.930769 | 386 | 0.676026 |
b886fe138da5b970070e842133e46f478581999d | 130 | asm | Assembly | libsrc/_DEVELOPMENT/math/float/am9511/c/sccz80/cam32_sccz80_sin.asm | ahjelm/z88dk | c4de367f39a76b41f6390ceeab77737e148178fa | [
"ClArtistic"
] | 640 | 2017-01-14T23:33:45.000Z | 2022-03-30T11:28:42.000Z | libsrc/_DEVELOPMENT/math/float/am9511/c/sccz80/cam32_sccz80_sin.asm | C-Chads/z88dk | a4141a8e51205c6414b4ae3263b633c4265778e6 | [
"ClArtistic"
] | 1,600 | 2017-01-15T16:12:02.000Z | 2022-03-31T12:11:12.000Z | libsrc/_DEVELOPMENT/math/float/am9511/c/sccz80/cam32_sccz80_sin.asm | C-Chads/z88dk | a4141a8e51205c6414b4ae3263b633c4265778e6 | [
"ClArtistic"
] | 215 | 2017-01-17T10:43:03.000Z | 2022-03-23T17:25:02.000Z |
SECTION code_fp_am9511
PUBLIC cam32_sccz80_sin
EXTERN asm_am9511_sin_fastcall
defc cam32_sccz80_sin = asm_am9511_sin_fastcall
| 14.444444 | 47 | 0.884615 |
7704d3b6a6b121d5c22b841f7282967642d4a90f | 7,526 | py | Python | archivist_samples/synsation/jitsuinator.py | jitsuin-inc/archivist-samples | 26940fd1659aa9a1cd8b7aade668c107780ceadd | [
"MIT"
] | 2 | 2021-05-27T09:29:41.000Z | 2022-03-25T13:31:08.000Z | archivist_samples/synsation/jitsuinator.py | jitsuin-inc/archivist-samples | 26940fd1659aa9a1cd8b7aade668c107780ceadd | [
"MIT"
] | 14 | 2021-05-26T07:12:49.000Z | 2022-01-25T17:25:17.000Z | archivist_samples/synsation/jitsuinator.py | jitsuin-inc/archivist-samples | 26940fd1659aa9a1cd8b7aade668c107780ceadd | [
"MIT"
] | 1 | 2021-05-26T16:05:19.000Z | 2021-05-26T16:05:19.000Z | # Copyright 2019 Jitsuin, inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# WARNING: Proof of concept code: Not for release
# Copies the flow of the Jitsuinator demo script
"""Jitsuinator"""
# pylint: disable=missing-docstring
# pylint: disable=too-many-statements
import datetime
import logging
from sys import exit as sys_exit
from sys import stdout as sys_stdout
import time
import uuid
from archivist import about
from archivist.errors import ArchivistNotFoundError
from archivist.parser import common_parser
from ..testing.asset import MyAsset
from ..testing.parser import common_endpoint
from ..testing.time_warp import TimeWarp
from .util import attachment_upload_from_file
LOGGER = logging.getLogger(__name__)
def demo_flow(ac, asset_id, asset_type, tw, wait):
# Demo flow:
# -> Asset is created, nothing to see here
# -> White hat hacker reports vulnerability
# -> OEM fixes it and issues the patch
# -> Integrator approves the patch and issues new safety certificate
# -> Owner accepts new version and issues maintenance request to have
# it installed by the operator
# -> Operator schedules downtime and patches it
# -> All is well
job_corval = str(uuid.uuid4())
cve_corval = str(uuid.uuid4())
# -> Asset is created, nothing to see here
# -> White hat hacker reports vulnerability
if wait:
time.sleep(wait)
LOGGER.info("White Hat Hacker...")
else:
input("Press to enact White Hat Hacker")
cve_id = "CVE2020-deadbeef"
MyAsset(ac, asset_id, tw, "[email protected]",).report_vulnerability(
(
f"Synsation Industries {asset_type}s are vulnerable "
f"to {cve_id}. Upgrade as soon as possible."
),
cve_id,
cve_corval,
)
# -> OEM fixes it and issues the patch
if wait:
time.sleep(wait)
LOGGER.info("OEM patch...")
else:
input("Press to enact OEM issue patch")
MyAsset(ac, asset_id, tw, "[email protected]",).patch_vulnerability(
f"Patch for critical vulnerability '{cve_id}' released in version 1.6",
(
"SHA256-sum for official 1.6 release: "
"68ada47318341d060c387a765dd854b57334ab1f7322d22c155428414feb7518"
),
)
# -> Integrator approves the patch and issues new safety certificate
if wait:
time.sleep(wait)
LOGGER.info("Integrator approval...")
else:
input("Press to enact Integrator approves")
iattachment = attachment_upload_from_file(
ac, "trafficlightconformance.png", "image/png"
)
rattachment = attachment_upload_from_file(
ac, "trafficlightconformance.pdf", "application/pdf"
)
MyAsset(ac, asset_id, tw, "[email protected]",).certify_patch(
"Safety conformance approved for version 1.6. See attached conformance report",
"DVA Conformance Report attached",
{
"arc_primary_image_identity": iattachment["identity"],
"arc_attachments": [
{
"arc_display_name": "arc_primary_image",
"arc_attachment_identity": iattachment["identity"],
"arc_hash_value": iattachment["hash"]["value"],
"arc_hash_alg": iattachment["hash"]["alg"],
},
{
"arc_display_name": "Conformance Report",
"arc_attachment_identity": rattachment["identity"],
"arc_hash_value": rattachment["hash"]["value"],
"arc_hash_alg": rattachment["hash"]["alg"],
},
],
},
extra_attrs={"synsation_conformance_report": rattachment["identity"]},
)
# -> Owner accepts new version and issues maintenance request to have it installed
if wait:
time.sleep(wait)
LOGGER.info("Owner approval...")
else:
input("Press to enact Owner approves")
MyAsset(ac, asset_id, tw, "[email protected]",).service_required(
"Version 1.6 accepted. Please install ASAP",
job_corval,
)
# -> Operator schedules downtime and patches it
if wait:
time.sleep(wait)
LOGGER.info("Maintenance and patch...")
else:
input("Press to enact Maintenance")
MyAsset(ac, asset_id, tw, "[email protected]",).service(
f"Upgraded and restarted {asset_type} during safe downtime window",
job_corval,
)
MyAsset(ac, asset_id, tw, "[email protected]",).update_firmware(
"Responding to vulnerability 'CVE2020-deadbeef' with patch 'v1.6'",
"1.6",
cve_corval,
)
# -> All is well
LOGGER.info("Done")
# Main app
##########
def run(ac, args):
"""logic goes here"""
LOGGER.info("Using version %s of jitsuin-archivist", about.__version__)
LOGGER.info("Looking for asset...")
try:
asset = ac.assets.read_by_signature(
attrs={"arc_display_name": args.asset_name},
)
except ArchivistNotFoundError:
LOGGER.info("Asset not found. Aborting.")
sys_exit(1)
asset_id = asset["identity"]
attrs = asset["attributes"]
asset_type = attrs["arc_display_type"] if "arc_display_type" in attrs else "Device"
LOGGER.info("Creating time warp...")
tw = TimeWarp(args.start_date, args.fast_forward)
LOGGER.info("Beginning simulation...")
demo_flow(ac, asset_id, asset_type, tw, args.wait)
LOGGER.info("Done.")
sys_exit(0)
def entry():
parser, _ = common_parser("Runs the Jitsuinator demo script manually")
parser.add_argument(
"--namespace",
type=str,
dest="namespace",
action="store",
default=None,
help="namespace of item population (to enable parallel demos",
)
parser.add_argument(
"-n",
"--asset_name",
type=str,
dest="asset_name",
action="store",
default="tcl.ccj.01",
help="Name of the asset to ship",
)
parser.add_argument(
"-s",
"--start-date",
type=lambda d: datetime.datetime.strptime(d, "%Y%m%d"),
dest="start_date",
action="store",
default=datetime.date.today() - datetime.timedelta(days=1),
help="Start date for event series (format: yyyymmdd)",
)
parser.add_argument(
"-f",
"--fast-forward",
type=float,
dest="fast_forward",
action="store",
default=3600,
help="Fast forward time in event series (default: 1 second = 1 hour)",
)
parser.add_argument(
"-w",
"--wait",
type=float,
dest="wait",
action="store",
default=0.0,
help="auto-advance after WAIT seconds",
)
args = parser.parse_args()
poc = common_endpoint("synsation", args)
run(poc, args)
parser.print_help(sys_stdout)
sys_exit(1)
| 30.593496 | 87 | 0.627425 |
cd6a10efbd3b27a61cd683bbd7adf99deabb1ef1 | 3,593 | cs | C# | FlowR/FlowR.Tests/TestBase.cs | Portor25/FlowR | eece86d64fee9a612f24487891a0f877cd77c1f3 | [
"Apache-2.0"
] | 1 | 2019-11-22T08:32:47.000Z | 2019-11-22T08:32:47.000Z | FlowR/FlowR.Tests/TestBase.cs | Portor25/FlowR | eece86d64fee9a612f24487891a0f877cd77c1f3 | [
"Apache-2.0"
] | null | null | null | FlowR/FlowR.Tests/TestBase.cs | Portor25/FlowR | eece86d64fee9a612f24487891a0f877cd77c1f3 | [
"Apache-2.0"
] | 4 | 2019-11-22T08:32:25.000Z | 2021-12-28T01:04:50.000Z | using MediatR;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Moq;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using FlowR.Microsoft.Extensions.Logging;
using FlowR.StepLibrary.Activities;
using Xunit.Abstractions;
namespace FlowR.Tests
{
public abstract class TestBase : ILoggerProvider
{
#region TestLogger class
private class TestLogger : ILogger
{
private readonly string _categoryName;
private readonly ITestOutputHelper _testOutput;
public TestLogger(string categoryName, ITestOutputHelper output)
{
if (string.IsNullOrEmpty(categoryName))
throw new ArgumentException("message", nameof(categoryName));
_categoryName = categoryName;
_testOutput = output ?? throw new ArgumentNullException(nameof(output));
}
public IDisposable BeginScope<TState>(TState state)
{
return new LogScope();
}
public bool IsEnabled(LogLevel logLevel)
{
return true;
}
public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception exception, Func<TState, Exception, string> formatter)
{
var message = $"{logLevel.ToString().ToUpper()}: {formatter(state, exception)}";
_testOutput.WriteLine(message);
}
private class LogScope : IDisposable
{
public void Dispose()
{
}
}
}
#endregion
#region Member declarations
protected readonly ITestOutputHelper _output;
#endregion
#region Constructors
protected TestBase(ITestOutputHelper output)
{
_output = output;
}
#endregion
#region ILoggerProvider implementation
public ILogger CreateLogger(string categoryName)
{
return new TestLogger(categoryName, _output);
}
public void Dispose()
{
}
protected (IMediator, ILogger) GetMediator<T>() where T : IFlowStepRequest
{
return GetMediator<T>(addServices: null);
}
protected (IMediator, ILogger) GetMediator<T>(Action<ServiceCollection> addServices) where T : IFlowStepRequest
{
var serviceCollection = new ServiceCollection();
serviceCollection
.AddDebugLogging(this)
.AddMediatR(typeof(IFlowHandler).Assembly)
.AddMediatR(typeof(SetBoolFlowValueRequest).Assembly)
.AddTransient(typeof(IFlowLogger<>), typeof(CoreFlowLogger<>))
.AddMediatR(typeof(T).Assembly);
addServices?.Invoke(serviceCollection);
serviceCollection.BuildServiceProvider(this, out var mediator, out var logger);
return (mediator, logger);
}
#endregion
}
#region ExtensionMethods
static class ExtensionMethods
{
#pragma warning disable IDE0060 // Remove unused parameter
public static ILogger<T> GetLogger<T>(this ServiceProvider serviceCollection, T obj)
#pragma warning restore IDE0060 // Remove unused parameter
{
return serviceCollection
.GetService<ILoggerFactory>()?
.CreateLogger<T>();
}
}
#endregion
}
| 28.070313 | 149 | 0.60423 |
2ff68c81b54e463fb09babaf10ff193d2c90f9bb | 52,261 | py | Python | fortlab/resolver/kgsearch.py | grnydawn/fortlab | 524daa6dd7c99c1ca4bf6088a8ba3e1bcd096d5d | [
"MIT"
] | null | null | null | fortlab/resolver/kgsearch.py | grnydawn/fortlab | 524daa6dd7c99c1ca4bf6088a8ba3e1bcd096d5d | [
"MIT"
] | 1 | 2021-03-29T14:54:22.000Z | 2021-03-29T14:54:51.000Z | fortlab/resolver/kgsearch.py | grnydawn/fortlab | 524daa6dd7c99c1ca4bf6088a8ba3e1bcd096d5d | [
"MIT"
] | null | null | null | """Fortran statements and expressions supported by current KGen
Module content
---------------
"""
# kgen_search.py
from fortlab.kgutils import show_tree, logger
from fortlab.resolver.kgparse import KGGenType
import fortlab.resolver.Fortran2003 as Fortran2003
#from typedecl_statements import TypeDeclarationStatement, TypeStmt, Procedure # TEMP
from fortlab.resolver.typedecl_statements import TypeDeclarationStatement, TypeStmt
from fortlab.resolver.block_statements import Type, TypeDecl, Function, Subroutine, Interface, execution_part, Associate
from fortlab.resolver.statements import External, Common, SpecificBinding, Enumerator
from collections import OrderedDict
#import logging
#logger = logging.getLogger('kgen')
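# Resolver class lists: each res_* list below names the statement classes that
# are allowed to resolve a particular kind of unknown name (a kind parameter, a
# value, a subroutine, ...). They are passed as the `resolvers` argument of
# get_name_or_defer()/get_name() further down.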
res_default = [ TypeDeclarationStatement ]
#res_external = [ External, Procedure ] # TEMP
res_external = [ External ]
res_typedecl = [ TypeDeclarationStatement ]
res_typestmt = [ TypeStmt ]
res_derivedtype = [ Type, TypeDecl ]
res_associate = [ Associate ]
res_kind = [ TypeDeclarationStatement ] + res_derivedtype
res_typespec = [ TypeDeclarationStatement ] + res_derivedtype
res_value = ([ TypeDeclarationStatement, Function, Interface, Enumerator ] +
res_external + res_associate)
res_subroutine = [ Subroutine, Interface ] + res_external
res_function = [ Function, Interface ] + res_external
res_subprogram = [ Subroutine, Function, Interface ] + res_external
res_common = [ Common ]
res_ptr_object = [ SpecificBinding, TypeDeclarationStatement ]
res_target = res_subprogram + res_typedecl
res_anything = res_typespec + res_subprogram + [ SpecificBinding, Common, Type, TypeDecl ]
###############################################################################
################################### COMMON ####################################
###############################################################################
class SearchException(Exception):
pass
def f2003_search_unknowns(stmt, node, config, resolvers=None, gentype=None):
"""Identify unknowns whose declaration statement will be searched by KGen.
Parameters
----------
stmt : F2PY parser statement object
Specify a statement object to be searched
node : F2PY Fortran2003 parser object
        Specify an expression object to be searched
    config : KGen configuration (dict-like)
        Provides the 'search' and 'exclude' settings used during name resolution
resolvers : A list of statement classes for resolver
Limits the classes of resolver
gentype : Type of state data (IN or OUT)
Specify the type of state data
Returns
-------
None
See also
--------
get_name_or_defer
get_name
defer
defer_names
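    Examples
    --------
    A minimal, illustrative sketch; ``stmt`` is assumed to be an already parsed
    F2PY statement and ``config`` an existing KGen configuration:
        expr = Fortran2003.Expr('a + b(i)')
        f2003_search_unknowns(stmt, expr, config)
        # stmt.unknowns now maps KGName keys to ResState entries for 'a', 'b' and 'i'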
"""
if node is None: return
# save in unknowns dict in stmt
if not hasattr(stmt, 'unknowns'):
stmt.unknowns = OrderedDict()
# skip searching if specified
if ( hasattr(node, 'skip_search') and node.skip_search ) or \
( hasattr(node, 'parent') and hasattr(node.parent, 'skip_search') and node.parent.skip_search ):
return
clsname = node.__class__.__name__
if clsname=='Name':
get_name(stmt, node, resolvers, config, gentype=gentype)
return
itemclsname = None
try:
if clsname.endswith('_List'):
_clsname = clsname[:-5]
for item in node.items:
if item is None: continue
itemclsname = item.__class__.__name__
if itemclsname=='Name':
get_name(stmt, item, resolvers, config, gentype=gentype)
else:
exec('search_%s(stmt, item, config, gentype=gentype)' % itemclsname)
elif clsname.startswith('End_'):
pass
else:
exec('search_%s(stmt, node, config, gentype=gentype)' % clsname)
except Exception as e:
errname = clsname
if itemclsname:
errname = itemclsname
errmsg = "Error: Fortran specification of %s is not supported yet."%errname
logger.exception(errmsg)
if config["search"]['promote_exception']:
raise
else:
print('')
print(errmsg)
print('')
            print("'kgen.log' in the output folder contains detailed information about this error.")
            print("If you send the log file to '[email protected]', that could be very")
            print("helpful for us to support this Fortran spec. in a future KGEN version.")
print('')
import sys
sys.exit(-1)
def get_name_or_defer(stmt, node, resolvers, config, defer=True, gentype=None):
"""Select a name to be searched, or defer to lower level of nodes in AST.
Parameters
----------
stmt : F2PY parser statement object
Specify a statement object to be searched
node : F2PY Fortran2003 parser object
Specify an expression object to be searched
resolvers : A list of statement classes for resolver
Limits the classes of resolver
    config : KGen configuration (dict-like)
        Provides the 'search' and 'exclude' settings used during name resolution
    defer : bool
        Whether to defer the search to lower-level nodes in the AST
gentype : Type of state data (IN or OUT)
Specify the type of state data
Returns
-------
None
See also
--------
f2003_search_unknowns
get_name
defer
defer_names
"""
from fortlab.kgutils import KGName, pack_innamepath, match_namepath
from fortlab.resolver.kgparse import ResState
from fortlab.resolver.kgintrinsics import Intrinsic_Procedures
from fortlab.resolver.base_classes import is_except
if node is None: return
# uncomment below line for debug
#print node.__class__, str(node)
if isinstance(node, Fortran2003.Name):
# skip if intrinsic
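        # Names that match Fortran intrinsic procedures are normally not
        # registered as unknowns; the 'skip_intrinsic' option and the 'except'
        # list of the search configuration control the exceptions handled below.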
if node.string.lower() in Intrinsic_Procedures:
excepts = config["search"]['except']
if config["search"]['skip_intrinsic'] and not is_except(node, stmt, excepts):
if hasattr(node, 'parent') and not isinstance(node.parent, Fortran2003.Part_Ref) and \
not (isinstance(node.parent, Fortran2003.Function_Reference) and node.string.lower()=='null') and \
not (isinstance(node.parent, Fortran2003.Specific_Binding) and node.string.lower()=='null'):
logger.debug('Intrinsic procedure name of "%s" is used for name resolution'% \
(node.string.lower()))
logger.debug('\tnear "%s"'% stmt.item.line)
logger.debug('\tin %s'% stmt.reader.id)
else:
#if node.string.lower()!='null':
# logger.debug('Intrinsic procedure name of "%s" is skipped from name resolution'% \
# (node.string.lower()))
#logger.debug('\tnear "%s"'% stmt.item.line)
#logger.debug('\tin %s'% stmt.reader.id)
return
elif not config["search"]['skip_intrinsic'] and is_except(node, stmt, excepts):
if hasattr(node, 'parent') and not isinstance(node.parent, Fortran2003.Part_Ref) and \
not (isinstance(node.parent, Fortran2003.Function_Reference) and node.string.lower()=='null') and \
not (isinstance(node.parent, Fortran2003.Specific_Binding) and node.string.lower()=='null'):
#logger.debug('Intrinsic procedure name of "%s" is NOT skipped from name resolution'% \
# (node.string.lower()))
#logger.debug('\tnear "%s"'% stmt.item.line)
#logger.debug('\tin %s'% stmt.reader.id)
pass
else:
if node.string.lower()!='null':
logger.debug('Intrinsic procedure name of "%s" is skipped from name resolution'% \
(node.string.lower()))
logger.debug('\tnear "%s"'% stmt.item.line)
logger.debug('\tin %s'% stmt.reader.id)
return
# skip if excluded
#if config.exclude.has_key('namepath') and stmt.__class__ in execution_part:
if isinstance(config, list): import pdb; pdb.set_trace()
if 'namepath' in config["exclude"]:
for pattern, actions in config["exclude"]['namepath'].items():
name = node.string.lower()
namepath = pack_innamepath(stmt, name)
#logger.debug('%s and %s are being checked for exclusion'%(pattern, namepath))
if match_namepath(pattern, namepath):
                    #logger.debug('%s and %s are matched for exclusion'%(pattern, namepath))
if not hasattr(stmt, 'exclude_names'): stmt.exclude_names = OrderedDict()
if name in stmt.exclude_names:
stmt.exclude_names[name].extend(actions)
else:
stmt.exclude_names[name] = actions
node.skip_search = True
if hasattr(node, 'parent'): node.parent.skip_search = True
return
ukey = KGName(pack_innamepath(stmt, node.string.lower()), node=node, stmt=stmt)
if gentype is None: gentype = KGGenType.STATE_IN
if resolvers is None:
stmt.unknowns[ukey] = ResState(gentype, ukey, stmt, res_default)
else:
stmt.unknowns[ukey] = ResState(gentype, ukey, stmt, resolvers)
logger.debug('%s is saved as unknown' % node.string.lower())
elif defer:
f2003_search_unknowns(stmt, node, config, resolvers, gentype=gentype)
def get_name(stmt, node, resolvers, config, gentype=None):
get_name_or_defer(stmt, node, resolvers, config, defer=False, gentype=gentype)
def defer(stmt, node, config, gentype=None):
if isinstance(node, Fortran2003.Name):
raise SearchException('%s can not be Name class' % str(node))
f2003_search_unknowns(stmt, node, config, gentype=gentype)
def defer_items(stmt, node, config, gentype=None):
if hasattr(node, 'items'):
for item in node.items:
if isinstance(item, Fortran2003.Name):
raise SearchException('%s can not be Name class' % str(item))
f2003_search_unknowns(stmt, item, config, gentype=gentype)
###############################################################################
################################### SEARCH ####################################
###############################################################################
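# Each search_<NodeClass> handler below mirrors one Fortran2003 node class and is
# dispatched to by name from f2003_search_unknowns() via exec(). A handler either
# registers names directly (get_name_or_defer with a suitable res_* list) or
# recurses into child nodes (defer / defer_items). As an illustrative sketch, a
# handler for a hypothetical node carrying a value expression in items[1] would be:
#     def search_Some_Node(stmt, node, config, gentype=None):
#         get_name_or_defer(stmt, node.items[1], res_value, config)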
def search_Type_Declaration_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Type_Declaration_Stmt node"""
from fortlab.kgutils import pack_innamepath, match_namepath
# collect excluded names
if 'namepath' in config["exclude"]:
for pattern, actions in config["exclude"]['namepath'].items():
decls = []
if isinstance(node.items[2], Fortran2003.Entity_Decl):
decls.append(node.items[2].items[0].string.lower())
elif isinstance(node.items[2], Fortran2003.Entity_Decl_List):
for item in node.items[2].items:
decls.append(item.items[0].string.lower())
for decl in decls:
namepath = pack_innamepath(stmt, decl)
if match_namepath(pattern, namepath):
if not hasattr(stmt, 'exclude_names'): stmt.exclude_names = OrderedDict()
if decl in stmt.exclude_names:
stmt.exclude_names[decl].extend(actions)
else:
stmt.exclude_names[decl] = actions
defer_items(stmt, node, config)
def search_Intrinsic_Type_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Intrinsic_Type_Spec node"""
defer(stmt, node.items[1], config)
def search_Kind_Selector(stmt, node, config, gentype=None):
""" Identifying a name in Kind_Selector node"""
get_name_or_defer(stmt, node.items[1], res_kind, config)
def search_Entity_Decl(stmt, node, config, gentype=None):
""" Identifying a name in Entity_Decl node"""
defer(stmt, node.items[1], config)
get_name_or_defer(stmt, node.items[2], res_value, config)
get_name_or_defer(stmt, node.items[3], res_value, config)
def search_Explicit_Shape_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Explicit_Shape_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Dimension_Attr_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Dimension_Attr_Spec node"""
defer(stmt, node.items[1], config)
def search_Add_Operand(stmt, node, config, gentype=None):
""" Identifying a name in Add_Operand node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Mult_Operand(stmt, node, config, gentype=None):
""" Identifying a name in Mult_Operand node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Attr_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Attr_Spec node"""
defer_items(stmt, node, config)
def search_Initialization(stmt, node, config, gentype=None):
""" Identifying a name in Initialization node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Part_Ref(stmt, node, config, gentype=None):
""" Identifying a name in Part_Ref node"""
get_name_or_defer(stmt, node.items[0], res_value, config, gentype=gentype)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Structure_Constructor_2(stmt, node, config, gentype=None):
""" Identifying a name in Structure_Constructor_2 node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Int_Literal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Int_Literal_Constant node"""
if node.items[1]:
get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
def search_Signed_Int_Literal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Signed_Int_Literal_Constant node"""
if node.items[1]:
get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
def search_Real_Literal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Real_Literal_Constant node"""
if node.items[1]:
get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
def search_Signed_Real_Literal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Signed_Real_Literal_Constant node"""
if node.items[1]:
get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
def search_Subroutine_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Subroutine_Stmt node"""
get_name_or_defer(stmt, node.items[2], res_typedecl, config) # dummy args
get_name_or_defer(stmt, node.items[3], res_typedecl, config) # postfix
def search_Comment(stmt, node, config, gentype=None):
""" Identifying a name in Comment node"""
if hasattr(stmt, 'write_state'):
for var in stmt.write_state:
f2003obj = Fortran2003.Variable(var)
get_name_or_defer(stmt, f2003obj, res_typedecl, config)
def search_Nonlabel_Do_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Nonlabel_Do_Stmt node"""
if len(node.items)==3:
defer(stmt, node.items[2], config)
elif len(node.items)==2:
if isinstance(node.items[0], str):
defer(stmt, node.items[1], config)
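# For a counted loop ("do i = start, end, step") the Loop_Control node carries the
# do-variable in items[0] (registered as STATE_OUT, since the loop writes it) and
# the start/end/step expressions in items[1]; the single-item form (e.g. the
# logical expression of a DO WHILE) is treated as a plain value expression.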
def search_Loop_Control(stmt, node, config, gentype=None):
""" Identifying a name in Loop_Control node"""
if len(node.items)==1:
get_name_or_defer(stmt, node.items[0], res_value, config)
else:
get_name_or_defer(stmt, node.items[0], res_typedecl, config, gentype=KGGenType.STATE_OUT)
if isinstance(node.items[1], list):
for item in node.items[1]:
get_name_or_defer(stmt, item, res_value, config)
else:
get_name_or_defer(stmt, node.items[1], res_value, config)
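# In an assignment statement, items[0] is the left-hand side (written, hence
# STATE_OUT) and items[2] is the right-hand side expression (read, STATE_IN).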
def search_Assignment_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Assignment_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config, gentype=KGGenType.STATE_OUT)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Level_2_Expr(stmt, node, config, gentype=None):
""" Identifying a name in Level_2_Expr node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Parenthesis(stmt, node, config, gentype=None):
""" Identifying a name in Parenthesis node"""
get_name_or_defer(stmt, node.items[1], res_value, config, gentype=gentype)
def search_str(stmt, string, config, gentype=None):
pass
def search_Function_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Function_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_derivedtype, config ) # prefix
get_name_or_defer(stmt, node.items[2], res_typedecl, config) # dummy args
get_name_or_defer(stmt, node.items[3], res_typedecl, config)
def search_Assumed_Shape_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Assumed_Shape_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Allocate_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Allocate_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_typespec, config)
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
defer(stmt, node.items[2], config)
def search_Allocation(stmt, node, config, gentype=None):
""" Identifying a name in Allocation node"""
get_name_or_defer(stmt, node.items[0], res_typedecl, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
#if len(node.items)>1:
# defer_items(stmt, node.items[1:])
def search_Allocate_Shape_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Allocate_Shape_Spec node"""
if node.items:
for item in node.items:
get_name_or_defer(stmt, item, res_value, config)
def search_Use_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Use_Stmt node"""
pass
def search_If_Then_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in If_Then_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Level_4_Expr(stmt, node, config, gentype=None):
""" Identifying a name in Level_4_Expr node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_If_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in If_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Else_If_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Else_If_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Else_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Else_Stmt node"""
pass
def search_Level_2_Unary_Expr(stmt, node, config, gentype=None):
""" Identifying a name in Level_2_Unary_Expr node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Label_Do_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Label_Do_Stmt node"""
defer(stmt, node.items[2], config)
def search_Array_Constructor(stmt, node, config, gentype=None):
""" Identifying a name in Array_Constructor node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Array_Section(stmt, node, config, gentype=None):
""" Identifying a name in Array_Section node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
defer(stmt, node.items[1], config)
def search_Substring_Range(stmt, node, config, gentype=None):
""" Identifying a name in Substring_Range node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Select_Case_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Select_Case_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Case_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Case_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Case_Selector(stmt, node, config, gentype=None):
""" Identifying a name in Case_Selector node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
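# For "call sub(args...)": the subroutine name is resolved against subroutine-like
# resolvers (res_subroutine) and every actual argument is searched as a value.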
def search_Call_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Call_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_subroutine, config)
#if isinstance(node.items[1], Fortran2003.Name):
# get_name_or_defer(stmt, node.items[1], res_value)
#else:
# defer(stmt, node.items[1])
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Char_Literal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Char_Literal_Constant node"""
if node.items[1]:
get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
#get_name_or_defer(stmt, node.items[0], res_typedecl)
def search_Length_Selector(stmt, node, config, gentype=None):
""" Identifying a name in Length_Selector node"""
for item in node.items:
get_name_or_defer(stmt, item, res_value, config)
def search_Type_Param_Value(stmt, node, config, gentype=None):
""" Identifying a name in Type_Param_Value node"""
# NOTE: need to verify its content structure
if node.item:
get_name_or_defer(stmt, node.item, res_value, config)
def search_Write_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Write_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Read_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Read_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Io_Control_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Io_Control_Spec node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Stop_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Stop_Stmt node"""
pass
def search_Contains_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Contains_Stmt node"""
pass
def search_Subscript_Triplet(stmt, node, config, gentype=None):
""" Identifying a name in Subscript_Triplet node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Interface_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Interface_Stmt node"""
pass
def search_Procedure_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Procedure_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_subprogram, config)
def search_Prefix(stmt, node, config, gentype=None):
""" Identifying a name in Prefix node"""
    for item in node.items:
        get_name_or_defer(stmt, item, res_anything, config)
def search_Prefix_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Prefix_Spec node"""
if node.item or hasattr(node, 'items'):
        raise SearchException('Unexpected item or items attr')
def search_Logical_Literal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Logical_Literal_Constant node"""
if node.items[1]:
get_name_or_defer(stmt, Fortran2003.Name(node.items[1]), res_typedecl, config)
#get_name_or_defer(stmt, node.items[1], res_typedecl)
def search_Access_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Access_Spec node"""
pass
def search_And_Operand(stmt, node, config, gentype=None):
""" Identifying a name in And_Operand node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Equiv_Operand(stmt, node, config, gentype=None):
""" Identifying a name in Equiv_Operand node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Or_Operand(stmt, node, config, gentype=None):
""" Identifying a name in Or_Operand node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Where_Construct_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Where_Construct_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Elsewhere_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Elsewhere_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Suffix(stmt, node, config, gentype=None):
""" Identifying a name in Suffix node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Declaration_Type_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Declaration_Type_Spec node"""
get_name_or_defer(stmt, node.items[1], res_derivedtype, config)
def search_Data_Ref(stmt, node, config, gentype=None):
""" Identifying a name in Data_Ref node"""
from fortlab.kgutils import KGName
# NOTE: to limit the scope of data saving in derived type,
    # the last part_ref would be the one that has gentype=gentype
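    # e.g. for "state%field(i)": the base name "state" is registered with the
    # caller's gentype and the subscript "i" as a value, while the component
    # name "field" itself is not registered here.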
if isinstance(node.items[0], Fortran2003.Name):
get_name_or_defer(stmt, node.items[0], res_value, config, gentype=gentype)
elif isinstance(node.items[0], Fortran2003.Part_Ref):
get_name_or_defer(stmt, node.items[0].items[0], res_value, config, gentype=gentype)
get_name_or_defer(stmt, node.items[0].items[1], res_value, config)
for item in node.items[1:]:
if isinstance(item, Fortran2003.Name): pass
elif isinstance(item, Fortran2003.Part_Ref):
get_name_or_defer(stmt, item.items[1], res_value, config)
elif item is None: pass
        else: raise SearchException('Unknown type: %s'%item.__class__)
def search_Structure_Constructor(stmt, node, config, gentype=None):
""" Identifying a name in Structure_Constructor node"""
#get_name_or_defer(stmt, node.items[0], res_derivedtype)
# NOTE: parser found ordinary subprogram as Structure_Constructor
get_name_or_defer(stmt, node.items[0], res_value + res_derivedtype, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Binary_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Binary_Constant node"""
pass
def search_Octal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Octal_Constant node"""
pass
def search_Hex_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Hex_Constant node"""
pass
def search_Intrinsic_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Intrinsic_Stmt node"""
pass
#get_name_or_defer(stmt, node.items[1], res_subprogram)
def search_Derived_Type_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Derived_Type_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Access_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Access_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_anything, config)
def search_Function_Reference(stmt, node, config, gentype=None):
""" Identifying a name in Function_Reference node"""
get_name_or_defer(stmt, node.items[0], res_function, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Return_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Return_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_function, config)
def search_Print_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Print_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Format(stmt, node, config, gentype=None):
""" Identifying a name in Format node"""
if hasattr(node, 'items') and len(node.items)>0:
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Implicit_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Implicit_Stmt node"""
if hasattr(node, 'items') and len(node.items)>0:
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Exit_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Exit_Stmt node"""
pass
def search_Pointer_Assignment_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Pointer_Assignment_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_ptr_object, config) # data pointer obj or procedure pointer obj
get_name_or_defer(stmt, node.items[2], res_target, config) # data target or procedure target
def search_Proc_Component_Ref(stmt, node, config, gentype=None):
""" Identifying a name in Proc_Component_Ref node"""
get_name_or_defer(stmt, node.items[0], res_value, config, gentype=gentype)
# Type definition may handle a procedure component name?
#get_name_or_defer(stmt, node.items[2], res_value)
def search_Io_Unit(stmt, node, config, gentype=None):
""" Identifying a name in Io_Unit node"""
if hasattr(node, 'items') and len(node.items)>0:
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Level_3_Expr(stmt, node, config, gentype=None):
""" Identifying a name in Level_3_Expr node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Open_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Open_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Connect_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Connect_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Endfile_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Endfile_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Position_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Position_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Close_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Close_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Close_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Close_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Label(stmt, node, config, gentype=None):
""" Identifying a name in Label node"""
pass
def search_Io_Implied_Do(stmt, node, config, gentype=None):
""" Identifying a name in Io_Implied_Do node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Io_Implied_Do_Control(stmt, node, config, gentype=None):
""" Identifying a name in Io_Implied_Do_Control node"""
get_name_or_defer(stmt, node.items[0], res_typedecl, config, gentype=KGGenType.STATE_OUT)
get_name_or_defer(stmt, node.items[1], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
get_name_or_defer(stmt, node.items[3], res_value, config)
def search_Format_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Format_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
# No need for searching format-items?
#get_name_or_defer(stmt, node.items[1], res_value)
def search_Format_Specification(stmt, node, config, gentype=None):
""" Identifying a name in Format_Specification node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Format_Item_C1002(stmt, node, config, gentype=None):
""" Identifying a name in Format_Item_C1002 node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Control_Edit_Desc(stmt, node, config, gentype=None):
""" Identifying a name in Control_Edit_Desc node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Format_Item(stmt, node, config, gentype=None):
""" Identifying a name in Format_Item node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Alloc_Opt(stmt, node, config, gentype=None):
""" Identifying a name in Alloc_Opt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Deallocate_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Deallocate_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Cycle_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Cycle_Stmt node"""
pass
def search_External_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in External_Stmt node"""
get_name_or_defer(stmt, node.items[1], \
[ TypeDeclarationStatement, Function, Subroutine ], config)
def search_Case_Value_Range(stmt, node, config, gentype=None):
""" Identifying a name in Case_Value_Range node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Forall_Construct_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Forall_Construct_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Forall_Header(stmt, node, config, gentype=None):
""" Identifying a name in Forall_Header node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Forall_Triplet_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Forall_Triplet_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
get_name_or_defer(stmt, node.items[3], res_value, config)
def search_Goto_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Goto_Stmt node"""
pass
def search_Continue_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Continue_Stmt node"""
pass
def search_Wait_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Wait_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Wait_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Wait_Spec node"""
if hasattr(node, 'items') and len(node.items)>0:
for item in node.items:
get_name_or_defer(stmt, item, res_value, config)
def search_Rewind_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Rewind_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Flush_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Flush_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Import_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Import_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_anything, config)
def search_Block_Data_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Block_Data_Stmt node"""
# NOTE: Temporary solution
pass
def search_Data_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Data_Stmt node"""
if hasattr(node, 'items') and len(node.items)>0:
for item in node.items:
get_name_or_defer(stmt, item, res_typedecl, config)
def search_Data_Stmt_Value(stmt, node, config, gentype=None):
""" Identifying a name in Data_Stmt_Value node"""
get_name_or_defer(stmt, node.items[0], res_typedecl, config)
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Save_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Save_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Asynchronous_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Asynchronous_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Allocatable_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Allocatable_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Common_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Common_Stmt node"""
if hasattr(node, 'items') and len(node.items)>0:
for itemlist in node.items:
for name, _item in itemlist:
get_name_or_defer(stmt, _item, res_value, config)
def search_Data_Stmt_Set(stmt, node, config, gentype=None):
""" Identifying a name in Data_Stmt_Set node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Dimension_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Dimension_Stmt node"""
if hasattr(node, 'items') and len(node.items)>0:
for itemlist in node.items:
for name, _item in itemlist:
get_name_or_defer(stmt, _item, res_value, config)
def search_Equivalence_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Equivalence_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Equivalence_Set(stmt, node, config, gentype=None):
""" Identifying a name in Equivalence_Set node"""
get_name_or_defer(stmt, node.items[0], res_typedecl, config)
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Intent_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Intent_Stmt node"""
#get_name_or_defer(stmt, node.items[0], res_typedecl)
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Intent_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Intent_Spec node"""
pass
def search_Namelist_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Namelist_Stmt node"""
if hasattr(node, 'items') and len(node.items)>0:
for nlname, nlgroup in node.items:
get_name_or_defer(stmt, nlgroup, res_typedecl, config)
def search_Optional_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Optional_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Pointer_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Pointer_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_anything, config)
def search_Protected_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Protected_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Target_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Target_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_anything, config)
def search_Target_Entity_Decl(stmt, node, config, gentype=None):
""" Identifying a name in Target_Entity_Decl node"""
get_name_or_defer(stmt, node.items[0], res_anything, config)
defer(stmt, node.items[1], config)
get_name_or_defer(stmt, node.items[2], res_value, config)
get_name_or_defer(stmt, node.items[3], res_value, config)
def search_Volatile_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Volatile_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_anything, config)
def search_Value_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Value_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Backspace_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Backspace_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Forall_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Forall_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Inquire_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Inquire_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Inquire_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Inquire_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Nullify_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Nullify_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Where_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Where_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Arithmetic_If_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Arithmetic_If_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
get_name_or_defer(stmt, node.items[3], res_value, config)
def search_Computed_Goto_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Computed_Goto_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Actual_Arg_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Actual_Arg_Spec node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Data_Pointer_Object(stmt, node, config, gentype=None):
""" Identifying a name in Data_Pointer_Object node"""
get_name_or_defer(stmt, node.items[0], res_value, config, gentype=gentype)
if node.items[2] and not isinstance(node.items[2], Fortran2003.Name):
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Type_Attr_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Type_Attr_Spec node"""
if isinstance(node.items[0], str) and node.items[0]=='EXTENDS':
get_name_or_defer(stmt, node.items[1], res_derivedtype, config)
else:
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Level_5_Expr(stmt, node, config, gentype=None):
""" Identifying a name in Level_5_Expr node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Parameter_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Parameter_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Named_Constant_Def(stmt, node, config, gentype=None):
""" Identifying a name in Named_Constant_Def node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Saved_Entity(stmt, node, config, gentype=None):
""" Identifying a name in Saved_Entity node"""
if len(node.items)==3 and node.items[0]=='/' and node.items[2]=='/':
get_name_or_defer(stmt, node.items[1], res_common, config)
else:
for item in node.items:
get_name_or_defer(stmt, item, res_value, config)
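# NOTE: search_Alloc_Opt below redefines the function of the same name declared
# earlier in this module; at import time this later definition is the one in effect.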
def search_Alloc_Opt(stmt, node, config, gentype=None):
""" Identifying a name in Alloc_Opt node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Dealloc_Opt(stmt, node, config, gentype=None):
""" Identifying a name in Dealloc_Opt node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Generic_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Generic_Spec node"""
pass
def search_Assumed_Size_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Assumed_Size_Spec node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Common_Block_Object(stmt, node, config, gentype=None):
""" Identifying a name in Common_Block_Object node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Ac_Implied_Do(stmt, node, config, gentype=None):
""" Identifying a name in Ac_Implied_Do node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Ac_Implied_Do_Control(stmt, node, config, gentype=None):
""" Identifying a name in Ac_Implied_Do_Control node"""
get_name_or_defer(stmt, node.items[0], res_value, config, gentype=KGGenType.STATE_OUT)
if node.items[1]:
for item in node.items[1]:
get_name_or_defer(stmt, item, res_value, config)
def search_Specific_Binding(stmt, node, config, gentype=None):
""" Identifying a name in Specific_Binding node"""
get_name_or_defer(stmt, node.items[0], res_typespec + [ Interface ], config)
get_name_or_defer(stmt, node.items[1], res_value, config)
if not hasattr(node.items[1], 'string') or 'DEFERRED' not in node.items[1].string:
if node.items[3] is None:
get_name_or_defer(stmt, node.items[2], res_subprogram, config)
else:
get_name_or_defer(stmt, node.items[3], res_subprogram, config)
def search_Binding_Attr(stmt, node, config, gentype=None):
""" Identifying a name in Binding_Attr node"""
pass
def search_Masked_Elsewhere_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Masked_Elsewhere_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Procedure_Designator(stmt, node, config, gentype=None):
""" Identifying a name in Procedure_Designator node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
def search_Associate_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Associate_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Association(stmt, node, config, gentype=None):
""" Identifying a name in Association node"""
get_name_or_defer(stmt, node.items[2], res_value, config)
def search_Generic_Binding(stmt, node, config, gentype=None):
""" Identifying a name in Generic_Binding node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[2], [ SpecificBinding ], config)
def search_Complex_Literal_Constant(stmt, node, config, gentype=None):
""" Identifying a name in Complex_Literal_Constant node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Char_Length(stmt, node, config, gentype=None):
""" Identifying a name in Char_Length node"""
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Data_Implied_Do(stmt, node, config, gentype=None):
""" Identifying a name in Data_Implied_Do node"""
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config, gentype=KGGenType.STATE_OUT)
get_name_or_defer(stmt, node.items[2], res_value, config)
get_name_or_defer(stmt, node.items[3], res_value, config)
get_name_or_defer(stmt, node.items[4], res_value, config)
def search_Ac_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Ac_Spec node"""
defer(stmt, node.items[0], config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Sequence_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Sequence_Stmt node"""
pass
def search_Stmt_Function_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in Stmt_Function_Stmt node"""
get_name_or_defer(stmt, node.items[0], res_typedecl, config)
def search_Language_Binding_Spec(stmt, node, config, gentype=None):
""" Identifying a name in Language_Binding_Spec node"""
    # No need to resolve external C library routines
pass
def search_Select_Type_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in search_Select_Type_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typedecl, config)
def search_Type_Guard_Stmt(stmt, node, config, gentype=None):
""" Identifying a name in search_Type_Guard_Stmt node"""
get_name_or_defer(stmt, node.items[1], res_typespec, config)
#show_tree(node)
#import pdb ;pdb.set_trace()
def search_Implicit_Spec(stmt, node, config, gentype=None):
"""
<implicit-spec> = <declaration-type-spec> ( <letter-spec-list> )
"""
get_name_or_defer(stmt, node.items[0], res_typespec, config)
get_name_or_defer(stmt, node.items[1], res_typespec, config)
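    # Illustrative example (not from the original source): for the statement
    #   IMPLICIT REAL(KIND=8) (A-H, O-Z)
    # items[0] is the declaration-type-spec REAL(KIND=8) and items[1] holds the
    # letter-spec-list (A-H, O-Z).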
def search_Letter_Spec(stmt, node, config, gentype=None):
pass
def search_Procedure_Declaration_Stmt(stmt, node, config, gentype=None):
get_name_or_defer(stmt, node.items[0], [Interface], config)
get_name_or_defer(stmt, node.items[1], res_value, config)
#show_tree(node)
#import pdb ;pdb.set_trace()
def search_Binding_PASS_Arg_Name(stmt, node, config, gentype=None):
#show_tree(node)
#import pdb ;pdb.set_trace()
pass
def search_Char_Selector(stmt, node, config, gentype=None):
get_name_or_defer(stmt, node.items[0], res_value, config)
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Enum_Def_Stmt(stmt, node, config, gentype=None):
pass
def search_Enumerator_Def_Stmt(stmt, node, config, gentype=None):
get_name_or_defer(stmt, node.items[1], res_value, config)
def search_Enumerator(stmt, node, config, gentype=None):
get_name_or_defer(stmt, node.items[2], res_value, config)
| 43.843121 | 120 | 0.697384 |
356146ea4a3084a8f7b8c048beab069eb0e3d980 | 79 | sh | Shell | costar_simulation/robot/jaco_description/transformURDF.sh | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 66 | 2018-10-31T04:58:53.000Z | 2022-03-17T02:32:25.000Z | costar_simulation/robot/jaco_description/transformURDF.sh | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 8 | 2018-10-23T21:19:25.000Z | 2018-12-03T02:08:41.000Z | costar_simulation/robot/jaco_description/transformURDF.sh | cpaxton/costar_plan | be5c12f9d0e9d7078e6a5c283d3be059e7f3d040 | [
"Apache-2.0"
] | 25 | 2018-10-19T00:54:17.000Z | 2021-10-10T08:28:15.000Z | rosrun xacro xacro --inorder urdf/jaco_robot.urdf.xacro > urdf/jaco_robot.urdf
| 39.5 | 78 | 0.810127 |
a3209894be9188a6146fafca8382c1e55d3a5130 | 8,224 | java | Java | src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java | NextCenturyCorporation/nutch | 241ad5a614aab9baf671caebbcf7ad0ffb809f61 | [
"Apache-2.0"
] | 1 | 2019-08-15T06:49:46.000Z | 2019-08-15T06:49:46.000Z | src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java | NextCenturyCorporation/nutch | 241ad5a614aab9baf671caebbcf7ad0ffb809f61 | [
"Apache-2.0"
] | null | null | null | src/java/org/apache/nutch/crawl/AbstractFetchSchedule.java | NextCenturyCorporation/nutch | 241ad5a614aab9baf671caebbcf7ad0ffb809f61 | [
"Apache-2.0"
] | 1 | 2020-03-20T09:29:26.000Z | 2020-03-20T09:29:26.000Z | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nutch.crawl;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.io.Text;
import org.apache.nutch.crawl.CrawlDatum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.invoke.MethodHandles;
/**
* This class provides common methods for implementations of
* <code>FetchSchedule</code>.
*
* @author Andrzej Bialecki
*/
public abstract class AbstractFetchSchedule extends Configured implements
FetchSchedule {
private static final Logger LOG = LoggerFactory
.getLogger(MethodHandles.lookup().lookupClass());
protected int defaultInterval;
protected int maxInterval;
public AbstractFetchSchedule() {
super(null);
}
public AbstractFetchSchedule(Configuration conf) {
super(conf);
}
public void setConf(Configuration conf) {
super.setConf(conf);
if (conf == null)
return;
defaultInterval = conf.getInt("db.fetch.interval.default", 0);
maxInterval = conf.getInt("db.fetch.interval.max", 0);
LOG.info("defaultInterval=" + defaultInterval);
LOG.info("maxInterval=" + maxInterval);
}
/**
* Initialize fetch schedule related data. Implementations should at least set
* the <code>fetchTime</code> and <code>fetchInterval</code>. The default
* implementation sets the <code>fetchTime</code> to now, using the default
* <code>fetchInterval</code>.
*
* @param url
* URL of the page.
*
* @param datum
* datum instance to be initialized (modified in place).
*/
public CrawlDatum initializeSchedule(Text url, CrawlDatum datum) {
datum.setFetchTime(System.currentTimeMillis());
datum.setFetchInterval(defaultInterval);
datum.setRetriesSinceFetch(0);
return datum;
}
/**
* Sets the <code>fetchInterval</code> and <code>fetchTime</code> on a
* successfully fetched page. NOTE: this implementation resets the retry
* counter - extending classes should call super.setFetchSchedule() to
* preserve this behavior.
*/
public CrawlDatum setFetchSchedule(Text url, CrawlDatum datum,
long prevFetchTime, long prevModifiedTime, long fetchTime,
long modifiedTime, int state) {
datum.setRetriesSinceFetch(0);
return datum;
}
/**
* This method specifies how to schedule refetching of pages marked as GONE.
* Default implementation increases fetchInterval by 50% but the value may
* never exceed <code>maxInterval</code>.
*
* @param url
* URL of the page.
*
* @param datum
* datum instance to be adjusted.
*
* @return adjusted page information, including all original information.
* NOTE: this may be a different instance than @see CrawlDatum, but
* implementations should make sure that it contains at least all
* information from @see CrawlDatum.
*/
public CrawlDatum setPageGoneSchedule(Text url, CrawlDatum datum,
long prevFetchTime, long prevModifiedTime, long fetchTime) {
// no page is truly GONE ... just increase the interval by 50%
// and try much later.
if ((datum.getFetchInterval() * 1.5f) < maxInterval)
datum.setFetchInterval(datum.getFetchInterval() * 1.5f);
else
datum.setFetchInterval(maxInterval * 0.9f);
datum.setFetchTime(fetchTime + (long) datum.getFetchInterval() * 1000);
return datum;
}
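  // Illustrative example (hypothetical values, not from the original source):
  // with fetchInterval = 30 days and maxInterval = 90 days, a GONE page is
  // rescheduled after 30 * 1.5 = 45 days; once 1.5x the current interval would
  // reach or exceed maxInterval, the interval is capped at 90 * 0.9 = 81 days.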
/**
* This method adjusts the fetch schedule if fetching needs to be re-tried due
* to transient errors. The default implementation sets the next fetch time 1
* day in the future and increases the retry counter.
*
* @param url
* URL of the page.
*
* @param datum
* page information.
*
* @param prevFetchTime
* previous fetch time.
*
* @param prevModifiedTime
* previous modified time.
*
* @param fetchTime
* current fetch time.
*
* @return adjusted page information, including all original information.
* NOTE: this may be a different instance than @see CrawlDatum, but
* implementations should make sure that it contains at least all
* information from @see CrawlDatum.
*/
public CrawlDatum setPageRetrySchedule(Text url, CrawlDatum datum,
long prevFetchTime, long prevModifiedTime, long fetchTime) {
datum.setFetchTime(fetchTime + (long) SECONDS_PER_DAY * 1000);
datum.setRetriesSinceFetch(datum.getRetriesSinceFetch() + 1);
return datum;
}
/**
* This method return the last fetch time of the CrawlDatum
*
* @return the date as a long.
*/
public long calculateLastFetchTime(CrawlDatum datum) {
if (datum.getStatus() == CrawlDatum.STATUS_DB_UNFETCHED) {
return 0L;
} else {
return datum.getFetchTime() - (long) datum.getFetchInterval() * 1000;
}
}
/**
* This method provides information whether the page is suitable for selection
* in the current fetchlist. NOTE: a true return value does not guarantee that
* the page will be fetched, it just allows it to be included in the further
* selection process based on scores. The default implementation checks
* <code>fetchTime</code>, if it is higher than the <code>curTime</code> it
* returns false, and true otherwise. It will also check that fetchTime is not
   * too remote (more than <code>maxInterval</code>), in which case it lowers the
   * interval and returns true.
*
* @param url
* URL of the page.
*
* @param datum
* datum instance.
*
* @param curTime
* reference time (usually set to the time when the fetchlist
* generation process was started).
*
* @return true, if the page should be considered for inclusion in the current
* fetchlist, otherwise false.
*/
public boolean shouldFetch(Text url, CrawlDatum datum, long curTime) {
// pages are never truly GONE - we have to check them from time to time.
// pages with too long a fetchInterval are adjusted so that they fit within
// a maximum fetchInterval (segment retention period).
if (datum.getFetchTime() - curTime > (long) maxInterval * 1000) {
if (datum.getFetchInterval() > maxInterval) {
datum.setFetchInterval(maxInterval * 0.9f);
}
datum.setFetchTime(curTime);
}
if (datum.getFetchTime() > curTime) {
return false; // not time yet
}
return true;
}
/**
* This method resets fetchTime, fetchInterval, modifiedTime,
* retriesSinceFetch and page signature, so that it forces refetching.
*
* @param url
* URL of the page.
*
* @param datum
* datum instance.
*
* @param asap
* if true, force refetch as soon as possible - this sets the
* fetchTime to now. If false, force refetch whenever the next fetch
* time is set.
*/
public CrawlDatum forceRefetch(Text url, CrawlDatum datum, boolean asap) {
// reduce fetchInterval so that it fits within the max value
if (datum.getFetchInterval() > maxInterval)
datum.setFetchInterval(maxInterval * 0.9f);
datum.setStatus(CrawlDatum.STATUS_DB_UNFETCHED);
datum.setRetriesSinceFetch(0);
datum.setSignature(null);
datum.setModifiedTime(0L);
if (asap)
datum.setFetchTime(System.currentTimeMillis());
return datum;
}
}
| 34.995745 | 80 | 0.683731 |
c550fb165aa1d74a5f3e0a50bb83b25c8bce86e2 | 2,002 | sql | SQL | Homework/DBFundamentals/Databases Basics/10.ExamPrep24April2017/ExamExercises/ExamExercises/01. DDL.sql | GitHarr/SoftUni | 1c51efc70a97a1be17e2590a9243d01a9be343aa | [
"MIT"
] | null | null | null | Homework/DBFundamentals/Databases Basics/10.ExamPrep24April2017/ExamExercises/ExamExercises/01. DDL.sql | GitHarr/SoftUni | 1c51efc70a97a1be17e2590a9243d01a9be343aa | [
"MIT"
] | null | null | null | Homework/DBFundamentals/Databases Basics/10.ExamPrep24April2017/ExamExercises/ExamExercises/01. DDL.sql | GitHarr/SoftUni | 1c51efc70a97a1be17e2590a9243d01a9be343aa | [
"MIT"
] | null | null | null | CREATE TABLE Clients(
ClientId INT PRIMARY KEY IDENTITY,
FirstName VARCHAR(50) NOT NULL,
LastName VARCHAR(50) NOT NULL,
Phone VARCHAR(12) NOT NULL CHECK(LEN(Phone) = 12)
)
CREATE TABLE Mechanics(
MechanicId INT PRIMARY KEY IDENTITY,
FirstName VARCHAR(50) NOT NULL,
LastName VARCHAR(50) NOT NULL,
[Address] VARCHAR(255) NOT NULL
)
CREATE TABLE Models(
ModelId INT PRIMARY KEY IDENTITY,
[Name] VARCHAR(50) UNIQUE NOT NULL
)
CREATE TABLE Vendors(
VendorId INT PRIMARY KEY IDENTITY,
[Name] VARCHAR(50) UNIQUE NOT NULL
)
CREATE TABLE Parts(
PartId INT PRIMARY KEY IDENTITY,
SerialNumber VARCHAR(50) UNIQUE NOT NULL,
[Description] VARCHAR(255),
Price DECIMAL(15, 2) NOT NULL CHECK(Price > 0),
VendorId INT NOT NULL FOREIGN KEY(VendorId) REFERENCES Vendors(VendorId),
StockQty INT NOT NULL CHECK(StockQty >= 0) DEFAULT 0
)
CREATE TABLE Jobs(
JobId INT PRIMARY KEY IDENTITY,
ModelId INT NOT NULL FOREIGN KEY(ModelId) REFERENCES Models(ModelId),
[Status] VARCHAR(11) NOT NULL
CHECK([Status] = 'Pending' OR [Status] = 'In Progress'
OR [Status] = 'Finished')
DEFAULT 'Pending',
ClientId INT NOT NULL FOREIGN KEY(ClientId) REFERENCES Clients(ClientId),
MechanicId INT FOREIGN KEY(MechanicId) REFERENCES Mechanics(MechanicId),
IssueDate DATE NOT NULL,
FinishDate DATE
)
CREATE TABLE Orders(
OrderId INT PRIMARY KEY IDENTITY,
JobId INT NOT NULL FOREIGN KEY(JobId) REFERENCES Jobs(JobId),
IssueDate DATE,
Delivered BIT NOT NULL DEFAULT 0
)
CREATE TABLE OrderParts(
OrderId INT NOT NULL
FOREIGN KEY(OrderId) REFERENCES Orders(OrderId),
PartId INT NOT NULL
FOREIGN KEY(PartId) REFERENCES Parts(PartId),
Quantity INT NOT NULL CHECK(Quantity > 0) DEFAULT 1,
CONSTRAINT PK_OrderParts PRIMARY KEY(OrderId, PartId)
)
CREATE TABLE PartsNeeded(
JobId INT NOT NULL FOREIGN KEY(JobId) REFERENCES Jobs(JobId),
PartId INT NOT NULL FOREIGN KEY(PartId) REFERENCES Parts(PartId),
Quantity INT NULL CHECK(Quantity > 0) DEFAULT 1,
CONSTRAINT PK_PartsNeeded PRIMARY KEY(JobId, PartId)
)
| 27.054054 | 73 | 0.765235 |
2fd36762448234362f7ae61b16d238480b32dac4 | 1,842 | py | Python | day13.py | jm-projects/AdventOfCode2021 | 5c6630bf6130d4a40da4bc585e86fc8f4fd89749 | [
"MIT"
] | null | null | null | day13.py | jm-projects/AdventOfCode2021 | 5c6630bf6130d4a40da4bc585e86fc8f4fd89749 | [
"MIT"
] | null | null | null | day13.py | jm-projects/AdventOfCode2021 | 5c6630bf6130d4a40da4bc585e86fc8f4fd89749 | [
"MIT"
] | null | null | null | from numpy.core.numeric import count_nonzero
import pandas as pd
import numpy as np
import re
data = pd.read_csv("data/day13.csv", header = None, dtype=str, delimiter= '\n')[0]
codes = [re.split("\s\S\S\s", word) for word in data.values][1:]
# Challenge 1
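# Pair-insertion counting: instead of building the full string, track how many
# times each adjacent character pair occurs. Sentinel '0' characters are added at
# both ends so every real character belongs to exactly two pairs, which is why
# the per-character totals are halved at the end.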
word = np.array(data.values)[0]
c_dic = {c[0]:c[1] for c in codes}
c_dic['0'+word[0]] = ''
c_dic[word[-1]+'0'] = ''
word = '0'+word+'0'
steps = 10
dic = {key:0 for key in c_dic}
for i in range(len(word)-1): dic[word[i]+word[i+1]] += 1
for step in range(steps):
dic2 = {k:val for k, val in dic.items()}
for key in dic:
if key[0] != '0' and key[-1] != '0':
res = c_dic[key]
dic2[key[0]+res] += dic[key]
dic2[res+key[1]] += dic[key]
dic2[key] -= dic[key]
dic = {k:val for k, val in dic2.items()}
occ = {val:0 for k,val in c_dic.items()}
for char in word: occ[char] = 0
for key in dic:
occ[key[0]] += dic[key]
occ[key[1]] += dic[key]
out = np.sort([val for k,val in occ.items()])
print((out[-1]-out[2])//2)
# Challenge 2
word = np.array(data.values)[0]
c_dic = {c[0]:c[1] for c in codes}
c_dic['0'+word[0]] = ''
c_dic[word[-1]+'0'] = ''
word = '0'+word+'0'
steps = 40
dic = {key:0 for key in c_dic}
for i in range(len(word)-1): dic[word[i]+word[i+1]] += 1
for step in range(steps):
dic2 = {k:val for k, val in dic.items()}
for key in dic:
if key[0] != '0' and key[-1] != '0':
res = c_dic[key]
dic2[key[0]+res] += dic[key]
dic2[res+key[1]] += dic[key]
dic2[key] -= dic[key]
dic = {k:val for k, val in dic2.items()}
occ = {val:0 for k,val in c_dic.items()}
for char in word: occ[char] = 0
for key in dic:
occ[key[0]] += dic[key]
occ[key[1]] += dic[key]
out = np.sort([val for k,val in occ.items()])
print((out[-1]-out[2])//2) | 25.943662 | 82 | 0.555375 |
ef524c94ee56a0cce74b182b45c486443fda64af | 1,666 | js | JavaScript | src/components/Chef.js | jackowfish/MealScheduler | 6adf858401a72698bbb9707045d2468c352e07e0 | [
"MIT"
] | 1 | 2021-09-29T23:37:18.000Z | 2021-09-29T23:37:18.000Z | src/components/Chef.js | jackowfish/MealScheduler | 6adf858401a72698bbb9707045d2468c352e07e0 | [
"MIT"
] | null | null | null | src/components/Chef.js | jackowfish/MealScheduler | 6adf858401a72698bbb9707045d2468c352e07e0 | [
"MIT"
] | 1 | 2021-09-29T19:25:44.000Z | 2021-09-29T19:25:44.000Z | import React, { useEffect, useRef, useState } from 'react';
import { Container, Row, Col, Card,Button, ButtonGroup} from 'react-bootstrap';
import '../styles/Chef.css'
import MealForm from './MealForm.js'
const Chef = () => {
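  // mealCreation tracks whether the meal-planning form is shown; the button below
  // toggles it, and mealFormItems renders the form only while it is true.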
const [mealCreation, setMealCreation] = useState(false);
const addMealCreation = () => setMealCreation(!mealCreation);
const mealFormItems = mealCreation ?
(
<Card>
<Card.Body>
<Card.Title className="text-center">
Plan your meal!
</Card.Title>
</Card.Body>
<Card.Body>
<MealForm/>
</Card.Body>
</Card>
) : null
return (
<div>
<Container>
<Row>
<Col>
<div><br/><br/></div>
</Col>
</Row>
<Row>
<Col>
<Button variant="primary" size="large" block onClick={addMealCreation}>
Add a Plate
</Button>
</Col>
</Row>
<Row>
<Col>
<div><br/><br/></div>
</Col>
</Row>
<Row>
<Col style={{display: 'flex', justifyContent: 'center'}}>
{mealFormItems}
</Col>
</Row>
</Container>
</div>
);
}
export default Chef; | 30.851852 | 99 | 0.370348 |
b02792b02c8236cc65aca48706d7909a87e126ee | 1,170 | py | Python | flit_core/flit_core/tests/test_versionno.py | ksunden/flit | 5ed24842e614af813bd67ecf6ee918961fbf0ac2 | [
"BSD-3-Clause"
] | 1,642 | 2015-03-26T18:22:25.000Z | 2021-12-01T00:10:52.000Z | flit_core/flit_core/tests/test_versionno.py | ksunden/flit | 5ed24842e614af813bd67ecf6ee918961fbf0ac2 | [
"BSD-3-Clause"
] | 427 | 2015-03-16T20:22:17.000Z | 2021-12-01T14:15:38.000Z | flit_core/flit_core/tests/test_versionno.py | ksunden/flit | 5ed24842e614af813bd67ecf6ee918961fbf0ac2 | [
"BSD-3-Clause"
] | 110 | 2015-03-28T02:50:28.000Z | 2021-11-27T09:36:37.000Z | import pytest
from flit_core.common import InvalidVersion
from flit_core.versionno import normalise_version
def test_normalise_version():
nv = normalise_version
assert nv('4.3.1') == '4.3.1'
assert nv('1.0b2') == '1.0b2'
assert nv('2!1.3') == '2!1.3'
# Prereleases
assert nv('1.0B2') == '1.0b2'
assert nv('1.0.b2') == '1.0b2'
assert nv('1.0beta2') == '1.0b2'
assert nv('1.01beta002') == '1.1b2'
assert nv('1.0-preview2') == '1.0rc2'
assert nv('1.0_c') == '1.0rc0'
# Post releases
assert nv('1.0post-2') == '1.0.post2'
assert nv('1.0post') == '1.0.post0'
assert nv('1.0-rev3') == '1.0.post3'
assert nv('1.0-2') == '1.0.post2'
# Development versions
assert nv('1.0dev-2') == '1.0.dev2'
assert nv('1.0dev') == '1.0.dev0'
assert nv('1.0-dev3') == '1.0.dev3'
assert nv('1.0+ubuntu-01') == '1.0+ubuntu.1'
assert nv('v1.3-pre2') == '1.3rc2'
assert nv(' 1.2.5.6\t') == '1.2.5.6'
assert nv('1.0-alpha3-post02+ubuntu_xenial_5') == '1.0a3.post2+ubuntu.xenial.5'
with pytest.raises(InvalidVersion):
nv('3!')
with pytest.raises(InvalidVersion):
nv('abc')
| 28.536585 | 83 | 0.573504 |
f3f3a9a9f3e3e2beb0efb89587475b7484efc41b | 1,615 | ts | TypeScript | libs/dashboards/src/lib/transformations/automated-response-summary/asset-group-by-trigger.transformation.ts | alertlogic/al-magma-console | 63aff315fc2de3e24527ea8b77bd4e466cfb5d84 | [
"MIT"
] | null | null | null | libs/dashboards/src/lib/transformations/automated-response-summary/asset-group-by-trigger.transformation.ts | alertlogic/al-magma-console | 63aff315fc2de3e24527ea8b77bd4e466cfb5d84 | [
"MIT"
] | 4 | 2021-11-01T14:33:45.000Z | 2021-11-22T17:23:46.000Z | libs/dashboards/src/lib/transformations/automated-response-summary/asset-group-by-trigger.transformation.ts | alertlogic/al-magma-console | 63aff315fc2de3e24527ea8b77bd4e466cfb5d84 | [
"MIT"
] | null | null | null | import { Widget as WidgetConfig, TableListConfig, ZeroStateReason } from '@al/ng-visualizations-components';
import { AlResponderAggregationsSummaryItem } from '@al/responder';
export const summarizeAssetGroup = (items: AlResponderAggregationsSummaryItem[]): number => {
let count = 0;
items.forEach(summary => {
count = count + (Object.values(summary)?.[0] as number);
});
return count;
};
export const processAssetGroup = (assetGroups: {
[key: string]: number | {
trigger_ids: AlResponderAggregationsSummaryItem[];
playbook_ids: AlResponderAggregationsSummaryItem[];
}
}[]): {
asset_group: string;
count_trigger: number;
}[] => {
const assetGroup: {
asset_group: string;
count_trigger: number;
}[] = [];
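  // Each aggregation entry exposes the asset-group name as its only key other
  // than 'aggregations'; count the trigger ids recorded for that group.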
assetGroups.forEach(element => {
const assetName = Object.keys(element).filter(elementKey => elementKey !== 'aggregations')[0];
const triggerCount = element['aggregations']['trigger_ids'].length;
assetGroup.push({ asset_group: assetName, count_trigger: triggerCount });
});
return assetGroup;
};
export const assetGroupByTrigger = (response, item?: WidgetConfig) => {
if (response.aggregations.asset_groups.length === 0) {
return {
nodata: true,
reason: ZeroStateReason.Zero
};
}
const summary = processAssetGroup(response.aggregations.asset_groups);
const tableConfig: TableListConfig = {
headers: [
{ name: 'Asset Group', field: 'asset_group', class: 'left multiline-content' },
{ name: 'Trigger Count', field: 'count_trigger', class: 'right' }
],
body: summary
};
return tableConfig;
};
| 31.057692 | 108 | 0.688545 |
dda50025aca1dee1f038e562d298c86429139a44 | 5,762 | java | Java | src/main/java/com/baidu/hugegraph/structure/graph/Edge.java | imbaGeek/hugegraph-client | 451e08f98c97c0a8c16285af4ab370baac9c4efa | [
"Apache-2.0"
] | null | null | null | src/main/java/com/baidu/hugegraph/structure/graph/Edge.java | imbaGeek/hugegraph-client | 451e08f98c97c0a8c16285af4ab370baac9c4efa | [
"Apache-2.0"
] | null | null | null | src/main/java/com/baidu/hugegraph/structure/graph/Edge.java | imbaGeek/hugegraph-client | 451e08f98c97c0a8c16285af4ab370baac9c4efa | [
"Apache-2.0"
] | 1 | 2022-01-05T08:42:12.000Z | 2022-01-05T08:42:12.000Z | /*
* Copyright 2017 HugeGraph Authors
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.baidu.hugegraph.structure.graph;
import com.baidu.hugegraph.exception.InvalidOperationException;
import com.baidu.hugegraph.structure.GraphElement;
import com.baidu.hugegraph.util.E;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
public class Edge extends GraphElement {
@JsonProperty("id")
private String id;
@JsonProperty("outV")
private Object sourceId;
@JsonProperty("inV")
private Object targetId;
@JsonProperty("outVLabel")
private String sourceLabel;
@JsonProperty("inVLabel")
private String targetLabel;
private Vertex source;
private Vertex target;
@JsonCreator
public Edge(@JsonProperty("label") String label) {
this.label = label;
this.type = "edge";
this.sourceId = null;
this.targetId = null;
this.sourceLabel = null;
this.targetLabel = null;
this.source = null;
this.target = null;
}
public String id() {
return this.id;
}
public void id(String id) {
this.id = id;
}
public Object sourceId() {
if (this.sourceId == null && this.source != null) {
this.sourceId = this.source.id();
}
if (this.sourceId == null) {
throw new InvalidOperationException(
"Must set source vertex id or add vertices " +
"before add edges");
}
return this.sourceId;
}
public void sourceId(Object sourceId) {
E.checkArgumentNotNull(sourceId, "The source vertex id can't be null");
this.sourceId = sourceId;
}
public Edge source(Vertex source) {
if (source.id() == null) {
this.source = source;
}
this.sourceId = source.id();
this.sourceLabel = source.label();
return this;
}
public Object targetId() {
if (this.targetId == null && this.target != null) {
this.targetId = this.target.id();
}
    if (this.targetId == null) {
      throw new InvalidOperationException(
                "Must set target vertex id or add vertices " +
                "before add edges");
}
return this.targetId;
}
public void targetId(Object targetId) {
E.checkArgumentNotNull(targetId, "The target vertex id can't be null");
this.targetId = targetId;
}
public Edge target(Vertex target) {
if (target.id() == null) {
this.target = target;
}
this.targetId = target.id();
this.targetLabel = target.label();
return this;
}
public String sourceLabel() {
return this.sourceLabel;
}
public void sourceLabel(String sourceLabel) {
this.sourceLabel = sourceLabel;
}
public String targetLabel() {
return this.targetLabel;
}
public void targetLabel(String targetLabel) {
this.targetLabel = targetLabel;
}
@Override
public Edge property(String key, Object value) {
E.checkNotNull(key, "The property name can not be null");
E.checkNotNull(value, "The property value can not be null");
if (this.fresh()) {
return (Edge) super.property(key, value);
} else {
return this.setProperty(key, value);
}
}
@Override
protected Edge setProperty(String key, Object value) {
Edge edge = new Edge(this.label);
edge.id(this.id);
edge.sourceId(this.sourceId);
edge.targetId(this.targetId);
edge.property(key, value);
// NOTE: append can also be used to update property
edge = this.manager.appendEdgeProperty(edge);
super.property(key, edge.property(key));
return this;
}
@Override
public Edge removeProperty(String key) {
E.checkNotNull(key, "The property name can not be null");
if (!this.properties.containsKey(key)) {
throw new InvalidOperationException(
"The edge '%s' doesn't have the property '%s'",
this.id, key);
}
Edge edge = new Edge(this.label);
edge.id(this.id);
edge.sourceId(this.sourceId);
edge.targetId(this.targetId);
Object value = this.properties.get(key);
edge.property(key, value);
this.manager.eliminateEdgeProperty(edge);
this.properties().remove(key);
return this;
}
@Override
public String toString() {
return String.format("{id=%s, sourceId=%s, sourceLabel=%s, " +
"targetId=%s, targetLabel=%s, " +
"label=%s, properties=%s}",
this.id, this.sourceId, this.sourceLabel,
this.targetId, this.targetLabel,
this.label, this.properties);
}
}
| 30.648936 | 79 | 0.606734 |
25b647933e0fe62385c8bd2289787ddf6ffbc995 | 419 | cs | C# | PushoverClient/PushOverRequestArguments.cs | diggforbeer/Pushover.NET | e1b03a9209f8c0e8961287b07972cdd356027721 | [
"MIT"
] | null | null | null | PushoverClient/PushOverRequestArguments.cs | diggforbeer/Pushover.NET | e1b03a9209f8c0e8961287b07972cdd356027721 | [
"MIT"
] | null | null | null | PushoverClient/PushOverRequestArguments.cs | diggforbeer/Pushover.NET | e1b03a9209f8c0e8961287b07972cdd356027721 | [
"MIT"
] | null | null | null | // ReSharper disable InconsistentNaming
namespace PushoverClient
{
public class PushoverRequestArguments
{
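        // Property names are kept lowercase to match the Pushover API form-field
        // names (hence the InconsistentNaming suppression above).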
public string token { get; set; }
public string user { get; set; }
public string device { get; set; }
public string title { get; set; }
public string message { get; set; }
public int priority { get; set; }
public string sound { get; set; }
}
} | 29.928571 | 43 | 0.608592 |
eca61a31b49032fab9cb1146edfc0b4343e3ae2e | 2,254 | sql | SQL | Databases/Commerce.Database/Schema Objects/Schemas/dbo/Programmability/Stored Procedures/SetOfferActiveState.sql | BOONRewardsInc/rewards | 256f86bf7a17c997b7415752cd22ce95d41359ab | [
"MIT"
] | 13 | 2017-01-09T14:53:43.000Z | 2021-03-02T14:46:09.000Z | Databases/Commerce.Database/Schema Objects/Schemas/dbo/Programmability/Stored Procedures/SetOfferActiveState.sql | BOONRewardsInc/rewards | 256f86bf7a17c997b7415752cd22ce95d41359ab | [
"MIT"
] | 1 | 2019-01-15T02:36:09.000Z | 2019-01-15T02:36:09.000Z | Databases/Commerce.Database/Schema Objects/Schemas/dbo/Programmability/Stored Procedures/SetOfferActiveState.sql | BOONRewardsInc/rewards | 256f86bf7a17c997b7415752cd22ce95d41359ab | [
"MIT"
] | 12 | 2017-01-12T16:15:33.000Z | 2021-03-24T13:40:49.000Z | --
-- Copyright (c) Microsoft Corporation. All rights reserved.
-- Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
--
---------------------------------------------------------------------------------------------------------------------------------
-- SetOfferActiveState.sql
-- Copyright (c) Microsoft Corporation. All rights reserved.
---------------------------------------------------------------------------------------------------------------------------------
-- SetOfferActiveState
-- Sets the specified offer to the specified Active state if it's not in that state already.
-- Parameters:
-- @globalID uniqueidentifier: The ID assigned within the wider system to this offer.
-- @activeState bit: The Active state to which to set the offer.
create procedure dbo.SetOfferActiveState @globalID uniqueidentifier,
@activeState bit
as
set nocount on;
begin try
begin transaction
-- Determine if the specified offer exists in the Earn system.
declare @offerId int = (select Offers.Id from dbo.Offers where GlobalID = @globalID);
-- If an offer with the specifed global ID exists within the Earn system, attempt to update its Active state.
if (@offerId is not null)
begin
-- If the Offer is not already in the specified Active state, set it to that state.
if ((select Active from dbo.Offers where Offers.Id = @offerId) <> @activeState)
begin
update dbo.Offers set Active = @activeState where Offers.Id = @offerId;
end
-- Otherwise, let the caller know that no change was needed.
else
begin
raiserror('OfferUnchanged', 16, 1);
end
end
-- Otherwise raise an exception.
else
begin
raiserror('OfferNotFound', 16, 1);
end
commit transaction
end try
begin catch
-- Rollback the transaction and then re-raise the error.
if (@@trancount > 0) rollback transaction;
declare @errorMessage nvarchar(4000) = ERROR_MESSAGE();
declare @errorSeverity int = ERROR_SEVERITY();
raiserror(@errorMessage, @errorSeverity, 1)
end catch
GO | 41.740741 | 129 | 0.585626 |
254768fd1fd67d8b826eeec87686694202631393 | 710 | js | JavaScript | src/DemoEntryPoint.js | caldera-design/s3-uploader | 217c1b4c2e472df8ff2600c2c9882c9daf0cb7f6 | [
"MIT"
] | 1 | 2018-07-02T19:38:31.000Z | 2018-07-02T19:38:31.000Z | src/DemoEntryPoint.js | caldera-design/s3-uploader | 217c1b4c2e472df8ff2600c2c9882c9daf0cb7f6 | [
"MIT"
] | null | null | null | src/DemoEntryPoint.js | caldera-design/s3-uploader | 217c1b4c2e472df8ff2600c2c9882c9daf0cb7f6 | [
"MIT"
] | null | null | null |
import React from 'react';
import { render } from 'react-dom';
import S3Uploader from './LibraryEntryPoint';
const styles = {
container: {
width: 300
},
uploadForm: {
border: '2px solid grey'
}
};
function RootComponent() {
return (
<div style={styles.container}>
<S3Uploader bucket="caldera.design.dlc.s3.amazonaws.com"
region="us-west-2"
accessKey="AKIAIS22XIJECTUVQSYA"
signatureUrl={'http://localhost:6001/api/s3/signature'}
style={styles.uploadForm}/>
</div>
);
}
render(<RootComponent/>, document.getElementById('react-main'));
| 24.482759 | 81 | 0.552113 |
ddc99b37f1c20ab7d819813d8bc88cf1cd2730ff | 576 | java | Java | ses-app/ses-web-ros/src/main/java/com/redescooter/ses/web/ros/dao/base/OpeSysRoleDataMapper.java | moutainhigh/ses-server | e1ee6ac34499950ef4b1b97efa0aaf4c4fec67c5 | [
"MIT"
] | null | null | null | ses-app/ses-web-ros/src/main/java/com/redescooter/ses/web/ros/dao/base/OpeSysRoleDataMapper.java | moutainhigh/ses-server | e1ee6ac34499950ef4b1b97efa0aaf4c4fec67c5 | [
"MIT"
] | null | null | null | ses-app/ses-web-ros/src/main/java/com/redescooter/ses/web/ros/dao/base/OpeSysRoleDataMapper.java | moutainhigh/ses-server | e1ee6ac34499950ef4b1b97efa0aaf4c4fec67c5 | [
"MIT"
] | 2 | 2021-08-31T07:59:28.000Z | 2021-10-16T10:55:44.000Z | package com.redescooter.ses.web.ros.dao.base;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.redescooter.ses.web.ros.dm.OpeSysRoleData;
import java.util.List;
import org.apache.ibatis.annotations.Param;
public interface OpeSysRoleDataMapper extends BaseMapper<OpeSysRoleData> {
int updateBatch(List<OpeSysRoleData> list);
int updateBatchSelective(List<OpeSysRoleData> list);
int batchInsert(@Param("list") List<OpeSysRoleData> list);
int insertOrUpdate(OpeSysRoleData record);
int insertOrUpdateSelective(OpeSysRoleData record);
} | 32 | 74 | 0.798611 |
0f26de7f2f581b571e181be3e53c9b29c50eef8b | 7,562 | go | Go | pkg/api/api.go | Meetic/blackbeard | 540a46789704f828a2d1a8b0ea46f1cb051202dc | [
"Apache-2.0"
] | 27 | 2018-01-10T12:55:26.000Z | 2021-08-30T15:42:32.000Z | pkg/api/api.go | Meetic/blackbeard | 540a46789704f828a2d1a8b0ea46f1cb051202dc | [
"Apache-2.0"
] | 63 | 2017-12-05T09:51:28.000Z | 2021-01-27T17:05:57.000Z | pkg/api/api.go | Meetic/blackbeard | 540a46789704f828a2d1a8b0ea46f1cb051202dc | [
"Apache-2.0"
] | 2 | 2018-09-06T16:09:04.000Z | 2018-12-06T14:59:04.000Z | package api
import (
"strings"
"time"
"github.com/sirupsen/logrus"
"github.com/Meetic/blackbeard/pkg/playbook"
"github.com/Meetic/blackbeard/pkg/resource"
"github.com/Meetic/blackbeard/pkg/version"
)
// Api represents the blackbeard entrypoint by defining the list of actions
// blackbeard is able to perform.
type Api interface {
Inventories() playbook.InventoryService
Namespaces() resource.NamespaceService
Playbooks() playbook.PlaybookService
Pods() resource.PodService
Create(namespace string) (playbook.Inventory, error)
Delete(namespace string, wait bool) error
ListExposedServices(namespace string) ([]resource.Service, error)
ListNamespaces() ([]Namespace, error)
Reset(namespace string, configPath string) error
Apply(namespace string, configPath string) error
Update(namespace string, inventory playbook.Inventory, configPath string) error
WaitForNamespaceReady(namespace string, timeout time.Duration, bar progress) error
GetVersion() (*Version, error)
DeleteResource(namespace string, resource string) error
}
type api struct {
inventories playbook.InventoryService
configs playbook.ConfigService
playbooks playbook.PlaybookService
namespaces resource.NamespaceService
pods resource.PodService
services resource.ServiceService
cluster resource.ClusterService
job resource.JobService
}
// NewApi creates a blackbeard api. The blackbeard api is responsible for managing playbooks and namespaces.
// Parameters are structs implementing, respectively, the Inventory, Config, Namespace, Pod and Service interfaces.
func NewApi(
inventories playbook.InventoryRepository,
configs playbook.ConfigRepository,
playbooks playbook.PlaybookRepository,
namespaces resource.NamespaceRepository,
pods resource.PodRepository,
deployments resource.DeploymentRepository,
statefulsets resource.StatefulsetRepository,
services resource.ServiceRepository,
cluster resource.ClusterRepository,
job resource.JobRepository,
) Api {
api := &api{
inventories: playbook.NewInventoryService(inventories, playbook.NewPlaybookService(playbooks)),
configs: playbook.NewConfigService(configs, playbook.NewPlaybookService(playbooks)),
playbooks: playbook.NewPlaybookService(playbooks),
namespaces: resource.NewNamespaceService(namespaces, pods, deployments, statefulsets, job),
pods: resource.NewPodService(pods),
services: resource.NewServiceService(services),
cluster: resource.NewClusterService(cluster),
job: resource.NewJobService(job),
}
go api.WatchDelete()
return api
}
// Inventories returns the Inventory Service from the api
func (api *api) Inventories() playbook.InventoryService {
return api.inventories
}
// Namespaces returns the Namespace Service from the api
func (api *api) Namespaces() resource.NamespaceService {
return api.namespaces
}
// Playbooks returns the Playbook Service from the api
func (api *api) Playbooks() playbook.PlaybookService {
return api.playbooks
}
func (api *api) Pods() resource.PodService {
return api.pods
}
// Create is responsible for creating an inventory, a set of kubernetes configs and a kubernetes namespace
// for a given namespace.
// If an inventory already exists, Create will log the error and continue the process. Configs will be overridden.
func (api *api) Create(namespace string) (playbook.Inventory, error) {
if err := api.namespaces.Create(namespace); err != nil {
return playbook.Inventory{}, err
}
inv, err := api.inventories.Create(namespace)
if err != nil {
switch e := err.(type) {
default:
return playbook.Inventory{}, e
case *playbook.ErrorInventoryAlreadyExist:
logrus.Warn(e.Error())
logrus.Info("Process continue")
}
}
if err := api.configs.Generate(inv); err != nil {
return playbook.Inventory{}, err
}
return inv, nil
}
// Delete deletes the inventory, configs and kubernetes namespace for the given namespace.
func (api *api) Delete(namespace string, wait bool) error {
// delete namespace
if err := api.namespaces.Delete(namespace); err != nil {
return err
}
if !wait {
api.deletePlaybook(namespace)
}
return nil
}
// ListExposedServices returns a list of services exposed somehow outside of the kubernetes cluster.
// Exposed services could be:
// * NodePort type services
// * LoadBalancer type services
// * HTTP services exposed through Ingress
func (api *api) ListExposedServices(namespace string) ([]resource.Service, error) {
return api.services.ListExposed(namespace)
}
// Reset resets an inventory, the associated configs and the kubernetes namespaces to default values.
// Default values are defined by the InventoryService GetDefault() method.
func (api *api) Reset(namespace string, configPath string) error {
//Reset inventory file
inv, err := api.inventories.Reset(namespace)
if err != nil {
return err
}
//Apply inventory to configuration
if err := api.configs.Generate(inv); err != nil {
return err
}
//Apply changes to Kubernetes
if err = api.namespaces.ApplyConfig(namespace, configPath); err != nil {
return err
}
return nil
}
// Apply overrides configs with newly generated configs and applies them to the kubernetes namespace.
// Warning: For now, Apply requires a configPath as a parameter.
// configPath is the location of configs for each namespace. This will change in the future since high level
// api should not be aware that configs are stored in files.
func (api *api) Apply(namespace string, configPath string) error {
inv, err := api.inventories.Get(namespace)
if err != nil {
return err
}
if err := api.configs.Generate(inv); err != nil {
return err
}
if err := api.namespaces.ApplyConfig(inv.Namespace, configPath); err != nil {
return err
}
return nil
}
// Update replaces the inventory associated with the given namespace with the one passed in parameters
// and applies the changes to configs and the kubernetes namespace (using the Apply method).
func (api *api) Update(namespace string, inventory playbook.Inventory, configPath string) error {
if err := api.inventories.Update(namespace, inventory); err != nil {
return err
}
if err := api.Apply(namespace, configPath); err != nil {
return err
}
return nil
}
// DeleteResource deletes a resource from a namespace.
// Only Job deletion is supported for now.
func (api *api) DeleteResource(namespace, resource string) error {
if err := api.job.Delete(namespace, resource); err != nil {
return err
}
return nil
}
func (api *api) deletePlaybook(namespace string) {
if inv, _ := api.inventories.Get(namespace); inv.Namespace == namespace {
api.inventories.Delete(namespace)
api.configs.Delete(namespace)
}
}
func (api *api) WatchDelete() {
api.namespaces.AddListener("http")
// handle delete of inventories and configs files
for event := range api.namespaces.Events("http") {
if event.Type == "DELETED" {
api.deletePlaybook(event.Namespace)
logrus.WithFields(logrus.Fields{
"component": "watcher",
"event": "delete",
"namespace": event.Namespace,
}).Debug("Playbook deleted")
}
}
}
type Version struct {
Blackbeard string `json:"blackbeard"`
Kubernetes string `json:"kubernetes"`
Kubectl string `json:"kubectl"`
}
func (api *api) GetVersion() (*Version, error) {
v, err := api.cluster.GetVersion()
if err != nil {
return nil, err
}
return &Version{
Blackbeard: version.GetVersion(),
Kubectl: strings.Join([]string{v.ClientVersion.Major, v.ClientVersion.Minor}, "."),
Kubernetes: strings.Join([]string{v.ServerVersion.Major, v.ServerVersion.Minor}, "."),
}, nil
}
| 30.12749 | 111 | 0.747157 |
e14a1e138581df6545990d6e12d01020a0fa06fb | 286 | rb | Ruby | lib/facter/shinken_mongo_uri.rb | lermit/puppet-shinken | 84d281f523e72b62a415bf6733dd22d2e11be313 | ["Apache-2.0"] | 4 | 2015-05-27T10:40:02.000Z | 2016-02-29T08:15:56.000Z | lib/facter/shinken_mongo_uri.rb | lermit/puppet-shinken | 84d281f523e72b62a415bf6733dd22d2e11be313 | ["Apache-2.0"] | null | null | null | lib/facter/shinken_mongo_uri.rb | lermit/puppet-shinken | 84d281f523e72b62a415bf6733dd22d2e11be313 | ["Apache-2.0"] | 4 | 2015-05-27T10:40:02.000Z | 2016-02-29T08:15:56.000Z |
Facter.add("shinken_mongo_uri") do
setcode do
ret = nil
if File.file?('/etc/shinken/modules/mongodb.cfg')
File.foreach('/etc/shinken/modules/mongodb.cfg') do |line|
if line.strip =~ /uri\s+(.*)/
ret = $1
end
end
end
ret
end
end
| 20.428571 | 64 | 0.562937 |
4d34279df9bec806a27a6f742d630a37f40793a7 | 519 | cs | C# | Assets/Scripts/PlayerStat.cs | smjy/unity_game | c3b644452d8f9f011ce1545eed7cf8fb6c1f0169 | ["MIT"] | null | null | null | Assets/Scripts/PlayerStat.cs | smjy/unity_game | c3b644452d8f9f011ce1545eed7cf8fb6c1f0169 | ["MIT"] | null | null | null | Assets/Scripts/PlayerStat.cs | smjy/unity_game | c3b644452d8f9f011ce1545eed7cf8fb6c1f0169 | ["MIT"] | null | null | null |
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
public class PlayerStat : MonoBehaviour {
public static PlayerStat main;
[Header("玩家数据")]
[Tooltip("代码块")] public int code_blocks = 100;
[Tooltip("内存")] public float memory = 1024;
[Tooltip("姓名")] public string player_name = "Player1337";
private void Awake() {
if (main == null)
main = this;
else if (main != this)
Destroy(gameObject);
}
}
| 21.625 | 61 | 0.626204 |
e672ce486714934bd7472f01ef325a7e6865b64d | 3,073 | c | C | ikbd/mouse.c | simonsunnyboy/stlibs4gcc | 53e997c26ddd5cde09e2ca9e6f66be4057326598 | ["MIT"] | 5 | 2016-05-16T07:36:03.000Z | 2020-08-12T22:32:55.000Z | ikbd/mouse.c | simonsunnyboy/stlibs4gcc | 53e997c26ddd5cde09e2ca9e6f66be4057326598 | ["MIT"] | null | null | null | ikbd/mouse.c | simonsunnyboy/stlibs4gcc | 53e997c26ddd5cde09e2ca9e6f66be4057326598 | ["MIT"] | null | null | null |
/*
*
* IKBD 6301 interrupt routine
* (c) 2010/11/14 by Simon Sunnyboy / Paradize <[email protected]>
* http://paradize.atari.org/
*
* mouse data collection
*
* derived from a similar routine Copyright (C) 2002 Patrice Mandin
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "ikbd.h"
/* driver internal variables*/
extern volatile uint8_t IKBD_MouseB; /* Mouse on port 0, buttons (driver internal) */
extern volatile int16_t IKBD_MouseX; /* Mouse X position (driver internal) */
extern volatile int16_t IKBD_MouseY; /* Mouse Y position (driver internal) */
/* variables */
volatile IKBD_MouseData IKBD_Mouse; /* mouse data */
uint8_t MouseThreshold = 128; /* mouse threshold (255 = 200%) */
/* reads the accumulated mouse packets and updates the mouse position */
void IKBD_ReadMouse()
{
int16_t xoffset, yoffset;
if(MouseThreshold > 0)
{
/* scale the mouse movement by the configurable threshold */
xoffset = (int16_t)((int32_t)(IKBD_MouseX * MouseThreshold) / 128);
yoffset = (int16_t)((int32_t)(IKBD_MouseY * MouseThreshold) / 128);
}
else
{
/* 0% scaling is not allowed -> scale as 100% */
xoffset = IKBD_MouseX;
yoffset = IKBD_MouseY;
}
IKBD_Mouse.b = IKBD_MouseB;
IKBD_Mouse.x = IKBD_Mouse.x + xoffset;
IKBD_Mouse.y = IKBD_Mouse.y + yoffset;
if(IKBD_Mouse.limit != 0)
{
/* mouse coordinates limitation is active? */
if(IKBD_Mouse.x < 0)
{
IKBD_Mouse.x = 0;
}
else if(IKBD_Mouse.x > IKBD_Mouse.w)
{
IKBD_Mouse.x = IKBD_Mouse.w;
}
if(IKBD_Mouse.y < 0)
{
IKBD_Mouse.y = 0;
}
else if(IKBD_Mouse.y > IKBD_Mouse.h)
{
IKBD_Mouse.y = IKBD_Mouse.h;
}
}
/* acknowledge accumulated mouse vector (driver internals) */
IKBD_MouseX = 0;
IKBD_MouseY = 0;
return;
}
/* allows to configure the scaling of relative mouse movement */
void IKBD_SetMouseThreshold(uint8_t new_threshold)
{
MouseThreshold = new_threshold;
return;
}
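/*
 * Illustrative usage (not part of the original driver sources): MouseThreshold
 * acts as a fixed-point scale factor with 128 == 100%, as applied in IKBD_ReadMouse().
 *
 *   IKBD_SetMouseThreshold(64);   //  64/128 -> movement scaled to ~50%
 *   IKBD_SetMouseThreshold(128);  // 128/128 -> unscaled (100%)
 *   IKBD_SetMouseThreshold(255);  // 255/128 -> roughly 200%
 *   IKBD_SetMouseThreshold(0);    // 0 is treated as 100% (see the else branch above)
 */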
/* allows to set current mouse position and define the screen size */
void IKBD_SetMouseOrigin(int16_t x, int16_t y, uint16_t w, uint16_t h)
{
IKBD_Mouse.x = x;
IKBD_Mouse.y = y;
IKBD_Mouse.w = w;
IKBD_Mouse.h = h;
IKBD_Mouse.b = 0;
IKBD_Mouse.limit = 1;
IKBD_MouseX = 0;
IKBD_MouseY = 0;
return;
}
| 28.719626 | 100 | 0.674585 |
43c3aa29062bfee2e4b14b0844b1160b26dd65de | 922 | tsx | TypeScript | src/components/InfoButton.tsx | BancambiosDeFi/Bancambios-DEX | 690f60f87caea2a019bb611549a549198ba3349f | ["Apache-2.0"] | null | null | null | src/components/InfoButton.tsx | BancambiosDeFi/Bancambios-DEX | 690f60f87caea2a019bb611549a549198ba3349f | ["Apache-2.0"] | null | null | null | src/components/InfoButton.tsx | BancambiosDeFi/Bancambios-DEX | 690f60f87caea2a019bb611549a549198ba3349f | ["Apache-2.0"] | 1 | 2022-03-11T13:51:44.000Z | 2022-03-11T13:51:44.000Z |
import React from 'react';
import { makeStyles } from '@mui/styles';
import { IconButton } from '@mui/material';
import { ReactComponent as InfoIcon } from '../assets/icons/info-icon.svg';
interface InfoButtonProps {
id: string;
popoverId: string | undefined;
handleClick: (event: React.MouseEvent<HTMLButtonElement>) => void;
}
const useStyles = makeStyles(() => ({
infoIcon: {
width: 'fit-content !important',
height: 'fit-content !important',
padding: '0 !important',
marginLeft: '8px !important',
},
}));
const InfoButton: React.FC<InfoButtonProps> = ({ id, popoverId, handleClick }) => {
const styles = useStyles();
return (
<>
<IconButton
className={styles.infoIcon}
size="small"
aria-describedby={popoverId}
onClick={handleClick}
id={id}
>
<InfoIcon />
</IconButton>
</>
);
};
export default InfoButton;
| 23.05 | 83 | 0.626898 |
d7316a4cf531cb3d8e1d414700b0df4425c8ceaa | 746 | dart | Dart | example/src/fast_test.dart | kornel661/web-router | 580eec23b14ca75aefa39b62d70b79d4dfaf1a60 | ["MIT"] | null | null | null | example/src/fast_test.dart | kornel661/web-router | 580eec23b14ca75aefa39b62d70b79d4dfaf1a60 | ["MIT"] | 1 | 2015-03-21T20:54:48.000Z | 2015-03-21T20:54:48.000Z | example/src/fast_test.dart | kornel661/web-router | 580eec23b14ca75aefa39b62d70b79d4dfaf1a60 | ["MIT"] | null | null | null |
/*
* SPA router
* Copyright (c) 2015 Kornel Maczyński.
* For other contributors, see Github.
*/
library fast_test;
import 'dart:async';
import 'package:polymer/polymer.dart';
import 'package:spa_router/spa_router.dart';
@CustomTag('fast-test')
class FastTest extends PolymerElement {
@published String urlA = "/anim/";
@published String urlB = "/anim/test";
@published String urlC = "/anim/fast test";
SpaRouter router = null;
@override
FastTest.created() : super.created();
void fastTest() {
Duration delay = new Duration(milliseconds: 20);
new Future.delayed(delay, () => router.go(urlA));
new Future.delayed(delay * 2, () => router.go(urlB));
new Future.delayed(delay * 3, () => router.go(urlC));
}
}
| 24.064516 | 57 | 0.674263 |
e74008e68a01d143286f495254c5052cabc015fe | 555 | lua | Lua | Commands/endGame.lua | petrroll/nota_houska_hlaa | 7e6ea0f83f7c8cf571a1b4f18544dac47e4c727d | ["MIT"] | null | null | null | Commands/endGame.lua | petrroll/nota_houska_hlaa | 7e6ea0f83f7c8cf571a1b4f18544dac47e4c727d | ["MIT"] | null | null | null | Commands/endGame.lua | petrroll/nota_houska_hlaa | 7e6ea0f83f7c8cf571a1b4f18544dac47e4c727d | ["MIT"] | null | null | null |
function getInfo()
return {
onNoUnits = RUNNING, -- instant success
tooltip = "Ends game.",
parameterDefs = {}
}
end
-- get mandatory module operators
VFS.Include("modules.lua") -- modules table
VFS.Include(modules.attach.data.path .. modules.attach.data.head) -- attach lib module
-- get other mandatory dependencies
attach.Module(modules, "message") -- communication backend load
function Run(self, unitIds, p)
message.SendRules({
subject = "manualMissionEnd",
data = {},
})
return SUCCESS
end
function Reset(self)
end
| 19.821429 | 86 | 0.697297 |
6c61023ae99d83e2245ccf899e620d29320d4114 | 555 | lua | Lua | lib/EntityAttribute/lua/EntityValueMap.lua | adriano-di-giovanni/reaves | 3423bbabfed9a1c4ae2be8431e8856f5a05cb568 | ["MIT"] | null | null | null | lib/EntityAttribute/lua/EntityValueMap.lua | adriano-di-giovanni/reaves | 3423bbabfed9a1c4ae2be8431e8856f5a05cb568 | ["MIT"] | null | null | null | lib/EntityAttribute/lua/EntityValueMap.lua | adriano-di-giovanni/reaves | 3423bbabfed9a1c4ae2be8431e8856f5a05cb568 | ["MIT"] | null | null | null |
local EntityValueMap = {}
EntityValueMap.__index = EntityValueMap
function EntityValueMap:create(keyPrefix)
local instance = {
key = keyPrefix .. ':EntityValueMap'
}
setmetatable(instance, EntityValueMap)
return instance
end
function EntityValueMap:get(entityId)
return redis.call('HGET', self.key, entityId)
end
function EntityValueMap:delete(entityId)
return redis.call('HDEL', self.key, entityId)
end
function EntityValueMap:upsert(entityId, newValue)
return redis.call('HSET', self.key, entityId, newValue)
end
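-- Illustrative usage sketch (not part of the original file): this script is meant
-- to run inside a Redis EVAL context where `redis.call` is available; the key
-- prefix 'score' below is a hypothetical example chosen by the caller.
--   local map = EntityValueMap:create('score')  -- operates on 'score:EntityValueMap'
--   map:upsert('entity42', 10)                  -- HSET score:EntityValueMap entity42 10
--   local v = map:get('entity42')               -- HGET score:EntityValueMap entity42
--   map:delete('entity42')                      -- HDEL score:EntityValueMap entity42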
| 24.130435 | 59 | 0.744144 |
b782063ee14e2f8f3250be299d3f644e8aea6daa1 | 221 | cs | C# | src/Platforms/Android/SwankImplementation.cs | spektrummedia/swank | 81675c760e0eb6b5f0a704599783fe59bf2ccfcf | ["MIT"] | 8 | 2018-03-22T03:31:18.000Z | 2022-02-23T21:58:13.000Z | src/Platforms/Android/SwankImplementation.cs | spektrummedia/swank | 81675c760e0eb6b5f0a704599783fe59bf2ccfcf | ["MIT"] | 6 | 2018-03-13T17:08:40.000Z | 2019-12-05T14:49:31.000Z | src/Platforms/Android/SwankImplementation.cs | spektrumgeeks/swank | 81675c760e0eb6b5f0a704599783fe59bf2ccfcf | ["MIT"] | 4 | 2018-04-19T07:25:48.000Z | 2019-03-18T21:00:36.000Z |
using CarouselView.FormsPlugin.Android;
namespace Plugin.Swank
{
public class SwankImplementation : ISwank
{
public static void Init()
{
CarouselViewRenderer.Init();
}
}
} | 18.416667 | 45 | 0.61086 |
c5364f42610fba271ed0e7c68e4a8cff35305a4a | 23 | css | CSS | src/components/markdown-renderer/markdown-renderer.css | web-component-org/markdown-renderer | aca2be75bacf8d7f13abc1578bf6d918c90b0cd4 | ["MIT"] | null | null | null | src/components/markdown-renderer/markdown-renderer.css | web-component-org/markdown-renderer | aca2be75bacf8d7f13abc1578bf6d918c90b0cd4 | ["MIT"] | null | null | null | src/components/markdown-renderer/markdown-renderer.css | web-component-org/markdown-renderer | aca2be75bacf8d7f13abc1578bf6d918c90b0cd4 | ["MIT"] | null | null | null |
.markdown-renderer {
}
| 7.666667 | 20 | 0.695652 |
bb535ff7ba150be08b7c7439e10f420f9c089c98 | 1,997 | cs | C# | src/Uhuru.CloudFoundry.DEA/StagingInfo.cs | UhuruSoftware/vcap-dotnet | 8068834e62165fadc6727b0a9676a8f7363e38eb | [
"Apache-2.0",
"MIT"
] | 2 | 2016-05-25T20:24:06.000Z | 2018-05-15T12:10:21.000Z | src/Uhuru.CloudFoundry.DEA/StagingInfo.cs | UhuruSoftware/vcap-dotnet | 8068834e62165fadc6727b0a9676a8f7363e38eb | [
"Apache-2.0",
"MIT"
] | null | null | null | src/Uhuru.CloudFoundry.DEA/StagingInfo.cs | UhuruSoftware/vcap-dotnet | 8068834e62165fadc6727b0a9676a8f7363e38eb | [
"Apache-2.0",
"MIT"
] | null | null | null | using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using YamlDotNet.RepresentationModel;
using YamlDotNet.RepresentationModel.Serialization;
namespace Uhuru.CloudFoundry.DEA
{
class StagingInfo
{
public static string getStartCommand(string stagingInfoFile)
{
string startCommand;
using (var stream = new StreamReader(stagingInfoFile))
{
var yaml = new YamlStream();
yaml.Load(stream);
var startCommandScalar = new YamlScalarNode("start_command");
var elements = ((YamlMappingNode)yaml.Documents[0].RootNode).Children;
startCommand = elements[startCommandScalar].ToString();
}
return startCommand;
}
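        // Illustrative example (not from the original source): the staging info file read and
        // written by this class is expected to be a YAML mapping with the keys used by these
        // methods, e.g.
        //
        //   detected_buildpack: ruby
        //   start_command: bundle exec rackup config.ru -p $PORT
        //
        // The concrete values above are hypothetical; only the key names come from this class.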
public static string GetDetectedBuildpack(string stagingInfoFile)
{
string buildpack = string.Empty;
if (File.Exists(stagingInfoFile))
{
using (var stream = new StreamReader(stagingInfoFile))
{
var yaml = new YamlStream();
yaml.Load(stream);
var startCommandScalar = new YamlScalarNode("detected_buildpack");
var elements = ((YamlMappingNode)yaml.Documents[0].RootNode).Children;
buildpack = elements[startCommandScalar].ToString();
}
}
return buildpack;
}
public static void SaveBuildpackInfo(string stagingInfoFile, string detectedBuildpack, string startCommand)
{
var info = new
{
detected_buildpack = detectedBuildpack,
start_command = startCommand
};
using(TextWriter writer = File.CreateText(stagingInfoFile))
{
new Serializer().Serialize(writer, info);
}
}
}
}
| 30.723077 | 115 | 0.569354 |
2ffe7b0d97625dde8b31f2f082c39a509a37a416 | 326 | py | Python | Tests/image_tests/renderpasses/test_CameraAnimation.py | Nuclearfossil/Falcor | 667dc68a51bbaf87a2a063f4f0ef8928990ed203 | ["BSD-3-Clause"] | 62 | 2022-02-04T10:34:29.000Z | 2022-03-31T19:41:20.000Z | Tests/image_tests/renderpasses/test_CameraAnimation.py | Nuclearfossil/Falcor | 667dc68a51bbaf87a2a063f4f0ef8928990ed203 | ["BSD-3-Clause"] | 1 | 2021-02-18T16:38:38.000Z | 2021-02-18T16:38:38.000Z | Tests/image_tests/renderpasses/test_CameraAnimation.py | fromasmtodisasm/Falcor | 300aee1d7a9609e427f07e8887fd9bcb377426b0 | ["BSD-3-Clause"] | 4 | 2022-02-04T16:08:30.000Z | 2022-03-09T09:39:41.000Z |
from helpers import render_frames
from graphs.ForwardRendering import ForwardRendering as g
from falcor import *
g.unmarkOutput("ForwardLightingPass.motionVecs")
m.addGraph(g)
m.loadScene("grey_and_white_room/grey_and_white_room.fbx")
ctx = locals()
# default
render_frames(ctx, 'default', frames=[1,16,64,128,256])
exit()
| 23.285714 | 58 | 0.797546 |
052bb9f7930756e3fe4c7a7c6e43906d7b8c36b2 | 1,031 | css | CSS | public/css/pages/stats.css | RajicDenis/GRAS---Website | c20b292f1091756c589f2bd045aed9d3ef867c8a | ["MIT"] | null | null | null | public/css/pages/stats.css | RajicDenis/GRAS---Website | c20b292f1091756c589f2bd045aed9d3ef867c8a | ["MIT"] | null | null | null | public/css/pages/stats.css | RajicDenis/GRAS---Website | c20b292f1091756c589f2bd045aed9d3ef867c8a | ["MIT"] | null | null | null |
.site-title, .site-box {
background: #182C4F;
}
.st-txt {
width: 500px !important;
}
.hexa-box {
width: 50%;
}
#hexGrid {
overflow: visible;
}
.hexIn {
display: -webkit-box;
display: -ms-flexbox;
display: flex;
-webkit-box-pack: center;
-ms-flex-pack: center;
justify-content: center;
-webkit-box-align: center;
-ms-flex-align: center;
align-items: center;
-webkit-box-shadow: 0 0 5px black;
box-shadow: 0 0 5px black;
}
.hex-nbr {
font-family: 'Amaranth', sans-serif;
font-size: 45px;
color: white;
-webkit-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out;
}
.hexIn > p, .hexIn > h1 {
color: white;
font-family: 'Amaranth', sans-serif;
font-size: 1.2em;
}
.hex h1, .hex p {
background: rgba(8,8,8,0.5);
}
.red {
background: #ff003f;
}
.yellow {
background: #F1C500;
}
.blue {
background: #3575D3;
}
.green {
background: #80ff00;
}
.lightblue {
background: #18BD9B;
}
.purple {
background: #6E1BB5;
}
.pink {
background: #FF0084;
} | 16.901639 | 42 | 0.622696 |
a3083ba0b0b723cafc603b76bef3045e1c1ab3ae | 454 | ts | TypeScript | cypress/support/test-setup.ts | PinkyJie/cypress-auto-stub-example | 2aae49f35209fa0e4b2594154bbba17a54776f62 | ["MIT"] | 54 | 2018-11-20T11:35:21.000Z | 2022-02-28T02:16:42.000Z | cypress/support/test-setup.ts | PinkyJie/cypress-auto-stub-example | 2aae49f35209fa0e4b2594154bbba17a54776f62 | ["MIT"] | 21 | 2019-04-26T01:01:50.000Z | 2022-02-17T22:07:47.000Z | cypress/support/test-setup.ts | PinkyJie/cypress-auto-stub-example | 2aae49f35209fa0e4b2594154bbba17a54776f62 | ["MIT"] | 16 | 2018-08-20T21:09:24.000Z | 2022-03-19T20:17:06.000Z |
import {
setupCypressInterception,
writeRecordedAPIToFixture,
} from './utils/auto-stub';
import { getRunningConfig } from './utils/running-config';
before(() => {
cy._data = {};
getRunningConfig().then((runningConfig) => {
cy._config = runningConfig;
});
});
beforeEach(() => {
setupCypressInterception();
});
afterEach(() => {
const { isAPIRecording } = cy._config;
if (isAPIRecording) {
writeRecordedAPIToFixture();
}
});
| 18.916667 | 58 | 0.651982 |
3f8caf46b5d04f81ce9c3c83f9f660a27aa82a59 | 23,353 | php | PHP | zipfile/admin/includes/left.php | SonuVermaJattari/medkartteam | fbd678e91e7d47dbd790cacb4d2d93e706024616 | ["MIT"] | null | null | null | zipfile/admin/includes/left.php | SonuVermaJattari/medkartteam | fbd678e91e7d47dbd790cacb4d2d93e706024616 | ["MIT"] | null | null | null | zipfile/admin/includes/left.php | SonuVermaJattari/medkartteam | fbd678e91e7d47dbd790cacb4d2d93e706024616 | ["MIT"] | null | null | null |
<aside class="left-side sidebar-offcanvas">
<!-- sidebar: style can be found in sidebar.less -->
<section class="sidebar">
<!-- Sidebar user panel -->
<a href="../index.php" >
<div class="user-panel">
<div class="pull-left image" style="text-align: center;margin-top: -47px;/* background-color: #f4f4f4; */"> <img src="<?php echo $DB->logo(); ?>" style="width: 87%;height: 100%;border: 0px !important;" class="" alt="<?php echo $DB->projectname(); ?>" /> </div>
</div>
</a>
<ul class="sidebar-menu">
<li class="active"> <a href="index.php"> <i class="fa fa-dashboard"></i> <span>Dashboard</span> </a> </li>
<li class="treeview"><a href="#"> <i class="fa fa-home" aria-hidden="true"></i> <span>Home</span> <i class="fa fa-angle-left pull-right"></i> </a>
<ul class="treeview-menu">
<li> <a href="contact_address.php"> <i class="fa fa-mobile-phone"></i>Contact Us</a></li>
<li> <a href="home_text.php"> <i class="fa fa-edit"></i>Home Text</a></li>
<li><a href="slider_view.php"><i class="fa fa-picture-o"></i>Slider</a></li>
<li><a href="afterSlider_view.php"><i class="fa fa-picture-o"></i>After Slider view</a></li>
</ul>
</li>
<li class="treeview"> <a href="#"> <i class="fa fa-bars"></i> <span>Menu/Sub menus</span> <i class="fa fa-angle-left pull-right"></i> </a>
<ul class="treeview-menu">
<li><a href="menu_view.php"><i class="fa fa-angle-double-right"></i>Menu</a></li>
<li class="treeview"> <a href="#"> <i class="fa fa-bars"></i> <span>Sub menu</span> <i class="fa fa-angle-left pull-right"></i> </a>
<ul class="treeview-menu">
<li><a href="sub_menu.php"><i class="fa fa-angle-double-right"></i>ADD Sub Menu</a></li>
<?php
$array=array();
$resultu_menu = mysql_query("select * from menu where link='0' OR link='products.php'");
while($menu_res = mysql_fetch_array($resultu_menu)){
$key=$menu_res['id'];
$array[$key]=$menu_res['menu'];
}
// print_r($array);
$sqlu = "select DISTINCT menu from sub_menu where status='1' ";
$resultu = mysql_query($sqlu);
while($menu = mysql_fetch_array($resultu)){
$id=$menu['menu'];
$array[$id];
//echo $menu_res['menu'];
//echo $menu['menu'];
?>
<li><a href="sub_menu_view.php?q=<?php echo $menu['menu']; ?>"><i class="fa fa-angle-double-right"></i><?php echo $array[$id];?> View/Edit</a></li>
<?php }?>
<li class="treeview">
<a href="#">
<i class="fa fa-bars"></i>
<span>Sub Sub Menu</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="sub_sub_menu.php"><i class="fa fa-angle-double-right"></i>ADD Sub Sub Menu</a></li>
<?php
$array_sub=array();
$resultu_sub_menu = mysql_query("select * from sub_menu where link='0' OR link='products.php'");
while($sub_menu_res = mysql_fetch_array($resultu_sub_menu)){
$key1=$sub_menu_res['id'];
//$key2=$sub_menu_res['menu'];
$array_sub[$key1]['sub_menu']=$sub_menu_res['sub_menu'];
$array_sub[$key1]['menu']=$sub_menu_res['menu'];
}
$sqlu = "select DISTINCT sub_menu from sub_sub_menu where status='1' ";
$resultu = mysql_query($sqlu);
while($menu = mysql_fetch_array($resultu)){
//print_r($array_sub);
//echo $array_sub[$menu['sub_menu']]['sub_menu'].$array[$array_sub[$menu['sub_menu']]['menu']];
//echo $array_sub[$menu['sub_menu']];
$xyz= $array[$array_sub[$menu['sub_menu']]['menu']].'('.$array_sub[$menu['sub_menu']]['sub_menu'].')';
?>
<li><a href="sub_sub_menu_view.php?q=<?php echo $menu['sub_menu']; ?>"><i class="fa fa-angle-double-right"></i>
<?php echo $array[$array_sub[$menu['sub_menu']]['menu']].' ('.$array_sub[$menu['sub_menu']]['sub_menu'].')<br>'.str_repeat(' ', 10); ?>View/Edit</a></li>
<?php }?>
</ul>
</li>
</ul>
</li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Shipping Charges</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_shippingcharges.php"><i class="fa fa-angle-double-right"></i>Add Shipping Charges</a></li>
<li><a href="list_shippingcharges.php"><i class="fa fa-angle-double-right"></i>View/Edit Shipping Charges</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-wrench"></i>
<span>Filter Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Brand Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_brand.php"><i class="fa fa-angle-double-right"></i>Add Brand</a></li>
<li><a href="list_brand.php"><i class="fa fa-angle-double-right"></i>View/Edit Brand</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Discount Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_discount.php"><i class="fa fa-angle-double-right"></i>Add Discount</a></li>
<li><a href="list_discount.php"><i class="fa fa-angle-double-right"></i>View/Edit Discount</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Product Form Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_form.php"><i class="fa fa-angle-double-right"></i>Add Product Form</a></li>
<li><a href="list_form.php"><i class="fa fa-angle-double-right"></i>View/Edit Product Form</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>uses Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_uses.php"><i class="fa fa-angle-double-right"></i>Add uses</a></li>
<li><a href="list_uses.php"><i class="fa fa-angle-double-right"></i>View/Edit uses</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Age Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_age.php"><i class="fa fa-angle-double-right"></i>Add Age</a></li>
<li><a href="list_age.php"><i class="fa fa-angle-double-right"></i>View/Edit Age</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Gender Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_gender.php"><i class="fa fa-angle-double-right"></i>Add Gender</a></li>
<li><a href="list_gender.php"><i class="fa fa-angle-double-right"></i>View/Edit Gender</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Packing Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_pack.php"><i class="fa fa-angle-double-right"></i>Add Packing</a></li>
<li><a href="list_pack.php?q=1"><i class="fa fa-angle-double-right"></i>Packing</a></li>
<li><a href="list_pack.php?q=2"><i class="fa fa-angle-double-right"></i> Packet/Box</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Company Name</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_company_name.php"><i class="fa fa-angle-double-right"></i>Add Company Name</a></li>
<li><a href="list_company_name.php"><i class="fa fa-angle-double-right"></i>Company Name</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-pencil-square-o"></i>
<span>Solt Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_solt.php"><i class="fa fa-angle-double-right"></i>Add Solt</a></li>
<li><a href="list_solt.php"><i class="fa fa-angle-double-right"></i>Solt</a></li>
</ul>
</li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-bars"></i>
<span>Products</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="products.php"><i class="fa fa-angle-double-right"></i>ADD Products</a></li>
<li><a href="product_view.php"><i class="fa fa-angle-double-right"></i>View Products</a></li>
<!-- <li class="treeview"><a href="#"> <i class="fa fa-bars" aria-hidden="true"></i> <span>View Products</span> <i class="fa fa-angle-left pull-right"></i> </a>
<ul class="treeview-menu">
<?php
$menu_array=$DB->fetch_menu('menu','id,menu','menu');
$submenu_array=$DB->fetch_menu('sub_menu','id,sub_menu','sub_menu');
$subsubmenu_array=$DB->fetch_menu('sub_sub_menu','id,sub_sub_menu','sub_sub_menu');
$sqlu = "SELECT *, count(id) FROM `products` GROUP BY menu,sub_menu,sub_sub_menu ORDER by products.enum";
$resultu = mysql_query($sqlu);
while($menu = mysql_fetch_array($resultu)){
//$xyz= $array[$array_sub[$menu['sub_menu']]['menu']].'('.$array_sub[$menu['sub_menu']]['sub_menu'].')';
?>
<li><a href="products_view.php?menu=<?php echo $menu['menu']; ?>&&sub_menu=<?php echo $menu['sub_menu']; ?>&&sub_sub_menu=<?php echo $menu['sub_sub_menu']; ?>"><i class="fa fa-angle-double-right"></i>
<?php
if(!empty($menu['sub_sub_menu'])){
echo $menu_array[$menu['menu']].'('.$submenu_array[$menu['sub_menu']].')'.'('.$subsubmenu_array[$menu['sub_sub_menu']].')';
}elseif(!empty($menu['sub_menu'])){
echo $menu_array[$menu['menu']].'('.$submenu_array[$menu['sub_menu']].')';
}elseif(!empty($menu['menu'])){
echo $menu_array[$menu['menu']];
}
?> <?php echo '('.$menu['count(id)'].')'; ?></a></li>
<?php }?>
</ul>
</li> -->
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-wrench"></i>
<span>Price</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="price_view.php"><i class="fa fa-angle-double-right"></i> Price View</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-bars"></i>
<span>Blog</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="blog_ctgry_view.php"><i class="fa fa-angle-double-right"></i>Blog categories</a></li>
<?php
$page = mysql_query("select * from `blog_id` where status='1' order by sort");
while($pg = mysql_fetch_array($page)){
?>
<li><a href="blog_add.php?q=<?php echo $pg['id'].'&&name='.$pg['title']; ?>"><i class="fa fa-angle-double-right"></i><?php echo $pg['title']; ?></a></li>
<?php
}
?>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-sun-o"></i>
<span>Pages Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_pages.php"><i class="fa fa-angle-double-right"></i>Add pages</a></li>
<li><a href="list_pages.php"><i class="fa fa-angle-double-right"></i>view pages</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-sun-o"></i>
<span>Tie up with manufacturer</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="add_tieup.php"><i class="fa fa-angle-double-right"></i>Add Tie up with manufacturer</a></li>
<li><a href="list_tieup.php"><i class="fa fa-angle-double-right"></i>view Tie up</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-quote-left"></i>
<span>Testimonials</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="testimonials_menu.php"><i class="fa fa-angle-double-right"></i>Testimonials </a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-bars"></i>
<span>Slider</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="slider.php"><i class="fa fa-angle-double-right"></i>ADD Slider</a></li>
<li><a href="slider_view.php"><i class="fa fa-angle-double-right"></i>Slider View/Edit</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-share-square-o"></i>
<span>Registration</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="registration.php"><i class="fa fa-angle-double-right"></i>Registered Usre's</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-share-square-o"></i>
<span>Pharmacist Registration</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="pharmacist_registration.php"><i class="fa fa-angle-double-right"></i>Pharmacist Registered </a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-users"></i>
<span>Newsletter Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="list_newsletter.php"><i class="fa fa-angle-double-right"></i>List User Newsletter</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-suitcase"></i>
<span>Order Management</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="orders_list.php"><i class="fa fa-angle-double-right"></i>All Order's </a></li>
<li><a href="orders_list.php?mode=COD"><i class="fa fa-angle-double-right"></i> COD Report</a></li>
<li><a href="orders_list.php?mode=Paied"><i class="fa fa-angle-double-right"></i>Online Payment Report</a></li>
</ul>
</li>
<?php /*?> <li class="treeview"><a href="#"><i class="fa fa-hospital-o" aria-hidden="true"></i> <span>Rooms</span> <i class="fa fa-angle-left pull-right"></i> </a>
<ul class="treeview-menu">
<li><a href="facilities_view.php"><i class="fa fa-cutlery"></i>Facilities</a></li>
<li><a href="room_name_view.php"><i class="fa fa-building-o"></i>Room's</a></li>
</ul>
</li>
<li class="treeview"><a href="#"> <i class="fa fa-picture-o" aria-hidden="true"></i> <span>Gallery</span> <i class="fa fa-angle-left pull-right"></i> </a>
<ul class="treeview-menu">
<li><a href="gallery_view.php"><i class="fa fa-picture-o"></i>Gallery</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-share-square-o"></i>
<span>Social Links</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="list_socialicons.php"><i class="fa fa-angle-double-right"></i>Social Links</a></li>
</ul>
</li>
<li class="treeview">
<a href="#">
<i class="fa fa-share-square-o"></i>
<span>Users</span>
<i class="fa fa-angle-left pull-right"></i>
</a>
<ul class="treeview-menu">
<li><a href="Booked_users.php"><i class="fa fa-angle-double-right"></i>Booked</a></li>
</ul>
</li><?php */?>
</ul>
</section>
</aside>
| 47.756646 | 265 | 0.388558 |
dda1dc8332418b31d121375d800441a97a49287a | 3,788 | py | Python | sktime/transformations/bootstrap/tests/test_mbb.py | biologioholic/sktime | 9d0391a04b11d22bd783b452f01aa5b4529b41a2 | ["BSD-3-Clause"] | 1 | 2021-12-22T02:45:39.000Z | 2021-12-22T02:45:39.000Z | sktime/transformations/bootstrap/tests/test_mbb.py | biologioholic/sktime | 9d0391a04b11d22bd783b452f01aa5b4529b41a2 | ["BSD-3-Clause"] | null | null | null | sktime/transformations/bootstrap/tests/test_mbb.py | biologioholic/sktime | 9d0391a04b11d22bd783b452f01aa5b4529b41a2 | ["BSD-3-Clause"] | null | null | null |
#!/usr/bin/env python3 -u
# -*- coding: utf-8 -*-
# copyright: sktime developers, BSD-3-Clause License (see LICENSE file)
"""Unit tests for Bootrstapping transformers."""
__author__ = ["ltsaprounis"]
import pandas as pd
import pytest
from sktime.datasets import load_airline
from sktime.transformations.bootstrap import (
MovingBlockBootstrapTransformer,
STLBootstrapTransformer,
)
from sktime.transformations.bootstrap._mbb import (
_get_series_name,
_moving_block_bootstrap,
)
y = load_airline()
y_index = y.index
def test_bootstrapping_transformer_no_seasonal_period():
"""Tests that an exception is raised if sp<2."""
with pytest.raises(NotImplementedError) as ex:
transformer = STLBootstrapTransformer(sp=1)
transformer.fit(y)
assert "STLBootstrapTransformer does not support non-seasonal data" == ex.value
def test_bootstrapping_transformer_series_shorter_than_sp():
"""Tests that an exception is raised if sp>len(y)."""
with pytest.raises(ValueError) as ex:
transformer = STLBootstrapTransformer(sp=12)
transformer.fit(y.iloc[1:9])
msg = "STLBootstrapTransformer requires that sp is greater than the length of X"
assert msg == ex.value
@pytest.mark.parametrize(
"transformer_class", [STLBootstrapTransformer, MovingBlockBootstrapTransformer]
)
def test_block_length_exception(transformer_class):
"""Tests that a Value error is raised when block_length is smaller than len(X)."""
msg = (
f"{transformer_class.__name__} requires that block_length"
" is greater than the length of X"
)
with pytest.raises(ValueError) as ex:
transformer = transformer_class(block_length=12)
transformer.fit_transform(y.iloc[1:9])
assert msg == ex.value
index_return_actual_true = pd.MultiIndex.from_product(
[["actual", "synthetic_0", "synthetic_1"], y_index]
)
index_return_actual_false = pd.MultiIndex.from_product(
[["synthetic_0", "synthetic_1"], y_index]
)
@pytest.mark.parametrize(
"transformer_class, return_actual, expected_index",
[
(
MovingBlockBootstrapTransformer,
True,
index_return_actual_true,
),
(
STLBootstrapTransformer,
True,
index_return_actual_true,
),
(
MovingBlockBootstrapTransformer,
False,
index_return_actual_false,
),
(
STLBootstrapTransformer,
False,
index_return_actual_false,
),
],
)
def test_bootstrap_transformers_panel_format(
transformer_class, return_actual, expected_index
):
"""Tests that the final panel has the right index."""
transformer = transformer_class(n_series=2, return_actual=return_actual)
y_hat = transformer.fit_transform(y)
assert expected_index.equals(y_hat.index) and (y_hat.columns[0] == y.name)
@pytest.mark.parametrize(
"block_length, replacement", [(1, True), (5, True), (1, False), (5, False)]
)
def test_moving_block_bootstrap(block_length, replacement):
"""Tests for the _moving_block_bootstrap.
1. the output series has the same index as the input
2. basic checks for the distribution of the bootstrapped values
i.e. actual min/max >= bootstapped min/max
"""
y_hat = _moving_block_bootstrap(
y, block_length=block_length, replacement=replacement
)
assert (
y_hat.index.equals(y_index)
& (y_hat.max() <= y.max())
& (y_hat.min() >= y.min())
)
@pytest.mark.parametrize("ts", [y, y.to_frame()])
def test_get_series_name(ts):
"""Test _get_series_name returns the right string."""
assert _get_series_name(ts) == "Number of airline passengers"
| 29.826772 | 88 | 0.682946 |
dd9adb8aadde8adbcd4afd9371367ce0abdea115 | 467 | java | Java | src/com/winterwell/es/client/GetResponse.java | anitawoodruff/elasticsearch-java-client | 9292900509d147ce9e0abcc3d2390851893c5d57 | ["MIT"] | 10 | 2016-02-04T11:15:38.000Z | 2022-01-12T15:04:04.000Z | src/com/winterwell/es/client/GetResponse.java | anitawoodruff/elasticsearch-java-client | 9292900509d147ce9e0abcc3d2390851893c5d57 | ["MIT"] | 3 | 2017-10-16T16:27:49.000Z | 2020-04-28T08:02:32.000Z | src/com/winterwell/es/client/GetResponse.java | anitawoodruff/elasticsearch-java-client | 9292900509d147ce9e0abcc3d2390851893c5d57 | ["MIT"] | 3 | 2017-07-27T09:35:22.000Z | 2020-06-19T17:30:09.000Z |
package com.winterwell.es.client;
import java.util.Map;
public interface GetResponse extends IESResponse {
/**
* @return the _source object from a Get request.
* NB: This is aware of the just-the-source GetRequestBuilder option
*/
Map<String, Object> getSourceAsMap();
/**
* @return the _source object from a Get request.
* NB: This is aware of the just-the-source GetRequestBuilder option
*/
String getSourceAsString();
Long getVersion();
}
| 22.238095 | 70 | 0.721627 |
4bcae65b8890e4a34cd1548041c8505aff6896f6 | 25,909 | rs | Rust | pallets/mnt-token/src/tests.rs | minterest-finance/minterest-chain-node | b932f0dfd3cda93a3de30e87c8463ad23116fea1 | ["Apache-2.0"] | 10 | 2021-03-03T12:14:25.000Z | 2021-05-19T10:01:36.000Z | pallets/mnt-token/src/tests.rs | minterest-finance/minterest-chain-node | b932f0dfd3cda93a3de30e87c8463ad23116fea1 | ["Apache-2.0"] | 28 | 2021-03-02T08:09:08.000Z | 2021-07-27T10:52:46.000Z | pallets/mnt-token/src/tests.rs | minterest-finance/minterest-chain-node | b932f0dfd3cda93a3de30e87c8463ad23116fea1 | ["Apache-2.0"] | 1 | 2021-07-07T11:33:50.000Z | 2021-07-07T11:33:50.000Z |
#![cfg(test)]
use super::Error;
use crate::mock::*;
use crate::{MntPoolState, MntState};
use frame_support::{assert_noop, assert_ok};
use minterest_primitives::{Balance, CurrencyId, Rate};
use orml_traits::MultiCurrency;
use pallet_traits::MntManager;
use sp_arithmetic::FixedPointNumber;
use sp_runtime::{
traits::{One, Zero},
DispatchError::BadOrigin,
};
const MNT_PALLET_START_BALANCE: Balance = 1_000_000 * DOLLARS;
fn get_mnt_account_balance(user: AccountId) -> Balance {
Currencies::free_balance(MNT, &user)
}
/// Move flywheel and check borrower balance
fn check_borrower(
pool_id: CurrencyId,
borrower: AccountId,
expected_mnt_balance: Balance,
expected_mnt_in_storage: Balance,
) {
assert_ok!(MntToken::update_pool_mnt_borrow_index(pool_id));
assert_ok!(MntToken::distribute_borrower_mnt(pool_id, &borrower, false));
let pool_state = MntToken::mnt_pool_state_storage(pool_id).borrow_state;
let borrower_index = MntToken::mnt_borrower_index_storage(pool_id, borrower);
assert_eq!(borrower_index, pool_state.mnt_distribution_index);
assert_eq!(get_mnt_account_balance(borrower), expected_mnt_balance);
assert_eq!(MntToken::mnt_accrued_storage(borrower), expected_mnt_in_storage);
}
/// Move flywheel and check supplier balance
fn check_supplier_accrued(
pool_id: CurrencyId,
supplier: AccountId,
expected_mnt_balance: Balance,
expected_mnt_in_storage: Balance,
) {
assert_ok!(MntToken::update_pool_mnt_supply_index(pool_id));
assert_ok!(MntToken::distribute_supplier_mnt(pool_id, &supplier, false));
assert_eq!(get_mnt_account_balance(supplier), expected_mnt_balance);
assert_eq!(MntToken::mnt_accrued_storage(supplier), expected_mnt_in_storage);
}
#[test]
fn distribute_mnt_to_borrower_with_threshold() {
ExtBuilder::default()
.enable_minting_for_all_pools(10 * DOLLARS)
.pool_borrow_underlying(DOT, 150_000 * DOLLARS)
.mnt_account_balance(MNT_PALLET_START_BALANCE)
.set_mnt_claim_threshold(20)
.pool_user_data(
DOT,
ALICE,
150_000 * DOLLARS,
Rate::saturating_from_rational(15, 10), // because pool borrow index is hardcoded to 1.5 too
true,
)
.build()
.execute_with(|| {
// Award for ALICE is 10 per block
// Threshold is 20
// So at the first step awarded tokens should be kept in internal storage
// At the second it should be transferred to ALICE and so on.
let dot_speed = 10 * DOLLARS;
assert_eq!(MntToken::mnt_speed_storage(DOT), dot_speed);
assert_ok!(MntToken::update_pool_mnt_borrow_index(DOT));
assert_ok!(MntToken::distribute_borrower_mnt(DOT, &ALICE, false));
check_borrower(DOT, ALICE, 0, 0);
System::set_block_number(2);
			// one block's award kept in internal storage (still below the threshold)
check_borrower(DOT, ALICE, 0, dot_speed);
System::set_block_number(3);
			// two blocks' award reaches the threshold and is transferred to the account balance
check_borrower(DOT, ALICE, dot_speed * 2, 0);
System::set_block_number(4);
			// one block's award in internal storage and two blocks' award on the account balance
check_borrower(DOT, ALICE, dot_speed * 2, dot_speed);
System::set_block_number(5);
			// four blocks' award on the account balance
check_borrower(DOT, ALICE, dot_speed * 4, 0);
assert_eq!(
MNT_PALLET_START_BALANCE - get_mnt_account_balance(ALICE),
get_mnt_account_balance(MntToken::get_account_id())
)
});
}
#[test]
fn distribute_mnt_to_supplier_with_threshold() {
ExtBuilder::default()
.enable_minting_for_all_pools(10 * DOLLARS)
.mnt_account_balance(MNT_PALLET_START_BALANCE)
.set_mnt_claim_threshold(20)
.pool_borrow_underlying(DOT, 100 * DOLLARS)
.build()
.execute_with(|| {
// Award for ALICE is 10 per block
// Threshold is 20
// So at the first step awarded tokens should be kept in internal storage
// At the second it should be transferred to ALICE and so on.
let dot_speed = 10 * DOLLARS;
assert_eq!(MntToken::mnt_speed_storage(DOT), dot_speed);
// set total issuance
Currencies::deposit(MDOT, &ALICE, 100 * DOLLARS).unwrap();
check_supplier_accrued(DOT, ALICE, 0, dot_speed);
System::set_block_number(2);
check_supplier_accrued(DOT, ALICE, dot_speed * 2, 0);
System::set_block_number(3);
check_supplier_accrued(DOT, ALICE, dot_speed * 2, dot_speed);
System::set_block_number(4);
check_supplier_accrued(DOT, ALICE, dot_speed * 4, 0);
assert_eq!(
MNT_PALLET_START_BALANCE - get_mnt_account_balance(ALICE),
get_mnt_account_balance(MntToken::get_account_id())
)
});
}
#[test]
fn distribute_mnt_to_supplier_from_different_pools() {
ExtBuilder::default()
.mnt_enabled_pools(vec![(DOT, 2 * DOLLARS), (KSM, 8 * DOLLARS)])
.mnt_account_balance(MNT_PALLET_START_BALANCE)
.set_mnt_claim_threshold(0)
.pool_borrow_underlying(DOT, 100 * DOLLARS)
.pool_borrow_underlying(KSM, 100 * DOLLARS)
.build()
.execute_with(|| {
// Check accruing mnt tokens from two pools for supplier
let dot_mnt_speed = 2 * DOLLARS;
let ksm_mnt_speed = 8 * DOLLARS;
assert_eq!(MntToken::mnt_speed_storage(DOT), dot_mnt_speed);
assert_eq!(MntToken::mnt_speed_storage(KSM), ksm_mnt_speed);
// set total issuance
Currencies::deposit(MDOT, &ALICE, 100 * DOLLARS).unwrap();
Currencies::deposit(MKSM, &ALICE, 100 * DOLLARS).unwrap();
check_supplier_accrued(KSM, ALICE, ksm_mnt_speed, 0);
check_supplier_accrued(DOT, ALICE, ksm_mnt_speed + dot_mnt_speed, 0);
// The Block number wasn't changed, so we should get the same result without errors
check_supplier_accrued(DOT, ALICE, ksm_mnt_speed + dot_mnt_speed, 0);
assert_eq!(
MNT_PALLET_START_BALANCE - get_mnt_account_balance(ALICE),
get_mnt_account_balance(MntToken::get_account_id())
)
});
}
#[test]
fn distribute_mnt_to_borrower_from_different_pools() {
ExtBuilder::default()
.enable_minting_for_all_pools(5 * DOLLARS)
.pool_borrow_underlying(DOT, 150_000 * DOLLARS)
.pool_borrow_underlying(KSM, 150_000 * DOLLARS)
.mnt_account_balance(MNT_PALLET_START_BALANCE)
.set_mnt_claim_threshold(0)
.pool_user_data(
DOT,
ALICE,
150_000 * DOLLARS,
Rate::saturating_from_rational(15, 10), // because pool borrow index is hardcoded to 1.5
true,
)
.pool_user_data(
KSM,
ALICE,
150_000 * DOLLARS,
Rate::saturating_from_rational(15, 10), // because pool borrow index is hardcoded to 1.5
true,
)
.build()
.execute_with(|| {
// First interaction with protocol for distributors.
// This is a starting point to earn MNT token
assert_ok!(MntToken::update_pool_mnt_borrow_index(DOT));
assert_ok!(MntToken::update_pool_mnt_borrow_index(KSM));
assert_ok!(MntToken::distribute_borrower_mnt(KSM, &ALICE, false));
assert_ok!(MntToken::distribute_borrower_mnt(DOT, &ALICE, false));
System::set_block_number(2);
// Move flywheel
assert_ok!(MntToken::update_pool_mnt_borrow_index(DOT));
assert_ok!(MntToken::update_pool_mnt_borrow_index(KSM));
assert_ok!(MntToken::distribute_borrower_mnt(KSM, &ALICE, false));
assert_ok!(MntToken::distribute_borrower_mnt(DOT, &ALICE, false));
// Total distributed to Alice: 5 from DOT + 5 from KSM
assert_eq!(get_mnt_account_balance(ALICE), 10 * DOLLARS);
let dot_mnt_speed = 5 * DOLLARS;
// Check event about distributing mnt tokens by DOT pool
let borrower_index = MntToken::mnt_borrower_index_storage(DOT, ALICE);
let event = Event::MntToken(crate::Event::MntDistributedToBorrower(
DOT,
ALICE,
dot_mnt_speed,
borrower_index,
));
assert!(System::events().iter().any(|record| record.event == event));
assert_eq!(
MNT_PALLET_START_BALANCE - get_mnt_account_balance(ALICE),
get_mnt_account_balance(MntToken::get_account_id())
)
});
}
#[test]
fn distribute_borrowers_mnt() {
ExtBuilder::default()
.enable_minting_for_all_pools(10 * DOLLARS)
.mnt_account_balance(MNT_PALLET_START_BALANCE)
.set_mnt_claim_threshold(0)
.pool_borrow_underlying(DOT, 150_000 * DOLLARS)
.pool_user_data(
DOT,
ALICE,
30_000 * DOLLARS,
Rate::saturating_from_rational(15, 10), // because pool borrow index is hardcoded to 1.5
true,
)
.pool_user_data(
DOT,
BOB,
120_000 * DOLLARS,
Rate::saturating_from_rational(15, 10), // because pool borrow index is hardcoded to 1.5
true,
)
.build()
.execute_with(|| {
/*
Pool speed equals to 10
Pool total borrow is 150_000. Alice borrowed 30_000 and BOB - 120_000
This is a part of liquidity which belongs to Alice.
30 / 150 = 0.2.
10(mnt per block) * 0.2(alice part) = 2.
This is how many MNT tokens per block Alice should acquire as a borrower.
For Bob: 120 / 150 = 0.8; 0.8 * 10 = 8
First interaction with protocol for distributors.
			This is a starting point to earn MNT tokens.
*/
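			// In general terms (an illustrative restatement of the comment above, not from the original file):
			//   borrower_share = borrower_borrow / pool_total_borrow
			//   mnt_per_block  = pool_mnt_speed * borrower_share
			// ALICE: 10 * (30_000 / 150_000) = 2 MNT per block; BOB: 10 * (120_000 / 150_000) = 8 MNT per block.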
assert_ok!(MntToken::update_pool_mnt_borrow_index(DOT));
assert_ok!(MntToken::distribute_borrower_mnt(DOT, &ALICE, false));
assert_ok!(MntToken::distribute_borrower_mnt(DOT, &BOB, false));
System::set_block_number(2);
check_borrower(DOT, ALICE, 2 * DOLLARS, 0);
check_borrower(DOT, BOB, 8 * DOLLARS, 0);
assert_eq!(
MNT_PALLET_START_BALANCE - get_mnt_account_balance(ALICE) - get_mnt_account_balance(BOB),
get_mnt_account_balance(MntToken::get_account_id())
)
});
}
#[test]
fn distribute_borrower_mnt() {
ExtBuilder::default()
.enable_minting_for_all_pools(12 * DOLLARS)
.pool_borrow_underlying(DOT, 150_000 * DOLLARS)
.mnt_account_balance(MNT_PALLET_START_BALANCE)
.set_mnt_claim_threshold(0)
.pool_user_data(
DOT,
ALICE,
150_000 * DOLLARS,
Rate::saturating_from_rational(15, 10), // because pool borrow index is hardcoded to 1.5 too
true,
)
.build()
.execute_with(|| {
assert_eq!(
MNT_PALLET_START_BALANCE,
get_mnt_account_balance(MntToken::get_account_id())
);
let dot_speed = 12 * DOLLARS;
// First interaction with protocol for distributors.
// This is a starting point to earn MNT token
assert_ok!(MntToken::update_pool_mnt_borrow_index(DOT));
assert_ok!(MntToken::distribute_borrower_mnt(DOT, &ALICE, false));
System::set_block_number(2);
// Alice account borrow balance is 150_000
check_borrower(DOT, ALICE, dot_speed, 0);
// block_delta == 2
System::set_block_number(4);
check_borrower(DOT, ALICE, dot_speed * 3, 0);
// check twice, move flywheel again
check_borrower(DOT, ALICE, dot_speed * 3, 0);
assert_eq!(
MNT_PALLET_START_BALANCE - get_mnt_account_balance(ALICE),
get_mnt_account_balance(MntToken::get_account_id())
)
});
}
#[test]
fn test_update_pool_mnt_borrow_index() {
// TODO: check later
ExtBuilder::default()
.enable_minting_for_all_pools(10 * DOLLARS)
.pool_borrow_underlying(DOT, 15_000 * DOLLARS)
.pool_borrow_underlying(ETH, 30_000 * DOLLARS)
.pool_borrow_underlying(KSM, 45_000 * DOLLARS)
.pool_borrow_underlying(BTC, 60_000 * DOLLARS)
.build()
.execute_with(|| {
let initial_index = Rate::one();
System::set_block_number(1);
let check_borrow_index = |underlying_id: CurrencyId, pool_mnt_speed: Balance, total_borrow: Balance| {
MntToken::update_pool_mnt_borrow_index(underlying_id).unwrap();
// 1.5 current borrow_index. I use 15 in this function, that`s why I make total_borrow * 10
let borrow_total_amount = Rate::saturating_from_rational(total_borrow * 10, 15);
let expected_index = initial_index + Rate::from_inner(pool_mnt_speed) / borrow_total_amount;
let pool_state = MntToken::mnt_pool_state_storage(underlying_id);
assert_eq!(pool_state.borrow_state.mnt_distribution_index, expected_index);
};
check_borrow_index(DOT, 10 * DOLLARS, 15_000);
check_borrow_index(ETH, 10 * DOLLARS, 30_000);
check_borrow_index(KSM, 10 * DOLLARS, 45_000);
check_borrow_index(BTC, 10 * DOLLARS, 60_000);
});
}
#[test]
fn test_update_pool_mnt_borrow_index_simple() {
ExtBuilder::default()
.enable_minting_for_all_pools(1 * DOLLARS)
// total borrows needs to calculate mnt_speeds
.pool_borrow_underlying(DOT, 150_000 * DOLLARS)
.build()
.execute_with(|| {
/*
* Minting was enabled when block_number was equal to 0. Here block_number == 1.
So block_delta = 1
Input parameters: dot_speed = 1,
			pool_borrowed = 150_000,
pool_borrow_index = 1.5,
mnt_acquired = delta_blocks * dot_speed = 1
This is how much currency was borrowed without interest
borrow_total_amount = pool_borrowed(150000) / pool_borrow_index(1.5) = 100000
How much MNT tokens were earned per block
ratio = mnt_acquired / borrow_total_amount = 0.00001
mnt_borrow_index = mnt_borrow_index(1 as initial value) + ratio(0.00001) = 1.00001
*ratio is amount of MNT tokens for 1 borrowed token
*/
MntToken::update_pool_mnt_borrow_index(DOT).unwrap();
let pool_state = MntToken::mnt_pool_state_storage(DOT);
assert_eq!(
pool_state.borrow_state.mnt_distribution_index,
Rate::saturating_from_rational(100001, 100000)
);
});
}
#[test]
fn test_distribute_mnt_tokens_to_suppliers() {
ExtBuilder::default()
.enable_minting_for_all_pools(10 * DOLLARS)
.mnt_account_balance(MNT_PALLET_START_BALANCE)
.set_mnt_claim_threshold(0)
// total borrows needs to calculate mnt_speeds
.pool_borrow_underlying(DOT, 50 * DOLLARS)
.build()
.execute_with(|| {
/*
Minting was enabled when block_number was equal to 0. Here block_number == 1.
So block_delta = 1
Input parameters: 10 mnt speed per block for every pool.
Total issuance is 100. Alice has 20 MDOT and BOB 80 MDOT.
			This is Alice's share of the whole circulating wrapped currency:
20 / 100 = 0.2.
10(mnt per block) * 0.2(alice part) = 2.
			This is how many MNT tokens Alice should acquire per block as a supplier.
For Bob: 80 / 100 = 0.8; 0.8 * 10 = 8
*/
let alice_balance = 20 * DOLLARS;
let bob_balance = 80 * DOLLARS;
let alice_award_per_block = 2 * DOLLARS;
let bob_award_per_block = 8 * DOLLARS;
// set total issuance
Currencies::deposit(MDOT, &ALICE, alice_balance).unwrap();
Currencies::deposit(MDOT, &BOB, bob_balance).unwrap();
let move_flywheel = || {
MntToken::update_pool_mnt_supply_index(DOT).unwrap();
MntToken::distribute_supplier_mnt(DOT, &ALICE, false).unwrap();
MntToken::distribute_supplier_mnt(DOT, &BOB, false).unwrap();
};
let check_supplier_award =
|supplier_id: AccountId, distributed_amount: Balance, expected_user_mnt_balance: Balance| {
let pool_state = MntToken::mnt_pool_state_storage(DOT);
let supplier_index = MntToken::mnt_supplier_index_storage(DOT, supplier_id).unwrap();
assert_eq!(supplier_index, pool_state.supply_state.mnt_distribution_index);
assert_eq!(get_mnt_account_balance(supplier_id), expected_user_mnt_balance);
// it should be 0 because threshold is 0
assert_eq!(MntToken::mnt_accrued_storage(supplier_id), 0);
let supplier_index = MntToken::mnt_supplier_index_storage(DOT, supplier_id).unwrap();
let event = Event::MntToken(crate::Event::MntDistributedToSupplier(
DOT,
supplier_id,
distributed_amount,
supplier_index,
));
assert!(System::events().iter().any(|record| record.event == event));
};
/* -------TEST SCENARIO------- */
move_flywheel();
check_supplier_award(ALICE, alice_award_per_block, alice_award_per_block);
check_supplier_award(BOB, bob_award_per_block, bob_award_per_block);
// Go from first block to third
System::set_block_number(3);
let current_block = 3;
let block_delta = 2;
move_flywheel();
check_supplier_award(
BOB,
bob_award_per_block * block_delta,
bob_award_per_block * current_block,
);
check_supplier_award(
ALICE,
alice_award_per_block * block_delta,
alice_award_per_block * current_block,
);
assert_eq!(
MNT_PALLET_START_BALANCE - get_mnt_account_balance(ALICE) - get_mnt_account_balance(BOB),
get_mnt_account_balance(MntToken::get_account_id())
)
});
}
#[test]
fn test_update_pool_mnt_supply_index() {
ExtBuilder::default()
.enable_minting_for_all_pools(2 * DOLLARS)
// total borrows needs to calculate mnt_speeds
.pool_borrow_underlying(DOT, 50 * DOLLARS)
.pool_borrow_underlying(ETH, 50 * DOLLARS)
.pool_borrow_underlying(KSM, 50 * DOLLARS)
.pool_borrow_underlying(BTC, 50 * DOLLARS)
.build()
.execute_with(|| {
//
// * Minting was enabled when block_number was equal to 0. Here block_number == 1.
// So block_delta = 1
//
// set total issuance
let mdot_total_issuance = 10 * DOLLARS;
let meth_total_issuance = 20 * DOLLARS;
let mksm_total_issuance = 30 * DOLLARS;
let mbtc_total_issuance = 40 * DOLLARS;
Currencies::deposit(MDOT, &ALICE, mdot_total_issuance).unwrap();
Currencies::deposit(METH, &ALICE, meth_total_issuance).unwrap();
Currencies::deposit(MKSM, &ALICE, mksm_total_issuance).unwrap();
Currencies::deposit(MBTC, &ALICE, mbtc_total_issuance).unwrap();
let check_supply_index = |underlying_id: CurrencyId, mnt_speed: Balance, total_issuance: Balance| {
MntToken::update_pool_mnt_supply_index(underlying_id).unwrap();
let pool_state = MntToken::mnt_pool_state_storage(underlying_id);
assert_eq!(
pool_state.supply_state.mnt_distribution_index,
Rate::one() + Rate::from_inner(mnt_speed) / Rate::from_inner(total_issuance)
);
assert_eq!(pool_state.supply_state.index_updated_at_block, 1);
};
check_supply_index(DOT, 2 * DOLLARS, mdot_total_issuance);
check_supply_index(KSM, 2 * DOLLARS, mksm_total_issuance);
check_supply_index(ETH, 2 * DOLLARS, meth_total_issuance);
check_supply_index(BTC, 2 * DOLLARS, mbtc_total_issuance);
});
}
#[test]
fn test_update_pool_mnt_supply_index_simple() {
ExtBuilder::default()
// total_borrow shouldn't be zero at least for one market to calculate mnt speeds
.pool_borrow_underlying(ETH, 150_000 * DOLLARS)
.build()
.execute_with(|| {
// Input parameters:
// supply_state.block_number = 1, supply_state.index = 1,
// eth_speed = 10, total_supply = 20
// set total_issuance to 20
Currencies::deposit(METH, &ALICE, 20 * DOLLARS).unwrap();
assert_ok!(MntToken::set_speed(admin_origin(), ETH, 10 * DOLLARS));
System::set_block_number(2);
MntToken::update_pool_mnt_supply_index(ETH).unwrap();
let pool_state = MntToken::mnt_pool_state_storage(ETH);
// block_delta = current_block(2) - supply_state.block_number(1) = 1
// mnt_accrued = block_delta(1) * eth_speed(10) = 10
// ratio = mnt_accrued(10) / total_supply(20) = 0.5
// supply_state.index = supply_state.index(1) + ratio(0.5) = 1.5
// supply_state.block_number = current_block = 2
assert_eq!(
pool_state.supply_state.mnt_distribution_index,
Rate::saturating_from_rational(15, 10)
);
assert_eq!(pool_state.supply_state.index_updated_at_block, 2);
});
}
#[test]
fn test_minting_enable_disable() {
let check_mnt_storage = |pool_id, speed, borrow_index, supply_index, block_number| {
assert_eq!(MntToken::mnt_speed_storage(pool_id), speed);
assert_eq!(
MntToken::mnt_pool_state_storage(pool_id),
MntPoolState {
supply_state: MntState {
mnt_distribution_index: supply_index,
index_updated_at_block: block_number
},
borrow_state: MntState {
mnt_distribution_index: borrow_index,
index_updated_at_block: block_number
}
}
);
};
ExtBuilder::default()
.user_balance(ADMIN, MDOT, 100 * DOLLARS)
.pool_borrow_underlying(DOT, 50 * DOLLARS)
.pool_borrow_underlying(KSM, 50 * DOLLARS)
.mnt_account_balance(100 * DOLLARS)
.build()
.execute_with(|| {
// Try to disable minting for invalid underlying asset id
assert_noop!(
MntToken::set_speed(admin_origin(), MNT, Balance::zero()),
Error::<Runtime>::NotValidUnderlyingAssetId
);
// The dispatch origin of this call must be Root or 2/3 MinterestCouncil.
assert_noop!(MntToken::set_speed(alice_origin(), DOT, 1 * DOLLARS), BadOrigin);
// Unable to enable minting for non existing pool
assert_noop!(
MntToken::set_speed(admin_origin(), ETH, 2 * DOLLARS),
Error::<Runtime>::PoolNotFound
);
// Enable the distribution of MNT tokens in the DOT liquidity pool
let dot_speed = 2 * DOLLARS;
assert_ok!(MntToken::set_speed(admin_origin(), DOT, dot_speed));
let speed_changed_event = Event::MntToken(crate::Event::MntSpeedChanged(DOT, dot_speed));
assert!(System::events()
.iter()
.any(|record| record.event == speed_changed_event));
check_mnt_storage(DOT, dot_speed, Rate::one(), Rate::one(), 1);
System::set_block_number(5);
// Unable to disable an already disabled pool
assert_noop!(
MntToken::set_speed(admin_origin(), KSM, Balance::zero()),
Error::<Runtime>::MntMintingNotEnabled
);
// Enable the distribution of MNT tokens in the KSM liquidity pool
let ksm_speed = 2 * DOLLARS;
assert_ok!(MntToken::set_speed(admin_origin(), KSM, ksm_speed));
let speed_changed_event = Event::MntToken(crate::Event::MntSpeedChanged(KSM, ksm_speed));
assert!(System::events()
.iter()
.any(|record| record.event == speed_changed_event));
check_mnt_storage(KSM, ksm_speed, Rate::one(), Rate::one(), 5);
System::set_block_number(10);
// Disable the distribution of MNT tokens in the DOT liquidity pool
assert_ok!(MntToken::set_speed(admin_origin(), DOT, Balance::zero()));
let speed_changed_event = Event::MntToken(crate::Event::MntSpeedChanged(DOT, Balance::zero()));
assert!(System::events()
.iter()
.any(|record| record.event == speed_changed_event));
assert!(!crate::MntSpeedStorage::<Runtime>::contains_key(DOT));
check_mnt_storage(
DOT,
Balance::zero(),
Rate::from_inner(1_540000000000000000),
Rate::from_inner(1_180000000000000000),
10,
);
System::set_block_number(15);
assert_ok!(MntToken::update_pool_mnt_supply_index(DOT));
assert_ok!(MntToken::update_pool_mnt_borrow_index(DOT));
// Check that indices hadn't been updated while distribution is off
check_mnt_storage(
DOT,
Balance::zero(),
Rate::from_inner(1_540000000000000000),
Rate::from_inner(1_180000000000000000),
10,
);
System::set_block_number(20);
// Enable the distribution of MNT tokens in the DOT liquidity pool
// Check that the indexes have been saved and the block number has changed.
assert_ok!(MntToken::set_speed(admin_origin(), DOT, dot_speed));
check_mnt_storage(
DOT,
dot_speed,
Rate::from_inner(1_540000000000000000),
Rate::from_inner(1_180000000000000000),
20,
);
// Change the mnt_speed parameter for KSM liquidity pool.
// Check that the indexes have been updated and block number has changed.
assert_ok!(MntToken::set_speed(admin_origin(), KSM, ksm_speed + 1_u128));
check_mnt_storage(
KSM,
ksm_speed + 1_u128,
Rate::from_inner(1_900000000000000000),
Rate::from_inner(1_000000000000000000),
20,
);
});
}
#[test]
fn transfer_mnt_should_work() {
ExtBuilder::default()
.set_mnt_claim_threshold(20)
.mnt_account_balance(MNT_PALLET_START_BALANCE)
.build()
.execute_with(|| {
// distribute_all == false, user_accrued < threshold:
// we do not perform the transfer.
let first_transfer = 10 * DOLLARS;
assert_ok!(MntToken::transfer_mnt(&ALICE, first_transfer, false));
assert_eq!(
get_mnt_account_balance(MntToken::get_account_id()),
MNT_PALLET_START_BALANCE
);
assert_eq!(get_mnt_account_balance(ALICE), Balance::zero());
assert_eq!(MntToken::mnt_accrued_storage(ALICE), first_transfer);
// distribute_all == true, user_accrued > threshold:
// we perform the transfer.
let second_transfer = 200 * DOLLARS;
assert_ok!(MntToken::transfer_mnt(&ALICE, second_transfer, true));
assert_eq!(
get_mnt_account_balance(MntToken::get_account_id()),
MNT_PALLET_START_BALANCE - second_transfer
);
assert_eq!(get_mnt_account_balance(ALICE), second_transfer);
assert_eq!(MntToken::mnt_accrued_storage(ALICE), Balance::zero());
// distribute_all == true, user_accrued == 0:
// we do not perform the transfer.
let third_transfer = Balance::zero();
assert_ok!(MntToken::transfer_mnt(&ALICE, third_transfer, true));
assert_eq!(
get_mnt_account_balance(MntToken::get_account_id()),
MNT_PALLET_START_BALANCE - second_transfer
);
assert_eq!(get_mnt_account_balance(ALICE), second_transfer);
assert_eq!(MntToken::mnt_accrued_storage(ALICE), Balance::zero());
// distribute_all == true, user_accrued > threshold, user_accrued > MNT_pallet_balance:
// we do not perform the transfer.
let fourth_transfer = 10_000_000 * DOLLARS;
assert_ok!(MntToken::transfer_mnt(&ALICE, fourth_transfer, true));
assert_eq!(
get_mnt_account_balance(MntToken::get_account_id()),
MNT_PALLET_START_BALANCE - second_transfer
);
assert_eq!(get_mnt_account_balance(ALICE), second_transfer);
assert_eq!(MntToken::mnt_accrued_storage(ALICE), Balance::zero());
// distribute_all == true, user_accrued < threshold:
// we perform the transfer.
let fifth_transfer = 10 * DOLLARS;
assert_ok!(MntToken::transfer_mnt(&ALICE, first_transfer, true));
assert_eq!(
get_mnt_account_balance(MntToken::get_account_id()),
MNT_PALLET_START_BALANCE - second_transfer - fifth_transfer
);
assert_eq!(get_mnt_account_balance(ALICE), second_transfer + fifth_transfer);
assert_eq!(MntToken::mnt_accrued_storage(ALICE), Balance::zero());
// distribute_all == false, user_accrued > threshold:
// we perform the transfer.
let sixth_transfer = 500 * DOLLARS;
assert_ok!(MntToken::transfer_mnt(&ALICE, sixth_transfer, false));
assert_eq!(
get_mnt_account_balance(MntToken::get_account_id()),
MNT_PALLET_START_BALANCE - second_transfer - fifth_transfer - sixth_transfer
);
assert_eq!(
get_mnt_account_balance(ALICE),
second_transfer + fifth_transfer + sixth_transfer
);
assert_eq!(MntToken::mnt_accrued_storage(ALICE), Balance::zero());
});
}
| 34.499334 | 105 | 0.72716 |
74b519a7adf484769c802a530eb4d349cbb3f3b1 | 2,262 | css | CSS | app/public/css/error-page.css | gkapkowski/torus-website | a038e6f605b2c8c1fa418f2dbab97ac197689a3b | [
"MIT"
] | 2 | 2021-05-24T04:28:17.000Z | 2021-09-04T01:07:21.000Z | app/public/css/error-page.css | gkapkowski/torus-website | a038e6f605b2c8c1fa418f2dbab97ac197689a3b | [
"MIT"
] | null | null | null | app/public/css/error-page.css | gkapkowski/torus-website | a038e6f605b2c8c1fa418f2dbab97ac197689a3b | [
"MIT"
] | 3 | 2020-10-29T15:14:02.000Z | 2021-11-27T12:19:46.000Z | body {
margin: 0;
padding: 0;
font-family: Lato, sans-serif;
line-height: 1.2em;
color: #4a4a4a;
background-image: url('/images/footer_waves.png');
background-repeat: no-repeat;
background-position: center bottom;
}
.text-center {
text-align: center;
}
.error-container {
display: flex;
flex-direction: column;
height: 100vh;
width: 100%;
}
.error-header {
padding: 24px;
}
.error-content {
display: flex;
flex: 1;
justify-content: center;
align-items: center;
width: 100%;
}
.error-content .error-column {
padding: 0 10%;
}
.error-content .error-row {
margin-top: -90px;
}
.error-content .people {
width: 100%;
}
.error-content .page-header {
font-size: 26px;
margin: 0;
line-height: 1.2em;
}
.error-content .page-desc {
margin-top: 10px;
}
.error-content .home-button {
font-family: Roboto, sans-serif;
border: 0;
background-image: linear-gradient(to right, #5396d5, #295dab);
/* padding: 15px 35px; */
padding: 10px 25px;
border-radius: 8px;
color: white;
box-shadow: 0px 2px 5px 0px rgba(0, 0, 0, 0.5);
}
/* Desktop Small */
@media only screen and (min-width: 599px) {
.error-content {
max-width: 900px;
margin: auto;
}
.error-content .error-row {
flex: 1;
margin-top: -90px;
display: flex;
align-items: center;
}
.error-content .error-column {
padding: 0 20px;
flex: 1;
}
.error-content .people {
width: 100%;
}
.error-content .page-header {
font-size: 30px;
}
.error-content .page-desc {
font-size: 20px;
line-height: 1.2em;
}
.error-content .home-button {
margin-top: 10px;
padding: 10px 25px;
font-size: 16px;
}
}
/* Desktop Medium */
@media only screen and (min-width: 960px) {
.error-header {
padding: 50px;
}
.error-content .error-row {
margin-top: -120px;
}
.error-content .people {
width: 90%;
}
.error-content .page-header {
font-size: 40px;
}
.error-content .page-desc {
font-size: 28px;
}
.error-content .home-button {
padding: 15px 35px;
font-size: 21px;
}
}
/* Desktop Large */
@media only screen and (min-width: 1264px) {
body {
background-size: 100%;
}
.error-content {
max-width: 1185px;
}
}
| 15.6 | 64 | 0.614943 |
a981a745ec6dbb530e783b25040f4fbe81178fff | 1,048 | html | HTML | tests/test.html | project-oak/arcsjs-core | ec70f5330136f20bc6e097c1c42c08ef2c604b8e | [
"BSD-3-Clause"
] | 1 | 2022-03-10T20:53:13.000Z | 2022-03-10T20:53:13.000Z | tests/test.html | project-oak/arcsjs-core | ec70f5330136f20bc6e097c1c42c08ef2c604b8e | [
"BSD-3-Clause"
] | 1 | 2022-03-31T06:14:00.000Z | 2022-03-31T06:14:00.000Z | tests/test.html | project-oak/arcsjs-core | ec70f5330136f20bc6e097c1c42c08ef2c604b8e | [
"BSD-3-Clause"
] | null | null | null | <!doctype html>
<!--
Copyright (c) 2022 Google LLC
Use of this source code is governed by a BSD-style
license that can be found in the LICENSE file or at
https://developers.google.com/open-source/licenses/bsd
-->
<style>
body, pre {
font-family: sans-serif;
/* font-family: monospace; */
font-size: 14px;
}
entry {
display: block;
border-bottom: 1px dotted silver;
white-space: pre;
padding: 4px 0;
}
[system] {
color: rgb(74, 74, 74);
}
[console] {
color: rgb(69, 145, 62);
}
[error] {
color: rgb(147, 19, 19);
}
</style>
<pre id="logbook"></pre>
<script type="module">
import * as tests from './tests.js';
import {log, sysLog, errLog} from './utils.js';
const {entries} = Object;
entries(tests).forEach(async ([n, v]) => {
const result = await v();
sysLog(`<h3>test: ${n}</h3>`);
if (result.pass) {
log('PASS');
} else {
errLog('ERROR')
log(`expected: `, result.expected);
sysLog('received: ', result.value);
}
});
</script>
| 20.96 | 55 | 0.582061 |
ff610b454687a60a66a67f1d4320b175ed7a5e72 | 5,770 | py | Python | onadata/apps/api/tests/viewsets/test_team_viewset.py | gushil/kobocat | 5ce27ed5fbf969b2ce68e8a59dd97ced74686711 | [
"BSD-2-Clause"
] | 38 | 2017-02-28T05:39:40.000Z | 2019-01-16T04:39:04.000Z | onadata/apps/api/tests/viewsets/test_team_viewset.py | gushil/kobocat | 5ce27ed5fbf969b2ce68e8a59dd97ced74686711 | [
"BSD-2-Clause"
] | 48 | 2019-03-18T09:26:31.000Z | 2019-05-27T08:12:03.000Z | onadata/apps/api/tests/viewsets/test_team_viewset.py | gushil/kobocat | 5ce27ed5fbf969b2ce68e8a59dd97ced74686711 | [
"BSD-2-Clause"
] | 5 | 2017-02-22T12:25:19.000Z | 2019-01-15T11:16:40.000Z | import json
from onadata.apps.api.models import Team
from onadata.apps.api.tests.viewsets.test_abstract_viewset import\
TestAbstractViewSet
from onadata.apps.api.viewsets.team_viewset import TeamViewSet
class TestTeamViewSet(TestAbstractViewSet):
def setUp(self):
super(self.__class__, self).setUp()
self.view = TeamViewSet.as_view({
'get': 'list',
'post': 'create'
})
def test_teams_list(self):
self._team_create()
# access the url with an unauthorised user
request = self.factory.get('/')
response = self.view(request)
self.assertEqual(response.status_code, 401)
# access the url with an authorised user
request = self.factory.get('/', **self.extra)
response = self.view(request)
owner_team = {
'url':
'http://testserver/api/v1/teams/%s' % self.owner_team.pk,
'name': u'Owners',
'organization': 'denoinc',
'projects': [],
'users': [{'username': u'bob',
'first_name': u'Bob',
'last_name': u'',
'id': self.user.pk}
]
}
self.assertEqual(response.status_code, 200)
self.assertEqual(sorted(response.data), [owner_team, self.team_data])
def test_teams_get(self):
self._team_create()
view = TeamViewSet.as_view({
'get': 'retrieve'
})
request = self.factory.get('/', **self.extra)
response = view(request, pk=self.team.pk)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.data, self.team_data)
def _team_create(self):
self._org_create()
data = {
'name': u'dreamteam',
'organization': self.company_data['org']
}
request = self.factory.post(
'/', data=json.dumps(data),
content_type="application/json", **self.extra)
response = self.view(request)
self.assertEqual(response.status_code, 201)
self.owner_team = Team.objects.get(
organization=self.organization.user,
name='%s#Owners' % (self.organization.user.username))
team = Team.objects.get(
organization=self.organization.user,
name='%s#%s' % (self.organization.user.username, data['name']))
data['url'] = 'http://testserver/api/v1/teams/%s' % team.pk
self.assertDictContainsSubset(data, response.data)
self.team_data = response.data
self.team = team
def test_teams_create(self):
self._team_create()
def test_add_user_to_team(self):
self._team_create()
self.assertNotIn(self.team.group_ptr, self.user.groups.all())
view = TeamViewSet.as_view({
'post': 'members'
})
data = {'username': self.user.username}
request = self.factory.post(
'/', data=json.dumps(data),
content_type="application/json", **self.extra)
response = view(request, pk=self.team.pk)
self.assertEqual(response.status_code, 201)
self.assertEqual(response.data,
[self.user.username])
self.assertIn(self.team.group_ptr, self.user.groups.all())
def test_add_user_to_team_missing_username(self):
self._team_create()
self.assertNotIn(self.team.group_ptr, self.user.groups.all())
view = TeamViewSet.as_view({
'post': 'members'
})
data = {}
request = self.factory.post(
'/', data=json.dumps(data),
content_type="application/json", **self.extra)
response = view(request, pk=self.team.pk)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.data,
{'username': [u'This field is required.']})
self.assertNotIn(self.team.group_ptr, self.user.groups.all())
def test_add_user_to_team_user_does_not_exist(self):
self._team_create()
self.assertNotIn(self.team.group_ptr, self.user.groups.all())
view = TeamViewSet.as_view({
'post': 'members'
})
data = {'username': 'aboy'}
request = self.factory.post(
'/', data=json.dumps(data),
content_type="application/json", **self.extra)
response = view(request, pk=self.team.pk)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.data,
{'username': [u'User `aboy` does not exist.']})
self.assertNotIn(self.team.group_ptr, self.user.groups.all())
def test_remove_user_from_team(self):
self._team_create()
self.assertNotIn(self.team.group_ptr, self.user.groups.all())
view = TeamViewSet.as_view({
'post': 'members',
'delete': 'members'
})
data = {'username': self.user.username}
request = self.factory.post(
'/', data=json.dumps(data),
content_type="application/json", **self.extra)
response = view(request, pk=self.team.pk)
self.assertEqual(response.status_code, 201)
self.assertEqual(response.data,
[self.user.username])
self.assertIn(self.team.group_ptr, self.user.groups.all())
request = self.factory.delete(
'/', data=json.dumps(data),
content_type="application/json", **self.extra)
response = view(request, pk=self.team.pk)
self.assertEqual(response.status_code, 201)
self.assertEqual(response.data,
[])
self.assertNotIn(self.team.group_ptr, self.user.groups.all())
| 34.969697 | 77 | 0.583016 |
f448788c5c5567bc928c81d1b07dc0289c57826e | 892 | lua | Lua | Interface/AddOns/LittleWigs/Legion/CathedralOfEternalNight/Locales/zhCN.lua | ChinarG/Game-Wow-Plugins-Setting | e3fd3ddec1387c1f971dc195fec4fd9045d3105d | [
"Apache-2.0"
] | null | null | null | Interface/AddOns/LittleWigs/Legion/CathedralOfEternalNight/Locales/zhCN.lua | ChinarG/Game-Wow-Plugins-Setting | e3fd3ddec1387c1f971dc195fec4fd9045d3105d | [
"Apache-2.0"
] | null | null | null | Interface/AddOns/LittleWigs/Legion/CathedralOfEternalNight/Locales/zhCN.lua | ChinarG/Game-Wow-Plugins-Setting | e3fd3ddec1387c1f971dc195fec4fd9045d3105d | [
"Apache-2.0"
] | null | null | null | local L = BigWigs:NewBossLocale("Mephistroth", "zhCN")
if not L then return end
if L then
L.custom_on_time_lost = "暗影消退计时"
L.custom_on_time_lost_desc = "显示暗影消退为|cffff0000红色|r计时条。"
end
L = BigWigs:NewBossLocale("Domatrax", "zhCN")
if L then
L.custom_on_autotalk = "自动对话"
L.custom_on_autotalk_desc = "立即选择阿格拉玛之盾对话开始与多玛塔克斯战斗。"
L.missing_aegis = "你没站在盾内" -- Aegis is a short name for Aegis of Aggramar
L.aegis_healing = "盾:降低治疗"
L.aegis_damage = "盾:降低伤害"
end
L = BigWigs:NewBossLocale("Cathedral of Eternal Night Trash", "zhCN")
if L then
L.dulzak = "杜尔扎克"
L.wrathguard = "愤怒卫士入侵者"
L.felguard = "恶魔卫士毁灭者"
L.soulmender = "鬼火慰魂者"
L.temptress = "鬼焰女妖"
L.botanist = "邪脉植物学家"
L.orbcaster = "邪足晶球法师"
L.waglur = "瓦格鲁尔"
L.scavenger = "虫语清道夫"
L.gazerax = "加泽拉克斯"
L.vilebark = "邪皮行者"
L.throw_tome = "投掷宝典" -- Common part of Throw Arcane/Frost/Silence Tome (242837/242839/242841)
end
| 26.235294 | 95 | 0.717489 |
dad96782ca95c250b62adcc1accdbfbbc98348e5 | 600 | ts | TypeScript | DFE_Personality_Dev/DFE_Personality_Dev/node_modules/adaptive-expressions/lib/builtinFunctions/sentenceCase.d.ts | danfernau/PersonalityAdaptiveChatbot | 1a1c9e6d6809f3fee62c4ccf63b0cba9c4245075 | [
"MIT"
] | null | null | null | DFE_Personality_Dev/DFE_Personality_Dev/node_modules/adaptive-expressions/lib/builtinFunctions/sentenceCase.d.ts | danfernau/PersonalityAdaptiveChatbot | 1a1c9e6d6809f3fee62c4ccf63b0cba9c4245075 | [
"MIT"
] | null | null | null | DFE_Personality_Dev/DFE_Personality_Dev/node_modules/adaptive-expressions/lib/builtinFunctions/sentenceCase.d.ts | danfernau/PersonalityAdaptiveChatbot | 1a1c9e6d6809f3fee62c4ccf63b0cba9c4245075 | [
"MIT"
] | null | null | null | /**
* @module adaptive-expressions
*/
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License.
*/
import { StringTransformEvaluator } from './stringTransformEvaluator';
/**
* Capitalizing only the first word and leave others lowercase.
*/
export declare class SentenceCase extends StringTransformEvaluator {
/**
* Initializes a new instance of the [SentenceCase](xref:adaptive-expressions.SentenceCase) class.
*/
constructor();
/**
* @private
*/
private static evaluator;
}
//# sourceMappingURL=sentenceCase.d.ts.map | 27.272727 | 102 | 0.703333 |
3a932e6b7e70aecda44bdbb588a43889cffb5091 | 859 | dart | Dart | lib/util/game_intercafe/text_interface_component.dart | rezendegc/bonfire | 5843fa602e67c095584648497e2abb153233c63d | [
"MIT"
] | null | null | null | lib/util/game_intercafe/text_interface_component.dart | rezendegc/bonfire | 5843fa602e67c095584648497e2abb153233c63d | [
"MIT"
] | null | null | null | lib/util/game_intercafe/text_interface_component.dart | rezendegc/bonfire | 5843fa602e67c095584648497e2abb153233c63d | [
"MIT"
] | null | null | null | import 'dart:ui';
import 'package:bonfire/bonfire.dart';
import 'package:flame/text_config.dart';
import 'package:flutter/cupertino.dart';
import 'package:flutter/material.dart';
class TextInterfaceComponent extends InterfaceComponent {
String text;
final TextConfig textConfig;
TextInterfaceComponent({
@required int id,
@required Position position,
this.text = '',
double width = 0,
double height = 0,
VoidCallback onTapComponent,
this.textConfig = const TextConfig(),
}) : super(
id: id,
position: position,
width: width,
height: height,
onTapComponent: onTapComponent,
);
@override
void render(Canvas canvas) {
super.render(canvas);
textConfig.render(
canvas,
text,
Position(this.position.left, this.position.top),
);
}
}
| 23.216216 | 57 | 0.654249 |
23a30020283e63da48ff2f003d9b03a6a58fe505 | 1,537 | js | JavaScript | src/store/modules/pedido/action.js | pizzajs/pizzajs-front-end | 9012c8fb03c6d64a3e8e587fbb33bfba85996f12 | [
"MIT"
] | null | null | null | src/store/modules/pedido/action.js | pizzajs/pizzajs-front-end | 9012c8fb03c6d64a3e8e587fbb33bfba85996f12 | [
"MIT"
] | 4 | 2021-03-10T12:07:02.000Z | 2022-02-18T23:32:11.000Z | src/store/modules/pedido/action.js | pizzajs/pizzajs-front-end | 9012c8fb03c6d64a3e8e587fbb33bfba85996f12 | [
"MIT"
] | 1 | 2020-08-04T22:07:39.000Z | 2020-08-04T22:07:39.000Z | //ACTIONS PIZZA
export function AdicionarPizzaProcess(pizza) {
return {
type: 'ADD_PIZZA_PROCESS',
payload: { pizza }
}
}
export function AdicionarPizza(pizza) {
return {
type: 'ADD_PIZZA',
payload: { pizza }
}
}
export function AdicionarPizzaCustomizada(pizza){
return {
type: 'ADD_PIZZA_CUSTOM',
payload: {pizza}
}
}
export function DeletarPizzaCustomizada(nome){
return {
type: 'DELETE_PIZZA_CUSTOM',
payload: {nome}
}
}
export function RemoverPizza(id) {
return {
type: 'REMOVE_PIZZA',
payload: { id }
}
}
//ACTIONS BEBIDA
// export function AdicionarBebidaProcess(bebida) {
// return {
// type: 'ADD_BEBIDA_PROCESS',
// payload: { bebida }
// }
// }
export function AdicionarBebida(bebida, valor) {
return {
type: 'ADD_BEBIDA',
payload: { bebida, valor }
}
}
export function RemoverBebida(index, valor) {
return {
type: 'REMOVE_BEBIDA',
payload: { index, valor }
}
}
//ACTIONS PEDIDO
export function finalizarPedido() {
return {
type: 'FINALIZAR_PEDIDO'
}
}
export function finalizarPedidoRequest(bebidaId, pizzas_ids, pizzas_customizadas, preco ) {
return {
type: 'FINALIZAR_PEDIDO_REQUEST',
payload: { bebidaId, pizzas_ids, pizzas_customizadas, preco }
}
}
export function cancelarPedido(id) {
return {
type: 'CANCELAR_PEDIDO',
payload: { id }
}
} | 18.518072 | 91 | 0.599219 |
8ff844e7a243c783aeb98f8eebf6e843fe37e847 | 520 | asm | Assembly | MASM/p7.asm | saransh808/Random | 57522016ef5473dd8b7c29f34b3f80344379496e | [
"MIT"
] | null | null | null | MASM/p7.asm | saransh808/Random | 57522016ef5473dd8b7c29f34b3f80344379496e | [
"MIT"
] | null | null | null | MASM/p7.asm | saransh808/Random | 57522016ef5473dd8b7c29f34b3f80344379496e | [
"MIT"
] | null | null | null | data segment
dat db 10
data ends
display macro m1
mov ah,09h
lea dx,m1
int 21h
endm
code segment
assume cs:code, ds:data
start: mov ax,data
mov ds,ax
lea dx,[dat]
mov ah,0ah
int 21h
mov cx,000ah
lea si,dat
up: mov al,[si]
sub al,20h
mov [si],al
inc si
loop up
display dat
code ends
end start
| 15.757576 | 32 | 0.415385 |
67fc9614b3dcebc660247f398f47060c058a5559 | 12,626 | rs | Rust | tools/didc/src/main.rs | Psychedelic/candid | dc5584aac5899aa8f265c36ee647815fe18afb02 | [
"Apache-2.0"
] | null | null | null | tools/didc/src/main.rs | Psychedelic/candid | dc5584aac5899aa8f265c36ee647815fe18afb02 | [
"Apache-2.0"
] | null | null | null | tools/didc/src/main.rs | Psychedelic/candid | dc5584aac5899aa8f265c36ee647815fe18afb02 | [
"Apache-2.0"
] | null | null | null | use anyhow::{bail, Result};
use candid::{
parser::types::{IDLType, IDLTypes},
pretty_check_file, pretty_parse,
types::Type,
Error, IDLArgs, TypeEnv,
};
use std::collections::HashSet;
use std::path::PathBuf;
use structopt::clap::AppSettings;
use structopt::StructOpt;
#[derive(StructOpt)]
#[structopt(global_settings = &[AppSettings::ColoredHelp, AppSettings::DeriveDisplayOrder])]
enum Command {
/// Type check Candid file
Check {
/// Specifies did file for type checking
input: PathBuf,
/// Specifies a previous version of did file for subtyping check
previous: Option<PathBuf>,
},
/// Generate binding for different languages
Bind {
/// Specifies did file for code generation
input: PathBuf,
#[structopt(short, long, possible_values = &["js", "ts", "did", "mo"])]
/// Specifies target language
target: String,
},
/// Generate test suites for different languages
Test {
/// Specifies .test.did file for test suites generation
input: PathBuf,
#[structopt(short, long, possible_values = &["js", "did"], default_value = "js")]
/// Specifies target language
target: String,
},
/// Compute the hash of a field name
Hash { input: String },
/// Encode Candid value
Encode {
#[structopt(parse(try_from_str = parse_args))]
/// Specifies Candid textual format for encoding
args: IDLArgs,
#[structopt(flatten)]
annotate: TypeAnnotation,
#[structopt(short, long, possible_values = &["hex", "pretty", "blob"], default_value = "hex")]
/// Specifies hex format
format: String,
},
/// Decode Candid binary data
Decode {
/// Specifies Candid binary data in hex string
blob: String,
#[structopt(short, long, possible_values = &["hex", "blob"], default_value = "hex")]
/// Specifies hex format
format: String,
#[structopt(flatten)]
annotate: TypeAnnotation,
},
/// Generate random Candid values
Random {
#[structopt(flatten)]
annotate: TypeAnnotation,
#[structopt(short, long, conflicts_with("file"))]
/// Specifies random value generation config in Dhall syntax
config: Option<String>,
#[structopt(short, long)]
/// Load random value generation config from file
file: Option<String>,
#[structopt(short, long, possible_values = &["did", "js"], default_value = "did")]
/// Specifies target language
lang: String,
#[structopt(short, long, requires("method"))]
/// Specifies input arguments for a method call, mocking the return result
args: Option<IDLArgs>,
},
/// Check for subtyping
Subtype {
#[structopt(short, long)]
defs: Option<PathBuf>,
ty1: IDLType,
ty2: IDLType,
},
/// Diff two Candid values
Diff {
#[structopt(parse(try_from_str = parse_args))]
values1: IDLArgs,
#[structopt(parse(try_from_str = parse_args))]
values2: IDLArgs,
#[structopt(flatten)]
annotate: TypeAnnotation,
},
}
#[derive(StructOpt)]
struct TypeAnnotation {
#[structopt(name = "types", short, long)]
#[structopt(parse(try_from_str = parse_types))]
/// Annotates values with Candid types
tys: Option<IDLTypes>,
#[structopt(short, long, conflicts_with("types"), requires("defs"))]
/// Annotates values with a service method, specified in --defs option
method: Option<String>,
#[structopt(short, long)]
/// Loads did file for --types or --method to reference type definitions
defs: Option<PathBuf>,
}
enum Mode {
Encode,
Decode,
}
impl TypeAnnotation {
fn is_empty(&self) -> bool {
self.tys.is_none() && self.method.is_none()
}
fn get_types(&self, mode: Mode) -> candid::Result<(TypeEnv, Vec<Type>)> {
let (env, actor) = if let Some(ref file) = self.defs {
pretty_check_file(file)?
} else {
(TypeEnv::new(), None)
};
match (&self.tys, &self.method) {
(None, None) => Err(Error::msg("no type annotations")),
(Some(tys), None) => {
let mut types = Vec::new();
for ty in tys.args.iter() {
types.push(env.ast_to_type(ty)?);
}
Ok((env, types))
}
(None, Some(meth)) => {
let actor = actor
.ok_or_else(|| Error::msg("Cannot use --method with a non-service did file"))?;
let func = env.get_method(&actor, meth)?;
let types = match mode {
Mode::Encode => &func.args,
Mode::Decode => &func.rets,
}
.clone();
Ok((env, types))
}
_ => unreachable!(),
}
}
}
fn parse_args(str: &str) -> Result<IDLArgs, Error> {
pretty_parse("candid arguments", str)
}
fn parse_types(str: &str) -> Result<IDLTypes, Error> {
pretty_parse("type annotations", str)
}
fn main() -> Result<()> {
match Command::from_args() {
Command::Check { input, previous } => {
let (env, opt_t1) = pretty_check_file(&input)?;
if let Some(previous) = previous {
let (env2, opt_t2) = pretty_check_file(&previous)?;
match (opt_t1, opt_t2) {
(Some(t1), Some(t2)) => {
let mut gamma = HashSet::new();
candid::types::subtype::subtype(&mut gamma, &env, &t1, &env2, &t2)?;
}
_ => {
bail!("did file need to contain the main service type for subtyping check")
}
}
}
}
Command::Subtype { defs, ty1, ty2 } => {
let (env, _) = if let Some(file) = defs {
pretty_check_file(&file)?
} else {
(TypeEnv::new(), None)
};
let ty1 = env.ast_to_type(&ty1)?;
let ty2 = env.ast_to_type(&ty2)?;
candid::types::subtype::subtype(&mut HashSet::new(), &env, &ty1, &env, &ty2)?;
}
Command::Bind { input, target } => {
let (env, actor) = pretty_check_file(&input)?;
let content = match target.as_str() {
"js" => candid::bindings::javascript::compile(&env, &actor),
"ts" => candid::bindings::typescript::compile(&env, &actor),
"did" => candid::bindings::candid::compile(&env, &actor),
"mo" => candid::bindings::motoko::compile(&env, &actor),
_ => unreachable!(),
};
println!("{}", content);
}
Command::Test { input, target } => {
let test = std::fs::read_to_string(&input)
.map_err(|_| Error::msg(format!("could not read file {}", input.display())))?;
let ast = pretty_parse::<candid::parser::test::Test>(input.to_str().unwrap(), &test)?;
let content = match target.as_str() {
"js" => candid::bindings::javascript::test::test_generate(ast),
"did" => {
candid::parser::test::check(ast)?;
"".to_string()
}
_ => unreachable!(),
};
println!("{}", content);
}
Command::Hash { input } => {
println!("{}", candid::idl_hash(&input));
}
Command::Encode {
args,
format,
annotate,
} => {
let bytes = if annotate.is_empty() {
args.to_bytes()?
} else {
let (env, types) = annotate.get_types(Mode::Encode)?;
args.to_bytes_with_types(&env, &types)?
};
let hex = match format.as_str() {
"hex" => hex::encode(&bytes),
"pretty" => pretty_hex::pretty_hex(&bytes),
"blob" => {
let mut res = String::new();
for ch in bytes.iter() {
res.push_str(&candid::parser::pretty::pp_char(*ch));
}
format!("blob \"{}\"", res)
}
_ => unreachable!(),
};
println!("{}", hex);
}
Command::Decode {
blob,
format,
annotate,
} => {
let bytes = match format.as_str() {
"hex" => hex::decode(&blob)?,
"blob" => {
use candid::parser::value::IDLValue;
match pretty_parse::<IDLValue>("blob", &blob)? {
IDLValue::Vec(vec) => vec
.iter()
.map(|v| {
if let IDLValue::Nat8(u) = v {
*u
} else {
unreachable!()
}
})
.collect(),
_ => unreachable!(),
}
}
_ => unreachable!(),
};
let value = if annotate.is_empty() {
IDLArgs::from_bytes(&bytes)?
} else {
let (env, types) = annotate.get_types(Mode::Decode)?;
IDLArgs::from_bytes_with_types(&bytes, &env, &types)?
};
println!("{}", value);
}
Command::Random {
annotate,
lang,
config,
file,
args,
} => {
use candid::parser::configs::Configs;
use rand::Rng;
let (env, types) = if args.is_some() {
annotate.get_types(Mode::Decode)?
} else {
annotate.get_types(Mode::Encode)?
};
let config = match (config, file) {
(None, None) => Configs::from_dhall("{=}")?,
(Some(str), None) => Configs::from_dhall(&str)?,
(None, Some(file)) => {
let content = std::fs::read_to_string(&file)
.map_err(|_| Error::msg(format!("could not read {}", file)))?;
Configs::from_dhall(&content)?
}
_ => unreachable!(),
};
let config = if let Some(ref method) = annotate.method {
config.with_method(method)
} else {
config
};
// TODO figure out how many bytes of entropy we need
let seed: Vec<u8> = if let Some(ref args) = args {
let (env, types) = annotate.get_types(Mode::Encode)?;
let bytes = args.to_bytes_with_types(&env, &types)?;
bytes.into_iter().rev().cycle().take(2048).collect()
} else {
let mut rng = rand::thread_rng();
(0..2048).map(|_| rng.gen::<u8>()).collect()
};
let args = IDLArgs::any(&seed, &config, &env, &types)?;
match lang.as_str() {
"did" => println!("{}", args),
"js" => println!(
"{}",
candid::bindings::javascript::value::pp_args(&args).pretty(80)
),
_ => unreachable!(),
}
}
Command::Diff {
values1,
values2,
annotate,
} => {
let (vs1, vs2) = if annotate.is_empty() {
(values1.args, values2.args)
} else {
// Either we assume the types are in decode mode, or forbid the use of --method in diff
let (env, types) = annotate.get_types(Mode::Decode)?;
(
values1.annotate_types(true, &env, &types)?.args,
values2.annotate_types(true, &env, &types)?.args,
)
};
if vs1.len() != vs2.len() {
return Err(Error::msg("value length mismatch").into());
}
for (v1, v2) in vs1.iter().zip(vs2.iter()) {
let edit = candiff::value_diff(v1, v2, &None);
println!("{}", candiff::pretty::value_edit(&edit).pretty(80));
}
}
};
Ok(())
}
| 36.597101 | 103 | 0.474259 |
12b7bdd1adc40c9abf56c3b2471e372b4c2a727b | 871 | cs | C# | src/GR.Extensions/GR.Ecommerce.Extension/GR.ECommerce.Payments/GR.Providers/GR.ECommerce.Paypal/GR.Paypal.Razor/Extensions/ServiceCollectionExtensions.cs | indrivo/GEAR | 5bfa35c0860effe204645af8efc2610fa3aca94f | [
"MIT"
] | 7 | 2019-10-21T08:52:00.000Z | 2021-09-03T22:22:36.000Z | src/GR.Extensions/GR.Ecommerce.Extension/GR.ECommerce.Payments/GR.Providers/GR.ECommerce.Paypal/GR.Paypal.Razor/Extensions/ServiceCollectionExtensions.cs | indrivo/GEAR | 5bfa35c0860effe204645af8efc2610fa3aca94f | [
"MIT"
] | null | null | null | src/GR.Extensions/GR.Ecommerce.Extension/GR.ECommerce.Payments/GR.Providers/GR.ECommerce.Paypal/GR.Paypal.Razor/Extensions/ServiceCollectionExtensions.cs | indrivo/GEAR | 5bfa35c0860effe204645af8efc2610fa3aca94f | [
"MIT"
] | 4 | 2019-10-18T16:19:50.000Z | 2021-09-03T22:22:37.000Z | using GR.Core.Extensions;
using GR.Paypal.Models;
using GR.Paypal.Razor.Helpers;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace GR.Paypal.Razor.Extensions
{
public static class ServiceCollectionExtensions
{
/// <summary>
/// Register paypal provider
/// </summary>
/// <param name="services"></param>
/// <param name="configuration"></param>
/// <returns></returns>
public static IServiceCollection RegisterPaypalRazorProvider(this IServiceCollection services,
IConfiguration configuration)
{
services.ConfigureOptions(typeof(PaypalAssetsRazorFileConfiguration));
services.ConfigureWritable<PaypalExpressConfigForm>(configuration.GetSection("PayPalSettings"));
return services;
}
}
} | 33.5 | 108 | 0.685419 |
2e338ca7be4c18296dd65642e2c9fe995351cde3 | 291 | sql | SQL | src/Persistence/MySQL/schema/patch-1000705.sql | frantzcy/snuze | da8bda3b1c194372b61f4ad3e16345f0fb046002 | [
"Apache-2.0"
] | 2 | 2020-02-14T23:10:10.000Z | 2021-01-29T13:18:26.000Z | src/Persistence/MySQL/schema/patch-1000705.sql | frantzcy/snuze | da8bda3b1c194372b61f4ad3e16345f0fb046002 | [
"Apache-2.0"
] | 7 | 2019-08-19T22:17:34.000Z | 2022-03-16T22:17:45.000Z | src/Persistence/MySQL/schema/patch-1000705.sql | frantzcy/snuze | da8bda3b1c194372b61f4ad3e16345f0fb046002 | [
"Apache-2.0"
] | 1 | 2022-03-16T00:58:51.000Z | 2022-03-16T00:58:51.000Z | /**
* These DDL statements implement the changes made to the Snuze MySQL schema in
* version: 1000705 (Snuze 0.7.5).
*/
--
ALTER TABLE `subreddits`
ADD COLUMN `coins` MEDIUMINT UNSIGNED NOT NULL DEFAULT 0
AFTER `can_assign_user_flair`;
--
UPDATE `snuze` SET `schema_version` = 1000705;
| 26.454545 | 79 | 0.731959 |
dcc4794152cfb43a0542a5ead3ff25ed993b7e71 | 8,551 | swift | Swift | Forum/Misc/Network.swift | oscardhc/Forum | a753a34b8763196499341d6c5c9a56d0c69b1de4 | [
"MIT"
] | 16 | 2020-12-04T08:21:15.000Z | 2022-01-03T06:58:42.000Z | Forum/Misc/Network.swift | treehollow/Forum | a753a34b8763196499341d6c5c9a56d0c69b1de4 | [
"MIT"
] | 1 | 2020-12-04T09:23:43.000Z | 2020-12-04T10:03:32.000Z | Forum/Misc/Network.swift | oscardhc/Forum | a753a34b8763196499341d6c5c9a56d0c69b1de4 | [
"MIT"
] | 4 | 2021-01-07T03:22:29.000Z | 2021-04-13T08:42:19.000Z | //
// Network.swift
// Forum
//
// Created by Oscar on 2020/9/29.
//
import Foundation
import Socket
import UIKit
class Network {
static let e = JSONEncoder(), ip = "182.254.145.254", port: Int32 = 8080
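/// Opens a raw TCP socket to the fixed host, writes the JSON-encoded request,
/// reads and decodes the JSON reply, and retries up to three times on failure.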
static private func connect<T: Encodable>(_ d: T) -> [String: Any]? {
func singleConnect() -> [String: Any]? {
do {
let data = try e.encode(d)
print(String(data: data, encoding: .utf8)!)
let s = try Socket.create()
try s.connect(to: ip, port: port, timeout: 10000)
print("connect")
try s.write(from: data)
try s.setReadTimeout(value: 10000)
print("sent")
usleep(10000)
var dt = Data()
while try s.read(into: &dt) > 0 {
print("> get")
}
print("get", String(data: dt, encoding: .utf8)!)
let rec = try JSONSerialization.jsonObject(
with: dt,
options: .allowFragments
)
s.close()
return rec as! [String: Any]
} catch {
return nil
}
}
let before = Date().timeIntervalSince1970
for i in 1...3 {
if let res = singleConnect() {
print("connect success with in \(i) time(s), \((Date().timeIntervalSince1970 - before) * 1000) ms")
G.updateStat((Date().timeIntervalSince1970 - before) * 1000)
return res
}
usleep(10000)
}
G.updateStat(-1)
return nil
}
static private func getData<T>(
op_code: String, needChecking: Bool = true,
pa_1: String = "0", pa_2: String = "0", pa_3: String = "0", pa_4: String = "0", pa_5: String = "0", pa_6: String = "0",
done: (([String: Any]) -> T)
) -> T? {
if let result = connect([
"op_code": op_code, "pa_1": pa_1, "pa_2": pa_2, "pa_3": pa_3, "pa_4": pa_4, "pa_5": pa_5, "pa_6": pa_6, "Token": G.token.content
]) {
if needChecking, let x = result["login_flag"] as? String, x != "1" {
print("not Authorized", result)
Util.halt()
return nil
} else {
return done(result)
}
} else {
print("FAIL!")
return nil
}
}
enum NetworkGetThreadType: String {
case time = "1", favoured = "6", my = "7", trending = "d"
}
static func getThreads(type: NetworkGetThreadType, inBlock: Thread.Category, lastSeenID: String) -> ([Thread], String)? {
getData(op_code: type.rawValue, pa_1: lastSeenID, pa_2: String(Thread.Category.allCases.firstIndex(of: inBlock)!)) {
(
($0["thread_list"]! as! [Any]).map {
Thread(json: $0)
},
$0[$0.keys.first(where: {$0.hasPrefix("LastSeen")})!] as! String
)
}
}
static func searchThreads(keyword: String, lastSeenID: String) -> ([Thread], String)? {
getData(op_code: "b", pa_1: keyword, pa_2: lastSeenID) {
(
($0["thread_list"]! as! [Any]).map {
Thread(json: $0)
},
$0[$0.keys.first(where: {$0.hasPrefix("LastSeen")})!] as! String
)
}
}
static func getFloors(for threadID: String, lastSeenID: String, order: String) -> (([Floor], String)?, Thread?) {
getData(op_code: "2", pa_1: threadID, pa_2: lastSeenID, pa_3: order) {
$0["ExistFlag"] as! String == "0" ? (nil, nil) :
(
(
($0["floor_list"]! as! [Any]).map {Floor(json: $0)},
$0[$0.keys.first(where: {$0.hasPrefix("LastSeen")})!] as! String
),
Thread(json: $0["this_thread"]!, isfromFloorList: true)
)
} ?? (nil, nil)
}
static func getMessages(lastSeenID: String) -> ([Message], String)? {
getData(op_code: "a", pa_1: lastSeenID) {
(
($0["message_list"]! as! [Any]).map {
Message(json: $0)
},
$0[$0.keys.first(where: {$0.hasPrefix("LastSeen")})!] as! String
)
}
}
static func verifyToken() -> (String, String)? {
getData(op_code: "-1", needChecking: false) {
let release = ($0["ReleaseTime"] as? String) ?? ""
let thread = ($0["Ban_ThreadID"] as? String) ?? ""
let content = ($0["Ban_Content"] as? String) ?? ""
let reason = ($0["Ban_Reason"] as? String) ?? ""
let isReply = (($0["Ban_Style"] as? String) ?? "") == "1"
return ($0["login_flag"]! as! String, "由于\(isReply ? "你在 #\(thread) 下的回复" : "你的发帖 #\(thread)") \(reason) ,已被我们屏蔽。结合你之前在无可奉告的封禁记录,你的账号将被暂时封禁至 \(release)。\n\n违规\(isReply ? "回复" : "发帖")内容为:\n\(content)\n\n请与我们一起维护无可奉告社区环境。\n谢谢!")
}
}
static func requestLogin(with email: String) -> Bool {
getData(op_code: "0", needChecking: false, pa_1: email) {
$0["VarifiedEmailAddress"] as! Int == 1
} ?? false
}
static func performLogin(with email: String, verificationCode: String) -> (Bool, String) {
getData(op_code: "f", needChecking: false, pa_1: email, pa_2: verificationCode, pa_3: UIDevice.current.identifierForVendor!.uuidString) {
($0["login_flag"] as! Int == 0, $0["Token"] as! String)
} ?? (false, "")
}
static func favourThread(for threadID: String) -> Bool {
getData(op_code: "5", pa_1: threadID, done: {_ in true}) ?? false
}
static func cancelFavourThread(for threadID: String) -> Bool {
getData(op_code: "5_2", pa_1: threadID, done: {_ in true}) ?? false
}
static func likeFloor(for threadID: String, floor: String) -> Bool {
getData(op_code: "8", pa_1: threadID, pa_4: floor, done: {_ in true}) ?? false
}
static func cancelLikeFloor(for threadID: String, floor: String) -> Bool {
getData(op_code: "8_2", pa_1: threadID, pa_4: floor, done: {_ in true}) ?? false
}
static func dislikeFloor(for threadID: String, floor: String) -> Bool {
getData(op_code: "8_5", pa_1: threadID, pa_4: floor, done: {_ in true}) ?? false
}
static func canceldislikeFloor(for threadID: String, floor: String) -> Bool {
getData(op_code: "8_6", pa_1: threadID, pa_4: floor, done: {_ in true}) ?? false
}
static func likeThread(for threadID: String) -> Bool {
getData(op_code: "8_3", pa_1: threadID, done: {_ in true}) ?? false
}
static func cancelLikeThread(for threadID: String) -> Bool {
getData(op_code: "8_4", pa_1: threadID, done: {_ in true}) ?? false
}
static func dislikeThread(for threadID: String) -> Bool {
getData(op_code: "9", pa_1: threadID, done: {_ in true}) ?? false
}
static func cancelDislikeThread(for threadID: String) -> Bool {
getData(op_code: "9_2", pa_1: threadID, done: {_ in true}) ?? false
}
static func reportThread(for threadID: String) -> Bool {
getData(op_code: "e", pa_1: threadID, done: {_ in true}) ?? false
}
static func reportFloor(for threadID: String, floor: String) -> Bool {
getData(op_code: "h", pa_1: threadID, pa_2: floor, done: {_ in true}) ?? false
}
static func setTag(for threadID: String, with tag: Tag) -> Bool {
getData(op_code: "i", pa_1: threadID, pa_4: String(describing: tag), done: {_ in true}) ?? false
}
static func newThread(title: String, inBlock: Thread.Category, content: String, anonymousType: NameTheme, seed: Int, tag: Tag?) -> Bool {
return getData(op_code: "3", pa_1: title, pa_2: String(Thread.Category.allCases.firstIndex(of: inBlock)!), pa_3: content, pa_4: anonymousType.rawValue, pa_5: String(seed), pa_6: tag == nil ? "NULL" : String(describing: tag!), done: {_ in true}) ?? false
}
static func newReply(for threadID: String, floor: String, content: String) -> Bool {
getData(op_code: floor == "0" ? "4" : "4_2", pa_1: threadID, pa_3: content, pa_4: floor, done: {_ in true}) ?? false
}
}
| 39.40553 | 261 | 0.520875 |
a37396e6ce379f8a51d3220313d4afb0ecdffcd2 | 595 | tsx | TypeScript | libs/ui/app/src/pages/index-page.tsx | platyplus/platydev | 1e4fb8bd3f6b83a74d91544ccdfbaa68bbf1fb7e | [
"MIT"
] | 9 | 2020-12-08T12:56:27.000Z | 2021-09-01T12:11:49.000Z | libs/ui/app/src/pages/index-page.tsx | platyplus/platydev | 1e4fb8bd3f6b83a74d91544ccdfbaa68bbf1fb7e | [
"MIT"
] | 7 | 2021-01-26T09:17:50.000Z | 2021-09-19T15:33:21.000Z | libs/ui/app/src/pages/index-page.tsx | platyplus/platydev | 1e4fb8bd3f6b83a74d91544ccdfbaa68bbf1fb7e | [
"MIT"
] | 6 | 2021-02-07T20:48:29.000Z | 2021-08-19T10:03:12.000Z | import { HeaderTitleWrapper } from '@platyplus/layout'
import { Link, Redirect } from 'react-router-dom'
import { useAuthenticated } from '@platyplus/hbp'
export const IndexPage: React.FC<{ title?: string }> = ({
title = 'Index page'
}) => {
const signedIn = useAuthenticated()
if (signedIn) {
return <Redirect to="/home" />
} else
return (
<HeaderTitleWrapper title={title}>
<h2>Welcome, stranger</h2>
<Link to="/login">Login</Link>
<br />
<Link to="/register">Register</Link>
</HeaderTitleWrapper>
)
}
export default IndexPage
| 25.869565 | 57 | 0.626891 |
eb8ed19925fbcf7b48196bc48bd3968053c0dd65 | 4,081 | css | CSS | game/dist/css/main.css | vnjson/vtq-tpl | f83e93725ff6a69a8ff8055906e5ab2edb812c12 | [
"MIT"
] | null | null | null | game/dist/css/main.css | vnjson/vtq-tpl | f83e93725ff6a69a8ff8055906e5ab2edb812c12 | [
"MIT"
] | null | null | null | game/dist/css/main.css | vnjson/vtq-tpl | f83e93725ff6a69a8ff8055906e5ab2edb812c12 | [
"MIT"
] | null | null | null |
* {
box-sizing: border-box;
margin: 0;
padding: 0;
}
h3{
color: skyblue;
}
html, body{
user-select: none;
overflow: hidden;
background-image: url(../assets/background-1.jpg);
background-size: cover;
}
.wrapper{
font-family: 'Jura';
width: 100vw;
height: 100vh;
display: flex;
justify-content: center;
align-items: center;
background-color: rgba(0, 0, 0, 0.8);
font-size: 16px;
}
#screen{
background-color: #030623;
opacity: 0.8;
width: 900px;
height: 60vh;
border: 3.5px solid #124548;
padding: 10px;
}
.scene,
.show,
.show__dialog-box,
.show__items,
.game-menu{
display: none;
}
.scene{
position: relative;
height: 100%;
}
.show{
width: 70%;
}
/**
* SCREENS
*/
/**
* main-menu
*/
.scene__main-menu{
display: none;
}
/**
* @scene mainMenu
*/
.main-menu__item-wrapper{
margin: 23% auto;
}
.main-menu__item{
font-size: 2em;
margin: 20px 20%;
text-align: center;
cursor: pointer;
padding: 7px;
color: darkcyan;
background-color: #082430;
border: 1px solid #124548;
border-left: 5px solid #124548;
}
.main-menu__item:hover,
.main-menu__item:active{
background-color: #030623;
border: 1px solid #124548;
border-left: 5px solid #124548;
}
/**
* Game over
*/
.scene__game-over{
display: none;
}
.scene__game-over h3{
text-align: center;
margin: 23% auto;
font-size: 3em;
color: red;
}
@font-face {
font-family: "Jura";
src: url('../fonts/Jura-VariableFont_wght.ttf') format("truetype");
font-style: normal;
font-weight: normal;
}
footer{
position: absolute;
bottom: 0;
width: 70%;
color: grey;
padding-bottom: 15px;
display: flex;
flex-direction: row;
justify-content: space-between;
}
footer p{}
footer a{
color: grey;
}
footer a:hover{
color: darkcyan;
}
/**
* aside
*/
.stream-aside{
position: absolute;
right: 0;
top: 0;
width: 30%;
height: 100%;
font-size: 1.3em;
display: block;
transition: 1s;
color: darkcyan;
}
.stream-aside__notifer{
width: 96%;
height: 17%;
border: 2px solid #082430;
margin-bottom: 10px;
padding: 10px;
}
.stream-aside__wiki{
margin-bottom: 10px;
width: 96%;
height: 200px;
border: 2px solid #082430;
padding: 10px;
}
.stream-aside__items{
width: 96%;
height: 180px;
border: 2px solid #082430;
margin-bottom: 10px;
}
.user-item{
cursor: -webkit-grab;
cursor: grab;
width: 3vw;
height: 3vw;
margin: 5px;
border: 1px solid #082430;
color: grey;
display: inline-flex;
justify-content: center;
align-items: center;
font-size: 1em;
}
.user-item:hover{
background-color: #082430;
}
.user-item:active{
cursor: -webkit-grabbing;
cursor: grabbing;
}
/**
* character
*/
.character{
margin-right: 10px;
border: 2px solid #082430;
padding: 10px;
}
.character__name{
display: inline-block;
font-size: 2.5em;
margin-left: 10px;
}
.character__logo{
display: inline-block;
width: 50px;
height: 50px;
border: 2px solid #082430;
padding: 5px;
background-repeat: no-repeat;
background-size: 45px 45px;
}
/**
* reply
*/
.reply{
height: 200px;
margin-right: 10px;
border: 2px solid #082430;
margin-top: 10px;
cursor: pointer;
padding: 10px;
font-size: 1.4em;
}
.reply i{
font-style: normal;
color: skyblue;
}
/**
* game-menu
*/
.game-menu{
font-size: 1.3em;
margin-right: 10px;
}
.game-menu__menu-item{
cursor: pointer;
margin: 10px 0;
padding: 7px;
color: darkcyan;
background-color: #082430;
border: 1px solid #124548;
border-left: 5px solid #124548;
}
.game-menu__menu-item:hover,
.game-menu__menu-item:active{
background-color: #030623;
border: 1px solid #124548;
border-left: 5px solid #124548;
}
input[type='text']{
outline:none;
display: block;
font-size: 1.2em;
margin-top: 10px;
width: 300px;
padding: 7px;
color: skyblue;
background-color: #082430;
border: 1px solid #124548;
border-left: 5px solid skyblue;
}
/**
* plugin ALERT
*/
@keyframes alert {
0% { border: 2.5px solid #124548; }
100% { border: 2.5px solid red; }
}
.alert{
color: red;
animation: alert 0.55s ease-in-out infinite;
animation-direction: alternate;
}
.info{
color: skyblue;
font-size: 0.9em;
} | 14.679856 | 69 | 0.670424 |
4cfd79230d7bba5074f82f9c52921c1cb7017d99 | 2,489 | py | Python | day-24/part-2/thore.py | evqna/adventofcode-2020 | 526bb9c87057d02bda4de9647932a0e25bdb3a5b | [
"MIT"
] | 12 | 2020-11-30T19:22:18.000Z | 2021-06-21T05:55:58.000Z | day-24/part-2/thore.py | evqna/adventofcode-2020 | 526bb9c87057d02bda4de9647932a0e25bdb3a5b | [
"MIT"
] | 13 | 2020-11-30T17:27:22.000Z | 2020-12-22T17:43:13.000Z | day-24/part-2/thore.py | evqna/adventofcode-2020 | 526bb9c87057d02bda4de9647932a0e25bdb3a5b | [
"MIT"
] | 3 | 2020-12-01T08:49:40.000Z | 2022-03-26T21:47:38.000Z | from collections import Counter
from tool.runners.python import SubmissionPy
class ThoreSubmission(SubmissionPy):
def run(self, s):
"""
:param s: input in string format
:return: solution flag
"""
N_DAYS = 100
black_hexs = self.parse_start_grid(s)
for _ in range(N_DAYS):
n_neighbors = Counter(
neighbor
for hex_coord in black_hexs
for neighbor in self.get_neighbors(*hex_coord)
)
black_hexs = {
hex_coord
for hex_coord, n in n_neighbors.items()
if (hex_coord not in black_hexs and n == 2)
or (hex_coord in black_hexs and n in [1, 2])
}
return len(black_hexs)
@staticmethod
def get_neighbors(x, y):
yield x + 2, y
yield x - 2, y
yield x + 1, y + 1
yield x + 1, y - 1
yield x - 1, y + 1
yield x - 1, y - 1
@classmethod
def parse_start_grid(cls, s):
black_hexs = set()
for line in s.splitlines():
hex_coord = cls.parse_line(line)
if hex_coord in black_hexs:
black_hexs.remove(hex_coord)
else:
black_hexs.add(hex_coord)
return black_hexs
@staticmethod
def parse_line(line):
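# Doubled-width hex coordinates: e/w steps change x by 2, diagonal steps by 1;
# y is the net number of south steps minus north steps.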
return (
2 * line.count("w")
- line.count("nw")
- line.count("sw")
- 2 * line.count("e")
+ line.count("ne")
+ line.count("se"),
line.count("s") - line.count("n"),
)
def test_thore():
"""
Run `python -m pytest ./day-24/part-2/thore.py` to test the submission.
"""
assert (
ThoreSubmission().run(
"""sesenwnenenewseeswwswswwnenewsewsw
neeenesenwnwwswnenewnwwsewnenwseswesw
seswneswswsenwwnwse
nwnwneseeswswnenewneswwnewseswneseene
swweswneswnenwsewnwneneseenw
eesenwseswswnenwswnwnwsewwnwsene
sewnenenenesenwsewnenwwwse
wenwwweseeeweswwwnwwe
wsweesenenewnwwnwsenewsenwwsesesenwne
neeswseenwwswnwswswnw
nenwswwsewswnenenewsenwsenwnesesenew
enewnwewneswsewnwswenweswnenwsenwsw
sweneswneswneneenwnewenewwneswswnese
swwesenesewenwneswnwwneseswwne
enesenwswwswneneswsenwnewswseenwsese
wnwnesenesenenwwnenwsewesewsesesew
nenewswnwewswnenesenwnesewesw
eneswnwswnwsenenwnwnwwseeswneewsenese
neswnwewnwnwseenwseesewsenwsweewe
wseweeenwnesenwwwswnew
""".strip()
)
== 2208
)
| 27.054348 | 75 | 0.616312 |
8d8e19248b995245d1b4a70f8ffe00aa77e1d311 | 11,262 | sh | Shell | lib/junonia_web.sh | fprimex/zdesk-cli | 082578f79880b0329a4589272f975e44c4bd851c | [
"MIT"
] | 1 | 2020-05-08T02:54:21.000Z | 2020-05-08T02:54:21.000Z | lib/junonia_web.sh | fprimex/zdesk-cli | 082578f79880b0329a4589272f975e44c4bd851c | [
"MIT"
] | null | null | null | lib/junonia_web.sh | fprimex/zdesk-cli | 082578f79880b0329a4589272f975e44c4bd851c | [
"MIT"
] | null | null | null | #!/bin/sh
## -------------------------------------------------------------------
##
## junonia_web.sh - shell functions for web work
##
## -------------------------------------------------------------------
##
## Copyright (c) 2020 Brent W. Woodruff. All Rights Reserved.
##
## This file is provided to you under the Mozilla Public License
## Version 2.0 (the "License"); you may not use this file
## except in compliance with the License. You may obtain
## a copy of the License at
##
## https://www.mozilla.org/en-US/MPL/2.0/
##
## Unless required by applicable law or agreed to in writing,
## software distributed under the License is distributed on an
## "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
## KIND, either express or implied. See the License for the
## specific language governing permissions and limitations
## under the License.
##
## -------------------------------------------------------------------
# JUNONIA_DEBUG # Turn on debugging of the junonia library
# JUNONIA_CONFIG # Path to script rc file
# JUNONIA_CONFIGDIR # Path to config directory
# JUNONIA_CACHEDIR # Path to cache directory
# JUNONIA_CACHE # Flag to optionally disable (0) caching
# JUNONIA_WRAP # Width of two column output (option help listings)
# JUNONIA_COL1 # Width of column one
# JUNONIA_COL2 # Width of column two
# TMPDIR # Temporary directory
###
### Defaults
###
#JW_NEXT_PAGE_NUM='.meta.pagination."next-page"'
#JW_NEXT_PAGE=.links.next
JW_NEXT_PAGE=.next_page
#JW_CONTENT_TYPE=application/vnd.api+json
JW_CONTENT_TYPE=application/json
JW_NPAGES=10000
if [ -n "$JUNONIA_DEBUG" ]; then
# Debugging. Print command errors and make them verbose
jw_curl_silent=
else
# Not debugging. Shut everything up.
jw_curl_silent="-s"
fi
###
### jq utility functions
###
jw_jq_leafprint='def leaf_print(o):
o.indent as $i |
$i + " " as $ni |
o.errors as $e |
$e | keys[] as $k |
(select(($e[$k] | type) != "array" and ($e[$k] | type) != "object") |
"\($k): \($e[$k])"),
(select(($e[$k] | type) == "object") |
"\($k):",
"\(leaf_print({"errors": $e[$k], "indent": $ni}))"),
(select(($e[$k] | type) == "array") |
"\($k):",
"\(leaf_print({"errors": $e[$k], "indent": $ni}))");'
readonly JW_JQS="
$jw_jq_leafprint
"
###
### Shell utility functions
###
# Replace quotes and newlines with escape characters to prepare the
# value for insertion into JSON.
jw_json_escape () {
printf '%s' "$1" | awk '
{
gsub(/"/,"\\\"")
gsub(/\\n/,"\\\\n")
}
NR == 1 {
value_line = $0
}
NR != 1 {
value_line = value_line "\\n" $0
}
END {
printf "%s", value_line
}'
}
jw_curl () {
# $1: optional integer parameter for number of next-pages to retrieve
# $1 or $2 to $#: arguments to provide to curl
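# Performs the request with the configured auth source, then follows the
# JSON pagination link (JW_NEXT_PAGE) recursively until no next page is
# returned or the page limit is reached.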
echodebug "jw_curl args: $@"
if ! [ "$1" -eq "$1" ] >/dev/null 2>&1; then
npages="$JW_NPAGES"
else
npages="$1"
shift
fi
echodebug "npages: $npages"
echodebug "curl args: $@"
if [ "$npages" -lt 1 ]; then
return 0
fi
case $curl_token_src in
curlrc)
echovvv "curl --header \"Content-Type: $JW_CONTENT_TYPE\"" >&2
echovvv " --config \"$curlrc\"" >&2
echovvv " $*" >&2
resp="$(curl $jw_curl_silent -w '\nhttp_code: %{http_code}\n' \
--header "Content-Type: $JW_CONTENT_TYPE" \
--config "$curlrc" \
$@)"
;;
token)
echovvv "curl --header \"Content-Type: $JW_CONTENT_TYPE\"" >&2
echovvv " --header \"Authorization: Bearer \$JW_TOKEN\"" >&2
echovvv " $*" >&2
resp="$(curl $jw_curl_silent -w '\nhttp_code: %{http_code}\n' \
--header "Content-Type: application/$JW_CONTENT_TYPE" \
--header "Authorization: Bearer $JW_TOKEN" \
$@)"
;;
basic)
echovvv "curl --header \"Content-Type: $JW_CONTENT_TYPE\"" >&2
echovvv " --header \"Authorization: Basic \$JW_USER:\$JW_PASS\"" >&2
echovvv " $*" >&2
resp="$(curl $jw_curl_silent -w '\nhttp_code: %{http_code}\n' \
--header "Content-Type: application/$JW_CONTENT_TYPE" \
--header "Authorization: Basic $JW_USER:$JW_PASS" \
$@)"
;;
esac
resp_body="$(printf '%s' "$resp" | awk '!/^http_code/; /^http_code/{next}')"
resp_code="$(printf '%s' "$resp" | awk '!/^http_code/{next} /^http_code/{print $2}')"
echodebug "API request http code: $resp_code. Response:"
echodebug_raw "$resp_body"
case "$resp_code" in
2*)
printf "%s" "$resp_body"
next_page="$(printf "%s" "$resp_body" | \
jq -r "$JW_NEXT_PAGE" 2>&3)"
if [ -n "$next_page" ] && [ "$next_page" != null ] &&
! [ "$npages" -le 1 ]; then
echodebug "next page: $next_page"
echodebug "npages: $npages"
#next_link="$(printf "%s" "$resp_body" | jq -r '.links.next')"
#echodebug "next link: $next_link"
jw_curl $((--npages)) "$next_page"
fi
;;
4*|5*)
echoerr "API request failed."
echoerr_raw "HTTP status code: $resp_code"
jw_jq_prog='leaf_print({"errors": .errors[], "indent": " "})'
if jsonapi_err="$(echo "$resp_body" \
| jq -r "$jw_jq_leafprint $jw_jq_prog")"; then
echoerr_raw "JSON-API details:"
echoerr_raw "$jsonapi_err"
else
echoerr "Response:"
echoerr_raw "$resp_body"
fi
return 1
;;
*)
echoerr "Unable to complete API request."
echoerr "HTTP status code: $resp_code."
echoerr "Response:"
echoerr "$resp_body"
return 1
;;
esac
}
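# Decide where the API credential comes from -- an explicit -curlrc, an
# explicit -token, an existing curlrc file, or a token from config/env --
# and report the choice verbosely.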
jw_curlrc () {
readonly JW_DEFAULT_CURLRC="$JUNONIA_CONFIGDIR/curlrc"
readonly JW_CURLRC="${1:-"$JW_DEFAULT_CURLRC"}"
curl_token_src=
# curlrc argument at the command line takes highest precedence
if echo "$TFH_CMDLINE" | grep -qE -- '-curlrc'; then
echodebug "explicit -curlrc"
if [ -f "$curlrc" ]; then
curl_token_src=curlrc
else
curl_token_src=curlrc_not_found
fi
fi
# token at the command line takes second highest precedence
if [ -z "$curl_token_src" ] && [ -n "$token" ] &&
echo "$TFH_CMDLINE" | grep -qE -- '-token'; then
echodebug "explicit -token"
curl_token_src=token
fi
# curlrc from any source (default included) comes third
if [ -z "$curl_token_src" ] && [ -f "$curlrc" ]; then
echodebug "curlrc from env and config file"
curl_token_src=curlrc
fi
# token from the config file or environment var comes last
if [ -z "$curl_token_src" ] && [ -n "$token" ]; then
echodebug "token from env and config file"
curl_token_src=token
fi
if [ -z "$curl_token_src" ]; then
curl_token_src=none
fi
if [ -z "$token" ]; then
token_status="empty"
else
token_status="not empty"
fi
case $curl_token_src in
curlrc)
echov "token: $token_status, unused"
echov "curlrc: $curlrc"
;;
token)
echov "token: $token_status"
echov "curlrc: $curlrc, unused"
;;
curlrc_not_found)
echov "token: $token_status, unused"
echov "curlrc: $curlrc specified but not found"
;;
none)
echov "token: empty"
echov "curlrc: $curlrc not found"
;;
esac
}
# Write a given token value to a curl config file at the given path.
# $1 file path
# $2 token
make_curlrc () {
curlrc_dir="$(dirname "$1")"
if [ ! -d "$curlrc_dir" ]; then
if ! mkdir -p "$curlrc_dir"; then
echoerr "unable to create configuration directory:"
echoerr "$curlrc_dir"
return 1
fi
fi
if ! echo > "$1"; then
echoerr "Error: cannot write to curl config file:"
echoerr "$1"
return 1
fi
if ! chmod 600 "$1"; then
echoerr "WARNING: unable to set permissions on curl config file:"
echoerr "chmod 600 $1"
fi
if ! echo "--header \"Authorization: Bearer $2\"" > "$1"; then
echoerr "Error: cannot generate curl config file:"
echoerr "$1"
return 1
fi
echodebug "Created $1"
}
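# Manage the curl config used for API auth: write a token passed on the
# command line, import one from .terraformrc, or report the current curlrc
# and warn when the two tokens differ.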
tfh_curl_config () {
tfrc="$1"
curltoken="$2"
if [ -n "$curltoken" ] && [ -n "$tfrc" ]; then
echoerr "only one of -curltoken or -tfrc can be specified"
return 1
fi
if [ -n "$curltoken" ]; then
# (Re)create / overwrite the curlrc
make_curlrc "$curlrc" "$curltoken"
echo "wrote $curlrc"
return 0
fi
tf_config_token=
tf_config="${TERRAFORM_CONFIG:-"$HOME/.terraformrc"}"
if [ -f "$tf_config" ]; then
# This is simplified. It depends on the token keyword and value being
# on the same line in the .terraformrc.
tf_config_token="$(awk -v host="$hostname" '
# Skip commented lines
/^ *#/ {
next
}
# Get the host for this credentials entry
/credentials *"/ {
cred_host = $2
gsub(/"/, "", cred_host)
}
# Extract the token and note if it matches the specified host
/token *= *"[A-Za-z0-9\.]+"/ {
tokens++
match($0, /"[A-Za-z0-9\.]+"/)
token = substr($0, RSTART+1, RLENGTH-2)
if(cred_host == host) {
host_token = token
}
}
END {
# There was only one token, use that regardless as to the host
if(tokens == 1) {
print token
}
# More than one token, use the specified host
if(tokens > 1 && host_token) {
print host_token
}
# Either did not find any tokens or found tokens, but did not find the
# token for the specified host. To avoid being ambiguous, do not output
# any tokens.
}' "$tf_config")"
fi
if [ $tfrc ]; then
if [ -n "$tf_config_token" ]; then
if ! make_curlrc "$curlrc" "$tf_config_token"; then
echoerr "failed to create curlrc with terraformrc token"
echoerr "source: $tf_config"
echoerr "destination: $curlrc"
return 1
fi
echo "$curlrc generated from $tf_config"
return 0
else
echoerr "unable to extract token from terraformrc:"
echoerr "$tf_config"
return 1
fi
fi
if [ -f "$curlrc" ]; then
echo "$curlrc"
echov "$(cat "$curlrc")"
if [ -f "$tf_config" ] && [ -z "$TFH_NO_CURLRC_DIFF" ] ; then
# Got a .terraformrc token and the current token is from a tfh curl
# config. Compare the tokens to see if they're the same.
curlrc_token="$(awk '
/Bearer [A-Za-z0-9\.][A-Za-z0-9\.]*/ {
match($0, /Bearer [A-Za-z0-9\.][A-Za-z0-9\.]*/)
print substr($0, RSTART+7, RLENGTH-7)
}' "$curlrc")"
if [ "$curlrc_token" != "$tf_config_token" ]; then
echo
echo "WARNING tokens do not match in files:"
echo "$tf_config"
echo "$curlrc"
echo
echo "tfh will use: $curlrc"
echo
echo "to use $tf_config, run \`tfh curl-config -tfrc\`"
echo
echo "suppress this message by setting TFH_NO_CURLRC_DIFF=1"
echo
echov "curlrc : $curlrc_token"
echov "terraformrc: $tf_config_token"
fi
fi
else
echo "no curlrc file at $curlrc"
fi
}
| 27.072115 | 87 | 0.56997 |