hexsha
stringlengths 40
40
| size
int64 5
1.05M
| ext
stringclasses 98
values | lang
stringclasses 21
values | max_stars_repo_path
stringlengths 3
945
| max_stars_repo_name
stringlengths 4
118
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequencelengths 1
10
| max_stars_count
int64 1
368k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
945
| max_issues_repo_name
stringlengths 4
118
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequencelengths 1
10
| max_issues_count
int64 1
134k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
945
| max_forks_repo_name
stringlengths 4
135
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequencelengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
1.05M
| avg_line_length
float64 1
1.03M
| max_line_length
int64 2
1.03M
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dda5c3983af87d62df4fbc4ec8b319f2d01e73ae | 509 | java | Java | Catalogo-Web/src/co/gov/ideamredd/servlets/IrARegistrarEspecieCatalogoServlet.java | danielrcardenas/mapas_ideam | e2bb4235621aa789d8af9758a91e0ac05c622607 | [
"Apache-2.0"
] | 1 | 2020-01-03T09:29:34.000Z | 2020-01-03T09:29:34.000Z | Catalogo-Web/src/co/gov/ideamredd/servlets/IrARegistrarEspecieCatalogoServlet.java | danielrcardenas/mapas_ideam | e2bb4235621aa789d8af9758a91e0ac05c622607 | [
"Apache-2.0"
] | null | null | null | Catalogo-Web/src/co/gov/ideamredd/servlets/IrARegistrarEspecieCatalogoServlet.java | danielrcardenas/mapas_ideam | e2bb4235621aa789d8af9758a91e0ac05c622607 | [
"Apache-2.0"
] | null | null | null | package co.gov.ideamredd.servlets;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
 * Servlet that forwards POST requests to the species-registration form
 * of the catalogue (no processing is done here).
 */
public class IrARegistrarEspecieCatalogoServlet extends HttpServlet {

    /**
     * Redirects the client to the registration JSP; request parameters
     * are ignored.
     */
    @Override
    protected void doPost(HttpServletRequest request,
            HttpServletResponse response) throws ServletException, IOException {
        // Pure navigation servlet: only issue the redirect.
        response.sendRedirect("reg/registrarDatosEspecies.jsp");
    }
}
| 24.238095 | 71 | 0.831041 |
e23ac0c99162a0ea92f7f76c6664dfe96d46bb75 | 3,603 | py | Python | sila_library/sila2lib/fdl_parser/tests/test_fdl_parser.py | lemmi25/sila2lib | ac4db8ee7fe6c99bde498151a539b25be2021d2f | [
"MIT"
] | null | null | null | sila_library/sila2lib/fdl_parser/tests/test_fdl_parser.py | lemmi25/sila2lib | ac4db8ee7fe6c99bde498151a539b25be2021d2f | [
"MIT"
] | null | null | null | sila_library/sila2lib/fdl_parser/tests/test_fdl_parser.py | lemmi25/sila2lib | ac4db8ee7fe6c99bde498151a539b25be2021d2f | [
"MIT"
] | null | null | null | # Set pylint configuration for this file
# pylint: disable=missing-docstring, protected-access
# import general Packages
import unittest
import os
# import package related modules and classes
from ..fdl_parser import FDLParser
from ..command import Command
from ..property import Property
from ..data_type_definition import DataTypeDefinition
from ..standard_errors import DefinedExecutionError
class TestFDLParser(unittest.TestCase):
    """Unit tests for FDLParser, run against sample *.sila.xml feature files."""

    def setUp(self):
        """
        Sets up basic attributes for the unit tests run in this class.
        Create the basic path in which the input xml files are stored.
        """
        self.base_path = os.path.join(os.path.dirname(__file__), "fdl")

    def test_feature(self):
        """The parsed XML root must be a SiLA Feature element."""
        obj = FDLParser(os.path.join(self.base_path, "Simple.sila.xml"))
        self.assertEqual(obj.root.tag, '{http://www.sila-standard.org}Feature')

    def test_attributes(self):
        """
        Test of all attributes are read correctly
        For this test it is assumed that no optional attributes are present and only default values are found.
        """
        obj = FDLParser(os.path.join(self.base_path, "Simple.sila.xml"))
        # start with mandatory attributes
        self.assertEqual(obj.feature_version, '1.3')
        self.assertEqual(obj.feature_version_major, 1)
        self.assertEqual(obj.feature_version_minor, 3)
        self.assertEqual(obj.originator, 'org.silastandard')
        self.assertEqual(obj.sila2_version, '1.0')
        # optional arguments and defaults
        self.assertEqual(obj.maturity_level, 'Draft')
        self.assertEqual(obj.category, 'example')
        self.assertEqual(obj.locale, 'en-us')

    def test_attributes_optional(self):
        """
        Tests if optional attributes are read correctly if not set.
        For this test all optional attributes must be set.
        """
        obj = FDLParser(os.path.join(self.base_path, "Simple_AttributesOptional.sila.xml"))
        self.assertEqual(obj.locale, 'en-us')
        self.assertEqual(obj.maturity_level, 'Draft')

    def test_elements_base(self):
        """Tests if the base elements of a feature are read correctly."""
        obj = FDLParser(os.path.join(self.base_path, "Simple.sila.xml"))
        self.assertEqual(obj.identifier, 'SimpleFeature')
        self.assertEqual(obj.name, 'Simple Feature')
        self.assertEqual(
            obj.description,
            'Minimal feature definition, nothing is required. Can be used to check (default) attributes.'
        )

    def test_elements_complete(self):
        """Tests if all elements (one of each) are read correctly."""
        obj = FDLParser(os.path.join(self.base_path, "Complete.sila.xml"))
        # one command, keyed by its identifier
        self.assertEqual(len(obj.commands), 1)
        self.assertIn('CommandIdentifier', obj.commands)
        self.assertIs(type(obj.commands['CommandIdentifier']), Command)
        # one property
        self.assertEqual(len(obj.properties), 1)
        self.assertIn('PropertyIdentifier', obj.properties)
        self.assertIs(type(obj.properties['PropertyIdentifier']), Property)
        # one data type definition
        self.assertEqual(len(obj.data_type_definitions), 1)
        self.assertIn('DataTypeDefinitionIdentifier', obj.data_type_definitions)
        self.assertIs(type(obj.data_type_definitions['DataTypeDefinitionIdentifier']), DataTypeDefinition)
        # one defined execution error
        self.assertEqual(len(obj.defined_execution_errors), 1)
        self.assertIn('DefinedExecutionErrorIdentifier', obj.defined_execution_errors)
        self.assertIs(type(obj.defined_execution_errors['DefinedExecutionErrorIdentifier']), DefinedExecutionError)
| 40.943182 | 115 | 0.696087 |
32513d9e656fbbb8b278aa1527e4f1ca219ea06b | 5,436 | rs | Rust | examples/basic/src/main.rs | Unoqwy/polecen | 7c761f16764a7329950c4bcd483ebf140cec13a0 | [
"Apache-2.0",
"MIT"
] | null | null | null | examples/basic/src/main.rs | Unoqwy/polecen | 7c761f16764a7329950c4bcd483ebf140cec13a0 | [
"Apache-2.0",
"MIT"
] | null | null | null | examples/basic/src/main.rs | Unoqwy/polecen | 7c761f16764a7329950c4bcd483ebf140cec13a0 | [
"Apache-2.0",
"MIT"
] | null | null | null | use std::env;
use std::str::FromStr;
use polecen::arguments::prelude::*;
use serenity::client::{Context, EventHandler};
use serenity::model::channel::Message;
use serenity::model::guild::Member;
use serenity::model::prelude::Ready;
use serenity::{async_trait, Client};
struct Handler;
#[async_trait]
impl EventHandler for Handler {
async fn message(&self, ctx: Context, message: Message) {
if let Some(content) = message.content.strip_prefix("!") {
if let Err(e) = run_command(&ctx, &message, content.to_owned()).await {
match e {
CommandArgumentsReadError::RequiredArgumentMissing { position, .. } => {
message
.channel_id
.say(&ctx.http, format!("Missing argument at position `{}`!", position))
.await
.unwrap();
},
_ => {
message
.channel_id
.say(&ctx.http, format!(":x: Raw error: {}", e))
.await
.unwrap();
},
}
}
}
}
async fn ready(&self, _ctx: Context, ready: Ready) {
println!("{} is connected!", ready.user.name);
}
}
#[derive(Clone)]
enum Operator {
ADD,
SUBSTRACT,
}
impl Operator {
    /// Apply the operator to the two integer operands.
    fn calc(&self, lhs: i32, rhs: i32) -> i32 {
        match self {
            Self::ADD => lhs + rhs,
            Self::SUBSTRACT => lhs - rhs,
        }
    }
}
impl FromStr for Operator {
    type Err = ();

    /// Parse an operator from its symbol or case-insensitive name
    /// ("+", "plus", "add" / "-", "minus", "substract").
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "+" | "plus" | "add" => Ok(Self::ADD),
            "-" | "minus" | "substract" => Ok(Self::SUBSTRACT),
            _ => Err(()),
        }
    }
}
polecen::expand_command_here!((PolecenCommandArgs) polecen => match {
perform => {
target: Member, "Target member";
action: String, "Action to perform";
reason: Option<String>, "Reason";
},
calc => {
lhs: i32, "Integer";
op: String, "Operator";
rhs: i32, "Integer";
},
version | ver | "?" => {}
});
async fn run_command(
ctx: &Context,
message: &Message,
content: String,
) -> Result<(), CommandArgumentsReadError> {
let mut args = content.split_whitespace();
if let Some(command) = args.next() {
match command {
"polecen" => {
let args = polecen::read_args!(PolecenCommandArgs, args, ctx, [M] message)?;
exec_polecen_command(ctx, message, args).await.unwrap();
},
_ => {},
}
}
Ok(())
}
type CommandResult = Result<(), Box<dyn std::error::Error + Send + Sync>>;
async fn exec_polecen_command(
ctx: &Context,
message: &Message,
args: PolecenCommandArgs,
) -> CommandResult {
match &args {
PolecenCommandArgs::Perform(args) => {
let PolecenCommandArgsPerform { target, .. } = args;
message
.channel_id
.send_message(&ctx.http, |m| {
m.embed(|e| {
e.title("Action required");
e.color(0xff0000);
e.author(|a| {
a.name(target.display_name());
if let Some(icon_url) = target.user.avatar_url() {
a.icon_url(icon_url);
}
a
});
e.field(
"Information",
format!(
"You are required to `{}`. Please complete it in the shortest delay.",
args.action
),
false,
);
if let Some(reason) = &args.reason {
e.field("Reason", reason, false);
}
e
});
m
})
.await?;
},
PolecenCommandArgs::Calc(args) => {
let PolecenCommandArgsCalc { lhs, rhs, .. } = args;
if let Ok(op) = args.op.parse::<Operator>() {
message
.channel_id
.say(&ctx.http, format!(":white_check_mark: Result: `{}`", op.calc(*lhs, *rhs)))
.await?;
} else {
message.channel_id.say(&ctx.http, "Unknown operator. Available: `+`, `-`.").await?;
}
},
PolecenCommandArgs::Version => {
message
.channel_id
.say(
&ctx.http,
format!(":information_source: Version: {}", env!("CARGO_PKG_VERSION")),
)
.await?;
},
}
Ok(())
}
#[tokio::main]
async fn main() {
let token = env::var("DISCORD_TOKEN").expect("Expected a token in the environment");
let mut client =
Client::builder(&token).event_handler(Handler).await.expect("Could not create client");
if let Err(why) = client.start().await {
println!("Client error: {:?}", why);
}
}
| 29.704918 | 100 | 0.444077 |
bb8094351a848a5d99d06e8544507e615678acfb | 4,002 | cs | C# | Defs/ExtJs/Ext.tip.Tip.cs | SharpKit/SharpKit-SDK | 66a6e64436c6254b60b7ea5aee54d026fff78237 | [
"MIT"
] | 12 | 2015-02-05T06:46:20.000Z | 2020-12-05T17:31:51.000Z | Defs/ExtJs/Ext.tip.Tip.cs | SharpKit/SharpKit-SDK | 66a6e64436c6254b60b7ea5aee54d026fff78237 | [
"MIT"
] | 4 | 2015-01-08T10:32:01.000Z | 2017-07-06T08:57:11.000Z | Defs/ExtJs/Ext.tip.Tip.cs | SharpKit/SharpKit-SDK | 66a6e64436c6254b60b7ea5aee54d026fff78237 | [
"MIT"
] | 8 | 2015-03-18T18:18:58.000Z | 2020-04-04T08:01:53.000Z | //***************************************************
//* This file was generated by tool
//* SharpKit
//* At: 29/08/2012 03:59:41 p.m.
//***************************************************
using SharpKit.JavaScript;
namespace Ext.tip
{
#region Tip
/// <inheritdocs />
/// <summary>
/// <p>This is the base class for <see cref="Ext.tip.QuickTip">Ext.tip.QuickTip</see> and <see cref="Ext.tip.ToolTip">Ext.tip.ToolTip</see> that provides the basic layout and
/// positioning that all tip-based classes require. This class can be used directly for simple, statically-positioned
/// tips that are displayed programmatically, or it can be extended to provide custom tip implementations.</p>
/// </summary>
[JsType(JsMode.Prototype, Export=false, OmitOptionalParameters=true)]
public partial class Tip : Ext.panel.Panel
{
/// <summary>
/// If true, then the tooltip will be automatically constrained to stay within
/// the browser viewport.
/// Defaults to: <c>true</c>
/// </summary>
public bool constrainPosition;
/// <summary>
/// Experimental. The default Ext.Element.alignTo anchor position value
/// for this tip relative to its element of origin.
/// Defaults to: <c>"tl-bl?"</c>
/// </summary>
public JsString defaultAlign;
/// <summary>
/// Set Tip draggable using base Component's draggability
/// </summary>
private void initDraggable(){}
/// <summary>
/// Shows this tip at the specified XY position. Example usage:
/// <code>// Show the tip at x:50 and y:100
/// tip.showAt([50,100]);
/// </code>
/// </summary>
/// <param name="xy"><p>An array containing the x and y coordinates</p>
/// </param>
public void showAt(JsArray<Number> xy){}
/// <summary>
/// Experimental. Shows this tip at a position relative to another element using
/// a standard Ext.Element.alignTo anchor position value. Example usage:
/// // Show the tip at the default position ('tl-br?')
/// tip.showBy('my-el');
/// // Show the tip's top-left corner anchored to the element's top-right corner
/// tip.showBy('my-el', 'tl-tr');
/// </summary>
/// <param name="el"><p>An HTMLElement, <see cref="Ext.dom.Element">Ext.Element</see> or string
/// id of the target element to align to.</p>
/// </param>
/// <param name="position"><p>A valid <see cref="Ext.dom.Element.alignTo">Ext.Element.alignTo</see> anchor position.</p>
/// <p>Defaults to 'tl-br?' or <see cref="Ext.tip.TipConfig.defaultAlign">defaultAlign</see> if specified.</p>
/// </param>
public void showBy(object el, object position=null){}
public Tip(Ext.tip.TipConfig config){}
public Tip(){}
public Tip(params object[] args){}
}
#endregion
#region TipConfig
/// <inheritdocs />
[JsType(JsMode.Json, Export=false, OmitOptionalParameters=true)]
public partial class TipConfig : Ext.panel.PanelConfig
{
/// <summary>
/// If true, then the tooltip will be automatically constrained to stay within
/// the browser viewport.
/// Defaults to: <c>true</c>
/// </summary>
public bool constrainPosition;
/// <summary>
/// Experimental. The default Ext.Element.alignTo anchor position value
/// for this tip relative to its element of origin.
/// Defaults to: <c>"tl-bl?"</c>
/// </summary>
public JsString defaultAlign;
public TipConfig(params object[] args){}
}
#endregion
#region TipEvents
/// <inheritdocs />
[JsType(JsMode.Json, Export=false, OmitOptionalParameters=true)]
public partial class TipEvents : Ext.panel.PanelEvents
{
public TipEvents(params object[] args){}
}
#endregion
}
| 43.032258 | 178 | 0.591704 |
b673287b607fd78956d79e0586763a709eb41943 | 413 | rb | Ruby | db/migrate/20160809043736_create_naturesoft_projects_images.rb | globalnaturesoft/naturesoft-projects | b782717f9feed4ca33f6ee8e0f49d806a35cba6d | [
"MIT"
] | null | null | null | db/migrate/20160809043736_create_naturesoft_projects_images.rb | globalnaturesoft/naturesoft-projects | b782717f9feed4ca33f6ee8e0f49d806a35cba6d | [
"MIT"
] | null | null | null | db/migrate/20160809043736_create_naturesoft_projects_images.rb | globalnaturesoft/naturesoft-projects | b782717f9feed4ca33f6ee8e0f49d806a35cba6d | [
"MIT"
] | null | null | null | class CreateNaturesoftProjectsImages < ActiveRecord::Migration[5.0]
def change
create_table :naturesoft_projects_images do |t|
t.string :name
t.text :description
t.string :image
t.string :status, :default => "active"
t.boolean :is_main, :default => false
t.references :project, references: :naturesoft_projects_projects, index: true
t.timestamps
end
end
end
| 27.533333 | 83 | 0.690073 |
f600003f47e60792dabea9657b35e02919c55e2e | 2,956 | hpp | C++ | common/SoapySSDPEndpoint.hpp | bastille-attic/SoapyRemote | 63664b890ccfe2d5472901b879b1593aacc92ef1 | [
"BSL-1.0"
] | null | null | null | common/SoapySSDPEndpoint.hpp | bastille-attic/SoapyRemote | 63664b890ccfe2d5472901b879b1593aacc92ef1 | [
"BSL-1.0"
] | null | null | null | common/SoapySSDPEndpoint.hpp | bastille-attic/SoapyRemote | 63664b890ccfe2d5472901b879b1593aacc92ef1 | [
"BSL-1.0"
] | null | null | null | // Copyright (c) 2015-2016 Josh Blum
// SPDX-License-Identifier: BSL-1.0
#pragma once
#include "SoapyRPCSocket.hpp"
#include <string>
#include <csignal> //sig_atomic_t
#include <mutex>
#include <vector>
#include <memory>
class SoapyHTTPHeader;
struct SoapySSDPEndpointData;
/*!
* Service an SSDP endpoint to:
* keep track of discovered servers of interest,
* and to respond to discovery packets for us.
*/
class SoapySSDPEndpoint
{
public:
//! Get a singleton instance of the endpoint
static std::shared_ptr<SoapySSDPEndpoint> getInstance(void);
/*!
* Create a discovery endpoint
*/
SoapySSDPEndpoint(void);
~SoapySSDPEndpoint(void);
/*!
* Allow the endpoint to advertise that its running the RPC service
*/
void registerService(const std::string &uuid, const std::string &service);
/*!
* Enable the client endpoint to search for running services.
*/
void enablePeriodicSearch(const bool enable);
/*!
* Enable the server to send periodic notification messages.
*/
void enablePeriodicNotify(const bool enable);
/*!
* Get a list of all active server URLs.
*
* The same endpoint can be discovered under both IPv4 and IPv6.
* When 'only' is false, the ipVer specifies the IP version preference
* when both are discovered but will fallback to the other version.
* But when 'only' is true, only addresses of the ipVer type are used.
*
* \param ipVer the preferred IP version to discover (6 or 4)
* \param only true to ignore other discovered IP versions
*/
std::vector<std::string> getServerURLs(const int ipVer = 4, const bool only = false);
private:
SoapySocketSession sess;
//protection between threads
std::mutex mutex;
//service settings
bool serviceRegistered;
std::string uuid;
std::string service;
//configured messages
bool periodicSearchEnabled;
bool periodicNotifyEnabled;
//server data
std::vector<SoapySSDPEndpointData *> handlers;
//signal done to the thread
sig_atomic_t done;
void spawnHandler(const std::string &bindAddr, const std::string &groupAddr, const int ipVer);
void handlerLoop(SoapySSDPEndpointData *data);
void sendHeader(SoapyRPCSocket &sock, const SoapyHTTPHeader &header, const std::string &addr);
void sendSearchHeader(SoapySSDPEndpointData *data);
void sendNotifyHeader(SoapySSDPEndpointData *data, const std::string &nts);
void handleSearchRequest(SoapySSDPEndpointData *data, const SoapyHTTPHeader &header, const std::string &addr);
void handleSearchResponse(SoapySSDPEndpointData *data, const SoapyHTTPHeader &header, const std::string &addr);
void handleNotifyRequest(SoapySSDPEndpointData *data, const SoapyHTTPHeader &header, const std::string &addr);
void handleRegisterService(SoapySSDPEndpointData *, const SoapyHTTPHeader &header, const std::string &recvAddr);
};
| 31.784946 | 116 | 0.715832 |
f42e012725c283577d4fbe6cf894083edb2db581 | 308 | cs | C# | COZO/IDepartmentTransaction.cs | pjlammertyn/Cozo-Broker | 8710a478d4a14dff63d3f258614066d90fc980c9 | [
"Apache-2.0"
] | null | null | null | COZO/IDepartmentTransaction.cs | pjlammertyn/Cozo-Broker | 8710a478d4a14dff63d3f258614066d90fc980c9 | [
"Apache-2.0"
] | 1 | 2016-11-09T14:36:57.000Z | 2016-11-09T14:36:57.000Z | COZO/IDepartmentTransaction.cs | pjlammertyn/Cozo-Broker | 8710a478d4a14dff63d3f258614066d90fc980c9 | [
"Apache-2.0"
] | null | null | null | using COZO.KMEHR;
namespace COZO
{
    /// <summary>
    /// Builds KMEHR fragments describing a hospital department.
    /// </summary>
    public interface IDepartmentTransaction
    {
        /// <summary>Builds the KMEHR transaction for the given department.</summary>
        transaction GetDepartmentTransaction(string departmentId);
        //CDHCPARTYvalues GetDepartment(string departmentId);
        /// <summary>Builds the hcparty element for the department.
        /// NOTE(review): aHCParty looks like a healthcare-party type code — confirm with callers.</summary>
        hcpartyType GetDepartmentHcpartyType(string aHCParty, string departmentId);
    }
}
| 25.666667 | 83 | 0.746753 |
03604b29ef9d817ee736de4dc4f11d1006ba9042 | 705 | swift | Swift | huff/views/TweetPhotoCollectionViewCell.swift | rluftw/huff | 153dbbb4b8929ca2fbc590ca88804f189f8115d7 | [
"MIT"
] | 1 | 2021-02-23T21:08:30.000Z | 2021-02-23T21:08:30.000Z | huff/views/TweetPhotoCollectionViewCell.swift | rluftw/huff | 153dbbb4b8929ca2fbc590ca88804f189f8115d7 | [
"MIT"
] | null | null | null | huff/views/TweetPhotoCollectionViewCell.swift | rluftw/huff | 153dbbb4b8929ca2fbc590ca88804f189f8115d7 | [
"MIT"
] | null | null | null | //
// TweetPhotoCollectionCollectionViewCell.swift
// huff
//
// Created by Xing Hui Lu on 12/10/16.
// Copyright © 2016 Xing Hui Lu. All rights reserved.
//
import UIKit
class TweetPhotoCollectionViewCell: UICollectionViewCell {

    // MARK: - properties

    /// The downloaded image; assigning it updates the image view and
    /// stops the loading spinner.
    var photo: UIImage? {
        didSet {
            photoView.image = photo
            activityIndicator.stopAnimating()
        }
    }

    // MARK: - outlets

    @IBOutlet weak var photoView: UIImageView!
    @IBOutlet weak var activityIndicator: UIActivityIndicatorView!

    /// Reset to the loading state so a recycled cell never shows a stale photo.
    override func prepareForReuse() {
        super.prepareForReuse()
        photo = nil
        activityIndicator.startAnimating()
    }
}
| 22.03125 | 66 | 0.634043 |
e74db4c4bfbceab3b1a7a69d6dd5d71fca431790 | 4,416 | php | PHP | resources/views/home/index.blade.php | way640/zdmoney | 5319ec58322937a3dc18f9800c695852b90b899b | [
"MIT"
] | null | null | null | resources/views/home/index.blade.php | way640/zdmoney | 5319ec58322937a3dc18f9800c695852b90b899b | [
"MIT"
] | null | null | null | resources/views/home/index.blade.php | way640/zdmoney | 5319ec58322937a3dc18f9800c695852b90b899b | [
"MIT"
] | null | null | null | @extends('Home.title')
@section('content')
<!--main-------------------------------------------------------------------------------------------------------------->
<!--banner-----轮播图---------------------------------------------------------------------------------------->
<div id="banner" >
<img id="imgs" src="/image/focus1.gif">
<div id="xd">
<span class="xd2">●</span>
<span class="xd1">●</span>
<span class="xd1">●</span>
<span class="xd1">●</span>
</div>
</div>
<script>
var timer=setInterval("AutoPlay()",1300);//全局变量,定义定时器,每隔1300毫秒执行一次AutoPlay()函数
var p=1;//全局变量,存储当前播放的第几张图片
var xdspan=document.getElementById("xd").getElementsByTagName("span");//获取xd下的所有span标签
var xdspanlength=xdspan.length;//获取span标签的数量
for(var i=0;i<xdspanlength;i++){
xdspan.item(i).onclick=function(){//通过for循环为所有span标签添加点击事件
for(var i=0;i<xdspanlength;i++){//通过for循环为所有span标签设置class="xd1"
xdspan.item(i).className="xd1";
}
this.className="xd2";//设置点击的span标签class="xd2"
for(var i=0;i<xdspanlength;i++){
if(xdspan.item(i).className=="xd2"){//通过for循环判断当前点击的是第几个小圆点,然后修改图片地址
document.getElementById("imgs").src='/image/focus'+(i+1)+'.gif';
p=i+1;////全局变量,存储当前播放的第几张图片,以便从当前图片开始播放下一张图片
}
}
}
}
function AutoPlay(){//自动播放函数
if(p>=5){p=1;}//只有4张图片,当p大于等于4时就从第一张图片开始
for(var i=0;i<xdspanlength;i++){//通过for循环为所有span标签设置class="xd1"
xdspan.item(i).className="xd1";
}
xdspan.item(p-1).className="xd2";//设置当前播放的span标签class="xd2"
document.getElementById("imgs").src='/image/focus'+p+'.gif';//修改图片地址
p++;
}
document.getElementById("banner").onmouseover=function(){clearInterval(timer);}//当鼠标移入轮播区域时,停止自动播放
document.getElementById("banner").onmouseout=function(){timer=setInterval("AutoPlay()",1300);}//当鼠标移出轮播区域时,继续自动播放
</script>
<!--announcement------------------------------------------------------------------------------------------------------>
<div class="main">
<div class="title">
<h1>基金理财</h1>
<p>盒子正在努力审核项目,预计项目发布时间 <span>10:00</span> , <span>13:00</span> , <span>16:00</span> , <span>20:00</span>,其余时间与周末随机发标。</p>
<a href="http://www.zdmoney.com/gold/fundlist">更多</a>
</div>
<div class="regular">
<a class="regular-thear" href="#">
<ol>
<li>产品丰富</li>
<li>期限多样</li>
<li>尽职风控</li>
<li style="font-size:12px;">更多基金理财> </li>
</ol>
</a>
<ul>
<div id='fundbox'>
</div>
</ul>
</div>
<div class="title">
<h1>贵金属</h1>
<a href="http://www.zdmoney.com/gold/gold">更多</a>
</div>
<div class="fund" >
<a class="fund-thear" href="#">
<ol>
<li>紧随市场</li>
<li>海量牛基</li>
<li>热点聚集</li>
<li style="font-size:12px;">更多贵金属理财> </li>
</ol>
</a>
<ul>
<div id='goldbox'>
</div>
</ul>
</div>
<div class="title">
<h1>锦囊</h1>
<a href="#">更多</a>
</div>
<div class="pocket">
<a class="pocket-thear" href="#">
<ol>
<li>大咖云集</li>
<li>见微知著</li>
<li>智慧投资</li>
<li style="font-size:12px;">更多锦囊> </li>
</ol>
</a>
<ul>
<div id='pbox'>
</div>
</ul>
</div>
</div>
<!--row--------------------------------------------------------------------------------------------------------------->
<div class="content-wrap">
<div class="c-wrap">
<h1>合作机构</h1>
<div class="scroll">
<a href="###" class="left2" onclick="left2()"></a>
<div id='linkbox'>
</div>
<a href="###" class="right2" onclick="right2()"></a>
</div>
</div>
</div>
@stop
| 33.709924 | 133 | 0.409647 |
662749aa4f924566389584889915ed287ffe6301 | 23,848 | py | Python | classification/ngram_based_classifier.py | gbosetti/ca | 3f37edc4b8f69f61d02b881242522f6fa15e2695 | [
"MIT"
] | null | null | null | classification/ngram_based_classifier.py | gbosetti/ca | 3f37edc4b8f69f61d02b881242522f6fa15e2695 | [
"MIT"
] | 4 | 2021-06-08T22:30:03.000Z | 2022-03-12T00:48:52.000Z | classification/ngram_based_classifier.py | gbosetti/cati | 3f37edc4b8f69f61d02b881242522f6fa15e2695 | [
"MIT"
] | null | null | null | import string
import nltk
import traceback
from nltk.corpus import stopwords
nltk.download('stopwords')
from collections import Counter
from mabed.es_connector import Es_connector
from nltk.tokenize import TweetTokenizer
import re
import json
from elasticsearch_dsl import UpdateByQuery
from nltk.stem.snowball import FrenchStemmer
from nltk.stem.snowball import EnglishStemmer
from nltk.stem.snowball import ArabicStemmer
from nltk.stem.snowball import SpanishStemmer
class NgramBasedClasifier:
    def __init__(self, config_relative_path=''):
        """Initialize tokenizer and bookkeeping state.

        :param config_relative_path: path prefix forwarded to Es_connector
            (see get_ngrams_by_query).
        """
        # self.logs = []
        self.current_thread_percentage = 0  # presumably progress (%) of a background task, updated elsewhere — TODO confirm
        self.config_relative_path = config_relative_path
        self.tknzr = TweetTokenizer()  # NLTK tweet-aware tokenizer used by remove_stop_words
        self.retrievedLangs = set() # Matching the languages in the dataset
def get_n_grams(self, text, length=2):
n_grams = zip(*[text[i:] for i in range(length)])
# n_grams = list(nltk.bigrams(text))
return n_grams
def remove_stop_words(self, full_text, langs=["en", "fr", "es"]):
punctuation = list(string.punctuation + "…" + "’" + "'" + '🔴' + '•' + '...' + '.')
multilang_stopwords = self.get_stopwords_for_langs(langs) + ["Ã", "rt", "im"] + punctuation
full_text = full_text.lower().translate(str.maketrans('', '', string.punctuation))
tokenized_text = self.tknzr.tokenize(full_text) # nltk.word_tokenize(full_text)
filtered_words = list(filter(lambda word: len(word)>1 and word not in multilang_stopwords, tokenized_text))
full_text = " ".join(filtered_words)
full_text_no_emojis = self.remove_emojis(full_text)
full_text_no_emojis = " ".join(full_text_no_emojis.split())
return full_text_no_emojis
def remove_urls(self, text):
return re.sub(r'http\S+', '', text).strip()
def lemmatize(self, text, lang):
# spacy.prefer_gpu()
# nlp = spacy.load(lang) # en fr "en_core_web_sm"
if lang == "fr":
stemmer = FrenchStemmer()
elif lang == "es":
stemmer = SpanishStemmer()
else:
stemmer = EnglishStemmer()
stemmed = []
for word in text.split(" "):
stemmed.append(stemmer.stem(word))
# doc = nlp(u""+text)
# lem_terms = []
# for token in doc:
# lem_terms.append(token.lemma_)
return " ".join(stemmed)
def search_bigrams_related_tweets(self, **kwargs):
my_connector = Es_connector(index=kwargs["index"])
if kwargs.get('full_search', False): # All tweets
query = {
"query": {
"bool": {
"must": [
{"match_phrase": {kwargs["ngramsPropName"]: kwargs["ngram"]}},
{"match": {kwargs["session"]: kwargs["label"]}}
]
}
}
}
else: # matching keywords
query = {
"query": {
"bool": {
"must": [
{"match": {"text": kwargs["word"]}},
{"match_phrase": {kwargs["ngramsPropName"]: kwargs["ngram"]}},
{"match": {kwargs["session"]: kwargs["label"]}}
]
}
}
}
print(query)
return my_connector.init_paginatedSearch(query)
def update_tweets_state_by_ngram(self, **kwargs):
tweets_connector = Es_connector(index=kwargs["index"], doc_type="tweet")
if kwargs.get('full_search', False): # All tweets
query = {
"query": {
"bool": {
"must": [
{"match_phrase": {kwargs["ngramsPropName"]: kwargs["ngram"]}},
{"match": {kwargs["session"]: kwargs["query_label"]}}
]
}
}
}
else: # Tweets matching a user-generated query
query = {
"query": {
"bool": {
"must": [
{"match": {"text": kwargs["word"]}},
{"match_phrase": {kwargs["ngramsPropName"]: kwargs["ngram"]}},
{"match": {kwargs["session"]: kwargs["query_label"]}}
]
}
}
}
return tweets_connector.update_query(query, kwargs["session"], kwargs["new_label"])
def search_event_bigrams_related_tweets(self, **kwargs):
my_connector = Es_connector(index=kwargs["index"])
query = {
"query": {
"bool": {
"should": kwargs["target_terms"],
"minimum_should_match": 1,
"must": [
{"match_phrase": {kwargs["ngramsPropName"]: kwargs["ngram"]}},
{"match": {kwargs["session"]: kwargs["label"]}}
]
}
}
}
return my_connector.init_paginatedSearch(query)
def update_tweets_state_by_event_ngram(self, **kwargs):
tweets_connector = Es_connector(index=kwargs["index"], doc_type="tweet")
query = {
"query": {
"bool": {
"should": kwargs["target_terms"],
"minimum_should_match": 1,
"must": [
{"match_phrase": {kwargs["ngramsPropName"]: kwargs["ngram"]}},
{"match": {kwargs["session"]: kwargs["query_label"]}}
]
}
}
}
return tweets_connector.update_query(query, kwargs["session"], kwargs["new_label"])
def get_ngrams(self, **kwargs):
if kwargs.get('full_search', False):
query = {
"bool": {
"must": [
{"match": {kwargs["session"]: kwargs["label"]}}
]
}
}
else:
query = {
"bool": {
"must": [
{"match": {"text": kwargs["word"]}},
{"match": {kwargs["session"]: kwargs["label"]}}
]
}
}
return self.get_ngrams_by_query(query=query, **kwargs)
def chunks(self, target_list, target_size):
"""Yield successive n-sized chunks from l."""
for i in range(0, len(target_list), target_size):
yield target_list[i:i + target_size]
def get_positive_unlabeled_ngrams(self, **kwargs):
res = self.get_ngrams_by_query(query={
"match": {
kwargs["field"]: "confirmed"
}
}, **kwargs)
try:
return res["aggregations"]["ngrams_count"]["buckets"]
except KeyError as e:
return []
def get_negative_unlabeled_ngrams(self, **kwargs):
res = self.get_ngrams_by_query(query={
"match": {
kwargs["field"]: "negative"
}
}, **kwargs)
try:
return res["aggregations"]["ngrams_count"]["buckets"]
except KeyError as e:
return []
def get_ngrams_for_ids(self, **kwargs):
ids_chunks = self.chunks(kwargs["ids"], 50)
total_buckets = []
for chunk in ids_chunks:
ids = ""
for id in chunk:
ids += id + " or "
ids = ids[:-4]
query = {
"match": {
"id_str": ids
}
}
res = self.get_ngrams_by_query(query=query, **kwargs)
buckets = res["aggregations"]["ngrams_count"]["buckets"]
if len(buckets)>0:
total_buckets += buckets
try:
return total_buckets
except KeyError as e:
return []
def get_ngrams_for_event(self, **kwargs):
query = {
"bool": {
"should": kwargs["target_terms"],
"minimum_should_match": 1,
"must": [
{"match": {kwargs["session"]: kwargs["label"]}}
]
}
}
return self.get_ngrams_by_query(query=query, **kwargs)
    def get_ngrams_by_query(self, query="", **kwargs):
        """Run an ES aggregation counting n-grams (and, nested per n-gram,
        the distribution of session labels) over tweets matching *query*.

        :param query: ES query dict used to filter the tweets.
        :param kwargs: index, n_size ("2" for bigrams, ...), results_size
            (max number of buckets) and session (label field name).
        :return: the raw ES response dict, or {} on any failure.
        """
        try:
            my_connector = Es_connector(index=kwargs["index"], config_relative_path=self.config_relative_path)
            full_query = {
                "query": query,
                "size": 0,
                "aggs": {
                    "ngrams_count": {
                        "terms": {
                            # e.g. "2" + "grams.keyword" -> the 2grams field
                            "field": kwargs["n_size"] + "grams.keyword",
                            "size": kwargs["results_size"]
                        },
                        "aggs": {
                            "status": {
                                "terms": {
                                    "field": kwargs["session"] + ".keyword"
                                }
                            }
                        }
                    }
                }
            }
            return my_connector.search(full_query)
        except Exception as e:
            # Deliberate best-effort: log and return an empty dict so callers
            # can fall back (they guard with try/except KeyError).
            print('Error: ' + str(e))
            traceback.print_exc()
            return {}
def get_search_related_classification_data(self, index="test3", word="", session="", label="confirmed OR proposed OR negative", matching_ngrams=[], full_search=False):
if full_search:
query = {
"bool": {
"must": [
{"match": {session: label}}
]
}
}
else:
query = {
"bool": {
"must": [
{"match": {"text": word}},
{"match": {session: label}}
]
}
}
my_connector = Es_connector(index=index)
res = my_connector.search({
"size": 0,
"query": query,
"aggs": {
"query_classification": {
"terms": {
"field": session + ".keyword"
}
}
}
})
return res['aggregations']['query_classification']['buckets']
def get_bigrams_related_classification_data(self, matching_ngrams=[]):
# Counting the matching bigrams results by category
total_ngrams = matching_ngrams["hits"]["total"]
confirmed_ngrams = 0
negative_ngrams = 0
unlabeled_ngrams = 0
accum_total_ngrams = 0
for ngram in matching_ngrams['aggregations']['ngrams_count']['buckets']:
curr_confirmed = self.get_classif_doc_count("confirmed", ngram["status"]["buckets"])
confirmed_ngrams += curr_confirmed
curr_negative = self.get_classif_doc_count("negative", ngram["status"]["buckets"])
negative_ngrams += curr_negative
curr_unlabeled = self.get_classif_doc_count("proposed", ngram["status"]["buckets"])
unlabeled_ngrams += curr_unlabeled
accum_total_ngrams += curr_confirmed + curr_negative + curr_unlabeled
if accum_total_ngrams ==0:
return 0,0,0
else:
return (confirmed_ngrams / accum_total_ngrams) * total_ngrams, \
(negative_ngrams / accum_total_ngrams) * total_ngrams, \
(unlabeled_ngrams / accum_total_ngrams) * total_ngrams # confirmed_ngrams, negative_ngrams, unlabeled_ngrams
def get_classification_data(self, **kwargs):
query_classification = self.get_search_related_classification_data(kwargs["index"], kwargs["word"], kwargs["session"], kwargs["label"], kwargs["matching_ngrams"], kwargs['full_search'])
confirmed_ngrams, negative_ngrams, unlabeled_ngrams = self.get_bigrams_related_classification_data(kwargs["matching_ngrams"])
return [
{
"label": "Query",
"confirmed": self.get_classif_doc_count("confirmed", query_classification),
"negative": self.get_classif_doc_count("negative", query_classification),
"unlabeled": self.get_classif_doc_count("proposed", query_classification)
},
{
"label": "Ngrams",
"confirmed": confirmed_ngrams,
"negative": negative_ngrams,
"unlabeled": unlabeled_ngrams
}
]
def get_classif_doc_count(self, tag, classification):
category = list(filter(lambda item: item["key"] == tag, classification))
if len(category) > 0:
return category[0]["doc_count"]
else:
return 0
def gerenate_ngrams_for_tweets(self, tweets, **kwargs ): # remove_stopwords=True, stemming=True):
length = int(kwargs.get('length', 2))
tweets_to_update = [] # So the URL doesn't get too large
prop = kwargs['from_property']
for tweet in tweets:
try:
if prop in tweet["_source"]:
clean_text = self.remove_stop_words(tweet["_source"][prop]).split()
ngrams = list(self.get_n_grams(clean_text, length))
tweets_to_update.append({
"_ngrams": self.format_single_tweet_ngrams(ngrams),
"_id": tweet["_id"]
})
# for prop in tweet["_source"]:
# if tweet["_source"][prop] == None:
# tweet["_source"][prop] = "None"
full_tweet_ngrams = self.format_single_tweet_ngrams(ngrams)
self.updatePropertyValue(tweet=tweet, property_name=kwargs["prop"], property_value=full_tweet_ngrams, index=kwargs["index"])
else:
print("The tweet has no ", prop, " property.")
except Exception as e:
print('Error: ' + str(e))
# cnn = Es_connector(index=kwargs["index"])
# script_source = "for (int i = 0; i < docs.length; ++i) { if(docs[i]['_id'] == ctx._id){ ctx._source['" + kwargs[
# "prop"] + "'] = docs[i]['_ngrams']; break; }}"
# ubq = UpdateByQuery(using=cnn.es, index=cnn.index).script(source=script_source)
# for i in range(0, len(tweets_to_update), 5):
#
# tweets_chunk = tweets_to_update[i:i + 5]
# str_tweets = str(tweets_chunk).replace("None", "\'None\'").replace("\'", "\"")
# tweet_ids = [ { "match": {"_id": tweet["_id"]}} for tweet in tweets_chunk]
#
# query = {"query": {"bool": {"should": tweet_ids, "minimum_should_match": "1"}}}
# ubq.update_from_dict(query).params(docs=tweets_chunk)
# ubq.execute()
# Es_connector(index=kwargs["index"]).update_by_query(
# {"query": {"bool":{"should": tweet_ids, "minimum_should_match":"1" }}},
# "for (int i = 0; i < docs.length; ++i) { if(docs[i]['_id'] == ctx._id){ ctx._source['" +
# kwargs["prop"] + "'] = docs[i]['_ngrams']; break; }}",
# tweets_chunk
# )
# # "params": { "docs": tweets_chunk }
# q = {
# "script": {
# "inline": "def tweets = " + str_tweets + "; for (int i = 0; i < params.docs.length; ++i) { if(params.docs[i]['_id'] == ctx._id){ ctx._source['" + kwargs["prop"] + "'] = params.docs[i]['_ngrams']; break; }}",
# "lang": "painless"
# },
# "query": {
# "bool":{
# "should":tweet_ids,
# "minimum_should_match":"1"
# }
# }
# }
# print("...")
# cnn.es.update_by_query(body=q, doc_type='tweet', index=kwargs["index"]) #, params={ "docs": tweets_chunk })
# def tweets = " + str_tweets + ";
# ubq = UpdateByQuery(index=cnn.es.index).using(cnn.es).script(
# source="for (int i = 0; i < params.docs.length; ++i) { if(params.docs[i]['_id'] == ctx._id){ ctx._source['" + kwargs["prop"] + "'] = params.docs[i]['_ngrams']; break; }}",
# params={ "docs": str_tweets }
# )
# response = ubq.execute()
# ubq = UpdateByQuery(index=cnn.es.index).using(cnn.es).script(
# source="def tweets = " + str_tweets + "; for (int i = 0; i < params.docs.length; ++i) { if(params.docs[i]['_id'] == ctx._id){ ctx._source['" +
# kwargs["prop"] + "'] = params.docs[i]['_ngrams']; break; }}"
# )
# response = ubq.execute()
def remove_emojis(self, string):
emoji_pattern = re.compile("["
u"\U0001F600-\U0001F64F" # emoticons
u"\U0001F300-\U0001F5FF" # symbols & pictographs
u"\U0001F680-\U0001F6FF" # transport & map symbols
u"\U0001F1E0-\U0001F1FF" # flags (iOS)
u"\U00002702-\U000027B0"
u"\U000024C2-\U0001F251"
"]+", flags=re.UNICODE)
return emoji_pattern.sub(r'', string)
def generate_ngrams_for_index(self, **kwargs):
try:
# Get the data for performinga paginated search
self.current_thread_percentage = 0
print("Starting")
my_connector = Es_connector(index=kwargs["index"])
query = kwargs.get('query', {
"query": {
"match_all": {}
}
})
res = my_connector.init_paginatedSearch(query)
sid = res["sid"]
scroll_size = res["scroll_size"]
total = int(res["total"])
# Analyse and process page by page
i = 0
total_scrolls = int(total/scroll_size)
processed_scrolls = 0
print("from_property:", kwargs['from_property'])
while scroll_size > 0:
tweets = res["results"]
self.gerenate_ngrams_for_tweets(tweets, from_property=kwargs['from_property'], prop=kwargs["prop"], index=kwargs["index"], length=kwargs["length"])
i += 1
res = my_connector.loop_paginatedSearch(sid, scroll_size)
scroll_size = res["scroll_size"]
processed_scrolls += 1
self.current_thread_percentage = round(processed_scrolls * 100 / total_scrolls, 0)
print("Completed: ", self.current_thread_percentage, "%")
# Clean it at the end so the clien knows when to end asking for more logs
self.current_thread_percentage = 100
return True
except Exception as e:
print('Error: ' + str(e))
return False
# def generate_ngrams_for_unlabeled_tweets_on_index(self, **kwargs):
#
# query={
# "query": {
# "bool": {
# "must_not": {
# "exists" : { "field" : kwargs["prop"] }
# }
# }
# }
# }
#
# return self.generate_ngrams_for_index(**dict(kwargs, query=query))
def format_single_tweet_ngrams(self, ngrams):
full_tweet_ngrams = []
for ngram in ngrams:
single_ngram_text = ""
for term in ngram:
single_ngram_text = single_ngram_text + term + "-"
single_ngram_text = single_ngram_text[:-1] #remove the last - of the single ngram
full_tweet_ngrams.append(single_ngram_text)
return full_tweet_ngrams
#
# for k, v in ngrams:
#
# ngram_text = ""
# for term in k:
# ngram_text = ngram_text + term + "-"
# ngram_text = ngram_text.strip()
#
# return ngram_text
    def get_current_backend_logs(self):
        """Return the progress (percentage, 0-100) of the current background task."""
        return { "percentage": self.current_thread_percentage }
def updatePropertyValue(self, **kwargs):
tweet = kwargs["tweet"]
# cnn = Es_connector(index=kwargs["index"]);
#
# q = {
# "script": {
# "inline": "ctx._source." + kwargs["property_name"] + " = params.value",
# "lang": "painless",
# "params": {
# "value": str(kwargs["property_value"])
# }
# },
# "query": {
# "match": {
# "_id": tweet["_id"]
# }
# }
# }
#
# cnn.es.update_by_query(body=q, doc_type='tweet', index=kwargs["index"])
Es_connector(index=kwargs["index"]).es.update(
index=kwargs["index"],
doc_type="tweet",
id=tweet["_id"],
body={"doc": {
kwargs["property_name"]: kwargs["property_value"]
}},
retry_on_conflict=5
)
def get_stopwords_for_langs(self, langs):
swords = []
if "en" in langs:
swords = swords + stopwords.words('english')
self.retrievedLangs.add("en")
if "fr" in langs:
swords = swords + stopwords.words('french')
self.retrievedLangs.add("fr")
if "ar" in langs:
swords = swords + stopwords.words('arabic')
self.retrievedLangs.add("ar")
if "nl" in langs:
swords = swords + stopwords.words('dutch')
self.retrievedLangs.add("nl")
if "id" in langs:
swords = swords + stopwords.words('indonesian')
self.retrievedLangs.add("id")
if "fi" in langs:
swords = swords + stopwords.words('finnish')
self.retrievedLangs.add("fi")
if "de" in langs:
swords = swords + stopwords.words('german')
self.retrievedLangs.add("de")
if "hu" in langs:
swords = swords + stopwords.words('hungarian')
self.retrievedLangs.add("hu")
if "it" in langs:
swords = swords + stopwords.words('italian')
self.retrievedLangs.add("it")
if "nb" in langs:
swords = swords + stopwords.words('norwegian')
self.retrievedLangs.add("nb")
if "pt" in langs:
swords = swords + stopwords.words('portuguese')
self.retrievedLangs.add("pt")
if "ro" in langs:
swords = swords + stopwords.words('romanian')
self.retrievedLangs.add("ro")
if "ru" in langs:
swords = swords + stopwords.words('russian')
self.retrievedLangs.add("ru")
if "es" in langs:
swords = swords + stopwords.words('spanish')
self.retrievedLangs.add("es")
if "sv" in langs:
swords = swords + stopwords.words('swedish')
self.retrievedLangs.add("sv")
if "tr" in langs:
swords = swords + stopwords.words('turkish')
self.retrievedLangs.add("tr")
# TODO: complete with the full list of supported langs (there are some langs supported but miissing and not documented. E.g. Bulgarian or Ukrainian https://pypi.org/project/stop-words/ )
# The full list of languages may be found in C:/Users/username/AppData/Roming/nltk_data/corpora/stopwords
return swords
| 36.024169 | 225 | 0.497358 |
3dc15dba2cd94dd8a6dd52f4da3685548d08164d | 297 | lua | Lua | easy/delta_time.lua | IlkhamGaysin/ce-challenges | ac6b8c1fc4b6c098e380b9d694e93e614f6c969b | [
"BSD-3-Clause"
] | 2 | 2018-06-24T17:09:16.000Z | 2020-08-03T11:44:54.000Z | easy/delta_time.lua | IlkhamGaysin/ce-challenges | ac6b8c1fc4b6c098e380b9d694e93e614f6c969b | [
"BSD-3-Clause"
] | null | null | null | easy/delta_time.lua | IlkhamGaysin/ce-challenges | ac6b8c1fc4b6c098e380b9d694e93e614f6c969b | [
"BSD-3-Clause"
] | null | null | null | function div(a, b) return math.floor(a/b) end
for line in io.lines(arg[1]) do
local a = {}
for i in line:gmatch("%d%d") do a[#a+1] = i end
local t = math.abs((a[1]-a[4])*3600 + (a[2]-a[5])*60 + a[3]-a[6])
io.write(string.format("%02d:%02d:%02d\n", div(t, 3600), div(t, 60)%60, t%60))
end
| 33 | 80 | 0.572391 |
834082bcebe844ffe9fc74287f3911a165e230d9 | 242 | ts | TypeScript | api/permissions/serialised.ts | usekoppa/api | 43bfad59e9e2594d4799a94902c87964e890d14d | [
"MIT"
] | 4 | 2021-11-20T14:24:11.000Z | 2021-12-12T05:12:13.000Z | api/permissions/serialised.ts | usekoppa/api | 43bfad59e9e2594d4799a94902c87964e890d14d | [
"MIT"
] | null | null | null | api/permissions/serialised.ts | usekoppa/api | 43bfad59e9e2594d4799a94902c87964e890d14d | [
"MIT"
] | null | null | null | import { Permission } from "./permission.ts";
/**
* A serialised permission field sent from the API.
*
* https://discord.com/developers/docs/topics/permissions#permissions
*/
export type SerialisedPermissions = `${Permission | number}`;
| 26.888889 | 69 | 0.727273 |
4c9917a0b1ce6fde5d1731a8004f021831445b92 | 5,029 | dart | Dart | lib/models/dokter_model.dart | mf4lsb/adc_nakama | a19dbe23f95afe181ed3895ffe55eb80c0f057ca | [
"Unlicense"
] | 1 | 2020-12-02T07:08:37.000Z | 2020-12-02T07:08:37.000Z | lib/models/dokter_model.dart | mf4lsb/adc_nakama | a19dbe23f95afe181ed3895ffe55eb80c0f057ca | [
"Unlicense"
] | null | null | null | lib/models/dokter_model.dart | mf4lsb/adc_nakama | a19dbe23f95afe181ed3895ffe55eb80c0f057ca | [
"Unlicense"
] | null | null | null | // To parse this JSON data, do
//
// final dokterModel = dokterModelFromJson(jsonString);
import 'dart:convert';
DokterModel dokterModelFromJson(String str) => DokterModel.fromJson(json.decode(str));
String dokterModelToJson(DokterModel data) => json.encode(data.toJson());
class DokterModel {
DokterModel({
this.data,
});
List<Datum> data;
factory DokterModel.fromJson(Map<String, dynamic> json) => DokterModel(
data: List<Datum>.from(json["data"].map((x) => Datum.fromJson(x))),
);
Map<String, dynamic> toJson() => {
"data": List<dynamic>.from(data.map((x) => x.toJson())),
};
}
class Datum {
Datum({
this.id,
this.isActive,
this.picture,
this.age,
this.name,
this.company,
this.tempatKerja,
this.email,
this.phone,
this.address,
this.biografi,
this.kredensial,
this.afliansiAkademik,
this.registered,
this.workingAs,
this.jadwalHari,
this.jadwalJam,
this.gender,
});
String id;
bool isActive;
String picture;
int age;
Name name;
Company company;
TempatKerja tempatKerja;
String email;
String phone;
String address;
String biografi;
String kredensial;
String afliansiAkademik;
String registered;
String workingAs;
JadwalHari jadwalHari;
JadwalJam jadwalJam;
Gender gender;
factory Datum.fromJson(Map<String, dynamic> json) => Datum(
id: json["id"],
isActive: json["isActive"],
picture: json["picture"],
age: json["age"],
name: Name.fromJson(json["name"]),
company: companyValues.map[json["company"]],
tempatKerja: tempatKerjaValues.map[json["tempat_kerja"]],
email: json["email"],
phone: json["phone"],
address: json["address"],
biografi: json["biografi"],
kredensial: json["kredensial"],
afliansiAkademik: json["afliansi_akademik"],
registered: json["registered"],
workingAs: json["working_as"],
jadwalHari: JadwalHari.fromJson(json["jadwal_hari"]),
jadwalJam: JadwalJam.fromJson(json["jadwal_jam"]),
gender: genderValues.map[json["gender"]],
);
Map<String, dynamic> toJson() => {
"id": id,
"isActive": isActive,
"picture": picture,
"age": age,
"name": name.toJson(),
"company": companyValues.reverse[company],
"tempat_kerja": tempatKerjaValues.reverse[tempatKerja],
"email": email,
"phone": phone,
"address": address,
"biografi": biografi,
"kredensial": kredensial,
"afliansi_akademik": afliansiAkademik,
"registered": registered,
"working_as": workingAs,
"jadwal_hari": jadwalHari.toJson(),
"jadwal_jam": jadwalJam.toJson(),
"gender": genderValues.reverse[gender],
};
}
enum Company { NAKAMA }
final companyValues = EnumValues({
"NAKAMA": Company.NAKAMA
});
enum Gender { LAKI_LAKI, PEREMPUAN }
final genderValues = EnumValues({
"Laki-Laki": Gender.LAKI_LAKI,
"Perempuan": Gender.PEREMPUAN
});
class JadwalHari {
JadwalHari({
this.hariSatu,
this.hariDua,
this.hariTiga,
});
String hariSatu;
String hariDua;
String hariTiga;
factory JadwalHari.fromJson(Map<String, dynamic> json) => JadwalHari(
hariSatu: json["hari_satu"],
hariDua: json["hari_dua"],
hariTiga: json["hari_tiga"],
);
Map<String, dynamic> toJson() => {
"hari_satu": hariSatu,
"hari_dua": hariDua,
"hari_tiga": hariTiga,
};
}
class JadwalJam {
JadwalJam({
this.shiftSatu,
this.shiftDua,
this.shiftTiga,
});
String shiftSatu;
String shiftDua;
String shiftTiga;
factory JadwalJam.fromJson(Map<String, dynamic> json) => JadwalJam(
shiftSatu: json["shift_satu"],
shiftDua: json["shift_dua"],
shiftTiga: json["shift_tiga"],
);
Map<String, dynamic> toJson() => {
"shift_satu": shiftSatu,
"shift_dua": shiftDua,
"shift_tiga": shiftTiga,
};
}
class Name {
Name({
this.first,
this.last,
});
String first;
String last;
factory Name.fromJson(Map<String, dynamic> json) => Name(
first: json["first"],
last: json["last"],
);
Map<String, dynamic> toJson() => {
"first": first,
"last": last,
};
}
enum TempatKerja { KLINIK_NAKAMA, RS_NAKAMA }
final tempatKerjaValues = EnumValues({
"KLINIK NAKAMA": TempatKerja.KLINIK_NAKAMA,
"RS NAKAMA": TempatKerja.RS_NAKAMA
});
class EnumValues<T> {
Map<String, T> map;
Map<T, String> reverseMap;
EnumValues(this.map);
Map<T, String> get reverse {
if (reverseMap == null) {
reverseMap = map.map((k, v) => new MapEntry(v, k));
}
return reverseMap;
}
}
| 23.721698 | 86 | 0.58958 |
1a61caa4639109bcd033cdcb3e111c6b139be2a4 | 14,428 | py | Python | roboTraining/unitTest.py | Gabs48/SpringMassNetworks | d917ef242ed99b02f82e9ff8697960d0d1d65178 | [
"MIT"
] | 3 | 2019-11-27T15:35:20.000Z | 2021-12-09T08:20:34.000Z | roboTraining/unitTest.py | Gabs48/SpringMassNetworks | d917ef242ed99b02f82e9ff8697960d0d1d65178 | [
"MIT"
] | null | null | null | roboTraining/unitTest.py | Gabs48/SpringMassNetworks | d917ef242ed99b02f82e9ff8697960d0d1d65178 | [
"MIT"
] | 1 | 2021-03-16T15:42:36.000Z | 2021-03-16T15:42:36.000Z | from robot import SoftEnvironment, HardEnvironment, Morphology, SineControl, RobotState, Robot, SpringMorphology, SpringMorphology3D
from simulate import Plotter, Simulation, SimulationEnvironment, VerletSimulation
from training import Training, TrainingScheme, TrainingVariable, RandomTraining
from utils import SpaceList, Save
import utils
import numpy as np
import unittest
""" test basic functionalities of the RoboTraining Package """
class Constants(object):
thoroughness = 2;
def emptyRobot( spring = 0, damping = 0,gravity=0,groundFriction=0,groundContact=0,airFriction=0,amplitude = 0, ground = False):
""" create a simple robot with two nodes connected by a spring with restlength 1"""
if ground:
env = HardEnvironment(gravity = gravity, airFriction = airFriction)
else:
env = SoftEnvironment(gravity = gravity, groundFriction = groundFriction, groundContact = groundContact, airFriction = airFriction)
morph = SpringMorphology(noNodes=2, mass = 1, spring = spring, damping = damping, noNeighbours = 1, environment = env)
morph.restLength = np.array([[0,1],[1,0]])
control = SineControl(morph = morph, amplitude = amplitude, phase = np.pi, omega =2* np.pi)
state = RobotState(0, morph)
robot = Robot(morph, control, state)
return robot
def emptyRobot3D( spring = 0, damping = 0,gravity=0,groundFriction=0,groundContact=0,airFriction=0,amplitude = 0, ground = False):
""" create a simple 3D robot with two nodes connected by a spring with restlength 1"""
if ground:
env = HardEnvironment(gravity = gravity, airFriction = airFriction, threeD = true)
else:
env = SoftEnvironment(gravity = gravity, groundFriction = groundFriction, groundContact = groundContact, airFriction = airFriction, threeD =True)
morph = SpringMorphology3D(noNodes=2, mass = 1, spring = spring, damping = damping, noNeighbours = 1, environment = env)
morph.restLength = np.array([[0,1],[1,0]])
control = SineControl(amplitude = amplitude, phase = np.pi, omega =2* np.pi, morph = morph)
state = RobotState(0, morph)
robot = Robot(morph, control, state)
return robot
def setState2D (robot, xpos =[0,0], ypos =[0,0] , xspeed =[0,0], yspeed = [0,0]):
robot.state.setState2D(xpos, ypos, xspeed, yspeed, 0)
def setState3D (robot, xpos =[0,0], ypos =[0,0] , zpos = [0,0] , xspeed =[0,0], yspeed = [0,0], zspeed = [0,0]):
robot.state.setState3D(xpos, ypos, zpos, xspeed, yspeed, zspeed, 0)
def simpleSimulation(robot, timeStep = 1e-3, simulationLength = 1000, verlet = True):
""" create a simple simulation without any plotting """
plotenv = Plotter(plot=False);
simulenv = SimulationEnvironment(timeStep = timeStep, simulationLength = simulationLength, plot = plotenv, verlet = verlet)
if verlet:
simulation = VerletSimulation(simulenv, robot)
else:
simulation = Simulation(simulenv, robot)
return simulation
def plotSimulation(robot, timeStep = 1e-3, simulationLength = 1000, verlet = True, movie = False):
""" create a simple simulation without any plotting """
plotenv = Plotter(plot=True, movie = movie);
simulenv = SimulationEnvironment(timeStep = timeStep, simulationLength = simulationLength, plot = plotenv, verlet = verlet)
if verlet:
simulation = VerletSimulation(simulenv, robot)
else:
simulation = Simulation(simulenv, robot)
return simulation
def forceEqual(robot, xforceAim=[0, 0], yforceAim= [0, 0] ):
""" Are forces equal to the prescribed force"""
f = robot.computeForces()
return all((np.allclose(f.x, xforceAim), np.allclose(f.y, yforceAim)))
def forceEqual3D(robot, xforceAim=[0, 0], yforceAim= [0, 0], zforceAim = [0, 0] ):
""" Are forces equal to the prescribed force"""
f = robot.computeForces()
return all(((np.allclose(f.x, xforceAim), np.allclose(f.y, yforceAim)), np.allclose(f.z, zforceAim)))
def stateEqual(robot, xposAim =[0, 0], yposAim = [0, 0], xspeedAim = [0, 0], yspeedAim= [0, 0],tolerance = 1e-3):
"""Is state is equal to prescribed state"""
pos,speed, t = robot.getStateParameters()
return all (( np.allclose(pos.x, xposAim,atol = tolerance),
np.allclose(pos.y, yposAim, atol = tolerance),
np.allclose(speed.x, xspeedAim, atol = tolerance),
np.allclose(speed.y, yspeedAim, atol = tolerance) ))
def stateEqual3D(robot, xposAim =[0, 0], yposAim = [0, 0], zposAim = [0, 0 ], xspeedAim = [0, 0], yspeedAim= [0, 0], zspeedAim= [0,0], tolerance = 1e-3):
"""Is state is equal to prescribed state"""
pos,speed, t = robot.getStateParameters()
return all (( np.allclose(pos.x, xposAim,atol = tolerance),
np.allclose(pos.y, yposAim, atol = tolerance),
np.allclose(pos.z, zposAim, atol = tolerance),
np.allclose(speed.x, xspeedAim, atol = tolerance),
np.allclose(speed.y, yspeedAim, atol = tolerance),
np.allclose(speed.z, zspeedAim, atol = tolerance)))
class TestRobot(unittest.TestCase):
def testStaticSpring2D(self):
"""static spring force 2D"""
robot = emptyRobot(spring = 10)
setState2D(robot, xpos= [0,2])
assert forceEqual(robot, xforceAim = [10, -10])
setState2D(robot, ypos= [0,3])
assert forceEqual(robot, yforceAim = [20, -20])
def testStaticSpring3D(self):
"""static spring force 3D"""
robot = emptyRobot3D(spring = 10)
setState3D(robot, xpos= [0,2])
assert forceEqual3D(robot, xforceAim = [10, -10])
setState3D(robot, ypos= [0,3])
assert forceEqual3D(robot, yforceAim = [20, -20])
setState3D(robot, zpos= [0,2])
assert forceEqual3D(robot, zforceAim = [10, -10])
def testKineticSpring2D(self):
"""dynamic damping spring force 2D"""
robot = emptyRobot(damping = 100)
setState2D(robot, xpos= [0,1], xspeed =[0,2])
assert forceEqual(robot, xforceAim = [200, -200])
setState2D(robot, xpos= [0,1], yspeed =[-1, 1])
assert forceEqual(robot, yforceAim = [200, -200])
def testKineticSpring3D(self):
"""dynamic damping spring force 3D"""
robot = emptyRobot3D(damping = 100)
setState3D(robot, xpos= [0,1], xspeed =[0,2])
assert forceEqual3D(robot, xforceAim = [200, -200])
setState3D(robot, xpos= [0,1], yspeed =[-1, 1])
assert forceEqual3D(robot, yforceAim = [200, -200])
setState3D(robot, xpos= [0,1], zspeed =[0,2])
assert forceEqual3D(robot, zforceAim = [200, -200])
def testGravity(self):
""" Gravity Force"""
robot = emptyRobot(gravity = 10)
assert forceEqual(robot, yforceAim = [-10, -10])
def testAirfriction(self):
""" Air Friction Force"""
robot = emptyRobot(airFriction = 10)
setState2D(robot, xpos= [0, 1], xspeed =[-5, 5], yspeed= [-1, 1])
assert forceEqual(robot, xforceAim = [50, -50], yforceAim = [10, -10])
def testAirfriction3D(self):
""" Air Friction Force"""
robot = emptyRobot3D(airFriction = 10)
setState3D(robot, xpos= [0, 1], xspeed =[-5, 5], yspeed= [-1, 1], zspeed = [10, 10])
assert forceEqual3D(robot, xforceAim = [50, -50], yforceAim = [10, -10], zforceAim = [-100, -100])
def testNormalforce(self):
""" Normal Force """
robot = emptyRobot(groundContact = 1)
robot.state.setState2D([0, 1], [-1, -1] , [0, 0], [-1, 1], 0)
f = robot.computeForces()
assert np.allclose(f.x, [0, -0])
assert np.alltrue(f.y > [0, 0])
def testFriction(self):
""" Friction Force """
robot = emptyRobot(groundFriction = 1)
setState2D(robot, [0, 1], [-1, 1] , [4, 4], [-1, 1])
f = robot.computeForces()
assert f.x[0] < 0
assert f.x[1] == 0
assert np.allclose(f.y , [0, 0])
def testdefault(self):
""" default robot calculates force """
morph = SpringMorphology()
control = SineControl(morph)
robot = Robot(morph, control);
f = robot.computeForces()
assert all(np.isfinite(f.x))
assert all(np.isfinite(f.y))
def testCopyState(self):
"""create a deep copy of the state"""
robot = emptyRobot();
setState2D(robot, xpos = [0, 1])
state = robot.getState()
robot.state.pos.x[1] = 4
assert state.pos.x[1] == 1
def testCopyState3D(self):
"""create a deep copy of the state"""
robot = emptyRobot3D();
setState3D(robot, zpos = [0, 1])
state = robot.getState()
robot.state.pos.z[1] = 4
assert state.pos.z[1] == 1
def testControl(self):
robot = emptyRobot3D( amplitude = 0.5)
assert np.allclose(robot.control.modulationFactor(robot.state), [[1, 1], [1, 1]])
robot.state.currentTime = 0.25
assert np.allclose(robot.control.modulationFactor(robot.state), [[1, 0.5], [0.5, 1]])
robot.state.currentTime = 0.75
assert np.allclose(robot.control.modulationFactor(robot.state), [[1, 1.5],[1.5, 1]])
class TestSimulation(unittest.TestCase):
def testSinusX(self):
"""Robot behaves as harmonic oscillator with period 2*Pi """
if Constants.thoroughness >= 2:
robot = emptyRobot(spring = 0.5)
timestep = 1.0 / 1000;
halfPeriod = int (1* np.pi * 1000)
simulation = simpleSimulation(robot,timestep,halfPeriod)
tolerance = 1e-3
"""
# X direction
setState2D(robot, xpos = [0, 1.5])
# half period
simulation.runSimulation()
assert stateEqual(robot, xposAim = [0.5, 1])
# full period
simulation.runSimulation();
assert stateEqual(robot, xposAim = [0, 1.5])
# Y direction
setState2D(robot, ypos = [0, 1.5])
# half period
simulation.runSimulation();
assert stateEqual(robot, yposAim = [0.5, 1])
# full period
simulation.runSimulation();
assert stateEqual(robot, yposAim = [0, 1.5])
"""
# Z direction
robot = emptyRobot3D(spring = 0.5)
simulation.robot = robot
setState3D(robot, zpos = [0, 1.5])
# half period
simulation.runSimulation();
assert stateEqual3D(robot, zposAim = [0.5, 1])
# full period
simulation.runSimulation();
assert stateEqual3D(robot, zposAim = [0, 1.5])
else: print "testthoroughness is set too low for this test"
class Testutils(unittest.TestCase):
def testArray2Connections(self):
"""conversion from an array to the connections matrix and back"""
robot = emptyRobot()
array = [10]
connections = utils.array2Connections(array, robot.getConnections())
arrayAccent = utils.connections2Array(connections, robot.getConnections())
assert np.allclose(array, arrayAccent)
class TestTraining(unittest.TestCase):
def testNormalize(self):
"""normalization and denormalization procedure of TrainVariable """
trainvar = TrainingVariable("spring",0,1000)
testArray = [500, 300, 3.2 , 0]
testArraynorm = trainvar.normalize(testArray)
testArrayAccent = trainvar.denormalize(testArraynorm)
assert np.allclose(testArray, testArrayAccent)
def testSetterSpring(self):
"""array to robot and back"""
trainScheme = TrainingScheme();
trainScheme.createTrainVariable("spring", 0, 1000)
robot = emptyRobot();
# normal test
array = np.array([[0.4]])
trainScheme.normalizedMatrix2robot(array, robot)
arrayAccent = trainScheme.robot2normalizedMatrix(robot)
assert np.allclose(array, arrayAccent)
# check whether exceptions are thrown in case of invalid input
with self.assertRaises(AssertionError):
array = np.array([[0.4, 0.4]])
trainScheme.normalizedMatrix2robot(array, robot)
with self.assertRaises(AssertionError):
array = np.array([0.4])
trainScheme.normalizedMatrix2robot(array, robot)
with self.assertRaises(AssertionError):
array = np.array([5])
trainScheme.normalizedMatrix2robot(array, robot)
def testCreateTraining(self):
""" no exceptions may be thrown """
if Constants.thoroughness >= 2:
env=SoftEnvironment()
morph=SpringMorphology(noNodes = 10,spring = 1000, noNeighbours = 3,environment = env)
control=SineControl(morph)
state=RobotState(0,morph)
robot=Robot(morph,control,state)
plotter =Plotter(plotCycle=50,plot=False);
simulenv=SimulationEnvironment(timeStep = 0.0005,simulationLength=2000,plot =plotter)
simul = Simulation(simulenv,robot)
simul.runSimulation()
trainscheme = TrainingScheme()
trainscheme.createTrainVariable("spring",0,3000)
trainscheme.createTrainVariable("phase",0,2*np.pi)
training=RandomTraining(trainscheme,robot,simulenv)
trainSave = Save(training, 'temp', 'default')
trainSave.save([[10,10],[20,20]])
else: print "testthoroughness is set too low for this test"
class TestSpaceList(unittest.TestCase):
space2Da = SpaceList(np.array([[1],[2]]))
space2Db = SpaceList(np.array([4]),np.array([10]))
space2Dc = SpaceList(np.array([[1,2],[6,15]]))
space3Da = SpaceList(np.array([[1],[2.0],[3]]))
space3Db = SpaceList(np.array([10.0]),np.array([100]),np.array([1000.0]))
space3Dc = SpaceList(np.array([[1, 2 , 3, 4],[10 , 20 , 30, 40],[100, 200, 300, 400]]))
array = np.array([1,2,3,4])
def testAdd2D(self):
sum = self.space2Da + self.space2Db
assert sum.x == 5
assert sum.y == 12
def testAdd3D(self):
sum = self.space3Da + self.space3Db
assert sum.x == 11
assert sum.y == 102
assert sum.z == 1003
def testAddCopy3D(self):
copy = self.space3Da.copy()
copy += self.space3Da
assert copy.x == 2
assert copy.y == 4
assert copy.z == 6
assert self.space3Da.x == 1
assert self.space3Da.y == 2
assert self.space3Da.z == 3
def testMult3D(self):
product = self.space3Da * self.space3Db
assert product.x == 10
assert product.y == 200
assert product.z == 3000
def testMult2DCopy(self):
copy = self.space2Da.copy()
copy *= self.space2Da
assert copy.x == 1
assert copy.y == 4
assert self.space2Da.x == 1
assert self.space2Da.y == 2
def testMult3Darray(self):
product= self.array * self.space3Dc
aim = np.array([1, 4, 9, 16])
assert np.all(product.x == aim)
assert np.all(product.y == aim * 10)
assert np.all(product.z == aim * 100)
def testMult3Dscalar(self):
product= 4 * self.space3Dc
aim = 4 * np.array([1, 2, 3, 4])
assert np.all(product.x == aim)
assert np.all(product.y == aim * 10)
assert np.all(product.z == aim * 100)
def testdiff2D(self):
xdiff, ydiff = self.space2Dc.getDifference()
assert np.all( xdiff == [[0, -1],[1, 0]])
assert np.all( ydiff == [[0,- 9],[9, 0]])
def run(verbosity = 2, thorogouhness = 1):
Constants.thoroughness = thorogouhness
suite = unittest.TestSuite();
suite.addTests(unittest.makeSuite(TestRobot))
suite.addTests(unittest.makeSuite(TestSimulation))
suite.addTests(unittest.makeSuite(TestTraining))
suite.addTests(unittest.makeSuite(Testutils))
suite.addTests(unittest.makeSuite(TestSpaceList))
unittest.TextTestRunner(verbosity = verbosity).run(suite)
def runSpecial(verbosity=2):
    """Run only the single TestRobot.testControl case."""
    runner = unittest.TextTestRunner(verbosity=verbosity)
    suite = unittest.TestSuite()
    suite.addTest(TestRobot("testControl"))
    runner.run(suite)
# Entry point: let unittest discover and run every test case in this module.
if __name__ == '__main__':
    unittest.main()
cf0f741a09383991aab60c2811538dcff553fafb | 5,436 | php | PHP | qrcode.php | Ayshwarya02/QR-Attendance | 9dadad3b5326af1b565aa174cfa6bb2c948af2cf | [
"Unlicense"
] | 50 | 2015-02-27T02:01:59.000Z | 2022-01-16T10:16:58.000Z | qrcode.php | Ayshwarya02/QR-Attendance | 9dadad3b5326af1b565aa174cfa6bb2c948af2cf | [
"Unlicense"
] | 3 | 2017-03-13T14:38:00.000Z | 2021-12-03T15:37:36.000Z | qrcode.php | Ayshwarya02/QR-Attendance | 9dadad3b5326af1b565aa174cfa6bb2c948af2cf | [
"Unlicense"
] | 37 | 2016-06-29T11:33:17.000Z | 2022-03-13T13:38:52.000Z | <?php
require_once('classes/session.php');
require_once('classes/utilisateur.php');
require_once('classes/infos.php');
require_once('classes/ouvrier.php');
require_once('includes/functions.php');
require_once('includes/qrs.php');
// Guard the page: only authenticated users may record a QR check-in.
$session = new Session();
if (!$session->is_loggedin()) {
    $session->message("vous devez s'authentifier");
    // Bug fix: header() does not stop script execution; without exit the
    // rest of the page was still generated for unauthenticated visitors.
    header('Location: login.php');
    exit;
}

// Load the current user's record for display in the navbar.
$ut = new Utilisateur();
$ut->find_by_id($session->get_user_id());
$ut_data = $ut->get_utilisateur();

// Resolve the scanned QR code to a worker; unknown codes bounce back.
$ou = new Ouvrier();
if (!$ou->find_by_qrcode($_GET['o'])) {
    header('Location: gestion.php');
    exit; // bug fix: stop here instead of rendering the page after redirect
} else {
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Dashboard - QR Code Scan</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
<meta name="apple-mobile-web-app-capable" content="yes">
<link href="css/bootstrap.min.css" rel="stylesheet">
<link href="css/bootstrap-responsive.min.css" rel="stylesheet">
<link href="http://fonts.googleapis.com/css?family=Open+Sans:400italic,600italic,400,600"
rel="stylesheet">
<link href="css/font-awesome.css" rel="stylesheet">
<link href="css/style.css" rel="stylesheet">
<link href="css/pages/dashboard.css" rel="stylesheet">
<!-- Le HTML5 shim, for IE6-8 support of HTML5 elements -->
<!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
</head>
<body>
<div class="navbar navbar-fixed-top">
<div class="navbar-inner">
<div class="container"> <a class="btn btn-navbar" data-toggle="collapse" data-target=".nav-collapse"><span
class="icon-bar"></span><span class="icon-bar"></span><span class="icon-bar"></span> </a>
<a class="brand" href="index.php">
<i class="shortcut-icon icon-qrcode"></i>
QR Code Scan</a>
<div class="nav-collapse">
<ul class="nav pull-right">
<li class="dropdown"><a href="#" class="dropdown-toggle" data-toggle="dropdown"><i
class="icon-cog"></i> Compte <b class="caret"></b></a>
<ul class="dropdown-menu">
<li><a href="javascript:;">Paramètres</a></li>
<li><a href="javascript:;">Aide</a></li>
</ul>
</li>
<li class="dropdown"><a href="#" class="dropdown-toggle" data-toggle="dropdown"><i
class="icon-user"></i> <?php echo $ut_data['username']; ?> <b class="caret"></b></a>
<ul class="dropdown-menu">
<li><a href="profil.php">Profil</a></li>
<li><a href="process/logout.php">Logout</a></li>
</ul>
</li>
</ul>
<form class="navbar-search pull-right">
<input type="text" class="search-query" placeholder="Recherche">
</form>
</div>
<!--/.nav-collapse -->
</div>
<!-- /container -->
</div>
<!-- /navbar-inner -->
</div>
<!-- /navbar -->
<div class="subnavbar">
<div class="subnavbar-inner">
<div class="container">
<ul class="mainnav">
<li><a href="index.php"><i class="icon-dashboard"></i><span>Gestion</span> </a> </li>
<li class="active"><a href="rapports.php"><i class="icon-list-alt"></i><span>Rapports</span> </a> </li>
<!-- <li><a href="guidely.html"><i class="icon-facetime-video"></i><span>App Tour</span> </a></li> -->
<li><a href="stats.php"><i class="icon-bar-chart"></i><span>Charts</span> </a> </li>
<!-- <li><a href="shortcodes.html"><i class="icon-code"></i><span>Shortcodes</span> </a> </li> -->
<li class="dropdown"><a href="javascript:;" class="dropdown-toggle" data-toggle="dropdown"> <i class="icon-long-arrow-down"></i><span>Autres</span> <b class="caret"></b></a>
<ul class="dropdown-menu">
<li><a href="qrcodes.php">QR Codes</a></li>
<li><a href="utilisateurs.php">Utilisateurs</a></li>
</ul>
</li>
</ul>
</div>
<!-- /container -->
</div>
<!-- /subnavbar-inner -->
</div>
<!-- /subnavbar -->
<div class="main">
<div class="main-inner">
<div class="container">
<div class="row">
<div class="span12">
<?php
checkIn($_GET['o']);
}
?>
</div>
</div>
</div>
</div>
</div>
<!-- /main -->
<div class="extra">
<div class="extra-inner">
<div class="container">
<div class="row">
<!-- Copyright © HouTelecom 2014. Tous droits réservés. -->
</div>
<!-- /row -->
</div>
<!-- /container -->
</div>
<!-- /extra-inner -->
</div>
<!-- /extra -->
<div class="footer">
<div class="footer-inner">
<div class="container">
<div class="row">
<div class="span12"> Copyright © <a href="http://www.houtelecom.com/">HouTelecom</a> 2014. Tous droits réservés. </div>
<!-- /span12 -->
</div>
<!-- /row -->
</div>
<!-- /container -->
</div>
<!-- /footer-inner -->
</div>
<!-- /footer -->
<script src="js/jquery-1.7.2.min.js"></script>
<script src="js/excanvas.min.js"></script>
<script src="js/chart.min.js" type="text/javascript"></script>
<script src="js/bootstrap.js"></script>
<script language="javascript" type="text/javascript" src="js/full-calendar/fullcalendar.min.js"></script>
<script src="js/base.js"></script>
</body>
</html>
| 34.624204 | 181 | 0.563098 |
a196ea0a1b34ac4e2ece197bd776240c2b0c10fb | 208 | ts | TypeScript | src/_core/interface/index.ts | neophytes08/OnlineBooking | f82c8a0890faa586a9b33013665d6846375ca4d7 | [
"MIT"
] | null | null | null | src/_core/interface/index.ts | neophytes08/OnlineBooking | f82c8a0890faa586a9b33013665d6846375ca4d7 | [
"MIT"
] | null | null | null | src/_core/interface/index.ts | neophytes08/OnlineBooking | f82c8a0890faa586a9b33013665d6846375ca4d7 | [
"MIT"
] | null | null | null | export * from './_Address.interface';
export * from './_ContactDetails.interface';
export * from './_Position.interface';
export * from './_UserCreate.interface';
export * from './_VerifiedStatus.interface';
| 34.666667 | 44 | 0.735577 |
1848bf55262c01745cbf8ab86a9bb4790bcd04bf | 695 | tsx | TypeScript | packages/native/src/assets/icons/SevenCircledFinaThin.tsx | jelbaz-ledger/ui | 893b82250c76d8c96d82c96f268c82dd8cfbd428 | [
"MIT"
] | null | null | null | packages/native/src/assets/icons/SevenCircledFinaThin.tsx | jelbaz-ledger/ui | 893b82250c76d8c96d82c96f268c82dd8cfbd428 | [
"MIT"
] | 3 | 2021-09-28T14:14:19.000Z | 2021-09-28T15:47:45.000Z | packages/native/src/assets/icons/SevenCircledFinaThin.tsx | nparigi-ledger/ui | 708d293aeac0d20eb3563e143d676be11def4cd4 | [
"MIT"
] | null | null | null | import * as React from "react";
import Svg, { Path } from "react-native-svg";
type Props = {
size?: number | string;
color?: string;
};
// Renders the "seven circled" glyph as a react-native-svg icon.
// `size` sets both width and height of the square canvas; `color` is
// forwarded to the path fill.  The 24x24 viewBox matches the path data.
function SevenCircledFinaThin({
  size = 16,
  color = "currentColor",
}: Props): JSX.Element {
  return (
    <Svg width={size} height={size} viewBox="0 0 24 24" fill="none">
      <Path
        d="M9.768 16.44h.528c.384-3.264 1.752-5.976 3.912-7.8V7.584h-6.36v.48h5.88v.36c-2.232 1.968-3.576 4.776-3.96 8.016zm-5.736 4.44h7.056c4.968 0 8.88-4.032 8.88-8.88 0-4.968-3.912-8.88-8.88-8.88H4.032v.48h7.056c4.704 0 8.4 3.696 8.4 8.4 0 4.584-3.696 8.4-8.4 8.4H4.032v.48z"
        fill={color}
      />
    </Svg>
  );
}
| 30.217391 | 279 | 0.627338 |
4952b3768b0dca48691dc5c5b83eaddb86ce8c10 | 199 | py | Python | homeassistant/components/cloudflare/const.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 30,023 | 2016-04-13T10:17:53.000Z | 2020-03-02T12:56:31.000Z | homeassistant/components/cloudflare/const.py | MrDelik/core | 93a66cc357b226389967668441000498a10453bb | [
"Apache-2.0"
] | 31,101 | 2020-03-02T13:00:16.000Z | 2022-03-31T23:57:36.000Z | homeassistant/components/cloudflare/const.py | Vaarlion/core | f3de8b9f28de01abf72c0f5bb0b457eb1841f201 | [
"Apache-2.0"
] | 11,956 | 2016-04-13T18:42:31.000Z | 2020-03-02T09:32:12.000Z | """Constants for Cloudflare."""
DOMAIN = "cloudflare"
# Config
CONF_RECORDS = "records"
# Defaults
DEFAULT_UPDATE_INTERVAL = 60 # in minutes
# Services
SERVICE_UPDATE_RECORDS = "update_records"
| 15.307692 | 42 | 0.748744 |
02c6b4dbed15e710d6e9dbd33aee3f9119917f39 | 3,165 | cpp | C++ | GameDownloader/src/GameDownloader/Behavior/PostHookBehavior.cpp | ProtocolONE/cord.game-downloader | 90950019937cd2974801ca2f53ed3b4ecd1d219b | [
"Apache-2.0"
] | 1 | 2019-08-07T06:13:15.000Z | 2019-08-07T06:13:15.000Z | GameDownloader/src/GameDownloader/Behavior/PostHookBehavior.cpp | ProtocolONE/cord.game-downloader | 90950019937cd2974801ca2f53ed3b4ecd1d219b | [
"Apache-2.0"
] | null | null | null | GameDownloader/src/GameDownloader/Behavior/PostHookBehavior.cpp | ProtocolONE/cord.game-downloader | 90950019937cd2974801ca2f53ed3b4ecd1d219b | [
"Apache-2.0"
] | null | null | null | #include <GameDownloader/Behavior/PostHookBehavior.h>
#include <GameDownloader/ServiceState.h>
#include <GameDownloader/GameDownloadService.h>
#include <Core/Service.h>
#include <QtConcurrent/QtConcurrentRun>
namespace P1 {
namespace GameDownloader {
namespace Behavior {

// Runs the registered "after download" hooks for a service on a worker
// thread and maps the aggregated hook result onto state-machine moves.
PostHookBehavior::PostHookBehavior(QObject *parent)
    : BaseBehavior(parent)
{
    // The loop runs via QtConcurrent::run; this connection marshals the
    // completion signal back onto this object's thread.
    QObject::connect(this, SIGNAL(postHooksCompleted(P1::GameDownloader::ServiceState *, P1::GameDownloader::HookBase::HookResult)),
        SLOT(postHooksCompletedSlot(P1::GameDownloader::ServiceState *, P1::GameDownloader::HookBase::HookResult )));
}

PostHookBehavior::~PostHookBehavior(void)
{
}

void PostHookBehavior::start(P1::GameDownloader::ServiceState *state)
{
    QtConcurrent::run(this, &PostHookBehavior::postHookLoop, state);
}

void PostHookBehavior::stop(P1::GameDownloader::ServiceState *state)
{
    emit this->stopping(state);
}

// Worker-thread loop: runs every hook registered for the service's id, in
// priority order, until one asks to stop or the download is cancelled.
void PostHookBehavior::postHookLoop(P1::GameDownloader::ServiceState *state)
{
    Q_CHECK_PTR(state);
    Q_CHECK_PTR(state->service());

    if (state->state() != ServiceState::Started) {
        // Translated: hook results will probably need to be narrowed later.
        emit this->postHooksCompleted(state, HookBase::Continue);
        // Bug fix: this guard previously fell through and executed the
        // hooks anyway; the identical check inside the loop does return.
        return;
    }

    const P1::Core::Service *service = state->service();
    emit this->statusMessageChanged(state, QObject::tr("PRE_HOOK_DEFAULT_MESSAGE"));

    HookBase::HookResult result = HookBase::Continue;

    if (this->_afterDownloadHookMap.contains(service->id())) {
        Q_FOREACH(HookBase *hook, this->_afterDownloadHookMap[service->id()]) {
            if (state->state() != ServiceState::Started) {
                emit this->postHooksCompleted(state, HookBase::Continue);
                return;
            }

            result = hook->afterDownload(this->_gameDownloadService, state);
            emit this->statusMessageChanged(state, QObject::tr("PRE_HOOK_DEFAULT_MESSAGE"));

            if (result != HookBase::Continue)
                break;
        }
    }

    emit this->postHooksCompleted(state, result);
}

void PostHookBehavior::postHooksCompletedSlot(ServiceState *state, HookBase::HookResult result)
{
    // UNDONE (translated): the distinct hook exit codes could be handled
    // here to trigger either failures or different outgoing transitions.
    if (result == HookBase::Continue)
        emit this->next(Finished, state);
    else if (result == HookBase::CheckUpdate)
        emit this->next(ReturnToStart, state);
    else
        emit failed(state);
}

void PostHookBehavior::setGameDownloaderService(GameDownloadService *gameDownloadService)
{
    this->_gameDownloadService = gameDownloadService;
}

void PostHookBehavior::registerHook(const QString& serviceId, int preHookPriority, HookBase *hook)
{
    // Negated key: QMultiMap iterates ascending, so higher priority first.
    this->_afterDownloadHookMap[serviceId].insert(-preHookPriority, hook);
}

}
}
}
3914e8e0dae31a6e8c8d799c2c0d1bd694a14fb2 | 2,279 | py | Python | notebooks/utils.py | cheginit/coastal_transition_zone | 84bcd77cf6246210d8fb592006cedc6e4a4091b5 | [
"MIT"
] | 1 | 2021-10-25T00:44:24.000Z | 2021-10-25T00:44:24.000Z | notebooks/utils.py | cheginit/coastal_transition_zone | 84bcd77cf6246210d8fb592006cedc6e4a4091b5 | [
"MIT"
] | null | null | null | notebooks/utils.py | cheginit/coastal_transition_zone | 84bcd77cf6246210d8fb592006cedc6e4a4091b5 | [
"MIT"
] | null | null | null | from pathlib import Path
import os
import geopandas as gpd
import pyproj
from shapely.geometry import Polygon
def read_config(fname):
    """Parse a simple ``key = value`` configuration file into a dict.

    ``;`` starts an inline comment, ``#`` a full-line comment.  Values that
    parse as floats are converted to float; the ``class`` entry, if present,
    is additionally coerced to int.

    Raises FileNotFoundError if *fname* does not exist.
    """
    path = Path(fname)
    if not path.exists():
        raise FileNotFoundError(f"info file was not found: {fname}")

    config = {}
    with open(path) as f:
        for raw in f:
            # strip inline comments first, then surrounding whitespace
            line = raw.strip().partition(";")[0].strip()
            if not line or line.startswith("#"):
                continue
            # Bug fix: split on the FIRST '=' only, so values may themselves
            # contain '=' (the old split("=") truncated such values).
            key, sep, value = line.partition("=")
            if not sep:
                continue  # malformed line without '=' — skip instead of crashing
            key = key.strip()
            value = value.strip()
            try:
                value = float(value)
            except ValueError:
                pass
            config[key] = value

    if "class" in config:
        config["class"] = int(config["class"])
    return config
def get_path(scenario, class_no, root):
    """Build the path to the FlowFM map file for a scenario/class pair."""
    return Path(root) / get_case(class_no, scenario) / "FlowFM_map.nc"
def get_case(class_no, scenario=None):
    """Map a scenario label to its case directory name.

    With ``scenario=None`` the whole label -> case-name mapping is returned;
    otherwise only the requested scenario's case name.
    """
    suffixes = {
        "Ref": "S1",
        "R20": "S2_1",
        "R30": "S2_2",
        "D90": "S4_1",
        "D570": "S4_2",
        "S07": "S5_1",
        "S31": "S5_2",
    }
    cases = {label: f"C{class_no}_A8_{sfx}" for label, sfx in suffixes.items()}
    if scenario is None:
        return cases
    return cases[scenario]
def clip_geom(geodf, geom, crs, clip_name, root):
    """Clip *geodf* to *geom*, caching the clipped layer as a GeoPackage."""
    target_crs = pyproj.CRS.from_user_input(crs)
    if not geodf.crs.is_exact_same(target_crs):
        geodf = geodf.to_crs(crs)

    cache = Path(root, f"{clip_name}.gpkg")
    if cache.exists():
        return gpd.read_file(cache)

    clipped = gpd.clip(geodf, geom).reset_index(drop=True)
    clipped.to_file(cache)
    return clipped
def geo_census(data_name, geom, crs, clip_name, root):
    """Load a national TIGER 2020 layer (cached locally) and clip it to *geom*."""
    os.makedirs(Path(root), exist_ok=True)
    cache = Path(root, f"us_{data_name}.gpkg")
    if cache.exists():
        census = gpd.read_file(cache)
    else:
        # download straight from the Census Bureau on first use
        url = (
            "https://www2.census.gov/geo/tiger/TIGER2020/"
            f"{data_name.upper()}/tl_2020_us_{data_name}.zip"
        )
        census = gpd.read_file(url)
        census.to_file(cache)
    return clip_geom(census, geom, crs, clip_name, root)
| 30.797297 | 105 | 0.605968 |
a4a44f29c0541b33ec09b93874666557517f0d07 | 7,910 | kt | Kotlin | src/main/kotlin/com/jetbrains/snakecharm/codeInsight/SmkTypeProvider.kt | JetBrains-Research/snakecharm | 3c1f8fd123812f5c5c3c9971d93daaee68c69fdf | [
"MIT"
] | 52 | 2019-01-11T22:51:59.000Z | 2021-12-12T13:28:21.000Z | src/main/kotlin/com/jetbrains/snakecharm/codeInsight/SmkTypeProvider.kt | JetBrains-Research/snakecharm | 3c1f8fd123812f5c5c3c9971d93daaee68c69fdf | [
"MIT"
] | 417 | 2019-01-11T19:02:48.000Z | 2022-03-28T14:52:04.000Z | src/main/kotlin/com/jetbrains/snakecharm/codeInsight/SmkTypeProvider.kt | JetBrains-Research/snakecharm | 3c1f8fd123812f5c5c3c9971d93daaee68c69fdf | [
"MIT"
] | 10 | 2019-05-17T08:10:52.000Z | 2021-07-26T18:20:03.000Z | package com.jetbrains.snakecharm.codeInsight
import com.intellij.lang.injection.InjectedLanguageManager
import com.intellij.openapi.util.Ref
import com.intellij.psi.PsiElement
import com.intellij.psi.util.PsiTreeUtil
import com.jetbrains.python.psi.*
import com.jetbrains.python.psi.types.PyType
import com.jetbrains.python.psi.types.PyTypeProviderBase
import com.jetbrains.python.psi.types.TypeEvalContext
import com.jetbrains.snakecharm.codeInsight.SnakemakeAPI.ALLOWED_LAMBDA_OR_CALLABLE_ARGS
import com.jetbrains.snakecharm.codeInsight.SnakemakeAPI.SECTION_ACCESSOR_CLASSES
import com.jetbrains.snakecharm.codeInsight.SnakemakeAPI.SMK_VARS_CHECKPOINTS
import com.jetbrains.snakecharm.codeInsight.SnakemakeAPI.SMK_VARS_RULES
import com.jetbrains.snakecharm.codeInsight.SnakemakeAPI.SMK_VARS_WILDCARDS
import com.jetbrains.snakecharm.codeInsight.SnakemakeAPI.WILDCARDS_ACCESSOR_CLASS
import com.jetbrains.snakecharm.lang.SnakemakeLanguageDialect
import com.jetbrains.snakecharm.lang.SnakemakeNames.SECTION_INPUT
import com.jetbrains.snakecharm.lang.SnakemakeNames.SECTION_OUTPUT
import com.jetbrains.snakecharm.lang.SnakemakeNames.SECTION_RESOURCES
import com.jetbrains.snakecharm.lang.psi.*
import com.jetbrains.snakecharm.lang.psi.impl.SmkPsiUtil
import com.jetbrains.snakecharm.lang.psi.types.SmkCheckpointType
import com.jetbrains.snakecharm.lang.psi.types.SmkRuleLikeSectionArgsType
import com.jetbrains.snakecharm.lang.psi.types.SmkRulesType
import com.jetbrains.snakecharm.lang.psi.types.SmkWildcardsType
import com.jetbrains.snakecharm.stringLanguage.SmkSLanguage
import com.jetbrains.snakecharm.stringLanguage.lang.psi.SmkSLReferenceExpressionImpl
/**
 * Supplies Snakemake-specific PyCharm types (rules/checkpoints/wildcards and
 * section accessors) for references inside Snakemake and SmkSL files.
 */
class SmkTypeProvider : PyTypeProviderBase() {
    // TODO: provide types for 'run:' : threads, version, wildcards, rule, jobid,...
    // collectPyFiles("builtins", usedFiles).get(0).findTopLevelClass("str")
    // getParameterType(param, function, context) // only for function declarations, not lambdas
    // registerReturnType(classQualifiedName, methods, callback)
    // getReturnType(callable, context)
    // getCallType(function, callSite, context)
    // getGenericType(cls, context)
    // getGenericSubstitutions(cls, context)
    // getCallableType(callable, context) // e.g. method calls

    // Resolve-target based typing: lambda parameters and the accessor
    // classes used inside 'run:' sections.
    override fun getReferenceType(
        referenceTarget: PsiElement,
        context: TypeEvalContext,
        anchor: PsiElement?
    ): Ref<PyType>? {
        if (!SmkPsiUtil.isInsideSnakemakeOrSmkSLFile(anchor)) {
            return null
        }

        if (anchor is PyReferenceExpression) {
            // lambdas params types
            if (referenceTarget is PyNamedParameter) {
                return getLambdaParamType(referenceTarget)
            }

            // XXX: Cannot assign SmkRulesType, SmkCheckPointsType here: anchor is null, only resolve
            // target is available, we need anchor for [SmkRulesType] at the moment

            // 'run:' section: input, output, wildcards, ..
            if (referenceTarget is PyClass) {
                return getSectionAccessorInRunSection(referenceTarget, anchor, context)
            }
        }
        return null
    }

    // Types for section accessors (input/output/... and wildcards) when the
    // reference appears inside a rule's 'run:' body.
    private fun getSectionAccessorInRunSection(
        referenceTarget: PyClass,
        anchor: PyReferenceExpression,
        context: TypeEvalContext
    ): Ref<PyType>? {
        val fqn = referenceTarget.qualifiedName
        val refTargetSection = SECTION_ACCESSOR_CLASSES[fqn]

        val type = when {
            refTargetSection != null -> {
                // check if in run section & rule
                val (_, ruleLike) = getParentSectionAndRuleLike(
                    anchor, SmkRunSection::class.java
                ) ?: return null

                ruleLike.getSectionByName(refTargetSection)?.let { SmkRuleLikeSectionArgsType(it) }
            }
            fqn == WILDCARDS_ACCESSOR_CLASS -> {
                val ruleLike = PsiTreeUtil.getParentOfType(
                    anchor, SmkRuleOrCheckpoint::class.java
                ) ?: return null

                context.getType(ruleLike.wildcardsElement)
            }
            else -> null
        }
        return type?.let { Ref.create(it) }
    }

    // Types for parameters of section lambdas: the first positional
    // parameter is the wildcards object, named parameters map to sections.
    private fun getLambdaParamType(referenceTarget: PyNamedParameter): Ref<PyType>? {
        // in a lambda
        val lambda = PsiTreeUtil.getParentOfType(
            referenceTarget, PyLambdaExpression::class.java
        ) ?: return null

        // in a section, lambda not in call
        val (parentSection, ruleLike) = getParentSectionAndRuleLike(
            lambda, SmkRuleOrCheckpointArgsSection::class.java, PyCallExpression::class.java
        ) ?: return null

        val allowedArgs = ALLOWED_LAMBDA_OR_CALLABLE_ARGS[parentSection.sectionKeyword] ?: emptyArray()
        val paramName = referenceTarget.text

        val isFstPositionalParam = !referenceTarget.isKeywordOnly
                && lambda.parameterList.parameters.indexOf(referenceTarget) == 0

        if (isFstPositionalParam || paramName in allowedArgs) {
            val type = when (paramName) {
                SECTION_INPUT, SECTION_OUTPUT, SECTION_RESOURCES -> {
                    ruleLike.getSectionByName(paramName)?.let { SmkRuleLikeSectionArgsType(it) }
                }
                else -> {
                    // 1st positional parameter in a lambda is wildcard
                    if (isFstPositionalParam) SmkWildcardsType(ruleLike) else null
                }
            }
            return type?.let { Ref.create(it) }
        }
        return null
    }

    // Walks up the PSI tree to the enclosing section of type T and then to
    // the enclosing rule/checkpoint; null if either is missing.
    private fun <T: SmkSection> getParentSectionAndRuleLike(
        element: PsiElement,
        sectionClass: Class<T>,
        vararg sectionStopAt: Class<out PsiElement>
    ): Pair<T, SmkRuleOrCheckpoint>? {
        val section = PsiTreeUtil.getParentOfType(
            element, sectionClass, true, *sectionStopAt
        ) ?: return null

        val ruleLike = PsiTreeUtil.getParentOfType(
            section, SmkRuleOrCheckpoint::class.java
        ) ?: return null

        return section to ruleLike
    }

    // Expression-based typing for the top-level 'rules', 'checkpoints' and
    // 'wildcards' variables, both in *.smk files and in injected SmkSL.
    override fun getReferenceExpressionType(
        referenceExpression: PyReferenceExpression,
        context: TypeEvalContext
    ): PyType? {
        val smkExpression = when {
            SnakemakeLanguageDialect.isInsideSmkFile(referenceExpression) -> referenceExpression
            SmkSLanguage.isInsideSmkSLFile(referenceExpression) -> {
                // injected string language: type the host element instead
                val manager = InjectedLanguageManager.getInstance(referenceExpression.project)
                manager.getInjectionHost(referenceExpression)
            }
            else -> return null
        }

        val psiFile = smkExpression?.containingFile
        val parentDeclaration =
            PsiTreeUtil.getParentOfType(smkExpression, SmkRuleOrCheckpoint::class.java)

        // only unqualified references inside a Snakemake file are handled
        if (referenceExpression.children.isNotEmpty() ||
            psiFile == null ||
            psiFile !is SmkFile) {
            return null
        }

        if (referenceExpression is SmkSLReferenceExpressionImpl && referenceExpression.isWildcard()) {
            // is just a wildcard here
            return null
        }

        // XXX: at the moment affects all "rules" variables in a *.smk file, better to
        // affect only "rules" which is resolved to appropriate place
        return when (referenceExpression.referencedName) {
            SMK_VARS_RULES -> SmkRulesType(
                parentDeclaration as? SmkRule,
                psiFile
            )
            SMK_VARS_CHECKPOINTS -> SmkCheckpointType(
                parentDeclaration as? SmkCheckPoint,
                psiFile
            )
            SMK_VARS_WILDCARDS -> parentDeclaration?.let {
                SmkWildcardsType(parentDeclaration)
            }
            else -> null
        }
    }
}
3942456ee32b1b28218016db5412e47d5bcbf561 | 984 | py | Python | 945MinUnique/MinUnique.py | Easonyesheng/CodePractice | 91c8b09c278f5abb67e90f0096fc83bef975647b | [
"MIT"
] | null | null | null | 945MinUnique/MinUnique.py | Easonyesheng/CodePractice | 91c8b09c278f5abb67e90f0096fc83bef975647b | [
"MIT"
] | null | null | null | 945MinUnique/MinUnique.py | Easonyesheng/CodePractice | 91c8b09c278f5abb67e90f0096fc83bef975647b | [
"MIT"
] | null | null | null | """
给定整数数组 A,每次 move 操作将会选择任意 A[i],并将其递增 1。
返回使 A 中的每个值都是唯一的最少操作次数。
示例 1:
输入:[1,2,2]
输出:1
解释:经过一次 move 操作,数组将变为 [1, 2, 3]。
示例 2:
输入:[3,2,1,2,1,7]
输出:6
解释:经过 6 次 move 操作,数组将变为 [3, 4, 1, 2, 5, 7]。
可以看出 5 次或 5 次以下的 move 操作是不能让数组的每个值唯一的。
提示:
0 <= A.length <= 40000
0 <= A[i] < 40000
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/minimum-increment-to-make-array-unique
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
"""
# sort
# 线性探测+路径压缩 https://leetcode-cn.com/problems/minimum-increment-to-make-array-unique/solution/ji-shu-onxian-xing-tan-ce-fa-onpai-xu-onlogn-yi-ya/
class Solution:
    def minIncrementForUnique(self, A):
        """Return the minimum number of +1 moves to make all values in A unique.

        Greedy over the sorted values: every element must exceed its
        predecessor, and the accumulated deficit is exactly the number of
        moves.  O(n log n) time, O(n) extra space.
        """
        # Bug fixes: drop the leftover debug print(A) and sort a copy
        # instead of mutating the caller's list in place.
        values = sorted(A)
        moves = 0
        for i in range(1, len(values)):
            if values[i] <= values[i - 1]:
                moves += values[i - 1] - values[i] + 1
                values[i] = values[i - 1] + 1
        return moves
# Ad-hoc smoke run when the module is executed directly.
if __name__ == "__main__":
    s = Solution()
    A = [1,3,4,2,3,1,2]
    print(s.minIncrementForUnique(A))
c676e82ef3436104b3f186fe3892e98b4d398930 | 1,476 | py | Python | _internal/generate.py | damz/gopherjs-feather-icons | 0ac41d4810753949b40a018597e32a185b002db1 | [
"MIT"
] | null | null | null | _internal/generate.py | damz/gopherjs-feather-icons | 0ac41d4810753949b40a018597e32a185b002db1 | [
"MIT"
] | null | null | null | _internal/generate.py | damz/gopherjs-feather-icons | 0ac41d4810753949b40a018597e32a185b002db1 | [
"MIT"
] | null | null | null | import os
import glob
import lxml.etree
import subprocess
if __name__ == "__main__":
    parser = lxml.etree.XMLParser(remove_blank_text=True)

    def normalize_xml(data):
        """Parse an SVG icon and prepend an invisible 24x24 bounding path.

        The hidden path gives every icon the same canvas so they align when
        rendered at identical sizes.  Returns the serialized XML as text.
        """
        root = lxml.etree.XML(data, parser=parser)
        root.insert(
            0,
            root.makeelement("path", {
                "visibility": "hidden",
                "fill": "none",
                "stroke": "none",
                "d": "M0 0 H 24 V 24 H 0 Z",
            })
        )
        # Bug fix: lxml.etree.tostring returns bytes; decode so the result
        # can be interpolated into a str (previously "%s" produced b'...'
        # garbage on Python 3).
        return lxml.etree.tostring(root).decode("utf-8")

    camelize_exceptions = {
        "cpu": "CPU",
    }

    def _camelize(part):
        """Capitalize one snake_case part, honouring the exception table."""
        try:
            return camelize_exceptions[part]
        except KeyError:
            return part[0].upper() + part[1:]

    def camelize(name):
        """snake_case -> CamelCase Go identifier."""
        return "".join(
            _camelize(part)
            for part in name.split("_")
            if part != ""
        )

    # Bug fix: the output is text, so open in text mode ("w") — writing str
    # objects to a "wb" file raises TypeError on Python 3.
    with open("icons.go", "w") as f:
        f.write("package icons\n\n")
        f.write("const (\n")
        for filepath in sorted(glob.glob("feather/icons/*.svg")):
            name = os.path.basename(filepath)[:-len(".svg")].replace("-", "_")
            go_name = camelize(name)
            with open(filepath, "rb") as icon_f:
                data = icon_f.read()
            f.write("\t%s = `%s`\n" % (
                go_name,
                normalize_xml(data),
            ))
        f.write(")\n")

    subprocess.check_call([
        "gofmt", "-w",
        "icons.go",
    ])
67733bffcace52d922f0fb77e0f81f89ac61065b | 1,524 | swift | Swift | Package.swift | vignatiev/periphery | 4ad5fb7ee5ab7f023445bd48e7be03646b9974d6 | [
"MIT"
] | 1 | 2020-09-16T15:24:18.000Z | 2020-09-16T15:24:18.000Z | Package.swift | vignatiev/periphery | 4ad5fb7ee5ab7f023445bd48e7be03646b9974d6 | [
"MIT"
] | null | null | null | Package.swift | vignatiev/periphery | 4ad5fb7ee5ab7f023445bd48e7be03646b9974d6 | [
"MIT"
] | 1 | 2020-03-06T09:49:52.000Z | 2020-03-06T09:49:52.000Z | // swift-tools-version:5.0
import PackageDescription
let package = Package(
name: "Periphery",
platforms: [
.macOS(.v10_12),
],
products: [
.executable(name: "periphery", targets: ["Periphery"]),
.library(name: "PeripheryKit", targets: ["PeripheryKit"])
],
dependencies: [
.package(url: "https://github.com/ileitch/Commandant", .branch("boolean-option")),
.package(url: "https://github.com/jpsim/SourceKitten", from: "0.20.0"),
.package(url: "https://github.com/tuist/xcodeproj", from: "6.0.0"),
.package(url: "https://github.com/krzyzanowskim/CryptoSwift", from: "0.1.0"),
.package(url: "https://github.com/kylef/PathKit", from: "0.9.2"),
.package(url: "https://github.com/apple/swift-syntax", from: "0.50000.0")
],
targets: [
.target(
name: "Periphery",
dependencies: ["PeripheryKit"]
),
.target(
name: "PeripheryKit",
dependencies: [
"Commandant",
"SourceKittenFramework",
"xcodeproj",
"CryptoSwift",
"PathKit",
"SwiftSyntax"
]
),
.testTarget(
name: "RetentionFixtures"
),
.testTarget(
name: "SyntaxFixtures"
),
.testTarget(
name: "PeripheryKitTests",
dependencies: ["PeripheryKit"]
)
],
swiftLanguageVersions: [.v4_2, .v5]
)
| 30.48 | 90 | 0.519029 |
b0d25c37a192db0bb22af91d782716426346dd9e | 747 | lua | Lua | test/testping.lua | Ron2014/skynet-study | f13e57d11f326ec83451b4ed2fbd50d14a09bdca | [
"MIT"
] | 1 | 2021-02-08T12:27:24.000Z | 2021-02-08T12:27:24.000Z | test/testping.lua | Ron2014/skynet-study | f13e57d11f326ec83451b4ed2fbd50d14a09bdca | [
"MIT"
] | null | null | null | test/testping.lua | Ron2014/skynet-study | f13e57d11f326ec83451b4ed2fbd50d14a09bdca | [
"MIT"
] | 1 | 2022-01-23T05:14:55.000Z | 2022-01-23T05:14:55.000Z | local skynet = require "skynet"
local snax = require "skynet.snax"
skynet.start(function()
skynet.trace()
local ps = snax.newservice ("pingserver", "hello world")
print(ps.req.ping("foobar"))
print(ps.post.hello())
print(pcall(ps.req.error))
print("Hotfix (i) :", snax.hotfix(ps, [[
local i
local hello
function accept.hello()
i = i + 1
print ("fix", i, hello)
end
function hotfix(...)
local temp = i
i = 100
return temp
end
]]))
print(ps.post.hello())
local info = skynet.call(ps.handle, "debug", "INFO")
for name,v in pairs(info) do
print(string.format("%s\tcount:%d time:%f", name, v.count, v.time))
end
print(ps.post.exit("exit")) -- == snax.kill(ps, "exit")
skynet.exit()
end)
| 19.657895 | 70 | 0.621151 |
a4981b2c5145e99b0ab15123b08119be1228b0ad | 688 | php | PHP | frontend/controllers/NewsController.php | Qosimjondev/Online-Magazin | 1a11dee06ab7f60339ec35a24925e51e01edd6bf | [
"BSD-3-Clause"
] | null | null | null | frontend/controllers/NewsController.php | Qosimjondev/Online-Magazin | 1a11dee06ab7f60339ec35a24925e51e01edd6bf | [
"BSD-3-Clause"
] | null | null | null | frontend/controllers/NewsController.php | Qosimjondev/Online-Magazin | 1a11dee06ab7f60339ec35a24925e51e01edd6bf | [
"BSD-3-Clause"
] | null | null | null | <?php
namespace frontend\controllers;
use common\models\News;
use yii\base\BaseObject;
use yii\data\ActiveDataProvider;
use yii\web\Controller;
class NewsController extends Controller
{
    /**
     * Lists published news items (status = 1), five per page.
     */
    public function actionIndex()
    {
        $newsDataProvider = new ActiveDataProvider([
            'query' => News::find()->where(['status' => 1]),
            'pagination' => [
                'pageSize' => 5,
            ],
        ]);

        return $this->render('index', [
            'newsDataProvider' => $newsDataProvider,
        ]);
    }

    /**
     * Shows a single news item resolved by its slug.
     *
     * @param string $slug the value of the item's `url` column
     * @throws \yii\web\NotFoundHttpException if no item matches the slug
     */
    public function actionSingle($slug)
    {
        $model = News::findOne(['url' => $slug]);
        if ($model === null) {
            // Bug fix: previously an unknown slug rendered the view with a
            // null model; respond with a proper 404 instead.
            throw new \yii\web\NotFoundHttpException('Page not found.');
        }

        return $this->render('single', compact('model'));
    }
}
cda0b9dd7d8976468d726f1f5d2a7c9d37454b28 | 2,636 | cs | C# | Kooboo.Sites/Models/CmsFile.cs | tenghui-kooboo/Kooboo | 309c279f69a85a9e64b7205540fd94a4c3565bfc | [
"MIT"
] | null | null | null | Kooboo.Sites/Models/CmsFile.cs | tenghui-kooboo/Kooboo | 309c279f69a85a9e64b7205540fd94a4c3565bfc | [
"MIT"
] | null | null | null | Kooboo.Sites/Models/CmsFile.cs | tenghui-kooboo/Kooboo | 309c279f69a85a9e64b7205540fd94a4c3565bfc | [
"MIT"
] | null | null | null | //Copyright (c) 2018 Yardi Technology Limited. Http://www.kooboo.com
//All rights reserved.
using Kooboo.Data.Interface;
using Kooboo.Extensions;
using System;
using System.Security.Cryptography;
namespace Kooboo.Sites.Models
{
[Kooboo.Attributes.Diskable(Kooboo.Attributes.DiskType.Binary)]
[Kooboo.Attributes.Routable]
public class CmsFile : CoreObject, IBinaryFile, IExtensionable
{
    public CmsFile()
    {
        this.ConstType = ConstObjectType.CmsFile;
    }

    private Guid _id;

    // Lazily generated identifier: assigned on first read when unset.
    public override Guid Id
    {
        set { _id = value; }
        get
        {
            if (_id == default(Guid))
            {
                _id = Kooboo.Data.IDGenerator.GetNewGuid();
            }
            return _id;
        }
    }

    [Kooboo.Attributes.SummaryIgnore]
    public string Extension { get; set; }

    /// <summary>
    /// the content bytes of this file.
    /// </summary>
    [Kooboo.Attributes.SummaryIgnore]
    public byte[] ContentBytes { get; set; }

    // Alias over ContentBytes, kept for the IBinaryFile interface.
    public byte[] Bytes
    {
        get
        {
            return ContentBytes;
        }
        set
        {
            ContentBytes = value;
        }
    }

    /// <summary>
    /// this is for some file like text file, etc...
    /// </summary>
    public string ContentString { get; set; }

    /// <summary>
    /// The content type of this file. like. application/flash.
    /// This is often used to save original content type saved from other location.
    /// </summary>
    public string ContentType { get; set; }

    private int _size;

    // Size in bytes; falls back to ContentBytes.Length when never assigned.
    public int Size
    {
        get
        {
            if (_size == default(int))
            {
                if (ContentBytes != null)
                {
                    _size = ContentBytes.Length;
                }
            }
            return _size;
        }
        set
        {
            _size = value;
        }
    }

    public override int GetHashCode()
    {
        string uniquestring = this.Extension + this.Name;

        if (this.ContentBytes != null)
        {
            // Bug fix: MD5 is IDisposable — dispose it instead of leaking a
            // native crypto handle on every GetHashCode call.
            using (MD5 md5Hasher = MD5.Create())
            {
                byte[] data = md5Hasher.ComputeHash(ContentBytes);
                uniquestring += System.Text.Encoding.ASCII.GetString(data);
            }
        }

        return Lib.Security.Hash.ComputeIntCaseSensitive(uniquestring);
    }
}
| 25.843137 | 88 | 0.481411 |
c9debb7fcbb24d9468fe346ba1e2e390c487e7ed | 853 | ts | TypeScript | src/deleteChannel.ts | TriForMine/permissions-plugin | 4151ec8c40724193766fc33cf50cbc0ed6a87a5a | [
"Apache-2.0"
] | null | null | null | src/deleteChannel.ts | TriForMine/permissions-plugin | 4151ec8c40724193766fc33cf50cbc0ed6a87a5a | [
"Apache-2.0"
] | null | null | null | src/deleteChannel.ts | TriForMine/permissions-plugin | 4151ec8c40724193766fc33cf50cbc0ed6a87a5a | [
"Apache-2.0"
] | null | null | null | import { Bot } from "../deps.ts";
import { requireBotGuildPermissions } from "./permissions.ts";
// Wraps bot.helpers.deleteChannel with guild-level safety checks before
// delegating to the original helper: never delete the rules or
// public-updates channel, and require the bot to hold MANAGE_CHANNELS.
export default function deleteChannel(bot: Bot) {
  // keep a reference to the unwrapped helper for the final delegation
  const deleteChannel = bot.helpers.deleteChannel;

  bot.helpers.deleteChannel = async function (channelId, reason) {
    const channel = await bot.cache.channels.get(channelId);
    // DM channels have no guildId — skip all guild checks for them
    if (channel?.guildId) {
      const guild = await bot.cache.guilds.get(channel.guildId);
      if (!guild) throw new Error("GUILD_NOT_FOUND");

      if (guild.rulesChannelId === channelId) {
        throw new Error("RULES_CHANNEL_CANNOT_BE_DELETED");
      }
      if (guild.publicUpdatesChannelId === channelId) {
        throw new Error("UPDATES_CHANNEL_CANNOT_BE_DELETED");
      }

      await requireBotGuildPermissions(bot, guild, ["MANAGE_CHANNELS"]);
    }

    return deleteChannel(channelId, reason);
  };
}
06e52ed56fd8ab1447e3e706695b5636bce5f12a | 66 | py | Python | cfdata/tabular/converters/string/__init__.py | carefree0910/carefree-data | ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19 | [
"MIT"
] | 9 | 2020-10-25T11:52:34.000Z | 2022-01-23T02:45:41.000Z | cfdata/tabular/converters/string/__init__.py | carefree0910/carefree-data | ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19 | [
"MIT"
] | 2 | 2020-08-02T01:58:48.000Z | 2021-02-26T11:24:19.000Z | cfdata/tabular/converters/string/__init__.py | carefree0910/carefree-data | ae0f4ea5724b4efd5d76f2a9d420acf3322c1d19 | [
"MIT"
] | 1 | 2021-11-04T14:34:13.000Z | 2021-11-04T14:34:13.000Z | from .core import StringConverter
__all__ = ["StringConverter"]
| 13.2 | 33 | 0.772727 |
7f2608feea144d943fc221d49f447c0ea7655c2c | 841 | cs | C# | LgTvController.cs | jasminetroll/LgTvControl | 397f71c5c9af9715d9262df2fb563a1b67b1a82e | [
"Beerware"
] | null | null | null | LgTvController.cs | jasminetroll/LgTvControl | 397f71c5c9af9715d9262df2fb563a1b67b1a82e | [
"Beerware"
] | null | null | null | LgTvController.cs | jasminetroll/LgTvControl | 397f71c5c9af9715d9262df2fb563a1b67b1a82e | [
"Beerware"
] | null | null | null | using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Configuration;
namespace LagrangianDesign.LgTvControl {
    /// <summary>
    /// HTTP API for controlling an LG TV through <see cref="LgTvSerialPort"/>.
    /// The class body only wires up dependencies; the actual endpoint methods
    /// are generated and inserted at runtime (see note below).
    /// </summary>
    [ApiController]
    [Route("[controller]")]
    public sealed class LgTvController : ControllerBase {
        readonly ILogger<LgTvController> Logger;
        // Serial-port wrapper whose public properties back the generated
        // GET/PUT endpoints.
        readonly LgTvSerialPort LgTvSerialPort;
        /// <summary>Both dependencies are injected by ASP.NET Core DI.</summary>
        public LgTvController(
            ILogger<LgTvController> logger,
            LgTvSerialPort lgTvSerialPort
        ) {
            (Logger, LgTvSerialPort) = (logger, lgTvSerialPort);
        }
        // GET and PUT methods for all public properties of LgTvSerialPort
        // are inserted here before this class is compiled (at runtime).
    }
}
| 33.64 | 75 | 0.681332 |
60ecea9a72d5b9e11f86f83a7c099416846b154b | 3,241 | dart | Dart | lib/common/ab/provider/repos/read_history_db_provider.dart | sppsun/nlp_starter | bbeee24435c3a7b4475a426fda57b87a1fe2821f | [
"Apache-2.0"
] | 5 | 2019-07-08T03:25:51.000Z | 2020-08-20T03:32:52.000Z | lib/common/ab/provider/repos/read_history_db_provider.dart | sppsun/nlp_starter | bbeee24435c3a7b4475a426fda57b87a1fe2821f | [
"Apache-2.0"
] | null | null | null | lib/common/ab/provider/repos/read_history_db_provider.dart | sppsun/nlp_starter | bbeee24435c3a7b4475a426fda57b87a1fe2821f | [
"Apache-2.0"
] | null | null | null | import 'dart:async';
import 'package:flutter/foundation.dart';
import 'package:nlp_starter/common/ab/sql_provider.dart';
import 'package:nlp_starter/common/config/config.dart';
import 'package:nlp_starter/common/model/Repository.dart';
import 'package:sqflite/sqflite.dart';
import 'package:nlp_starter/common/utils/code_utils.dart';
/**
 * Local read-history table: stores the repositories the user has opened,
 * when each was last read, and the repository's raw JSON payload.
 * Created by sppsun
 * Date: 2019-06-06
 */
class ReadHistoryDbProvider extends BaseDbProvider {
  final String name = 'ReadHistory';
  final String columnId = "_id";
  final String columnFullName = "fullName";
  final String columnReadDate = "readDate";
  final String columnData = "data";
  // Row fields; populated by [ReadHistoryDbProvider.fromMap].
  int id;
  String fullName;
  int readDate;
  String data;
  ReadHistoryDbProvider();
  /// Builds a row map for insertion; [readDate] is stored as epoch millis.
  /// The id column is only included when this object already has one.
  Map<String, dynamic> toMap(String fullName, DateTime readDate, String data) {
    Map<String, dynamic> map = {columnFullName: fullName, columnReadDate: readDate.millisecondsSinceEpoch, columnData: data};
    if (id != null) {
      map[columnId] = id;
    }
    return map;
  }
  /// Restores a row object from a database result map.
  ReadHistoryDbProvider.fromMap(Map map) {
    id = map[columnId];
    fullName = map[columnFullName];
    readDate = map[columnReadDate];
    data = map[columnData];
  }
  @override
  tableSqlString() {
    // tableBaseString() presumably emits the CREATE TABLE prefix and the id
    // column; the closing parenthesis lives inside this literal.
    return tableBaseString(name, columnId) +
        '''
        $columnFullName text not null,
        $columnReadDate int not null,
        $columnData text not null)
      ''';
  }
  @override
  tableName() {
    return name;
  }
  /// Returns one page of raw rows, newest first, or null when the page is
  /// empty.
  Future _getProvider(Database db, int page) async {
    List<Map<String, dynamic>> maps = await db.query(name,
        columns: [columnId, columnFullName, columnReadDate, columnData],
        limit: Config.PAGE_SIZE,
        offset: (page - 1) * Config.PAGE_SIZE,
        orderBy: "$columnReadDate DESC");
    if (maps.length > 0) {
      return maps;
    }
    return null;
  }
  /// Looks up an existing row by repository full name; null when absent.
  Future _getProviderInsert(Database db, String fullName) async {
    List<Map<String, dynamic>> maps = await db.query(
      name,
      columns: [columnId, columnFullName, columnReadDate, columnData],
      where: "$columnFullName = ?",
      whereArgs: [fullName],
    );
    if (maps.length > 0) {
      ReadHistoryDbProvider provider = ReadHistoryDbProvider.fromMap(maps.first);
      return provider;
    }
    return null;
  }
  /// Inserts a read-history record, first deleting any previous record for
  /// the same repository so each repository appears at most once.
  Future insert(String fullName, DateTime dateTime, String dataMapString) async {
    Database db = await getDataBase();
    var provider = await _getProviderInsert(db, fullName);
    if (provider != null) {
      await db.delete(name, where: "$columnFullName = ?", whereArgs: [fullName]);
    }
    return await db.insert(name, toMap(fullName, dateTime, dataMapString));
  }
  /// Returns one page of read repositories (newest first), or null when the
  /// page is empty. (Method name "geData" kept as-is for API compatibility.)
  Future<List<Repository>> geData(int page) async {
    Database db = await getDataBase();
    var provider = await _getProvider(db, page);
    if (provider != null) {
      List<Repository> list = new List();
      for (var providerMap in provider) {
        ReadHistoryDbProvider provider = ReadHistoryDbProvider.fromMap(providerMap);
        /// Decode the JSON on a background isolate via compute() so the UI
        /// thread is not blocked by large payloads.
        var mapData = await compute(CodeUtils.decodeMapResult, provider.data);
        list.add(Repository.fromJson(mapData));
      }
      return list;
    }
    return null;
  }
}
| 28.182609 | 125 | 0.670781 |
747986f5c245dcedaf05244e566aa01213a94ebe | 71 | c | C | test.c | alexander-novo/CS460Novotny | 201eae265587602e72c11c88d25036cd8c56c5ce | [
"MIT"
] | null | null | null | test.c | alexander-novo/CS460Novotny | 201eae265587602e72c11c88d25036cd8c56c5ce | [
"MIT"
] | null | null | null | test.c | alexander-novo/CS460Novotny | 201eae265587602e72c11c88d25036cd8c56c5ce | [
"MIT"
] | 1 | 2019-03-23T01:00:46.000Z | 2019-03-23T01:00:46.000Z | void main(void) {
int x = 1;
if (x == 1) {
int y = 2;
}
y = 3;
}
| 8.875 | 17 | 0.394366 |
a00b4757562002db1c2ed29755305c5fd1be9807 | 8,286 | ts | TypeScript | src/ServiceWorker/sw.ts | NYPL-Simplified/web-reader | 601521d411299e2453d9942bd8c43b0aa5fd1a33 | [
"MIT"
] | 14 | 2016-08-04T16:32:51.000Z | 2022-01-24T14:55:51.000Z | src/ServiceWorker/sw.ts | NYPL-Simplified/web-reader | 601521d411299e2453d9942bd8c43b0aa5fd1a33 | [
"MIT"
] | 110 | 2017-02-10T15:26:10.000Z | 2022-03-30T13:40:51.000Z | src/ServiceWorker/sw.ts | NYPL-Simplified/web-reader | 601521d411299e2453d9942bd8c43b0aa5fd1a33 | [
"MIT"
] | 3 | 2021-06-28T19:35:27.000Z | 2022-02-12T07:34:13.000Z | import { clientsClaim } from 'workbox-core';
import { ExpirationPlugin } from 'workbox-expiration';
import { CacheFirst, StaleWhileRevalidate } from 'workbox-strategies';
import { IS_DEV } from '../constants';
import { WebpubManifest } from '../types';
import { ReadiumLink } from '../WebpubManifestTypes/ReadiumLink';
import {
CACHE_EXPIRATION_SECONDS,
PRECACHE_PUBLICATIONS,
WEBPUB_CACHE_NAME,
} from './constants';
import { registerRoute } from 'workbox-routing';
import { PublicationConfig, WebReaderSWConfig } from './types';
declare let self: ServiceWorkerGlobalScope;
// Version tag used only to prefix log lines (see `log` at the bottom).
const VERSION = 'v2';
/**
 * We claim the clients immediately and skip waiting because we don't care if
 * half the page resources come from the SW and half from the network. We use
 * content hashes for this to work
 */
clientsClaim();
/**
 * Sets up the service worker event listeners:
 *  - "install": skips the waiting phase so the new worker activates at once.
 *  - "message": on a PRECACHE_PUBLICATIONS message, pre-caches the given
 *    webpub manifests and every resource they reference.
 *
 * @param cacheExpirationSeconds maximum age of served cache entries
 *        (defaults to CACHE_EXPIRATION_SECONDS).
 */
export default function initWebReaderSW({
  cacheExpirationSeconds = CACHE_EXPIRATION_SECONDS,
}: WebReaderSWConfig | undefined = {}): void {
  log('INITIALIZING');
  self.addEventListener('install', (event) => {
    log('INSTALLING ');
    async function installSW() {
      // perform any install tasks
      // skip the waiting phase and activate immediately
      await self.skipWaiting();
      log('INSTALLED');
    }
    event.waitUntil(installSW());
  });
  /**
   * Allow the client to send a message telling us to pre-cache
   * webpub manifests and resources within them.
   */
  self.addEventListener('message', async (event) => {
    if (event.data.type === PRECACHE_PUBLICATIONS) {
      log('Precaching publications');
      if (typeof event.data.publications !== 'object') {
        console.error('Precache event missing publications');
        return;
      }
      await cachePublications(event.data.publications);
    }
  });
  // Shared cache-first strategy (with expiration) used for both manifests
  // and publication resources.
  const cacheFirst = new CacheFirst({
    cacheName: WEBPUB_CACHE_NAME,
    plugins: [
      new ExpirationPlugin({
        maxAgeSeconds: cacheExpirationSeconds,
      }),
    ],
  });
  /**
   * Register the additional urls we sent with a stale-while-revalidate strategy
   * Cache all the manifests in parallel. They're top priority.
   * Then cache all their resources.
   * Only cache items that don't already exist in the cache.
   */
  async function cachePublications(publications: PublicationConfig[]) {
    const cache = await caches.open(WEBPUB_CACHE_NAME);
    // first route the swr urls
    for (const pub of publications) {
      for (const url of pub.swrUrls ?? []) {
        log(`Routing ${url}`);
        registerRoute(
          url,
          new StaleWhileRevalidate({ cacheName: WEBPUB_CACHE_NAME })
        );
      }
    }
    // route, fetch and cache the manifests.
    // but don't re-fetch if they already exist in cache.
    const pubResults: PromiseSettledResult<PubWithManifest>[] = await Promise.allSettled(
      publications.map(async (pub) => {
        const finalManifestUrl = getProxiedUrl(pub.manifestUrl, pub.proxyUrl);
        // route it so that workbox knows to respond.
        registerRoute(finalManifestUrl, cacheFirst);
        // bail out if the manifest already exists
        const match = await cache.match(finalManifestUrl);
        if (match) {
          return { ...pub, manifest: await match.json() };
        }
        // otherwise fetch it
        const manifestResponse = await fetch(finalManifestUrl);
        handleBadResponse(finalManifestUrl, manifestResponse);
        // add the manifest response to the cache
        await cache.put(finalManifestUrl, manifestResponse.clone());
        const manifest: WebpubManifest = await manifestResponse.json();
        return { ...pub, manifest };
      })
    );
    // filter out any errored results
    const pubs = pubResults
      .map((result) =>
        result.status === 'fulfilled' ? result.value : undefined
      )
      .filter(isPub);
    // then route, fetch and cache all resources in each.
    const promises = pubs.map(async (pub) => {
      // make a list of resources with proxy included
      const resourceHrefs = extractHrefs(
        pub.manifest.resources ?? [],
        pub.manifestUrl,
        pub.proxyUrl
      );
      const readingOrderHrefs = extractHrefs(
        pub.manifest.readingOrder ?? [],
        pub.manifestUrl,
        pub.proxyUrl
      );
      // make sure array is deduped using set or we may get a cache error
      const allResourcesToCache = Array.from(
        new Set([...resourceHrefs, ...readingOrderHrefs])
      );
      // route, fetch and cache each one.
      // but don't re-fetch if it is already in the cache.
      await Promise.all(
        allResourcesToCache.map(async (url) => {
          // route it
          registerRoute(url, cacheFirst);
          // bail out if it already exists
          const match = await cache.match(url);
          if (match) {
            return;
          }
          const response = await fetch(url);
          handleBadResponse(url, response);
          return await cache.put(url, response);
        })
      );
    });
    // NOTE(review): per-resource failures surface only through allSettled and
    // are otherwise ignored -- presumably best-effort caching is intended.
    return await Promise.allSettled(promises);
  }
}
/** A publication config joined with its fetched webpub manifest. */
type PubWithManifest = PublicationConfig & { manifest: WebpubManifest };
/** Type guard that keeps only publications whose manifest fetch succeeded. */
function isPub(maybe: PubWithManifest | undefined): maybe is PubWithManifest {
  return maybe !== undefined;
}
/**
 * Logs a warning and throws when the response is not OK, so callers can
 * treat non-2xx fetches as hard failures.
 */
function handleBadResponse(url: string, response: Response) {
  if (response.ok) {
    return;
  }
  const message = `Bad response status for: ${url}. Status: ${response.status}`;
  console.warn(message);
  throw new Error(message);
}
/**
 * Prepends the configured proxy, passing the target url as a single encoded
 * component; without a proxy the url is returned untouched.
 */
function getProxiedUrl(url: string, proxyUrl: string | undefined) {
  if (!proxyUrl) {
    return url;
  }
  return `${proxyUrl}${encodeURIComponent(url)}`;
}
/**
 * Resolves a possibly-relative href against the manifest url, then prepends
 * the proxy (as a single encoded component) when one is configured.
 */
function getAbsoluteUrl(
  maybeRelative: string,
  manifestUrl: string,
  proxyUrl?: string
) {
  const absolute = new URL(maybeRelative, manifestUrl).toString();
  return proxyUrl ? `${proxyUrl}${encodeURIComponent(absolute)}` : absolute;
}
/**
 * Resolves every link's href against the manifest url (adding the proxy when
 * configured) and returns the resulting absolute urls.
 */
function extractHrefs(
  links: ReadiumLink[],
  manifestUrl: string,
  proxyUrl: string | undefined
): string[] {
  const hrefs: string[] = [];
  for (const link of links) {
    hrefs.push(getAbsoluteUrl(link.href, manifestUrl, proxyUrl));
  }
  return hrefs;
}
// Every log line is prefixed with the service worker version; no-op when
// not running in development.
function log(message: string) {
  if (!IS_DEV) return;
  console.log(`SW (${VERSION}) -`, message);
}
/**
* On a fetch event, respond with an item from the cache, if
* it exists. We don't ever add things to the cache here,
* because the fetch event is called for _all_ network requests,
* and we can't tell if any given request is for app resources or
* publication resources. Thus publication resources are added
* to the cache separately, and then just returned if found here.
*
* This event listener MUST be run as the last fetch event listener
* of all in the host app because it always responds to the event
* in order to be able to use async functionality.
*/
// self.addEventListener('fetch', (event) => {
// if (event.request.method !== 'GET') {
// return;
// }
// async function matchOrFetch() {
// const pubCache = await caches.open(WEBPUB_CACHE_NAME);
// const match = await pubCache.match(event.request);
// // check if there is a match
// if (match) {
// return new CacheFirst({
// cacheName: WEBPUB_CACHE_NAME,
// plugins: [
// new ExpirationPlugin({
// // Only cache requests for a week
// maxAgeSeconds: cacheExpirationSeconds,
// }),
// ],
// }).handle(event);
// }
// // otherwise go to network
// return fetch(event.request);
// }
// // we have to make the event wait if we want to use async work. This will
// // make the network tab show "ServiceWorker" in all requests, despite the
// // fact that not every request actually goes through the service worker:
// // https://stackoverflow.com/questions/33590378/status-code200-ok-from-serviceworker-in-chrome-network-devtools/33655173
// event.respondWith(matchOrFetch());
// });
| 31.505703 | 125 | 0.657012 |
f142a150dad64f68ddef062796c67fa7cd4f804f | 1,484 | dart | Dart | pkg/analyzer/test/src/diagnostics/unqualified_reference_to_non_local_static_member_test.dart | lfkdsk/sdk | 07aa9ec332600b585b0edc8d3805fb413e4370bd | [
"BSD-3-Clause"
] | 1 | 2021-03-12T13:52:01.000Z | 2021-03-12T13:52:01.000Z | pkg/analyzer/test/src/diagnostics/unqualified_reference_to_non_local_static_member_test.dart | lfkdsk/sdk | 07aa9ec332600b585b0edc8d3805fb413e4370bd | [
"BSD-3-Clause"
] | null | null | null | pkg/analyzer/test/src/diagnostics/unqualified_reference_to_non_local_static_member_test.dart | lfkdsk/sdk | 07aa9ec332600b585b0edc8d3805fb413e4370bd | [
"BSD-3-Clause"
] | null | null | null | // Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
import 'package:analyzer/src/error/codes.dart';
import 'package:test_reflective_loader/test_reflective_loader.dart';
import '../dart/resolution/driver_resolution.dart';
/// Test entry point: registers the reflective diagnostics suite below so the
/// test_reflective_loader runner can discover its test_* methods.
main() {
  defineReflectiveSuite(() {
    defineReflectiveTests(UnqualifiedReferenceToNonLocalStaticMemberTest);
  });
}
@reflectiveTest
class UnqualifiedReferenceToNonLocalStaticMemberTest
    extends DriverResolutionTest {
  /// Reading an inherited static getter without a class qualifier must be
  /// reported. (The offset/length pair pinpoints the `a` in `return a;`.)
  test_getter() async {
    await assertErrorsInCode(r'''
class A {
  static int get a => 0;
}
class B extends A {
  int b() {
    return a;
  }
}
''', [
      error(
          CompileTimeErrorCode.UNQUALIFIED_REFERENCE_TO_NON_LOCAL_STATIC_MEMBER,
          80,
          1),
    ]);
  }
  /// Same diagnostic when the unqualified inherited static member is the
  /// target of a method invocation (`foo.abs()`).
  test_getter_invokeTarget() async {
    await assertErrorsInCode(r'''
class A {
  static int foo;
}
class B extends A {
  static bar() {
    foo.abs();
  }
}
''', [
      error(
          CompileTimeErrorCode.UNQUALIFIED_REFERENCE_TO_NON_LOCAL_STATIC_MEMBER,
          72,
          3),
    ]);
  }
  /// Assigning through an inherited static setter without a qualifier is
  /// also reported.
  test_setter() async {
    await assertErrorsInCode(r'''
class A {
  static set a(x) {}
}
class B extends A {
  b(y) {
    a = y;
  }
}
''', [
      error(
          CompileTimeErrorCode.UNQUALIFIED_REFERENCE_TO_NON_LOCAL_STATIC_MEMBER,
          66,
          1),
    ]);
  }
}
| 20.054054 | 80 | 0.656334 |
57fbeaf9d2574bc6917a3dde0d1ba025403a4229 | 6,632 | php | PHP | app/search.php | psztrnk/catalog-app | 74e972739db79a837f7be0b0d6222e53f8ade964 | [
"MIT"
] | 16 | 2020-07-28T11:44:39.000Z | 2022-02-28T04:28:48.000Z | app/search.php | psztrnk/catalog-app | 74e972739db79a837f7be0b0d6222e53f8ade964 | [
"MIT"
] | 11 | 2017-08-10T07:55:26.000Z | 2022-01-17T21:25:49.000Z | app/search.php | psztrnk/catalog-app | 74e972739db79a837f7be0b0d6222e53f8ade964 | [
"MIT"
] | 12 | 2020-01-20T17:38:22.000Z | 2022-03-01T21:48:44.000Z | <?php include_once('snippets/header.php') ?>
<?php
// Raw search input. A "field:" prefix (e.g. "title:dune") restricts the
// search to a single field; without a prefix all searchable fields are
// matched. The isset() guard avoids an "undefined index" notice when the
// form is submitted without a term. (The original also created an unused
// $collection query object here; that dead assignment has been removed.)
$term_suffix = isset($_POST['term']) ? $_POST['term'] : '';
if (strpos($term_suffix, 'title:') !== FALSE) {
    $term = trim(str_replace('title:', '', $term_suffix));
    $s_field = $lang['SFIELD_TITLE'];
    $results_books = $db->table('books')
                ->where('title', 'like', '%' . $term . '%')
                ->orWhere('o_title', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'author:') !== FALSE) {
    $term = trim(str_replace('author:', '', $term_suffix));
    $s_field = $lang['SFIELD_AUTHOR'];
    $results_books = $db->table('books')
                ->where('a_str', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'publish:') !== FALSE) {
    $term = trim(str_replace('publish:', '', $term_suffix));
    $s_field = $lang['SFIELD_PUBLISH'];
    $results_books = $db->table('books')
                ->where('publisher', 'like', '%' . $term . '%')
                ->orWhere('year', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'isbn:') !== FALSE) {
    $term = trim(str_replace('isbn:', '', $term_suffix));
    $s_field = $lang['SFIELD_ISBN'];
    $results_books = $db->table('books')
                ->where('isbn', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'genre:') !== FALSE) {
    $term = trim(str_replace('genre:', '', $term_suffix));
    $s_field = $lang['SFIELD_GENRE'];
    $results_books = $db->table('books')
                ->where('g_str', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'lang:') !== FALSE) {
    $term = trim(str_replace('lang:', '', $term_suffix));
    $s_field = $lang['SFIELD_LANGUAGE'];
    $results_books = $db->table('books')
                ->where('language', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'description:') !== FALSE) {
    $term = trim(str_replace('description:', '', $term_suffix));
    $s_field = $lang['SFIELD_DESCRIPTION'];
    $results_books = $db->table('books')
                ->where('description', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'location:') !== FALSE) {
    $term = trim(str_replace('location:', '', $term_suffix));
    $s_field = $lang['SFIELD_LOCATION'];
    $results_books = $db->table('books')
                ->where('location', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'series:') !== FALSE) {
    $term = trim(str_replace('series:', '', $term_suffix));
    $s_field = $lang['SFIELD_SERIES'];
    $results_books = $db->table('books')
                ->where('series', 'like', '%' . $term . '%')
                ->all();
} else if (strpos($term_suffix, 'lent:') !== FALSE) {
    $term = trim(str_replace('lent:', '', $term_suffix));
    $s_field = $lang['SFIELD_LENT'];
    $results_books = $db->table('books')
                ->where('lentto', 'like', '%' . $term . '%')
                ->orWhere('lentat', 'like', '%' . $term . '%')
                ->all();
} else {
    // No recognised prefix: search every field with the whole term.
    $term = $term_suffix;
    $s_field = $lang['SFIELD_ALL'];
    $results_books = $db->table('books')
                ->where('title', 'like', '%' . $term . '%')
                ->orWhere('o_title', 'like', '%' . $term . '%')
                ->orWhere('description', 'like', '%' . $term . '%')
                ->orWhere('publisher', 'like', '%' . $term . '%')
                ->orWhere('isbn', 'like', '%' . $term . '%')
                ->orWhere('year', 'like', '%' . $term . '%')
                ->orWhere('language', 'like', '%' . $term . '%')
                ->orWhere('a_str', 'like', '%' . $term . '%')
                ->orWhere('g_str', 'like', '%' . $term . '%')
                ->orWhere('location', 'like', '%' . $term . '%')
                ->orWhere('lentto', 'like', '%' . $term . '%')
                ->orWhere('lentat', 'like', '%' . $term . '%')
                ->all();
}
// Tracks the last rendered book id so duplicate result rows are skipped in
// the table below.
$unique = NULL;
?>
<h2>
    <i class="fa fa-search" aria-hidden="true"></i><?php
    // Escape the user-supplied search term before echoing it back into the
    // page: $term comes straight from $_POST, so printing it raw was a
    // reflected-XSS hole.
    echo $lang['SEARCH_TITLE'].htmlspecialchars($term, ENT_QUOTES, 'UTF-8').$lang['SEARCH_TITLE_SUFFIX'].$s_field; ?>
</h2>
<div id="item-list">
<?php
    // Choose singular/plural wording for the result-count message.
    if ($results_books->count() == 1) {
        $wording = $lang['VERB_SINGULAR'];
        $suffix = $lang['SEARCH_REASULTCOUNT_SINGULAR_SUFFIX'];
    } else {
        $wording = $lang['VERB_PLURAL'];
        $suffix = $lang['SEARCH_REASULTCOUNT_PLURAL_SUFFIX'];
    }
    // With no hits: print the "no results" message, close the page early and
    // stop. Otherwise print the count and fall through to the table below.
    if ($results_books->count() == 0) {
        echo '<p>' . $lang['SEARCH_NORESULTS'] . '</p></div>';
        include_once('snippets/footer.php');
        exit;
    } else {
        echo '<p>' . $lang['SEARCH_REASULTCOUNT_PREFIX'] . $wording . ' ' . $results_books->count() . $suffix . '</p>';
    }
?>
    <table class="item-list-table">
        <?php include_once ('snippets/table-head.php');?>
        <tbody>
            <?php
                // One row per distinct book; $unique remembers the previously
                // rendered book id so consecutive duplicates are skipped.
                // NOTE(review): book fields are echoed without
                // htmlspecialchars() -- stored-XSS risk if titles/authors can
                // contain markup; confirm data is sanitized on insert.
                foreach ($results_books as $book) {
                    $book_id = $book->id();
                    if ($unique == $book_id) {
                        continue;
                    } else {
                        // Lent books render a check icon linking to the lent
                        // view; the tooltip shows who borrowed it and when.
                        if ($book->islent() == 'on') {
                            $lentstatus = '<a href="display?lent=' . urlencode($book->islent()) . '" title="' . $book->lentto() . ' ' . $book->lentat() . '"><i class="fa fa-check" aria-hidden="true"></i></a>';
                        } else {
                            $lentstatus = '<i class="fa fa-times" aria-hidden="true"></i>';
                        }
                        echo '<tr>';
                        echo '<td>';
                        // NOTE(review): the where() clause is built by string
                        // concatenation; $book_id comes from the database, but
                        // a bound parameter would be safer -- confirm the
                        // query builder escapes it.
                        $authors = $db->table('authors')
                            ->select('authors.author', 'authors.book_id')
                            ->where('authors.book_id = ' . $book_id)
                            ->order('author', 'ASC')
                            ->all();
                        foreach ($authors as $author) {
                            echo '<a href="display?author=' . urlencode($author->author()) . '">' . $author->author() . '</a><br />';
                        }
                        echo '</td>';
                        echo '<td><a href="display?id=' . $book->id() . '">' . $book->title() . '</a></td>';
                        echo '<td class="publisher"><a href="display?publisher=' . urlencode($book->publisher()) . '">' . $book->publisher() . '</a></td>';
                        echo '<td class="year"><a href="display?year=' . urlencode($book->year()) . '">' . $book->year() . '</a></td>';
                        echo '<td class="genre">';
                        $genres = $db->table('genres')
                            ->select('genres.genre', 'genres.book_id')
                            ->where('genres.book_id = ' . $book_id)
                            ->order('genre', 'ASC')
                            ->all();
                        foreach ($genres as $genre) {
                            echo '<a href="display?genre=' . urlencode($genre->genre()) . '">' . $genre->genre() . '</a><br />';
                        }
                        echo '</td>';
                        echo '<td class="lent">' . $lentstatus . '</td>';
                        echo '</tr>';
                    }
                    $unique = $book_id;
                }
            ?>
        </tbody>
    </table>
</div>
<?php include_once('snippets/footer.php') ?>
| 37.050279 | 194 | 0.489897 |
2083ab7300b381c6da6179e07ba2636b7c6d6146 | 5,257 | ddl | SQL | Data/Samples/Testing Chambers/InputConfig.ddl | Tekh-ops/ezEngine | d6a5887d8709f267bf8f2943ef15054e29f6d3d5 | [
"MIT"
] | 703 | 2015-03-07T15:30:40.000Z | 2022-03-30T00:12:40.000Z | Data/Samples/Testing Chambers/InputConfig.ddl | Tekh-ops/ezEngine | d6a5887d8709f267bf8f2943ef15054e29f6d3d5 | [
"MIT"
] | 233 | 2015-01-11T16:54:32.000Z | 2022-03-19T18:00:47.000Z | Data/Samples/Testing Chambers/InputConfig.ddl | Tekh-ops/ezEngine | d6a5887d8709f267bf8f2943ef15054e29f6d3d5 | [
"MIT"
] | 101 | 2016-10-28T14:05:10.000Z | 2022-03-30T19:00:59.000Z | InputAction
{
string %Set{"Player"}
string %Action{"MoveForwards"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_w"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_leftstick_posy"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"MoveBackwards"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_s"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_leftstick_negy"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"StrafeLeft"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_a"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_leftstick_negx"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"StrafeRight"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_d"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_leftstick_posx"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Jump"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_space"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_button_a"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Run"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_left_shift"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_button_x"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"RotateLeft"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_left"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_rightstick_negx"}
float %Scale{3}
}
Slot
{
string %Key{"mouse_move_negx"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"RotateRight"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_right"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_rightstick_posx"}
float %Scale{3}
}
Slot
{
string %Key{"mouse_move_posx"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Use"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_e"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_button_b"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Shoot"}
bool %TimeScale{false}
Slot
{
string %Key{"mouse_button_0"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_right_trigger"}
float %Scale{1}
}
Slot
{
string %Key{"keyboard_right_ctrl"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Crouch"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_left_ctrl"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_left_shoulder"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"LookUp"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_up"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_rightstick_posy"}
float %Scale{1}
}
Slot
{
string %Key{"mouse_move_negy"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"LookDown"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_down"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_rightstick_negy"}
float %Scale{1}
}
Slot
{
string %Key{"mouse_move_posy"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Flashlight"}
bool %TimeScale{false}
Slot
{
string %Key{"keyboard_f"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_button_y"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"SwitchWeapon1"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_1"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"SwitchWeapon2"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_2"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"SwitchWeapon3"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_3"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"SwitchWeapon4"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_4"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"SwitchWeapon5"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_5"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"SwitchWeapon6"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_6"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"SwitchWeapon7"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_7"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Reload"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_r"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_button_x"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"SwitchWeapon0"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_0"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Teleport"}
bool %TimeScale{true}
Slot
{
string %Key{"keyboard_q"}
float %Scale{1}
}
}
InputAction
{
string %Set{"Player"}
string %Action{"Aim"}
bool %TimeScale{false}
Slot
{
string %Key{"mouse_button_1"}
float %Scale{1}
}
Slot
{
string %Key{"controller0_left_trigger"}
float %Scale{1}
}
}
| 13.7979 | 44 | 0.661594 |
a3bf8a97e01e5259008d2221ae4a3a724d2a70da | 6,961 | java | Java | rxtomcat-container/src/main/java/net/tonwu/tomcat/container/core/AdapterImpl.java | ctlcom2010/RxTomcat | a28692469ebf662fcc418f330d4b35015175e817 | [
"Apache-2.0"
] | 7 | 2019-07-04T13:01:44.000Z | 2021-09-14T10:15:59.000Z | rxtomcat-container/src/main/java/net/tonwu/tomcat/container/core/AdapterImpl.java | ctlcom2010/RxTomcat | a28692469ebf662fcc418f330d4b35015175e817 | [
"Apache-2.0"
] | null | null | null | rxtomcat-container/src/main/java/net/tonwu/tomcat/container/core/AdapterImpl.java | ctlcom2010/RxTomcat | a28692469ebf662fcc418f330d4b35015175e817 | [
"Apache-2.0"
] | 3 | 2020-09-20T15:11:16.000Z | 2021-05-20T01:11:02.000Z | /**
* Copyright 2019 tonwu.net - 顿悟源码
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.tonwu.tomcat.container.core;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.TreeMap;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.tonwu.tomcat.container.servletx.Request;
import net.tonwu.tomcat.container.servletx.Response;
import net.tonwu.tomcat.http.Adapter;
import net.tonwu.tomcat.http.RawRequest;
import net.tonwu.tomcat.http.RawResponse;
/**
 * Adapter between the HTTP connector and the servlet container: wraps the
 * connector's raw request/response in container-level objects, maps the
 * request URI to a Servlet, and tries to resolve the session id from the
 * request's cookies.
 *
 * @author tonwu.net
 */
public class AdapterImpl implements Adapter {
    final static Logger log = LoggerFactory.getLogger(AdapterImpl.class);
    // Owning connector; provides access to the web application Context.
    private Connector connector;
    public AdapterImpl(Connector connector) {
        this.connector = connector;
    }
    /**
     * Entry point invoked by the connector for each request: builds the
     * container request/response pair, runs the container pipeline, then
     * finishes the response and recycles both objects.
     */
    @Override
    public void service(RawRequest rawReq, RawResponse rawResp) throws Exception {
        // Create the container-internal request/response objects and link
        // them to each other and to the raw protocol-level objects.
        Request containerRequest = new Request();
        containerRequest.setRawReq(rawReq);
        Response containerResponse = new Response();
        containerResponse.setRawResp(rawResp);
        containerRequest.setResp(containerResponse);
        containerResponse.setContainerRequest(containerRequest);
        // Enter the container pipeline to produce the response.
        try {
            if (postParseRequest(rawReq, containerRequest, rawResp, containerResponse)) {
                connector.getContainer().getPipeline().handle(containerRequest, containerResponse);
            }
            containerResponse.finish();
        } finally {
            // Always recycle so the objects can be reused for later requests.
            containerRequest.recycle();
            containerResponse.recycle();
        }
    }
    /**
     * Maps the request URI to a Servlet and parses the session id from the
     * cookies.
     *
     * @return true when a Context was matched and the request may proceed
     *         into the container; false when a 404 was already set.
     */
    private boolean postParseRequest(RawRequest rawReq, Request req, RawResponse rawResp, Response resp) {
        Context context = connector.getContainer();
        // Strictly speaking the URI should be normalized here first.
        String uri = rawReq.getUri();
        try {
            uri = URLDecoder.decode(uri, rawReq.getEncoding().name());
        } catch (UnsupportedEncodingException e) {
            // Ignored: fall back to the undecoded URI.
        }
        // Match the Context: the first path segment must equal the doc base.
        if (uri.startsWith(context.getDocBase(), 1)) {
            req.setContext(context);
        } else {
            log.debug("匹配 Web 上下文对象 Context 失败,响应 404");
            rawResp.setStatus(HttpServletResponse.SC_NOT_FOUND);
            return false;
        }
        // Strip the application (context) name from the URI.
        uri = uri.substring(uri.indexOf(context.getDocBase()) + context.getDocBase().length());
        // No servlet path given: default to "/".
        if ("".equals(uri)) {
            uri += "/";
        }
        boolean mapRequired = true;
        while (mapRequired) {
            Wrapper wrapper = mapServlet(context, uri);
            req.setWrapper(wrapper);
            // Parse session id in Cookie
            Cookie[] cookies = req.getCookies();
            if (cookies != null) {
                for (Cookie cookie : cookies) {
                    if ("JSESSIONID".equalsIgnoreCase(cookie.getName())) {
                        String reqId = cookie.getValue();
                        req.setSessionId(reqId);
                    }
                }
            }
            if (log.isDebugEnabled()) {
                StringBuilder sb = new StringBuilder(120);
                sb.append("映射 Servlet\r\n======Mapping Result======");
                sb.append("\r\n  Request Path: ").append(uri);
                sb.append("\r\n  Context: /").append(context.getDocBase());
                sb.append("\r\n  Wrapper: ").append(wrapper);
                sb.append("\r\n  jsessionid: ").append(req.getRequestedSessionId());
                sb.append("\r\n==========================");
                log.debug(sb.toString());
            }
            mapRequired = false;
            // Tomcat performs multi-version Context detection here, caused by
            // parallel deployment of the same web application. For simplicity
            // this implementation only checks whether the application's class
            // files are being hot-reloaded; web.xml change detection and
            // redeployment are not implemented.
            if (!mapRequired && context.getPaused()) {
                log.debug("Web 应用 [/{}] 正在热加载,重新映射 Servlet", context.getDocBase());
                // Give the reload a moment to settle, then map again.
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    // Should never happen
                }
                // reset mapping
                req.recycle();
                wrapper = null;
                mapRequired = true;
            }
        }
        return true;
    }
    /***
     * Maps a servlet path to a Wrapper, following the servlet spec's
     * matching order (exact, wildcard prefix, extension, welcome file,
     * default servlet).
     *
     * @param context the Context matched for this request
     * @param uri the request's servlet path
     * @return never null; falls back to the default servlet
     */
    private Wrapper mapServlet(Context context, String uri) {
        Wrapper mapWrapper = null;
        // Rule 1 -- Exact Match, e.g. /catalog
        TreeMap<String, Wrapper> exactWrappers = context.getExactWrappers();
        String key = exactWrappers.floorKey(uri);
        if (key != null && uri.equals(key)) {
            mapWrapper = exactWrappers.get(key);
        }
        // Rule 2 -- Prefix (wildcard) Match, e.g. /foo/bar/*
        if (mapWrapper == null) {
            TreeMap<String, Wrapper> wildcardWrappers = context.getWildcardWrappers();
            key = wildcardWrappers.floorKey(uri);
            if (key != null) {
                // uri = /foo/bar, a/foo/bar, a/foo/bar/c, a/foo/bar/c/d
                // name = /foo/bar
                if (uri.startsWith(key) || uri.endsWith(key) || uri.contains(key + "/")) {
                    mapWrapper = wildcardWrappers.get(key);
                }
            }
        }
        // Rule 3 -- Extension Match (by file extension, e.g. *.jsp)
        if (mapWrapper == null) {
            TreeMap<String, Wrapper> extensionWrappers = context.getExtensionWrappers();
            key = extensionWrappers.floorKey(uri);
            if (key != null && uri.endsWith("." + key)) {
                mapWrapper = extensionWrappers.get(key);
            }
        }
        // Rule 4 -- Welcome resources processing for servlets
        if (mapWrapper == null) {
            if (uri.endsWith("/")) {
                uri += context.getWelcomeFile();
            }
        }
        // Rule 5 -- Default servlet
        if (mapWrapper == null) {
            mapWrapper = context.getDefaultWrapper();
        }
        mapWrapper.setWrapperPath(uri);
        return mapWrapper;
    }
}
| 34.122549 | 106 | 0.568022 |
7f508bb25cd6de158f0e6d283332258cc1cb8f2c | 535 | sql | SQL | WebContent/bookstore.sql | sumitprojects/JavaProject | 073f14dca17cfc30b702caf3080276ac8bc6f05e | [
"MIT"
] | 4 | 2018-07-03T12:14:35.000Z | 2019-08-13T13:54:03.000Z | WebContent/bookstore.sql | sumitprojects/JavaProject | 073f14dca17cfc30b702caf3080276ac8bc6f05e | [
"MIT"
] | null | null | null | WebContent/bookstore.sql | sumitprojects/JavaProject | 073f14dca17cfc30b702caf3080276ac8bc6f05e | [
"MIT"
] | 1 | 2019-07-24T13:43:35.000Z | 2019-07-24T13:43:35.000Z | CREATE TABLE `book` (
`book_id` int(11) NOT NULL,
`title` varchar(128) NOT NULL,
`author` varchar(45) NOT NULL,
`price` float NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
-- Seed data: two sample books (price unit not specified here -- TODO confirm).
INSERT INTO `book` (`book_id`, `title`, `author`, `price`) VALUES
(1, 'testsds', 'test', 1000),
(2, 'test', 'test', 2000);
-- Keys: primary key on book_id plus unique constraints on book_id and title.
-- NOTE(review): the UNIQUE KEY on book_id is redundant with the PRIMARY KEY.
ALTER TABLE `book`
ADD PRIMARY KEY (`book_id`),
ADD UNIQUE KEY `book_id_UNIQUE` (`book_id`),
ADD UNIQUE KEY `title_UNIQUE` (`title`);
-- Make book_id auto-increment; the next generated id is 3.
ALTER TABLE `book`
MODIFY `book_id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
| 33.4375 | 69 | 0.678505 |
9c4ab19b84978152cefc6bc4d6d5aac07824fc10 | 1,338 | rs | Rust | src/player/bullet.rs | roeldev/godot-rust-space-sidescroller | 0ec9737e47d5554e7995a1b5cd2333af0c5c0630 | [
"BSD-3-Clause"
] | 2 | 2021-08-30T23:53:23.000Z | 2021-12-14T06:29:11.000Z | src/player/bullet.rs | roeldev/godot-rust-space-sidescroller | 0ec9737e47d5554e7995a1b5cd2333af0c5c0630 | [
"BSD-3-Clause"
] | null | null | null | src/player/bullet.rs | roeldev/godot-rust-space-sidescroller | 0ec9737e47d5554e7995a1b5cd2333af0c5c0630 | [
"BSD-3-Clause"
] | 1 | 2021-07-12T15:21:43.000Z | 2021-07-12T15:21:43.000Z | // Copyright (c) 2021, Roel Schut. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
use crate::*;
use crate::utils::convert::TryInstanceFrom;
#[derive(NativeClass)]
#[inherit(Sprite)]
pub struct PlayerBullet {
#[property(default = 100.0)]
speed: f32,
#[property(default = 1)]
damage: u8,
}
#[methods]
impl PlayerBullet {
fn new(_owner: &Sprite) -> Self {
PlayerBullet {
speed: 100.0,
damage: 1,
}
}
pub fn get_damage(&self) -> u8 { self.damage }
#[export]
fn _process(&self, owner: &Sprite, delta: f32) {
let mut pos = owner.global_position();
pos.x += self.speed * delta;
owner.set_global_position(pos);
if pos.x > 180.0 {
owner.queue_free();
}
}
}
impl TryInstanceFrom<Self, Sprite> for PlayerBullet {}
// impl<'l> TryFrom<Option<Ref<Node>>> for PlayerBullet {
// type Error = ();
//
// fn try_instance_from(node: Option<Ref<Node>>) -> Result<RefInstance<'l, Self, Shared>, Self::Error> {
// node.map(|node| unsafe { node.assume_safe() })
// .and_then(|node| node.cast::<Sprite>())
// .and_then(|node| node.cast_instance::<Self>())
// .ok_or(Self::Error)
// }
// } | 26.235294 | 108 | 0.578475 |
d27eae2bf6995f06d39f58fcb2b49558cf90a952 | 2,218 | sh | Shell | by-tag/os/verify_copy_process.sh | milosz/shell-octo-adventure | d5db9c42043f0952aaae6fc5ca788084497e7444 | [
"MIT"
] | null | null | null | by-tag/os/verify_copy_process.sh | milosz/shell-octo-adventure | d5db9c42043f0952aaae6fc5ca788084497e7444 | [
"MIT"
] | null | null | null | by-tag/os/verify_copy_process.sh | milosz/shell-octo-adventure | d5db9c42043f0952aaae6fc5ca788084497e7444 | [
"MIT"
] | null | null | null | #!/bin/sh
# Simple "copy [directories] and verify [files]" shell script
# Sample usage: copy.sh /from_directory /to_directory
# https://blog.sleeplessbeastie.eu/2014/11/21/how-to-verify-copy-process/
# used commands
find_command=$(which find)
shasum_command=$(which sha256sum)
cat_command=$(which cat)
unlink_command=$(which unlink)
# copy command with additional arguments
copy_command=$(which cp)
copy_arguments="-rp" # recursive mode
# preserve mode, ownership, timestamps
# mail command and with used email address
mail_command=$(which mail)
mail_subject_argument="-s"
mail_address="milosz"
if [ -d "$1" -a ! -d "$2" ]; then
# first directory exists
# second directory does not exists
# compute 256-bit checksums
shasum_log=$(mktemp)
(cd $1 && $find_command . -type f -exec $shasum_command '{}' \; > $shasum_log)
# copy data
copy_log=$(mktemp)
$copy_command $copy_arguments "$1" "$2" > $copy_log
# verify computed checksums
verify_log=$(mktemp)
(cd $2 && $cat_command $shasum_log | $shasum_command -c > $verify_log)
shasum_exit_code="$?"
# prepare message and send mail message
mail_file=$(mktemp)
if [ "$shasum_exit_code" -eq "0" ]; then
mail_subject="Subject: ${0}: Success"
else
mail_subject="Subject: ${0}: Error"
fi
echo > $mail_file
echo "Command-line: ${0} ${1} ${2}" >> $mail_file
if [ -s "$copy_log" ]; then
echo >> $mail_file
echo "Copy process" >> $mail_file
$cat_command $copy_log >> $mail_file
fi
if [ "$shasum_exit_code" -ne "0" ]; then
echo >> $mail_file
echo "Verify process" >> $mail_file
$cat_command $verify_log | grep -v OK$ >> $mail_file
fi
$mail_command $mail_subject_argument "${mail_subject}" $mail_address < $mail_file
# cleanup temporary files
$unlink_command $mail_file
$unlink_command $verify_log
$unlink_command $copy_log
$unlink_command $shasum_log
else
echo "Problem with parameters\nCommand-line: ${0} ${1} ${2}" | $mail_command $mail_subject_argument "${0}" $mail_address
exit 5
fi
| 30.805556 | 122 | 0.632552 |
49162563ee7a5c06c5af0b306fed87f317ec328d | 12,136 | py | Python | Fiji/imgproc.py | eufmike/storm_image_processing | 076335519be0be3b66d289a180421d36770ab820 | [
"CC-BY-4.0"
] | null | null | null | Fiji/imgproc.py | eufmike/storm_image_processing | 076335519be0be3b66d289a180421d36770ab820 | [
"CC-BY-4.0"
] | null | null | null | Fiji/imgproc.py | eufmike/storm_image_processing | 076335519be0be3b66d289a180421d36770ab820 | [
"CC-BY-4.0"
] | null | null | null | #@ UIService uiService
#@ LogService log
#@ File(label="Select the main directory", style="directory", value="/Volumes/LaCie_DataStorage/xiaochao_wei_STORM imaging/STORM_imaging/", persist=false) path
#@ String(label="Name of Analysis Folder", value = "analysis_20190419", persist=false) dir_output
#@ File(label="Folder for input images", style="directory", value="/Volumes/LaCie_DataStorage/xiaochao_wei_STORM imaging/STORM_imaging/resource/testdata", persist=false) ippath
#@ Boolean(label="Batchmode", value=false, persist=true) batchmodeop
print('Script Starts')
print('Importing modules ...')
# Import ImageJ/Fiji package
import sys
import os
import re
import csv
import gc
import time
from ij import IJ
from ij import plugin
from ij import gui
from ij.io import FileSaver
from ij import WindowManager as wm
from ij.process import ImageStatistics as IS
from ij.macro import Interpreter
# Import JAVA modules
import java.awt.Color as Color
import java.lang.System.gc as javagc
# Import Bio-Formats
from loci.plugins import BF
from loci.plugins.in import ImporterOptions
# Functions Section Begins ----------------------------------------------------- #
print('Loading functions ...')
# log.info('Loading functions ...')
def filesavercheck(fs, outputdir, filename):
    """
    Save `fs` (an ij.io.FileSaver) as a TIFF named `filename` inside
    `outputdir`, refusing to overwrite an existing file.

    Prints one of:
      - "Folder does not exist or it's not a folder!" when outputdir is invalid
      - "File exists! ..." when the target file already exists
      - "File saved successfully at <path>" on success
    NOTE(review): nothing is printed when saveAsTiff itself returns False.

    Arguments:
        fs: ij.io.FileSaver wrapping the image to save
        outputdir: output directory
        filename: file name (not a path) for the TIFF
    """
    # Jython / Python 2 print statements kept as-is.
    if os.path.exists(outputdir) and os.path.isdir(outputdir):
        print "folder exists:", outputdir
        filepath = os.path.join(outputdir, filename) # Operating System-specific
        if os.path.exists(filepath):
            print "File exists! Not saving the image, would overwrite a file!"
        elif fs.saveAsTiff(filepath):
            print "File saved successfully at ", filepath
    else:
        print "Folder does not exist or it's not a folder!"
def dircheck(targetpaths):
    """
    Ensure the target folder(s) exist, creating any that are missing.

    targetpaths: either a single folder path (str or unicode) or a list
        of folder paths.
    """
    # BUGFIX: the original only handled `unicode` single paths, so plain
    # `str` paths (e.g. results of os.path.join under Jython 2) were
    # silently ignored and the folder was never created. Accept a list,
    # or treat anything else as one path. The debug print of
    # os.path.exists() was dropped.
    if isinstance(targetpaths, list):
        paths = targetpaths
    else:
        paths = [targetpaths]
    for p in paths:
        if not os.path.exists(p):
            os.makedirs(p)
def getprocessedimg(op_dir, pattern = r'(.+?).'):
    """
    NOT USING.
    Walk `op_dir` and collect the first regex group of every file name
    matching `pattern`, de-duplicated.

    op_dir: folder to scan recursively
    pattern: regex used with re.search; group(1) is collected
    Returns a list of unique group(1) strings.
    """
    seen = set()
    for directory, dir_names, file_names in os.walk(op_dir):
        for fname in file_names:
            hit = re.search(pattern, fname)
            if hit:
                seen.add(hit.group(1))
    return list(seen)
def listfiles(path, extension = None):
    """
    Recursively list absolute paths of files under `path`.

    path: root directory to walk
    extension: when given, only files ending with this suffix are
        returned; when None, all (non-hidden) files are returned.
        (The original crashed with endswith(None) in that case.)
    Hidden files (names starting with '.') are always skipped.
    """
    fileabslist = []
    for directory, dir_names, file_names in os.walk(path):
        for file_name in file_names:
            if file_name.startswith('.'):
                continue  # skip hidden files such as .DS_Store
            if extension is None or file_name.endswith(extension):
                fileabslist.append(os.path.join(directory, file_name))
    return fileabslist
# def getpendinglist(src_dir, op_dir, src_ext = '.nd2', op_ext = '.csv', pattern = r'(.+?).'):
def getpendinglist(src_dir, op_dir, src_ext = '.nd2', op_ext = '.csv'):
    """
    Compare source files in `src_dir` against already-produced output
    files in `op_dir` and return the source files still to be processed.

    Returns a tuple of three parallel lists:
        (pending basenames, absolute input paths, absolute output paths)
    """
    source_paths = listfiles(src_dir, src_ext)
    done_paths = listfiles(op_dir, op_ext)
    done_basenames = [os.path.splitext(os.path.basename(p))[0] for p in done_paths]

    pendingfllist = []
    pendingpathlist_input = []
    pendingpathlist_output = []
    for src_path in source_paths:
        base = os.path.splitext(os.path.basename(src_path))[0]
        if base not in done_basenames:
            pendingfllist.append(base)
            pendingpathlist_input.append(src_path)
            pendingpathlist_output.append(os.path.join(op_dir, base + op_ext))
    return (pendingfllist, pendingpathlist_input, pendingpathlist_output)
def getStatistics(imp):
    """Return (mean, median, min, max) statistics for the given ImagePlus."""
    flags = IS.MEAN | IS.MEDIAN | IS.MIN_MAX
    processor = imp.getProcessor()
    stats = IS.getStatistics(processor, flags, imp.getCalibration())
    return stats.mean, stats.median, stats.min, stats.max
def garbagecollect(iteration = 3):
    """Invoke the garbage collector `iteration` times."""
    for _ in range(iteration):
        gc.collect()
# Functions Section Ends ----------------------------------------------------- #
# STORM Image Analysis ------------------------------------------------------- #
def run_script(path=path):
    """
    Main entry point: pre-process every unprocessed .nd2 image.

    For each pending image this (1) records image metadata and basic
    statistics to imgstat.csv, (2) exports each channel as a TIFF, and
    (3) measures per-frame intensity statistics inside a centered
    128x128 ROI, written to a CSV named after the image.

    path: main directory chosen in the script dialog. The other dialog
        values (dir_output, ippath, batchmodeop) are Fiji script
        parameters read as globals.
    """
    import gc
    path = str(path)
    # Prepare workspace ------------------------------------------------------ #
    print('Preparing ...')
    if batchmodeop:
        Interpreter.batchMode = True
    pathlist = []
    # define temp folder
    dir_temp = 'temp'
    # create temp folder
    path_temp = os.path.join(path, dir_output, dir_temp)
    pathlist.append(path_temp)
    # define input folder
    path_srcimg = str(ippath)
    print(path_srcimg)
    # define output folder layout
    outputdir = 'preprocessing'
    dir_preproimg = 'preproimg'
    dir_imginfo = 'imginfo'
    dir_imgintensity = 'imgintensity'
    dir_imgmetadata = 'imgmetadata'
    file_imgstat = 'imgstat.csv'
    # create output paths
    path_preproimg = os.path.join(path, dir_output, outputdir, dir_preproimg)
    path_imginfo = os.path.join(path, dir_output, outputdir, dir_imginfo)
    path_imgintensity = os.path.join(path, dir_output, outputdir, dir_imgintensity)
    path_imgmetadata = os.path.join(path_imginfo, dir_imgmetadata)
    pathlist.append(path_preproimg)
    pathlist.append(path_imginfo)
    pathlist.append(path_imgintensity)
    pathlist.append(path_imgmetadata)
    # create output file path
    path_imgstat = os.path.join(path_imginfo, file_imgstat)
    # check the existence of output folders and create folders if necessary
    dircheck(pathlist)

    # Create img list for processing ============ #
    # return the source files that have no intensity CSV yet
    src_ext = '.nd2'
    pendingfllist, pendingpathlist_input, pendingpathlist_output = getpendinglist(path_srcimg, path_imgintensity, src_ext = src_ext)
    print(pendingfllist)

    # Processing start ========================== #
    for i in range(len(pendingfllist)):
        print(pendingfllist[i])
        # reload any stats already recorded in imgstat.csv so the file can
        # be rewritten below with the current image appended
        pendingfllist_pre = []
        processed_filename = []
        list_xsize = []
        list_ysize = []
        list_nSlices = []
        list_nFrames = []
        list_nChannels = []
        list_sizebytes = []
        if os.path.exists(path_imgstat):
            with open(path_imgstat, 'rb') as csvfile:
                csvreader = csv.reader(csvfile, delimiter=',')
                csv_header = next(csvreader)
                for row in csvreader:
                    pendingfllist_pre.append(row[0])
                    processed_filename.append(row[1])
                    list_xsize.append(row[2])
                    list_ysize.append(row[3])
                    list_nSlices.append(row[4])
                    list_nFrames.append(row[5])
                    list_nChannels.append(row[6])
                    list_sizebytes.append(row[7])
        # load image via Bio-Formats
        imps = BF.openImagePlus(pendingpathlist_input[i])
        ipflbasename = pendingfllist[i]
        pendingfllist_pre.append(pendingfllist[i])
        for imp in imps:
            imp.show()
        imp_main = IJ.getImage()
        processed_filename.append(imp_main.title)

        # Save img metadata ========================== #
        print('Save image metadata...')
        # "Show Info..." needs no image argument; it acts on the active image
        IJ.run("Show Info...")
        img_info_title = 'Info for ' + imp_main.title
        img_info = wm.getWindow(img_info_title)
        wm.setWindow(img_info)
        IJ.saveAs("Text", os.path.join(path_imgmetadata, (ipflbasename + ".txt")))
        img_info.close()

        # Save img information ======================== #
        img_x = imp_main.getWidth()
        # BUGFIX: the original read getWidth() for the height as well.
        img_y = imp_main.getHeight()
        img_nslices = imp_main.getNSlices()
        img_nFrames = imp_main.getNFrames()
        img_nChannels = imp_main.getNChannels()
        img_sizebytes = imp_main.getSizeInBytes()
        list_xsize.append(img_x)
        list_ysize.append(img_y)
        list_nSlices.append(img_nslices)
        list_nFrames.append(img_nFrames)
        list_nChannels.append(img_nChannels)
        list_sizebytes.append(img_sizebytes/(1024*1024))
        with open(path_imgstat, 'wb') as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            csvwriter.writerow(['image_name', 'ip_file_name','xSize', 'ySize', 'nSlices', 'nFrames', 'nChannels', 'size_MB'])
            for j in range(len(list_xsize)):
                csvwriter.writerow([pendingfllist_pre[j], processed_filename[j], list_xsize[j], list_ysize[j], \
                    list_nSlices[j], list_nFrames[j], list_nChannels[j], list_sizebytes[j]])

        # Print information =========================== #
        print('Current Image File: {}'.format(img_info.title))
        # centered 128x128 ROI: [x, y, width, height]
        roi = [(img_x/2) - 64, (img_y/2) - 64, 128, 128]
        crop_roi = gui.Roi(roi[0], roi[1], roi[2], roi[3])
        stats_allframe = []

        # Export individual channel ================== #
        for j in range(img_nChannels):
            IJ.run(imp_main, "Duplicate...", "duplicate channels=" + str(j+1))
            imp_channel = IJ.getImage()
            fs = FileSaver(imp_channel)
            path_preproim_c = os.path.join(path_preproimg, str(j+1))
            dircheck(path_preproim_c)
            filesavercheck(fs, path_preproim_c, ipflbasename + '.tif')
            # per-frame intensity statistics inside the center ROI
            imp_channel.setRoi(crop_roi)
            for k in range(img_nFrames):
                imp_channel.setT(k+1)
                stats = list(getStatistics(imp_channel))
                head = [j+1, k+1]
                stats_allframe.append(head + stats)
            imp_channel.close()
            del imp_channel
            del fs

        # save into a csv ============================= #
        with open(os.path.join(path_imgintensity, pendingfllist[i] + '.csv'), 'wb') as csvfile:
            csvwriter = csv.writer(csvfile, delimiter=",")
            csvwriter.writerow(['channel', 'frame', 'mean', 'median','min', 'max'])
            for j in range(len(stats_allframe)):
                csvwriter.writerow(stats_allframe[j])

        # Ending the loop ============================= #
        # close windows, drop references and give ImageJ time to release them
        imp_main.close()
        del stats_allframe
        del imp_main
        del imps
        time.sleep(3)
        gc.collect()
        time.sleep(3)
        gc.collect()

    print('Saving image stats ...')
    print("Script Ends ...")
    time.sleep(3)
    if batchmodeop:
        Interpreter.batchMode = False
    import gc
    gc.collect()
    return
# Script entry point. '__builtin__' is accepted alongside '__main__',
# presumably because Fiji's Jython runner executes scripts under that
# module name -- TODO confirm.
if __name__ in ['__builtin__','__main__']:
    run_script()
    # quit script after running the script
    from java.lang import System
    System.exit(0)
729c12dbac0340a861dab19eb8b1fc43fc64d8e8 | 68,725 | cs | C# | FHSDK/Sync/FHSyncDataset.cs | alexlobanov/fh-dotnet-sdk | 6b5b1c5b351386a396d3f0ba600ca7194d691ce4 | [
"Apache-2.0"
] | null | null | null | FHSDK/Sync/FHSyncDataset.cs | alexlobanov/fh-dotnet-sdk | 6b5b1c5b351386a396d3f0ba600ca7194d691ce4 | [
"Apache-2.0"
] | 46 | 2015-02-17T13:24:52.000Z | 2019-06-08T19:26:30.000Z | FHSDK/Sync/FHSyncDataset.cs | alexlobanov/fh-dotnet-sdk | 6b5b1c5b351386a396d3f0ba600ca7194d691ce4 | [
"Apache-2.0"
] | 14 | 2015-01-27T08:17:52.000Z | 2018-09-17T22:30:40.000Z | using System;
using System.Collections.Generic;
using FHSDK.Services;
using System.Threading.Tasks;
using System.IO;
using System.Diagnostics.Contracts;
using System.Xml;
using Newtonsoft.Json;
using System.Net;
using System.Runtime.CompilerServices;
using FHSDK.Services.Data;
using FHSDK.Services.Log;
using FHSDK.Services.Network;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Converters;
namespace FHSDK.Sync
{
/// <summary>
/// FHsyncDataset represent a set of data to expose to synchronization.
/// </summary>
/// <typeparam name="T"></typeparam>
public class FHSyncDataset<T> where T : IFHSyncModel
{
/// <summary>Tag used for log output from this class.</summary>
private const string LOG_TAG = "FHSyncDataset";
/// <summary>File name under which this dataset object itself is persisted.</summary>
private const string PERSIST_FILE_NAME = ".sync.json";
/// <summary>
/// Where is the data persisted.
/// </summary>
protected const string DATA_PERSIST_FILE_NAME = ".data.json";
/// <summary>
/// Where is the pending data persisted.
/// </summary>
protected const string PENDING_DATA_PERSIST_FILE_NAME = ".pendings.json";
/// <summary>
/// If the sync loop is running.
/// </summary>
private Boolean syncRunning = false;
/// <summary>
/// Is there any pending sync records.
/// </summary>
private Boolean syncPending = false;
/// <summary>
/// The store of pending records.
/// </summary>
protected IDataStore<FHSyncPendingRecord<T>> pendingRecords;
/// <summary>
/// The store of data records.
/// </summary>
protected IDataStore<FHSyncDataRecord<T>> dataRecords;
// Shared services resolved once for all dataset instances.
// NOTE(review): the original "/// Should the sync be stopped" comment was
// misplaced here; no such flag exists in this part of the class.
private static ILogService logger = ServiceFinder.Resolve<ILogService>();
private static INetworkService networkService = ServiceFinder.Resolve<INetworkService>();
/// <summary>
/// The sync configuration.
/// </summary>
public FHSyncConfig SyncConfig { set; get; }
/// <summary>
/// The hash value of the dataset.
/// </summary>
public String HashValue { get; set; }
/// <summary>
/// The id of the data set the sync client is currently managing.
/// </summary>
[JsonProperty]
protected string DatasetId { get; set; }
/// <summary>
/// When the last sync started.
/// </summary>
[JsonProperty]
private Nullable<DateTime> SyncStart { get; set; }
/// <summary>
/// When the last sync ended.
/// </summary>
[JsonProperty]
private Nullable<DateTime> SyncEnd { get; set; }
/// <summary>
/// The query params for the data records. Will be used to send to the cloud when listing initial records.
/// </summary>
[JsonProperty("QueryParams")]
public IDictionary<string, string> QueryParams { get; set; }
/// <summary>
/// The meta data for the dataset.
/// </summary>
[JsonProperty("MetaData")]
protected FHSyncMetaData MetaData { get; set; }
/// <summary>
/// If this is set to true, a sync loop will start almost immediately.
/// Not persisted with the dataset.
/// </summary>
[JsonIgnore]
public Boolean ForceSync { set; get; }
/// <summary>
/// Records change acknowledgements.
/// </summary>
[JsonProperty("Acknowledgements")]
protected List<FHSyncResponseUpdatesData> Acknowledgements { get; set; }
/// <summary>
/// Raised for sync lifecycle notifications (see OnSyncNotification callers).
/// </summary>
public event EventHandler<FHSyncNotificationEventArgs> SyncNotificationHandler;
/// <summary>
/// Maps temporary client-assigned uids to the definitive uids assigned by the cloud app.
/// </summary>
[JsonProperty]
protected IDictionary<string, string> UidMapping { get; set;}
/// <summary>
/// Parameterless constructor. Fields are populated by Build(), or --
/// presumably -- by JSON deserialization when a persisted dataset is
/// reloaded (see LoadExistingDataSet) -- TODO confirm.
/// </summary>
public FHSyncDataset()
{
}
/// <summary>
/// Init a sync dataset with some parameters. Loads a previously
/// persisted dataset for <paramref name="datasetId"/> if one exists;
/// otherwise creates, initialises and immediately persists a new one.
/// </summary>
/// <param name="datasetId">Dataset identifier.</param>
/// <param name="syncConfig">Sync config.</param>
/// <param name="qp">Query parameters that will be send to the cloud when listing dataset.</param>
/// <param name="meta">Meta data that will be send to the cloud when syncing.</param>
/// <typeparam name="X">The record model type managed by the dataset.</typeparam>
public static FHSyncDataset<X> Build<X>(string datasetId, FHSyncConfig syncConfig, IDictionary<string, string> qp, FHSyncMetaData meta) where X : IFHSyncModel
{
    // NOTE(review): this mutates the process-wide Json.NET default
    // settings, affecting every JsonConvert call in the application.
    JsonConvert.DefaultSettings = () => new JsonSerializerSettings
    {
        NullValueHandling = NullValueHandling.Ignore
    };
    //check if there is a dataset model file exists and load it
    string syncClientMeta = FHSyncUtils.GetDataFilePath(datasetId, PERSIST_FILE_NAME);
    FHSyncDataset<X> dataset = LoadExistingDataSet<X>(syncClientMeta, datasetId);
    if (null == dataset)
    {
        //no existing one, create a new one
        dataset = new FHSyncDataset<X>();
        dataset.DatasetId = datasetId;
        dataset.SyncConfig = syncConfig;
        dataset.QueryParams = null == qp ? new Dictionary<string, string>() : qp;
        dataset.MetaData = null == meta ? new FHSyncMetaData() : meta;
        // Both stores persist alongside the dataset's own .sync.json file.
        dataset.dataRecords = new InMemoryDataStore<FHSyncDataRecord<X>>
        {
            PersistPath = GetPersistFilePathForDataset(syncConfig, datasetId, DATA_PERSIST_FILE_NAME)
        };
        dataset.pendingRecords = new InMemoryDataStore<FHSyncPendingRecord<X>>
        {
            PersistPath = GetPersistFilePathForDataset(syncConfig, datasetId, PENDING_DATA_PERSIST_FILE_NAME)
        };
        dataset.UidMapping = new Dictionary<string, string>();
        //persist the dataset immediately
        dataset.Save();
    }
    return dataset;
}
/// <summary>
/// Resolve a possibly-temporary uid to the definitive cloud-assigned
/// uid; returns the input unchanged when no mapping exists.
/// </summary>
private string GetUid(string oldOrNewUid)
{
    string mapped;
    return UidMapping.TryGetValue(oldOrNewUid, out mapped) ? mapped : oldOrNewUid;
}
/// <summary>
/// List all data records, returning defensive clones so callers cannot
/// mutate the stored copies.
/// </summary>
public List<T> List()
{
    var results = new List<T>();
    foreach (var entry in this.dataRecords.List())
    {
        T copy = (T)FHSyncUtils.Clone(entry.Value.Data);
        copy.UID = entry.Key;
        results.Add(copy);
    }
    return results;
}
/// <summary>
/// Read the data record specified by uid (temporary or cloud-assigned).
/// Returns a clone of the stored data, or default(T) when not found.
/// </summary>
/// <param name="uid">Uid.</param>
public T Read(string uid)
{
    Contract.Assert(null != uid, "uid is null");
    FHSyncDataRecord<T> stored = this.dataRecords.Get(GetUid(uid));
    if (null == stored)
    {
        return default(T);
    }
    T copy = (T)FHSyncUtils.Clone(stored.Data);
    copy.UID = stored.Uid;
    return copy;
}
/// <summary>
/// Create a new record locally and queue a "create" pending record for
/// the next sync. The returned clone carries the temporary uid assigned
/// by the pending record.
/// </summary>
/// <param name="data">Data; UID must be null.</param>
public T Create(T data)
{
    Contract.Assert(data.UID == null, "data is not new");
    FHSyncPendingRecord<T> pending = AddPendingRecord(null, data, "create");
    T result = default(T);
    if (null != pending)
    {
        // For creation the pending record's uid is the record's uid for now.
        FHSyncDataRecord<T> stored = this.dataRecords.Get(pending.Uid);
        if (null != stored)
        {
            result = (T)FHSyncUtils.Clone(stored.Data);
            result.UID = stored.Uid;
        }
    }
    if (result == null)
    {
        throw new Exception("create failed");
    }
    return result;
}
/// <summary>
/// Update the specified data locally and queue an "update" pending
/// record for the next sync.
/// </summary>
/// <param name="data">Data; UID must already be set.</param>
/// <returns>A clone of the locally stored, updated data.</returns>
/// <exception cref="Exception">Thrown when the update could not be applied locally.</exception>
public T Update(T data)
{
    Contract.Assert(data.UID != null, "data is new");
    // Resolve a temporary uid to the definitive cloud uid, if mapped.
    data.UID = GetUid(data.UID);
    FHSyncDataRecord<T> record = this.dataRecords.Get(data.UID);
    Contract.Assert(null != record, "data record with uid " + data.UID + " doesn't exist");
    T ret = default(T);
    FHSyncPendingRecord<T> pendingRecord = AddPendingRecord(data.UID, data, "update");
    if (null != pendingRecord)
    {
        FHSyncDataRecord<T> updatedRecord = this.dataRecords.Get(data.UID);
        if (null != updatedRecord)
        {
            // BUGFIX: clone the freshly stored record, not the pre-update
            // snapshot ("record"), so callers get the updated data back.
            ret = (T)FHSyncUtils.Clone(updatedRecord.Data);
            ret.UID = updatedRecord.Uid;
        }
    }
    if (ret == null)
    {
        throw new Exception("update failed");
    }
    return ret;
}
/// <summary>
/// Delete the record with the given uid locally and queue a "delete"
/// pending record for the next sync.
/// </summary>
/// <param name="uid">Uid (temporary or cloud-assigned).</param>
/// <returns>A clone of the deleted data.</returns>
/// <exception cref="Exception">Thrown when the delete could not be applied locally.</exception>
public T Delete(string uid)
{
    Contract.Assert(null != uid, "uid is null");
    // Resolve a temporary uid to the definitive cloud uid, if mapped.
    uid = GetUid(uid);
    FHSyncDataRecord<T> record = this.dataRecords.Get(uid);
    Contract.Assert(null != record, "data record with uid " + uid + " doesn't exist");
    T ret = default(T);
    FHSyncPendingRecord<T> pendingRecord = AddPendingRecord(uid, record.Data, "delete");
    if (null != pendingRecord)
    {
        ret = (T)FHSyncUtils.Clone(record.Data);
        ret.UID = uid;
    }
    // BUGFIX: the original used ret.Equals(default(T)), which throws a
    // NullReferenceException instead of the intended Exception when ret
    // is still null. Use the same null check as Create/Update.
    if (ret == null)
    {
        throw new Exception("delete failed");
    }
    return ret;
}
/// <summary>
/// Create a pending record for a local change and apply the change to
/// the local data store, notifying listeners along the way.
/// </summary>
/// <param name="UID">Uid of the record being changed; null for "create".</param>
/// <param name="dataRecords">The record data. NOTE(review): the parameter name shadows the dataRecords field.</param>
/// <param name="action">"create", "update" or "delete".</param>
/// <returns>The pending record that was queued.</returns>
protected FHSyncPendingRecord<T> AddPendingRecord(string UID, T dataRecords, string action)
{
    // Tell listeners the change is being recorded while offline.
    if (!networkService.IsOnline())
    {
        this.OnSyncNotification(UID, SyncNotification.OfflineUpdate, action);
    }
    //create pendingRecord
    FHSyncPendingRecord<T> pendingRecord = new FHSyncPendingRecord<T>();
    pendingRecord.InFlight = false;
    pendingRecord.Action = action;
    FHSyncDataRecord<T> dataRecord = null;
    if (null != dataRecords)
    {
        dataRecord = new FHSyncDataRecord<T>(dataRecords);
        pendingRecord.PostData = dataRecord;
    }
    if ("create".Equals(action))
    {
        // For creation the pending record's hash serves as a temporary uid
        // until the cloud assigns the definitive one.
        pendingRecord.Uid = pendingRecord.GetHashValue();
        dataRecord.Uid = pendingRecord.Uid;
    }
    else
    {
        // update/delete: keep the existing uid and snapshot the previous
        // state so conflicts can be detected later.
        FHSyncDataRecord<T> existing = this.dataRecords.Get(UID);
        dataRecord.Uid = existing.Uid;
        pendingRecord.Uid = existing.Uid;
        pendingRecord.PreData = existing.Clone();
    }
    StorePendingRecord(pendingRecord);
    string uid = pendingRecord.Uid;
    // Apply the change to the local data store.
    if("delete".Equals(action)){
        this.dataRecords.Delete(UID);
    } else {
        this.dataRecords.Insert(uid, dataRecord);
    }
    // Persist the dataset state and notify that the local update applied.
    this.Save();
    this.OnSyncNotification(uid, SyncNotification.LocalUpdateApplied, pendingRecord.Action);
    return pendingRecord;
}
//TODO: probably move this to a dedicated PendingRecordsManager
/// <summary>
/// Store a pending record and reconcile it against any earlier pending
/// record for the same uid (collapsing create+delete pairs, chaining
/// pre-data through updates, and delaying behind in-flight records).
/// </summary>
/// <param name="pendingRecord">The freshly created pending record.</param>
protected void StorePendingRecord(FHSyncPendingRecord<T> pendingRecord)
{
    this.pendingRecords.Insert(pendingRecord.GetHashValue(), pendingRecord);
    string previousPendingUID = null;
    FHSyncPendingRecord<T> previousPending = null;
    string uid = pendingRecord.Uid;
    DebugLog("update local dataset for uid " + uid + " - action = " + pendingRecord.Action);
    FHSyncDataRecord<T> existing = dataRecords.Get(uid);
    // Was the current local record itself produced by a pending change?
    Boolean fromPending = this.MetaData.GetMetaDataAsBool(uid, "fromPending");
    if ("create".Equals(pendingRecord.Action)) {
        if (null != existing) {
            DebugLog("data already exists for uid for create :: " + existing.ToString());
            if (fromPending) {
                // Drop the superseded pending record for this uid.
                previousPendingUID = this.MetaData.GetMetaDataAsString(uid, "pendingUid");
                if (null != previousPendingUID) {
                    this.pendingRecords.Delete(previousPendingUID);
                }
            }
        }
        this.MetaData.InsertBoolMetaData(uid, "fromPending", true);
        this.MetaData.InsertStringMetaData(uid, "pendingUid", pendingRecord.GetHashValue());
    }
    if ("update".Equals(pendingRecord.Action)) {
        string metaPendingHash = pendingRecord.GetHashValue();
        if (null != existing) {
            DebugLog("Update an existing pending record for dataset :: " + existing.ToString());
            previousPendingUID = this.MetaData.GetMetaDataAsString(uid, "pendingUid");
            if(null != previousPendingUID){
                this.MetaData.InsertStringMetaData(uid, "previousPendingUid", previousPendingUID);
                previousPending = this.pendingRecords.Get(previousPendingUID);
                if(null != previousPending) {
                    if(!previousPending.InFlight) {
                        DebugLog("existing pre-flight pending record =" + previousPending.ToString());
                        // NOTE(review): these two branches look copied from the
                        // delete-handling logic of the JS SDK; the first deletes
                        // the NEW pending record when the previous one was a
                        // create, the second (testing pendingRecord, not
                        // previousPending) collapses update-on-update -- verify
                        // against the reference sync client before changing.
                        if ("create".Equals(previousPending.Action))
                        {
                            // We are trying to perform a delete on an existing pending create
                            // These cancel each other out so remove them both
                            pendingRecords.Delete(pendingRecord.GetHashValue());
                            pendingRecords.Delete(pendingRecord.Uid);
                        }
                        if ("update".Equals(pendingRecord.Action))
                        {
                            // We are trying to perform a delete on an existing pending update
                            // Use the pre value from the pending update for the delete and
                            // get rid of the pending update
                            pendingRecord.PreData = previousPending.PreData;
                            pendingRecord.InFlight = false;
                            pendingRecords.Delete(previousPending.Uid);
                        }
                    } else {
                        // Previous change is already in flight; wait for it.
                        DebugLog("existing in-flight pending record = " + previousPending.ToString());
                        pendingRecord.SetDelayed(previousPending.GetHashValue());
                        pendingRecord.Waiting = previousPending.GetHashValue();
                    }
                }
            }
        }
        this.MetaData.InsertBoolMetaData(uid, "fromPending", true);
        this.MetaData.InsertStringMetaData(uid, "pendingUid", metaPendingHash);
    }
    if("delete".Equals(pendingRecord.Action)){
        if(null != existing){
            if(fromPending){
                DebugLog("Deleting an existing pending record for dataset :: " + existing.ToString());
                previousPendingUID = this.MetaData.GetMetaDataAsString(uid, "pendingUid");
                if(null != previousPendingUID){
                    this.MetaData.InsertStringMetaData(uid, "previousPendingUid", previousPendingUID);
                    previousPending = this.pendingRecords.Get(previousPendingUID);
                    if(!previousPending.InFlight){
                        DebugLog("existing pending record = " + previousPending.ToString());
                        if("create".Equals(previousPending.Action)){
                            // delete cancels a not-yet-synced create: drop both.
                            this.pendingRecords.Delete(pendingRecord.GetHashValue());
                            this.pendingRecords.Delete(previousPendingUID);
                        }
                        if("update".Equals(previousPending.Action)){
                            // delete absorbs a not-yet-synced update: keep the
                            // update's pre-data and drop the update record.
                            pendingRecord.PreData = previousPending.PreData;
                            pendingRecord.InFlight = false;
                            this.pendingRecords.Delete(previousPendingUID);
                        }
                    } else {
                        DebugLog("existing in-flight pending record = " + previousPending.ToString());
                        pendingRecord.SetDelayed(previousPending.GetHashValue());
                        pendingRecord.Waiting = pendingRecord.GetHashValue();
                    }
                }
            }
        }
    }
    // Flag a sync if the config asks for local updates to sync automatically.
    if(this.SyncConfig.AutoSyncLocalUpdates){
        this.syncPending = true;
    }
}
/// <summary>
/// Runs one full synchronization loop against the cloud: sends all ready
/// pending records, reconciles the response with local pending state, then
/// pulls individual record deltas via <c>SyncRecords</c>.
/// </summary>
/// <returns>A task that completes when this loop iteration has finished.</returns>
public async Task StartSyncLoop()
{
    // Reset loop state and tell listeners a sync has started.
    this.syncPending = false;
    this.syncRunning = true;
    this.SyncStart = DateTime.Now;
    this.OnSyncNotification(null, SyncNotification.SyncStarted, null);
    if(networkService.IsOnline()){
        // Building the params has a side effect: ready pending records are
        // flagged InFlight by the FHSyncLoopParams constructor.
        FHSyncLoopParams syncParams = new FHSyncLoopParams(this);
        if(syncParams.Pendings.Count > 0){
            logger.i(LOG_TAG, "starting sync loop - global hash = " + this.HashValue + " :: params = " + syncParams.ToString(), null);
        }
        try {
            FHResponse syncRes = await DoCloudCall(syncParams);
            if(null == syncRes.Error){
                FHSyncResponseData<T> returnedSyncData = (FHSyncResponseData<T>)FHSyncUtils.DeserializeObject(syncRes.RawResponse, typeof(FHSyncResponseData<T>));
                //TODO: it should be possible achieve the same effects using one loop through the pending records, there is no need to loop the pending records 6 times!
                //e.g.
                /**
                 * for each pending in pendingRecords
                 *   check if sync response contains update for the pending
                 *     true => update pending pre data from the syn response
                 *     false => update syn response with the pending record post data
                 *
                 *   if pending is in flight
                 *     if pending is crashed
                 *       check if there is updates for the crashed record
                 *         true => resole the crash status
                 *         false => keep waiting or give up
                 *
                 *   if pendingRecord is delayed
                 *     check if sync response contains info about the delay records
                 *       true => resolve delayed status
                 */
                // NOTE: the reconciliation order below is significant — each step
                // consumes or rewrites state the next step reads.
                // Check to see if any new pending records need to be updated to reflect the current state of play.
                this.UpdatePendingFromNewData(returnedSyncData);
                // Check to see if any previously crashed inflight records can now be resolved
                this.UpdateCrashedInFlightFromNewData(returnedSyncData);
                //Check to see if any delayed pending records can now be set to ready
                this.UpdateDelayedFromNewData(returnedSyncData);
                //Check meta data as well to make sure it contains the correct info
                this.UpdateMetaFromNewData(returnedSyncData);
                // Update the new dataset with details of any inflight updates which we have not received a response on
                this.UpdateNewDataFromInFlight(returnedSyncData);
                // Update the new dataset with details of any pending updates
                this.UpdateNewDataFromPending(returnedSyncData);
                if(null != returnedSyncData.Updates)
                {
                    // Remap temporary (hash-based) uids to cloud-assigned uids,
                    // then ack/notify for each processed update.
                    CheckUidChanges(returnedSyncData);
                    this.ProcessUpdatesFromRemote(returnedSyncData);
                }
                DebugLog("Local dataset stale - syncing records :: local hash = " + this.HashValue + " - remoteHash = " + returnedSyncData.Hash);
                //Different hash value returned - sync individual records
                await this.SyncRecords();
            } else {
                // The HTTP call failed to complete succesfully, so the state of the current pending updates is unknown
                // Mark them as "crashed". The next time a syncLoop completets successfully, we will review the crashed
                // records to see if we can determine their current state.
                this.MarkInFlightAsCrased();
                DebugLog("syncLoop failed :: res = " + syncRes.RawResponse + " err = " + syncRes.Error);
                this.SyncLoopComplete(syncRes.RawResponse, SyncNotification.SyncFailed);
            }
        } catch (Exception e) {
            // Deserialization or transport failure: report the loop as failed.
            DebugLog("Error performing sync - " + e.ToString());
            this.SyncLoopComplete(e.Message, SyncNotification.SyncFailed);
        }
    } else {
        // No network: do not touch pending state, just notify failure.
        this.OnSyncNotification(null, SyncNotification.SyncFailed, "offline");
    }
}
/// <summary>
/// Remaps temporary client-generated uids (the record hash) to the permanent
/// uids assigned by the cloud for applied "create" actions. Updates the
/// response records, the metadata store and any outstanding pending records
/// so all of them refer to the new uid.
/// </summary>
/// <param name="syncResponse">The parsed response of a sync loop call.</param>
private void CheckUidChanges(FHSyncResponseData<T> syncResponse)
{
    // Only applied updates can carry uid changes.
    if (!syncResponse.Updates.ContainsKey("applied"))
    {
        return;
    }
    var remappedUids = new Dictionary<string, string>();
    foreach (var entry in syncResponse.Updates["applied"])
    {
        var appliedUpdate = entry.Value;
        if (!"create".Equals(appliedUpdate.Action))
        {
            continue;
        }
        //we are receving the results of creations, at this point, we will have the old uid(the hash) and the real uid generated by the cloud
        var cloudUid = appliedUpdate.Uid;
        var localUid = appliedUpdate.Hash;
        UidMapping[localUid] = cloudUid;
        remappedUids[localUid] = cloudUid;
        // Re-key the returned record under the permanent uid.
        if (syncResponse.Records != null && syncResponse.Records.ContainsKey(localUid))
        {
            var movedRecord = syncResponse.Records[localUid];
            syncResponse.Records[cloudUid] = movedRecord;
            syncResponse.Records.Remove(localUid);
        }
        //update the old uid in meta data
        if (MetaData.metaData.ContainsKey(localUid))
        {
            MetaData.metaData[cloudUid] = MetaData.metaData[localUid];
            MetaData.metaData.Remove(localUid);
        }
    }
    if (remappedUids.Count > 0)
    {
        // Point any still-pending local changes at the permanent uid.
        foreach (var pendingEntry in pendingRecords.List())
        {
            string permanentUid;
            if (remappedUids.TryGetValue(pendingEntry.Value.Uid, out permanentUid))
            {
                pendingEntry.Value.Uid = permanentUid;
            }
        }
    }
}
/// <summary>
/// Performs the "syncRecords" cloud call: sends the uid/hash pairs of all
/// local records and applies the returned per-record deltas (create/update/
/// delete) to the local store, emitting a notification for each delta.
/// Completes the sync loop on success or failure.
/// </summary>
private async Task SyncRecords()
{
    FHSyncRecordsParams syncParams = new FHSyncRecordsParams(this);
    FHResponse syncRecordsRes = await this.DoCloudCall(syncParams);
    if(null == syncRecordsRes.Error){
        FHSyncRecordsResponseData<T> remoteDataRecords = (FHSyncRecordsResponseData<T>) FHSyncUtils.DeserializeObject(syncRecordsRes.RawResponse, typeof(FHSyncRecordsResponseData<T>));
        // Strip out deltas for records that still have local pending changes
        // so local edits are not clobbered.
        ApplyPendingChangesToRecords(remoteDataRecords);
        Dictionary<string, FHSyncDataRecord<T>> createdRecords = remoteDataRecords.CreatedRecords;
        foreach(var created in createdRecords){
            FHSyncDataRecord<T> r = created.Value;
            r.Uid = created.Key;
            this.dataRecords.Insert(created.Key, r);
            this.OnSyncNotification(created.Key, SyncNotification.RecordDeltaReceived, "create");
        }
        Dictionary<string, FHSyncDataRecord<T>> updatedRecords = remoteDataRecords.UpdatedRecords;
        foreach(var updated in updatedRecords){
            FHSyncDataRecord<T> r = updated.Value;
            r.Uid = updated.Key;
            this.dataRecords.Insert(updated.Key, r);
            this.OnSyncNotification(updated.Key, SyncNotification.RecordDeltaReceived, "update");
        }
        Dictionary<string, FHSyncDataRecord<T>> deletedRecords = remoteDataRecords.DeletedRecords;
        foreach (var deleted in deletedRecords) {
            this.dataRecords.Delete(deleted.Key);
            this.OnSyncNotification(deleted.Key, SyncNotification.RecordDeltaReceived, "delete");
        }
        this.OnSyncNotification(remoteDataRecords.Hash, SyncNotification.DeltaReceived, "partial dataset");
        // Adopt the new global hash if the cloud returned one.
        if(null != remoteDataRecords.Hash){
            this.HashValue = remoteDataRecords.Hash;
        }
        this.SyncLoopComplete("online", SyncNotification.SyncCompleted);
    } else {
        DebugLog("SyncRecords failed : " + syncRecordsRes.RawResponse + " error = " + syncRecordsRes.Error);
        this.SyncLoopComplete(syncRecordsRes.RawResponse, SyncNotification.SyncFailed);
    }
}
/// <summary>
/// Removes from the syncRecords response any delta that targets a record
/// with an outstanding local pending change, so local edits are not
/// overridden (avoids the blinking disappear/reappear effect). For remote
/// updates, the remote value becomes the pending record's pre-data first.
/// </summary>
/// <param name="remoteDataRecords">The parsed syncRecords response, mutated in place.</param>
private void ApplyPendingChangesToRecords(FHSyncRecordsResponseData<T> remoteDataRecords)
{
    DebugLog(string.Format("SyncRecords result = {0} pending = {1}", remoteDataRecords, pendingRecords));
    foreach (var entry in pendingRecords.List())
    {
        // If the records returned from syncRecord request contains elements in pendings,
        // it means there are local changes that haven't been applied to the cloud yet.
        // Remove those records from the response to make sure local data will not be
        // overridden (blinking desappear / reappear effect).
        FHSyncPendingRecord<T> localPending = entry.Value;
        string uid = localPending.Uid;
        // Dictionary.Remove is a no-op when the key is absent.
        remoteDataRecords.CreatedRecords.Remove(uid);
        FHSyncDataRecord<T> remoteUpdate;
        if (remoteDataRecords.UpdatedRecords.TryGetValue(uid, out remoteUpdate))
        {
            localPending.PreData = remoteUpdate;
            remoteDataRecords.UpdatedRecords.Remove(uid);
        }
        remoteDataRecords.DeletedRecords.Remove(uid);
    }
}
/// <summary>
/// Finalizes a sync loop (successful or not): clears the running flag,
/// records the end time, persists the dataset and notifies listeners.
/// </summary>
/// <param name="message">Detail message forwarded to the notification handler.</param>
/// <param name="notification">The terminal notification code for this loop.</param>
private void SyncLoopComplete(string message, SyncNotification notification)
{
    syncRunning = false;
    SyncEnd = DateTime.Now;
    Save();
    OnSyncNotification(HashValue, notification, message);
}
/// <summary>
/// Refreshes not-yet-submitted pending records from a sync response:
/// updates the pre-data of pending updates/deletes so they will not
/// collide when sent, follows uid changes for records that started life as
/// local creates, and converts a pending create into an update when the
/// cloud reports the create as applied.
/// </summary>
/// <param name="syncResData">The parsed response of a sync loop call.</param>
private void UpdatePendingFromNewData(FHSyncResponseData<T> syncResData)
{
    if(null != pendingRecords && null != syncResData.Records){
        Dictionary<string, FHSyncPendingRecord<T>> localPendingRecords = pendingRecords.List();
        foreach (var item in localPendingRecords)
        {
            FHSyncPendingRecord<T> pendingRecord = item.Value;
            if(!pendingRecord.InFlight){
                //process pending records that have not been submitted
                DebugLog("Found Non in flight record -> action = " + pendingRecord.Action + " :: uid=" + pendingRecord.Uid + " :: hash=" + pendingRecord.GetHashValue());
                if("update".Equals(pendingRecord.Action) || "delete".Equals(pendingRecord.Action)){
                    //update the prevalue of pending record to reflect the latest data returned from sync
                    //This will prevent a collision being reported when the pending record is sent
                    //TODO: is this mean we are blindly apply changes from remote to the current store, then when the local change is submitted, the remote data will be overridden by local updates even local updates could be wrong?
                    FHSyncDataRecord<T> returnedRecord = null;
                    syncResData.Records.TryGetValue(pendingRecord.Uid, out returnedRecord);
                    if(null != returnedRecord){
                        DebugLog("updating pre values for existing pending record " + pendingRecord.Uid);
                        pendingRecord.PreData = returnedRecord;
                    } else {
                        //The update/delete maybe for a newly created record in which case the uid will have changed
                        string previousPendingUid = this.MetaData.GetMetaDataAsString(pendingRecord.Uid, "previousPendingUid");
                        if(null != previousPendingUid){
                            FHSyncPendingRecord<T> previousPendingRecord = null;
                            localPendingRecords.TryGetValue(previousPendingUid, out previousPendingRecord);
                            if(null != previousPendingRecord){
                                // If the earlier create was applied, the cloud-assigned uid
                                // replaces the temporary hash-based uid on this pending record.
                                FHSyncResponseUpdatesData appliedRecord = syncResData.GetAppliedUpdates(previousPendingRecord.GetHashValue());
                                if(null != appliedRecord){
                                    string newUid = appliedRecord.Uid;
                                    FHSyncDataRecord<T> newRecord = syncResData.GetRemoteRecord(newUid);
                                    if(null != newRecord){
                                        DebugLog("Updating pre values for existing pending record which was previously a create " + pendingRecord.Uid + " => " + newUid);
                                        pendingRecord.PreData = newRecord;
                                        pendingRecord.Uid = newUid;
                                    }
                                }
                            }
                        }
                    }
                }
                if("create".Equals(pendingRecord.Action)){
                    // A pending create that the cloud already applied becomes a
                    // pending update against the newly created remote record.
                    FHSyncResponseUpdatesData appliedRecord = syncResData.GetAppliedUpdates(pendingRecord.GetHashValue());
                    if(null != appliedRecord){
                        DebugLog("Found an update for a pending create + " + appliedRecord.ToString());
                        FHSyncDataRecord<T> newRecord = syncResData.GetRemoteRecord(pendingRecord.GetHashValue());
                        if(null != newRecord){
                            DebugLog("Changing pending create to an update based on new record " + newRecord.ToString());
                            pendingRecord.Action = "update";
                            pendingRecord.PreData = newRecord;
                            pendingRecord.Uid = appliedRecord.Uid;
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Resolves pending records previously marked as crashed (in-flight when a
/// sync call failed). If the new response reports their outcome, the local
/// store is corrected and the record is removed; otherwise a crash counter
/// is incremented until the configured limit, after which the record is
/// either resent or dropped depending on <c>ResendCrashedUpdated</c>.
/// </summary>
/// <param name="syncResData">The parsed response of a sync loop call.</param>
private void UpdateCrashedInFlightFromNewData(FHSyncResponseData<T> syncResData)
{
    Dictionary<string, FHSyncPendingRecord<T>> localPendingRecords = this.pendingRecords.List();
    foreach (string pendingRecordKey in localPendingRecords.Keys)
    {
        bool processed = false;
        FHSyncPendingRecord<T> pendingRecord = localPendingRecords[pendingRecordKey];
        if(pendingRecord.InFlight && pendingRecord.Crashed){
            DebugLog("Found crashed inFlight pending record uid =" + pendingRecord.Uid + " :: hash = " + pendingRecord.GetHashValue());
            if(null != syncResData.Updates && syncResData.Updates.ContainsKey("hashes") ){
                FHSyncResponseUpdatesData crashedUpdate = syncResData.GetUpdateByHash(pendingRecord.GetHashValue());
                if(null != crashedUpdate){
                    DebugLog("resolving status for crashed inflight pending record " + crashedUpdate.ToString());
                    if(crashedUpdate.Type == FHSyncResponseUpdatesData.FHSyncResponseUpdatesDataType.failed){
                        // Roll back the failed local change: remove a failed create,
                        // or restore the pre-change data for update/delete.
                        if(crashedUpdate.Action.Equals("create")){
                            DebugLog("Deleting failed create from dataset");
                            this.dataRecords.Delete(crashedUpdate.Uid);
                        } else if(crashedUpdate.Action.Equals("update") || crashedUpdate.Action.Equals("delete")){
                            DebugLog("Reverting failed " + crashedUpdate.Action + " in dataset");
                            this.dataRecords.Insert(crashedUpdate.Uid, pendingRecord.PreData);
                        }
                    }
                    this.pendingRecords.Delete(pendingRecordKey);
                    // Notify with the outcome reported by the cloud.
                    switch (crashedUpdate.Type)
                    {
                        case FHSyncResponseUpdatesData.FHSyncResponseUpdatesDataType.applied:
                            OnSyncNotification(crashedUpdate.Uid, SyncNotification.RemoteUpdateApplied, crashedUpdate.ToString());
                            break;
                        case FHSyncResponseUpdatesData.FHSyncResponseUpdatesDataType.failed:
                            OnSyncNotification(crashedUpdate.Uid, SyncNotification.RemoteUpdateFailed, crashedUpdate.ToString());
                            break;
                        case FHSyncResponseUpdatesData.FHSyncResponseUpdatesDataType.collisions:
                            OnSyncNotification(crashedUpdate.Uid, SyncNotification.CollisionDetected, crashedUpdate.ToString());
                            break;
                        default:
                            break;
                    }
                    processed = true;
                }
            }
            if(!processed){
                //no word on our crashed upate - increment a counter to reflect another sync that did not give us any updates on our crashed record
                pendingRecord.IncrementCrashCount();
                if(pendingRecord.CrashedCount > this.SyncConfig.CrashedCountWait){
                    DebugLog("Crashed inflight pending record has reached CrashedCount limit");
                    if(this.SyncConfig.ResendCrashedUpdated){
                        DebugLog("Retrying crashed inflight pending record");
                        pendingRecord.ResetCrashStatus();
                    } else {
                        DebugLog("Deleting crashed inflight pending record");
                        this.pendingRecords.Delete(pendingRecordKey);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Re-enables delayed pending records whose blocking update (the record they
/// were waiting on) has been resolved by the cloud in this sync response.
/// </summary>
/// <param name="syncResData">The parsed response of a sync loop call.</param>
private void UpdateDelayedFromNewData(FHSyncResponseData<T> syncResData)
{
    foreach (var entry in this.pendingRecords.List())
    {
        FHSyncPendingRecord<T> delayedCandidate = entry.Value;
        // Only records parked behind another in-flight update are of interest.
        if (!delayedCandidate.Delayed || null == delayedCandidate.Waiting)
        {
            continue;
        }
        DebugLog("Found delayed pending record uid = " + delayedCandidate.Uid + " :: hash=" + delayedCandidate.GetHashValue());
        FHSyncResponseUpdatesData resolvedUpdate = syncResData.GetUpdateByHash(delayedCandidate.Waiting);
        if (null != resolvedUpdate)
        {
            DebugLog("Waiting pending record is resolved rec =" + resolvedUpdate.ToString());
            delayedCandidate.ResetDelayed();
        }
    }
}
/// <summary>
/// Reconciles the metadata store with a sync response: clears the
/// "pendingUid" / "previousPendingUid" markers whose updates the cloud has
/// resolved, and deletes metadata entries once both markers are resolved.
/// </summary>
/// <param name="syncResData">The parsed response of a sync loop call.</param>
private void UpdateMetaFromNewData(FHSyncResponseData<T> syncResData)
{
    // Collect fully-resolved uids first; deleting while iterating Keys
    // would invalidate the enumeration.
    List<string> resolvedUids = new List<string>();
    foreach (string uid in this.MetaData.Keys)
    {
        string pendingHash = this.MetaData.GetMetaDataAsString(uid, "pendingUid");
        string previousPendingHash = this.MetaData.GetMetaDataAsString(uid, "previousPendingUid");
        DebugLog("Found metadata with uid = " + uid + " :: pendingHash = " + pendingHash + " :: previousPendingHash " + previousPendingHash);
        bool previousPendingResolved = true;
        bool pendingResolved = true;
        if (null != previousPendingHash)
        {
            //we have previous pending in meta data, see if it's resolved
            previousPendingResolved = false;
            FHSyncResponseUpdatesData previousUpdate = syncResData.GetUpdateByHash(previousPendingHash);
            if (null != previousUpdate)
            {
                DebugLog("Found previousPendingUid in meta data resolved - resolved = " + previousUpdate.ToString());
                //the previous pending is resolved in the cloud
                this.MetaData.DeleteMetaData(uid, "previousPendingUid");
                previousPendingResolved = true;
            }
        }
        if (null != pendingHash)
        {
            //we have current pending in meta data, see if it's resolved
            pendingResolved = false;
            FHSyncResponseUpdatesData pendingUpdate = syncResData.GetUpdateByHash(pendingHash);
            if (null != pendingUpdate)
            {
                DebugLog("Found pendingUid in meta data resolved - resolved = " + pendingUpdate.ToString());
                //the current pending is resolved in the cloud
                this.MetaData.DeleteMetaData(uid, "pendingUid");
                pendingResolved = true;
            }
        }
        if (pendingResolved && previousPendingResolved)
        {
            DebugLog("both previous and current pendings are resolved for meta data with uid " + uid + ". Delete it");
            resolvedUids.Add(uid);
        }
    }
    foreach (string resolvedUid in resolvedUids)
    {
        this.MetaData.Delete(resolvedUid);
    }
}
/// <summary>
/// Overlays the incoming dataset with local in-flight changes the cloud has
/// not yet acknowledged, so unconfirmed local edits stay visible: a pending
/// update replaces the remote record with the local post-data, a pending
/// delete removes the record, and a pending create is re-added.
/// </summary>
/// <param name="syncResData">The parsed response of a sync loop call, mutated in place.</param>
private void UpdateNewDataFromInFlight(FHSyncResponseData<T> syncResData)
{
    if(null != syncResData.Records){
        Dictionary<string, FHSyncPendingRecord<T>> localPendingRecords = this.pendingRecords.List();
        foreach (string pendingRecordKey in localPendingRecords.Keys)
        {
            FHSyncPendingRecord<T> pendingRecord = localPendingRecords[pendingRecordKey];
            if(pendingRecord.InFlight){
                FHSyncResponseUpdatesData updatedPending = syncResData.GetUpdateByHash(pendingRecordKey);
                // Only act when the response carries no verdict for this record.
                if(null == updatedPending){
                    DebugLog("Found inFlight pending record -> action =" + pendingRecord.Action + " :: uid = " + pendingRecord.Uid + " :: hash = " + pendingRecord.GetHashValue());
                    FHSyncDataRecord<T> newRecord = syncResData.GetRemoteRecord(pendingRecord.Uid);
                    if(pendingRecord.Action.Equals("update") && null != newRecord){
                        // BUGFIX: previously this assigned PostData to the local
                        // variable only, which had no effect. Write the local
                        // post-data into the incoming dataset instead.
                        syncResData.Records[pendingRecord.Uid] = pendingRecord.PostData;
                    } else if(pendingRecord.Action.Equals("delete") && null != newRecord){
                        syncResData.Records.Remove(pendingRecord.Uid);
                    } else if(pendingRecord.Action.Equals("create")){
                        DebugLog("re adding pending create to incomming dataset");
                        FHSyncDataRecord<T> createRecordData = pendingRecord.PostData.Clone();
                        syncResData.Records[pendingRecord.Uid] = createRecordData;
                    }
                }
            }
        }
    }
}
/// <summary>
/// Overlays the incoming dataset with local not-yet-submitted pending
/// changes so they are not lost when the response is applied: a pending
/// update replaces the remote record with the local post-data, a pending
/// delete removes the record, and a pending create is re-added.
/// </summary>
/// <param name="syncResData">The parsed response of a sync loop call, mutated in place.</param>
private void UpdateNewDataFromPending(FHSyncResponseData<T> syncResData)
{
    if(null != syncResData.Records){
        Dictionary<string, FHSyncPendingRecord<T>> localPendingRecords = this.pendingRecords.List();
        foreach (string pendingRecordKey in localPendingRecords.Keys)
        {
            FHSyncPendingRecord<T> pendingRecord = localPendingRecords[pendingRecordKey];
            if(!pendingRecord.InFlight){
                DebugLog("Found non inFlight record -> action =" + pendingRecord.Action + " :: uid = " + pendingRecord.Uid + " :: hash = " + pendingRecord.GetHashValue());
                FHSyncDataRecord<T> newRecord = syncResData.GetRemoteRecord(pendingRecord.Uid);
                if(pendingRecord.Action.Equals("update") && null != newRecord){
                    // BUGFIX: previously this assigned PostData to the local
                    // variable only, which had no effect. Write the local
                    // post-data into the incoming dataset instead.
                    syncResData.Records[pendingRecord.Uid] = pendingRecord.PostData;
                } else if(pendingRecord.Action.Equals("delete") && null != newRecord){
                    syncResData.Records.Remove(pendingRecord.Uid);
                } else if(pendingRecord.Action.Equals("create")){
                    DebugLog("re adding pending create to incomming dataset");
                    FHSyncDataRecord<T> createRecordData = pendingRecord.PostData.Clone();
                    syncResData.Records[pendingRecord.Uid] = createRecordData;
                }
            }
        }
    }
}
/// <summary>
/// Processes the per-record verdicts (applied/failed/collisions) in a sync
/// response: removes the matching in-flight pending records, emits the
/// corresponding notification for each, and collects acknowledgements to be
/// sent back to the cloud on the next sync loop.
/// </summary>
/// <param name="syncResData">The parsed response of a sync loop call.</param>
private void ProcessUpdatesFromRemote(FHSyncResponseData<T> syncResData)
{
    List<FHSyncResponseUpdatesData> acks = new List<FHSyncResponseUpdatesData>();
    foreach(string key in syncResData.Updates.Keys){
        // "hashes" is an index of all updates, not a verdict category; skip it.
        if(!"hashes".Equals(key)){
            Dictionary<string, FHSyncResponseUpdatesData> updates = syncResData.Updates[key];
            foreach(var item in updates){
                // NOTE(review): if update.Type matches none of the cases below,
                // the default(SyncNotification) value is emitted — presumably
                // that cannot happen with the three verdict categories; verify.
                SyncNotification notification = default(SyncNotification);
                FHSyncResponseUpdatesData update = item.Value;
                acks.Add(update);
                FHSyncPendingRecord<T> pending = this.pendingRecords.Get(item.Key);
                // Only resolve records we actually sent and that are not crashed;
                // crashed records are handled by UpdateCrashedInFlightFromNewData.
                if(null != pending && pending.InFlight && !pending.Crashed){
                    this.pendingRecords.Delete(item.Key);
                    switch (update.Type)
                    {
                        case FHSyncResponseUpdatesData.FHSyncResponseUpdatesDataType.applied:
                            notification = SyncNotification.RemoteUpdateApplied;
                            break;
                        case FHSyncResponseUpdatesData.FHSyncResponseUpdatesDataType.failed:
                            notification = SyncNotification.RemoteUpdateFailed;
                            break;
                        case FHSyncResponseUpdatesData.FHSyncResponseUpdatesDataType.collisions:
                            notification = SyncNotification.CollisionDetected;
                            break;
                        default:
                            break;
                    }
                    this.OnSyncNotification(update.Uid, notification, update.ToString());
                }
            }
        }
    }
    this.Acknowledgements = acks;
}
/// <summary>
/// Flags every in-flight pending record as crashed after a failed sync call,
/// since the outcome of those submissions is unknown. They are re-examined
/// by <c>UpdateCrashedInFlightFromNewData</c> on the next successful loop.
/// NOTE: "Crased" is a historical typo in the method name, kept so existing
/// callers keep compiling.
/// </summary>
private void MarkInFlightAsCrased()
{
    foreach (var entry in this.pendingRecords.List())
    {
        FHSyncPendingRecord<T> candidate = entry.Value;
        if (!candidate.InFlight)
        {
            continue;
        }
        DebugLog("Marking in flight pending record as crashed : " + entry.Key);
        candidate.Crashed = true;
    }
}
/// <summary>
/// Writes a debug log line prefixed with the calling method's name
/// (captured automatically via <see cref="CallerMemberNameAttribute"/>).
/// </summary>
/// <param name="message">The message to log.</param>
/// <param name="methodName">Filled in by the compiler; do not pass explicitly.</param>
private void DebugLog(string message, [CallerMemberName] string methodName = "")
{
    logger.d(LOG_TAG, string.Format("{0} - {1}", methodName, message), null);
}
/// <summary>
/// Sends the given sync parameters to the cloud, routing through either the
/// MBaaS sync endpoint or the legacy act endpoint depending on the detected
/// cloud type. Detects the type first when configured to Auto.
/// </summary>
/// <param name="syncParams">The serializable request payload (loop or records params).</param>
/// <returns>The raw cloud response.</returns>
protected virtual async Task<FHResponse> DoCloudCall(object syncParams)
{
    if(this.SyncConfig.SyncCloud == FHSyncConfig.SyncCloudType.Auto){
        await CheckSyncCloudType();
    }
    // NOTE: "Mbbas" is the enum member's spelling as declared elsewhere in
    // this project (likely a typo for "Mbaas"); it must match here.
    if(this.SyncConfig.SyncCloud == FHSyncConfig.SyncCloudType.Mbbas) {
        string service = string.Format("sync/{0}", this.DatasetId);
        FHResponse res = await FH.Mbaas(service, syncParams);
        return res;
    } else {
        FHResponse res = await FH.Act(this.DatasetId, syncParams);
        return res;
    }
}
/// <summary>
/// Probes the cloud app to decide which sync endpoint style it supports and
/// records the result in <c>SyncConfig.SyncCloud</c>. An OK or 500 response
/// to the legacy act probe means a legacy cloud; anything else is treated
/// as MBaaS.
/// </summary>
private async Task CheckSyncCloudType()
{
    Dictionary<string, object> actParams = new Dictionary<string, object>();
    actParams.Add("fh", "sync");
    FHResponse actRes = await FH.Act(this.DatasetId, actParams);
    if(actRes.StatusCode == HttpStatusCode.OK || actRes.StatusCode == HttpStatusCode.InternalServerError){
        this.SyncConfig.SyncCloud = FHSyncConfig.SyncCloudType.Legacy;
    } else {
        this.SyncConfig.SyncCloud = FHSyncConfig.SyncCloudType.Mbbas;
    }
}
/// <summary>
/// Persists the dataset: the data records, the pending records, and the
/// dataset's own metadata file. Failures are logged and rethrown.
/// </summary>
/// <exception cref="Exception">Rethrown when writing the metadata file fails.</exception>
protected void Save()
{
    this.dataRecords.Save();
    this.pendingRecords.Save();
    string syncClientMeta = FHSyncUtils.GetDataFilePath(this.DatasetId, PERSIST_FILE_NAME);
    IIOService iosService = ServiceFinder.Resolve<IIOService>();
    string content = FHSyncUtils.SerializeObject(this);
    try
    {
        iosService.WriteFile(syncClientMeta, content);
    }
    catch (Exception ex)
    {
        logger.e(LOG_TAG, "Failed to save dataset", ex);
        // BUGFIX: use "throw;" instead of "throw ex;" so the original
        // stack trace is preserved for the caller.
        throw;
    }
}
/// <summary>
/// Loads a previously persisted dataset from its metadata file, then loads
/// its data and pending record stores. Returns null when no persisted state
/// exists or the file is empty; deserialization failures are logged and
/// rethrown.
/// </summary>
/// <param name="syncClientMetaFile">Path of the persisted dataset metadata file.</param>
/// <param name="datasetId">The dataset identifier (unused here; kept for the caller's contract).</param>
/// <returns>The restored dataset, or null when nothing was persisted.</returns>
/// <exception cref="Exception">Rethrown when deserializing the persisted state fails.</exception>
private static FHSyncDataset<X> LoadExistingDataSet<X>(string syncClientMetaFile, string datasetId) where X : IFHSyncModel
{
    FHSyncDataset<X> result = null;
    IIOService ioService = ServiceFinder.Resolve<IIOService>();
    if (ioService.Exists(syncClientMetaFile))
    {
        string content = ioService.ReadFile(syncClientMetaFile);
        if (!string.IsNullOrEmpty(content))
        {
            try
            {
                FHSyncDataset<X> syncDataset = (FHSyncDataset<X>)FHSyncUtils.DeserializeObject(content, typeof(FHSyncDataset<X>));
                if (null != syncDataset)
                {
                    result = LoadDataForDataset<X>(syncDataset);
                }
            }
            catch (Exception ex)
            {
                logger.d(LOG_TAG, "Failed to load existing dataset", ex);
                // BUGFIX: use "throw;" instead of "throw ex;" so the original
                // stack trace is preserved for the caller.
                throw;
            }
        }
    }
    return result;
}
/// <summary>
/// Populates a restored dataset with its persisted data records and pending
/// records, loading each store from its own file.
/// </summary>
/// <param name="dataSet">The dataset shell deserialized from the metadata file.</param>
/// <returns>The same dataset instance with both stores attached.</returns>
private static FHSyncDataset<X> LoadDataForDataset<X>(FHSyncDataset<X> dataSet) where X: IFHSyncModel
{
    string recordsPath = GetPersistFilePathForDataset(dataSet.SyncConfig, dataSet.DatasetId, DATA_PERSIST_FILE_NAME);
    string pendingPath = GetPersistFilePathForDataset(dataSet.SyncConfig, dataSet.DatasetId, PENDING_DATA_PERSIST_FILE_NAME);
    dataSet.dataRecords = InMemoryDataStore<FHSyncDataRecord<X>>.Load<FHSyncDataRecord<X>>(recordsPath);
    dataSet.pendingRecords = InMemoryDataStore<FHSyncPendingRecord<X>>.Load<FHSyncPendingRecord<X>>(pendingPath);
    return dataSet;
}
/// <summary>
/// Computes the storage path for one of a dataset's persistence files.
/// Uses the configured <c>DataPersistanceDir</c> when one is set; otherwise
/// falls back to the default data file location.
/// </summary>
/// <param name="syncConfig">The dataset's sync configuration; may be null.</param>
/// <param name="datasetId">The dataset identifier.</param>
/// <param name="fileName">The persistence file name.</param>
/// <returns>The absolute path to use for the file.</returns>
protected static String GetPersistFilePathForDataset(FHSyncConfig syncConfig, string datasetId, string fileName)
{
    if (null != syncConfig && !string.IsNullOrEmpty(syncConfig.DataPersistanceDir))
    {
        return Path.Combine(syncConfig.DataPersistanceDir, datasetId, fileName);
    }
    return FHSyncUtils.GetDataFilePath(datasetId, fileName);
}
/// <summary>
/// Decides whether a sync loop should run now: never while one is already
/// running; otherwise when a sync is forced, when no sync has started yet
/// (initial sync), or when the configured sync frequency has elapsed since
/// the last completed loop. Consumes the ForceSync flag when a loop is due.
/// </summary>
/// <returns><c>true</c> when a sync loop should run now, <c>false</c> otherwise.</returns>
public bool ShouldSync()
{
    // Guard clause: already running, or neither active nor forced.
    if (syncRunning || (!this.SyncConfig.SyncActive && !this.ForceSync))
    {
        return false;
    }
    if (this.ForceSync)
    {
        this.syncPending = true;
    }
    else if (null == SyncStart)
    {
        DebugLog(this.DatasetId + " - Performing initial sync");
        this.syncPending = true;
    }
    else if (null != SyncEnd)
    {
        DateTime nextSync = SyncEnd.Value.Add(TimeSpan.FromSeconds(this.SyncConfig.SyncFrequency));
        if (DateTime.Now >= nextSync)
        {
            this.syncPending = true;
        }
    }
    if (this.syncPending)
    {
        this.ForceSync = false;
    }
    return this.syncPending;
}
/// <summary>
/// Entry point for the periodic sync poller: runs one sync loop if
/// <see cref="ShouldSync"/> says one is due. (Kept <c>async void</c> for
/// interface compatibility with existing callers.)
/// </summary>
public async void RunSyncLoop()
{
    DebugLog("Checking if sync loop should run");
    if (!this.ShouldSync())
    {
        return;
    }
    await this.StartSyncLoop();
}
/// <summary>
/// Requests a sync loop: sets the pending flag so the next poll of
/// <see cref="RunSyncLoop"/> will start a loop.
/// </summary>
public void DoSync()
{
    this.syncPending = true;
}
/// <summary>
/// Deactivates automatic syncing for this dataset.
/// </summary>
public void StopSync()
{
    if (!this.SyncConfig.SyncActive)
    {
        return;
    }
    this.SyncConfig.SyncActive = false;
}
/// <summary>
/// Activates automatic syncing for this dataset.
/// </summary>
public void StartSync()
{
    if (this.SyncConfig.SyncActive)
    {
        return;
    }
    this.SyncConfig.SyncActive = true;
}
/// <summary>
/// Returns a copy of the pending records store, so callers can inspect
/// outstanding local changes without mutating the live store.
/// </summary>
/// <returns>A clone of the pending records store.</returns>
public IDataStore<FHSyncPendingRecord<T>> GetPendingRecords()
{
    return this.pendingRecords.Clone();
}
/// <summary>
/// Raises a sync event to the registered notification handler, if any.
/// </summary>
/// <param name="uid">Uid of the affected record, or null for dataset-level events.</param>
/// <param name="code">The notification code.</param>
/// <param name="message">Optional detail message.</param>
protected virtual void OnSyncNotification(string uid, SyncNotification code, string message)
{
    var handler = this.SyncNotificationHandler;
    if (null == handler)
    {
        return;
    }
    var eventArgs = new FHSyncNotificationEventArgs
    {
        DatasetId = this.DatasetId,
        Uid = uid,
        Code = code,
        Message = message
    };
    handler(this, eventArgs);
}
/// <summary>
/// JSON payload for a "sync" cloud call. Property names are fixed by the
/// sync protocol via the JsonProperty attributes — do not rename the wire
/// names. Constructing this from a dataset has a side effect: every ready
/// pending record is flagged in-flight.
/// </summary>
public class FHSyncLoopParams
{
    /// <summary>
    /// Parameterless constructor for deserialization.
    /// </summary>
    public FHSyncLoopParams()
    {
    }
    /// <summary>
    /// Cloud function name; always "sync" for loop calls.
    /// </summary>
    [JsonProperty("fn")]
    public string FnName { get; set; }
    /// <summary>
    /// The dataset identifier.
    /// </summary>
    [JsonProperty("dataset_id")]
    public string DatasetId { get; set; }
    /// <summary>
    /// Query parameters forwarded to the cloud data handler.
    /// </summary>
    [JsonProperty("query_params")]
    public IDictionary<string, string> QueryParams { set; get; }
    /// <summary>
    /// The dataset's sync configuration. (Name "SyncConfg" is a historical
    /// typo kept for compatibility; the wire name is "config".)
    /// </summary>
    [JsonProperty("config")]
    public FHSyncConfig SyncConfg { get; set; }
    /// <summary>
    /// Per-record metadata sent alongside the sync request.
    /// </summary>
    [JsonProperty("meta")]
    public FHSyncMetaData MetaData { get; set; }
    /// <summary>
    /// The client's current global dataset hash.
    /// </summary>
    [JsonProperty("dataset_hash")]
    public string Hash { set; get; }
    /// <summary>
    /// Acknowledgements of updates processed from the previous response.
    /// </summary>
    [JsonProperty("acknowledgements")]
    public List<FHSyncResponseUpdatesData> Acknowledgements { set; get;}
    /// <summary>
    /// The pending records being submitted in this loop, serialized with
    /// their hashes.
    /// </summary>
    [JsonProperty("pending")]
    public List<JObject> Pendings { set; get; }
    /// <summary>
    /// Builds the loop payload from a dataset. Side effect: every pending
    /// record that is not already in flight, crashed or delayed is marked
    /// InFlight (with a timestamp) and included in the submission.
    /// </summary>
    /// <param name="dataset">The dataset to build the payload from.</param>
    public FHSyncLoopParams(FHSyncDataset<T> dataset)
    {
        this.FnName = "sync";
        this.DatasetId = dataset.DatasetId;
        this.QueryParams = dataset.QueryParams;
        this.SyncConfg = dataset.SyncConfig;
        this.MetaData = dataset.MetaData;
        this.Hash = dataset.HashValue;
        this.Acknowledgements = dataset.Acknowledgements;
        List<JObject> pendingRecords = new List<JObject>();
        foreach (KeyValuePair<string, FHSyncPendingRecord<T>> item in dataset.pendingRecords.List()) {
            FHSyncPendingRecord<T> record = item.Value;
            if(!record.InFlight && !record.Crashed && !record.Delayed) {
                record.InFlight = true;
                record.InFlightDate = DateTime.Now;
                pendingRecords.Add(record.AsJObjectWithHash());
            }
        }
        this.Pendings = pendingRecords;
    }
    /// <summary>
    /// Serializes this payload to its JSON string form.
    /// </summary>
    /// <returns>The JSON representation of the payload.</returns>
    public override string ToString()
    {
        return FHSyncUtils.SerializeObject(this);
    }
}
/// <summary>
/// JSON payload for a "syncRecords" cloud call: sends each local record's
/// uid and hash so the cloud can return only the deltas. Property names are
/// fixed by the sync protocol via the JsonProperty attributes.
/// </summary>
public class FHSyncRecordsParams
{
    /// <summary>
    /// Parameterless constructor for deserialization.
    /// </summary>
    public FHSyncRecordsParams()
    {
    }
    /// <summary>
    /// Cloud function name; always "syncRecords" for record calls.
    /// </summary>
    [JsonProperty("fn")]
    public string FnName { get; set; }
    /// <summary>
    /// The dataset identifier.
    /// </summary>
    [JsonProperty("dataset_id")]
    public string DatasetId { get; set; }
    /// <summary>
    /// Query parameters forwarded to the cloud data handler.
    /// </summary>
    [JsonProperty("query_params")]
    public IDictionary<string, string> QueryParams { set; get; }
    /// <summary>
    /// Map of local record uid to record hash. Intentionally non-public:
    /// the JsonProperty attribute still serializes it on the wire.
    /// </summary>
    [JsonProperty("clientRecs")]
    Dictionary<string, string> ClientRecords { set; get; }
    /// <summary>
    /// The client's current global dataset hash.
    /// </summary>
    [JsonProperty("dataset_hash")]
    public string Hash { set; get; }
    /// <summary>
    /// Builds the records payload from a dataset, snapshotting each local
    /// record's uid and hash.
    /// </summary>
    /// <param name="dataset">The dataset to build the payload from.</param>
    public FHSyncRecordsParams(FHSyncDataset<T> dataset)
    {
        this.FnName = "syncRecords";
        this.DatasetId = dataset.DatasetId;
        this.QueryParams = dataset.QueryParams;
        this.Hash = dataset.HashValue;
        Dictionary<string, string> records = new Dictionary<string, string>();
        foreach(var item in dataset.dataRecords.List()){
            records.Add(item.Value.Uid, item.Value.HashValue);
        }
        this.ClientRecords = records;
    }
}
}
/// <summary>
/// Per-record metadata store used by the sync client. Keys the outer
/// dictionary by record uid; each inner dictionary holds string key/value
/// pairs (e.g. "pendingUid", "previousPendingUid").
/// </summary>
public class FHSyncMetaData
{
    /// <summary>
    /// Creates an empty metadata store.
    /// </summary>
    public FHSyncMetaData()
    {
        this.metaData = new Dictionary<string, Dictionary<string, string>>();
    }
    /// <summary>
    /// The backing dictionary: record uid => (key => value). Public and
    /// settable so the store can round-trip through JSON serialization.
    /// </summary>
    public Dictionary<string, Dictionary<string, string>> metaData { set; get; }
    /// <summary>
    /// Returns the inner dictionary for a uid, creating (and registering)
    /// an empty one when none exists yet. Only used on write paths.
    /// </summary>
    private Dictionary<string, string> GetDict(string uid)
    {
        Dictionary<string, string> dict = null;
        if(metaData.ContainsKey(uid)){
            metaData.TryGetValue(uid, out dict);
        }
        if (null == dict)
        {
            dict = new Dictionary<string, string>();
            metaData[uid] = dict;
        }
        return dict;
    }
    /// <summary>
    /// Sets a string metadata value for a record, creating the record's
    /// entry when needed.
    /// </summary>
    /// <param name="uid">The record uid.</param>
    /// <param name="key">The metadata key.</param>
    /// <param name="value">The value to store.</param>
    public void InsertStringMetaData(string uid, string key, string value)
    {
        GetDict(uid)[key] = value;
    }
    /// <summary>
    /// Sets a boolean metadata value for a record (stored as its string
    /// form), creating the record's entry when needed.
    /// </summary>
    /// <param name="uid">The record uid.</param>
    /// <param name="key">The metadata key.</param>
    /// <param name="value">The value to store.</param>
    public void InsertBoolMetaData(string uid, string key, bool value)
    {
        GetDict(uid)[key] = value.ToString();
    }
    /// <summary>
    /// Gets a string metadata value for a record.
    /// </summary>
    /// <param name="uid">The record uid.</param>
    /// <param name="key">The metadata key.</param>
    /// <returns>The stored value, or null when the uid or key is unknown.</returns>
    public string GetMetaDataAsString(string uid, string key)
    {
        Dictionary<string, string> dict;
        if (metaData.TryGetValue(uid, out dict))
        {
            string value = null;
            dict.TryGetValue(key, out value);
            return value;
        }
        return null;
    }
    /// <summary>
    /// Gets a boolean metadata value for a record.
    /// </summary>
    /// <param name="uid">The record uid.</param>
    /// <param name="key">The metadata key.</param>
    /// <returns>The stored value, or false when the uid or key is unknown.</returns>
    public bool GetMetaDataAsBool(string uid, string key)
    {
        string val = GetMetaDataAsString(uid, key);
        if (null != val)
        {
            return Boolean.Parse(val);
        }
        else
        {
            return false;
        }
    }
    /// <summary>
    /// The uids that currently have metadata entries.
    /// </summary>
    [JsonIgnore]
    public Dictionary<string, Dictionary<string, string>>.KeyCollection Keys
    {
        get {
            return this.metaData.Keys;
        }
    }
    /// <summary>
    /// Deletes one metadata key for a record. No-op when the uid or key is
    /// unknown.
    /// BUGFIX: previously this went through GetDict, which *created* an
    /// empty entry for an unknown uid as a side effect of the delete; that
    /// phantom uid then showed up in <see cref="Keys"/>.
    /// </summary>
    /// <param name="uid">The record uid.</param>
    /// <param name="key">The metadata key to remove.</param>
    public void DeleteMetaData(string uid, string key)
    {
        Dictionary<string, string> dict;
        if (metaData.TryGetValue(uid, out dict))
        {
            dict.Remove(key);
        }
    }
    /// <summary>
    /// Deletes all metadata for a record. No-op when the uid is unknown.
    /// </summary>
    /// <param name="uid">The record uid.</param>
    public void Delete(string uid)
    {
        metaData.Remove(uid);
    }
}
/// <summary>
/// Parsed response of a "sync" cloud call: the full (or partial) record
/// set, the per-record update verdicts grouped by category ("applied",
/// "failed", "collisions", plus a "hashes" index), and the cloud's global
/// dataset hash.
/// </summary>
/// <typeparam name="T">The synchronized model type.</typeparam>
public class FHSyncResponseData<T> where T : IFHSyncModel
{
    /// <summary>
    /// Parameterless constructor for deserialization.
    /// </summary>
    public FHSyncResponseData()
    {
    }
    /// <summary>
    /// Records returned by the cloud, keyed by uid.
    /// </summary>
    [JsonProperty("records")]
    public Dictionary<string, FHSyncDataRecord<T>> Records { set; get; }
    /// <summary>
    /// Update verdicts grouped by category, then keyed by record hash.
    /// </summary>
    [JsonProperty("updates")]
    public Dictionary<string, Dictionary<string, FHSyncResponseUpdatesData>> Updates { set; get; }
    /// <summary>
    /// The cloud's global dataset hash.
    /// </summary>
    [JsonProperty("hash")]
    public string Hash { set; get; }
    /// <summary>
    /// Looks up an "applied" verdict by record hash.
    /// </summary>
    /// <param name="key">The record hash to look up.</param>
    /// <returns>The applied update, or null when none exists.</returns>
    public FHSyncResponseUpdatesData GetAppliedUpdates(string key)
    {
        if (null == this.Updates || 0 == this.Updates.Count)
        {
            return null;
        }
        Dictionary<string, FHSyncResponseUpdatesData> appliedGroup;
        if (!this.Updates.TryGetValue("applied", out appliedGroup))
        {
            return null;
        }
        FHSyncResponseUpdatesData applied;
        return appliedGroup.TryGetValue(key, out applied) ? applied : null;
    }
    /// <summary>
    /// Looks up a returned record by uid.
    /// </summary>
    /// <param name="key">The record uid.</param>
    /// <returns>The record, or null when none exists.</returns>
    public FHSyncDataRecord<T> GetRemoteRecord(string key)
    {
        if (null == this.Records || 0 == this.Records.Count)
        {
            return null;
        }
        FHSyncDataRecord<T> record;
        return this.Records.TryGetValue(key, out record) ? record : null;
    }
    /// <summary>
    /// Looks up any verdict (regardless of category) by record hash via the
    /// "hashes" index.
    /// </summary>
    /// <param name="hash">The record hash.</param>
    /// <returns>The verdict, or null when none exists.</returns>
    public FHSyncResponseUpdatesData GetUpdateByHash(string hash)
    {
        if (null == this.Updates)
        {
            return null;
        }
        Dictionary<string, FHSyncResponseUpdatesData> hashIndex;
        if (!this.Updates.TryGetValue("hashes", out hashIndex))
        {
            return null;
        }
        FHSyncResponseUpdatesData update;
        return hashIndex.TryGetValue(hash, out update) ? update : null;
    }
}
/// <summary>
/// One update verdict from the cloud in a sync response: which record (uid,
/// hash, cuid), what was attempted (action), and the outcome (type, plus an
/// optional message). Wire names are fixed by the JsonProperty attributes.
/// </summary>
public class FHSyncResponseUpdatesData
{
    /// <summary>
    /// Parameterless constructor for deserialization.
    /// </summary>
    public FHSyncResponseUpdatesData()
    {
    }
    /// <summary>
    /// Outcome of an update: applied, failed, or in collision.
    /// </summary>
    public enum FHSyncResponseUpdatesDataType
    {
        applied,
        failed,
        collisions
    }
    /// <summary>
    /// The client unique id that submitted the change.
    /// </summary>
    [JsonProperty("cuid")]
    public string Cuid { set; get; }
    /// <summary>
    /// The outcome category (serialized as its enum name).
    /// </summary>
    [JsonProperty("type")]
    [JsonConverter(typeof(StringEnumConverter))]
    public FHSyncResponseUpdatesDataType Type { set; get; }
    /// <summary>
    /// The attempted action: "create", "update" or "delete".
    /// </summary>
    [JsonProperty("action")]
    public string Action { set; get; }
    /// <summary>
    /// Hash of the pending record this verdict refers to.
    /// </summary>
    [JsonProperty("hash")]
    public string Hash { set; get; }
    /// <summary>
    /// Uid of the affected record (the cloud-assigned uid for creates).
    /// </summary>
    [JsonProperty("uid")]
    public string Uid { set; get; }
    /// <summary>
    /// Optional detail message from the cloud.
    /// </summary>
    [JsonProperty("message")]
    public string Message { set; get; }
}
/// <summary>
/// Response received from a syncRecords call: the full remote record delta,
/// split into the create/update/delete actions the client must apply.
/// </summary>
/// <typeparam name="T">The synced model type.</typeparam>
public class FHSyncRecordsResponseData<T> where T: IFHSyncModel
{
    /// <summary>
    /// Constructor.
    /// </summary>
    public FHSyncRecordsResponseData()
    {
    }

    /// <summary>
    /// Hash.
    /// </summary>
    [JsonProperty("hash")]
    public string Hash { set; get; }

    /// <summary>
    /// Records requiring a local "create" action, keyed by record key.
    /// </summary>
    [JsonProperty("create")]
    public Dictionary<string, FHSyncDataRecord<T>> CreatedRecords { set; get; }

    /// <summary>
    /// Records requiring a local "update" action, keyed by record key.
    /// </summary>
    [JsonProperty("update")]
    public Dictionary<string, FHSyncDataRecord<T>> UpdatedRecords { set; get; }

    /// <summary>
    /// Records requiring a local "delete" action, keyed by record key.
    /// </summary>
    [JsonProperty("delete")]
    public Dictionary<string, FHSyncDataRecord<T>> DeletedRecords { set; get; }
}
}
| 42.527847 | 239 | 0.515387 |
2fb4e2d871819b3c8343ec0beb98ef31b9d2ee82 | 547 | py | Python | contrib/tornado/test/curl_httpclient_test.py | loggly/alertbirds-community-edition | b35f0ffbe80049dfa74d79e9e45b4cce4cdbf47a | [
"Apache-2.0"
] | 2 | 2015-10-28T23:14:47.000Z | 2015-11-27T18:00:12.000Z | tornado/test/curl_httpclient_test.py | joetyson/tornado | 02ce53b1fd8b4acc4721e6616b73d11bf6c6a4fb | [
"Apache-2.0"
] | null | null | null | tornado/test/curl_httpclient_test.py | joetyson/tornado | 02ce53b1fd8b4acc4721e6616b73d11bf6c6a4fb | [
"Apache-2.0"
from tornado.test.httpclient_test import HTTPClientCommonTestCase

# pycurl is an optional dependency: fall back to None when it is missing so
# this module stays importable and the curl-backed tests are simply removed.
try:
    import pycurl
except ImportError:
    pycurl = None

if pycurl is not None:
    from tornado.curl_httpclient import CurlAsyncHTTPClient

# Re-runs the common HTTP client test suite against the curl backend.  The
# method body only executes when pycurl is present (the class is deleted
# below otherwise), so referencing CurlAsyncHTTPClient here is safe.
class CurlHTTPClientCommonTestCase(HTTPClientCommonTestCase):
    def get_http_client(self):
        return CurlAsyncHTTPClient(io_loop=self.io_loop)

# Remove the base class from our namespace so the unittest module doesn't
# try to run it again.
del HTTPClientCommonTestCase
# Without pycurl the curl test case cannot run either; hide it from
# unittest discovery as well.
if pycurl is None:
    del CurlHTTPClientCommonTestCase
4376a7f943138627d4508c1123ab3d7c99af91c6 | 472 | ts | TypeScript | server/api/tasks/controller.ts | huideyeren/iosifWeb | 7be7b35788d0a30084581befde28df617aa9474d | [
"MIT"
] | null | null | null | server/api/tasks/controller.ts | huideyeren/iosifWeb | 7be7b35788d0a30084581befde28df617aa9474d | [
"MIT"
] | null | null | null | server/api/tasks/controller.ts | huideyeren/iosifWeb | 7be7b35788d0a30084581befde28df617aa9474d | [
"MIT"
] | null | null | null | import { defineController } from './$relay'
import { getTasks, createTask } from '$/service/tasks'
// Console logging helper, injected alongside the service functions.
const print = (text: string) => {
  console.log(text)
}

export default defineController({ getTasks, print }, ({ getTasks, print }) => ({
  get: async ({ query }) => {
    const message = query?.message
    if (message) {
      print(message)
    }
    const body = await getTasks(query?.limit)
    return { status: 200, body }
  },
  post: async ({ body }) => {
    const created = await createTask(body.label)
    return { status: 201, body: created }
  }
}))
| 27.764706 | 80 | 0.627119 |
c42540e5c27ad1cc5aea128573f1731dc0b40771 | 3,385 | cc | C++ | stig/rpc/msg.cc | ctidder/stigdb | d9ef3eb117d46542745ca98c55df13ec71447091 | [
"Apache-2.0"
] | 5 | 2018-04-24T12:36:50.000Z | 2020-03-25T00:37:17.000Z | stig/rpc/msg.cc | ctidder/stigdb | d9ef3eb117d46542745ca98c55df13ec71447091 | [
"Apache-2.0"
] | null | null | null | stig/rpc/msg.cc | ctidder/stigdb | d9ef3eb117d46542745ca98c55df13ec71447091 | [
"Apache-2.0"
] | 2 | 2018-04-24T12:39:24.000Z | 2020-03-25T00:45:08.000Z | /* <stig/rpc/msg.cc>
Implements <stig/rpc/msg.h>.
Copyright 2010-2014 Tagged
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include <stig/rpc/msg.h>
#include <stdexcept>
#include <io/endian.h>
using namespace std;
using namespace Io;
using namespace Stig::Rpc;
/* Serialize this message onto the given socket as a scatter/gather write:
   [kind (1 byte)] [request id, NBO] [blob byte count, NBO] [blob blocks...].
   NOTE: the iovecs point at the locals nbo_req_id and nbo_byte_count below;
   that is safe only because xver->Send() completes before this function
   returns. */
void TMsg::Send(TSafeTransceiver *xver, int sock_fd) {
  assert(this);
  assert(xver);
  /* Measure up our blob. */
  size_t block_count, byte_count;
  Blob.GetCounts(block_count, byte_count);
  block_count += 3;
  /* We'll need to send the kind, the request id, and the byte count as well as
     the blocks in the blob itself, so order up some iovecs. */
  auto
      *vecs = xver->GetIoVecs(block_count),
      *limit = vecs + block_count;
  /* The first iovec holds the kind.
     That's just one byte, so we don't need to worry about network byte order. */
  vecs->iov_base = &Kind;
  vecs->iov_len = sizeof(Kind);
  ++vecs;
  /* The second iovec holds the request id, in NBO. */
  auto nbo_req_id = SwapEnds(ReqId);
  vecs->iov_base = &nbo_req_id;
  vecs->iov_len = sizeof(nbo_req_id);
  ++vecs;
  /* The third iovec holds the number of bytes in the blob, in NBO. */
  auto nbo_byte_count = SwapEnds(static_cast<TByteCount>(byte_count));
  vecs->iov_base = &nbo_byte_count;
  vecs->iov_len = sizeof(nbo_byte_count);
  ++vecs;
  /* The rest of the iovecs hold the blob's data blocks.  Fill them in and fire away. */
  Blob.InitIoVecs(vecs, limit);
  xver->Send(sock_fd);
}
/* Receive one message from the given socket: first the fixed-size header
   (kind, request id, byte count — the latter two arriving in network byte
   order), then the blob payload into freshly allocated buffer space.
   Throws std::invalid_argument if the kind byte is not a known TKind. */
TMsg TMsg::Recv(TBufferPool *buffer_pool, TSafeTransceiver *xver, int sock_fd) {
  assert(buffer_pool);
  assert(xver);
  /* We'll start by receiving a kind, a request id, and the number of bytes in the incoming blob. */
  TKind kind;
  TReqId req_id;
  TByteCount byte_count;
  auto *vecs = xver->GetIoVecs(3);
  vecs->iov_base = &kind;
  vecs->iov_len = sizeof(kind);
  ++vecs;
  vecs->iov_base = &req_id;
  vecs->iov_len = sizeof(req_id);
  ++vecs;
  vecs->iov_base = &byte_count;
  vecs->iov_len = sizeof(byte_count);
  xver->Recv(sock_fd);
  /* The request id and the byte count arrived in network byte order, so it's time to do the endian dance. */
  req_id = SwapEnds(req_id);
  byte_count = SwapEnds(byte_count);
  /* Check the kind we received to make sure it's valid. */
  switch (kind) {
    case TKind::Request:
    case TKind::NormalReply:
    case TKind::ErrorReply: {
      break;
    }
    default: {
      throw invalid_argument("Stig RPC message arrived with unknown kind");
    }
  }
  /* Make a buffer space large enough, then receive the blob into it. */
  size_t block_count = (byte_count + BlockSize - 1) / BlockSize;
  TBlob blob(buffer_pool, byte_count);
  vecs = xver->GetIoVecs(block_count);
  auto *limit = vecs + block_count;
  blob.InitIoVecs(vecs, limit);
  xver->Recv(sock_fd);
  /* Construct a new message and return it directly: wrapping the temporary
     in move() was a pessimizing move that blocked copy elision (RVO). */
  return TMsg(kind, req_id, move(blob));
}
| 32.864078 | 109 | 0.690694 |
e28f815e05920f26f0bd6064ce5648e50bec33d2 | 533 | py | Python | materials/Au.py | zhangzheng1023/PyPhysics | c06f632144e21f5edd9653d95e566d532efbc937 | [
"MIT"
] | null | null | null | materials/Au.py | zhangzheng1023/PyPhysics | c06f632144e21f5edd9653d95e566d532efbc937 | [
"MIT"
] | null | null | null | materials/Au.py | zhangzheng1023/PyPhysics | c06f632144e21f5edd9653d95e566d532efbc937 | [
"MIT"
] | null | null | null | # coding=utf-8
import basic
import matplotlib.pyplot as plt
def au_dielectric_complex(lambda_0):
    """Return the complex relative permittivity of gold (Drude-type model).

    Parameters
    ----------
    lambda_0 : float
        Free-space wavelength in metres.

    Returns
    -------
    complex
        The relative dielectric function epsilon(lambda_0).
    """
    # Model constants, in metres: collision and plasma wavelengths of gold.
    collision_wavelength = 8.9342e-6
    plasma_wavelength = 1.6826e-7
    numerator = lambda_0**2 * collision_wavelength
    denominator = plasma_wavelength**2 * (collision_wavelength + complex(0, lambda_0))
    return 1 - numerator / denominator
# Visualise the real part of the refractive index of gold (Au) over
# 400-900 nm, sampled every 0.1 nm.
x = []
y = []
# Use an integer loop counter instead of repeatedly adding 0.1e-9 to a float:
# the accumulated rounding error of ~5000 additions shifts the sample grid
# and makes the inclusion of the 900 nm endpoint unreliable.
for step in range(5001):
    wavelength = 400e-9 + step * 0.1e-9
    x.append(wavelength * 1e9)  # plot axis in nanometres
    y.append(basic.convert_dielectric_refractive(au_dielectric_complex(wavelength)).real)
plt.plot(x, y)
plt.show()
7c829ae2b11e47b7a3c1e1998d8a816e91d550a5 | 807 | swift | Swift | QiitaKit/Sources/UI/LoadingFooterView.swift | masashi-sutou/QiitaKitForSample | 1efa35eebe3735d5857c73023c724037bbfe7853 | [
"MIT"
] | 1 | 2020-04-03T00:36:50.000Z | 2020-04-03T00:36:50.000Z | QiitaKit/Sources/UI/LoadingFooterView.swift | masashi-sutou/QiitaKitForSample | 1efa35eebe3735d5857c73023c724037bbfe7853 | [
"MIT"
] | null | null | null | QiitaKit/Sources/UI/LoadingFooterView.swift | masashi-sutou/QiitaKitForSample | 1efa35eebe3735d5857c73023c724037bbfe7853 | [
"MIT"
] | null | null | null | //
// LoadingFooterView.swift
// QiitaKit
//
// Created by 須藤将史 on 2017/11/23.
// Copyright © 2017年 須藤将史. All rights reserved.
//
import UIKit
/// Table footer that shows an activity spinner while more content loads.
final public class LoadingFooterView: UITableViewHeaderFooterView, Nibable {

    public static let defaultHeight: CGFloat = 44

    @IBOutlet weak var activityIndicator: UIActivityIndicatorView!

    /// Toggles the spinner; the UI update is always dispatched to the
    /// main queue so the property may be set from any thread.
    public var isLoading: Bool = false {
        didSet {
            DispatchQueue.main.async { [weak self] in
                guard let strongSelf = self else { return }
                let indicator = strongSelf.activityIndicator
                let loading = strongSelf.isLoading
                indicator?.isHidden = !loading
                if loading {
                    indicator?.startAnimating()
                } else {
                    indicator?.stopAnimating()
                }
            }
        }
    }
}
7489b4c4ba9e53aed3a3a7e068aa7f1e27348224 | 15,659 | dart | Dart | lib/solitaire/game_screen.dart | lepak-xyz/Flutter-Games | 84185c988f96683b0a13816b602afcab75859ce5 | [
"MIT"
] | null | null | null | lib/solitaire/game_screen.dart | lepak-xyz/Flutter-Games | 84185c988f96683b0a13816b602afcab75859ce5 | [
"MIT"
] | null | null | null | lib/solitaire/game_screen.dart | lepak-xyz/Flutter-Games | 84185c988f96683b0a13816b602afcab75859ce5 | [
"MIT"
] | null | null | null | import 'dart:math';
import 'package:flutter/material.dart';
import 'package:flutter_games/solitaire/card_column.dart';
import 'package:flutter_games/solitaire/empty_card.dart';
import 'package:flutter_games/solitaire/playing_card.dart';
import 'package:flutter_games/solitaire/transformed_card.dart';
/// Root widget of the solitaire game; all game state lives in
/// [_GameScreenState].
class GameScreen extends StatefulWidget {
  @override
  _GameScreenState createState() {
    return _GameScreenState();
  }
}
class _GameScreenState extends State<GameScreen> {
  // Tableau columns 1-7; _cardColumns[i] backs the column with index i + 1.
  // Replaces seven individually named cardColumnN fields.
  List<List<PlayingCard>> _cardColumns = List.generate(7, (_) => <PlayingCard>[]);

  // Stores the card deck (stock): the face-down pile and the face-up discard.
  List<PlayingCard> cardDeckClosed = [];
  List<PlayingCard> cardDeckOpened = [];

  // Stores the cards in the four upper (foundation) boxes, one per suit.
  List<PlayingCard> finalHeartsDeck = [];
  List<PlayingCard> finalDiamondsDeck = [];
  List<PlayingCard> finalSpadesDeck = [];
  List<PlayingCard> finalClubsDeck = [];

  @override
  void initState() {
    super.initState();
    _initialiseGame();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      backgroundColor: Colors.green,
      appBar: AppBar(
        title: Text("Flutter Solitaire"),
        elevation: 0.0,
        backgroundColor: Colors.green,
        actions: <Widget>[
          InkWell(
            child: Padding(
              padding: const EdgeInsets.all(8.0),
              child: Icon(
                Icons.refresh,
                color: Colors.white,
              ),
            ),
            splashColor: Colors.white,
            onTap: () {
              _initialiseGame();
            },
          )
        ],
      ),
      body: Column(
        children: <Widget>[
          Row(
            mainAxisAlignment: MainAxisAlignment.spaceEvenly,
            children: <Widget>[
              _buildCardDeck(),
              _buildFinalDecks(),
            ],
          ),
          SizedBox(
            height: 16.0,
          ),
          Row(
            mainAxisAlignment: MainAxisAlignment.spaceEvenly,
            // One drop target per tableau column; this replaces seven
            // copy-pasted CardColumn blocks that differed only in index.
            children: List.generate(
              7,
              (int i) => Expanded(child: _buildCardColumn(i + 1)),
            ),
          ),
        ],
      ),
    );
  }

  /// Builds the widget for tableau column [columnIndex] (1-7). Cards dropped
  /// here are appended to this column and removed from their source pile.
  Widget _buildCardColumn(int columnIndex) {
    return CardColumn(
      cards: _getListFromIndex(columnIndex),
      columnIndex: columnIndex,
      onCardsAdded: (cards, index) {
        setState(() {
          _getListFromIndex(columnIndex).addAll(cards);
          // Remove the moved cards from the source pile and flip the card
          // newly exposed there.
          int length = _getListFromIndex(index).length;
          _getListFromIndex(index).removeRange(length - cards.length, length);
          _refreshList(index);
        });
      },
    );
  }

  /// Builds the stock: the closed pile (tap to draw, or to recycle the
  /// discard pile when empty) and the open discard pile next to it.
  Widget _buildCardDeck() {
    return Container(
      child: Row(
        children: <Widget>[
          InkWell(
            child: cardDeckClosed.isNotEmpty
                ? Padding(
                    padding: const EdgeInsets.all(4.0),
                    child: TransformedCard(
                      playingCard: cardDeckClosed.last,
                    ),
                  )
                // Empty stock: show a dimmed placeholder card.
                : Opacity(
                    opacity: 0.4,
                    child: Padding(
                      padding: const EdgeInsets.all(4.0),
                      child: TransformedCard(
                        playingCard: PlayingCard(
                          cardSuit: CardSuit.diamonds,
                          cardType: CardType.five,
                        ),
                      ),
                    ),
                  ),
            onTap: () {
              setState(() {
                if (cardDeckClosed.isEmpty) {
                  // Recycle the discard pile back into the face-down stock.
                  cardDeckClosed.addAll(cardDeckOpened.map((card) {
                    return card
                      ..opened = false
                      ..faceUp = false;
                  }));
                  cardDeckOpened.clear();
                } else {
                  // Draw the top stock card onto the discard pile.
                  cardDeckOpened.add(
                    cardDeckClosed.removeLast()
                      ..faceUp = true
                      ..opened = true,
                  );
                }
              });
            },
          ),
          cardDeckOpened.isNotEmpty
              ? Padding(
                  padding: const EdgeInsets.all(4.0),
                  child: TransformedCard(
                    playingCard: cardDeckOpened.last,
                    attachedCards: [
                      cardDeckOpened.last,
                    ],
                    columnIndex: 0,
                  ),
                )
              : Container(
                  width: 40.0,
                ),
        ],
      ),
    );
  }

  /// Builds one foundation slot for [suit], backed by [deck] and addressed
  /// by pile index [columnIndex] (8-11).
  Widget _buildFinalDeck(CardSuit suit, List<PlayingCard> deck, int columnIndex) {
    return Padding(
      padding: const EdgeInsets.all(4.0),
      child: EmptyCardDeck(
        cardSuit: suit,
        cardsAdded: deck,
        columnIndex: columnIndex,
        onCardAdded: (cards, index) {
          deck.addAll(cards);
          int length = _getListFromIndex(index).length;
          _getListFromIndex(index).removeRange(length - cards.length, length);
          // _refreshList triggers setState itself, matching the previous
          // behaviour of these callbacks.
          _refreshList(index);
        },
      ),
    );
  }

  /// Builds the four foundation decks; previously four copy-pasted blocks.
  Widget _buildFinalDecks() {
    return Container(
      child: Row(
        children: <Widget>[
          _buildFinalDeck(CardSuit.hearts, finalHeartsDeck, 8),
          _buildFinalDeck(CardSuit.diamonds, finalDiamondsDeck, 9),
          _buildFinalDeck(CardSuit.spades, finalSpadesDeck, 10),
          _buildFinalDeck(CardSuit.clubs, finalClubsDeck, 11),
        ],
      ),
    );
  }

  /// Deals a fresh game: column n (1-7) receives n randomly drawn cards with
  /// the last one face up; the remaining cards become the stock, with one
  /// card turned over onto the discard pile.
  void _initialiseGame() {
    _cardColumns = List.generate(7, (_) => <PlayingCard>[]);
    cardDeckClosed = [];
    cardDeckOpened = [];
    finalHeartsDeck = [];
    finalDiamondsDeck = [];
    finalSpadesDeck = [];
    finalClubsDeck = [];

    // Build the full 52-card deck, face down.
    List<PlayingCard> allCards = [];
    CardSuit.values.forEach((suit) {
      CardType.values.forEach((type) {
        allCards.add(PlayingCard(
          cardType: type,
          cardSuit: suit,
          faceUp: false,
        ));
      });
    });

    // Deal by drawing random cards; this replaces the previous 28-iteration
    // if/else ladder with hard-coded index ranges.
    Random random = Random();
    for (int column = 1; column <= 7; column++) {
      for (int position = 0; position < column; position++) {
        PlayingCard card = allCards.removeAt(random.nextInt(allCards.length));
        if (position == column - 1) {
          // The top card of each column starts face up.
          card
            ..opened = true
            ..faceUp = true;
        }
        _cardColumns[column - 1].add(card);
      }
    }

    cardDeckClosed = allCards;
    cardDeckOpened.add(
      cardDeckClosed.removeLast()
        ..opened = true
        ..faceUp = true,
    );
    setState(() {});
  }

  /// Flips the new top card of pile [index] (if any) and shows the win
  /// dialog once all 52 cards sit on the foundations.
  void _refreshList(int index) {
    if (finalDiamondsDeck.length +
            finalHeartsDeck.length +
            finalClubsDeck.length +
            finalSpadesDeck.length ==
        52) {
      _handleWin();
    }
    setState(() {
      List<PlayingCard> cards = _getListFromIndex(index);
      if (cards.length != 0) {
        cards[cards.length - 1]
          ..opened = true
          ..faceUp = true;
      }
    });
  }

  /// Shows the win dialog and offers to start a new game.
  void _handleWin() {
    showDialog(
      context: context,
      builder: (context) {
        return AlertDialog(
          title: Text("Congratulations!"),
          content: Text("You Win!"),
          actions: <Widget>[
            FlatButton(
              onPressed: () {
                _initialiseGame();
                Navigator.pop(context);
              },
              child: Text("Play again"),
            ),
          ],
        );
      },
    );
  }

  /// Maps a pile index to its backing list: 0 = open discard pile,
  /// 1-7 = tableau columns, 8-11 = foundations (hearts, diamonds, spades,
  /// clubs). Returns null for any other index, as before.
  List<PlayingCard> _getListFromIndex(int index) {
    switch (index) {
      case 0:
        return cardDeckOpened;
      case 8:
        return finalHeartsDeck;
      case 9:
        return finalDiamondsDeck;
      case 10:
        return finalSpadesDeck;
      case 11:
        return finalClubsDeck;
      default:
        if (index >= 1 && index <= 7) {
          return _cardColumns[index - 1];
        }
        return null;
    }
  }
}
| 29.769962 | 70 | 0.456351 |
e1bd0d9ce6d8461ee82dc741593ffee23a0dea80 | 4,612 | ps1 | PowerShell | DomainManagement/functions/passwordpolicies/Register-DMPasswordPolicy.ps1 | WillyMoselhy/DomainManagement | e9b2fff30e7c7b66d9057389909447180a0fb634 | [
"MIT"
] | 6 | 2020-02-24T12:34:47.000Z | 2020-08-25T08:48:16.000Z | DomainManagement/functions/passwordpolicies/Register-DMPasswordPolicy.ps1 | WillyMoselhy/DomainManagement | e9b2fff30e7c7b66d9057389909447180a0fb634 | [
"MIT"
] | 22 | 2020-02-03T15:37:16.000Z | 2021-03-15T07:33:48.000Z | DomainManagement/functions/passwordpolicies/Register-DMPasswordPolicy.ps1 | WillyMoselhy/DomainManagement | e9b2fff30e7c7b66d9057389909447180a0fb634 | [
"MIT"
function Register-DMPasswordPolicy
{
	<#
	.SYNOPSIS
		Register a new Finegrained Password Policy as the desired state.
	
	.DESCRIPTION
		Register a new Finegrained Password Policy as the desired state.
		These policies are then compared to the current state in a domain.
	
	.PARAMETER Name
		The name of the PSO.
	
	.PARAMETER DisplayName
		The display name of the PSO.
	
	.PARAMETER Description
		The description for the PSO.
	
	.PARAMETER Precedence
		The precedence rating of the PSO.
		The lower the precedence number, the higher the priority.
	
	.PARAMETER MinPasswordLength
		The minimum number of characters a password must have.
	
	.PARAMETER SubjectGroup
		The group that the PSO should be assigned to.
	
	.PARAMETER LockoutThreshold
		How many bad password entries will lead to account lockout?
	
	.PARAMETER MaxPasswordAge
		The maximum age a password may have before it must be changed.
	
	.PARAMETER ComplexityEnabled
		Whether complexity rules are applied to users affected by this policy.
		By default, complexity rules requires 3 out of: "Lowercase letter", "Uppercase letter", "number", "special character".
		However, custom password filters may lead to very different validation rules.
	
	.PARAMETER LockoutDuration
		If the account is being locked out, how long will the lockout last.
	
	.PARAMETER LockoutObservationWindow
		What is the time window before the bad password count is being reset.
	
	.PARAMETER MinPasswordAge
		How soon may a password be changed again after updating the password.
	
	.PARAMETER PasswordHistoryCount
		How many passwords are kept in memory to prevent going back to a previous password.
	
	.PARAMETER ReversibleEncryptionEnabled
		Whether the password should be stored in a manner that allows it to be decrypted into cleartext.
		By default, only un-reversible hashes are being stored.
	
	.PARAMETER SubjectDomain
		The domain the group is part of.
		Defaults to the target domain.
	
	.PARAMETER Present
		Whether the PSO should exist.
		Defaults to $true.
		If this is set to $false, no PSO will be created, instead the PSO will be removed if it exists.
	
	.EXAMPLE
		PS C:\> Get-Content $configPath | ConvertFrom-Json | Write-Output | Register-DMPasswordPolicy
	
		Imports all the configured policies from the defined config json file.
	#>
	[CmdletBinding()]
	param (
		[Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]
		[string]
		$Name,
		
		[Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]
		[string]
		$DisplayName,
		
		[Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]
		[string]
		$Description,
		
		[Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]
		[int]
		$Precedence,
		
		[Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]
		[int]
		$MinPasswordLength,
		
		[Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]
		[string[]]
		$SubjectGroup,
		
		[Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]
		[int]
		$LockoutThreshold,
		
		[Parameter(Mandatory = $true, ValueFromPipelineByPropertyName = $true)]
		[PSFTimespan]
		$MaxPasswordAge,
		
		[Parameter(ValueFromPipelineByPropertyName = $true)]
		[bool]
		$ComplexityEnabled = $true,
		
		[Parameter(ValueFromPipelineByPropertyName = $true)]
		[PSFTimespan]
		$LockoutDuration = '1h',
		
		[Parameter(ValueFromPipelineByPropertyName = $true)]
		[PSFTimespan]
		$LockoutObservationWindow = '1h',
		
		[Parameter(ValueFromPipelineByPropertyName = $true)]
		[PSFTimespan]
		$MinPasswordAge = '30m',
		
		[Parameter(ValueFromPipelineByPropertyName = $true)]
		[int]
		$PasswordHistoryCount = 24,
		
		[Parameter(ValueFromPipelineByPropertyName = $true)]
		[bool]
		$ReversibleEncryptionEnabled = $false,
		
		[Parameter(ValueFromPipelineByPropertyName = $true)]
		[string]
		$SubjectDomain = '%DomainFqdn%',
		
		[bool]
		$Present = $true
	)
	
	process
	{
		# Policies are stored module-wide, keyed by name: registering the same
		# name again simply overwrites the previous definition.
		# The .Value accessor unwraps the PSFTimespan parameters to their
		# underlying timespan value.
		$script:passwordPolicies[$Name] = [PSCustomObject]@{
			PSTypeName = 'DomainManagement.PasswordPolicy'
			Name = $Name
			Precedence = $Precedence
			ComplexityEnabled = $ComplexityEnabled
			LockoutDuration = $LockoutDuration.Value
			LockoutObservationWindow = $LockoutObservationWindow.Value
			LockoutThreshold = $LockoutThreshold
			MaxPasswordAge = $MaxPasswordAge.Value
			MinPasswordAge = $MinPasswordAge.Value
			MinPasswordLength = $MinPasswordLength
			DisplayName = $DisplayName
			Description = $Description
			PasswordHistoryCount = $PasswordHistoryCount
			ReversibleEncryptionEnabled = $ReversibleEncryptionEnabled
			SubjectDomain = $SubjectDomain
			SubjectGroup = $SubjectGroup
			Present = $Present
		}
	}
}
| 28.645963 | 120 | 0.754553 |
76f2e862f3c8a31b6cac1d4d62cdd0a2bbd9a42e | 436 | c | C | linux-2.6.0/drivers/media/common/saa7146_vv_ksyms.c | dnhua/Linux_study | 96863b599cbba9c925b3209bed07b1d7b60cb463 | [
"MIT"
] | 1 | 2020-11-10T12:47:02.000Z | 2020-11-10T12:47:02.000Z | linux-2.6.0/drivers/media/common/saa7146_vv_ksyms.c | dnhua/Linux_study | 96863b599cbba9c925b3209bed07b1d7b60cb463 | [
"MIT"
] | null | null | null | linux-2.6.0/drivers/media/common/saa7146_vv_ksyms.c | dnhua/Linux_study | 96863b599cbba9c925b3209bed07b1d7b60cb463 | [
"MIT"
] | null | null | null | #include <linux/module.h>
#include <media/saa7146_vv.h>
/*
 * Export the public entry points of the saa7146 video/VBI helper module
 * (GPL-only) so that the saa7146-based card driver modules can link
 * against them.
 */
EXPORT_SYMBOL_GPL(saa7146_vbi_uops);
EXPORT_SYMBOL_GPL(saa7146_video_uops);
EXPORT_SYMBOL_GPL(saa7146_start_preview);
EXPORT_SYMBOL_GPL(saa7146_stop_preview);
EXPORT_SYMBOL_GPL(saa7146_set_hps_source_and_sync);
EXPORT_SYMBOL_GPL(saa7146_register_device);
EXPORT_SYMBOL_GPL(saa7146_unregister_device);
EXPORT_SYMBOL_GPL(saa7146_vv_init);
EXPORT_SYMBOL_GPL(saa7146_vv_release);
| 27.25 | 51 | 0.876147 |
0dab6a11c0d89d6eb3153b6d95c8cae9e117a816 | 992 | rb | Ruby | db/migrate/20210219061358_create_colors.rb | cpcsacggc/barbershopHQ | 6c67125d89ef638915cf4a95bee68f08719cf4cd | [
"MIT"
] | null | null | null | db/migrate/20210219061358_create_colors.rb | cpcsacggc/barbershopHQ | 6c67125d89ef638915cf4a95bee68f08719cf4cd | [
"MIT"
] | null | null | null | db/migrate/20210219061358_create_colors.rb | cpcsacggc/barbershopHQ | 6c67125d89ef638915cf4a95bee68f08719cf4cd | [
"MIT"
# Creates the colors lookup table and seeds it with the default palette.
class CreateColors < ActiveRecord::Migration[6.1]
  def change
    create_table :colors do |t|
      t.text :name
      t.text :value

      t.timestamps
    end

    # Seed the default palette in the same order as before.
    [
      ['Black',       '#000000'],
      ['SaddleBrown', '#8b4513'],
      ['Orange',      '#ffa500'],
      ['Green',       '#008000'],
      ['Dark blue',   '#00008B'],
      ['Blue',        '#0000ff'],
      ['Turquoise',   '#40e0d0'],
      ['Light green', '#90ee90'],
      ['Dark green',  '#006400'],
      ['Yellow',      '#ffff00'],
      ['Red',         '#ff0000'],
      ['Dark red',    '#8b0000'],
      ['Purple',      '#800080'],
      ['Gray',        '#e1e1e1'],
    ].each do |name, value|
      Color.create(name: name, value: value)
    end
  end
end
e2f526138b8678858f8ed7c6cd655bed425ce0eb | 138 | py | Python | hindi_numbers_info/biggerNumbers.py | gvvishwanath/numbers-to-words | 16e016a8b135bf1ba4791e2426ce9d40d3bfd8ad | [
"MIT"
] | 1 | 2020-07-03T08:03:28.000Z | 2020-07-03T08:03:28.000Z | hindi_numbers_info/biggerNumbers.py | gvvishwanath/numbers-to-words | 16e016a8b135bf1ba4791e2426ce9d40d3bfd8ad | [
"MIT"
] | null | null | null | hindi_numbers_info/biggerNumbers.py | gvvishwanath/numbers-to-words | 16e016a8b135bf1ba4791e2426ce9d40d3bfd8ad | [
"MIT"
# Magnitudes of the Indian numbering system.
Lakh = 10 ** 5
Crore = 10 ** 7

# Romanised Hindi names for the large round numbers.
biggerNumbers = {
    100: "sau",
    1000: "hazaar",
    Lakh: "laakh",
    Crore: "karoD",
}
| 13.8 | 21 | 0.514493 |
395058d4b785c1a242a2aee8ca684f57e965ec58 | 494 | py | Python | plot.py | bandreghetti/autoplay | 4d3ba7c7383dfb2abf73040bf521f463b1966c30 | [
"MIT"
] | null | null | null | plot.py | bandreghetti/autoplay | 4d3ba7c7383dfb2abf73040bf521f463b1966c30 | [
"MIT"
] | null | null | null | plot.py | bandreghetti/autoplay | 4d3ba7c7383dfb2abf73040bf521f463b1966c30 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
from games import gameList
import numpy as np
import sys
import os
from matplotlib import pyplot as plt
def main():
    """Plot the training history stored in <foldername>/history_{x,y}.npy.

    The folder name is taken from the first command-line argument; when it
    is missing, a usage message is printed and the script exits with a
    non-zero status.
    """
    if len(sys.argv) <= 1:
        print("Usage: ./plot.py <foldername>")
        print("Example: ./plot.py MLP")
        # Use sys.exit with a non-zero code (the bare exit() helper exited
        # with status 0, hiding the usage error from callers).
        sys.exit(1)
    foldername = sys.argv[1]
    history_x = np.load(os.path.join(foldername, 'history_x.npy'))
    history_y = np.load(os.path.join(foldername, 'history_y.npy'))
    plt.plot(history_x, history_y)
    plt.show()


# Only run when executed as a script, not when imported.
if __name__ == '__main__':
    main()
| 20.583333 | 66 | 0.649798 |
27de48cba1d7ce9aef8b737afb99a4c5e937fade | 8,179 | dart | Dart | packages/flutter_goldens_client/lib/skia_client.dart | josh-ksr/flutter | abfad9cc4994b61de64c9ef1cb096498e18275dc | [
"BSD-3-Clause"
] | 7 | 2020-07-04T06:05:52.000Z | 2022-02-26T00:41:51.000Z | packages/flutter_goldens_client/lib/skia_client.dart | josh-ksr/flutter | abfad9cc4994b61de64c9ef1cb096498e18275dc | [
"BSD-3-Clause"
] | 1 | 2019-03-06T07:31:58.000Z | 2019-03-06T07:31:58.000Z | packages/flutter_goldens_client/lib/skia_client.dart | josh-ksr/flutter | abfad9cc4994b61de64c9ef1cb096498e18275dc | [
"BSD-3-Clause"
] | 2 | 2019-12-24T10:06:47.000Z | 2021-09-26T14:04:57.000Z | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:convert';
import 'dart:io' as io;
import 'package:file/file.dart';
import 'package:file/local.dart';
import 'package:path/path.dart' as path;
import 'package:platform/platform.dart';
import 'package:process/process.dart';
import 'package:flutter_goldens_client/client.dart';
// If you are here trying to figure out how to use golden files in the Flutter
// repo itself, consider reading this wiki page:
// https://github.com/flutter/flutter/wiki/Writing-a-golden-file-test-for-package%3Aflutter
// TODO(Piinks): This file will replace ./client.dart when transition to Skia
// Gold testing is complete
const String _kGoldctlKey = 'GOLDCTL';
const String _kServiceAccountKey = 'GOLD_SERVICE_ACCOUNT';
/// An extension of the [GoldensClient] class that interfaces with Skia Gold
/// for golden file testing.
class SkiaGoldClient extends GoldensClient {
  /// Creates a [SkiaGoldClient].
  ///
  /// The file system, process manager and platform default to the local
  /// implementations; they are injectable so tests can substitute fakes.
  SkiaGoldClient({
    FileSystem fs = const LocalFileSystem(),
    ProcessManager process = const LocalProcessManager(),
    Platform platform = const LocalPlatform(),
  }) : super(
    fs: fs,
    process: process,
    platform: platform,
  );
  /// The local [Directory] within the [comparisonRoot] for the current test
  /// context. In this directory, the client will create image and json files
  /// for the goldctl tool to use.
  ///
  /// This is informed by the [FlutterGoldenFileComparator] [basedir]. It cannot
  /// be null.
  Directory _workDirectory;

  /// The path to the local [Directory] where the goldctl tool is hosted.
  ///
  /// Uses the [platform] environment in this implementation; the lookup
  /// yields null when the GOLDCTL environment variable is unset.
  String get _goldctl => platform.environment[_kGoldctlKey];

  /// The path to the local [Directory] where the service account key is
  /// hosted.
  ///
  /// Uses the [platform] environment in this implementation; the lookup
  /// yields null when the GOLD_SERVICE_ACCOUNT environment variable is unset.
  String get _serviceAccount => platform.environment[_kServiceAccountKey];

  /// Root directory (under the Flutter cache) in which Skia Gold comparisons
  /// are staged.
  @override
  Directory get comparisonRoot => flutterRoot.childDirectory(fs.path.join('bin', 'cache', 'pkg', 'skia_goldens'));
/// Prepares the local work space for golden file testing and calls the
/// goldctl `auth` command.
///
/// This ensures that the goldctl tool is authorized and ready for testing. It
/// will only be called once for each instance of
/// [FlutterSkiaGoldFileComparator].
///
/// The [workDirectory] parameter specifies the current directory that golden
/// tests are executing in, relative to the library of the given test. It is
/// informed by the basedir of the [FlutterSkiaGoldFileComparator].
Future<void> auth(Directory workDirectory) async {
assert(workDirectory != null);
_workDirectory = workDirectory;
if (_clientIsAuthorized())
return;
if (_serviceAccount.isEmpty) {
final StringBuffer buf = StringBuffer()..writeln('Gold service account is unavailable.');
throw NonZeroExitCode(1, buf.toString());
}
final File authorization = _workDirectory.childFile('serviceAccount.json');
await authorization.writeAsString(_serviceAccount);
final List<String> authArguments = <String>[
'auth',
'--service-account', authorization.path,
'--work-dir', _workDirectory.childDirectory('temp').path,
];
// final io.ProcessResult authResults =
await io.Process.run(
_goldctl,
authArguments,
);
// TODO(Piinks): Re-enable after Gold flakes are resolved, https://github.com/flutter/flutter/pull/36103
// if (authResults.exitCode != 0) {
// final StringBuffer buf = StringBuffer()
// ..writeln('Flutter + Skia Gold auth failed.')
// ..writeln('stdout: ${authResults.stdout}')
// ..writeln('stderr: ${authResults.stderr}');
// throw NonZeroExitCode(authResults.exitCode, buf.toString());
// }
}
/// Executes the `imgtest init` command in the goldctl tool.
///
/// The `imgtest` command collects and uploads test results to the Skia Gold
/// backend, the `init` argument initializes the current test.
Future<void> imgtestInit() async {
final File keys = _workDirectory.childFile('keys.json');
final File failures = _workDirectory.childFile('failures.json');
await keys.writeAsString(_getKeysJSON());
await failures.create();
final String commitHash = await _getCurrentCommit();
final List<String> imgtestInitArguments = <String>[
'imgtest', 'init',
'--instance', 'flutter',
'--work-dir', _workDirectory.childDirectory('temp').path,
'--commit', commitHash,
'--keys-file', keys.path,
'--failure-file', failures.path,
'--passfail',
];
if (imgtestInitArguments.contains(null)) {
final StringBuffer buf = StringBuffer();
buf.writeln('Null argument for Skia Gold imgtest init:');
imgtestInitArguments.forEach(buf.writeln);
throw NonZeroExitCode(1, buf.toString());
}
// final io.ProcessResult imgtestInitResult =
await io.Process.run(
_goldctl,
imgtestInitArguments,
);
// TODO(Piinks): Re-enable after Gold flakes are resolved, https://github.com/flutter/flutter/pull/36103
// if (imgtestInitResult.exitCode != 0) {
// final StringBuffer buf = StringBuffer()
// ..writeln('Flutter + Skia Gold imgtest init failed.')
// ..writeln('stdout: ${imgtestInitResult.stdout}')
// ..writeln('stderr: ${imgtestInitResult.stderr}');
// throw NonZeroExitCode(imgtestInitResult.exitCode, buf.toString());
// }
}
/// Executes the `imgtest add` command in the goldctl tool.
///
/// The `imgtest` command collects and uploads test results to the Skia Gold
/// backend, the `add` argument uploads the current image test. A response is
/// returned from the invocation of this command that indicates a pass or fail
/// result.
///
/// The testName and goldenFile parameters reference the current comparison
/// being evaluated by the [FlutterSkiaGoldFileComparator].
Future<bool> imgtestAdd(String testName, File goldenFile) async {
assert(testName != null);
assert(goldenFile != null);
final List<String> imgtestArguments = <String>[
'imgtest', 'add',
'--work-dir', _workDirectory.childDirectory('temp').path,
'--test-name', testName.split(path.extension(testName.toString()))[0],
'--png-file', goldenFile.path,
];
await io.Process.run(
_goldctl,
imgtestArguments,
);
// TODO(Piinks): Comment on PR if triage is needed, https://github.com/flutter/flutter/issues/34673
// So as not to turn the tree red in this initial implementation, this will
// return true for now.
// The ProcessResult that returns from line 157 contains the pass/fail
// result of the test & links to the dashboard and diffs.
return true;
}
/// Returns the current commit hash of the Flutter repository.
Future<String> _getCurrentCommit() async {
if (!flutterRoot.existsSync()) {
final StringBuffer buf = StringBuffer()
..writeln('Flutter root could not be found: $flutterRoot');
throw NonZeroExitCode(1, buf.toString());
} else {
final io.ProcessResult revParse = await process.run(
<String>['git', 'rev-parse', 'HEAD'],
workingDirectory: flutterRoot.path,
);
return revParse.exitCode == 0 ? revParse.stdout.trim() : null;
}
}
/// Returns a JSON String with keys value pairs used to uniquely identify the
/// configuration that generated the given golden file.
///
/// Currently, the only key value pair being tracked is the platform the image
/// was rendered on.
String _getKeysJSON() {
return json.encode(
<String, dynamic>{
'Platform' : platform.operatingSystem,
}
);
}
/// Returns a boolean value to prevent the client from re-authorizing itself
/// for multiple tests.
bool _clientIsAuthorized() {
final File authFile = _workDirectory?.childFile(super.fs.path.join(
'temp',
'auth_opt.json',
));
return authFile.existsSync();
}
}
| 36.67713 | 114 | 0.685903 |
c7c19bdd2726a3afd0396471d40879a8dcd1b67b | 181 | sql | SQL | apps/intake-db-workflows/schema/0002-intake_table.sql | MattHodge/tilt-demo | c3b00d71f1ba50c1e4d94f79b3da60ca10fb7e5b | [
"MIT"
] | null | null | null | apps/intake-db-workflows/schema/0002-intake_table.sql | MattHodge/tilt-demo | c3b00d71f1ba50c1e4d94f79b3da60ca10fb7e5b | [
"MIT"
] | null | null | null | apps/intake-db-workflows/schema/0002-intake_table.sql | MattHodge/tilt-demo | c3b00d71f1ba50c1e4d94f79b3da60ca10fb7e5b | [
"MIT"
] | null | null | null | USE intake;
create table if not exists intake
(
id int auto_increment,
host text null,
metric_name text null,
value int null,
constraint table_name_pk
primary key (id)
);
| 12.928571 | 33 | 0.740331 |
4450d8cd723d33dd7e7d4c06aaf2064c6fbb2e75 | 4,266 | py | Python | tests/test_cli.py | mriedmann/pipecheck | 9919c13c96d1c9ec28e90ca9c4da5f5b33eb41e9 | [
"MIT"
] | null | null | null | tests/test_cli.py | mriedmann/pipecheck | 9919c13c96d1c9ec28e90ca9c4da5f5b33eb41e9 | [
"MIT"
] | 5 | 2021-06-05T22:09:17.000Z | 2021-11-24T22:17:08.000Z | tests/test_cli.py | mriedmann/pipecheck | 9919c13c96d1c9ec28e90ca9c4da5f5b33eb41e9 | [
"MIT"
] | null | null | null | import unittest
from icecream import ic
from parameterized import parameterized
from pipecheck.cli import get_commands_and_config_from_args, parse_args
class CliTests(unittest.TestCase):
def assertSubset(self, collection, subset):
self.assertTrue(subset.items() <= collection.items(), f"{subset} not in {collection}")
def assertSubsetList(self, collection, subset):
for i in range(0, len(subset)):
self.assertSubset(collection[i], subset[i])
@parameterized.expand(
[
(["--http", "https://httpstat.us/200"], {"http": ["https://httpstat.us/200"]}),
(["--tcp", "8.8.8.8:53"], {"tcp": ["8.8.8.8:53"]}),
(["--dns", "one.one.one.one"], {"dns": ["one.one.one.one"]}),
(["--dns", "one.one.one.one=1.1.1.1"], {"dns": ["one.one.one.one=1.1.1.1"]}),
(["--ping", "8.8.8.8"], {"ping": ["8.8.8.8"]}),
(["--ping", "8.8.8.8", "1.1.1.1"], {"ping": ["8.8.8.8", "1.1.1.1"]}),
(
["--http", "https://httpstat.us/200", "--tcp", "8.8.8.8:53", "--dns", "one.one.one.one=1.1.1.1,1.0.0.1"],
{"http": ["https://httpstat.us/200"], "tcp": ["8.8.8.8:53"], "dns": ["one.one.one.one=1.1.1.1,1.0.0.1"]},
),
(["-i", "30", "-p", "9990"], {"port": 9990, "interval": 30}),
(["-n", "test-namespace", "-l", "app==test"], {"namespace": "test-namespace", "selector": "app==test"}),
]
)
def test_cli_parser(self, params, expected_args):
args = parse_args(params)
self.assertSubset(args, expected_args)
@parameterized.expand(
[
({"http": ["https://httpstat.us/200"]}, [{"type": "http", "url": "https://httpstat.us/200"}]),
({"tcp": ["8.8.8.8:53"]}, [{"type": "tcp", "host": "8.8.8.8", "port": 53}]),
(
{"dns": ["one.one.one.one=1.1.1.1,1.0.0.1"]},
[{"type": "dns", "name": "one.one.one.one", "ips": ["1.1.1.1", "1.0.0.1"]}],
),
({"dns": ["one.one.one.one"]}, [{"type": "dns", "name": "one.one.one.one", "ips": []}]),
({"dns": ["one.one.one.one=1.1.1.1"]}, [{"type": "dns", "name": "one.one.one.one", "ips": ["1.1.1.1"]}]),
({"ping": ["8.8.8.8"]}, [{"type": "ping", "host": "8.8.8.8"}]),
({"ping": ["8.8.8.8", "1.1.1.1"]}, [{"type": "ping", "host": "8.8.8.8"}, {"type": "ping", "host": "1.1.1.1"}]),
(
{"http": ["https://httpstat.us/200"], "tcp": ["8.8.8.8:53"], "dns": ["one.one.one.one=1.1.1.1,1.0.0.1"]},
[
({"type": "http", "url": "https://httpstat.us/200"}),
({"type": "dns", "name": "one.one.one.one", "ips": ["1.1.1.1", "1.0.0.1"]}),
({"type": "tcp", "host": "8.8.8.8", "port": 53}),
],
),
]
)
def test_cli(self, args, expected_commands):
(commands, _) = ic(list(get_commands_and_config_from_args(args)))
commands.sort(key=lambda x: x["type"])
expected_commands.sort(key=lambda x: x["type"])
self.assertSubsetList(commands, expected_commands)
@parameterized.expand(
[
(
["--http", "https://self-signed.badssl.com/", "--insecure"],
[{"type": "http", "url": "https://self-signed.badssl.com/"}],
{"insecure": True},
),
(
["--http", "https://self-signed.badssl.com/", "--http-status", "200"],
[{"type": "http", "url": "https://self-signed.badssl.com/"}],
{"http_status": [200]},
),
(
["--http", "https://self-signed.badssl.com/", "--http-status", "200", "301"],
[{"type": "http", "url": "https://self-signed.badssl.com/"}],
{"http_status": [200, 301]},
),
]
)
def test_cli_and_config(self, params, expected_commands, expected_config):
args = parse_args(params)
(commands, config) = list(get_commands_and_config_from_args(args))
self.assertSubsetList(commands, expected_commands)
self.assertSubset(config, expected_config)
if __name__ == "__main__":
unittest.main()
| 45.870968 | 123 | 0.473277 |
b935c2efa78679e5745ca4fb8bff6d8f5a2018d8 | 240 | css | CSS | src/lib/VideoJsPlayerCss.css | Commission-on-POST/video-js-component-example | 9173cfa8ae7d5305c2e7311b00003c64120715e4 | [
"Unlicense"
] | null | null | null | src/lib/VideoJsPlayerCss.css | Commission-on-POST/video-js-component-example | 9173cfa8ae7d5305c2e7311b00003c64120715e4 | [
"Unlicense"
] | null | null | null | src/lib/VideoJsPlayerCss.css | Commission-on-POST/video-js-component-example | 9173cfa8ae7d5305c2e7311b00003c64120715e4 | [
"Unlicense"
] | null | null | null | .vjs-loading-spinner {
display: none !important;
}
.video-js .vjs-progress-control .vjs-play-progress .vjs-time-tooltip {
display: none;
}
.video-js .vjs-progress-control:hover .vjs-play-progress .vjs-time-tooltip {
display: block;
}
| 24 | 76 | 0.720833 |
3e6e8d9349438466b1c3c3102d66603ae7b35a14 | 1,014 | lua | Lua | copy-paste-recipe-signals_1.1.1/settings.lua | Zomis/FactorioMonitorMod | 8dc38e74d8a69c2e3dcf6f721490dc23433dab94 | [
"MIT"
] | 12 | 2017-05-28T10:56:36.000Z | 2021-01-14T19:57:26.000Z | copy-paste-recipe-signals_1.1.1/settings.lua | Zomis/FactorioMods | f0f7e6345a2c200a6b2dce07bc5aaf942ea3ab5f | [
"MIT"
] | 51 | 2016-04-04T16:31:58.000Z | 2022-02-11T23:46:28.000Z | copy-paste-recipe-signals_1.1.1/settings.lua | Zomis/FactorioMonitorMod | 8dc38e74d8a69c2e3dcf6f721490dc23433dab94 | [
"MIT"
] | 4 | 2016-06-06T19:09:19.000Z | 2021-01-29T22:30:49.000Z | data:extend({
{
type = "int-setting",
name = "copy-paste-recipe-signals-ingredient-multiplier",
setting_type = "runtime-per-user",
order = "a",
default_value = -1,
minimum_value = -100000,
maximum_value = 100000
}
})
data:extend({
{
type = "int-setting",
name = "copy-paste-recipe-signals-product-multiplier",
setting_type = "runtime-per-user",
order = "b",
default_value = 1,
minimum_value = -100000,
maximum_value = 100000
}
})
data:extend({
{
type = "bool-setting",
name = "copy-paste-recipe-signals-include-ticks",
setting_type = "runtime-per-user",
order = "c",
default_value = true
}
})
data:extend({
{
type = "bool-setting",
name = "copy-paste-recipe-signals-include-seconds",
setting_type = "runtime-per-user",
order = "d",
default_value = true
}
})
data:extend({
{
type = "bool-setting",
name = "copy-paste-recipe-time-includes-modules",
setting_type = "runtime-per-user",
order = "d",
default_value = false
}
})
| 20.28 | 59 | 0.635108 |
e77dc2b27ea791af68b709376ab3318e95095614 | 2,747 | php | PHP | routes/web.php | thanhlt-28/thanh-mvc | 585d0b23a75b8d1d705debb6c95e572895d6a7a6 | [
"MIT"
] | null | null | null | routes/web.php | thanhlt-28/thanh-mvc | 585d0b23a75b8d1d705debb6c95e572895d6a7a6 | [
"MIT"
] | null | null | null | routes/web.php | thanhlt-28/thanh-mvc | 585d0b23a75b8d1d705debb6c95e572895d6a7a6 | [
"MIT"
] | null | null | null | <?php
use App\Http\Controllers\CategoryController;
use App\Http\Controllers\HomeController;
use App\Http\Controllers\PostController;
use Illuminate\Routing\RouteGroup;
use Symfony\Component\HttpFoundation\Request;
use Illuminate\Support\Facades\Route;
use App\Http\Controllers\LoginController;
use Illuminate\Support\Facades\Auth;
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Route::get('/', [HomeController::class, 'index'])->name('home');
Route::view('login', 'auth.login')->name('login');
Route::post('login', [LoginController::class, 'postLogin']);
Route::any('logout', function () {
Auth::logout();
return redirect(route('login'));
})->name('logout');
Route::prefix('admin')->middleware('check-admin-role')->group(function () {
Route::get('/', function () {
return view('backend.dashboard');
})->name('dashboard');
Route::prefix('cate')->group(function () {
Route::get('index', [CategoryController::class, 'index'])->name('cate.index');
Route::get('add-cate', [CategoryController::class, 'create'])->name('cate.create');
Route::post('add-cate', [CategoryController::class, 'store'])->name('cate.store');
Route::get('remove/{id}', [CategoryController::class, 'destroy'])->name('cate.destroy');
Route::get('edit/{id}', [CategoryController::class, 'edit'])->name('cate.edit');
Route::post('edit/{id}', [CategoryController::class, 'update'])->name('cate.update');
});
Route::prefix('post')->group(function () {
Route::get('index', [PostController::class, 'index'])->name('post.index');
Route::get('add-post', [PostController::class, 'create'])->name('post.create');
Route::post('add-post', [PostController::class, 'store'])->name('post.store');
Route::get('remove/{id}', [PostController::class, 'destroy'])->name('post.destroy');
Route::get('edit/{id}', [PostController::class, 'edit'])->name('post.edit');
Route::post('edit/{id}', [PostController::class, 'update']);
});
});
Route::get('post/{id}', [PostController::class, 'detail'])->name('post.detail');
Route::post('post/api/tang-view', [PostController::class, 'tangView'])
->name('post.tangView');
// Route::get('posts',[PostController::class,'postPage'])->name('')
Route::middleware(['auth:sanctum', 'verified'])->get('/dashboard', function () {
return view('dashboard');
})->name('dashboard1');
| 41 | 96 | 0.617401 |
c9cba4b804574cf5a33dc4a2bacbd4f85351520f | 7,252 | ts | TypeScript | src/app/watch/routemap.ts | routehub/route_web | 45134159708b943c144a98f5cd04b5c0c2f889e1 | [
"MIT"
] | 27 | 2020-04-02T01:18:03.000Z | 2022-03-30T05:20:36.000Z | src/app/watch/routemap.ts | routehub/route_web | 45134159708b943c144a98f5cd04b5c0c2f889e1 | [
"MIT"
] | 25 | 2020-03-29T07:29:56.000Z | 2022-03-02T07:57:34.000Z | src/app/watch/routemap.ts | routehub/route_web | 45134159708b943c144a98f5cd04b5c0c2f889e1 | [
"MIT"
] | 6 | 2020-04-02T00:30:27.000Z | 2021-08-08T14:56:10.000Z | import * as L from 'leaflet'
import * as Elevation from 'leaflet.elevation/src/L.Control.Elevation.js'
import * as Hotline from 'leaflet-hotline'
import turfbbox from '@turf/bbox'
import * as turf from '@turf/helpers'
import * as AnimatedMarker from './animatedMarker.js'
export class Routemap {
gpsIcon = new L.icon({
iconUrl: '/assets/icon/gps_icon.png',
iconSize: [20, 20], // size of the icon
iconAnchor: [10, 10], // point of the icon which will correspond to marker's location
popupAnchor: [0, 0], // point from which the popup should open relative to the iconAnchor
});
startIcon = new L.icon({
iconUrl: '/assets/icon/start_icon.png',
iconSize: [50, 27], // size of the icon
iconAnchor: [52, 27], // point of the icon which will correspond to marker's location
popupAnchor: [0, 0], // point from which the popup should open relative to the iconAnchor
});
goalIcon = new L.icon({
iconUrl: '/assets/icon/goal_icon.png',
iconSize: [50, 27], // size of the icon
iconAnchor: [-2, 27], // point of the icon which will correspond to marker's location
popupAnchor: [0, 0], // point from which the popup should open relative to the iconAnchor
});
commentIcon = new L.icon({
iconUrl: '/assets/icon/comment_icon.png',
iconSize: [20, 20], // size of the icon
iconAnchor: [10, 10], // point of the icon which will correspond to marker's location
popupAnchor: [0, 0], // point from which the popup should open relative to the iconAnchor
});
editIcon = new L.icon({
iconUrl: '/assets/icon/edit_icon.png',
iconSize: [14, 14], // size of the icon
iconAnchor: [7, 7], // point of the icon which will correspond to marker's location
popupAnchor: [0, 0], // point from which the popup should open relative to the iconAnchor
className: 'map-editIcon',
});
private getYahooLayer() {
const attrString = '<a href="https://map.yahoo.co.jp/maps?hlat=35.66572&lat=35.66572&hlon=139.731&lon=139.731&z=18&datum=wgs&mode=map&.f=jsapilogo" target="_blank" id="yolp-logo-link" class= "yolp-logo" style="z-index: 10; position: absolute; margin: 0px; padding: 0px; right: 3px; bottom: 3px;" > <img src="https://s.yimg.jp/images/maps/logo/yj_logo.png" alt = "" border="0" > </a>'
const layer = new L.tileLayer('https://map.c.yimg.jp/m?x={x}&y={y}&z={z}&r=1&style=base:standard&size=512', {
attribution: attrString,
maxZoom: 19,
})
layer.getTileUrl = function (coord) {
const z = coord.z + 1
const { x } = coord
const y = Math.pow(2, coord.z - 1) - coord.y - 1
return `https://map.c.yimg.jp/m?x=${x}&y=${y}&z=${z}&r=1&style=base:standard&size=512`
}
return layer
}
private getOSMLayer() {
const url = 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png'
// const url2 = 'https://www.toolserver.org/tiles/hikebike/{z}/{x}/{y}.png'
//const url3 = 'https://tile.openstreetmap.jp/{z}/{x}/{y}.png'
// const url4 = 'http://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png'
const layer = L.tileLayer(url, {
attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors, <a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>',
maxZoom: 19,
})
return layer
}
private getOSMCycleLayer() {
const url = 'https://tile.thunderforest.com/cycle/{z}/{x}/{y}.png?apikey=8ff577dddcc24dbd945e80ef152bf1e5'
const layer = L.tileLayer(url, {
attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors, <a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>',
maxZoom: 19,
})
return layer
}
private getGSILayer() {
return L.tileLayer('https://cyberjapandata.gsi.go.jp/xyz/std/{z}/{x}/{y}.png', {
attribution: "<a href='https://maps.gsi.go.jp/development/ichiran.html' target='_blank'>地理院タイル</a>",
})
}
constructor() {
Elevation
Hotline
AnimatedMarker
}
createMap(mapele) {
const center: any = [35.681, 139.767]
const map = L.map(mapele, { center, zoom: 9, zoomControl: false })
const baselayers = {
OSM: this.getOSMLayer(),
'OSM Cycle': this.getOSMCycleLayer(),
Yahoo: this.getYahooLayer(),
GSI: this.getGSILayer(),
}
const overlays = {}
L.control.layers(baselayers, overlays).addTo(map)
baselayers.OSM.addTo(map)
// スケールコントロールを追加(オプションはフィート単位を非表示)
// TODO画面の設計を考えてじゃまにならないように配置したい
L.control.scale({ imperial: false }).addTo(map)
// elevation
const elevation = L.control.elevation({
position: 'bottomright',
theme: 'steelblue-theme',
// TODO : ウィンドウサイズ変更イベントに対応する
width: window.innerWidth - 10,
height: 100,
margins: {
top: 0,
right: 5,
bottom: 0,
left: 0,
},
yAxisMin: 0,
useHeightIndicator: true,
isInnerLabel: true,
tooltips: true,
tooltipsLabel: {
dist: '距離',
elevation: '標高',
slope: '斜度',
distDiff: '距離差',
elevationDiff: '標高差',
slopeAverage: '平均斜度',
},
addSlope: true,
}).addTo(map)
return {
map,
elevation,
addAnimatedMarker: (line) => {
const latlnglist = line.map((l) => [l[1], l[0]])
const animatedMarker = L.animatedMarker(latlnglist, { icon: this.gpsIcon })
map.addLayer(animatedMarker)
return animatedMarker
},
addElevationHotlineLayer: (line) => {
const maxElev = Math.max.apply(null, line.map((l) => l[2]))
const latlngelevlist = line.map((l) => [l[1], l[0], l[2] / maxElev])
return L.hotline(latlngelevlist, {
outlineWidth: 1,
outlineColor: 'blue',
}).addTo(map)
},
addSlopeHotlineLayer: (line) => {
let prevPoint
let prevElevation
const latlngelevlist = line.map((l) => {
let slope = 0
if (prevPoint) {
const point = L.latLng(l[1], [0])
const distDiff = point.distanceTo(prevPoint)
const elevDiff = l[2] - prevElevation
slope = Math.ceil(elevDiff / distDiff * 100 * 100) / 100
if (Math.abs(slope) > 20 && slope > 20) {
slope = 20
} else if (Math.abs(slope) > 20 && slope < 20) {
slope = -20
} else if (!slope) {
slope = 0
}
prevPoint = point
} else {
prevPoint = L.latLng(l[1], [0])
}
prevElevation = l[2]
return [l[1], l[0], slope]
})
return L.hotline(latlngelevlist, {
outlineWidth: 1,
outlineColor: 'blue',
min: -20,
max: 20,
palette: { 0.0: 'blue', 0.4: '#6aff70', 1.0: 'red' },
}).addTo(map)
},
}
}
posToLatLngBounds(pos) {
const line = turf.lineString(pos)
const bbox = turfbbox(line) // lonlat問題...
const latplus = Math.abs(bbox[1] - bbox[3]) * 0.1
const lonplus = Math.abs(bbox[0] - bbox[2]) * 0.1
return L.latLngBounds([ // いい感じの範囲にするために調整
[bbox[1] * 1 - latplus, bbox[0] * 1 - lonplus],
[bbox[3] * 1 + latplus, bbox[2] * 1 + lonplus],
])
}
} | 35.90099 | 415 | 0.593354 |
8335eda676ff7416d3cb1c8adbd3674fb12015d6 | 1,812 | ts | TypeScript | node_modules/rmi/src/client/File/Attach/Lazy.ts | nikitabelotelov/coffee | 63cd862b827337a373301677f02d341d59ce2485 | [
"MIT"
] | null | null | null | node_modules/rmi/src/client/File/Attach/Lazy.ts | nikitabelotelov/coffee | 63cd862b827337a373301677f02d341d59ce2485 | [
"MIT"
] | null | null | null | node_modules/rmi/src/client/File/Attach/Lazy.ts | nikitabelotelov/coffee | 63cd862b827337a373301677f02d341d59ce2485 | [
"MIT"
] | null | null | null | /// <amd-module name="File/Attach/Lazy" />
import Base = require("File/Attach/Base");
import GetterContainerLazy = require("File/Attach/Container/GetterLazy");
import SourceContainerLazy = require("File/Attach/Container/SourceLazy");
import {IResourceGetter} from "File/IResourceGetter";
import {IResourceConstructor} from "File/IResource";
import {IGetterContainerLazy, ISourceContainerLazy} from 'File/Attach/IContainer';
/**
* Класс, наследник Attach/Base, позволяющий регестрировать
* динамично подгружаемые экземпляры {@link File/IResourceGetter} и {@link File/Attach/Source}
* @public
* @class File/Attach/Lazy
* @extends File/Attach/Base
* @author Заляев А.В.
*/
class Lazy extends Base {
protected _getterContainer: IGetterContainerLazy;
protected _sourceContainer: ISourceContainerLazy;
constructor(opt){
super(opt);
this._getterContainer = new GetterContainerLazy();
this._sourceContainer = new SourceContainerLazy();
}
/**
* Ленивая регестрация экземпляров IResourceGetter, для получения файлов
* @param {String} name Имя модуля
* @param {String} link Сылка на модуль
* @param {*} [options] Параметры вызова конструктора
* @void
*/
registerLazyGetter(name: string, link: string, options?) {
return this._getterContainer.register(name, link, options);
}
/**
* Ленивая регестрация ISource
* @param {File/IResourceConstructor} fileType Конструктор обёртки над ресурсом
* @param {String} link Ссылка на источник данных
* @param {*} [options] Параметры вызова конструктора обёртки
* @void
*/
registerLazySource(fileType: IResourceConstructor, link: string, options?: any) {
return this._sourceContainer.register(fileType, link, options);
}
}
export = Lazy;
| 37.75 | 94 | 0.71468 |
806a156b5b274e016b13e781288c060354a4eda6 | 200 | swift | Swift | WeChatSwift/WebView/WebBottomToolBar.swift | InHisName/WeChatSwift | 3bbd4bf1abc741226aad5b05c333eb6db819bff9 | [
"MIT"
] | 132 | 2019-07-15T11:14:28.000Z | 2022-02-21T02:56:26.000Z | WeChatSwift/WebView/WebBottomToolBar.swift | InHisName/WeChatSwift | 3bbd4bf1abc741226aad5b05c333eb6db819bff9 | [
"MIT"
] | 6 | 2019-08-19T09:48:33.000Z | 2021-11-03T01:34:20.000Z | WeChatSwift/WebView/WebBottomToolBar.swift | InHisName/WeChatSwift | 3bbd4bf1abc741226aad5b05c333eb6db819bff9 | [
"MIT"
] | 33 | 2019-08-04T15:16:47.000Z | 2022-01-07T07:06:04.000Z | //
// WebBottomToolBar.swift
// WeChatSwift
//
// Created by xu.shuifeng on 2019/7/29.
// Copyright © 2019 alexiscn. All rights reserved.
//
import UIKit
class WebBottomToolBar: UIView {
}
| 14.285714 | 51 | 0.68 |
f40210eba3b29a277a2e41fd3218dfd7d72283c1 | 920 | cs | C# | Tests/Totem.Runtime.Tests/Json/TotemSerializerSettingsSpecs.cs | snickroger/Totem | d1c691391579f3af6cc29e851488bd9cc14d7141 | [
"MIT"
] | null | null | null | Tests/Totem.Runtime.Tests/Json/TotemSerializerSettingsSpecs.cs | snickroger/Totem | d1c691391579f3af6cc29e851488bd9cc14d7141 | [
"MIT"
] | null | null | null | Tests/Totem.Runtime.Tests/Json/TotemSerializerSettingsSpecs.cs | snickroger/Totem | d1c691391579f3af6cc29e851488bd9cc14d7141 | [
"MIT"
] | null | null | null | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Totem.Runtime.Json
{
/// <summary>
/// Scenarios involving the <see cref="TotemSerializerSettings"/> class
/// </summary>
public class TotemSerializerSettingsSpecs : Specs
{
void PreserveUtcDateTime()
{
var original = new TimeObject { Time = DateTime.UtcNow };
var serialized = JsonFormat.Text.Serialize(original);
var deserialized = JsonFormat.Text.Deserialize<TimeObject>(serialized);
Expect(original.Time.Hour).Is(deserialized.Time.Hour, "Hour should match original");
Expect(original.Time.Minute).Is(deserialized.Time.Minute, "Minute should match original");
Expect(original.Time.Kind).Is(deserialized.Time.Kind, "Kind should match original");
}
class TimeObject
{
public DateTime Time { get; set; }
}
}
}
| 27.878788 | 96 | 0.705435 |
7afc076b093a84eec20c246e69ae706ca325b959 | 5,671 | cs | C# | TOTPAuthenticationProvider/TOTPAuthenticationProviderTests/TOTPAuthenticatorDummyStore.cs | tinodo/adfsmfaadapter | 89f3695635eb962883e709cd113dee16a28051d1 | [
"MIT"
] | null | null | null | TOTPAuthenticationProvider/TOTPAuthenticationProviderTests/TOTPAuthenticatorDummyStore.cs | tinodo/adfsmfaadapter | 89f3695635eb962883e709cd113dee16a28051d1 | [
"MIT"
] | null | null | null | TOTPAuthenticationProvider/TOTPAuthenticationProviderTests/TOTPAuthenticatorDummyStore.cs | tinodo/adfsmfaadapter | 89f3695635eb962883e709cd113dee16a28051d1 | [
"MIT"
] | null | null | null | //-----------------------------------------------------------------------
// <copyright file="AuthenticationAdapterMetadata.cs" company="Microsoft">
// Copyright (c) Microsoft. All rights reserved.
// </copyright>
// <author>Tino Donderwinkel</author>
//
// THIS CODE AND ANY ASSOCIATED INFORMATION ARE PROVIDED “AS IS” WITHOUT
// WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT
// LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS
// FOR A PARTICULAR PURPOSE. THE ENTIRE RISK OF USE, INABILITY TO USE, OR
// RESULTS FROM THE USE OF THIS CODE REMAINS WITH THE USER.
//
//-----------------------------------------------------------------------
namespace TOTPAuthenticationProviderTests
{
using System;
using System.Collections.Generic;
using TOTPAuthenticationProvider;
public class TOTPAuthenticatorDummyStore : TOTPAuthenticatorStore
{
private class Secret
{
public string upn;
public string secret;
public int attempts;
public DateTime? lockedUntil;
}
private class UsedCode
{
public string upn;
public long interval;
}
private List<Secret> secrets;
private List<UsedCode> usedCodes;
public TOTPAuthenticatorDummyStore(string connectionString)
: base(connectionString)
{
this.secrets = new List<Secret>();
this.usedCodes = new List<UsedCode>();
}
public override void AddUsedCode(string upn, long interval)
{
if (this.secrets.Exists(secret => secret.upn == upn))
{
this.usedCodes.Add(new UsedCode() { upn = upn, interval = interval });
}
else
{
throw new TOTPAuthenticatorStoreException("User does not exist.");
}
}
public override void CleanupUsedCodes(string upn, long fromInterval)
{
if (this.secrets.Exists(secret => secret.upn == upn))
{
this.usedCodes.RemoveAll(usedCode => usedCode.interval < fromInterval);
}
else
{
throw new TOTPAuthenticatorStoreException("User does not exist.");
}
}
public override bool CodeWasUsedPreviously(string upn, long interval)
{
if (this.secrets.Exists(secret => secret.upn == upn))
{
return this.usedCodes.Exists(usedCode => usedCode.upn == upn && usedCode.interval == interval);
}
else
{
throw new TOTPAuthenticatorStoreException("User does not exist.");
}
}
public override void CreateSecretKey(string upn, string secretKey)
{
if (this.secrets.Exists(secret => secret.upn == upn))
{
throw new TOTPAuthenticatorStoreException("User already has a secret key.");
}
else
{
this.secrets.Add(new Secret() { upn = upn, secret = secretKey, attempts = 0, lockedUntil = null });
}
}
public override bool TryGetSecretKey(string upn, out string secretKey, out int attempts, out bool locked)
{
bool hasSecretKey;
var secret = this.secrets.Find(s => s.upn == upn);
if (secret == null)
{
hasSecretKey = false;
secretKey = null;
attempts = 0;
locked = false;
}
else
{
hasSecretKey = true;
secretKey = secret.secret;
attempts = secret.attempts;
if (secret.lockedUntil.HasValue)
{
locked = secret.lockedUntil.Value > DateTime.UtcNow;
if (!locked)
{
secret.attempts = 0;
secret.lockedUntil = null;
}
}
else
{
locked = false;
}
}
return hasSecretKey;
}
public override int IncreaseAttempts(string upn)
{
var secret = this.secrets.Find(s => s.upn == upn);
if (secret == null)
{
throw new TOTPAuthenticatorStoreException("User does not exist.");
}
secret.attempts++;
return secret.attempts;
}
//public override void UnlockAccount(string upn)
//{
// var secret = this.secrets.Find(s => s.upn == upn);
// if (secret == null)
// {
// throw new TOTPAuthenticatorStoreException("User does not exist.");
// }
// secret.attempts = 0;
// secret.lockedUntil = null;
//}
public override void LockAccount(string upn, DateTime lockedUntil)
{
var secret = this.secrets.Find(s => s.upn == upn);
if (secret == null)
{
throw new TOTPAuthenticatorStoreException("User does not exist.");
}
secret.lockedUntil = lockedUntil;
}
public override void ResetAttempts(string upn)
{
var secret = this.secrets.Find(s => s.upn == upn);
if (secret == null)
{
throw new TOTPAuthenticatorStoreException("User does not exist.");
}
secret.attempts = 0;
secret.lockedUntil = null;
}
}
}
| 32.405714 | 115 | 0.507671 |
14f50ceda46d3d8f65de2bb29388df988b1d64f3 | 1,492 | ts | TypeScript | packages/muban-transition-component/src/hooks/useScrollTransition.ts | pigeonfresh/transition-component | fb829d18376741cc000638decf419b673f60d919 | [
"MIT"
] | null | null | null | packages/muban-transition-component/src/hooks/useScrollTransition.ts | pigeonfresh/transition-component | fb829d18376741cc000638decf419b673f60d919 | [
"MIT"
] | null | null | null | packages/muban-transition-component/src/hooks/useScrollTransition.ts | pigeonfresh/transition-component | fb829d18376741cc000638decf419b673f60d919 | [
"MIT"
] | null | null | null | import gsap from 'gsap';
import ScrollTrigger from 'gsap/ScrollTrigger';
import type { SetupScrollTransitionOptions } from '@mediamonks/core-transition-component';
import { useTransitionController } from './useTransitionController';
import type {
SetupSignatureElements,
TransitionRef,
TransitionRefElement,
} from '../types/Transition.types';
import { transitionRefToElement } from '../util/Transition.utils';
gsap.registerPlugin(ScrollTrigger);
export function useScrollTransition<
T extends Record<string, R>,
R extends TransitionRef = TransitionRef,
E extends SetupSignatureElements<T> = SetupSignatureElements<T>
>(
container: TransitionRefElement,
{ scrollTrigger = {}, ...restOptions }: SetupScrollTransitionOptions<T, R, E>,
): ReturnType<typeof useTransitionController> {
const trigger = transitionRefToElement(container);
const transitionController = useTransitionController<T, R, E>(container, {
...restOptions,
scrollTrigger: { trigger, start: 'top 75%', ...scrollTrigger },
});
if (!scrollTrigger.scrub) {
/**
* We create another scroll trigger because we want to reset the timeline once the users completely
* scrolls the component out of the viewport. This way we can re-transition the component when the
* user scrolls it back into the viewport.
*/
ScrollTrigger.create({
trigger,
onLeaveBack: () => transitionController.transitionTimeline.in.pause(0),
});
}
return transitionController;
}
| 33.909091 | 103 | 0.738606 |
be6a501da28d8dd85a162bf726b58568b716f3fa | 7,251 | ts | TypeScript | frontend/e2e/authenticated/special-uses/application-temp-outfitters.e2e-spec.ts | thimonas/fs-open-forest-platform | dd754d81833f1bc44e5c150f75f00fe91dd0c4b3 | [
"CC0-1.0"
] | null | null | null | frontend/e2e/authenticated/special-uses/application-temp-outfitters.e2e-spec.ts | thimonas/fs-open-forest-platform | dd754d81833f1bc44e5c150f75f00fe91dd0c4b3 | [
"CC0-1.0"
] | null | null | null | frontend/e2e/authenticated/special-uses/application-temp-outfitters.e2e-spec.ts | thimonas/fs-open-forest-platform | dd754d81833f1bc44e5c150f75f00fe91dd0c4b3 | [
"CC0-1.0"
] | null | null | null | import { TempOutfittersForm } from './app.po';
import { FieldValidation } from './field-validation.po';
import { browser, element, by, Key } from 'protractor';
// End-to-end (Protractor) suite for the temporary outfitters permit form.
// NOTE(review): the specs share page state and rely on running in declaration
// order — later `it` blocks keep filling the form state left by earlier ones.
describe('Apply for a temp outfitters permit', () => {
let page: TempOutfittersForm;
let fieldValidation: FieldValidation;
fieldValidation = new FieldValidation();
const path = require('path');
// PDF fixture uploaded into every file-input exercised by this suite.
const testSuccessFile = path.resolve(__dirname, 'test-files/success.pdf');
beforeEach(() => {
page = new TempOutfittersForm();
// Fixed window size keeps element visibility deterministic across runs.
browser.driver.manage().window().setSize(1400, 900);
});
it('should display the permit name in the header', () => {
page.navigateTo();
expect<any>(element(by.css('app-root h1')).getText()).toEqual('Apply for a temporary outfitters permit with Open Forest.');
});
it('should not show errors by default', () => {
expect<any>(element(by.id('form-errors')).isPresent()).toBeFalsy();
});
// Shared file-upload validation scenarios (third arg presumably toggles a
// required/extra check — confirm against FieldValidation).
fieldValidation.validateFileUploadField('insurance-certificate', 'pdf', true);
it('should display good standing evidence upload field and business name if organization is clicked', () => {
element(by.id('organization-label')).click();
expect<any>(element(by.id('good-standing-evidence-wrapper')).isPresent()).toBeTruthy();
expect<any>(element(by.id('organization-name')).isPresent()).toBeTruthy();
});
// Fills only the applicant-info portion; submit must surface the error summary.
it('should not submit application if not all required fields are entered', () => {
element(by.id('good-standing-evidence')).sendKeys(testSuccessFile);
element(by.css('.primary-permit-holder-first-name')).sendKeys('test');
element(by.css('.primary-permit-holder-last-name')).sendKeys('test');
element(by.id('organization-name')).sendKeys('test');
element(by.css('.primary-permit-holder-address')).sendKeys('test');
element(by.css('.primary-permit-holder-address-line-2')).sendKeys('test');
element(by.css('.primary-permit-holder-city')).sendKeys('test');
element(by.css('.primary-permit-holder-state')).sendKeys('AK');
element(by.css('.primary-permit-holder-zip')).sendKeys('55555');
element(by.id('day-phone')).sendKeys('2222222222');
element(by.id('day-phone-ext')).sendKeys('2222');
element(by.id('submit-application')).click();
expect<any>(element(by.id('form-errors')).isPresent()).toBeTruthy();
});
// Completes the remaining required fields on the same page and submits.
it('should submit an application with only the required fields populated', () => {
element(by.id('email')).sendKeys('[email protected]');
element(by.id('number-of-trips')).sendKeys('10');
element(by.id('party-size')).sendKeys('11');
element(by.id('start-month')).sendKeys('10');
element(by.id('start-day')).sendKeys('10');
element(by.id('start-year')).sendKeys('2020');
element(by.id('individual-label')).click();
element(by.id('good-standing-evidence')).sendKeys(testSuccessFile);
element(by.id('location-description')).sendKeys('test');
element(by.id('services-provided')).sendKeys('test');
element(by.id('audience-description')).sendKeys('test');
element(by.id('description-of-cleanup-and-restoration')).sendKeys('test');
element(by.id('advertising-url')).sendKeys('http://test.com');
element(by.id('client-charges')).sendKeys('test');
element(by.id('signature')).sendKeys('test');
element(by.id('submit-application')).click();
// Fixed wait for the asynchronous submission before asserting the confirmation.
browser.sleep(3000);
expect<any>(element(by.css('app-root h2')).getText()).toEqual('Submitted for review!');
});
// Returns to a fresh form for the fully-populated submission scenario below.
it('should navigate back to temp outfitter', () => {
page.navigateTo();
expect<any>(element(by.css('app-root h1')).getText()).toEqual('Apply for a temporary outfitters permit with Open Forest.');
});
fieldValidation.validateFileUploadField('guide-document', 'xls');
fieldValidation.validateFileUploadField('acknowledgement-of-risk-form', 'pdf');
fieldValidation.validateFileUploadField('insurance-certificate', 'pdf');
fieldValidation.validateFileUploadField('operating-plan', 'pdf');
fieldValidation.validateFileUploadField('location-map', 'pdf');
// Exercises every optional section of the form (phones, business type,
// facilities, livestock, permits history, citations) before submitting.
it('should submit an application', () => {
element(by.css('.primary-permit-holder-first-name')).sendKeys('test');
element(by.css('.primary-permit-holder-last-name')).sendKeys('test');
element(by.css('.primary-permit-holder-address')).sendKeys('test');
element(by.css('.primary-permit-holder-address-line-2')).sendKeys('test');
element(by.css('.primary-permit-holder-city')).sendKeys('test');
element(by.css('.primary-permit-holder-state')).sendKeys('AK');
element(by.css('.primary-permit-holder-zip')).sendKeys('55555');
element(by.id('day-phone')).sendKeys('2222222222');
element(by.id('day-phone-ext')).sendKeys('2222');
element(by.id('add-additional-phone-label')).click();
element(by.id('evening-phone')).sendKeys('1111111111');
element(by.id('evening-phone-ext')).sendKeys('1111');
element(by.id('fax')).sendKeys('3333333333');
element(by.id('fax-extension')).sendKeys('');
element(by.id('email')).sendKeys('[email protected]');
element(by.id('website')).sendKeys('http://test.com');
element(by.id('llc-label')).click();
element(by.id('individual-label')).click();
element(by.id('individual-citizen-label')).click();
element(by.id('small-business-label')).click();
element(by.id('good-standing-evidence')).sendKeys(testSuccessFile);
element(by.id('number-of-trips')).sendKeys('10');
element(by.id('party-size')).sendKeys('11');
element(by.id('start-month')).sendKeys('10');
element(by.id('start-day')).sendKeys('10');
element(by.id('start-year')).sendKeys('2020');
element(by.id('location-description')).sendKeys('test');
element(by.id('services-provided')).sendKeys('test');
element(by.id('audience-description')).sendKeys('test');
element(by.id('need-government-facilities-label')).click();
element(by.id('list-of-government-facilities')).sendKeys('test');
element(by.id('need-temporary-improvements-label')).click();
element(by.id('list-of-temporary-improvements')).sendKeys('test');
element(by.id('have-motorized-equipment-label')).click();
element(by.id('statement-of-motorized-equipment')).sendKeys('test');
element(by.id('have-livestock-label')).click();
element(by.id('statement-of-transportation-of-livestock')).sendKeys('test');
element(by.id('need-assigned-site-label')).click();
element(by.id('statement-of-assigned-site')).sendKeys('test');
element(by.id('description-of-cleanup-and-restoration')).sendKeys('test');
element(by.id('no-promotional-website-label')).click();
element(by.id('advertising-description')).sendKeys('test');
element(by.id('client-charges')).sendKeys('test');
element(by.id('have-national-forest-permits-label')).click();
element(by.id('list-all-national-forest-permits')).sendKeys('test');
element(by.id('have-other-permits-label')).click();
element(by.id('list-all-other-permits')).sendKeys('test');
element(by.id('have-citations-label')).click();
element(by.id('list-all-citations')).sendKeys('test');
element(by.id('signature')).sendKeys('test');
element(by.id('submit-application')).click();
// Longer wait: the full application uploads several files before confirming.
browser.sleep(5000);
expect<any>(element(by.css('app-root h2')).getText()).toEqual('Submitted for review!');
});
});
| 52.543478 | 127 | 0.680458 |
bb4730677c5a9788a92eb97b33a5e073c8f625d8 | 182 | cs | C# | src/Vyr.Agents/IAgent.cs | kaep7n/vyr | 265c9a1a5f6300023db460c73bd135efa6e30e66 | [
"MIT"
] | null | null | null | src/Vyr.Agents/IAgent.cs | kaep7n/vyr | 265c9a1a5f6300023db460c73bd135efa6e30e66 | [
"MIT"
] | 58 | 2016-09-23T06:03:34.000Z | 2020-04-07T05:55:25.000Z | src/Vyr.Agents/IAgent.cs | kaep7n/vyr | 265c9a1a5f6300023db460c73bd135efa6e30e66 | [
"MIT"
] | null | null | null | using System.Threading.Tasks;
namespace Vyr.Agents
{
public interface IAgent
{
bool IsRunning { get; }
Task RunAsync();
Task IdleAsync();
}
}
| 13 | 31 | 0.576923 |
3687a983baac1833fb066e512ed89f875275337a | 301 | dart | Dart | lib/models/Todo.dart | osmankh/flutter-todo | d20a3b5ccd2ea303c50819d586a66f7b75302d10 | [
"MIT"
] | null | null | null | lib/models/Todo.dart | osmankh/flutter-todo | d20a3b5ccd2ea303c50819d586a66f7b75302d10 | [
"MIT"
] | null | null | null | lib/models/Todo.dart | osmankh/flutter-todo | d20a3b5ccd2ea303c50819d586a66f7b75302d10 | [
"MIT"
] | null | null | null | class Todo {
final int id;
final String label;
final int checked;
final int task_id;
Todo({this.id, this.label, this.checked, this.task_id});
Map<String, dynamic> toMap() {
return {
'id': id,
'label': label,
'checked': checked,
'task_id': task_id
};
}
} | 17.705882 | 58 | 0.581395 |
555467d726a71f689bb9ce24db66e8ac095deaf9 | 15,824 | sql | SQL | sample-data/SQL/countries.sql | iohexo/graph | 3e0d2e3187d2ba6d2e0c2e6d84f31ea267581972 | [
"Apache-2.0"
] | null | null | null | sample-data/SQL/countries.sql | iohexo/graph | 3e0d2e3187d2ba6d2e0c2e6d84f31ea267581972 | [
"Apache-2.0"
] | null | null | null | sample-data/SQL/countries.sql | iohexo/graph | 3e0d2e3187d2ba6d2e0c2e6d84f31ea267581972 | [
"Apache-2.0"
] | null | null | null | INSERT INTO countries
(CODE,DESCR)
VALUES ('AF','Afghanistan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AL','Albania');
INSERT INTO countries
(CODE,DESCR)
VALUES ('DZ','Algeria');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AS','American Samoa');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AD','Andorra');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AO','Angola');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AI','Anguilla');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AG','Antigua and Barbuda');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AR','Argentina');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AM','Armenia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AW','Aruba');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AT','Austria');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AU','Australia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AZ','Azerbajan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BS','Bahamas');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BH','Bahrain');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BD','Bangladesh');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BB','Barbados');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BY','Belarus');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BE','Belgium');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BZ','Belize');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BJ','Benin');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BM','Bermuda');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BT','Bhutan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BO','Bolivia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BQ','Bonaire - Sint Eustatius and Saba');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BA','Bosnia and Herzegovina');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BW','Botswana');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BR','Brazil');
INSERT INTO countries
(CODE,DESCR)
VALUES ('VG','British Virgin Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BN','Brunei');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BG','Bulgaria');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BF','Burkina Faso');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BI','Burundi');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KH','Cambodia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CM','Cameroon');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CV','Cabo Verde');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CA','Canada');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KY','Cayman Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CF','Central African Republic');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TD','Chad');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CL','Chile');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CN','China');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CX','Christmas Island');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CO','Colombia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CG','Congo');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CK','Cook Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CR','Costa Rica');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CI','Côte d''Ivoire');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CC','Cocos (Keeling) Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KM','Comoros');
INSERT INTO countries
(CODE,DESCR)
VALUES ('HR','Croatia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CU','Cuba');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CW','Curacao');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CY','Cyprus');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CZ','Czech Republic');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KP','Democratic People''s Republic of Korea');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CD','Democratic Republic of the Congo');
INSERT INTO countries
(CODE,DESCR)
VALUES ('DK','Denmark');
INSERT INTO countries
(CODE,DESCR)
VALUES ('DJ','Djibouti');
INSERT INTO countries
(CODE,DESCR)
VALUES ('DM','Dominica');
INSERT INTO countries
(CODE,DESCR)
VALUES ('DO','Dominican Republic');
INSERT INTO countries
(CODE,DESCR)
VALUES ('DE','Germany');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GL','Greenland');
INSERT INTO countries
(CODE,DESCR)
VALUES ('EC','Ecuador');
INSERT INTO countries
(CODE,DESCR)
VALUES ('EG','Egypt');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SV','El Salvador');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ER','Eritrea');
INSERT INTO countries
(CODE,DESCR)
VALUES ('EE','Estonia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ET','Ethiopia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GQ','Equatorial Guinea');
INSERT INTO countries
(CODE,DESCR)
VALUES ('FK','Falkland Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('FO','Faroe Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('FM','Federated States of Micronesia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('FJ','Fiji');
INSERT INTO countries
(CODE,DESCR)
VALUES ('FI','Finland');
INSERT INTO countries
(CODE,DESCR)
VALUES ('FR','France');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GF','French Guiana');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PF','French Polynesia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GA','Gabon');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GM','Gambia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GE','Georgia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GH','Ghana');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GI','Gibraltar');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GR','Greece');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GD','Grenada');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GU','Guam');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GP','Guadeloupe');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GT','Guatemala');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GG','Guernsey');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GN','Guinea');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GW','Guinea-Bissau');
INSERT INTO countries
(CODE,DESCR)
VALUES ('GY','Guyana');
INSERT INTO countries
(CODE,DESCR)
VALUES ('HT','Haiti');
INSERT INTO countries
(CODE,DESCR)
VALUES ('HN','Honduras');
INSERT INTO countries
(CODE,DESCR)
VALUES ('HK','Hong Kong');
INSERT INTO countries
(CODE,DESCR)
VALUES ('HU','Hungary');
INSERT INTO countries
(CODE,DESCR)
VALUES ('IS','Iceland');
INSERT INTO countries
(CODE,DESCR)
VALUES ('IN','India');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ID','Indonesia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('IQ','Iraq');
INSERT INTO countries
(CODE,DESCR)
VALUES ('IR','Iran');
INSERT INTO countries
(CODE,DESCR)
VALUES ('IE','Ireland');
INSERT INTO countries
(CODE,DESCR)
VALUES ('IM','Isle of Man');
INSERT INTO countries
(CODE,DESCR)
VALUES ('IL','Israel');
INSERT INTO countries
(CODE,DESCR)
VALUES ('IT','Italy');
INSERT INTO countries
(CODE,DESCR)
VALUES ('JM','Jamaica');
INSERT INTO countries
(CODE,DESCR)
VALUES ('JP','Japan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('JE','Jersey');
INSERT INTO countries
(CODE,DESCR)
VALUES ('JO','Jordan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KZ','Kazakhstan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KE','Kenya');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KI','Kiribati');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KS','Kosovo');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KW','Kuwait');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KG','Kyrgyzstan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LA','Lao PDR');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LV','Latvia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LB','Lebanon');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LS','Lesotho');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LR','Liberia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LY','Libya');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LI','Liechtenstein');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LT','Lithuania');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LU','Luxembourg');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MO','Macau');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MK','Macedonia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MG','Madagascar');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MW','Malawi');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MY','Malaysia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ML','Mali');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MT','Malta');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MV','Maldives');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MH','Marshall Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MQ','Martinique');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MR','Mauritania');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MU','Mauritius');
INSERT INTO countries
(CODE,DESCR)
VALUES ('YT','Mayotte');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MM','Myanmar');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MX','Mexico');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MC','Monaco');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ME','Montenegro');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MN','Mongolia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MS','Montserrat');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MA','Morocco');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MD','Moldova');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MZ','Mozambique');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NA','Namibia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NR','Nauru');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NP','Nepal');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NL','Netherlands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NC','New Caledonia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NI','Nicaragua');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NE','Niger');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NG','Nigeria');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NF','Norfolk Island');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MP','Northern Mariana Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NO','Norway');
INSERT INTO countries
(CODE,DESCR)
VALUES ('NZ','New Zealand');
INSERT INTO countries
(CODE,DESCR)
VALUES ('OM','Oman');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PA','Panama');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PK','Pakistan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PG','Papua New Guinea');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PY','Paraguay');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PW','Palau');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PE','Peru');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PH','Philippines');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PN','Pitcairn');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PL','Poland');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PT','Portugal');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PR','Puerto Rico');
INSERT INTO countries
(CODE,DESCR)
VALUES ('QA','Qatar');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KR','Republic of Korea');
INSERT INTO countries
(CODE,DESCR)
VALUES ('RE','Reunion');
INSERT INTO countries
(CODE,DESCR)
VALUES ('RO','Romania');
INSERT INTO countries
(CODE,DESCR)
VALUES ('RU','Russian Federation');
INSERT INTO countries
(CODE,DESCR)
VALUES ('RW','Rwanda');
INSERT INTO countries
(CODE,DESCR)
VALUES ('BL','Saint-Barthelemy');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SH','Saint Helena');
INSERT INTO countries
(CODE,DESCR)
VALUES ('KN','Saint Kitts and Nevis');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LC','Saint Lucia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('MF','Saint-Martin (France)');
INSERT INTO countries
(CODE,DESCR)
VALUES ('PM','Saint Pierre and Miquelon');
INSERT INTO countries
(CODE,DESCR)
VALUES ('VC','Saint Vincent and Grenadines');
INSERT INTO countries
(CODE,DESCR)
VALUES ('WS','Samoa');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SM','San Marino');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ST','Sao Tome and Principe');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SA','Saudi Arabia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SR','Suriname');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SC','Seychelles');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SL','Sierra Leone');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SN','Senegal');
INSERT INTO countries
(CODE,DESCR)
VALUES ('RS','Serbia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SG','Singapore');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SX','Sint Martin');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SK','Slovakia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SI','Slovenia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ES','Spain');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SO','Somalia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SB','Solomon Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ZA','South Africa');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SS','South Sudan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('LK','Sri Lanka');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SD','Sudan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SE','Sweden');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SZ','Swaziland');
INSERT INTO countries
(CODE,DESCR)
VALUES ('CH','Switzerland');
INSERT INTO countries
(CODE,DESCR)
VALUES ('SY','Syrian Arab Republic');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TW','Taiwan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TJ','Tajikistan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TZ','Tanzania');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TG','Togo');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TO','Tonga');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TH','Thailand');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TL','Timor-Leste');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TT','Trinidad and Tobago');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TN','Tunisia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TR','Turkey');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TM','Turkmenistan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TC','Turks and Caicos Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('TV','Tuvalu');
INSERT INTO countries
(CODE,DESCR)
VALUES ('UG','Uganda');
INSERT INTO countries
(CODE,DESCR)
VALUES ('UA','Ukraine');
INSERT INTO countries
(CODE,DESCR)
VALUES ('UK','United Kingdom');
INSERT INTO countries
(CODE,DESCR)
VALUES ('AE','United Arab Emirates');
INSERT INTO countries
(CODE,DESCR)
VALUES ('US','United States');
INSERT INTO countries
(CODE,DESCR)
VALUES ('UY','Uruguay');
INSERT INTO countries
(CODE,DESCR)
VALUES ('UZ','Uzbekistan');
INSERT INTO countries
(CODE,DESCR)
VALUES ('VU','Vanuatu');
INSERT INTO countries
(CODE,DESCR)
VALUES ('VE','Venezuela');
INSERT INTO countries
(CODE,DESCR)
VALUES ('VI','Virgin Islands (US)');
INSERT INTO countries
(CODE,DESCR)
VALUES ('VN','Viet Nam');
INSERT INTO countries
(CODE,DESCR)
VALUES ('WF','Wallis and Futuna Islands');
INSERT INTO countries
(CODE,DESCR)
VALUES ('EH','Western Sahara');
INSERT INTO countries
(CODE,DESCR)
VALUES ('YE','Yemen');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ZM','Zambia');
INSERT INTO countries
(CODE,DESCR)
VALUES ('ZW','Zimbabwe');
| 22.193548 | 57 | 0.676694 |
f8f85a34f18c825113ef3cfaddb67d4103cb9dd5 | 3,948 | cc | C++ | ns-allinone-3.27/ns-3.27/src/wimax/model/simple-ofdm-send-param.cc | zack-braun/4607_NS | 43c8fb772e5552fb44bd7cd34173e73e3fb66537 | [
"MIT"
] | 93 | 2019-04-21T08:22:26.000Z | 2022-03-30T04:26:29.000Z | ns-allinone-3.27/ns-3.27/src/wimax/model/simple-ofdm-send-param.cc | zack-braun/4607_NS | 43c8fb772e5552fb44bd7cd34173e73e3fb66537 | [
"MIT"
] | 12 | 2019-04-19T16:39:58.000Z | 2021-06-22T13:18:32.000Z | ns-allinone-3.27/ns-3.27/src/wimax/model/simple-ofdm-send-param.cc | zack-braun/4607_NS | 43c8fb772e5552fb44bd7cd34173e73e3fb66537 | [
"MIT"
] | 21 | 2019-05-27T19:36:12.000Z | 2021-07-26T02:37:41.000Z | /* -*- Mode:C++; c-file-style:"gnu"; indent-tabs-mode:nil; -*- */
/*
* Copyright (c) 2007,2008, 2009 INRIA, UDcast
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation;
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Author: Mohamed Amine Ismail <[email protected]>
* <[email protected]>
*/
#include "simple-ofdm-send-param.h"
#include "simple-ofdm-wimax-phy.h"
#include "simple-ofdm-wimax-channel.h"
namespace ns3 {

// simpleOfdmSendParam is a plain value carrier: it bundles the data and radio
// parameters for one transmission (either an FEC bit-block or a whole packet
// burst) handed from the simple OFDM PHY to the simple OFDM channel.

// Default constructor: zeroes all scalar fields and selects QPSK 1/2 modulation.
simpleOfdmSendParam::simpleOfdmSendParam (void)
{
// m_fecBlock = 0;
m_burstSize = 0;
m_isFirstBlock = 0;
m_frequency = 0;
m_modulationType = WimaxPhy::MODULATION_TYPE_QPSK_12;
m_direction = 0;
m_rxPowerDbm = 0;
}
// Constructs send parameters carrying an explicit FEC block (bit vector).
simpleOfdmSendParam::simpleOfdmSendParam (const bvec &fecBlock,
uint32_t burstSize,
bool isFirstBlock,
uint64_t Frequency,
WimaxPhy::ModulationType modulationType,
uint8_t direction,
double rxPowerDbm)
{
m_fecBlock = fecBlock;
m_burstSize = burstSize;
m_isFirstBlock = isFirstBlock;
m_frequency = Frequency;
m_modulationType = modulationType;
m_direction = direction;
m_rxPowerDbm = rxPowerDbm;
}
// Constructs send parameters carrying a whole packet burst instead of an FEC
// block (m_fecBlock is left default-constructed).
simpleOfdmSendParam::simpleOfdmSendParam (uint32_t burstSize,
bool isFirstBlock,
uint64_t Frequency,
WimaxPhy::ModulationType modulationType,
uint8_t direction,
double rxPowerDbm,
Ptr<PacketBurst> burst)
{
m_burstSize = burstSize;
m_isFirstBlock = isFirstBlock;
m_frequency = Frequency;
m_modulationType = modulationType;
m_direction = direction;
m_rxPowerDbm = rxPowerDbm;
m_burst = burst;
}
// Nothing to release explicitly; members clean up themselves.
simpleOfdmSendParam::~simpleOfdmSendParam (void)
{
}
// ---- Plain field setters: each stores its argument verbatim. ----
void
simpleOfdmSendParam::SetFecBlock (const bvec &fecBlock)
{
m_fecBlock = fecBlock;
}
void
simpleOfdmSendParam::SetBurstSize (uint32_t burstSize)
{
m_burstSize = burstSize;
}
void
simpleOfdmSendParam::SetIsFirstBlock (bool isFirstBlock)
{
m_isFirstBlock = isFirstBlock;
}
void
simpleOfdmSendParam::SetFrequency (uint64_t Frequency)
{
m_frequency = Frequency;
}
void
simpleOfdmSendParam::SetModulationType (WimaxPhy::ModulationType modulationType)
{
m_modulationType = modulationType;
}
void
simpleOfdmSendParam::SetDirection (uint8_t direction)
{
m_direction = direction;
}
void
simpleOfdmSendParam::SetRxPowerDbm (double rxPowerDbm)
{
m_rxPowerDbm = rxPowerDbm;
}
// ---- Plain field getters: each returns the stored value unchanged. ----
bvec
simpleOfdmSendParam::GetFecBlock (void)
{
return m_fecBlock;
}
uint32_t
simpleOfdmSendParam::GetBurstSize (void)
{
return m_burstSize;
}
bool
simpleOfdmSendParam::GetIsFirstBlock (void)
{
return m_isFirstBlock;
}
uint64_t
simpleOfdmSendParam::GetFrequency (void)
{
return m_frequency;
}
WimaxPhy::ModulationType
simpleOfdmSendParam::GetModulationType (void)
{
return m_modulationType;
}
uint8_t
simpleOfdmSendParam::GetDirection (void)
{
return m_direction;
}
double
simpleOfdmSendParam::GetRxPowerDbm (void)
{
return m_rxPowerDbm;
}
Ptr<PacketBurst>
simpleOfdmSendParam::GetBurst (void)
{
return m_burst;
}
}
| 25.146497 | 82 | 0.664387 |
b89fb7891ced41338d0b2c020a66001d277672b7 | 4,813 | h | C | src/mame/includes/nbmj8891.h | Robbbert/messui | 49b756e2140d8831bc81335298ee8c5471045e79 | [
"BSD-3-Clause"
] | 26 | 2015-03-31T06:25:51.000Z | 2021-12-14T09:29:04.000Z | src/mame/includes/nbmj8891.h | Robbbert/messui | 49b756e2140d8831bc81335298ee8c5471045e79 | [
"BSD-3-Clause"
] | null | null | null | src/mame/includes/nbmj8891.h | Robbbert/messui | 49b756e2140d8831bc81335298ee8c5471045e79 | [
"BSD-3-Clause"
] | 10 | 2015-03-27T05:45:51.000Z | 2022-02-04T06:57:36.000Z | // license:BSD-3-Clause
// copyright-holders:Takahiro Nogi
#include "machine/nb1413m3.h"
#include "emupal.h"
#include "screen.h"
// MAME driver_device state for the nbmj8891 hardware family. Bundles the
// NB1413M3 device, the software blitter state, palette/CLUT handlers and the
// per-game machine configurations and init entry points declared below.
class nbmj8891_state : public driver_device
{
public:
enum
{
TIMER_BLITTER // device_timer id dispatched in device_timer() for blitter completion
};
nbmj8891_state(const machine_config &mconfig, device_type type, const char *tag)
: driver_device(mconfig, type, tag)
, m_maincpu(*this, "maincpu")
, m_nb1413m3(*this, "nb1413m3")
, m_screen(*this, "screen")
, m_palette(*this, "palette")
, m_clut_ptr(*this, "protection") // optional CLUT data from the "protection" region
{
}
// Machine configurations, one per supported game variant.
void mjfocusm(machine_config &config);
void mjfocus(machine_config &config);
void bananadr(machine_config &config);
void scandal(machine_config &config);
void hanamomo(machine_config &config);
void telmahjn(machine_config &config);
void pairsten(machine_config &config);
void club90s(machine_config &config);
void mgion(machine_config &config);
void chinmoku(machine_config &config);
void msjiken(machine_config &config);
void hnageman(machine_config &config);
void mjcamerb(machine_config &config);
void mjnanpas(machine_config &config);
void mmcamera(machine_config &config);
void pairsnb(machine_config &config);
void taiwanmb(machine_config &config);
void hanaoji(machine_config &config);
void lovehous(machine_config &config);
void hnxmasev(machine_config &config);
void mmaiko(machine_config &config);
void maiko(machine_config &config);
void mladyhtr(machine_config &config);
void omotesnd(machine_config &config);
void abunai(machine_config &config);
void gionbana(machine_config &config);
void mgmen89(machine_config &config);
void scandalm(machine_config &config);
// Per-game driver init entry points.
void init_pairsten();
void init_telmahjn();
void init_gionbana();
void init_omotesnd();
void init_scandal();
void init_mgmen89();
void init_mjfocusm();
void init_mjfocus();
void init_pairsnb();
void init_mjnanpas();
DECLARE_READ_LINE_MEMBER(nb1413m3_outcoin_flag_r);
private:
// Required/optional devices resolved by tag in the constructor above.
required_device<cpu_device> m_maincpu;
required_device<nb1413m3_device> m_nb1413m3;
required_device<screen_device> m_screen;
required_device<palette_device> m_palette;
optional_region_ptr<uint8_t> m_clut_ptr;
// Video/blitter registers, latched by the handlers declared below
// (blitter_w, scrolly_w, vramsel_w, romsel_w, clutsel_w).
int m_scrolly;
int m_blitter_destx;
int m_blitter_desty;
int m_blitter_sizex;
int m_blitter_sizey;
int m_blitter_src_addr;
int m_blitter_direction_x;
int m_blitter_direction_y;
int m_vram;
int m_gfxrom;
int m_dispflag;
int m_flipscreen;
int m_clutsel;
int m_screen_refresh;
int m_gfxdraw_mode;
// Off-screen bitmaps and backing RAM for the two drawable layers.
bitmap_ind16 m_tmpbitmap0;
bitmap_ind16 m_tmpbitmap1;
std::unique_ptr<uint8_t[]> m_videoram0;
std::unique_ptr<uint8_t[]> m_videoram1;
std::unique_ptr<uint8_t[]> m_palette_ptr;
std::unique_ptr<uint8_t[]> m_clut;
// State used by the taiwanmb MCU handler (taiwanmb_mcu_w).
int m_param_old[0x10];
int m_param_cnt;
int m_flipscreen_old;
emu_timer *m_blitter_timer;
// Palette read/write handlers; three layouts are supported (type1/2/3).
uint8_t palette_type1_r(offs_t offset);
void palette_type1_w(offs_t offset, uint8_t data);
uint8_t palette_type2_r(offs_t offset);
void palette_type2_w(offs_t offset, uint8_t data);
uint8_t palette_type3_r(offs_t offset);
void palette_type3_w(offs_t offset, uint8_t data);
void clutsel_w(uint8_t data);
uint8_t clut_r(offs_t offset);
void clut_w(offs_t offset, uint8_t data);
void blitter_w(offs_t offset, uint8_t data);
void scrolly_w(uint8_t data);
void vramsel_w(uint8_t data);
void romsel_w(uint8_t data);
// taiwanmb-specific I/O handlers.
uint8_t taiwanmb_unk_r();
void taiwanmb_blitter_w(offs_t offset, uint8_t data);
void taiwanmb_gfxdraw_w(uint8_t data);
void taiwanmb_gfxflag_w(uint8_t data);
void taiwanmb_mcu_w(uint8_t data);
virtual void video_start() override;
DECLARE_VIDEO_START(_1layer);
uint32_t screen_update(screen_device &screen, bitmap_ind16 &bitmap, const rectangle &cliprect);
// Video helpers: flip a VRAM layer, refresh single pixels, run the blit.
void vramflip(int vram);
void update_pixel0(int x, int y);
void update_pixel1(int x, int y);
void gfxdraw();
// Save-state registration and post-load screen refresh.
void common_save_state();
void postload();
// Per-game address map builders.
void bananadr_io_map(address_map &map);
void club90s_map(address_map &map);
void gionbana_io_map(address_map &map);
void gionbana_map(address_map &map);
void hanamomo_io_map(address_map &map);
void hanamomo_map(address_map &map);
void hanaoji_map(address_map &map);
void hnageman_map(address_map &map);
void hnxmasev_map(address_map &map);
void lovehous_io_map(address_map &map);
void lovehous_map(address_map &map);
void maiko_io_map(address_map &map);
void maiko_map(address_map &map);
void mgion_io_map(address_map &map);
void mgion_map(address_map &map);
void mmaiko_map(address_map &map);
void msjiken_io_map(address_map &map);
void omotesnd_io_map(address_map &map);
void omotesnd_map(address_map &map);
void scandal_io_map(address_map &map);
void scandalm_io_map(address_map &map);
void scandalm_map(address_map &map);
void taiwanmb_io_map(address_map &map);
void taiwanmb_map(address_map &map);
virtual void device_timer(emu_timer &timer, device_timer_id id, int param, void *ptr) override;
};
| 30.08125 | 96 | 0.783295 |
0566524fa9092ac00f85807dd1b3596c8887f014 | 1,096 | rb | Ruby | db/post_migrate/20200608072931_backfill_imported_snippet_repositories.rb | glimmerhq/glimmerhq | e2cbbd9edadb5c3e40e7c29dd501e3bb26c0889f | [
"MIT"
] | 2 | 2020-10-10T05:58:12.000Z | 2020-10-10T09:30:57.000Z | db/post_migrate/20200608072931_backfill_imported_snippet_repositories.rb | glimmerhq/glimmerhq | e2cbbd9edadb5c3e40e7c29dd501e3bb26c0889f | [
"MIT"
] | 818 | 2016-07-12T01:02:16.000Z | 2022-03-31T23:54:44.000Z | db/post_migrate/20200608072931_backfill_imported_snippet_repositories.rb | glimmerhq/glimmerhq | e2cbbd9edadb5c3e40e7c29dd501e3bb26c0889f | [
"MIT"
] | 3 | 2020-11-04T05:28:36.000Z | 2020-11-06T04:49:03.000Z | # frozen_string_literal: true
# Post-deployment migration that schedules background jobs to create missing
# snippet Git repositories for snippets imported without one.
class BackfillImportedSnippetRepositories < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers
  # Post-deployment migration: requires no downtime.
  DOWNTIME = false
  # Delay (seconds) between consecutively scheduled background jobs.
  DELAY_INTERVAL = 2.minutes.to_i
  # Number of snippet rows scanned per batch query.
  BATCH_SIZE = 200
  # Name of the background-migration class that performs the backfill.
  MIGRATION = 'BackfillSnippetRepositories'
  disable_ddl_transaction!
  # Minimal in-migration model so the migration does not depend on app code.
  class Snippet < ActiveRecord::Base
    include EachBatch
    self.table_name = 'snippets'
    self.inheritance_column = :_type_disabled
  end
  # Minimal model over the snippet_repositories table.
  class SnippetRepository < ActiveRecord::Base
    self.table_name = 'snippet_repositories'
  end
  # Finds snippets lacking a repository row and schedules one delayed
  # background job per run of consecutive ids.
  def up
    index = 1
    Snippet.select(:id).where.not(id: SnippetRepository.select(:snippet_id)).each_batch(of: BATCH_SIZE, column: 'id') do |batch|
      split_in_consecutive_batches(batch).each do |ids_batch|
        migrate_in(index * DELAY_INTERVAL, MIGRATION, [ids_batch.first, ids_batch.last])
        index += 1
      end
    end
  end
  # Data backfill cannot be reversed safely.
  def down
    # no-op
  end
  private
  # Splits the batch ids into runs of consecutive integers so each scheduled
  # job receives a dense [first, last] range.
  # NOTE(review): ids.include?(i) is O(n) per element; a Set would be cheaper
  # for large batches — confirm before changing.
  def split_in_consecutive_batches(relation)
    ids = relation.pluck(:id)
    (ids.first..ids.last).to_a.split {|i| !ids.include?(i) }.select(&:present?)
  end
end
| 22.833333 | 128 | 0.714416 |
cd72f5fa197b677dea6bef8d663f9ff36cc335a5 | 1,461 | cs | C# | ExpressionEngine/FunctionExpressions/CosExpression.cs | webmaster442/ExpressionEngine | 458331209aabcaa189221e51bedc827a95d967f3 | [
"MIT"
] | null | null | null | ExpressionEngine/FunctionExpressions/CosExpression.cs | webmaster442/ExpressionEngine | 458331209aabcaa189221e51bedc827a95d967f3 | [
"MIT"
] | null | null | null | ExpressionEngine/FunctionExpressions/CosExpression.cs | webmaster442/ExpressionEngine | 458331209aabcaa189221e51bedc827a95d967f3 | [
"MIT"
] | null | null | null | //-----------------------------------------------------------------------------
// (c) 2020 Ruzsinszki Gábor
// This code is licensed under MIT license (see LICENSE for details)
//-----------------------------------------------------------------------------
using ExpressionEngine.Base;
using ExpressionEngine.BaseExpressions;
using ExpressionEngine.Maths;
using ExpressionEngine.Numbers;
using System;
namespace ExpressionEngine.FunctionExpressions
{
    /// <summary>
    /// Expression node representing the trigonometric cosine of a child expression.
    /// </summary>
    internal sealed class CosExpression : UnaryExpression
    {
        /// <summary>
        /// Creates a cosine node wrapping <paramref name="child"/>.
        /// </summary>
        /// <param name="child">Expression whose cosine is taken; may be null.</param>
        public CosExpression(IExpression? child) : base(child)
        {
        }
        /// <summary>
        /// Differentiates via the chain rule: d/dx cos(u) = -sin(u) * u'.
        /// </summary>
        public override IExpression? Differentiate(string byVariable)
        {
            return new MultiplyExpression(new NegateExpression(new SinExpression(Child)), Child?.Differentiate(byVariable));
        }
        /// <summary>
        /// Simplifies the child first; if it folds to a constant, the cosine is
        /// evaluated immediately, otherwise a new node wraps the simplified child.
        /// </summary>
        public override IExpression? Simplify()
        {
            var newChild = Child?.Simplify();
            if (newChild is ConstantExpression childConst)
            {
                // child is constant
                return new ConstantExpression(Evaluate(childConst.Value));
            }
            return new CosExpression(newChild);
        }
        /// <summary>Renders the node as "cos(child)".</summary>
        public override string ToString(IFormatProvider formatProvider)
        {
            return $"cos({Child})";
        }
        /// <summary>
        /// Numeric evaluation; the result is rounded to 21 places to absorb
        /// trailing representation noise.
        /// </summary>
        protected override Number Evaluate(Number number)
        {
            return NumberMath.Round(NumberMath.Cos(number), 21);
        }
    }
}
| 31.085106 | 124 | 0.568789 |
af59165c25dfc49bc06440d9d1871b1138bd54ec | 970 | py | Python | backend/underbudget/schemas/envelope.py | vimofthevine/underbudget4 | c90eecf9879f7ce57c77a68b3f83b1d76c4451af | [
"MIT"
] | null | null | null | backend/underbudget/schemas/envelope.py | vimofthevine/underbudget4 | c90eecf9879f7ce57c77a68b3f83b1d76c4451af | [
"MIT"
] | 45 | 2019-12-23T23:45:10.000Z | 2022-03-31T05:01:22.000Z | backend/underbudget/schemas/envelope.py | vimofthevine/underbudget4 | c90eecf9879f7ce57c77a68b3f83b1d76c4451af | [
"MIT"
] | 1 | 2020-12-26T17:16:58.000Z | 2020-12-26T17:16:58.000Z | """ Envelope schema """
from marshmallow import Schema, fields, validate
class EnvelopeSchema(Schema):
    """Marshmallow schema serializing a single envelope."""
    # Surrogate key: emitted on dump, rejected on load.
    id = fields.Integer(dump_only=True)
    # Owning category; exposed to clients as camelCase "categoryId".
    category_id = fields.Integer(data_key="categoryId")
    # Display name; required and must be non-empty.
    name = fields.String(required=True, validate=validate.Length(min=1))
    # Defaults to un-archived when absent from the input.
    archived = fields.Boolean(missing=False)
    # Optional external-system identifier; defaults to the empty string.
    external_id = fields.String(data_key="externalId", missing="")
    # Audit timestamps, server-managed: dump only.
    created = fields.DateTime(dump_only=True)
    last_updated = fields.DateTime(data_key="lastUpdated", dump_only=True)
class EnvelopeCategorySchema(Schema):
    """Marshmallow schema serializing an envelope category with its envelopes."""
    # Surrogate key: emitted on dump, rejected on load.
    id = fields.Integer(dump_only=True)
    # Display name; required and must be non-empty.
    name = fields.String(required=True, validate=validate.Length(min=1))
    # Child envelopes, dump-only and trimmed to the fields the list view needs.
    envelopes = fields.List(
        fields.Nested(EnvelopeSchema, only=["id", "name", "archived"]), dump_only=True
    )
    # Audit timestamps, server-managed: dump only.
    created = fields.DateTime(dump_only=True)
    last_updated = fields.DateTime(data_key="lastUpdated", dump_only=True)
| 35.925926 | 86 | 0.717526 |
3a71b3ac8cd39a657b65824d87e57350163c28c3 | 2,231 | lua | Lua | 2015/Severance/gamemodes/cwseverance/plugins/sevsweps/plugin/entities/weapons/bs_sniper_base/cl_init.lua | kurozael/project-archive | dcac1a9a8dee65c11f4a631ec08d6eef320f5ad0 | [
"MIT"
] | null | null | null | 2015/Severance/gamemodes/cwseverance/plugins/sevsweps/plugin/entities/weapons/bs_sniper_base/cl_init.lua | kurozael/project-archive | dcac1a9a8dee65c11f4a631ec08d6eef320f5ad0 | [
"MIT"
] | null | null | null | 2015/Severance/gamemodes/cwseverance/plugins/sevsweps/plugin/entities/weapons/bs_sniper_base/cl_init.lua | kurozael/project-archive | dcac1a9a8dee65c11f4a631ec08d6eef320f5ad0 | [
"MIT"
] | 2 | 2020-03-16T22:49:09.000Z | 2022-01-02T23:04:32.000Z | --[[ TETA_BONITA MADE THE FADING EFFECT. THANKS TETA_BONITA IF YOU'RE STILL ALIVE. --]]
include("shared.lua")
local iScreenWidth = surface.ScreenWidth()
local iScreenHeight = surface.ScreenHeight()
local SCOPEFADE_TIME = 1
-- Draws the sniper-scope overlay: a brief full-screen black fade whenever the
-- scope state or zoom changes, then (while scoped) a crosshair plus the
-- configured scope-lens texture.
function SWEP:DrawHUD()
	if self.UseScope then
		local bScope = self.Weapon:GetNetworkedBool("Scope")
		-- Restart the fade timer when the scope is toggled, or (while scoped)
		-- when the zoom level changes.
		if bScope ~= self.bLastScope then
			self.bLastScope = bScope
			self.fScopeTime = CurTime()
		elseif bScope then
			local fScopeZoom = self.Weapon:GetNetworkedFloat("ScopeZoom")
			if fScopeZoom ~= self.fLastScopeZoom then
				self.fLastScopeZoom = fScopeZoom
				self.fScopeTime = CurTime()
			end
		end
		local fScopeTime = self.fScopeTime or 0
		-- Within SCOPEFADE_TIME of the last change: fade a black rect from
		-- opaque to transparent, and hide the viewmodel while scoped.
		if fScopeTime > CurTime() - SCOPEFADE_TIME then
			local Mul = 3.0
			Mul = 1 - math.Clamp((CurTime() - fScopeTime)/SCOPEFADE_TIME, 0, 1)
			if self.Weapon:GetNetworkedBool("Scope") then
				self.Owner:DrawViewModel(false)
			else
				self.Owner:DrawViewModel(true)
			end
			surface.SetDrawColor(0, 0, 0, 255*Mul)
			surface.DrawRect(0,0,iScreenWidth,iScreenHeight)
		end
		if (bScope) then
			// Draw the crosshair
			-- Skipped for red-dot / mil-dot style scopes that ship their own.
			if not (self.ScopeReddot or self.ScopeMs) then
				surface.SetDrawColor(0, 0, 0, 255)
				surface.DrawLine(self.CrossHairTable.x11, self.CrossHairTable.y11, self.CrossHairTable.x12, self.CrossHairTable.y12)
				surface.DrawLine(self.CrossHairTable.x21, self.CrossHairTable.y21, self.CrossHairTable.x22, self.CrossHairTable.y22)
			end
			// Put the texture
			-- Pick the lens overlay matching whichever ScopeN flag this SWEP sets.
			surface.SetDrawColor(0, 0, 0, 255)
			if (self.Scope1) then
				surface.SetTexture(surface.GetTextureID("ph_scope/ph_scope_lens"))
			elseif (self.Scope2) then
				surface.SetTexture(surface.GetTextureID("ph_scope/ph_scope_lens2"))
			elseif (self.Scope3) then
				surface.SetTexture(surface.GetTextureID("ph_scope/ph_scope_lens3"))
			elseif (self.Scope4) then
				surface.SetTexture(surface.GetTextureID("ph_scope/ph_scope_lens4"))
			elseif (self.Scope5) then
				surface.SetTexture(surface.GetTextureID("ph_scope/ph_scope_lens5"))
			elseif (self.Scope6) then
				surface.SetTexture(surface.GetTextureID("ph_scope/ph_scope_lens6"))
			end
			surface.DrawTexturedRect(0, 0, iScreenWidth, iScreenHeight)
		end
	end
end | 29.746667 | 120 | 0.727476 |
70666cc9346f32437dccb5e903a66c9843e1095a | 1,325 | lua | Lua | StarfallEx/Libraries/PNGWriter/Example.lua | Vurv78/lua | 715c654a6a3130c8de3142a700d5463a73f3ebe0 | [
"Apache-2.0"
] | 2 | 2021-08-08T22:06:37.000Z | 2022-02-11T12:29:50.000Z | StarfallEx/Libraries/PNGWriter/Example.lua | Vurv78/Lua | 7b07d4a13f16ea5222229c5cda24090bcf6e98be | [
"Apache-2.0"
] | 2 | 2020-06-23T08:25:57.000Z | 2020-12-09T04:17:11.000Z | StarfallEx/Libraries/PNGWriter/Example.lua | Vurv78/starfallex-creations | 7b07d4a13f16ea5222229c5cda24090bcf6e98be | [
"Apache-2.0"
] | null | null | null | --@name PNG Library Example 2
--@author Vurv
--@client
--@include pnglib.txt
-- Owner-only example: rendering and the PNG export happen client side.
if player() ~= owner() then return end
local createPNG = require("pnglib.txt")
-- True while we are comfortably below the CPU quota (40% of the maximum).
local function canRun()
    return quotaTotalAverage() < quotaMax()*0.4
end
-- Draws a 512x512 red/green gradient into a render target while writing the
-- same pixels into the PNG, yielding whenever the CPU quota gets tight.
local main_routine = coroutine.create(function()
    render.createRenderTarget("rt")
    render.selectRenderTarget("rt")
    local png = createPNG(512, 512, "rgb") -- Create the png
    -- Maps a 0..511 pixel coordinate onto the 0..255 channel range.
    local to_col = 255/512
    -- Make sure you don't write RGB that goes over 255. At worst it might break the image, best it'll just write black pixels.
    for Y = 0,511 do
        for X = 0,511 do
            render.setRGBA(X * to_col,Y * to_col,0,255)
            png:writeRGB(X * to_col,Y * to_col,0)
            render.drawRectFast(X,Y,1,1)
            if not canRun() then
                coroutine.yield()
                render.selectRenderTarget("rt") -- Re-select the RT when we continue
            end
        end
    end
    print("Finished drawing.")
    png:export("bruh.png")
end)
-- Resume the drawing coroutine off-screen whenever quota allows.
hook.add("renderoffscreen","",function()
    if canRun() then
        if coroutine.status(main_routine) ~= "dead" then
            coroutine.resume(main_routine)
        end
    end
end)
-- Blit the render target to the screen every frame.
hook.add("render","",function()
    render.setRenderTargetTexture("rt")
    render.drawTexturedRect(0,0,1024,1024)
end) | 28.804348 | 127 | 0.630189 |
c67aa9d2f3d66a14ff0ce79951866368734a7c25 | 276 | py | Python | dynamic_image_networks/hmdb51/utilities/calculate_training_metrics.py | DoranLyong/dynamic-images-for-action-recognition | 06a68c2337b45c44a8c7ec50e94585a9b9615ad0 | [
"MIT"
] | 22 | 2018-09-14T00:32:41.000Z | 2020-10-23T11:19:12.000Z | dynamic_image_networks/hmdb51/utilities/calculate_training_metrics.py | DoranLyong/dynamic-images-for-action-recognition | 06a68c2337b45c44a8c7ec50e94585a9b9615ad0 | [
"MIT"
] | 1 | 2020-07-08T03:10:54.000Z | 2020-07-13T19:04:26.000Z | dynamic_image_networks/hmdb51/utilities/calculate_training_metrics.py | DoranLyong/dynamic-images-for-action-recognition | 06a68c2337b45c44a8c7ec50e94585a9b9615ad0 | [
"MIT"
] | 7 | 2018-11-01T02:32:09.000Z | 2020-10-03T12:19:02.000Z | import torch
import numpy as np
def calculate_accuracy(y_pred, y_true):
    """Fraction of samples whose highest-scoring class matches the label.

    Args:
        y_pred: Tensor of shape (N, C) holding per-class scores/logits.
        y_true: Tensor of shape (N,) holding integer class labels.

    Returns:
        float: correct predictions divided by N.
    """
    # Inspired by https://pytorch.org/tutorials/beginner/blitz/cifar10_tutorial.html.
    predicted_labels = y_pred.argmax(dim=1)
    correct = (predicted_labels == y_true).sum().item()
    return correct / len(y_pred)
| 27.6 | 87 | 0.710145 |
c96bad685ee180ee39cee136f5d59721dfa4236c | 3,490 | tsx | TypeScript | src/CarouselNotice/index.tsx | joye61/cl-util | 4dca7d807c731c008248ea90fc05674f823726ed | [
"MIT"
] | 38 | 2018-12-28T10:32:17.000Z | 2019-12-18T02:42:40.000Z | src/CarouselNotice/index.tsx | joye61/cl-util | 4dca7d807c731c008248ea90fc05674f823726ed | [
"MIT"
] | null | null | null | src/CarouselNotice/index.tsx | joye61/cl-util | 4dca7d807c731c008248ea90fc05674f823726ed | [
"MIT"
] | 5 | 2019-09-25T13:21:55.000Z | 2019-11-12T09:08:46.000Z | /** @jsx jsx */
import { jsx, Interpolation, css, Theme } from "@emotion/react";
import * as CSS from "csstype";
import { useState, useEffect } from "react";
import { useInterval } from "../Effect/useInterval";
import { style, Bubble } from "./style";
export interface CarouselNoticeOption extends React.DetailedHTMLProps<React.HTMLAttributes<HTMLDivElement>, HTMLDivElement> {
  // Items to cycle through
  list: Array<React.ReactNode>;
  // Width of the scrolling container
  width: CSS.Property.Width;
  // Height of the scrolling container
  height: CSS.Property.Height;
  // Horizontal alignment of the content; defaults to "center"
  justify: "start" | "center" | "end";
  // Duration of a single bubble transition in milliseconds; defaults to 200
  duration: number;
  // Interval between bubble transitions in milliseconds; defaults to 3000
  interval: number;
  // Styles for the outer container
  containerStyle?: Interpolation<Theme>;
  // Styles for the inner wrapper
  wrapperStyle?: Interpolation<Theme>;
  // Styles for each item
  itemStyle?: Interpolation<Theme>;
}
/**
 * Scrolling, looping notice carousel: bubbles one item upward every
 * `interval` ms, animating each transition for `duration` ms.
 * @param props
 */
export function CarouselNotice(props: Partial<CarouselNoticeOption>) {
  const { width, height, justify = "center", interval = 3000, duration = 200, list = [], containerStyle, wrapperStyle, itemStyle, ...attrs } = props;
  // Index of the item currently at the front.
  const [current, setCurrent] = useState<number>(0);
  // Whether a bubble animation is in flight.
  const [animation, setAnimation] = useState<boolean>(false);
  /**
   * Reset to the head of the list (and stop animating) whenever the list changes.
   */
  useEffect(() => {
    setCurrent(0);
    setAnimation(false);
  }, [list]);
  /**
   * Trigger one bubble animation per interval; single-item lists never animate.
   */
  useInterval(
    () => {
      setAnimation(true);
    },
    list.length > 1 ? interval : null
  );
  /**
   * Render only the two items taking part in the current transition.
   */
  const showContent = () => {
    // Translate the `justify` prop into a flexbox justify-content value.
    const justifyStyle: Interpolation<any> = {};
    if (justify === "center") {
      justifyStyle.justifyContent = "center";
    } else if (justify === "start") {
      justifyStyle.justifyContent = "flex-start";
    } else if (justify === "end") {
      justifyStyle.justifyContent = "flex-end";
    } else {
      justifyStyle.justifyContent = "center";
    }
    const itemCss = [style.item, justifyStyle];
    // One item: render it alone, no rotation partner needed.
    if (list.length === 1) {
      return (
        <div css={[itemCss, itemStyle]} key={0}>
          {list[0]}
        </div>
      );
    }
    const showList: Array<React.ReactNode> = [];
    // At the tail, pair the last item with the first to wrap around.
    if (current === list.length - 1) {
      showList.push(
        <div css={[itemCss, itemStyle]} key={current}>
          {list[list.length - 1]}
        </div>
      );
      showList.push(
        <div css={[itemCss, itemStyle]} key={0}>
          {list[0]}
        </div>
      );
    } else {
      showList.push(
        <div css={[itemCss, itemStyle]} key={current}>
          {list[current]}
        </div>
      );
      showList.push(
        <div css={[itemCss, itemStyle]} key={current + 1}>
          {list[current + 1]}
        </div>
      );
    }
    return showList;
  };
  /**
   * Returns the bubble animation CSS, or null while idle / single-item.
   */
  const getAnimation = () => {
    if (!animation || list.length <= 1) {
      return null;
    }
    return css({
      animationName: Bubble,
      animationTimingFunction: "linear",
      animationDuration: `${duration}ms`,
    });
  };
  /**
   * Advance to the next item (wrapping to 0) once a transition finishes.
   */
  const animationEnd = () => {
    let newIndex = current + 1;
    if (current >= list.length - 1) {
      newIndex = 0;
    }
    setCurrent(newIndex);
    setAnimation(false);
  };
  return (
    Array.isArray(list) &&
    list.length > 0 && (
      <div {...attrs} css={[style.box, { width, height }, containerStyle]}>
        <div onAnimationEnd={animationEnd} css={[style.wrapper, getAnimation(), wrapperStyle]}>
          {showContent()}
        </div>
      </div>
    )
  );
}
| 23.90411 | 149 | 0.574212 |
38771d74cfeb86a93fa98bb01ade4f76435b0a0f | 773 | php | PHP | vendor/everlutionsk/email-bundle-2/Everlution/EmailBundle/Outbound/Message/UniqueOutboundMessage.php | rasbak/gamehub | 7221af9a0742c69cf32d66b8ff7a9f12d28b2d73 | [
"MIT"
] | null | null | null | vendor/everlutionsk/email-bundle-2/Everlution/EmailBundle/Outbound/Message/UniqueOutboundMessage.php | rasbak/gamehub | 7221af9a0742c69cf32d66b8ff7a9f12d28b2d73 | [
"MIT"
] | null | null | null | vendor/everlutionsk/email-bundle-2/Everlution/EmailBundle/Outbound/Message/UniqueOutboundMessage.php | rasbak/gamehub | 7221af9a0742c69cf32d66b8ff7a9f12d28b2d73 | [
"MIT"
] | null | null | null | <?php
namespace Everlution\EmailBundle\Outbound\Message;
use Everlution\EmailBundle\Message\IdentifiableMessage;
/**
 * Immutable pairing of an outbound message with the identifier it was
 * stored/queued under.
 */
class UniqueOutboundMessage implements IdentifiableMessage
{
    /** @var string Identifier assigned to the message. */
    protected $messageId;
    /** @var OutboundMessage The wrapped outbound message. */
    protected $message;
    /**
     * @param string $messageId
     * @param OutboundMessage $message
     */
    public function __construct($messageId, OutboundMessage $message)
    {
        $this->messageId = $messageId;
        $this->message = $message;
    }
    /**
     * Identifier assigned to the message.
     *
     * @return string
     */
    public function getMessageId()
    {
        return $this->messageId;
    }
    /**
     * The wrapped outbound message.
     *
     * @return OutboundMessage
     */
    public function getMessage()
    {
        return $this->message;
    }
} | 18.404762 | 69 | 0.615783 |
6d03372f0bf5b0525f4e841f989a3358e82cd9e7 | 159 | ts | TypeScript | frontend-angular/projects/ui/autocomplete/public-api.ts | alaindet/mazu | 2945037bf58924f2e1568ebb5a7c0b98de8d44af | [
"MIT"
] | null | null | null | frontend-angular/projects/ui/autocomplete/public-api.ts | alaindet/mazu | 2945037bf58924f2e1568ebb5a7c0b98de8d44af | [
"MIT"
] | null | null | null | frontend-angular/projects/ui/autocomplete/public-api.ts | alaindet/mazu | 2945037bf58924f2e1568ebb5a7c0b98de8d44af | [
"MIT"
] | null | null | null | export * from './autocomplete-item/autocomplete-item.component';
export * from './autocomplete.module';
export * from './autocomplete/autocomplete.component';
| 39.75 | 64 | 0.767296 |
0dcddc261e0d438f73c18e1ed14e2a6a917535c8 | 142 | cs | C# | src/SFA.DAS.ApprenticeCommitments.Web/Services/IHasBackLink.cs | SkillsFundingAgency/das-apprentice-commitments-web | a27cf6878b0d4179ec19a7ebb8fb1a53afa3162c | [
"MIT"
] | null | null | null | src/SFA.DAS.ApprenticeCommitments.Web/Services/IHasBackLink.cs | SkillsFundingAgency/das-apprentice-commitments-web | a27cf6878b0d4179ec19a7ebb8fb1a53afa3162c | [
"MIT"
] | 77 | 2021-02-12T13:44:36.000Z | 2022-03-31T10:13:34.000Z | src/SFA.DAS.ApprenticeCommitments.Web/Services/IHasBackLink.cs | SkillsFundingAgency/das-apprentice-commitments-web | a27cf6878b0d4179ec19a7ebb8fb1a53afa3162c | [
"MIT"
] | 1 | 2021-04-11T08:32:47.000Z | 2021-04-11T08:32:47.000Z | namespace SFA.DAS.ApprenticeCommitments.Web.Services
{
    /// <summary>
    /// Implemented by models that expose a "back" navigation target.
    /// </summary>
    public interface IHasBackLink
    {
        /// <summary>Target of the back link (presumably a URL/route — confirm against consumers).</summary>
        string Backlink { get; }
    }
} | 20.285714 | 54 | 0.640845 |
f62b23f95c442075e0a6cadee50fc25b565bf479 | 4,132 | cpp | C++ | src/download.cpp | toyobayashi/evm-windows | 5734cfe8555ccc492671e15bc30b54467681c749 | [
"MIT"
] | null | null | null | src/download.cpp | toyobayashi/evm-windows | 5734cfe8555ccc492671e15bc30b54467681c749 | [
"MIT"
] | null | null | null | src/download.cpp | toyobayashi/evm-windows | 5734cfe8555ccc492671e15bc30b54467681c749 | [
"MIT"
] | null | null | null | #include <sstream>
#include <math.h>
#include <time.h>
#include "download.h"
#include <curl/curl.h>
#include <iostream>
#include "path.hpp"
//static size_t onDataString(void* buffer, size_t size, size_t nmemb, progressInfo * userp) {
// const char* d = (const char*)buffer;
// // userp->headerString.append(d, size * nmemb);
// std::string tmp(d);
// unsigned int contentlengthIndex = tmp.find(std::string("Content-Length: "));
// if (contentlengthIndex != std::string::npos) {
// userp->total = atoi(tmp.substr(16, tmp.find_first_of('\r')).c_str()) + userp->size;
// }
// return size * nmemb;
//}
// libcurl CURLOPT_WRITEFUNCTION callback: appends each received chunk to
// "<path>.tmp", tracks progress in *userp, and periodically invokes the
// user's progress callback.  Returning fewer bytes than size * nmemb makes
// libcurl abort the transfer.
static size_t onDataWrite(void* buffer, size_t size, size_t nmemb, progressInfo * userp) {
  // Cache the HTTP status code once it becomes available.
  if (userp->code == -1) {
    curl_easy_getinfo(userp->curl, CURLINFO_RESPONSE_CODE, &(userp->code));
  }
  // Error responses: consume (and discard) the body without writing it.
  if (userp->code >= 400) {
    return size * nmemb;
  }
  // Lazily compute the expected total: Content-Length plus any bytes already
  // present from a resumed download.
  if (userp->total == -1) {
    curl_off_t cl;
    curl_easy_getinfo(userp->curl, CURLINFO_CONTENT_LENGTH_DOWNLOAD_T, &cl);
    if (cl != -1) {
      userp->total = (long)cl + userp->size;
    } else {
      return size * nmemb;
    }
  }
  // Open the temp file on first write; append mode supports resuming.
  if (userp->fp == nullptr) {
    Path::mkdirp(Path::dirname(userp->path));
    _wfopen_s(&(userp->fp), (userp->path + L".tmp").c_str(), L"ab+");
    if (!(userp->fp)) {
      return size * nmemb;  // NOTE(review): returning full size hides the open failure from libcurl — confirm intended.
    }
  }
  size_t iRec = fwrite(buffer, size, nmemb, userp->fp);
  // NOTE(review): iRec is size_t, so this check is always false; short writes
  // are still surfaced to libcurl via the return value below.
  if (iRec < 0) {
    return iRec;
  }
  userp->sum += iRec;
  userp->speed += iRec;
  // Throttle progress callbacks to roughly one per 200 ticks; this file is
  // Windows-only (_wfopen_s), where clock() ticks are milliseconds.
  int now = clock();
  if (now - userp->last_time > 200) {
    userp->last_time = now;
    userp->speed = 0;
    if (userp->callback) {
      userp->callback(userp, userp->param);
    }
  } else if (userp->sum == userp->total - userp->size) {
    // Final chunk: record the end time and emit one last progress event.
    userp->end_time = clock();
    if (userp->callback) {
      userp->callback(userp, userp->param);
    }
  }
  return iRec;
}
// Downloads `url` to `path`, resuming any partial "<path>.tmp" via an HTTP
// Range header and renaming the temp file into place on success.
// Returns true on success (or when `path` already exists as a file), false
// when `path` exists as a directory or the transfer fails.
bool download (std::wstring url, std::wstring path, downloadCallback callback, void* param) {
  // Already downloaded: succeed unless a directory occupies the target path.
  if (Path::exists(path)) {
    if (Path::isDirectory(path)) {
      return false;
    }
    return true;
  }
  CURL* curl = curl_easy_init();
  struct curl_slist* headers = nullptr;
  /*headers = curl_slist_append(headers, "Connection: Keep-Alive");
  headers = curl_slist_append(headers, "Accept-Encoding: gzip");*/
  headers = curl_slist_append(headers, "Accept: */*");
  headers = curl_slist_append(headers, "User-Agent: Electron Version Manager");
  // Resume support: continue from the size of any existing partial download.
  // Fix: only trust stat.st_size when _wstat() succeeds — on failure (no
  // .tmp file yet) the struct is uninitialized and st_size is garbage; the
  // old code read it unconditionally.
  struct _stat stat;
  long size = 0;
  if (_wstat((path + L".tmp").c_str(), &stat) == 0) {
    size = stat.st_size;
  }
  if (size != 0) {
    headers = curl_slist_append(headers, (std::string("Range: bytes=") + std::to_string(size) + "-").c_str());
  }
  curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
  curl_easy_setopt(curl, CURLOPT_URL, Util::w2a(url, CP_UTF8).c_str());
  curl_easy_setopt(curl, CURLOPT_CUSTOMREQUEST, "GET");
  curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, 10);
  curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
  //curl_easy_setopt(curl, CURLOPT_TIMEOUT, 10);
  // Progress state shared with the onDataWrite write callback.
  progressInfo info;
  info.path = path;
  info.curl = curl;
  info.fp = nullptr;
  info.size = size;
  info.sum = 0;
  info.speed = 0;
  info.start_time = clock();
  info.end_time = 0;
  info.last_time = 0;
  info.total = -1;
  info.param = param;
  info.code = -1;
  info.callback = callback;
  // curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, &onDataString);
  // curl_easy_setopt(curl, CURLOPT_HEADERDATA, &info);
  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, &onDataWrite);
  curl_easy_setopt(curl, CURLOPT_WRITEDATA, &info);
  // NOTE(review): TLS peer/host verification is disabled — confirm intended.
  curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 0L);
  curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, 0L);
  CURLcode code = curl_easy_perform(curl);
  if (code != CURLE_OK) {
    printf("%s\n", curl_easy_strerror(code));
    if (info.fp != nullptr) {
      fclose(info.fp);
      info.fp = nullptr;
    }
    curl_slist_free_all(headers);
    curl_easy_cleanup(curl);
    return false;
  }
  if (info.fp != nullptr) {
    fclose(info.fp);
    info.fp = nullptr;
    // Promote the completed temp file to the final path.
    if (Path::exists(path + L".tmp")) {
      Path::rename(path + L".tmp", path);
    }
  }
  curl_slist_free_all(headers);
  curl_easy_cleanup(curl);
  return true;
}
| 27.731544 | 110 | 0.644966 |
78ed36a96c2539e535ae3912ec2e1803531ec6e1 | 29 | dart | Dart | test/simple_cookies_test.dart | jannikhst/simple_cookies | c7f0b00488b5a3089efced57070ddfca386c950c | [
"MIT"
] | 1 | 2022-01-22T10:48:54.000Z | 2022-01-22T10:48:54.000Z | test/simple_cookies_test.dart | jannikhst/simple_cookies | c7f0b00488b5a3089efced57070ddfca386c950c | [
"MIT"
] | null | null | null | test/simple_cookies_test.dart | jannikhst/simple_cookies | c7f0b00488b5a3089efced57070ddfca386c950c | [
"MIT"
] | 1 | 2021-07-04T16:07:26.000Z | 2021-07-04T16:07:26.000Z | // @dart=2.9
// Placeholder test entry point; no tests are defined for this package yet.
void main() {}
| 7.25 | 14 | 0.482759 |
3b1240768082b6f7210dbe8da1f03534760b8482 | 895 | dart | Dart | test/yanti_test.dart | qqmikey/yanti | f1d53795a0cebf388f5f3888eba2fea0c8ef5366 | [
"MIT"
] | null | null | null | test/yanti_test.dart | qqmikey/yanti | f1d53795a0cebf388f5f3888eba2fea0c8ef5366 | [
"MIT"
] | null | null | null | test/yanti_test.dart | qqmikey/yanti | f1d53795a0cebf388f5f3888eba2fea0c8ef5366 | [
"MIT"
] | null | null | null | import 'package:flutter_test/flutter_test.dart';
import 'package:yanti/yanti.dart';
/// Minimal fixture type exercising Optional-based copyWith semantics.
class TestClass {
  TestClass({this.value});
  // Nullable payload that copyWith replaces, keeps, or clears.
  String? value;
  /// Copy where an omitted [value] keeps the current field and a wrapped
  /// value (including Optional(null)/Optional.empty()) replaces it.
  TestClass copyWith({Optional<String>? value}) {
    return TestClass(
      value: value.or(this.value),
    );
  }
}
void main() {
  // Shared fixture with a non-null starting value.
  final foo = TestClass(value: 'foo');
  test('test constructor', () {
    expect(foo.value, 'foo');
  });
  // Wrapping a value replaces the field.
  test('test copy with value', () {
    final bar = foo.copyWith(value: Optional('bar'));
    expect(bar.value, 'bar');
  });
  // Omitting the argument keeps the original value.
  test('test copy without value', () {
    final bar = foo.copyWith();
    expect(bar.value, 'foo');
  });
  // Optional.empty() explicitly clears the field.
  test('test copy with Optional.empty', () {
    final bar = foo.copyWith(value: Optional.empty());
    expect(bar.value, null);
  });
  // Wrapping null behaves like clearing.
  test('test copy with null value', () {
    final bar = foo.copyWith(value: Optional(null));
    expect(bar.value, null);
  });
}
| 20.813953 | 54 | 0.612291 |
db2609d868d52b0041dd7e4be081ff205e4b258b | 2,538 | rs | Rust | eventific_rabbitmq/src/rabbitmq_sender.rs | Joatin/eventific | 6e28ac6c3767690e4a6e326a4716ee64efc9345e | [
"MIT"
] | 8 | 2019-07-14T02:20:35.000Z | 2021-06-01T14:14:13.000Z | eventific_rabbitmq/src/rabbitmq_sender.rs | Joatin/eventific | 6e28ac6c3767690e4a6e326a4716ee64efc9345e | [
"MIT"
] | 9 | 2018-01-24T19:59:42.000Z | 2018-02-15T15:02:53.000Z | eventific_rabbitmq/src/rabbitmq_sender.rs | eventific/eventific | 6e28ac6c3767690e4a6e326a4716ee64efc9345e | [
"MIT"
] | 1 | 2018-02-04T22:28:16.000Z | 2018-02-04T22:28:16.000Z | use futures::{Future, TryStreamExt, FutureExt, StreamExt};
use std::process;
use std::sync::{Arc, RwLock};
use eventific::Uuid;
use eventific::{Component, Eventific};
use eventific::store::Store;
use std::fmt::Debug;
use strum::IntoEnumIterator;
use std::error::Error;
use lapin::{Connection, ConnectionProperties, ExchangeKind, BasicProperties};
use lapin::options::{ExchangeDeclareOptions, BasicPublishOptions};
use tokio_amqp::LapinTokioExt;
use lapin::types::FieldTable;
use tracing::Instrument;
use eventific::notification::Sender;
use tokio::sync::broadcast::{ Receiver as TokioReceiver };
/// Notification sender that fans aggregate ids out over a RabbitMQ
/// fanout exchange named after the service.
#[derive(Debug)]
pub struct RabbitMqSender {
    // AMQP connection string passed verbatim to lapin's Connection::connect.
    amqp_address: String
}
impl RabbitMqSender {
pub fn new(amqp_address: &str) -> Self {
Self {
amqp_address: amqp_address.to_owned(),
}
}
}
#[eventific::async_trait]
impl<
    St: Store<EventData = D, MetaData = M>,
    S: 'static + Send + Sync + Debug + Default,
    D: 'static + Debug + Clone + Send + Sync + IntoEnumIterator + AsRef<str>,
    M: 'static + Clone + Send + Sync + Debug,
> Sender<St, S, D, M> for RabbitMqSender {
    /// Connects to the broker, declares a fanout exchange named after the
    /// service, and spawns a background task republishing every id received
    /// on `receiver` to that exchange.
    #[tracing::instrument]
    async fn init(&mut self, eventific: &Eventific<St, S, D, M>, mut receiver: TokioReceiver<Uuid>) -> Result<(), Box<dyn Error + Send + Sync>> {
        let service_name = eventific.service_name().to_owned();
        // NOTE(review): instance_id is captured but never used below — confirm
        // it can be dropped.
        let instance_id = eventific.instance_id().to_owned();
        let conn = Connection::connect(
            &self.amqp_address,
            ConnectionProperties::default().with_tokio(),
        )
        .await?;
        let channel = conn.create_channel().await?;
        // Fanout: every queue bound to the exchange receives every id.
        let _exchange = channel.exchange_declare(&service_name, ExchangeKind::Fanout, ExchangeDeclareOptions::default(), FieldTable::default()).await?;
        tokio::spawn(async move {
            loop {
                match receiver.recv().await {
                    Ok(id) => {
                        // Publish the raw 16-byte uuid; the routing key is
                        // empty because fanout exchanges ignore it.
                        // NOTE(review): unwrap() panics this task on publish
                        // failure — confirm that is acceptable.
                        channel.clone().basic_publish(
                            &service_name,
                            "",
                            BasicPublishOptions::default(),
                            Uuid::as_bytes(&id).to_vec(),
                            BasicProperties::default()
                        ).await.unwrap();
                    },
                    Err(_) => {
                        // Broadcast receive error: the receiver lagged behind
                        // (messages dropped) or the channel closed.
                        tracing::error!("Can't keep up with messages")
                    }
                }
            }
        });
        Ok(())
    }
    /// Human-readable sender name used in logs/diagnostics.
    fn name(&self) -> &str {
        "RabbitMQ Sender 🐰🗣"
    }
}
| 32.126582 | 151 | 0.5658 |
cdcac765dd752315b2b36cbe161475ac038d015f | 1,054 | cs | C# | src/AspNet.Security.OAuth.HealthGraph/HealthGraphAuthenticationOptions.cs | TrovoLib/AspNet.Security.OAuth.Providers | f0f864a7ee5aecfb58657f016dac6bedd3008162 | [
"Apache-2.0"
] | 2 | 2020-01-30T08:40:13.000Z | 2021-06-04T20:07:44.000Z | src/AspNet.Security.OAuth.HealthGraph/HealthGraphAuthenticationOptions.cs | aabb1212/AspNet.Security.OAuth.Providers | 627626387fb5aa41ade3f21506aed2b8a35b71c9 | [
"Apache-2.0"
] | 1 | 2020-09-24T20:43:48.000Z | 2020-09-24T20:43:48.000Z | src/AspNet.Security.OAuth.HealthGraph/HealthGraphAuthenticationOptions.cs | aabb1212/AspNet.Security.OAuth.Providers | 627626387fb5aa41ade3f21506aed2b8a35b71c9 | [
"Apache-2.0"
] | null | null | null | /*
* Licensed under the Apache License, Version 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
* See https://github.com/aspnet-contrib/AspNet.Security.OAuth.Providers
* for more information concerning the license and the contributors participating to this project.
*/
using System.Security.Claims;
namespace AspNet.Security.OAuth.HealthGraph;
/// <summary>
/// Defines a set of options used by <see cref="HealthGraphAuthenticationHandler"/>.
/// </summary>
public class HealthGraphAuthenticationOptions : OAuthOptions
{
    /// <summary>
    /// Initializes the options with the HealthGraph provider defaults
    /// (claims issuer, callback path, OAuth endpoints) and maps the JSON
    /// "userID" field to the name-identifier claim.
    /// </summary>
    public HealthGraphAuthenticationOptions()
    {
        ClaimsIssuer = HealthGraphAuthenticationDefaults.Issuer;
        CallbackPath = HealthGraphAuthenticationDefaults.CallbackPath;
        AuthorizationEndpoint = HealthGraphAuthenticationDefaults.AuthorizationEndpoint;
        TokenEndpoint = HealthGraphAuthenticationDefaults.TokenEndpoint;
        UserInformationEndpoint = HealthGraphAuthenticationDefaults.UserInformationEndpoint;
        ClaimActions.MapJsonKey(ClaimTypes.NameIdentifier, "userID");
    }
}
| 36.344828 | 98 | 0.777989 |
ce9d622741a01ad4f48dbc778932f1134400776c | 10,229 | lua | Lua | spec/plugins/cors/access_spec.lua | totalkyos/maskong | c28fad03c27bb23c8b394f8db19dd98d4b87b577 | [
"Apache-2.0"
] | null | null | null | spec/plugins/cors/access_spec.lua | totalkyos/maskong | c28fad03c27bb23c8b394f8db19dd98d4b87b577 | [
"Apache-2.0"
] | null | null | null | spec/plugins/cors/access_spec.lua | totalkyos/maskong | c28fad03c27bb23c8b394f8db19dd98d4b87b577 | [
"Apache-2.0"
] | 1 | 2019-10-07T17:55:19.000Z | 2019-10-07T17:55:19.000Z | local spec_helper = require "spec.spec_helpers"
local http_client = require "kong.tools.http_client"
local PROXY_URL = spec_helper.PROXY_URL
describe("CORS Plugin", function()
setup(function()
spec_helper.prepare_db()
spec_helper.insert_fixtures {
api = {
{ name = "tests-cors-1", request_host = "cors1.com", upstream_url = "http://mockbin.com" },
{ name = "tests-cors-2", request_host = "cors2.com", upstream_url = "http://mockbin.com" },
{ name = "tests-cors-3", request_host = "cors3.com", upstream_url = "http://httpbin.org" },
{ name = "tests-cors-4", request_host = "cors4.com", upstream_url = "http://httpbin.org" }
},
plugin = {
{ name = "cors", config = {}, __api = 1 },
{ name = "cors", config = { origin = "example.com",
methods = { "GET" },
headers = { "origin", "type", "accepts" },
exposed_headers = { "x-auth-token" },
max_age = 23,
credentials = true }, __api = 2 },
{ name = "cors", config = { origin = "example.com",
methods = { "GET" },
headers = { "origin", "type", "accepts" },
exposed_headers = { "x-auth-token" },
max_age = 23,
preflight_continue = true,
credentials = true }, __api = 3 },
{ name = "cors", config = { origin = "example.com",
methods = { "GET" },
headers = { "origin", "type", "accepts" },
exposed_headers = { "x-auth-token" },
max_age = 23,
preflight_continue = false,
credentials = true }, __api = 4 }
}
}
spec_helper.start_kong()
end)
teardown(function()
spec_helper.stop_kong()
end)
describe("OPTIONS", function()
it("should give appropriate defaults when no options are passed", function()
local _, status, headers = http_client.options(PROXY_URL.."/", {}, {host = "cors1.com"})
-- assertions
assert.are.equal(204, status)
assert.are.equal("*", headers["access-control-allow-origin"])
assert.are.equal("GET,HEAD,PUT,PATCH,POST,DELETE", headers["access-control-allow-methods"])
assert.are.equal(nil, headers["access-control-allow-headers"])
assert.are.equal(nil, headers["access-control-expose-headers"])
assert.are.equal(nil, headers["access-control-allow-credentials"])
assert.are.equal(nil, headers["access-control-max-age"])
end)
it("should reflect what is specified in options", function()
-- make proxy request
local _, status, headers = http_client.options(PROXY_URL.."/", {}, {host = "cors2.com"})
-- assertions
assert.are.equal(204, status)
assert.are.equal("example.com", headers["access-control-allow-origin"])
assert.are.equal("origin,type,accepts", headers["access-control-allow-headers"])
assert.are.equal(nil, headers["access-control-expose-headers"])
assert.are.equal("GET", headers["access-control-allow-methods"])
assert.are.equal(tostring(23), headers["access-control-max-age"])
assert.are.equal(tostring(true), headers["access-control-allow-credentials"])
end)
it("should work with preflight_continue=true and a duplicate header set by the API", function()
-- An OPTIONS preflight request with preflight_continue=true should have the same response as directly invoking the final API
local response, status, headers = http_client.options(PROXY_URL.."/headers", {}, {host = "cors3.com"})
local response2, status2, headers2 = http_client.options("http://httpbin.org/response-headers", {}, {host = "cors3.com"})
headers["via"] = nil
headers["x-kong-proxy-latency"] = nil
headers["x-kong-upstream-latency"] = nil
headers["date"] = nil
headers2["date"] = nil
assert.are.equal(response, response2)
assert.are.equal(status, status2)
assert.are.same(headers, headers2)
-- Any other request that's not a preflight request, should match our plugin configuration
local _, status, headers = http_client.get(PROXY_URL.."/get", {}, {host = "cors3.com"})
assert.are.equal(200, status)
assert.are.equal("example.com", headers["access-control-allow-origin"])
assert.are.equal("x-auth-token", headers["access-control-expose-headers"])
assert.are.equal(tostring(true), headers["access-control-allow-credentials"])
local _, status, headers = http_client.get(PROXY_URL.."/response-headers", {["access-control-allow-origin"] = "*"}, {host = "cors3.com"})
assert.are.equal(200, status)
assert.are.equal("example.com", headers["access-control-allow-origin"])
assert.are.equal("x-auth-token", headers["access-control-expose-headers"])
assert.are.equal(tostring(true), headers["access-control-allow-credentials"])
end)
it("should work with preflight_continue=false", function()
-- An OPTIONS preflight request with preflight_continue=false should be handled by Kong instead
local response, status, headers = http_client.options(PROXY_URL.."/headers", {}, {host = "cors4.com"})
local response2, status2, headers2 = http_client.options("http://httpbin.org/response-headers", {}, {host = "cors4.com"})
headers["via"] = nil
headers["x-kong-proxy-latency"] = nil
headers["x-kong-upstream-latency"] = nil
headers["date"] = nil
headers2["date"] = nil
assert.are.equal(response, response2)
assert.are_not.equal(status, status2)
assert.are_not.same(headers, headers2)
assert.are.equal("example.com", headers["access-control-allow-origin"])
assert.are.equal("GET", headers["access-control-allow-methods"])
assert.are.equal("origin,type,accepts", headers["access-control-allow-headers"])
assert.are.equal(nil, headers["access-control-expose-headers"])
assert.are.equal(tostring(true), headers["access-control-allow-credentials"])
assert.are.equal(tostring(23), headers["access-control-max-age"])
-- Any other request that's not a preflight request, should match our plugin configuration
local _, status, headers = http_client.get(PROXY_URL.."/get", {}, {host = "cors4.com"})
assert.are.equal(200, status)
assert.are.equal("example.com", headers["access-control-allow-origin"])
assert.are.equal("x-auth-token", headers["access-control-expose-headers"])
assert.are.equal(tostring(true), headers["access-control-allow-credentials"])
end)
it("should work with preflight_continue=false and a duplicate header set by the API", function()
-- An OPTIONS preflight request with preflight_continue=false should be handled by Kong instead
local response, status, headers = http_client.options(PROXY_URL.."/headers", {}, {host = "cors4.com"})
local response2, status2, headers2 = http_client.options("http://httpbin.org/response-headers", {}, {host = "cors4.com"})
headers["via"] = nil
headers["x-kong-proxy-latency"] = nil
headers["x-kong-upstream-latency"] = nil
headers["date"] = nil
headers2["date"] = nil
assert.are.equal(response, response2)
assert.are_not.equal(status, status2)
assert.are_not.same(headers, headers2)
assert.are.equal("example.com", headers["access-control-allow-origin"])
assert.are.equal("GET", headers["access-control-allow-methods"])
assert.are.equal("origin,type,accepts", headers["access-control-allow-headers"])
assert.are.equal(nil, headers["access-control-expose-headers"])
assert.are.equal(tostring(true), headers["access-control-allow-credentials"])
assert.are.equal(tostring(23), headers["access-control-max-age"])
-- Any other request that's not a preflight request, should match our plugin configuration
local _, status, headers = http_client.get(PROXY_URL.."/response-headers", {["access-control-allow-origin"] = "*"}, {host = "cors4.com"})
assert.are.equal(200, status)
assert.are.equal("example.com", headers["access-control-allow-origin"])
assert.are.equal("x-auth-token", headers["access-control-expose-headers"])
assert.are.equal(tostring(true), headers["access-control-allow-credentials"])
end)
end)
describe("GET,PUT,POST,ETC", function()
it("should give appropriate defaults when no options are passed", function()
-- make proxy request
local _, status, headers = http_client.get(PROXY_URL.."/", {}, {host = "cors1.com"})
-- assertions
assert.are.equal(200, status)
assert.are.equal("*", headers["access-control-allow-origin"])
assert.are.equal(nil, headers["access-control-allow-methods"])
assert.are.equal(nil, headers["access-control-allow-headers"])
assert.are.equal(nil, headers["access-control-expose-headers"])
assert.are.equal(nil, headers["access-control-allow-credentials"])
assert.are.equal(nil, headers["access-control-max-age"])
end)
it("should reflect some of what is specified in options", function()
-- make proxy request
local _, status, headers = http_client.get(PROXY_URL.."/", {}, {host = "cors2.com"})
-- assertions
assert.are.equal(200, status)
assert.are.equal("example.com", headers["access-control-allow-origin"])
assert.are.equal("x-auth-token", headers["access-control-expose-headers"])
assert.are.equal(nil, headers["access-control-allow-headers"])
assert.are.equal(nil, headers["access-control-allow-methods"])
assert.are.equal(nil, headers["access-control-max-age"])
assert.are.equal(tostring(true), headers["access-control-allow-credentials"])
end)
end)
end)
| 48.942584 | 143 | 0.622642 |
a3757c611ccb467bdc6edf2f45a174be9efac9b9 | 124 | ts | TypeScript | src/app/components/page/index.ts | deniss-muhla/web-app-template | 4b89673fc094b93e8896ab9c7dd5616a42762ab6 | [
"MIT"
] | null | null | null | src/app/components/page/index.ts | deniss-muhla/web-app-template | 4b89673fc094b93e8896ab9c7dd5616a42762ab6 | [
"MIT"
] | 3 | 2021-03-09T17:30:36.000Z | 2021-09-01T22:24:59.000Z | src/app/components/page/index.ts | deniss-muhla/web-app-template | 4b89673fc094b93e8896ab9c7dd5616a42762ab6 | [
"MIT"
] | null | null | null | import _ from 'lodash';
import Page from './page';
import { memo } from 'react';
export default _.flowRight([memo])(Page);
| 20.666667 | 41 | 0.685484 |
e71932197432afb6ee415501a031cf97c0bd43e1 | 3,220 | php | PHP | src/app/code/Magestudy/Crud/Model/Repository/TagRepository.php | waghron/m2_code | a87d4999a10a9d7df645e5968487c345e2ec1a31 | [
"MIT"
] | 73 | 2018-02-19T16:46:02.000Z | 2022-03-21T07:06:05.000Z | Crud/Model/Repository/TagRepository.php | dbashyal/Magestudy | d6e8f234e2c4da4dc3c6cf6939e370b1e7ce3975 | [
"MIT"
] | 2 | 2019-06-23T13:41:04.000Z | 2020-12-29T19:42:15.000Z | Crud/Model/Repository/TagRepository.php | dbashyal/Magestudy | d6e8f234e2c4da4dc3c6cf6939e370b1e7ce3975 | [
"MIT"
] | 31 | 2018-07-31T20:18:22.000Z | 2022-03-02T13:44:07.000Z | <?php
namespace Magestudy\Crud\Model\Repository;
use Exception;
use Magento\Framework\Exception\CouldNotSaveException;
use Magento\Framework\Exception\NoSuchEntityException;
use Magento\Framework\Exception\StateException;
use Magento\Framework\Exception\ValidatorException;
use Magestudy\Crud\Api\TagRepositoryInterface;
use Magestudy\Crud\Api\Data\TagInterface;
use Magestudy\Crud\Model\TagFactory;
use Magestudy\Crud\Model\ResourceModel\Tag;
class TagRepository implements TagRepositoryInterface
{
/**
* @var array
*/
protected $_instances = [];
/**
* @var Tag
*/
protected $_resource;
/**
* @var TagFactory
*/
protected $_factory;
public function __construct(
Tag $resource,
TagFactory $factory
) {
$this->_resource = $resource;
$this->_factory = $factory;
}
/**
* Save data.
*
* @param TagInterface $object
* @return TagInterface
* @throws \Magento\Framework\Exception\LocalizedException
*/
public function save(TagInterface $object)
{
/** @var TagInterface|\Magento\Framework\Model\AbstractModel $object */
try {
$this->_resource->save($object);
} catch (Exception $exception) {
throw new CouldNotSaveException(__(
'Could not save the record: %1',
$exception->getMessage()
));
}
return $object;
}
/**
* Retrieve data.
*
* @param int $id
* @return TagInterface
* @throws \Magento\Framework\Exception\LocalizedException
*/
public function getById($id)
{
if (!isset($this->_instances[$id])) {
/** @var TagInterface|\Magento\Framework\Model\AbstractModel $object */
$object = $this->_factory->create();
$this->_resource->load($object, $id);
if (!$object->getId()) {
throw new NoSuchEntityException(__('Data does not exist'));
}
$this->_instances[$id] = $object;
}
return $this->_instances[$id];
}
/**
* Delete data.
*
* @param TagInterface $object
* @return bool true on success
* @throws \Magento\Framework\Exception\LocalizedException
*/
public function delete(TagInterface $object)
{
/** @var TagInterface|\Magento\Framework\Model\AbstractModel $object */
$id = $object->getId();
try {
unset($this->_instances[$id]);
$this->_resource->delete($object);
} catch (ValidatorException $e) {
throw new CouldNotSaveException(__($e->getMessage()));
} catch (Exception $e) {
throw new StateException(
__('Unable to remove %1', $id)
);
}
unset($this->_instances[$id]);
return true;
}
/**
* Delete data by ID.
*
* @param int $id
* @return bool true on success
* @throws \Magento\Framework\Exception\NoSuchEntityException
* @throws \Magento\Framework\Exception\LocalizedException
*/
public function deleteById($id)
{
return $this->delete($this->getById($id));
}
} | 27.058824 | 83 | 0.586025 |
2fa49b0ebec4aa7284ee66766356611800b89a25 | 4,072 | py | Python | test/test_mittens.py | akanshajainn/mittens | da8986385f785ef33954f59f3f907565e7b3da2f | [
"Apache-2.0"
] | 234 | 2018-03-28T11:55:53.000Z | 2022-01-05T13:09:35.000Z | test/test_mittens.py | akanshajainn/mittens | da8986385f785ef33954f59f3f907565e7b3da2f | [
"Apache-2.0"
] | 17 | 2018-04-16T03:50:38.000Z | 2022-03-16T16:36:38.000Z | test/test_mittens.py | akanshajainn/mittens | da8986385f785ef33954f59f3f907565e7b3da2f | [
"Apache-2.0"
] | 32 | 2018-03-29T08:17:57.000Z | 2021-09-03T13:08:35.000Z | """test_mittens.py
Test Mittens and GloVe using both NumPy and TensorFlow (if available).
If TensorFlow is not installed, those tests are skipped. If it is,
all tests are run twice: first with NumPy and then with TensorFlow,
according to the `framework` fixture.
Tests use pytest: from the command line, run:
$ pytest PATH/TO/MITTENS/test/
Add a `-v` flag to get detailed output.
Author: Nick Dingwall
"""
import numpy as np
import pytest
import mittens.np_mittens as np_mittens
try:
TENSORFLOW_INSTALLED = True
import mittens.tf_mittens as tf_mittens
except ImportError:
TENSORFLOW_INSTALLED = False
tf_mittens = None
FRAMEWORK_TO_MODULE = {'np': np_mittens, 'tf': tf_mittens}
@pytest.fixture(scope="module", params=['np', 'tf'])
def framework(request):
return request.param
def test_glove(framework):
if not TENSORFLOW_INSTALLED and framework == 'tf':
pytest.skip("Tensorflow not installed.")
np.random.seed(42)
corr = _run_glove(FRAMEWORK_TO_MODULE[framework].GloVe, max_iter=1000)
assert corr > 0.4
def test_glove_initialization(framework):
if not TENSORFLOW_INSTALLED and framework == 'tf':
pytest.skip("Tensorflow not installed.")
np.random.seed(42)
corr = _run_glove(FRAMEWORK_TO_MODULE[framework].GloVe, max_iter=0)
assert abs(corr) < 0.2
def test_mittens(framework):
"""Test that Mittens moves initial representations in the correct
direction.
"""
if not TENSORFLOW_INSTALLED and framework == 'tf':
pytest.skip("Tensorflow not installed.")
np.random.seed(42)
embedding_dim = 10
vocab = ['a', 'b', 'c', 'd', 'e']
initial_embeddings = {v: np.random.normal(0, 1, size=embedding_dim)
for v in vocab}
X = _make_word_word_matrix(len(vocab))
true = X.ravel()
mittens = FRAMEWORK_TO_MODULE[framework].Mittens(n=embedding_dim,
max_iter=50)
post_G = mittens.fit(X, vocab=vocab,
initial_embedding_dict=initial_embeddings)
pre_G = mittens.G_start
pre_pred = pre_G.dot(pre_G.T).ravel()
post_pred = post_G.dot(post_G.T).ravel()
pre_corr = _get_correlation(true, pre_pred)
post_corr = _get_correlation(true, post_pred)
assert post_corr > pre_corr
def test_mittens_parameter(framework):
"""Test that a large Mittens parameter keeps learned representations
closer to the original than a small Mittens parameter.
"""
if not TENSORFLOW_INSTALLED and framework == 'tf':
pytest.skip("Tensorflow not installed.")
np.random.seed(42)
embedding_dim = 50
vocab = ['a', 'b', 'c', 'd', 'e']
initial_embeddings = {v: np.random.normal(0, 1, size=embedding_dim)
for v in vocab}
X = _make_word_word_matrix(len(vocab))
diffs = dict()
small = 0.001
mid = 1
big = 1000
for m in [small, mid, big]:
mittens = FRAMEWORK_TO_MODULE[framework].Mittens(n=embedding_dim,
max_iter=50,
mittens=m)
G = mittens.fit(X, vocab=vocab,
initial_embedding_dict=initial_embeddings)
original = mittens.G_start
diffs[m] = np.linalg.norm(G - original)
assert diffs[small] > diffs[mid]
assert diffs[mid] > diffs[big]
def _make_word_word_matrix(n=50):
"""Returns a symmetric matrix where the entries are drawn from a
Poisson distribution"""
base = np.random.zipf(2, size=(n, n)) - 1
return base + base.T
def _get_correlation(true, pred):
"""Check correlation for nonzero elements of 'true'"""
nonzero = true > 0
return np.corrcoef(np.log(true[nonzero]), pred[nonzero])[0][1]
def _run_glove(glove_implementation, w=50, n=200, max_iter=100):
X = _make_word_word_matrix(w)
glove = glove_implementation(n=n, max_iter=max_iter)
G = glove.fit(X)
pred = G.dot(G.T).ravel()
true = X.ravel()
return _get_correlation(true, pred)
| 30.616541 | 74 | 0.649312 |
fa22efbe915b9c31259c8762a991b410db1c96cb | 2,920 | cpp | C++ | client/cpp/src/net/tcp_client.cpp | Heng-W/network | 49e34a51b19f34b443f7a914b45f47acd146d562 | [
"MIT"
] | null | null | null | client/cpp/src/net/tcp_client.cpp | Heng-W/network | 49e34a51b19f34b443f7a914b45f47acd146d562 | [
"MIT"
] | null | null | null | client/cpp/src/net/tcp_client.cpp | Heng-W/network | 49e34a51b19f34b443f7a914b45f47acd146d562 | [
"MIT"
] | null | null | null |
#include "tcp_client.h"
#include "../util/logger.h"
#include "connector.h"
#include "socket.h"
namespace net
{
TcpClient::TcpClient(const InetAddress& serverAddr)
: connector_(new Connector(serverAddr)),
connectionCallback_(defaultConnectionCallback),
messageCallback_(defaultMessageCallback),
quit_(false),
recv_(false),
connect_(true),
retry_(false),
name_("Tcp Client"),
baseThreadId_(std::this_thread::get_id()),
recvThread_{&TcpClient::recvThreadFunc, this}
{
connector_->setNewConnectionCallback([this](int sockfd) { this->newConnection(sockfd); });
LOG(INFO) << "TcpClient::TcpClient[" << name_
<< "] - connector " << connector_.get();
}
TcpClient::~TcpClient()
{
LOG(INFO) << "TcpClient::~TcpClient[" << name_
<< "] - connector " << connector_.get();
quit_ = true;
recvCond_.notify_one();
recvThread_.join();
}
void TcpClient::start()
{
assert(isInBaseThread());
LOG(INFO) << "TcpClient::connect[" << name_ << "] - connecting to "
<< connector_->serverAddress().toIpPort();
connect_ = true;
connector_->start();
}
void TcpClient::stop()
{
connect_ = false;
TcpConnectionPtr conn;
{
std::lock_guard<std::mutex> lock(mutex_);
conn = connection_;
}
if (conn)
{
conn->shutdown();
}
else
{
connector_->stop();
}
}
void TcpClient::setRetryDelay(int delayMs, bool fixed)
{
connector_->setRetryDelay(delayMs, fixed);
}
void TcpClient::newConnection(int sockfd)
{
InetAddress peerAddr = sockets::getPeerAddr(sockfd);
TcpConnectionPtr conn = std::make_shared<TcpConnection>(sockfd, peerAddr);
conn->setMessageCallback(messageCallback_);
conn->setWriteCompleteCallback(writeCompleteCallback_);
{
std::lock_guard<std::mutex> lock(mutex_);
connection_ = conn;
}
if (!connect_) return;
conn->connectEstablished();
// enable reading
recv_ = true;
recvCond_.notify_one();
connectionCallback_(conn);
conn->doSendEvent();
{
std::unique_lock<std::mutex> lock(mutex_);
recvCond_.wait(lock, [this] { return !recv_ || quit_; });
connection_.reset();
}
conn->connectDestroyed();
connectionCallback_(conn);
conn.reset();
if (retry_ && connect_)
{
LOG(INFO) << "TcpClient::connect[" << name_ << "] - Reconnecting to "
<< connector_->serverAddress().toIpPort();
connector_->start();
}
}
void TcpClient::recvThreadFunc()
{
while (!quit_)
{
{
std::unique_lock<std::mutex> lock(mutex_);
recvCond_.wait(lock, [this] { return recv_ || quit_; });
}
if (quit_) return;
connection_->doRecvEvent();
recv_ = false;
recvCond_.notify_one();
}
}
} // namespace net
| 21.62963 | 94 | 0.603082 |
b0e1a13c6288ba1ad7da38f63107757e48bb6f3b | 6,573 | sql | SQL | crud_laravel.sql | birunidev/lara-crud | 2e2d79be358908392d0f29f2780a5f0bb313e82b | [
"MIT"
] | null | null | null | crud_laravel.sql | birunidev/lara-crud | 2e2d79be358908392d0f29f2780a5f0bb313e82b | [
"MIT"
] | 3 | 2021-02-02T20:16:54.000Z | 2022-02-27T07:39:00.000Z | crud_laravel.sql | birunidev/lara-crud | 2e2d79be358908392d0f29f2780a5f0bb313e82b | [
"MIT"
] | null | null | null | -- phpMyAdmin SQL Dump
-- version 4.9.5deb2
-- https://www.phpmyadmin.net/
--
-- Host: localhost:3306
-- Generation Time: Jul 03, 2020 at 09:17 PM
-- Server version: 8.0.20-0ubuntu0.20.04.1
-- PHP Version: 7.4.3
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `crud_laravel`
--
-- --------------------------------------------------------
--
-- Table structure for table `answers`
--
CREATE TABLE `answers` (
`id` bigint UNSIGNED NOT NULL,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`jawaban` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`pertanyaanId` bigint UNSIGNED NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `answers`
--
INSERT INTO `answers` (`id`, `name`, `jawaban`, `pertanyaanId`, `created_at`, `updated_at`) VALUES
(1, 'John Doe', 'Karena ngoding itu kaya olahraga ngelatih otak banget', 5, '2020-07-02 14:21:21', '2020-07-02 14:21:21'),
(2, 'Bayu Skak', 'Klo aku sih lebih suka bikin algoritma nya', 5, '2020-07-02 07:50:40', '2020-07-02 07:50:40'),
(3, 'Gilfoyle', 'Enak nya tuh ga usah ngurusin database wkwkwk', 6, '2020-07-02 08:09:21', '2020-07-02 08:09:21'),
(4, 'Gilfoyle', 'Suka aja', 5, '2020-07-03 06:12:29', '2020-07-03 06:12:29');
-- --------------------------------------------------------
--
-- Table structure for table `failed_jobs`
--
CREATE TABLE `failed_jobs` (
`id` bigint UNSIGNED NOT NULL,
`connection` text COLLATE utf8mb4_unicode_ci NOT NULL,
`queue` text COLLATE utf8mb4_unicode_ci NOT NULL,
`payload` longtext COLLATE utf8mb4_unicode_ci NOT NULL,
`exception` longtext COLLATE utf8mb4_unicode_ci NOT NULL,
`failed_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `migrations`
--
CREATE TABLE `migrations` (
`id` int UNSIGNED NOT NULL,
`migration` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`batch` int NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `migrations`
--
INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES
(1, '2014_10_12_000000_create_users_table', 1),
(2, '2014_10_12_100000_create_password_resets_table', 1),
(3, '2019_08_19_000000_create_failed_jobs_table', 1),
(4, '2020_07_02_114504_create_questions_table', 1),
(6, '2020_07_02_124911_add_name_to_users_question', 2),
(7, '2020_07_02_134747_create_answers_table', 3);
-- --------------------------------------------------------
--
-- Table structure for table `password_resets`
--
CREATE TABLE `password_resets` (
`email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`token` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- --------------------------------------------------------
--
-- Table structure for table `questions`
--
CREATE TABLE `questions` (
`id` bigint UNSIGNED NOT NULL,
`judul_pertanyaan` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`isi_pertanyaan` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Dumping data for table `questions`
--
INSERT INTO `questions` (`id`, `judul_pertanyaan`, `created_at`, `updated_at`, `name`, `isi_pertanyaan`) VALUES
(6, 'Apa sih enaknya jadi front-end developer ?', '2020-07-02 06:46:48', '2020-07-02 06:46:48', 'John Doe', 'saya sekarang lagi di jenjang ingin menjadi developer tapi masih bingung pilih front-end atau back-end'),
(7, 'Cara jadi developer?', '2020-07-03 06:14:46', '2020-07-03 06:14:46', 'Johns', 'bagaimana cara kita menjadi developer yang handal ?');
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
CREATE TABLE `users` (
`id` bigint UNSIGNED NOT NULL,
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`email` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`email_verified_at` timestamp NULL DEFAULT NULL,
`password` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL,
`remember_token` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
--
-- Indexes for dumped tables
--
--
-- Indexes for table `answers`
--
ALTER TABLE `answers`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `failed_jobs`
--
ALTER TABLE `failed_jobs`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `migrations`
--
ALTER TABLE `migrations`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `password_resets`
--
ALTER TABLE `password_resets`
ADD KEY `password_resets_email_index` (`email`);
--
-- Indexes for table `questions`
--
ALTER TABLE `questions`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`),
ADD UNIQUE KEY `users_email_unique` (`email`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `answers`
--
ALTER TABLE `answers`
MODIFY `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `failed_jobs`
--
ALTER TABLE `failed_jobs`
MODIFY `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT;
--
-- AUTO_INCREMENT for table `migrations`
--
ALTER TABLE `migrations`
MODIFY `id` int UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=8;
--
-- AUTO_INCREMENT for table `questions`
--
ALTER TABLE `questions`
MODIFY `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=9;
--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
MODIFY `id` bigint UNSIGNED NOT NULL AUTO_INCREMENT;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
| 29.877273 | 214 | 0.695573 |
79ae852e4da7fb4f202b819a625c8d9368eae0d2 | 10,115 | php | PHP | extensions/language_redirect/lib/class.languageredirect.php | Sasha-Olesky/Symphony-CMS | 1e77b0ec058b53879984fa9d08baf7a3e82d1864 | [
"MIT"
] | null | null | null | extensions/language_redirect/lib/class.languageredirect.php | Sasha-Olesky/Symphony-CMS | 1e77b0ec058b53879984fa9d08baf7a3e82d1864 | [
"MIT"
] | 1 | 2020-08-31T12:18:48.000Z | 2020-08-31T20:30:17.000Z | extensions/language_redirect/lib/class.languageredirect.php | Sasha-Olesky/Symphony-CMS | 1e77b0ec058b53879984fa9d08baf7a3e82d1864 | [
"MIT"
] | null | null | null | <?php
if(!defined('__IN_SYMPHONY__')) die('<h2>Error</h2><p>You cannot directly access this file</p>');
/**
 * Singleton that resolves the visitor's requested language/region from the
 * current request and the list of language codes supported by the
 * `language_redirect` extension configuration.
 *
 * Obtain it via LanguageRedirect::instance(); the constructor is private.
 */
class LanguageRedirect{
    private static $_instance;
    // Raw language code from the request, e.g. 'en' (null when absent).
    private $_language;
    // Raw region code from the request, e.g. 'us' (null when absent).
    private $_region;
    // Combined code, 'language-region' when a region is present, else just the language.
    private $_language_code;
    // Trimmed, non-empty codes parsed from the extension configuration.
    private $_supported_language_codes;
    // I don't know those languages, so if You know for sure that browser uses different code,
    // or that native name should be different, please let me know about that :).
    // It would also be great, if whole string could be in native form, including name of country.
    private $_languages = array( // [English name]
        'ab' => 'аҧсуа бызшәа', // Abkhazian
        'af' => 'Afrikaans', // Afrikaans
        'sq' => 'shqip', // Albanian
        'am' => 'አማርኛ', // Amharic
        'ar-dz' => 'العربية (Algeria)', // Arabic
        'ar-bh' => 'العربية (Bahrain)', // Arabic
        'ar-eg' => 'العربية (Egypt)', // Arabic
        'ar-iq' => 'العربية (Iraq)', // Arabic
        'ar-jo' => 'العربية (Jordan)', // Arabic
        'ar-kw' => 'العربية (Kuwait)', // Arabic
        'ar-lb' => 'العربية (Lebanon)', // Arabic
        'ar-ly' => 'العربية (Libya)', // Arabic
        'ar-ma' => 'العربية (Morocco)', // Arabic
        'ar-om' => 'العربية (Oman)', // Arabic
        'ar-qa' => 'العربية (Qatar)', // Arabic
        'ar-sa' => 'العربية (Saudi Arabia)', // Arabic
        'ar-sy' => 'العربية (Syria)', // Arabic
        'ar-tn' => 'العربية (Tunisia)', // Arabic
        'ar-ae' => 'العربية (U.A.E.)', // Arabic
        'ar-ye' => 'العربية (Yemen)', // Arabic
        'ar' => 'العربية', // Arabic
        'hy' => 'Հայերեն', // Armenian
        'as' => 'অসমীয়া', // Assamese
        'az' => 'azərbaycan', // Azeri
        'eu' => 'euskera', // Basque
        'be' => 'Беларуская', // Belarusian
        'bn' => 'বাংলা', // Bengali
        'bg' => 'Български', // Bulgarian
        'ca' => 'Català', // Catalan
        'zh-cn' => '简体中文 (China)', // Chinese simplified script
        'zh-hk' => '繁體中文 (Hong Kong SAR)', // Chinese traditional script
        'zh-mo' => '繁體中文 (Macau SAR)', // Chinese traditional script
        'zh-sg' => '简体中文 (Singapore)', // Chinese simplified script
        'zh-tw' => '繁體中文 (Taiwan)', // Chinese traditional script
        'zh' => '中文', // Chinese
        'hr' => 'Hrvatski', // Croatian
        'cs' => 'čeština', // Czech
        'da' => 'Dansk', // Danish
        'dv' => 'ދިވެހި', // Divehi
        'nl-be' => 'Nederlands (Belgium)', // Dutch
        'nl' => 'Nederlands (Netherlands)', // Dutch
        'en-au' => 'English (Australia)', // English
        'en-bz' => 'English (Belize)', // English
        'en-ca' => 'English (Canada)', // English
        'en-ie' => 'English (Ireland)', // English
        'en-jm' => 'English (Jamaica)', // English
        'en-nz' => 'English (New Zealand)', // English
        'en-ph' => 'English (Philippines)', // English
        'en-za' => 'English (South Africa)', // English
        'en-tt' => 'English (Trinidad)', // English
        'en-gb' => 'English (United Kingdom)', // English
        'en-us' => 'English (United States)', // English
        'en-zw' => 'English (Zimbabwe)', // English
        'en' => 'English', // English
        'ee' => 'Ɛʋɛ', // Ewe
        'et' => 'Eesti', // Estonian
        'fo' => 'føroyskt', // Faeroese
        'fa' => 'فارسی', // Farsi
        'fi' => 'suomi', // Finnish
        'fr-be' => 'français (Belgium)', // French (Belgium)
        'fr-ca' => 'français canadien', // French (Canada)
        'fr-lu' => 'français (Luxembourg)', // French
        'fr-mc' => 'français (Monaco)', // French
        'fr-ch' => 'français (Switzerland)', // French
        'fr' => 'français', // French
        'ff' => 'Fulfulde, Pulaar, Pular', // Fula, Fulah, Fulani
        'gl' => 'Galego', // Galician
        'gd' => 'Gàidhlig', // Gaelic (Scottish)
        'ga' => 'Gaeilge', // Gaelic (Irish)
        'gv' => 'Gaelg', // Gaelic (Manx) (Isle of Man)
        'ka' => 'ქართული ენა', // Georgian
        'de-at' => 'Deutsch (Austria)', // German
        'de-li' => 'Deutsch (Liechtenstein)', // German
        'de-lu' => 'Deutsch (Luxembourg)', // German
        'de-ch' => 'Deutsch (Switzerland)', // German
        'de' => 'Deutsch', // German
        'el' => 'Ελληνικά', // Greek
        'gu' => 'ગુજરાતી', // Gujarati
        'ha' => 'هَوْسَ', // Hausa
        'he' => 'עברית', // Hebrew
        'hi' => 'हिंदी', // Hindi
        'hu' => 'Magyar', // Hungarian
        'is' => 'Íslenska', // Icelandic
        'id' => 'Bahasa Indonesia', // Indonesian
        'it-ch' => 'italiano (Switzerland)', // Italian
        'it' => 'italiano', // Italian
        'ja' => '日本語', // Japanese
        'kn' => 'ಕನ್ನಡ', // Kannada
        'kk' => 'Қазақ', // Kazakh
        'rw' => 'Kinyarwanda', // Kinyarwanda
        'kok' => 'कोंकणी', // Konkani
        'ko' => '한국어/조선말', // Korean
        'kz' => 'Кыргыз', // Kyrgyz
        'lv' => 'Latviešu', // Latvian
        'lt' => 'Lietuviškai', // Lithuanian
        'luo'=> 'Dholuo', // Luo
        'ms' => 'Bahasa Melayu', // Malay
        'mk' => 'Македонски', // Macedonian
        'ml' => 'മലയാളം', // Malayalam
        'mt' => 'Malti', // Maltese
        'mr' => 'मराठी', // Marathi
        'mn' => 'Монгол', // Mongolian (Cyrillic)
        'ne' => 'नेपाली', // Nepali
        'nb-no' => 'Norsk bokmål', // Norwegian Bokmål
        'nb' => 'Norsk bokmål', // Norwegian Bokmål
        'nn-no' => 'Norsk nynorsk', // Norwegian Nynorsk
        'nn' => 'Norsk nynorsk', // Norwegian Nynorsk
        'no' => 'Norsk', // Norwegian
        'or' => 'ଓଡ଼ିଆ', // Oriya
        'ps' => 'پښتو', // Pashto
        'pl' => 'polski', // Polish
        'pt-br' => 'português brasileiro', // Portuguese (Brasil)
        'pt' => 'português', // Portuguese
        'pa' => 'پنجابی/ਪੰਜਾਬੀ', // Punjabi
        'qu' => 'Runa Simi/Kichwa', // Quechua
        'rm' => 'Romansch', // Rhaeto-Romanic
        'ro-md' => 'Română (Moldova)', // Romanian
        'ro' => 'Română', // Romanian
        'rn' => 'kiRundi', // Rundi
        'ru-md' => 'Pyccĸий (Moldova)', // Russian
        'ru' => 'Pyccĸий', // Russian
        'sg' => 'yângâ tî sängö', // Sango
        'sa' => 'संस्कृतम्', // Sanskrit
        'sc' => 'sardu', // Sardinian
        'sr' => 'Srpski/српски', // Serbian
        'sn' => 'chiShona', // Shona
        'ii' => 'ꆇꉙ', // Sichuan Yi
        'si' => 'සිංහල', // Sinhalese, Sinhala
        'sk' => 'Slovenčina', // Slovak
        'ls' => 'Slovenščina', // Slovenian
        'so' => 'Soomaaliga/af Soomaali', // Somali
        'st' => 'Sesotho', // Sotho, Sutu
        'es-ar' => 'Español (Argentina)', // Spanish
        'es-bo' => 'Español (Bolivia)', // Spanish
        'es-cl' => 'Español (Chile)', // Spanish
        'es-co' => 'Español (Colombia)', // Spanish
        'es-cr' => 'Español (Costa Rica)', // Spanish
        'es-do' => 'Español (Dominican Republic)',// Spanish
        'es-ec' => 'Español (Ecuador)', // Spanish
        'es-sv' => 'Español (El Salvador)', // Spanish
        'es-gt' => 'Español (Guatemala)', // Spanish
        'es-hn' => 'Español (Honduras)', // Spanish
        'es-mx' => 'Español (Mexico)', // Spanish
        'es-ni' => 'Español (Nicaragua)', // Spanish
        'es-pa' => 'Español (Panama)', // Spanish
        'es-py' => 'Español (Paraguay)', // Spanish
        'es-pe' => 'Español (Peru)', // Spanish
        'es-pr' => 'Español (Puerto Rico)', // Spanish
        'es-us' => 'Español (United States)', // Spanish
        'es-uy' => 'Español (Uruguay)', // Spanish
        'es-ve' => 'Español (Venezuela)', // Spanish
        'es' => 'Español', // Spanish
        'sw' => 'kiswahili', // Swahili
        'sv-fi' => 'svenska (Finland)', // Swedish
        'sv' => 'svenska', // Swedish
        'syr' => 'ܣܘܪܝܝܐ', // Syriac
        'ta' => 'தமிழ்', // Tamil
        'tt' => 'татарча/تاتارچا', // Tatar
        'te' => 'తెలుగు', // Telugu
        'th' => 'ภาษาไทย', // Thai
        'ti' => 'ትግርኛ', // Tigrinya
        'ts' => 'Xitsonga', // Tsonga
        'tn' => 'Setswana', // Tswana
        'tr' => 'Türkçe', // Turkish
        'tk' => 'Түркмен', // Turkmen
        'ug' => 'ئۇيغۇرچە/Uyƣurqə/Уйғурчә', // Uighur, Uyghur
        'uk' => 'Українська', // Ukrainian
        'ur' => 'اردو', // Urdu
        'uz' => 'o\'zbek', // Uzbek
        've' => 'Tshivenḓa', // Venda
        'vi' => 'Tiếng Việt', // Vietnamese
        'wa' => 'walon', // Waloon
        'cy' => 'Cymraeg', // Welsh
        'wo' => 'Wolof', // Wolof
        'xh' => 'isiXhosa', // Xhosa
        'yi' => 'ייִדיש', // Yiddish
        'yo' => 'Yorùbá', // Yoruba
        'zu' => 'isiZulu', // Zulu
    );
    /**
     * Reads `language`/`region` from the request and loads the supported
     * codes from the `language_codes` setting of the `language_redirect`
     * configuration group.
     */
    private function __construct(){
        // isset() guards fix undefined-index notices (PHP 8: warnings) when
        // the request does not carry these parameters; the values are still
        // passed through General::sanitize() exactly as before.
        $this->_language = General::sanitize(isset($_REQUEST['language']) ? $_REQUEST['language'] : null);
        $this->_region = General::sanitize(isset($_REQUEST['region']) ? $_REQUEST['region'] : null);
        $this->_language_code = $this->_region ? $this->_language.'-'.$this->_region : $this->_language;
        $supported_language_codes = explode(',', General::sanitize(Symphony::Configuration()->get('language_codes', 'language_redirect')));
        $this->_supported_language_codes = $this->cleanLanguageCodes($supported_language_codes);
    }
    /**
     * Lazily create and return the shared singleton instance.
     *
     * @return LanguageRedirect
     */
    public static function instance() {
        if (!self::$_instance) {
            self::$_instance = new self();
        }
        return self::$_instance;
    }
    /**
     * Get current language.
     *
     * @return string
     */
    public function getLanguage(){
        return $this->_language;
    }
    /**
     * Get current region.
     *
     * @return string
     */
    public function getRegion(){
        return $this->_region;
    }
    /**
     * Get current language code.
     *
     * @return string
     */
    public function getLanguageCode(){
        return $this->_language_code;
    }
    /**
     * Get supported language codes.
     *
     * @return array
     */
    public function getSupportedLanguageCodes(){
        return $this->_supported_language_codes;
    }
    /**
     * Return languages array (code => native name).
     *
     * @return array
     */
    public function getAllLanguages(){
        return $this->_languages;
    }
    /*------------------------------------------------------------------------------------------------*/
    /* Utilities */
    /*------------------------------------------------------------------------------------------------*/
    /**
     * Trim whitespace around each code and drop empty entries.
     * Note: array_filter() preserves the original array keys.
     *
     * @param array $language_codes
     * @return array
     */
    public function cleanLanguageCodes($language_codes){
        $clean = array_map('trim', $language_codes);
        $clean = array_filter($clean);
        return $clean;
    }
}
| 37.60223 | 134 | 0.504498 |
f881928db93179030bf2887c8e6986133f13352f | 311 | lua | Lua | Projects/DataTemplates/template_lua/test_tbexcelfromjsonmultirow.lua | fanlanweiy/luban_examples | 9ddca2a01e8db1573953be3f32c59104451cd96e | [
"MIT"
] | 44 | 2021-05-06T06:16:55.000Z | 2022-03-30T06:27:25.000Z | Projects/DataTemplates/template_lua/test_tbexcelfromjsonmultirow.lua | HFX-93/luban_examples | 5b90e392d404950d12ff803a186b26bdea5e0292 | [
"MIT"
] | 1 | 2021-07-25T16:35:32.000Z | 2021-08-23T04:59:49.000Z | Projects/DataTemplates/template_lua/test_tbexcelfromjsonmultirow.lua | HFX-93/luban_examples | 5b90e392d404950d12ff803a186b26bdea5e0292 | [
"MIT"
] | 14 | 2021-06-09T10:38:59.000Z | 2022-03-30T06:27:24.000Z |
-- Data table: two records keyed by numeric id. Each record carries scalar
-- fields (id, x) and an `items` list whose entries hold x/y/z scalars, an
-- `a` pair {x, y} and a `b` integer list.
-- NOTE(review): looks like generated test-fixture data — confirm against
-- the tool that produced it before editing values.
return
{
    [1] = {id=1,x=5,items={{x=1,y=true,z="abcd",a={x=10,y=100,},b={1,3,5,},},{x=2,y=false,z="abcd",a={x=22,y=33,},b={4,5,},},},},
    [2] = {id=2,x=9,items={{x=2,y=true,z="abcd",a={x=10,y=11,},b={1,3,5,},},{x=4,y=false,z="abcd",a={x=22,y=33,},b={4,5,},},{x=5,y=false,z="abcd",a={x=22,y=33,},b={4,5,},},},},
}
| 44.428571 | 172 | 0.463023 |
6f5481483630fab3929c5afff0a8d8782f073f9d | 408 | rb | Ruby | qa/qa/page/main/oauth.rb | abdullatif1992/gitlabhq | 2de002b3db1bc199755f8be212fa8804fcb80905 | [
"MIT"
] | 1 | 2018-07-31T00:12:33.000Z | 2018-07-31T00:12:33.000Z | qa/qa/page/main/oauth.rb | abdullatif1992/gitlabhq | 2de002b3db1bc199755f8be212fa8804fcb80905 | [
"MIT"
] | 5 | 2021-05-21T00:46:08.000Z | 2022-03-02T11:15:45.000Z | qa/qa/page/main/oauth.rb | abdullatif1992/gitlabhq | 2de002b3db1bc199755f8be212fa8804fcb80905 | [
"MIT"
] | 2 | 2019-11-28T19:12:40.000Z | 2020-11-04T05:29:58.000Z | module QA
module Page
module Main
# Page object for the OAuth authorization screen (the view template path
# indicates it is rendered by Doorkeeper).
class OAuth < Page::Base
  # Declares the view template and the element this page interacts with;
  # the string literals must match the template source exactly.
  view 'app/views/doorkeeper/authorizations/new.html.haml' do
    element :authorization_button, 'submit_tag "Authorize"'
  end

  # True when the current browser URL contains an OAuth path, i.e. the
  # user is being asked to grant authorization.
  def needs_authorization?
    page.current_url.include?('/oauth')
  end

  # Grant access by clicking the "Authorize" button.
  def authorize!
    click_button 'Authorize'
  end
end
end
end
end
| 20.4 | 67 | 0.598039 |
3895c2dac4fe91113cd4e588a16c600c628b1428 | 91 | php | PHP | sites/index.php | cbatista8a/docker-lamp | ecfc33edcc9e5530f16ba85310478d60cceee303 | [
"MIT"
] | null | null | null | sites/index.php | cbatista8a/docker-lamp | ecfc33edcc9e5530f16ba85310478d60cceee303 | [
"MIT"
] | null | null | null | sites/index.php | cbatista8a/docker-lamp | ecfc33edcc9e5530f16ba85310478d60cceee303 | [
"MIT"
] | null | null | null | <html>
<head>
  <title>Hello Docker</title>
</head>
<body>
  <!-- Static page that simply displays "Hello Docker". -->
  Hello Docker
</body>
</html>
| 9.1 | 31 | 0.615385 |
80f694ead0128a352106ea998f863e06c1a0fb1c | 1,494 | dart | Dart | test/render_mark_test.dart | contentstack/contentstack-utils-dart | a3c1db59179fd672d01dfdf3de98ccf382fcdea5 | [
"MIT"
] | null | null | null | test/render_mark_test.dart | contentstack/contentstack-utils-dart | a3c1db59179fd672d01dfdf3de98ccf382fcdea5 | [
"MIT"
] | null | null | null | test/render_mark_test.dart | contentstack/contentstack-utils-dart | a3c1db59179fd672d01dfdf3de98ccf382fcdea5 | [
"MIT"
] | 1 | 2021-04-15T19:17:30.000Z | 2021-04-15T19:17:30.000Z | import 'package:contentstack_utils/src/model/Option.dart';
import 'package:test/expect.dart';
import 'package:test/scaffolding.dart';
void main() {
  // Every mark style wraps the same sample text in a single HTML tag, so
  // the seven cases are driven from one style -> tag table instead of
  // seven hand-written copies. Map literal order preserves the original
  // test order; names and assertions are unchanged.
  const sample = 'lorem ipsum lorem ipsum';
  const tagByStyle = {
    'superscript': 'sup',
    'subscript': 'sub',
    'inlineCode': 'span',
    'strikethrough': 'strike',
    'underline': 'u',
    'italic': 'em',
    'bold': 'strong',
  };
  tagByStyle.forEach((styleType, tag) {
    test('test option $styleType styletype', () {
      final rendered = Option().renderMark(styleType, sample);
      expect('<$tag>$sample</$tag>', rendered);
    });
  });
}
| 35.571429 | 79 | 0.662651 |
d5772eae82d09423d8f21d9f657e8d5a7e3382a2 | 245 | swift | Swift | crashes-duplicates/13580-swift-typechecker-checksubstitutions.swift | radex/swift-compiler-crashes | 41a18a98ae38e40384a38695805745d509b6979e | [
"MIT"
] | null | null | null | crashes-duplicates/13580-swift-typechecker-checksubstitutions.swift | radex/swift-compiler-crashes | 41a18a98ae38e40384a38695805745d509b6979e | [
"MIT"
] | null | null | null | crashes-duplicates/13580-swift-typechecker-checksubstitutions.swift | radex/swift-compiler-crashes | 41a18a98ae38e40384a38695805745d509b6979e | [
"MIT"
] | null | null | null | // Distributed under the terms of the MIT license
// Test case submitted to project by https://github.com/practicalswift (practicalswift)
// Test case found by fuzzing
enum A {
{
}
enum S<e {
{
}
{
}
func b( f
{
}
}
enum S<I : S<Int>
let f = S
| 12.894737 | 87 | 0.665306 |
35d17e941d18189b429c256b0990de2de1009f1a | 214 | swift | Swift | Example/ItemEdit.swift | Weebly/Cereal | 11833453f2ef1e19afb0e97e545d5e2b7df4e044 | [
"BSD-3-Clause"
] | 410 | 2015-10-16T00:33:12.000Z | 2021-12-07T08:42:50.000Z | Example/ItemEdit.swift | Weebly/Cereal | 11833453f2ef1e19afb0e97e545d5e2b7df4e044 | [
"BSD-3-Clause"
] | 26 | 2015-10-17T10:15:39.000Z | 2017-11-16T12:20:18.000Z | Example/ItemEdit.swift | Weebly/Cereal | 11833453f2ef1e19afb0e97e545d5e2b7df4e044 | [
"BSD-3-Clause"
] | 23 | 2015-10-23T05:16:50.000Z | 2018-03-26T11:08:08.000Z | //
// ItemEdit.swift
// Cereal
//
// Created by James Richard on 9/30/15.
// Copyright © 2015 Weebly. All rights reserved.
//
import Foundation
/// The editing state of an item: either a brand-new item is being created,
/// or an existing item at a given index path is being edited.
enum ItemEdit {
    /// A new item is being created.
    case creating
    /// The item at the associated `IndexPath` is being edited.
    case editing(IndexPath)
}
| 14.266667 | 49 | 0.668224 |
589deb75b6eebc5f0c11156b959e21f2461163a2 | 3,090 | css | CSS | css/good_typography/intentional_typography.css | jpamental/rwt-workshop | 0feaeea5fd3a33f59db9c3084b62915ece9eceef | [
"MIT"
] | 2 | 2016-09-13T07:24:49.000Z | 2020-02-11T04:33:04.000Z | css/good_typography/intentional_typography.css | jpamental/rwt-workshop | 0feaeea5fd3a33f59db9c3084b62915ece9eceef | [
"MIT"
] | null | null | null | css/good_typography/intentional_typography.css | jpamental/rwt-workshop | 0feaeea5fd3a33f59db9c3084b62915ece9eceef | [
"MIT"
] | null | null | null | body,
p,
th,
td,
ul,
li {
  /* Primary serif stack with ligatures ("liga") and kerning ("kern") enabled.
     NOTE(review): "New Times Roman" looks like a typo for "Times New Roman";
     as written that fallback would not match the common system font. It is
     spelled this way consistently throughout the file, so confirm intent
     before changing. */
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
  font-feature-settings: "liga" 1, "kern" 1;
}
p {
  /* Warning: Needed for oldIE support, but words are broken up letter-by-letter */
  -ms-word-break: break-all;
  word-break: break-all;
  /* Non standard for webkit */
  word-break: break-word;
  /* Automatic hyphenation where supported (vendor prefixes first). */
  -webkit-hyphens: auto;
  -moz-hyphens: auto;
  -ms-hyphens: auto;
  hyphens: auto;
}
/* All headings share the serif display face. */
h1, h2, h3, h4, h5, h6 {
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
}
/* h1: heaviest weight, with ligatures and kerning enabled. */
h1 {
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
  font-feature-settings: "liga" 1, "kern" 1;
  font-weight: 900;
}
/* h2: regular-weight italic. */
h2 {
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
  font-weight: 400;
  font-style: italic;
}
.byline {
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
}
blockquote {
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
}
.footer-contact p {
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
}
/* Space out the paragraph that immediately follows an h2. */
h2 + p {
  margin-top: 1em;
}
/* Section end styles */
.section-end:after {
  color: #777;
  content: "\00a7"; /* section sign (§) */
  display: block;
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
  font-size: 1.5em;
  font-weight: 300;
  margin-top: 1em;
  text-align: center;
}
/* First-line styles: bold small-caps on the first line of a new section
   and on paragraphs explicitly tagged .first-line. */
.section-end + p:first-line,
p.first-line:first-line {
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
  font-weight: bold;
  font-variant: small-caps;
}
/* Content end styles */
.content-area p:last-child:after {
  color: #777;
  content: "\2766"; /* floral heart / end-of-text ornament (❦) */
  display: inline;
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
  font-size: 1.25em;
  font-style: italic;
  font-weight: 300;
}
/* Initial Cap styles: drop cap on the first letter of the first paragraph,
   scaled up at wider viewports via the media queries below. */
.article-detail.initial p:first-of-type:first-letter {
  float: left;
  padding-right: 0.05em;
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
  font-weight: bold;
  font-size: 3em;
  line-height: 0.85em;
}
.article-detail.initial p:first-of-type:first-line {
  text-transform: uppercase;
}
/* .lt-ie9 appears to be a legacy-IE (< 9) marker class; this mirrors the
   drop cap for those browsers using footer + p instead of :first-of-type. */
.lt-ie9 .article-detail.initial footer + p:first-letter {
  font-size: 4em;
  font-family: "Playfair Display", Georgia, "New Times Roman", serif;
  line-height: 0.85em;
  float: left;
  padding-right: 0.15em;
}
@media screen and (min-width: 43.75em) {
  .article-detail.initial p:first-of-type:first-letter {
    font-size: 5em;
    line-height: 0.725em;
    padding-right: 0.05em;
  }
  .lt-ie9 .article-detail.initial footer + p:first-letter {
    font-size: 5em;
  }
}
@media screen and (min-width: 81.25em) {
  .article-detail.initial p:first-of-type:first-letter {
    font-size: 5em;
    line-height: 0.8em;
  }
}
/* German styles: German compounds get very long, so undo the aggressive
   word breaking and automatic hyphenation applied by the base `p` rule. */
.lang-de p {
  /* Fix: the original used `word-break: none`, which is not a valid value
     (normal | break-all | keep-all | break-word) and was silently ignored,
     leaving a later non-standard `word-break: break-word` in effect.
     `normal` is the correct reset and matches the rule's evident intent. */
  -ms-word-break: normal;
  word-break: normal;
  -webkit-hyphens: none;
  -moz-hyphens: none;
  -ms-hyphens: none;
  hyphens: none;
}
| 21.458333 | 81 | 0.656958 |
Subsets and Splits