| code (string, lengths 0-29.6k) | language (string, 9 classes) | AST_depth (int64, 3-30) | alphanumeric_fraction (float64, 0.2-0.86) | max_line_length (int64, 13-399) | avg_line_length (float64, 5.02-139) | num_lines (int64, 7-299) | source (string, 4 classes) |
|---|---|---|---|---|---|---|---|
// Copyright 2018 cloudy All rights reserved.
// Use of this source code is governed by a MIT style
// license that can be found in the LICENSE file.
package consts
const VERSION = "0.1.0"
//default value
const (
//user upload file url
USER_UPLOAD_FILE_URL = "/upload_files/"
//system file url
SYSTEM_STATIC_FILE_URL = "/system_statics/"
// default database type
DefaultDatabase = "postgres"
// DefaultConfigFile name of config file (toml format)
DefaultConfigFile = "config.toml"
// DefaultWorkdirName name of working directory
DefaultWorkdirName = "config"
// DefaultPidFilename is default filename of pid file
DefaultPidFilename = "base-framework.pid"
// DefaultLockFilename is default filename of lock file
DefaultLockFilename = "base-framework.lock"
//DefaultLogFileName
DefaultLogFileName = "base-framework.log"
// server file dir
DefaultSystemDataDirName = "system-data"
// user file upload file dir
DefaultUserDataDirName = "user-data"
// temp file dir
DefaultTempDirName = "framework-temp"
)
//context variable
const (
// login user name
LoginUserName = "LOGIN_USER_NAME"
// login user id
LoginUserID = "LOGIN_USER_ID"
// login user roles []string
LoginUserRoleIds = "LOGIN_USER_ROLE_IDS"
LoginUserRoleCodes = "LOGIN_USER_ROLE_CODES"
//login user is admin
LoginIsAdmin = "LOGIN_IS_ADMIN"
// token is valid
TokenValid = "TOKEN_VALID"
)
const (
DefaultPage = 1
DefaultSize = 20
)
| go | 7 | 0.739422 | 56 | 24.672414 | 58 | starcoderdata |
package crex
import (
"github.com/evzpav/crex/utils"
"strconv"
)
var idGen *utils.IdGenerate
func SetIdGenerate(g *utils.IdGenerate) {
idGen = g
}
func GenOrderId() string {
id := idGen.Next()
return strconv.Itoa(int(id))
}
| go | 9 | 0.699571 | 41 | 12.705882 | 17 | starcoderdata |
Error ReindexerImpl::closeNamespace(string_view nsName, const RdxContext& ctx, bool dropStorage, bool enableDropSlave) {
Namespace::Ptr ns;
try {
ULock lock(mtx_, &ctx);
auto nsIt = namespaces_.find(nsName);
if (nsIt == namespaces_.end()) {
return Error(errNotFound, "Namespace '%s' does not exist", nsName);
}
// Temporarily keep the namespace alive so its destructor runs later, without the lock held
ns = nsIt->second;
if (ns->GetReplState(ctx).slaveMode && !enableDropSlave) {
return Error(errLogic, "Can't modify slave ns '%s'", nsName);
}
namespaces_.erase(nsIt);
if (dropStorage) {
ns->DeleteStorage(ctx);
} else {
ns->CloseStorage(ctx);
}
if (dropStorage) observers_.OnWALUpdate(0, nsName, WALRecord(WalNamespaceDrop));
} catch (const Error& err) {
ns.reset();
return err;
}
// The destructor will be called here, after the lock has been released
ns.reset();
return errOK;
}
| c++ | 11 | 0.683544 | 120 | 27.064516 | 31 | inline |
<?php
namespace GD\Tests;
use GD\Helpers\BuildOutContent;
class BuildOutContentTest extends TestCase
{
use BuildOutContent;
public function testShouldAddParentMethodInfoToArray()
{
$name = 'testFoo';
$results = $this->getParentLevelContent($name);
$what_it_should_be = [
"method_name" => $name
];
$this->assertEquals($what_it_should_be["method_name"], $results["method_name"]);
}
public function testShouldAddStepMethodInfoToArray()
{
$name = 'foo';
$results = $this->getStepLevelContent($name);
$what_it_should_be = [
'method_name' => "foo",
'reference' => '$this->foo()',
];
$this->assertEquals($what_it_should_be, $results);
$this->assertArrayHasKey("method_name", $results);
$this->assertEquals("foo", $what_it_should_be["method_name"]);
}
}
| php | 11 | 0.582535 | 88 | 19.866667 | 45 | starcoderdata |
using System.Threading.Tasks;
using Plcway.Communication.Ethernet.Profinet.AllenBradley;
namespace Plcway.Communication.Ethernet.Profinet.Melsec
{
/// <summary>
/// Implementation of the EIP protocol for Mitsubishi PLCs; use this class when the PLC is accessed through a QJ71EIP71 module.
/// </summary>
public class MelsecCipNet : AllenBradleyNet
{
public MelsecCipNet()
{
}
public MelsecCipNet(string ipAddress, int port = 44818)
: base(ipAddress, port)
{
}
/// <summary>
/// Read data information, data length for read array length information
/// </summary>
/// <param name="address">Address format of the node</param>
/// <param name="length">In the case of arrays, the length of the array</param>
/// <returns>data with result object</returns>
public override OperateResult<byte[]> Read(string address, ushort length)
{
return Read(new string[1] { address }, new int[1] { length });
}
public override async Task<OperateResult<byte[]>> ReadAsync(string address, ushort length)
{
return await ReadAsync(new string[1] { address }, new int[1] { length });
}
}
}
| c# | 15 | 0.705128 | 92 | 28.513514 | 37 | starcoderdata |
package leetcode.search_insert_position;
public class Solution {
public int searchInsert(int[] A, int target) {
if(A == null) return 0;
if(A.length == 0) return 0;
int U = A.length;
int L = 0;
if(target <= A[0]) return 0;
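// Invariant: A[L] < target and (U == A.length || A[U] > target); when the loop ends, U is the insertion index.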
while(L + 1 < U){
int mid = (L + U) / 2;
if(A[mid] == target) return mid;
if(A[mid] > target){
U = mid;
}else{
L = mid;
}
}
return U;
}
public static void main(String[] args){
System.err.println("" + new Solution().searchInsert(new int[]{1, 2, 3, 5}, 0));
System.err.println("" + new Solution().searchInsert(new int[]{1, 2, 3, 5}, 1));
System.err.println("" + new Solution().searchInsert(new int[]{1, 2, 3, 5}, 2));
System.err.println("" + new Solution().searchInsert(new int[]{1, 2, 3, 5}, 3));
System.err.println("" + new Solution().searchInsert(new int[]{1, 2, 3, 5}, 4));
System.err.println("" + new Solution().searchInsert(new int[]{1, 2, 3, 5}, 5));
System.err.println("" + new Solution().searchInsert(new int[]{1, 2, 3, 5}, 6));
}
}
| java | 13 | 0.544878 | 84 | 34.580645 | 31 | starcoderdata |
async def test_invalid_methods_fail_append_async(self, resource_group, location, storage_account,
storage_account_key):
await self._setup(storage_account, storage_account_key)
self.bsc.key_encryption_key = KeyWrapper('key1')
blob_name = self._get_blob_reference(BlobType.AppendBlob)
blob = self.bsc.get_blob_client(self.container_name, blob_name)
# Assert
with self.assertRaises(ValueError) as e:
await blob.append_block(urandom(32))
self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
with self.assertRaises(ValueError) as e:
await blob.create_append_blob()
self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
# All append_from operations funnel into append_from_stream, so testing one is sufficient
with self.assertRaises(ValueError) as e:
await blob.upload_blob(b'To encrypt', blob_type=BlobType.AppendBlob)
self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)
| python | 11 | 0.666667 | 97 | 55.3 | 20 | inline |
def createPlayMenu(self):
"""creates the menu to select which level is going to be played
Note:
TODO Make columns rows dynamic with value of Levels
Returns:
Menu: contains the Play menu
Test:
* the score must be set with the help of the decorator
* the size of this submenu must match WINDOWSIZE
* clicking play on a existing level must start the gameloop
"""
# pygame_menu makes the screen scrollable if the levels don't fit next to each other
playMenu = pygame_menu.Menu(width=WINDOWSIZE[0],height=WINDOWSIZE[1], theme=self.myTheme, title=" ", columns=2, rows=3)
decorator = playMenu.get_decorator()
self.decorator.append(decorator)
self.addScore(decorator)
playMenu.add.label(f"Level One")
playMenu.add.image(self.levelOnePreview)
playMenu.add.button(f"PLAY", self.game.gameLoop, self.player, self, 1)
playMenu.add.label(f"Level Two")
playMenu.add.image(self.comingSoon)
playMenu.add.button(f"PLAY")
logger.info(f"Play menu created")
return playMenu
| python | 9 | 0.618609 | 127 | 37.516129 | 31 | inline |
import {createElement, Component, render} from 'rax';
import Bar from './Bar';
import BarRegion from './BarRegion';
import Area from './Area';
import AreaStack from './AreaStack';
import Line from './Line';
import LinePoint from './LinePoint';
import LineSmooth from './LineSmooth';
import Point from './Point';
import Pie from './Pie';
import PieRose from './PieRose';
class Page extends Component {
render() {
return (
<div>
<Bar />
<BarRegion />
<Area />
<AreaStack />
<Line />
<LinePoint />
<LineSmooth />
<Point />
<Pie />
<PieRose />
</div>
);
}
}
render(<Page />);
| javascript | 8 | 0.595645 | 53 | 19.741935 | 31 | starcoderdata |
using System;
namespace Telegram.Bot.Helper
{
///
/// Inline command is callback data of inline button. This command is split by selected separator.
///
public class InlineCommand
{
///
/// Count of commands
///
public int Count => Commands.Length;
///
/// Commands parsed from callback data
///
public readonly string[] Commands;
internal InlineCommand(string command, string separator)
{
Commands = command.Split(new[] { separator }, StringSplitOptions.None);
}
public bool Equals(InlineCommand valueToCompareWith)
{
if (valueToCompareWith == null || Count != valueToCompareWith.Count)
return false;
for (int commandIndex = 0; commandIndex < Count; commandIndex++)
{
if (string.IsNullOrWhiteSpace(Commands[commandIndex]))
continue;
if (string.IsNullOrWhiteSpace(valueToCompareWith.Commands[commandIndex]))
continue;
if (Commands[commandIndex] != valueToCompareWith.Commands[commandIndex])
return false;
}
return true;
}
}
}
| c# | 16 | 0.564759 | 102 | 29.883721 | 43 | starcoderdata |
def _decode_record(record, name_to_features):
"""Decodes a record to a TensorFlow example."""
example = tf.parse_single_example(record, name_to_features)
# tf.Example only supports tf.int64, but the TPU only supports tf.int32.
# So cast all int64 to int32.
example["input_ids_1"] = tf.cast(example["input_ids_1"], tf.int32)
example["input_ids_2"] = tf.cast(example["input_ids_2"], tf.int32)
example["documents_match_labels"] = tf.cast(example["documents_match_labels"],
tf.float32)
example["input_mask_1"] = tf.cast(example["input_mask_1"], tf.int32)
example["input_mask_2"] = tf.cast(example["input_mask_2"], tf.int32)
if (FLAGS.train_mode == constants.TRAIN_MODE_PRETRAIN or
FLAGS.train_mode == constants.TRAIN_MODE_JOINT_TRAIN):
example["masked_lm_ids_1"] = tf.cast(example["masked_lm_ids_1"], tf.int32)
example["masked_lm_ids_2"] = tf.cast(example["masked_lm_ids_2"], tf.int32)
example["masked_lm_weights_1"] = tf.cast(example["masked_lm_weights_1"],
tf.float32)
example["masked_lm_weights_2"] = tf.cast(example["masked_lm_weights_2"],
tf.float32)
example["masked_lm_positions_1"] = tf.cast(example["masked_lm_positions_1"],
tf.int32)
example["masked_lm_positions_2"] = tf.cast(example["masked_lm_positions_2"],
tf.int32)
return example
| python | 11 | 0.588042 | 80 | 62.458333 | 24 | inline |
<?php
namespace App\Http\Controllers\Frontend\User;
use App\Models\Cases;
use App\Models\StudentAnswer;
use App\Models\Task;
use Illuminate\Contracts\Foundation\Application;
use Illuminate\Contracts\View\Factory;
use Illuminate\Contracts\View\View;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Auth;
use JetBrains\PhpStorm\NoReturn;
/**
* Class TaskController.
*/
class TaskController
{
/**
* @param $id
* @return Application|Factory|View
*/
public function index($caseId, $id)
{
$case = Cases::find($caseId);
$task = Task::find($id);
$studentId = Auth::user()->id;
$studentAnswer = StudentAnswer::select()
->where('user_id', $studentId)
->where('cases_id', $caseId)
->where('task_id', $id)
->first();
return view('frontend.user.case.task')
->with('case',$case)
->with('task',$task)
->with('student_answer',$studentAnswer);
}
/**
* @param $id
* @return Application|Factory|View
*/
public function ending($caseId, $id)
{
$case = Cases::find($caseId);
$task = Task::find($id);
$studentId = Auth::user()->id;
$studentAnswer = StudentAnswer::select()
->where('user_id', $studentId)
->where('cases_id', $caseId)
->where('task_id', $id)
->first();
return view('frontend.user.case.task.task-ending')
->with('case',$case)
->with('task',$task)
->with('student_answer',$studentAnswer);
}
/**
* @param Request $request
* @return mixed
*/
public function store(Request $request, $caseId, $id)
{
//dd($request->input());
$userId = Auth::user()->id;
StudentAnswer::updateOrCreate(
// Check if available
[
'user_id' => $userId,
'cases_id' => $caseId,
'task_id' => $id
],
// Create or Update Value
[
'user_id' => $userId,
'cases_id' => $caseId,
'task_id' => $id,
'emo_1' => $request->input('emo_1'),
'emo_2' => $request->input('emo_2'),
'nvc_1' => $request->input('nvc_1'),
'nvc_2' => $request->input('nvc_2'),
'nvc_3' => $request->input('nvc_3'),
'nvc_4' => $request->input('nvc_4'),
]
);
if($request->input('nvc_1') && $request->input('nvc_2') && $request->input('nvc_3') && $request->input('nvc_4')){
$redirect = redirect()->route('frontend.user.case.task.ending', ['caseId' => $caseId, 'id' => $id])->withFlashSuccess(__('The case was successfully created.'));
} else {
$redirect = redirect()->route('frontend.user.case', ['id' => $caseId])->withFlashSuccess(__('The case was successfully created.'));
}
return $redirect;
}
/**
* @param Request $request
* @return mixed
*/
public function storeEnding(Request $request, $caseId, $id)
{
//dd($request->input());
$userId = Auth::user()->id;
StudentAnswer::updateOrCreate(
// Check if available
[
'user_id' => $userId,
'cases_id' => $caseId,
'task_id' => $id
],
// Create or Update Value
[
'user_id' => $userId,
'cases_id' => $caseId,
'task_id' => $id,
'nvc_end' => $request->input('nvc_end')
]
);
return redirect()->route('frontend.user.case', ['id' => $caseId])->withFlashSuccess(__('The case was successfully created.'));
}
}
| php | 17 | 0.4909 | 172 | 27.072993 | 137 | starcoderdata |
using Bb.Oracle.Models;
using Bb.Oracle.Structures.Models;
using Bb.Oracle.Validators;
using Bb.Oracle.Visitors;
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bb.Oracle.Solutions
{
public abstract class ScriptParsers : ISolutionEvaluator
{
public ScriptParsers(ScriptParserContext ctx)
{
this._Current_context = ctx;
}
/// <summary>
/// Evaluate all script files in the visitor
/// </summary>
/// <typeparam name="T">result type of the parse tree visitor</typeparam>
/// <param name="visitor">visitor that evaluates each script</param>
public abstract void Visit<T>(Antlr4.Runtime.Tree.IParseTreeVisitor<T> visitor);
/// <summary>
/// Parse all scripts if the filter returns true
/// </summary>
/// <typeparam name="T">result type of the parse tree visitor</typeparam>
/// <param name="filter">function returning true or false when the script must be parsed</param>
/// <param name="scripts">List of scripts</param>
/// <param name="visitor">visitor that must parse the script's list</param>
/// <returns>count of parsed scripts</returns>
protected int Process<T>(Func<ScriptFileInfo, bool> filter, List<ScriptFileInfo> scripts, Antlr4.Runtime.Tree.IParseTreeVisitor<T> visitor)
{
int count = 0;
int cut = this._Current_context._cut;
List<ScriptFileInfo> _scripts = new List<ScriptFileInfo>();
foreach (ScriptFileInfo script in scripts)
if (filter(script))
{
count++;
try
{
script.Visit(visitor);
_scripts.Add(script);
}
catch (Exception e)
{
if (System.Diagnostics.Debugger.IsAttached)
System.Diagnostics.Debugger.Break();
throw e;
}
}
return count;
}
protected readonly ScriptParserContext _Current_context;
}
}
| c# | 19 | 0.556951 | 147 | 29.786667 | 75 | starcoderdata |
package main
import "fmt"
type Figure interface {
Area() int
Scale(int)
}
type Square struct {
a int
}
func (s Square) Area() int {
return s.a * s.a
}
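// Scale uses a pointer receiver so the change to a is visible to the caller; only *Square (not Square) therefore satisfies Figure, which is why main assigns f = &s.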
func (s *Square) Scale(scaleValue int) {
s.a *= scaleValue
}
func main() {
var f Figure
s := Square{a: 5}
s.Area()
f = &s
f.Scale(2)
fmt.Println(f)
}
| go | 9 | 0.638122 | 41 | 10.3125 | 32 | starcoderdata |
/*
* Copyright (c) 2014, Inc.
* Copyright (c) 2016-2018, TES3MP Team
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
// If you want to change these defines, put them in NativeFeatureIncludesOverrides so your changes are not lost when updating RakNet
// The user should not edit this file
#include "NativeFeatureIncludesOverrides.h"
#ifndef __NATIVE_FEATURE_INCLDUES_H
#define __NATIVE_FEATURE_INCLDUES_H
// Uncomment below defines, and paste to NativeFeatureIncludesOverrides.h, to exclude plugins that you do not want to build into the static library, or DLL
// These are not all the plugins, only those that are in the core library
// Other plugins are located in DependentExtensions
// #define _CRABNET_SUPPORT_ConnectionGraph2 0
// #define _CRABNET_SUPPORT_DirectoryDeltaTransfer 0
// #define _CRABNET_SUPPORT_FileListTransfer 0
// #define _CRABNET_SUPPORT_FullyConnectedMesh2 0
// #define _CRABNET_SUPPORT_MessageFilter 0
// #define _CRABNET_SUPPORT_NatPunchthroughClient 0
// #define _CRABNET_SUPPORT_NatPunchthroughServer 0
// #define _CRABNET_SUPPORT_NatTypeDetectionClient 0
// #define _CRABNET_SUPPORT_NatTypeDetectionServer 0
// #define _CRABNET_SUPPORT_PacketLogger 0
// #define _CRABNET_SUPPORT_ReadyEvent 0
// #define _CRABNET_SUPPORT_ReplicaManager3 0
// #define _CRABNET_SUPPORT_Router2 0
// #define _CRABNET_SUPPORT_RPC4Plugin 0
// #define _CRABNET_SUPPORT_TeamBalancer 0
// #define _CRABNET_SUPPORT_TeamManager 0
// #define _CRABNET_SUPPORT_UDPProxyClient 0
// #define _CRABNET_SUPPORT_UDPProxyCoordinator 0
// #define _CRABNET_SUPPORT_UDPProxyServer 0
// #define _CRABNET_SUPPORT_ConsoleServer 0
// #define _CRABNET_SUPPORT_RakNetTransport 0
// #define _CRABNET_SUPPORT_TelnetTransport 0
// #define _CRABNET_SUPPORT_TCPInterface 0
// #define _CRABNET_SUPPORT_LogCommandParser 0
// #define _CRABNET_SUPPORT_RakNetCommandParser 0
// #define _CRABNET_SUPPORT_EmailSender 0
// #define _CRABNET_SUPPORT_HTTPConnection 0
// #define _CRABNET_SUPPORT_HTTPConnection2 0
// #define _CRABNET_SUPPORT_PacketizedTCP 0
// #define _CRABNET_SUPPORT_TwoWayAuthentication 0
// SET DEFAULTS IF UNDEFINED
/*#ifndef LIBCAT_SECURITY
#define LIBCAT_SECURITY 0
#endif*/
#ifndef _CRABNET_SUPPORT_ConnectionGraph2
#define _CRABNET_SUPPORT_ConnectionGraph2 1
#endif
#ifndef _CRABNET_SUPPORT_DirectoryDeltaTransfer
#define _CRABNET_SUPPORT_DirectoryDeltaTransfer 1
#endif
#ifndef _CRABNET_SUPPORT_FileListTransfer
#define _CRABNET_SUPPORT_FileListTransfer 1
#endif
#ifndef _CRABNET_SUPPORT_FullyConnectedMesh
#define _CRABNET_SUPPORT_FullyConnectedMesh 1
#endif
#ifndef _CRABNET_SUPPORT_FullyConnectedMesh2
#define _CRABNET_SUPPORT_FullyConnectedMesh2 1
#endif
#ifndef _CRABNET_SUPPORT_MessageFilter
#define _CRABNET_SUPPORT_MessageFilter 1
#endif
#ifndef _CRABNET_SUPPORT_NatPunchthroughClient
#define _CRABNET_SUPPORT_NatPunchthroughClient 1
#endif
#ifndef _CRABNET_SUPPORT_NatPunchthroughServer
#define _CRABNET_SUPPORT_NatPunchthroughServer 1
#endif
#ifndef _CRABNET_SUPPORT_NatTypeDetectionClient
#define _CRABNET_SUPPORT_NatTypeDetectionClient 1
#endif
#ifndef _CRABNET_SUPPORT_NatTypeDetectionServer
#define _CRABNET_SUPPORT_NatTypeDetectionServer 1
#endif
#ifndef _CRABNET_SUPPORT_PacketLogger
#define _CRABNET_SUPPORT_PacketLogger 1
#endif
#ifndef _CRABNET_SUPPORT_ReadyEvent
#define _CRABNET_SUPPORT_ReadyEvent 1
#endif
#ifndef _CRABNET_SUPPORT_ReplicaManager3
#define _CRABNET_SUPPORT_ReplicaManager3 1
#endif
#ifndef _CRABNET_SUPPORT_Router2
#define _CRABNET_SUPPORT_Router2 1
#endif
#ifndef _CRABNET_SUPPORT_RPC4Plugin
#define _CRABNET_SUPPORT_RPC4Plugin 1
#endif
#ifndef _CRABNET_SUPPORT_TeamBalancer
#define _CRABNET_SUPPORT_TeamBalancer 1
#endif
#ifndef _CRABNET_SUPPORT_TeamManager
#define _CRABNET_SUPPORT_TeamManager 1
#endif
#ifndef _CRABNET_SUPPORT_UDPProxyClient
#define _CRABNET_SUPPORT_UDPProxyClient 1
#endif
#ifndef _CRABNET_SUPPORT_UDPProxyCoordinator
#define _CRABNET_SUPPORT_UDPProxyCoordinator 1
#endif
#ifndef _CRABNET_SUPPORT_UDPProxyServer
#define _CRABNET_SUPPORT_UDPProxyServer 1
#endif
#ifndef _CRABNET_SUPPORT_ConsoleServer
#define _CRABNET_SUPPORT_ConsoleServer 1
#endif
#ifndef _CRABNET_SUPPORT_RakNetTransport
#define _CRABNET_SUPPORT_RakNetTransport 1
#endif
#ifndef _CRABNET_SUPPORT_TelnetTransport
#define _CRABNET_SUPPORT_TelnetTransport 1
#endif
#ifndef _CRABNET_SUPPORT_TCPInterface
#define _CRABNET_SUPPORT_TCPInterface 1
#endif
#ifndef _CRABNET_SUPPORT_LogCommandParser
#define _CRABNET_SUPPORT_LogCommandParser 1
#endif
#ifndef _CRABNET_SUPPORT_RakNetCommandParser
#define _CRABNET_SUPPORT_RakNetCommandParser 1
#endif
#ifndef _CRABNET_SUPPORT_EmailSender
#define _CRABNET_SUPPORT_EmailSender 1
#endif
#ifndef _CRABNET_SUPPORT_HTTPConnection
#define _CRABNET_SUPPORT_HTTPConnection 1
#endif
#ifndef _CRABNET_SUPPORT_HTTPConnection2
#define _CRABNET_SUPPORT_HTTPConnection2 1
#endif
#ifndef _CRABNET_SUPPORT_PacketizedTCP
#define _CRABNET_SUPPORT_PacketizedTCP 1
#endif
#ifndef _CRABNET_SUPPORT_TwoWayAuthentication
#define _CRABNET_SUPPORT_TwoWayAuthentication 1
#endif
#ifndef _CRABNET_SUPPORT_CloudClient
#define _CRABNET_SUPPORT_CloudClient 1
#endif
#ifndef _CRABNET_SUPPORT_CloudServer
#define _CRABNET_SUPPORT_CloudServer 1
#endif
#ifndef _CRABNET_SUPPORT_DynDNS
#define _CRABNET_SUPPORT_DynDNS 1
#endif
#ifndef _CRABNET_SUPPORT_Rackspace
#define _CRABNET_SUPPORT_Rackspace 1
#endif
#ifndef _CRABNET_SUPPORT_FileOperations
#define _CRABNET_SUPPORT_FileOperations 1
#endif
#ifndef _CRABNET_SUPPORT_UDPForwarder
#define _CRABNET_SUPPORT_UDPForwarder 1
#endif
#ifndef _CRABNET_SUPPORT_StatisticsHistory
#define _CRABNET_SUPPORT_StatisticsHistory 1
#endif
#ifndef _CRABNET_SUPPORT_LibVoice
#define _CRABNET_SUPPORT_LibVoice 0
#endif
#ifndef _CRABNET_SUPPORT_RelayPlugin
#define _CRABNET_SUPPORT_RelayPlugin 1
#endif
// Take care of dependencies
#if _CRABNET_SUPPORT_DirectoryDeltaTransfer==1
#undef _CRABNET_SUPPORT_FileListTransfer
#define _CRABNET_SUPPORT_FileListTransfer 1
#endif
#if _CRABNET_SUPPORT_FullyConnectedMesh2==1
#undef _CRABNET_SUPPORT_ConnectionGraph2
#define _CRABNET_SUPPORT_ConnectionGraph2 1
#endif
#if _CRABNET_SUPPORT_TelnetTransport==1
#undef _CRABNET_SUPPORT_PacketizedTCP
#define _CRABNET_SUPPORT_PacketizedTCP 1
#endif
#if _CRABNET_SUPPORT_PacketizedTCP==1 || _CRABNET_SUPPORT_EmailSender==1 || _CRABNET_SUPPORT_HTTPConnection==1
#undef _CRABNET_SUPPORT_TCPInterface
#define _CRABNET_SUPPORT_TCPInterface 1
#endif
#endif // __NATIVE_FEATURE_INCLDUES_H
| c | 6 | 0.785268 | 155 | 32.592965 | 199 | starcoderdata |
from dvc.output.base import BaseOutput
from ..fs.s3 import S3FileSystem
class S3Output(BaseOutput):
FS_CLS = S3FileSystem
| python | 5 | 0.756944 | 38 | 17 | 8 | starcoderdata |
const canvasTxt = require('canvas-txt').default
const {
loadImage
} = require('canvas')
const {
titleStyle,
titleDescriptionStype,
urlStype,
sponsorStyle,
sponsorImageStyle,
sponsorBox,
speakerStyle,
speakerBox,
organizerStyle,
organizerImageStyle,
organizerBox
} = require('./style')
class Meetup {
constructor(context) {
this.context = context;
this.title = undefined;
this.description = undefined;
this.url = undefined;
this.sponsors = undefined;
this.speakers = undefined;
this.organizers = undefined;
}
setTitle(title) {
this.title = title
return this
}
setDescription(date, time, location) {
this.description = date + " | " + time + " | " + location
return this
}
setURL(url) {
this.url = url
return this
}
setSponsors(sponsors) {
this.sponsors = sponsors
return this
}
setSpeakers(speakers) {
this.speakers = speakers
return this
}
setOrganizers(organizers) {
this.organizers = organizers
return this
}
async build() {
this.buildDescription()
await this.buildSpeakers()
await this.buildOrganizers()
if (this.sponsors) {
await this.buildSponsors()
}
}
buildDescription() {
drawText(this.context, this.title, titleStyle)
drawText(this.context, this.description, titleDescriptionStype)
drawText(this.context, this.url, urlStype)
}
async buildSponsors() {
drawText(this.context, "In Partnership With", sponsorStyle)
var sponsorCount = 0
for (var y = 0; y < sponsorBox.verticalDivider; y++) {
for (var x = 0; x < sponsorBox.horizontalDivider; x++) {
if (this.sponsors.length > sponsorCount) {
const dx = sponsorBox.dx + (sponsorImageStyle.width * x) + (sponsorImageStyle.padding * x)
const dy = sponsorBox.dy + (sponsorImageStyle.height * y) + (sponsorImageStyle.padding * y)
await drawImage(
this.context,
this.sponsors[sponsorCount].image_path,
dx,
dy,
sponsorImageStyle.width,
sponsorImageStyle.height
)
sponsorCount++
}
}
}
}
async buildOrganizers() {
drawText(this.context, "Organized By", organizerStyle)
var organizerCount = 0
for (var y = 0; y < organizerBox.verticalDivider; y++) {
for (var x = 0; x < organizerBox.horizontalDivider; x++) {
if (this.organizers.length > organizerCount) {
const dx = organizerBox.dx + (organizerImageStyle.width * x) + (organizerImageStyle.padding * x)
const dy = organizerBox.dy + (organizerImageStyle.height * y) + (organizerImageStyle.padding * y)
await drawImage(
this.context,
this.organizers[organizerCount].image_path,
dx,
dy,
organizerImageStyle.width,
organizerImageStyle.height
)
organizerCount++
}
}
}
}
async buildSpeakers() {
var speakerCount = 0
for (var y = 0; y < speakerBox.verticalDivider; y++) {
for (var x = 0; x < speakerBox.horizontalDivider; x++) {
if (this.speakers.length > speakerCount) {
const dxImage = speakerBox.dx + (speakerStyle.image.width * x) + (speakerStyle.padding * x)
const dyImage = speakerBox.dy + (speakerStyle.image.height * y) + 2 * (speakerStyle.padding * y)
await drawImage(
this.context,
this.speakers[speakerCount].image_path,
dxImage,
dyImage,
speakerStyle.image.width,
speakerStyle.image.height
)
const nameStyle = speakerStyle.name
nameStyle.dx = dxImage + speakerStyle.image.width + (3 * speakerStyle.padding)
nameStyle.dy = dyImage + speakerStyle.padding
drawText(this.context, this.speakers[speakerCount].name, nameStyle)
const companyStyle = speakerStyle.company
companyStyle.dx = dxImage + speakerStyle.image.width + (3 * speakerStyle.padding)
companyStyle.dy = nameStyle.dy + nameStyle.dHeight + speakerStyle.padding
drawText(this.context, `${this.speakers[speakerCount].position} @ ${this.speakers[speakerCount].company}`, companyStyle)
const titleStyle = speakerStyle.title
titleStyle.dx = dxImage + speakerStyle.image.width + (3 * speakerStyle.padding)
titleStyle.dy = companyStyle.dy + companyStyle.dHeight + speakerStyle.padding
drawText(this.context, this.speakers[speakerCount].title, titleStyle)
speakerCount++
}
}
}
}
}
const drawText = (ctx, text, style) => {
ctx.fillStyle = style.fontColor
canvasTxt.font = style.fontType
canvasTxt.align = style.align
canvasTxt.vAlign = style.vAlign
canvasTxt.fontSize = style.fontSize
canvasTxt.fontWeight = style.fontWeight
canvasTxt.lineHeight = style.fontSize + (style.fontSize * 0.1)
canvasTxt.drawText(
ctx,
text,
style.dx,
style.dy,
style.dWidth,
style.dHeight
)
}
const drawImage = async (ctx, image_path, dx, dy, width, height) => {
const image = await loadImage(image_path)
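// Scale uniformly so the image fits inside the width x height box while keeping its aspect ratio.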
var scale = Math.min(width / image.width, height / image.height);
ctx.drawImage(image, dx, dy, image.width * scale, image.height * scale)
return {
widthSubtrator: width - (image.width * scale),
}
}
module.exports = {
Meetup
};
| javascript | 19 | 0.546508 | 140 | 31.474227 | 194 | starcoderdata |
/*
* For clearness library handle different multiplication cases:
* Matrix vs Matrix
* Matrix vs Vector
* Vector vs Matrix
*/
#include
namespace stf
{
#if defined(STF_MATRIX_H) && !defined(STF_USE_MATRIX_MULTIPLICATION_CASE)
#define STF_USE_MATRIX_MULTIPLICATION_CASE
// Matrix vs Matrix multiplication
template <size_t LHS_ROWS, size_t LHS_COLS, size_t RHS_COLS, typename LHS_VALUE_TYPE, typename RHS_VALUE_TYPE,
typename RETURN_VALUE_TYPE = decltype(static_cast<LHS_VALUE_TYPE>(0) + static_cast<RHS_VALUE_TYPE>(0))>
Matrix<LHS_ROWS, RHS_COLS, RETURN_VALUE_TYPE> operator*(const Matrix<LHS_ROWS, LHS_COLS, LHS_VALUE_TYPE>& Lhs, const Matrix<LHS_COLS, RHS_COLS, RHS_VALUE_TYPE>& Rhs)
{
Matrix<LHS_ROWS, RHS_COLS, RETURN_VALUE_TYPE> result_mat;
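// Row-by-column accumulation: result_mat[x][y] is the sum over k of Lhs[x][k] * Rhs[k][y].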
for (size_t x = 0; x < LHS_ROWS; ++x)
for (size_t y = 0; y < RHS_COLS; ++y)
{
RETURN_VALUE_TYPE sum = static_cast<RETURN_VALUE_TYPE>(0);
for (size_t k = 0; k < LHS_COLS; ++k)
sum += Lhs[x][k] * Rhs[k][y];
result_mat[x][y] = sum;
}
return result_mat;
}
#endif
#if defined(STF_MATRIX_H) && defined(STF_VECTOR_H) && !defined(STF_USE_VECTOR_MULTIPLICATION_CASE)
#define STF_USE_VECTOR_MULTIPLICATION_CASE
// Matrix vs Vector multiplication
template < size_t LHS_ROWS, size_t LHS_COLS, typename LHS_VALUE_TYPE, typename RHS_VALUE_TYPE,
typename RETURN_VALUE_TYPE = decltype(static_cast<LHS_VALUE_TYPE>(0) + static_cast<RHS_VALUE_TYPE>(0)) >
Matrix<LHS_ROWS, 1, RETURN_VALUE_TYPE> operator*(const Matrix<LHS_ROWS, LHS_COLS, LHS_VALUE_TYPE>& Lhs, const Matrix<LHS_COLS, 1, RHS_VALUE_TYPE>& Rhs)
{
Matrix<LHS_ROWS, 1, RETURN_VALUE_TYPE> result_mat;
for (size_t x = 0; x < LHS_ROWS; ++x)
{
RETURN_VALUE_TYPE sum = static_cast<RETURN_VALUE_TYPE>(0);
for (size_t k = 0; k < LHS_COLS; ++k)
sum += Lhs[x][k] * Rhs[k];
result_mat[x] = sum;
}
return result_mat;
}
// Vector vs Matrix multiplication
template < size_t LHS_ROWS, size_t RHS_COLS, typename LHS_VALUE_TYPE, typename RHS_VALUE_TYPE,
typename RETURN_VALUE_TYPE = decltype(static_cast<LHS_VALUE_TYPE>(0) + static_cast<RHS_VALUE_TYPE>(0)) >
Matrix<LHS_ROWS, RHS_COLS, RETURN_VALUE_TYPE> operator*(const Matrix<LHS_ROWS, 1, LHS_VALUE_TYPE>& Lhs, const Matrix<1, RHS_COLS, RHS_VALUE_TYPE>& Rhs)
{
Matrix<LHS_ROWS, RHS_COLS, RETURN_VALUE_TYPE> result_mat;
for (size_t x = 0; x < LHS_ROWS; ++x)
for (size_t y = 0; y < RHS_COLS; ++y)
{
result_mat[x][y] = static_cast<RETURN_VALUE_TYPE>(Lhs[x] * Rhs[0][y]);
}
return result_mat;
}
#endif
}
| c | 21 | 0.672956 | 166 | 35.884058 | 69 | starcoderdata |
package com.yuzeduan.lovesong.music;
import com.yuzeduan.lovesong.music.bean.LrcEntity;
import java.util.List;
/**
* author: Allen
* date: On 2018/10/8
*/
public class MVPContract {
public interface IView{
void showLrc(List<LrcEntity> list);
}
public interface IPresenter{
void getData(String address);
}
}
| java | 10 | 0.692513 | 50 | 17.7 | 20 | starcoderdata |
<?php
namespace App\Models;
use Illuminate\Database\Eloquent\Model;
class UserPoint extends Model
{
const TABLE_NAME = 'user_point';
const FIELD_USER_ID = 'user_id';
const FIELD_FROM = 'from_date';
const FIELD_TO = 'to_date';
const FIELD_COUNT_EASY = 'count_easy';
const FIELD_COUNT_MID = 'count_mid';
const FIELD_COUNT_HARD = 'count_hard';
const FIELD_POINT = 'point';
const FIELD_IS_PASS = 'is_pass';
const IS_PASS_YES = 1;
const IS_PASS_NO = 0;
const CHANGEABLE_FIELDS = [
self::FIELD_USER_ID,
self::FIELD_FROM,
self::FIELD_TO,
self::FIELD_COUNT_EASY,
self::FIELD_COUNT_MID,
self::FIELD_COUNT_HARD,
self::FIELD_POINT,
self::FIELD_IS_PASS,
];
protected $table = self::TABLE_NAME;
public function setUserId($userId)
{
$this->attributes[self::FIELD_USER_ID] = $userId;
return $this;
}
public function setFromDate($from)
{
$this->attributes[self::FIELD_FROM] = $from;
return $this;
}
public function setToDate($to)
{
$this->attributes[self::FIELD_TO] = $to;
return $this;
}
public function setCountEasy($count)
{
$this->attributes[self::FIELD_COUNT_EASY] = $count;
return $this;
}
public function setCountMid($count)
{
$this->attributes[self::FIELD_COUNT_MID] = $count;
return $this;
}
public function setCountHard($count)
{
$this->attributes[self::FIELD_COUNT_HARD] = $count;
return $this;
}
public function setPoint($point)
{
$this->attributes[self::FIELD_POINT] = $point;
return $this;
}
public function setIsPass($isPass)
{
$this->attributes[self::FIELD_IS_PASS] = $isPass;
return $this;
}
public function getCountEasy()
{
return $this->attributes[self::FIELD_COUNT_EASY];
}
public function getCountMid()
{
return $this->attributes[self::FIELD_COUNT_MID];
}
public function getCountHard()
{
return $this->attributes[self::FIELD_COUNT_HARD];
}
public function getPoint()
{
return $this->attributes[self::FIELD_POINT];
}
public function getIsPass()
{
return $this->attributes[self::FIELD_IS_PASS];
}
}
| php | 10 | 0.583854 | 59 | 20.845455 | 110 | starcoderdata |
package org.museautomation.ui.steptask;
import org.museautomation.core.*;
import org.museautomation.core.project.*;
import org.museautomation.core.step.*;
import org.museautomation.core.steptask.*;
import java.util.*;
/**
* Ensures that each step in a SteppedTest has a unique ID (within the test).
*
* @author (see LICENSE.txt for license details)
*/
public class UniqueIds
{
public static void addToStepsIfNeeded(SteppedTask task, MuseProject project)
{
StepConfiguration step = task.getStep();
addToStepIfNeeded(step, project, new IdTracker());
}
private static void addToStepIfNeeded(StepConfiguration step, MuseProject project, IdTracker tracker)
{
checkAndRepairId(step, project, tracker);
if (step.getStepId() == null)
step.setStepId(StepIdGenerator.get(project).generateLongId());
if (step.getChildren() != null)
for (StepConfiguration child : step.getChildren())
addToStepIfNeeded(child, project, tracker);
}
private static void checkAndRepairId(StepConfiguration step, MuseProject project, IdTracker tracker)
{
// upgrade tests that used the original id name. // TODO remove at some point (this was needed for 0.11 update)
if (step.getMetadata() != null)
{
Object old_id = step.getMetadata().remove(StepConfiguration.META_ID_OLD);
if (old_id != null)
step.getMetadata().put(StepConfiguration.META_ID, old_id);
}
Long id = step.getStepId();
if (id == null)
return; // nothing to repair
if (tracker._existing_ids.contains(id))
{
Long new_id = StepIdGenerator.get(project).generateLongId();
while (tracker._existing_ids.contains(new_id))
{
StepIdGenerator.get(project).conflict();
new_id = StepIdGenerator.get(project).generateLongId();
}
step.setStepId(new_id);
tracker._existing_ids.add(new_id);
}
else
tracker._existing_ids.add(id);
}
private static class IdTracker
{
Set _existing_ids = new HashSet<>();
}
}
| java | 14 | 0.717647 | 114 | 29 | 68 | starcoderdata |
def verify(b, prefix, p):
"""Verify setup was replicated to backup b"""
# Wait for configuration to replicate.
wait_address(b.connection, prefix+"x");
self.assert_browse_retry(b, prefix+"q1", ["b", "1", "4"])
self.assertEqual(p.receiver(prefix+"q1").fetch(timeout=0).content, "b")
p.acknowledge()
self.assert_browse_retry(b, prefix+"q1", ["1", "4"])
self.assert_browse_retry(b, prefix+"q2", []) # configuration only
assert not valid_address(b.connection, prefix+"q3")
# Verify exchange with replicate=all
b.sender(prefix+"e1/key1").send(qm.Message(prefix+"e1"))
self.assert_browse_retry(b, prefix+"q1", ["1", "4", prefix+"e1"])
# Verify exchange with replicate=configuration
b.sender(prefix+"e2/key2").send(qm.Message(prefix+"e2"))
self.assert_browse_retry(b, prefix+"q2", [prefix+"e2"])
b.sender(prefix+"e4/key4").send(qm.Message("drop2")) # Verify unbind.
self.assert_browse_retry(b, prefix+"q4", ["6","7"])
# Verify deletes
assert not valid_address(b.connection, prefix+"dq")
assert not valid_address(b.connection, prefix+"de")
| python | 13 | 0.571317 | 83 | 46.296296 | 27 | inline |
@Test
public void checkInheritanceRelationships() {
// inheritance relationship check
assertTrue(checkInheritance(RootInterface.class, ChildInterface.class));
assertTrue(checkInheritance(RootInterface.class, AnotherChildInterface.class));
assertTrue(checkInheritance(ChildInterface.class, ConcreteChild.class));
assertTrue(checkInheritance(AnotherChildInterface.class, AnotherConcreteChild.class));
assertTrue(checkInheritance(ConcreteChild.class, ConcreteGrandChild.class));
assertTrue(checkInheritance(AnotherConcreteChild.class, AnotherConcreteGrandChild.class));
// non inheritance relationship check
assertFalse(checkInheritance(ChildInterface.class, AnotherChildInterface.class));
assertFalse(checkInheritance(AnotherChildInterface.class, ChildInterface.class));
assertFalse(checkInheritance(ChildInterface.class, AnotherConcreteGrandChild.class));
assertFalse(checkInheritance(AnotherChildInterface.class, ConcreteGrandChild.class));
}
| java | 9 | 0.820641 | 94 | 51.578947 | 19 | inline |
def AutoSaveInit():
'''
1. We will open the UserInfo.py file and see if the AutoSave variable is set to true
'''
# Set Variable from UserInfo.py equal to True when the Auto Save feature is turned on
UserInfo_AutoSave = True
# Open the UserInfo.py file and write this variable as True
# Turn ON Auto Save
if AutoSave_CheckMark.get() and UserInfo_AutoSave:
AutoSaveDeclare()
TextBox.after(1000, AutoSaveInit)
else:
# Undeclare the Python Function - Turn OFF the AutoSave Feature
UserInfo_AutoSave = False
# Open the UserInfo.py file and write this variable as False
| python | 8 | 0.682243 | 89 | 36.823529 | 17 | inline |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Core.Entities;
using Core.Enums;
using Core.Models;
using Core.Repositories;
using Lykke.Common.Log;
using Microsoft.AspNetCore.Mvc;
using Newtonsoft.Json;
using Services;
using Web.Models;
namespace Web.Controllers
{
[Route("api/[controller]")]
public class ApiOverrideController : BaseController
{
private readonly IKeyValuesRepository _keyValuesRepository;
private readonly IKeyValueHistoryRepository _keyValueHistoryRepository;
private readonly IRepositoryDataRepository _repositoryDataRepository;
public ApiOverrideController(
ILogFactory logFactory,
IUserActionHistoryRepository userActionHistoryRepository,
IKeyValuesRepository keyValuesRepository,
IRepositoryDataRepository repositoryDataRepository,
IKeyValueHistoryRepository keyValueHistoryRepository)
: base(userActionHistoryRepository, logFactory)
{
_keyValuesRepository = keyValuesRepository;
_keyValueHistoryRepository = keyValueHistoryRepository;
_repositoryDataRepository = repositoryDataRepository;
}
[HttpGet]
public async Task<IEnumerable<KeyValue>> Get()
{
try
{
var keyValues = await _keyValuesRepository.GetKeyValuesAsync();
return keyValues;
}
catch (Exception ex)
{
_log.Error(ex);
return new List<KeyValue>();
}
}
[HttpGet("{id}")]
public async Task<KeyValue> Get(string id)
{
try
{
var keyValues = await _keyValuesRepository.GetAsync(x => x.KeyValueId == id);
return keyValues.FirstOrDefault();
}
catch (Exception ex)
{
_log.Error(ex, context: id);
return new KeyValue();
}
}
[HttpGet("collectionKeys")]
public async Task<IEnumerable<string>> GetKeysFromCollection()
{
try
{
var keyValues = await _keyValuesRepository.GetKeyValuesAsync();
return keyValues.Select(x => x.KeyValueId);
}
catch (Exception ex)
{
_log.Error(ex);
return new List<string>();
}
}
[HttpGet("blobKeys")]
public async Task<IEnumerable<string>> GetKeysFromBlob()
{
try
{
var placeholders = await GetPlaceholdersList();
return placeholders.Select(x => x.KeyValueId).Distinct();
}
catch (Exception ex)
{
_log.Error(ex);
return new List<string>();
}
}
[HttpPut]
public async Task<ApiOverrideModel> Put([FromBody]KeyValueToUpdate entity)
{
try
{
var keyValues = await _keyValuesRepository.GetKeyValuesAsync();
var keyValue = keyValues.FirstOrDefault(x => x.KeyValueId == entity.RowKey);
if (keyValue == null)
{
return new ApiOverrideModel
{
Status = UpdateSettingsStatus.NotFound
};
}
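// Collect other keys that already hold the requested value; in production these duplicates block the update unless Forced is set.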
var duplicatedKeys = keyValues.Where(x => x.KeyValueId != keyValue.KeyValueId && x.Value == entity.Value).ToList();
if (entity.Forced == false && IS_PRODUCTION)
{
if (duplicatedKeys.Count > 0)
{
return new ApiOverrideModel
{
Status = UpdateSettingsStatus.HasDuplicated,
DuplicatedKeys = duplicatedKeys.Select(x => x.KeyValueId)
};
}
}
var keyValueEntity = new KeyValue
{
KeyValueId = keyValue.KeyValueId,
Value = entity.Value,
IsDuplicated = duplicatedKeys.Count > 0,
Override = keyValue.Override,
Types = keyValue.Types,
RepositoryNames = keyValue.RepositoryNames,
EmptyValueType = keyValue.EmptyValueType
};
var entitiesToUpload = new List<KeyValue> { keyValueEntity };
if (duplicatedKeys.Any())
{
var duplicationsToUpload = duplicatedKeys.Where(x => !x.IsDuplicated.HasValue || !x.IsDuplicated.Value);
duplicationsToUpload.ToList().ForEach(item =>
{
item.IsDuplicated = true;
entitiesToUpload.Add(item);
});
}
var oldDuplications = keyValues.Where(x => x.KeyValueId != keyValue.KeyValueId && x.Value == keyValue.Value);
if (oldDuplications.Count() == 1)
{
var oldDuplication = oldDuplications.First();
oldDuplication.IsDuplicated = false;
entitiesToUpload.Add(oldDuplication);
}
var result = await _keyValuesRepository.UpdateKeyValueAsync(entitiesToUpload);
string strObj = JsonConvert.SerializeObject(keyValues);
await _keyValueHistoryRepository.SaveKeyValueHistoryAsync(
keyValueEntity?.KeyValueId,
keyValueEntity?.Value,
strObj,
UserInfo.UserEmail,
UserInfo.Ip);
var updatedKeyValues = await _keyValuesRepository.GetKeyValuesAsync();
return new ApiOverrideModel
{
Status = result ? UpdateSettingsStatus.Ok : UpdateSettingsStatus.InternalError,
KeyValues = updatedKeyValues
};
}
catch (Exception ex)
{
_log.Error(ex, context: entity);
return new ApiOverrideModel { Status = UpdateSettingsStatus.InternalError };
}
}
[HttpDelete("{id}")]
public async Task<bool> DeleteKeyValue(string id)
{
try
{
var keyValues = await _keyValuesRepository.GetKeyValuesAsync();
var keyValue = keyValues.FirstOrDefault(x => x.KeyValueId == id);
if (keyValue == null || string.IsNullOrWhiteSpace(keyValue.Value))
return false;
List<KeyValue> keysToUpdate = new List<KeyValue>();
// check for duplications. if duplicatedKeys == 1, then change isDuplicated property to false
var duplicatedKeys = keyValues.Where(x => x.KeyValueId != keyValue.KeyValueId && x.Value == keyValue.Value).ToList();
if (duplicatedKeys.Count == 1)
{
var duplicatedKey = duplicatedKeys.First();
duplicatedKey.IsDuplicated = false;
keysToUpdate.Add(duplicatedKey);
}
keyValue.Value = null;
// this key has no values, so it is not duplicated anymore
keyValue.IsDuplicated = false;
keysToUpdate.Add(keyValue);
await _keyValuesRepository.ReplaceKeyValueAsync(keysToUpdate.ToArray());
string strObj = JsonConvert.SerializeObject(keyValues);
await _keyValueHistoryRepository.SaveKeyValueHistoryAsync(
keyValue?.KeyValueId,
keyValue?.Value,
strObj,
UserInfo.UserEmail,
UserInfo.Ip);
return true;
}
catch (Exception ex)
{
_log.Error(ex, context: id);
return false;
}
}
#region Private Methods
private async Task GetPlaceholdersList()
{
try
{
var jsonDatas = await _repositoryDataRepository.GetBlobFilesDataAsync();
var jsonKeys = new List
foreach (var jsonData in jsonDatas)
{
jsonKeys.AddRange(jsonData.AsString().PlaceholderList());
}
return jsonKeys.Distinct().ToList();
}
catch (Exception ex)
{
_log.Error(ex);
return new List
}
}
#endregion
}
}
| c# | 23 | 0.521531 | 133 | 36.041322 | 242 | starcoderdata |
import os
import boto3
from botocore.exceptions import ClientError
import avm_common
import time
import json
import sys
# This script will setup GuardDuty on the master account and invite a member account.
# Contributed by
#
# Run this script using master account access-key and secret-access key
#
# Usage: python3 gd_setup_master.py --member_account_id 123456789012 --member_account_email
def get_regions(ec2_client):
"""
Return all AWS regions
"""
regions = []
try:
aws_regions = ec2_client.describe_regions()['Regions']
except ClientError as e:
print(e.response['Error']['Message'])
else:
for region in aws_regions:
regions.append(region['RegionName'])
return regions
def send_error_notification(e, account_id):
body = f"Unexpected error : {e}"
print(body)
sns_topic = avm_common.get_param("sns_topic_arn")
print(sns_topic)
print(account_id)
sub = "ERROR: GuardDuty invites"
func = "avm-guardduty-invite-member-accounts"
#(accountId, snsARN, function_name, subject, body):
avm_common.send_pipeline_notification(account_id,sns_topic,func, sub,body)
def lambda_handler(event, context):
try:
account_id = event["AccountId"]
lambda_handler_inner(event, context)
except ClientError as e:
send_error_notification(e,account_id)
raise e
def lambda_handler_inner(event, context):
core_security = avm_common.get_param("core_security_account")
master_role = avm_common.get_param("tlz_admin_role")
region_name = os.getenv('AWS_REGION')
ROLE_ARN_SECURITY=f"arn:aws:iam::{core_security}:role/{master_role}"
session_assumed = avm_common.aws_session(role=ROLE_ARN_SECURITY, session_name='security-services')
gd_client = session_assumed.client('ec2', region_name=region_name)
if core_security == event["AccountId"]:
print("No need to send guradduty invites as guardDuty master account cannot be member to itself")
return
regions = get_regions(gd_client)
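# Enable GuardDuty on the master (security) account and invite the member account in every region.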
for region in regions:#
print("Processing %s region" %(region))
try:
client = session_assumed.client('guardduty', region_name=region)
#Find out if GuardDuty already enabled:
detectors_list = client.list_detectors()
if not detectors_list["DetectorIds"]:
print ("GuardDuty is not enabled ... enabling GuardDuty on master account")
response = client.create_detector(Enable=True)
# Save DetectorID handler
DetectorId = response["DetectorId"]
else:
print("GuardDuty already enabled on account")
DetectorId = detectors_list['DetectorIds'][0]
# Do error handling here
# print all Detectors
detectors_list = client.list_detectors()
print ("Detector lists: ")
for x in detectors_list["DetectorIds"]:
#print(x, end=" ")
print(x)
account_id = event["AccountId"]
account = avm_common.get_account_details(account_id)
print(f"id from event : {account_id} id from orgdetails: {account['org_details']['id']}")
if account:
# invite an account
print(account['org_details'])
print ("\nInviting member account " + account_id)
invite_member = client.create_members(
AccountDetails=[
{
'AccountId': account_id,
'Email': account['org_details']["email"]
},
],
DetectorId=DetectorId
)
gd_members = client.get_members(
AccountIds=[
account_id,
],
DetectorId=DetectorId
)
# the future member account is now staged
if gd_members:
print(gd_members)
print ("Memeber account RelationshipStatus: " + gd_members['Members'][0]['RelationshipStatus'])
# Invite members account(s)
response = client.invite_members(
AccountIds=[
account_id,
],
DetectorId=DetectorId,
Message='Please join AWS GuardDuty master account'
)
gd_members = client.get_members(
AccountIds=[
account_id,
],
DetectorId=DetectorId
)
# the future member account should be 'pending'
print ("Memeber account RelationshipStatus: " + gd_members['Members'][0]['RelationshipStatus'])
except ClientError as e:
send_error_notification(e,account_id)
# Enable GuardDuty on target account and accept the invites
print(f"About to enable and accept requests for {account_id}")
target_session = avm_common.aws_session(role=f"arn:aws:iam::{account_id}:role/{master_role}", session_name='target-account')
for region in regions:
print("Processing %s region" %(region))
try:
client = target_session.client('guardduty', region_name=region)
#Find out if GuardDuty already enabled:
detectors_list = client.list_detectors()
if not detectors_list["DetectorIds"]:
print (f"GuardDuty is not enabled ... enabling GuardDuty on {account_id} account")
response = client.create_detector(Enable=True)
# Save DetectorID handler
DetectorId = response["DetectorId"]
else:
print(f"GuardDuty already enabled on account {account_id}")
DetectorId = detectors_list['DetectorIds'][0]
# accept the invites
response = client.list_invitations(MaxResults=10)
if response:
invites = [i for i in response['Invitations']]
for i in invites:
r = client.accept_invitation(DetectorId=DetectorId,InvitationId=i["InvitationId"],MasterId=i["AccountId"])
except ClientError as e:
body = f"Unexpected error : {e}"
print(body)
send_error_notification(e,account_id)
if __name__ == "__main__":
from optparse import OptionParser
import pprint
import json
import sys
parser = OptionParser()
parser.add_option("-a", "--account_number", dest="account_number", help="AccountNUmber to test",default="694866286020")
pp = pprint.PrettyPrinter(indent=4)
(options, args) = parser.parse_args(sys.argv)
pp.pprint(options)
event = { "AccountId": f"{options.account_number}"}
lambda_handler(event,None)
| python | 20 | 0.580451 | 128 | 36.058511 | 188 | starcoderdata |
//
// EvenTree.cpp
// HackerRank
//
// Created by on 04/08/16.
//
// https://www.hackerrank.com/challenges/even-tree
#include <iostream>
#include <vector>
using namespace std;
class Tree {
private:
/// Number of vertices of the tree.
int numberOfVertices;
/// Adjacency list for vertices.
vector<vector<int>> adj;
/*!
Depth first search visit method.
It implements the recursive part of DFS.
This is a modified version of the algorithm that only counts the node visited.
@param v current vertex visited.
@param countVisited count of already visited node.
*/
void depthFirstSearchVisit(int vertex, int& countVisited) {
//Increment count visited.
countVisited++;
//Visit recursively all child of the current vertex.
//Tree is a directed acyclic graph, so no need
//to check if node in the adjacency list has been visited:
//they are not visited for sure.
for (int i = 0; i < adj[vertex].size(); i++) {
depthFirstSearchVisit(adj[vertex][i], countVisited);
}
}
public:
/*!
Constructor
@param numVertices number of vertices of the tree.
*/
Tree(int numVertices) {
numberOfVertices = numVertices;
for (int i = 0; i <= numberOfVertices; i++) {
adj.push_back(vector<int>());
}
};
/*!
Add an edge to the tree.
@param vi vertex 1
@param ui vertex 2
*/
void addEdge(int vi, int ui) {
adj[vi].push_back(ui);
}
/*!
Get child for vertex received as parameter.
@param vertex vertex used as indices to get the list of its children.
@returns the list of children for vertex.
*/
const vector<int> getChildForVertex(int vertex) {
return adj[vertex];
}
/*!
Depth first search method.
Modified version to count the number of elements visited.
@param vertex starting vertex.
@returns the number of elements visited.
*/
int depthFirstSearch(int vertex) {
//Set all vertices as not visited.
int countVisited = 0;
//Start depth search first.
depthFirstSearchVisit(vertex, countVisited);
return countVisited;
}
};
int main() {
//n is the number of vertices.
//m is the number of edges.
int n, m;
//Read parameters.
cin >> n >> m;
//Create tree.
Tree t(n);
//Vertices of an edge.
int ui, vi;
for (int i = 0; i < m; i++) {
//Read edge vertices.
cin >> ui >> vi;
//Add edge to tree.
t.addEdge(vi, ui);
}
int edgesToRemove = 0;
//Count number of edges that can be removed from the tree
//to get a forest such that each connected component of
//the forest contains an even number of vertices.
for (int i = 1; i <= n; i++) {
//For each child of the current node count
//the number of elements using DSF (including the child vertex).
//If the count is even, then the edge could be removed.
for (int h = 0; h < t.getChildForVertex(i).size(); h++) {
int countChildVisited = t.depthFirstSearch(t.getChildForVertex(i)[h]);
if(countChildVisited % 2 == 0) {
edgesToRemove++;
}
}
}
cout << edgesToRemove;
return 0;
}
| c++ | 14 | 0.552746 | 83 | 22.883117 | 154 | starcoderdata |
@Override
protected void event(UserRequest ureq, Controller source, Event event) {
if(source instanceof TaxonomyLevelOverviewController) {
if(event instanceof DeleteTaxonomyLevelEvent || event == Event.CANCELLED_EVENT) {
stackPanel.popController(source);
loadModel(false, false);
} else if(event instanceof NewTaxonomyLevelEvent) {
stackPanel.popController(source);
loadModel(false, false);
doSelectTaxonomyLevel(ureq, ((NewTaxonomyLevelEvent)event).getTaxonomyLevel());
} else if(event instanceof MoveTaxonomyLevelEvent) {
stackPanel.popController(source);
loadModel(true, true);
doSelectTaxonomyLevel(ureq, ((MoveTaxonomyLevelEvent)event).getTaxonomyLevel());
} else if(event == Event.DONE_EVENT || event == Event.CHANGED_EVENT) {
dirty = true;
}
} else if(createTaxonomyLevelCtrl == source) {
if(event == Event.DONE_EVENT || event == Event.CHANGED_EVENT) {
loadModel(false, false);
if(createTaxonomyLevelCtrl.getParentLevel() != null) {
int openIndex = model.indexOf(createTaxonomyLevelCtrl.getParentLevel());
model.open(openIndex);
tableEl.reset(false, false, true);
}
doSelectTaxonomyLevel(ureq, createTaxonomyLevelCtrl.getTaxonomyLevel());
}
cmc.deactivate();
cleanUp();
} else if(mergeCtrl == source || typeLevelCtrl == source || moveLevelCtrl == source) {
if(event == Event.DONE_EVENT || event == Event.CHANGED_EVENT || event instanceof DeleteTaxonomyLevelEvent) {
loadModel(true, true);
}
cmc.deactivate();
cleanUp();
} else if(confirmDeleteCtrl == source) {
if(event == Event.DONE_EVENT || event == Event.CHANGED_EVENT || event instanceof DeleteTaxonomyLevelEvent) {
loadModel(true, true);
}
cmc.deactivate();
cleanUp();
} else if(importWizardCtrl == source) {
if(event == Event.CANCELLED_EVENT || event == Event.DONE_EVENT || event == Event.CHANGED_EVENT) {
// Close the dialog
getWindowControl().pop();
// Remove steps controller
cleanUp();
// Reload data
loadModel(true, true);
}
} else if(cmc == source) {
cleanUp();
}
super.event(ureq, source, event);
}
| java | 16 | 0.671797 | 111 | 37.77193 | 57 | inline |
/*
* Copyright (C) 2009 Google Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef THIRD_PARTY_BLINK_PUBLIC_WEB_WEB_WIDGET_H_
#define THIRD_PARTY_BLINK_PUBLIC_WEB_WEB_WIDGET_H_
#include "base/callback.h"
#include "base/time/time.h"
#include "cc/input/browser_controls_state.h"
#include "cc/metrics/begin_main_frame_metrics.h"
#include "cc/paint/element_id.h"
#include "cc/trees/layer_tree_host_client.h"
#include "third_party/blink/public/common/input/web_menu_source_type.h"
#include "third_party/blink/public/common/metrics/document_update_reason.h"
#include "third_party/blink/public/mojom/input/input_event_result.mojom-shared.h"
#include "third_party/blink/public/mojom/manifest/display_mode.mojom-shared.h"
#include "third_party/blink/public/platform/input/input_handler_proxy.h"
#include "third_party/blink/public/platform/web_common.h"
#include "third_party/blink/public/platform/web_input_event_result.h"
#include "third_party/blink/public/platform/web_rect.h"
#include "third_party/blink/public/platform/web_size.h"
#include "third_party/blink/public/platform/web_text_input_info.h"
#include "third_party/blink/public/web/web_hit_test_result.h"
#include "third_party/blink/public/web/web_lifecycle_update.h"
#include "third_party/blink/public/web/web_range.h"
#include "third_party/blink/public/web/web_swap_result.h"
namespace cc {
class LayerTreeHost;
class TaskGraphRunner;
class UkmRecorderFactory;
class LayerTreeSettings;
}
namespace ui {
class Cursor;
class LatencyInfo;
}
namespace blink {
class WebCoalescedInputEvent;
namespace scheduler {
class WebRenderWidgetSchedulingState;
}
class WebWidget {
public:
// Initialize compositing. This will create a LayerTreeHost but will not
// allocate a frame sink or begin producing frames until SetCompositorVisible
// is called.
virtual cc::LayerTreeHost* InitializeCompositing(
cc::TaskGraphRunner* task_graph_runner,
const cc::LayerTreeSettings& settings,
std::unique_ptr<cc::UkmRecorderFactory> ukm_recorder_factory) = 0;
// This method closes and deletes the WebWidget. If a |cleanup_task| is
// provided it should run on the |cleanup_runner| after the WebWidget has
// added its own tasks to the |cleanup_runner|.
virtual void Close(
scoped_refptr cleanup_runner = nullptr,
base::OnceCallback cleanup_task = base::OnceCallback {}
// Set the compositor as visible. If |visible| is true, then the compositor
// will request a new layer frame sink and begin producing frames from the
// compositor.
virtual void SetCompositorVisible(bool visible) = 0;
// Returns the current size of the WebWidget.
virtual WebSize Size() { return WebSize(); }
// Called to resize the WebWidget.
virtual void Resize(const WebSize&) {}
// Called to notify the WebWidget of entering/exiting fullscreen mode.
virtual void DidEnterFullscreen() {}
virtual void DidExitFullscreen() {}
// Called to run through the entire set of document lifecycle phases needed
// to render a frame of the web widget. This MUST be called before Paint,
// and it may result in calls to WebViewClient::DidInvalidateRect (for
// non-composited WebViews).
// |reason| must be used to indicate the source of the
// update for the purposes of metrics gathering.
virtual void UpdateAllLifecyclePhases(DocumentUpdateReason reason) {
UpdateLifecycle(WebLifecycleUpdate::kAll, reason);
}
// UpdateLifecycle is used to update to a specific lifecycle phase, as given
// by |LifecycleUpdate|. To update all lifecycle phases, use
// UpdateAllLifecyclePhases.
// |reason| must be used to indicate the source of the
// update for the purposes of metrics gathering.
virtual void UpdateLifecycle(WebLifecycleUpdate requested_update,
DocumentUpdateReason reason) {}
// Called to inform the WebWidget of a change in theme.
// Implementors that cache rendered copies of widgets need to re-render
// on receiving this message
virtual void ThemeChanged() {}
// Do a hit test at given point and return the WebHitTestResult.
virtual WebHitTestResult HitTestResultAt(const gfx::PointF&) = 0;
// Called to inform the WebWidget of an input event.
virtual WebInputEventResult HandleInputEvent(const WebCoalescedInputEvent&) {
return WebInputEventResult::kNotHandled;
}
  // Send any outstanding touch events. Touch events need to be grouped
  // together, and any changes since the last touch event are sent as part of
  // the new touch event.
virtual WebInputEventResult DispatchBufferedTouchEvents() {
return WebInputEventResult::kNotHandled;
}
// Called to inform the WebWidget of the mouse cursor's visibility.
virtual void SetCursorVisibilityState(bool is_visible) {}
// Called to inform the WebWidget that mouse capture was lost.
virtual void MouseCaptureLost() {}
// Called to inform the WebWidget that it has gained or lost keyboard focus.
virtual void SetFocus(bool) {}
// Sets the display mode, which comes from the top-level browsing context and
// is applied to all widgets.
virtual void SetDisplayMode(mojom::DisplayMode) {}
// Returns the anchor and focus bounds of the current selection.
// If the selection range is empty, it returns the caret bounds.
virtual bool SelectionBounds(WebRect& anchor, WebRect& focus) const {
return false;
}
// Calling WebWidgetClient::requestPointerLock() will result in one
// return call to didAcquirePointerLock() or didNotAcquirePointerLock().
virtual void DidAcquirePointerLock() {}
virtual void DidNotAcquirePointerLock() {}
// Pointer lock was held, but has been lost. This may be due to a
// request via WebWidgetClient::requestPointerUnlock(), or for other
// reasons such as the user exiting lock, window focus changing, etc.
virtual void DidLosePointerLock() {}
// Called by client to request showing the context menu.
virtual void ShowContextMenu(WebMenuSourceType) {}
  // Accessor to the WebWidget scheduling state.
virtual scheduler::WebRenderWidgetSchedulingState*
RendererWidgetSchedulingState() = 0;
// When the WebWidget is part of a frame tree, returns the active url for
// main frame of that tree, if the main frame is local in that tree. When
// the WebWidget is of a different kind (e.g. a popup) it returns the active
// url for the main frame of the frame tree that spawned the WebWidget, if
// the main frame is local in that tree. When the relevant main frame is
// remote in that frame tree, then the url is not known, and an empty url is
// returned.
virtual WebURL GetURLForDebugTrace() = 0;
virtual void SetCursor(const ui::Cursor& cursor) = 0;
// Get the current tooltip text.
virtual WebString GetLastToolTipTextForTesting() const { return WebString(); }
// Whether or not the widget is in the process of handling input events.
virtual bool HandlingInputEvent() = 0;
// Set state that the widget is in the process of handling input events.
virtual void SetHandlingInputEvent(bool handling) = 0;
  using HandledEventCallback = base::OnceCallback<void(
      mojom::InputEventResultState ack_state,
      const ui::LatencyInfo& latency_info,
      std::unique_ptr<InputHandlerProxy::DidOverscrollParams>,
      base::Optional<cc::TouchAction>)>;
// Process the input event, invoking the callback when complete. This
// method will call the callback synchronously.
virtual void ProcessInputEventSynchronously(const WebCoalescedInputEvent&,
HandledEventCallback) = 0;
virtual void DidOverscrollForTesting(
const gfx::Vector2dF& overscroll_delta,
const gfx::Vector2dF& accumulated_overscroll,
const gfx::PointF& position_in_viewport,
const gfx::Vector2dF& velocity_in_viewport) {}
protected:
~WebWidget() = default;
};
} // namespace blink
#endif
|
c
| 15 | 0.749102 | 81 | 41.241071 | 224 |
starcoderdata
|
'use strict'
const sanitize = require('./components/sanitization')
const identify = require('./components/identify')
const AcceptedColors = require('./components/accepted_colors')
const convertColor = require('./components/convert_color')
const colorFrame = {
clone: function (data) {
if (typeof data === "object") {
// return Object.create(data)
return JSON.parse(JSON.stringify(data))
} else {
return data
}
},
get acceptedColors() {
return new AcceptedColors()
}
}
class objectiveColor {
constructor(inputColor) {
this.init()
this.colorExtractor(inputColor)
}
colorExtractor(inputColor, setFormat) {
delete this.format
delete this.sanitizedColor
if (inputColor) {
this.format = (setFormat) ? ((identify(inputColor, setFormat)) ? setFormat : false) : identify(inputColor)
if (this.format) {
this.sanitizedColor = sanitize(inputColor, this.format)
} else {
this.format = false
this.sanitizedColor = false
}
}
}
init() {
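    // Define a getter/setter pair for every accepted color format; getters convert the stored value on demand via convertColor, setters re-parse the input as that format.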
for (const aColor of this.acceptedColors.keys) {
Object.defineProperty(this, aColor, {
get() {
if (this.format === aColor) {
return this.sanitizedColor
} else if (this.format) {
const stepsToConvert = convertColor.stepsToConvert(this.format, aColor)
let tempColor = this.clone(this.sanitizedColor);
if (stepsToConvert) {
for (let i = 0; i < stepsToConvert.length - 1; i++) {
if (tempColor) {
tempColor = convertColor[stepsToConvert[i]][stepsToConvert[i + 1]](this.clone(tempColor))
}
}
}
return tempColor
}
return false
},
set(input) {
this.colorExtractor(input, aColor)
}
})
}
Object.defineProperty(this, 'color', {
get() {
return this.sanitizedColor
},
set(input) {
this.colorExtractor(input)
}
})
Object.defineProperty(this, 'htmlref', {
get() {
const stepsToConvert = convertColor.stepsToConvert("html", "hex6")
let tempColor = this.clone(this.html);
if (stepsToConvert) {
for (let i = 0; i < stepsToConvert.length - 1; i++) {
if (tempColor) {
tempColor = convertColor[stepsToConvert[i]][stepsToConvert[i + 1]](this.clone(tempColor))
}
}
}
return (tempColor) ? "#" + tempColor : false
}
})
}
}
Object.assign(objectiveColor.prototype, colorFrame);
module.exports = objectiveColor
|
javascript
| 29 | 0.480325 | 125 | 31.683673 | 98 |
starcoderdata
|
var express = require('express');
var router = express.Router();
var PDFDocument = require('pdfkit');
var orm = require('orm');
router.use('/', require('./login'));
router.use('/api', require('./api'));
let auth = require('./../controllers/login/auth');
router.use('/auth', auth);
router.use('/auth', require('./auth'));
console.log(`Step 6 index.js`);
router.use((err, req, res, next) => {
if (err.name === 'ValidationError') {
return res.status(422).json({
errors: Object.keys(err.errors).reduce((errors, key) => {
errors[key] = err.errors[key].message;
return errors;
}, {})
});
}
return next(err);
});
router.use(orm.express("mysql://root:Monu@1234@localhost/dbnews", {
define: function (db, models, next) {
models.news = db.define("tbl_posts", {
id : Number,
title : String,
detail : String,
author_name : String,
link : String,
publish_date : { type: 'date', time: false },
});
next();
}
}));
router.get('/', function(req, res, next) {
var result = req.models.news.find({
}, function(error, news){
if(error) throw error;
res.render('index', { news:news, title: 'Generate PDF using NodeJS' });
});
});
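// Render a single post as a downloadable PDF (PDFKit), selected by the "id" query parameter.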
router.get('/pdf', function(req, res, next) {
var id = req.query.id;
const doc = new PDFDocument();
var result = req.models.news.find({id: id}, function(error, newspost){
if(error) throw error;
var title = newspost[0]['title'];
var content = newspost[0]['detail'];
var publish_date = newspost[0]['publish_date'];
var author_name = newspost[0]['author_name'];
var link = newspost[0]['link'];
var filename = encodeURIComponent(title) + '.pdf';
res.setHeader('Content-disposition', 'attachment; filename="' + filename + '"');
res.setHeader('Content-type', 'application/pdf');
doc.font('Times-Roman', 18)
.fontSize(25)
.text(title, 100, 50);
doc.fontSize(15)
.fillColor('blue')
.text('Read Full Article', 100, 100)
.link(100, 100, 160, 27, link);
doc.moveDown()
.fillColor('red')
.text("Author: "+author_name);
doc
.moveDown()
.fillColor('black')
.fontSize(15)
.text(content, {
align: 'justify',
indent: 30,
height: 300,
ellipsis: true
});
doc.pipe(res);
doc.end();
});
});
module.exports = router;
|
javascript
| 22 | 0.544585 | 86 | 27.402174 | 92 |
starcoderdata
|
package com.company.go.application.port.in.global;
import org.springframework.beans.BeanWrapperImpl;
import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;
public class CompareFieldsValidator implements ConstraintValidator<CompareFields, Object> {
private String args[];
@Override
public void initialize(CompareFields constraintAnnotation) {
this.args = constraintAnnotation.args();
}
@Override
public boolean isValid(Object object, ConstraintValidatorContext constraintValidatorContext) {
if(args == null || args.length == 0){
return false;
}
if(args.length < 2){
args = args[0].split("\\s*,\\s*");
}
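        // Every listed property must be non-null and equal to all of the others for the object to be valid.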
for (String arg1 : args) {
Object basePropertyValue = new BeanWrapperImpl(object).getPropertyValue(arg1);
for (String arg : args) {
Object propertyValue = new BeanWrapperImpl(object).getPropertyValue(arg);
if (basePropertyValue == null || propertyValue == null || !basePropertyValue.equals(propertyValue)) {
return false;
}
}
}
return true;
}
}
|
java
| 15 | 0.639209 | 117 | 33.685714 | 35 |
starcoderdata
|
Viewer/src/WidgetCheckbox.java
import java.awt.GridLayout;
import java.util.function.Consumer;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
@SuppressWarnings("serial")
public class WidgetCheckbox extends Widget {
String label;
JCheckBox checkbox;
	Consumer<Boolean> handler;
/**
* A widget that lets the user check or uncheck a checkbox.
*
* @param labelText Label to show at the right of the checkbox.
* @param isChecked If the checkbox should default to checked.
* @param eventHandler Will be notified when the checkbox changes.
*/
	public WidgetCheckbox(String labelText, boolean isChecked, Consumer<Boolean> eventHandler) {
super();
label = labelText;
handler = eventHandler;
checkbox = new JCheckBox(label);
checkbox.setSelected(isChecked);
checkbox.addActionListener(event -> handler.accept(checkbox.isSelected()));
setLayout(new GridLayout(1, 2, 10, 10));
add(new JLabel(""));
add(checkbox);
handler.accept(checkbox.isSelected());
}
/**
* Updates the widget and chart based on settings from a layout file.
*
* @param lines A queue of remaining lines from the layout file.
*/
@Override public void importState(Controller.QueueOfLines lines) {
// parse the text
boolean checked = ChartUtils.parseBoolean(lines.remove(), label.trim().toLowerCase() + " = %b");
// update the widget
checkbox.setSelected(checked);
// update the chart
handler.accept(checkbox.isSelected());
}
/**
* Saves the current state to one or more lines of text.
*
* @return A String[] where each element is a line of text.
*/
@Override public String[] exportState() {
return new String[] {
label.trim().toLowerCase() + " = " + checkbox.isSelected()
};
}
}
|
java
| 13 | 0.695961 | 98 | 24.444444 | 72 |
starcoderdata
|
package com.example.demo.controller;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.ModelAndView;
import com.example.demo.dao.studentdao;
import com.example.demo.model.student;
import com.fasterxml.jackson.databind.deser.impl.CreatorCandidate.Param;
@RestController
public class studentContr
{
@Autowired
studentdao stdao;
@RequestMapping("/")
public String Home()
{
return "NewFile";
}
@PutMapping(path="/addstd", consumes = {"application/json"})
	public student updatestudent(@RequestBody student std)
{
stdao.save(std);
return std;
}
@DeleteMapping("/adstd/{aid}")
public String dltstudent(@PathVariable int aid)
{
student st=stdao.getOne(aid);
stdao.delete(st);
return "Deleted";
}
@GetMapping("/adstd")
public String getstudent(student std)
{
System.out.println(std);
stdao.save(std);
return "NewFile";
}
@PostMapping(path="/adstd",consumes = {"application/json"})
public student poststudent(@RequestBody student std)
{
System.out.println(std);
stdao.save(std);
return std;
}
@RequestMapping(path="/student/{id}",produces= {"application/xml"})
@ResponseBody
	public Optional<student> getstudent(@PathVariable("id") int id)
{
return stdao.findById(id);
}
}
|
java
| 10 | 0.748303 | 72 | 25.878378 | 74 |
starcoderdata
|
func (a *AdminClient) DeleteTopics(ctx context.Context, topics []string, options ...DeleteTopicsAdminOption) (result []TopicResult, err error) {
cTopics := make([]*C.rd_kafka_DeleteTopic_t, len(topics))
cErrstrSize := C.size_t(512)
cErrstr := (*C.char)(C.malloc(cErrstrSize))
defer C.free(unsafe.Pointer(cErrstr))
// Convert Go DeleteTopics to C DeleteTopics
for i, topic := range topics {
cTopics[i] = C.rd_kafka_DeleteTopic_new(C.CString(topic))
if cTopics[i] == nil {
return nil, newErrorFromString(ErrInvalidArg,
fmt.Sprintf("Invalid arguments for topic %s", topic))
}
defer C.rd_kafka_DeleteTopic_destroy(cTopics[i])
}
// Convert Go AdminOptions (if any) to C AdminOptions
genericOptions := make([]AdminOption, len(options))
for i := range options {
genericOptions[i] = options[i]
}
cOptions, err := adminOptionsSetup(a.handle, C.RD_KAFKA_ADMIN_OP_DELETETOPICS, genericOptions)
if err != nil {
return nil, err
}
defer C.rd_kafka_AdminOptions_destroy(cOptions)
// Create temporary queue for async operation
cQueue := C.rd_kafka_queue_new(a.handle.rk)
defer C.rd_kafka_queue_destroy(cQueue)
// Asynchronous call
C.rd_kafka_DeleteTopics(
a.handle.rk,
(**C.rd_kafka_DeleteTopic_t)(&cTopics[0]),
C.size_t(len(cTopics)),
cOptions,
cQueue)
// Wait for result, error or context timeout
rkev, err := a.waitResult(ctx, cQueue, C.RD_KAFKA_EVENT_DELETETOPICS_RESULT)
if err != nil {
return nil, err
}
defer C.rd_kafka_event_destroy(rkev)
cRes := C.rd_kafka_event_DeleteTopics_result(rkev)
// Convert result from C to Go
var cCnt C.size_t
cTopicRes := C.rd_kafka_DeleteTopics_result_topics(cRes, &cCnt)
return a.cToTopicResults(cTopicRes, cCnt)
}
|
go
| 14 | 0.713283 | 144 | 29.535714 | 56 |
inline
|
public static void setPersonData2(List<Integer> persons2) {
// Try to make a count of the array
int[] scoreCounter = new int[100]; // 100 is by default since we don't know the number of values
for (Integer score : persons2) {
Arrays.fill(scoreCounter, score);
// Try to delete all values equal to zero
int[] scoreCounter2 = IntStream.of(scoreCounter).filter(i -> i != 0).toArray();
// Calculate count
int test = scoreCounter2.length;
System.out.println(test);
}
}
|
java
| 12 | 0.594406 | 104 | 43.076923 | 13 |
inline
|
package commands
type BulkRegenerateCommand struct {
SignedBy string `required:"yes" long:"signed-by" description:"Selects the credential whose children should recursively be regenerated"`
OutputJSON bool `short:"j" long:"output-json" description:"Return response in JSON format"`
ClientCommand
}
func (c *BulkRegenerateCommand) Execute([]string) error {
credentials, err := c.client.BulkRegenerate(c.SignedBy)
if err != nil {
return err
}
printCredential(c.OutputJSON, credentials)
return nil
}
|
go
| 8 | 0.762646 | 138 | 27.555556 | 18 |
starcoderdata
|
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class MoveOnPointsTransition : Transition
{
[SerializeField] private float _distanceToPlayer;
private bool _playerNearby;
private List _availableItems;
private void Update()
{
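        // Keep the current state while a weaker player is within range; otherwise switch to the next state once the enemy has no target.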
if (Enemy.Player != null)
_playerNearby = Vector3.Distance(transform.position, Enemy.Player.Transform.position) < _distanceToPlayer;
if (_playerNearby && Enemy.Slime.UpgradingSlime.LevelSlime > Enemy.Player.Slime.UpgradingSlime.LevelSlime)
return;
if (Enemy.Target == null)
SwitchOnTransition();
}
}
|
c#
| 15 | 0.717507 | 118 | 31.782609 | 23 |
starcoderdata
|
"""Script which loads multiple datasets and prepares them for finetuning"""
import pandas as pd
import os
import datetime
import logging
import sys
import json
import glob
import tensorflow as tf
sys.path.append('../tensorflow_models')
sys.path.append('..')
from official.nlp.data.classifier_data_lib import DataProcessor, generate_tf_record_from_data_file, InputExample
from official.nlp.bert import tokenization
from utils.preprocess import preprocess_bert
from utils.misc import ArgParseDefault, add_bool_arg, save_to_json
from config import PRETRAINED_MODELS
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO, format='%(asctime)s [%(levelname)-5.5s] [%(name)-12.12s]: %(message)s')
REQUIRED_COLUMNS = ['id', 'label', 'text']
DATA_DIR = os.path.join('..', 'data')
VOCAB_PATH = os.path.join('..', 'vocabs')
class TextClassificationProcessor(DataProcessor):
"""Processor for arbitrary text classification data"""
def __init__(self, labels):
self.labels = labels
def save_label_mapping(self, data_dir):
with open(os.path.join(data_dir, 'label_mapping.json'), 'w') as f:
json.dump(self.labels, f)
def get_examples(self, data_dir, _type):
f_path = os.path.join(data_dir, f'{_type}.tsv')
lines = self._read_tsv(f_path)
return self._create_examples(lines, _type)
def get_train_examples(self, data_dir):
return self.get_examples(data_dir, 'train')
def get_dev_examples(self, data_dir):
return self.get_examples(data_dir, 'dev')
def get_test_examples(self, data_dir):
return self.get_examples(data_dir, 'test')
def get_labels(self):
return self.labels
@staticmethod
def get_processor_name():
return 'text-classification'
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for i, line in enumerate(lines):
guid = f'{set_type}-{i}'
text_a = tokenization.convert_to_unicode(line[REQUIRED_COLUMNS.index('text')])
if set_type == 'test':
label = '0'
else:
label = tokenization.convert_to_unicode(line[REQUIRED_COLUMNS.index('label')])
examples.append(InputExample(guid=guid, text_a=text_a, text_b=None, label=label))
return examples
def get_tokenizer(model_class):
model = PRETRAINED_MODELS[model_class]
vocab_file = os.path.join(VOCAB_PATH, model['vocab_file'])
tokenizer = tokenization.FullTokenizer(vocab_file=vocab_file, do_lower_case=model['lower_case'])
return tokenizer
def generate_tfrecords(args, dataset_dir, labels):
"""Generates tfrecords from generated tsv files"""
processor = TextClassificationProcessor(labels)
# save label mapping
processor.save_label_mapping(dataset_dir)
# get tokenizer
tokenizer = get_tokenizer(args.model_class)
processor_text_fn = tokenization.convert_to_unicode
# generate tfrecords
input_dir = os.path.join(dataset_dir, 'preprocessed')
output_dir = os.path.join(dataset_dir, 'tfrecords')
if not os.path.isdir(output_dir):
os.makedirs(output_dir)
input_meta_data = generate_tf_record_from_data_file(
processor,
input_dir,
tokenizer,
train_data_output_path=os.path.join(output_dir, 'train.tfrecords'),
eval_data_output_path=os.path.join(output_dir, 'dev.tfrecords'),
max_seq_length=args.max_seq_length)
with tf.io.gfile.GFile(os.path.join(dataset_dir, 'meta.json'), 'w') as writer:
writer.write(json.dumps(input_meta_data, indent=4) + '\n')
logger.info(f'Sucessfully wrote tfrecord files to {output_dir}')
def read_data(sheet_name):
# Read the vaccine_sentiment_epfl
worksheet = sheet_handler.worksheet(sheet_name)
rows = worksheet.get_all_values()
# Get it into pandas
df = pd.DataFrame.from_records(rows)
df.columns = df.iloc[0]
df = df.reindex(df.index.drop(0))
return df
def get_run_name(args):
# Use timestamp to generate a unique run name
ts = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S_%f')
if args.run_prefix:
run_name = f'run_{ts}_{args.run_prefix}'
else:
run_name = f'run_{ts}'
return run_name
def main(args):
# create run dirs
run_name = get_run_name(args)
run_dir = os.path.join(DATA_DIR, 'finetune', run_name)
if not os.path.isdir(run_dir):
os.makedirs(run_dir)
# find input data
originals_dir = os.path.join(DATA_DIR, 'finetune', 'originals')
if args.finetune_datasets is None or len(args.finetune_datasets) == 0:
finetune_datasets = os.listdir(originals_dir)
else:
finetune_datasets = args.finetune_datasets
do_lower_case = PRETRAINED_MODELS[args.model_class]['lower_case']
for dataset in finetune_datasets:
logger.info(f'Processing dataset {dataset}...')
preprocessed_folder = os.path.join(run_dir, dataset, 'preprocessed')
if not os.path.isdir(preprocessed_folder):
os.makedirs(preprocessed_folder)
labels = set()
for _type in ['train', 'dev']:
f_name = f'{_type}.tsv'
logger.info(f'Reading data for for type {_type}...')
f_path = os.path.join(originals_dir, dataset, f_name)
if not os.path.isfile(f_path):
logger.info(f'Could not find file {f_path}. Skipping.')
continue
df = pd.read_csv(f_path, usecols=REQUIRED_COLUMNS, sep='\t')
logger.info('Creating preprocessed files...')
df.loc[:, 'text'] = df.text.apply(preprocess_bert, args=(args, do_lower_case))
df.to_csv(os.path.join(preprocessed_folder, f_name), columns=REQUIRED_COLUMNS, header=False, index=False, sep='\t')
# add labels
labels.update(df.label.unique().tolist())
logger.info('Creating tfrecords files...')
# we sort the labels alphabetically in order to maintain consistent label ids
labels = sorted(list(labels))
dataset_dir = os.path.join(run_dir, dataset)
generate_tfrecords(args, dataset_dir, labels)
# saving config
f_path_config = os.path.join(run_dir, 'create_finetune_config.json')
logger.info(f'Saving config to {f_path_config}')
save_to_json(vars(args), f_path_config)
def parse_args():
parser = ArgParseDefault()
parser.add_argument('--finetune_datasets', type=str, nargs='+', help='Finetune dataset(s) to process. These correspond to folder names in data/finetune. \
Data should be located in data/finetune/originals/{finetune_dataset}/[train.tsv/dev.tsv/test.tsv]. By default runs all datasets.')
parser.add_argument('--model_class', default='bert_large_uncased_wwm', choices=PRETRAINED_MODELS.keys(), help='Model class')
parser.add_argument('--run_prefix', help='Prefix to be added to all runs. Useful to identify runs')
parser.add_argument('--max_seq_length', default=96, type=int, help='Maximum sequence length')
parser.add_argument('--username_filler', default='twitteruser', type=str, help='Username filler')
parser.add_argument('--url_filler', default='twitterurl', type=str, help='URL filler (ignored when replace_urls option is false)')
add_bool_arg(parser, 'replace_usernames', default=True, help='Replace usernames with filler')
add_bool_arg(parser, 'replace_urls', default=True, help='Replace URLs with filler')
add_bool_arg(parser, 'asciify_emojis', default=True, help='Asciifyi emojis')
    add_bool_arg(parser, 'replace_multiple_usernames', default=True, help='Replace "@user @user" with "2 @user"')
    add_bool_arg(parser, 'replace_multiple_urls', default=True, help='Replace "http://... http://.." with "2 http://"')
add_bool_arg(parser, 'standardize_punctuation', default=True, help='Standardize (asciifyi) special punctuation')
add_bool_arg(parser, 'remove_unicode_symbols', default=True, help='After preprocessing remove characters which belong to unicode category "So"')
add_bool_arg(parser, 'remove_accented_characters', default=False, help='Remove accents/asciify everything. Probably not recommended.')
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
main(args)
|
python
| 18 | 0.667182 | 158 | 44.907104 | 183 |
starcoderdata
|
package com.github.Is0x4096.common.utils.network.http.enums;
/**
* @Author:
* @Project: common-utils
* @DateTime: 2019-10-06 18:30
 * @Description: ContentType enum
*/
public enum HttpContentTypeEnum {
TEXT_HTML("text/html", "HTML格式"),
TEXT_PLAIN("text/plain", "纯文本格式"),
TEXT_XML("text/xml", "XML格式"),
APPLICATION_JSON("application/json", "JSON数据格式"),
APPLICATION_X_WWW_FORM_URLENCODED("application/x-www-form-urlencoded", "表单默认的提交数据的格式"),
;
private final String type;
private final String desc;
HttpContentTypeEnum(String type, String desc) {
this.type = type;
this.desc = desc;
}
public String getType() {
return type;
}
public String getDesc() {
return desc;
}
}
|
java
| 9 | 0.659478 | 106 | 22.184211 | 38 |
starcoderdata
|
static void power_usage_clock_single(t_power_usage * power_usage,
t_clock_network * single_clock) {
/*
*
* The following code assumes a spine-and-rib clock network as shown below.
 * This consists of 3 main components:
* 1. A single wire from the io pad to the center of the chip
* 2. A H-structure which provides a 'spine' to all 4 quadrants
* 3. Ribs connect each spine with an entire column of blocks
___________________
| |
| |_|_|_2__|_|_|_ |
| | | | | | | | |
| |3| | | | | | |
| | |
| | | | | | | | |
| |_|_|__|_|_|_|_ |
| | | | | | | | |
|_______1|________|
 * It is assumed that there are single-inverter buffers placed along each wire,
* with spacing equal to the FPGA block size (1 buffer/block) */
t_power_usage clock_buffer_power;
int length;
t_power_usage buffer_power;
t_power_usage wire_power;
float C_segment;
float buffer_size;
auto& power_ctx = g_vpr_ctx.power();
auto& device_ctx = g_vpr_ctx.device();
power_usage->dynamic = 0.;
power_usage->leakage = 0.;
/* Check if this clock is active - this is used for calculating leakage */
if (single_clock->dens) {
} else {
VTR_ASSERT(0);
}
C_segment = power_ctx.commonly_used->tile_length * single_clock->C_wire;
if (single_clock->autosize_buffer) {
buffer_size = 1 + C_segment / power_ctx.commonly_used->INV_1X_C_in;
} else {
buffer_size = single_clock->buffer_size;
}
/* Calculate the capacitance and leakage power for the clock buffer */
power_usage_inverter(&clock_buffer_power, single_clock->dens,
single_clock->prob, buffer_size, single_clock->period);
length = 0;
/* 1. IO to chip center */
length += device_ctx.grid.height() / 2;
/* 2. H-Tree to 4 quadrants */
length += device_ctx.grid.height() / 2; //Vertical component of H
length += 2 * device_ctx.grid.width(); //Horizontal horizontal component of H (two rows)
	/* 3. Ribs - connect the spine to each column of blocks */
length += device_ctx.grid.width() / 2 * device_ctx.grid.height(); //Each rib spand 1/2 of width, two rows of ribs
buffer_power.dynamic = length * clock_buffer_power.dynamic;
buffer_power.leakage = length * clock_buffer_power.leakage;
power_add_usage(power_usage, &buffer_power);
power_component_add_usage(&buffer_power, POWER_COMPONENT_CLOCK_BUFFER);
power_usage_wire(&wire_power, length * C_segment, single_clock->dens,
single_clock->period);
power_add_usage(power_usage, &wire_power);
power_component_add_usage(&wire_power, POWER_COMPONENT_CLOCK_WIRE);
return;
}
|
c++
| 11 | 0.619762 | 114 | 31.805195 | 77 |
inline
|
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.loan.business;
import static org.mifos.accounts.loan.util.helpers.LoanConstants.RECALCULATE_INTEREST;
import java.util.ArrayList;
import java.util.Date;
import java.util.Map;
import java.math.BigDecimal;
import org.mifos.accounts.business.AccountPaymentEntity;
import org.mifos.accounts.loan.schedule.calculation.ScheduleCalculator;
import org.mifos.accounts.loan.schedule.domain.Schedule;
import org.mifos.config.AccountingRules;
import org.mifos.config.business.service.ConfigurationBusinessService;
import org.mifos.config.persistence.ConfigurationPersistence;
import org.mifos.customers.personnel.business.PersonnelBO;
import org.mifos.framework.util.helpers.Money;
import org.springframework.beans.factory.annotation.Autowired;
public class ScheduleCalculatorAdaptor {
private ScheduleCalculator scheduleCalculator;
private ScheduleMapper scheduleMapper;
private ConfigurationBusinessService configurationBusinessService;
@Autowired
public ScheduleCalculatorAdaptor(ScheduleCalculator scheduleCalculator, ScheduleMapper scheduleMapper, ConfigurationBusinessService configurationBusinessService) {
this.scheduleCalculator = scheduleCalculator;
this.scheduleMapper = scheduleMapper;
this.configurationBusinessService = configurationBusinessService;
}
public void applyPayment(LoanBO loanBO, Money amount, Date paymentDate, PersonnelBO personnel, AccountPaymentEntity accountPaymentEntity, boolean adjustment) {
Schedule schedule = scheduleMapper.mapToSchedule(loanBO.getLoanScheduleEntities(), loanBO.getDisbursementDate(),
getDailyInterest(loanBO.getInterestRate()), loanBO.getLoanAmount().getAmount());
scheduleCalculator.applyPayment(schedule, amount.getAmount(), paymentDate, adjustment);
scheduleMapper.populatePaymentDetails(schedule, loanBO, paymentDate, personnel, accountPaymentEntity);
}
public void computeExtraInterest(LoanBO loan, Date asOfDate) {
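        // Extra interest is only computed for declining-balance loans: the schedule is mapped, recalculated, and the results copied back onto the loan schedule entities.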
if(configurationBusinessService.isRecalculateInterestEnabled() && loan.isDecliningBalanceEqualPrincipleCalculation()){
            Schedule schedule = scheduleMapper.mapToSchedule(new ArrayList<LoanScheduleEntity>(loan.getLoanScheduleEntities()),
loan.getDisbursementDate(), getDailyInterest(loan.getInterestRate()), loan.getLoanAmount().getAmount());
scheduleCalculator.computeExtraInterest(schedule, asOfDate);
populateExtraInterestInLoanScheduleEntities(schedule, loan.getLoanScheduleEntityMap());
} else if (loan.isDecliningBalanceInterestRecalculation()) {
            Schedule schedule = scheduleMapper.mapToSchedule(new ArrayList<LoanScheduleEntity>(loan.getLoanScheduleEntities()),
loan.getDisbursementDate(), getDailyInterest(loan.getInterestRate()), loan.getLoanAmount().getAmount());
scheduleCalculator.computeExtraInterest(schedule, asOfDate);
populateExtraInterestInLoanScheduleEntities(schedule, loan.getLoanScheduleEntityMap());
}
}
public BigDecimal getExtraInterest(LoanBO loan, Date transactionDate) {
        Schedule schedule = scheduleMapper.mapToSchedule(new ArrayList<LoanScheduleEntity>(loan.getLoanScheduleEntities()),
loan.getDisbursementDate(), getDailyInterest(loan.getInterestRate()), loan.getLoanAmount().getAmount());
return scheduleCalculator.getExtraInterest(schedule, transactionDate);
}
private double getDailyInterest(Double annualInterest) {
return annualInterest / (AccountingRules.getNumberOfInterestDays() * 100d);
}
void populateExtraInterestInLoanScheduleEntities(Schedule schedule, Map<Integer, LoanScheduleEntity> loanScheduleEntities) {
scheduleMapper.populateExtraInterestInLoanScheduleEntities(schedule, loanScheduleEntities);
}
public RepaymentResultsHolder computeRepaymentAmount(LoanBO loanBO, Date asOfDate) {
Schedule schedule = scheduleMapper.mapToSchedule(loanBO.getLoanScheduleEntities(), loanBO.getDisbursementDate(),
getDailyInterest(loanBO.getInterestRate()), loanBO.getLoanAmount().getAmount());
return scheduleCalculator.computeRepaymentAmount(schedule, asOfDate);
}
}
|
java
| 16 | 0.774846 | 167 | 53.793478 | 92 |
starcoderdata
|
//@flow
/** this is foo */
function foo() {}
.foo.
// ^
|
javascript
| 3 | 0.571429 | 29 | 10.375 | 8 |
starcoderdata
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.tealReduxEgg = tealReduxEgg;
var _redux = require("redux");
var _InitialStateBuilder = require("./InitialStateBuilder");
var _ReduceBuilder = require("./ReduceBuilder");
var _AfterActionMiddlewareBuilder = require("./AfterActionMiddlewareBuilder");
function tealReduxEgg({
tool,
breed
}) {
const initialStateBuilder = new _InitialStateBuilder.InitialStateBuilder();
const reduceBuilder = new _ReduceBuilder.ReduceBuilder();
const afterActionMiddlewareBuilder = new _AfterActionMiddlewareBuilder.AfterActionMiddlewareBuilder();
const middlewareList = [];
const composeEnhancers = typeof window !== 'undefined' && window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || _redux.compose;
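  // Register egg "tools" that collect state initializers, action reducers, after-action hooks and extra middleware; the "store" breed below assembles the Redux store from them.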
tool('initializeState', initializeFn => {
initialStateBuilder.add(initializeFn);
});
tool('reduceAction', (actionType, reduce) => {
reduceBuilder.add(actionType, reduce);
});
tool('afterAction', (actionType, reduce) => {
afterActionMiddlewareBuilder.add(actionType, reduce);
});
tool('addMiddleware', middleware => {
middlewareList.push(middleware);
});
breed('store', breeds => {
const initialState = initialStateBuilder.build(breeds);
return (0, _redux.createStore)(reduceBuilder.build(), initialState, composeEnhancers((0, _redux.applyMiddleware)(...middlewareList, afterActionMiddlewareBuilder.build(breeds))));
});
}
|
javascript
| 21 | 0.736806 | 182 | 34.214286 | 42 |
starcoderdata
|
<?php
namespace App\Models;
use App\Models\Report;
use App\Models\SubDivision;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\Factories\HasFactory;
class Division extends Model
{
use HasFactory;
protected $guarded = [];
const NAMES = [
1 => 'Seksi Riksa I',
2 => 'Seksi Riksa II',
3 => 'Seksi Riksa III',
4 => 'Seksi Riksa IV',
];
const SUB_UNIT = [1, 2];
public function subDivisions()
{
return $this->hasMany( SubDivision::class );
}
public function report()
{
return $this->hasMany( Report::class );
}
public function users()
{
return $this->hasManyThrough( User::class, SubDivision::class);
}
}
|
php
| 10 | 0.603699 | 71 | 19.459459 | 37 |
starcoderdata
|
#ifndef _OSMAND_CORE_BUILDING_H_
#define _OSMAND_CORE_BUILDING_H_
#include <cstdint>
#include <QtGlobal>
#include <QString>
#include <OsmAndCore.h>
namespace OsmAnd
{
class OSMAND_CORE_API Building
{
Q_DISABLE_COPY_AND_MOVE(Building);
public:
enum Interpolation
{
Invalid = 0,
All = -1,
Even = -2,
Odd = -3,
Alphabetic = -4
};
private:
protected:
public:
Building();
virtual ~Building();
int64_t _id;
QString _name;
QString _latinName;
QString _name2; // WTF?
QString _latinName2; // WTF?
QString _postcode;
uint32_t _xTile24;
uint32_t _yTile24;
uint32_t _x2Tile24;
uint32_t _y2Tile24;
Interpolation _interpolation;
uint32_t _interpolationInterval;
uint32_t _offset;
};
} // namespace OsmAnd
#endif // !defined(_OSMAND_CORE_BUILDING_H_)
|
c
| 12 | 0.572477 | 44 | 20.8 | 50 |
starcoderdata
|
#include <imp/core/image_raw.hpp>
#include <algorithm>
#include <memory>
namespace imp {
//-----------------------------------------------------------------------------
template<typename Pixel, imp::PixelType pixel_type>
ImageRaw<Pixel, pixel_type>::ImageRaw(std::uint32_t width, std::uint32_t height)
: Base(width, height)
{
data_.reset(Memory::alignedAlloc(width, height, &pitch_));
}
//-----------------------------------------------------------------------------
template<typename Pixel, imp::PixelType pixel_type>
ImageRaw<Pixel, pixel_type>::ImageRaw(const imp::Size2u& size)
: Base(size)
{
data_.reset(Memory::alignedAlloc(size, &pitch_));
}
//-----------------------------------------------------------------------------
template<typename Pixel, imp::PixelType pixel_type>
ImageRaw<Pixel, pixel_type>::ImageRaw(const ImageRaw& from)
: Base(from)
{
data_.reset(Memory::alignedAlloc(this->width(), this->height(), &pitch_));
from.copyTo(*this);
}
//-----------------------------------------------------------------------------
template<typename Pixel, imp::PixelType pixel_type>
ImageRaw<Pixel, pixel_type>::ImageRaw(const Image<Pixel, pixel_type>& from)
: Base(from)
{
data_.reset(Memory::alignedAlloc(this->width(), this->height(), &pitch_));
from.copyTo(*this);
}
//-----------------------------------------------------------------------------
template<typename Pixel, imp::PixelType pixel_type>
ImageRaw<Pixel, pixel_type>
::ImageRaw(Pixel* data, std::uint32_t width, std::uint32_t height,
size_t pitch, bool use_ext_data_pointer)
: Base(width, height)
{
if (data == nullptr)
{
throw imp::Exception("input data not valid", __FILE__, __FUNCTION__, __LINE__);
}
if(use_ext_data_pointer)
{
// This uses the external data pointer as internal data pointer.
auto dealloc_nop = [](Pixel*) { ; };
data_ = std::unique_ptr<Pixel, Deallocator>(data, Deallocator(dealloc_nop));
pitch_ = pitch;
}
else
{
data_.reset(Memory::alignedAlloc(this->width(), this->height(), &pitch_));
size_t stride = pitch / sizeof(Pixel);
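    // If the source pitch matches this image's layout exactly, copy in one block; otherwise copy row by row, honouring both strides.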
if (this->bytes() == pitch*height)
{
std::copy(data, data+stride*height, data_.get());
}
else
{
for (std::uint32_t y=0; y<height; ++y)
{
for (std::uint32_t x=0; x<width; ++x)
{
data_.get()[y*this->stride()+x] = data[y*stride + x];
}
}
}
}
}
//-----------------------------------------------------------------------------
template<typename Pixel, imp::PixelType pixel_type>
ImageRaw<Pixel, pixel_type>::ImageRaw(Pixel* data,
std::uint32_t width, std::uint32_t height,
size_t pitch,
const std::shared_ptr<void const>& tracked)
: Base(width, height)
{
if (data == nullptr || tracked == nullptr)
{
throw imp::Exception("input data not valid", __FILE__, __FUNCTION__, __LINE__);
}
auto dealloc_nop = [](Pixel*) { ; };
data_ = std::unique_ptr<Pixel, Deallocator>(data, Deallocator(dealloc_nop));
pitch_ = pitch;
tracked_ = tracked;
}
//-----------------------------------------------------------------------------
template<typename Pixel, imp::PixelType pixel_type>
Pixel* ImageRaw<Pixel, pixel_type>::data(
std::uint32_t ox, std::uint32_t oy)
{
if (ox > this->width() || oy > this->height())
{
throw imp::Exception("Request starting offset is outside of the image.", __FILE__, __FUNCTION__, __LINE__);
}
return &data_.get()[oy*this->stride() + ox];
}
//-----------------------------------------------------------------------------
template<typename Pixel, imp::PixelType pixel_type>
const Pixel* ImageRaw<Pixel, pixel_type>::data(
std::uint32_t ox, std::uint32_t oy) const
{
if (ox > this->width() || oy > this->height())
{
throw imp::Exception("Request starting offset is outside of the image.", __FILE__, __FUNCTION__, __LINE__);
}
return reinterpret_cast<const Pixel*>(&data_.get()[oy*this->stride() + ox]);
}
//=============================================================================
// Explicitely instantiate the desired classes
// (sync with typedefs at the end of the hpp file)
template class ImageRaw<imp::Pixel8uC1, imp::PixelType::i8uC1>;
template class ImageRaw<imp::Pixel8uC2, imp::PixelType::i8uC2>;
template class ImageRaw<imp::Pixel8uC3, imp::PixelType::i8uC3>;
template class ImageRaw<imp::Pixel8uC4, imp::PixelType::i8uC4>;
template class ImageRaw<imp::Pixel16uC1, imp::PixelType::i16uC1>;
template class ImageRaw<imp::Pixel16uC2, imp::PixelType::i16uC2>;
template class ImageRaw<imp::Pixel16uC3, imp::PixelType::i16uC3>;
template class ImageRaw<imp::Pixel16uC4, imp::PixelType::i16uC4>;
template class ImageRaw<imp::Pixel32sC1, imp::PixelType::i32sC1>;
template class ImageRaw<imp::Pixel32sC2, imp::PixelType::i32sC2>;
template class ImageRaw<imp::Pixel32sC3, imp::PixelType::i32sC3>;
template class ImageRaw<imp::Pixel32sC4, imp::PixelType::i32sC4>;
template class ImageRaw<imp::Pixel32fC1, imp::PixelType::i32fC1>;
template class ImageRaw<imp::Pixel32fC2, imp::PixelType::i32fC2>;
template class ImageRaw<imp::Pixel32fC3, imp::PixelType::i32fC3>;
template class ImageRaw<imp::Pixel32fC4, imp::PixelType::i32fC4>;
} // namespace imp
|
c++
| 24 | 0.571482 | 111 | 33.792208 | 154 |
starcoderdata
|
const { join } = require('path');
const validStyles = {
css: 'css',
sass: 'sass',
scss: 'sass',
stylus: 'styl',
styl: 'styl',
less: 'less',
};
function applyOptionsToColocatedStyles(civicPaths, cliOptions) {
const { styles: styleExtension } = cliOptions;
const { appRoot, archRoot } = civicPaths;
return function mergeColocatedStyles(webpackConfig) {
// return webpackConfig;
const styleToLoad = validStyles[styleExtension.toLowerCase()];
// we'll grab the appropriate loader based on file extension, the loader just looks for a
// stylesheet of the same name and then ungracefully grafts an import statement to the beginning
// of a file. This of course makes it useless on the server but handy on the client. Coupled
// with extract-text-plugin, the application can either spit out its stylesheets, keep them
// within the JS bundle, or both
const rules = webpackConfig.module.rules.concat({
loader: join(archRoot, `webpack/loaders/colocatedStyleLoader-${styleToLoad}`),
test: /\.(js|jsx)$/,
enforce: 'post',
include: join(appRoot, 'src'),
});
// eslint-disable-next-line prefer-object-spread/prefer-object-spread
return Object.assign({}, webpackConfig, {
module: { rules },
});
};
}
module.exports = applyOptionsToColocatedStyles;
|
javascript
| 14 | 0.694706 | 100 | 33.475 | 40 |
starcoderdata
|
#region Copyright Syncfusion Inc. 2001 - 2013
// Copyright Syncfusion Inc. 2001 - 2013. All rights reserved.
// Use of this code is subject to the terms of our license.
// A copy of the current license can be obtained at any time by e-mailing
// Any infringement will be prosecuted under
// applicable laws.
#endregion
using System;
using System.Collections;
using System.Text;
using System.Diagnostics;
namespace SummaryInCaption
{
///
/// Provides a diagnostic utility for measuring performance.
///
    /// The following example displays a message box with performance info.
/// <code lang="C#">
/// private void button1_Click(object sender, System.EventArgs e)
/// {
/// int rows = (int) this.numericUpDown1.Value;
///
/// this.gridControl1.theData = new VirtGrid.VirtData(rows, 20);
/// this.gridControl1.Refresh();
///
/// using (MeasureTime.Measure("gridControl1.TopRowIndex = 500000"))
/// {
/// this.gridControl1.TopRowIndex = 5000000;
/// }
///
/// MessageBox.Show(MeasureTime.DumpTimes());
/// }
///
///
public class MeasureTime : IDisposable
{
static Hashtable times = new Hashtable();
#region Externals
[System.Runtime.InteropServices.DllImport("Kernel32.dll")]
private static extern int QueryPerformanceFrequency(ref Int64 lpFrequency);
[System.Runtime.InteropServices.DllImport("Kernel32.dll")]
private static extern int QueryPerformanceCounter(ref Int64 lpPerformanceCount);
#endregion
///
/// Returns the time since Reset in microseconds.
///
public Int64 TickCount
{
get
{
Int64 m_LastCount = 0;
QueryPerformanceCounter(ref m_LastCount);
return m_LastCount;
}
}
Int64 ticks = 0;
string id = "";
MeasureTime(string id)
{
this.id = id;
ticks = TickCount;
}
///
/// Insert this before a code block that should be measured.
///
public static MeasureTime Measure(string id)
{
return new MeasureTime(id);
}
///
/// Ends the code block that should be measured and increments the associated performance counter.
///
public void Dispose()
{
Int64 time = TickCount - ticks;
if (times.ContainsKey(id))
times[id] = ((Int64) times[id]) + time;
else
times[id] = time;
}
private static long GetFrequency()
{
long ret = 0;
if (QueryPerformanceFrequency(ref ret) == 0)
throw new NotSupportedException("Error while querying the performance counter frequency.");
return ret;
}
///
/// Prints all performance counters into a string and sorts it by ids. All counters will be reset afterwards.
///
public static string DumpTimes()
{
StringBuilder sb = new StringBuilder();
sb.Append("MeasureTime results:");
ArrayList al = new ArrayList();
foreach (DictionaryEntry d in times)
{
Int64 value = (Int64) d.Value;
double t1 = ((double) value) / (double) GetFrequency() * 1000 * 1000;
string msg = d.Key.ToString();
StringBuilder s1 = new StringBuilder("\r\n");
if (t1 < 1000)
s1.AppendFormat("{0} Time = {1} us", msg, t1.ToString("F2"));
else if (t1 < 1000000)
s1.AppendFormat("{0} Time = {1} ms", msg, (t1 / 1000).ToString("F2"));
else
s1.AppendFormat("{0} Time = {1} s", msg, (t1 / 1000000).ToString("F2"));
al.Add(s1.ToString());
}
al.Sort();
foreach (string s in al)
sb.Append(s);
times.Clear();
return sb.ToString();
}
}
}
|
c#
| 19 | 0.544268 | 121 | 31.763359 | 131 |
starcoderdata
|
namespace Appmilla.Moneyhub.Refit.OpenFinance
{
[System.CodeDom.Compiler.GeneratedCode("NJsonSchema", "10.1.26.0 (Newtonsoft.Json v9.0.0.0)")]
public enum AccountDetailsRunningCostPeriod
{
[System.Runtime.Serialization.EnumMember(Value = @"month")]
Month = 0,
[System.Runtime.Serialization.EnumMember(Value = @"year")]
Year = 1,
}
}
|
c#
| 11 | 0.664921 | 98 | 30.916667 | 12 |
starcoderdata
|
func getVpaWeightFromIntervals(hvpa *autoscalingv1alpha1.Hvpa, desiredReplicas, currentReplicas int32) autoscalingv1alpha1.VpaWeight {
var vpaWeight autoscalingv1alpha1.VpaWeight
// lastFraction is set to default 1 to handle the case when vpaWeight is 1 in the matching interval,
// and there are no fractional vpaWeights in the previous intervals. So we need to default to this value
lastFraction := autoscalingv1alpha1.VpaWeight(1)
lookupNextFraction := false
for _, interval := range hvpa.Spec.WeightBasedScalingIntervals {
if lookupNextFraction {
if interval.VpaWeight < 1 {
vpaWeight = interval.VpaWeight
break
} else {
continue
}
}
// TODO: Following 2 if checks need to be done as part of verification process
if interval.StartReplicaCount == 0 {
interval.StartReplicaCount = *hvpa.Spec.HpaTemplate.MinReplicas
}
if interval.LastReplicaCount == 0 {
interval.LastReplicaCount = hvpa.Spec.HpaTemplate.MaxReplicas
}
if interval.VpaWeight < 1 {
lastFraction = interval.VpaWeight
}
if currentReplicas >= interval.StartReplicaCount && currentReplicas <= interval.LastReplicaCount {
vpaWeight = interval.VpaWeight
if vpaWeight == 1 {
if desiredReplicas < currentReplicas {
// If HPA wants to scale in, use last seen fractional value as vpaWeight
// If there is no such value, we cannot scale in anyway, so keep it default 1
vpaWeight = lastFraction
} else if desiredReplicas > currentReplicas {
// If HPA wants to scale out, use next fractional value as vpaWeight
// If there is no such value, we can not scale out anyway, so we will end up with vpaWeight = 1
lookupNextFraction = true
continue
}
}
break
}
}
return vpaWeight
}
|
go
| 14 | 0.736541 | 134 | 38.704545 | 44 |
inline
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using EnvDTE;
using Microsoft.VisualStudio;
namespace Enexure.SolutionSettings.Services
{
class VisualStudioSettingsManager
{
private readonly DTE environment;
static readonly VSConstants.VSStd97CmdID[] optionsCommands =
{
VSConstants.VSStd97CmdID.ToolsOptions,
VSConstants.VSStd97CmdID.DebugOptions,
VSConstants.VSStd97CmdID.CustomizeKeyboard
};
public event EventHandler OnSettingsUpdated;
public VisualStudioSettingsManager(DTE environment)
{
this.environment = environment;
foreach (var optionCmdId in optionsCommands) {
AddCommandEventHandler(VSConstants.GUID_VSStandardCommandSet97, optionCmdId, ToolsOptionsCommand_AfterExecute);
}
}
// Necessary to prevent event objects from being GC'd.
// See http://stackoverflow.com/a/13581371/34397
		private readonly List<CommandEvents> commandEventHandlers = new List<CommandEvents>();
private void AddCommandEventHandler(Guid group, VSConstants.VSStd97CmdID cmdId, _dispCommandEvents_AfterExecuteEventHandler handler)
{
var commandEvents = environment.Events.CommandEvents[group.ToString("B"), (int)cmdId];
commandEvents.AfterExecute += handler;
commandEventHandlers.Add(commandEvents);
}
private void ToolsOptionsCommand_AfterExecute(string Guid, int ID, object CustomIn, object CustomOut)
{
if (OnSettingsUpdated != null) {
OnSettingsUpdated(this, new EventArgs());
}
}
}
}
|
c#
| 18 | 0.781925 | 134 | 28.365385 | 52 |
starcoderdata
|
def CPJUpdater(request):
request_json = request.get_json(silent=True)
request_args = request.args
if request_json and 'event' in request_json:
event = request_json['event']
name = event['name']
version = event['latestVersion']
when = datetime.datetime.fromtimestamp(int(event['lastUpdate'])/1000)
elif request_args and 'name' in request_args:
name = request_args['name']
else:
name = 'World'
# Get the event type in case we have to handle others in the future
if request_json and 'webhook' in request_json:
webh = request_json['webhook']
webhook_event = webh['webhookEvent']
else:
webhook_event = 'Unknown'
    # Notify the Slack and GChat CPJnotifiers we have a patch title update
    message = ( f'*{webhook_event} -- The Patch Title for {name} was updated for version {version} at {when} ').encode()
publisher = pubsub_v1.PublisherClient()
# Here we get from the Cloud function environment the Run Time environment variables we have set
# for the PubSub topics making this portable. These will be a string of the format
# projects/<yourprojectname>/topics/CPJNotifer
# assuming the notifier function is in the same GCS project
topic_name = os.environ.get('topicCPJName')
topic_download = os.environ.get('topicCPJDownload')
# publish to the notifiers
publisher.publish(topic_name, message)
# Notify the CPJDownloader to check for packages
data_download = ( f'{name}, {version}')
message_download = data_download.encode()
publisher_download = pubsub_v1.PublisherClient()
publisher_download.publish(topic_download, message_download)
return '200'
|
python
| 14 | 0.673739 | 119 | 38.659091 | 44 |
inline
|
// Copyright 2017 All rights reserved.
// Use of this source code is governed by a MIT license that can
// be found in the LICENSE file.
package widgets
import (
"fmt"
"image"
rw "github.com/mattn/go-runewidth"
. "github.com/buahaha/termui/v3"
)
type BarChart struct {
Block
BarColors []Color
LabelStyles []Style
NumStyles []Style // only Fg and Modifier are used
NumFormatter func(float64) string
Data []float64
Labels []string
BarWidth int
BarGap int
MaxVal float64
}
func NewBarChart() *BarChart {
return &BarChart{
Block: *NewBlock(),
BarColors: Theme.BarChart.Bars,
NumStyles: Theme.BarChart.Nums,
LabelStyles: Theme.BarChart.Labels,
NumFormatter: func(n float64) string { return fmt.Sprint(n) },
BarGap: 1,
BarWidth: 3,
}
}
func (self *BarChart) Draw(buf *Buffer) {
self.Block.Draw(buf)
maxVal := self.MaxVal
if maxVal == 0 {
maxVal, _ = GetMaxFloat64FromSlice(self.Data)
}
barXCoordinate := self.Inner.Min.X
for i, data := range self.Data {
// draw bar
height := int((data / maxVal) * float64(self.Inner.Dy()-1))
for x := barXCoordinate; x < MinInt(barXCoordinate+self.BarWidth, self.Inner.Max.X); x++ {
for y := self.Inner.Max.Y - 2; y > (self.Inner.Max.Y-2)-height; y-- {
c := NewCell(' ', NewStyle(ColorClear, SelectColor(self.BarColors, i)))
buf.SetCell(c, image.Pt(x, y))
}
}
// draw label
if i < len(self.Labels) {
labelXCoordinate := barXCoordinate +
int((float64(self.BarWidth) / 2)) -
int((float64(rw.StringWidth(self.Labels[i])) / 2))
buf.SetString(
self.Labels[i],
SelectStyle(self.LabelStyles, i),
image.Pt(labelXCoordinate, self.Inner.Max.Y-1),
)
}
// draw number
numberXCoordinate := barXCoordinate + int((float64(self.BarWidth) / 2))
if numberXCoordinate <= self.Inner.Max.X {
buf.SetString(
self.NumFormatter(data),
NewStyle(
SelectStyle(self.NumStyles, i+1).Fg,
SelectColor(self.BarColors, i),
SelectStyle(self.NumStyles, i+1).Modifier,
),
image.Pt(numberXCoordinate, self.Inner.Max.Y-2),
)
}
barXCoordinate += (self.BarWidth + self.BarGap)
}
}
|
go
| 20 | 0.652193 | 92 | 23.822222 | 90 |
starcoderdata
|
const simpleDebounce = require('../../src/function/debounce/simple');
const complexDebounce = require('../../src/function/debounce/complex');
describe('debounce', () => {
  it('simple version', (done) => {
const fn = jest.fn(() => {
expect(fn).toHaveBeenCalledTimes(1);
done();
});
const debounce = simpleDebounce(fn, 1000);
for (let i = 0; i < 10000; i++) {
debounce();
expect(fn).not.toHaveBeenCalled();
}
});
  it('complex version', (done) => {
const fn = jest.fn(function () {
expect(fn).toHaveBeenCalledTimes(1);
expect(this.test).toBe('test');
done();
});
const debounce = complexDebounce(fn, 1000, {
context: { test: 'test' }
});
for (let i = 0; i < 10000; i++) {
debounce();
expect(fn).not.toHaveBeenCalled();
}
});
  it('complex version - leading call', () => {
const fn = jest.fn();
const debounce = complexDebounce(fn, 1000, {
leading: true
});
for (let i = 0; i < 10000; i++) {
debounce();
expect(fn).toHaveBeenCalledTimes(1);
}
});
  it('complex version - cancel', (done) => {
const fn = jest.fn();
const debounce = complexDebounce(fn, 1000);
for (let i = 0; i < 10000; i++) {
debounce();
expect(fn).not.toHaveBeenCalled();
}
debounce.cancel();
setTimeout(() => {
expect(fn).not.toHaveBeenCalled();
done();
}, 2000);
});
});
|
javascript
| 21 | 0.541723 | 97 | 21.515152 | 66 |
starcoderdata
|
<?php
namespace ColorDifference\CIE;
/**
* Description of CIE76
*
* @author solarys
*/
class CIE76 implements CIEInterface
{
/**
* @var array matrix for convert rgb to xyz
*/
private $matrix = [
[0.4124, 0.3576, 0.1805],
[0.2126, 0.7152, 0.0722],
[0.0193, 0.1192, 0.9505],
];
/**
* Converts rgb to xyz
* @param $r int red
* @param $g int green
* @param $b int blue
* @return array [x, y, z]
*/
public function rgbToXyz($r, $g, $b)
{
$red = $this->adjustValueForXyz($r);
$green = $this->adjustValueForXyz($g);
$blue = $this->adjustValueForXyz($b);
$x = $red * $this->matrix[0][0] + $green * $this->matrix[0][1] + $blue * $this->matrix[0][2];
$y = $red * $this->matrix[1][0] + $green * $this->matrix[1][1] + $blue * $this->matrix[1][2];
$z = $red * $this->matrix[2][0] + $green * $this->matrix[2][1] + $blue * $this->matrix[2][2];
return [$x, $y, $z];
}
/**
* @param $value float
* @return float
*/
private function adjustValueForXyz($value)
{
$value = $value / 255; //normalize
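        // Inverse sRGB companding (gamma expansion) before scaling to the 0-100 range expected by the XYZ matrix.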
if($value > 0.04045){
$value = pow(($value + 0.055) / 1.055, 2.4);
}
else{
$value = $value / 12.92;
}
return $value * 100;
}
/**
* Converts xyz to Lab
* @param $x int x
* @param $y int y
* @param $z int z
* @return array [l, a, b]
*/
public function xyzToLab($x, $y, $z)
{
$x = $this->adjustValueForLab($x / 95.047);
$y = $this->adjustValueForLab($y / 100);
$z = $this->adjustValueForLab($z / 108.883);
$L= 116 * $y - 16;
$a= 500 * ($x - $y);
$b= 200 * ($y - $z);
return [$L, $a, $b];
}
/**
* @param $value float
* @return float
*/
private function adjustValueForLab($value){
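        // CIE f(t) helper: cube root above the 0.008856 threshold, linear approximation below it.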
if($value > 0.008856){
$value = pow($value, 1/3);
}
else{
$value = (7.787 * $value) + (16 / 116);
}
return $value;
}
/**
* @param $Lab1 array [L, a, b]
* @param $Lab2 array [L, a, b]
* @return float difference
*/
public function deltaE($Lab1, $Lab2){
return sqrt(
pow($Lab2[0] - $Lab1[0], 2) +
pow($Lab2[1] - $Lab1[1], 2) +
pow($Lab2[2] - $Lab1[2], 2)
);
}
public function getColorDifference($rgb1, $rgb2){
$xyz1 = $this->rgbToXyz($rgb1[0], $rgb1[1], $rgb1[2]);
$Lab1 = $this->xyzToLab($xyz1[0], $xyz1[1], $xyz1[2]);
$xyz2 = $this->rgbToXyz($rgb2[0], $rgb2[1], $rgb2[2]);
$Lab2 = $this->xyzToLab($xyz2[0], $xyz2[1], $xyz2[2]);
return $this->deltaE($Lab1, $Lab2);
}
}
|
php
| 17 | 0.463035 | 101 | 22.966102 | 118 |
starcoderdata
|
<?php
/**
* Created by PhpStorm.
* User: Pedro
* Date: 13/01/2016
* Time: 12:04 AM
*/
namespace DHI\Libraries\Wallets;
use DHI\UserMovement;
class MovementHelper
{
static function getMinRow(UserMovement $user_movement)
{
$result = '
$result .= ' . $user_movement->id . '
switch ($user_movement->movement_id) {
case 1:
$result .= ' . $user_movement->movement->name . ': ' . $user_movement->payment->user->user . '
[' . $user_movement->payment->product->name . ']</td >';
break;
case 2:
$result .= ' . $user_movement->movement->name . '
break;
}
$result .= ' class="' . ($user_movement->type == 'income' ? 'text-success' : 'text-danger') . ' text-semibold" >
<i class="icon ' . ($user_movement->type == 'income' ? 'md-chevron-up' : 'md-chevron-down') . '" aria - hidden = "true" >
</i > $' . number_format($user_movement->amount, 2, '.', ',') . '</span >
$result .= ' . date('Y M d', strtotime($user_movement->created_at)) . '
$result .= '
return $result;
}
}
|
php
| 19 | 0.49088 | 137 | 35.057143 | 35 |
starcoderdata
|
"use strict";
exports.__esModule = true;
exports.MenuItem = MenuItem;
var _react = require("react");
var _react2 = _interopRequireDefault(_react);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function MenuItem(_ref) {
var selected = _ref.selected,
title = _ref.title,
content = _ref.content,
items = _ref.items,
type = _ref.type,
mouseEntered = _ref.mouseEntered,
hasData = _ref.hasData,
props = _ref.props,
selectedProps = _ref.selectedProps;
var cname = "menu-item";
if (selected) {
cname = "menu-item-selected";
}
return _react2.default.createElement(
"li",
{ id: "menu-item", className: cname, onMouseEnter: mouseEntered },
title
);
}
exports.default = MenuItem;
|
javascript
| 9 | 0.600473 | 95 | 22.527778 | 36 |
starcoderdata
|
var EventCollector = require('./event_collector');
var CommandHub = function (eventHub) {
/**
* List of all command collections
* @type Object
*/
var commandCollections = {};
/**
* Create new event collector for a certain namespace
* @returns {EventCollector}
*/
function getEventCollectorForNamespace(namespace) {
return new EventCollector(namespace, eventHub);
}
/**
* Initialize the passed command collection
* @param namespace
* @param commandCollection
* @returns CommandCollection
*/
function initializeCommandCollection(namespace, commandCollection) {
var eventCollectorForNamespace = getEventCollectorForNamespace(namespace);
return new commandCollection(eventCollectorForNamespace);
}
/**
* Register a new command collection
* @param namespace
* @param commandCollection
*/
this.register = function (namespace, commandCollection) {
commandCollections[namespace] = initializeCommandCollection(namespace, commandCollection);
};
/**
* Find a command collection
* @param namespace
* @return CommandCollection
* @throws error when command collection is not found
*/
this.find = function (namespace) {
return commandCollections[namespace];
}
};
module.exports = CommandHub;
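// Usage sketch (illustrative only; eventHub and UserCommandCollection are assumed to
// exist elsewhere and are not part of this module):
// const CommandHub = require('./command_hub');
// const hub = new CommandHub(eventHub);
// hub.register('users', UserCommandCollection); // the constructor receives an EventCollector
// const userCommands = hub.find('users');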
|
javascript
| 13 | 0.670051 | 98 | 26.6 | 50 |
starcoderdata
|
def run(self):
attrs = {}
queue = self.conn.dms.find_queue(name_or_id=self.params['queue_name'])
if not queue:
self.exit(
changed=False,
failed=True,
message=('No Queue with name or ID %s found') % (self.params['queue_name'])
)
queue_group = self.conn.dms.find_group(queue=queue, name_or_id=self.params['group_name'], ignore_missing=True)
if self.params['state'] == 'present':
# Queue-Group creation
if not queue_group:
attrs['queue'] = queue.id
attrs['name'] = self.params['group_name']
if self.ansible.check_mode:
self.exit(changed=True)
group = self.conn.dms.create_group(**attrs)
self.exit(changed=True, group=group)
# Queue-Group Modification - not possible
elif queue:
self.exit(
changed=False,
failed=True,
message=('A Queue-Group with this name already exists. Aborting')
)
if self.params['state'] == 'absent':
# Queue-Group Deletion
if queue_group:
attrs['queue'] = queue.id
attrs['group'] = queue_group.id
if self.ansible.check_mode:
self.exit(changed=True)
queue = self.conn.dms.delete_group(**attrs)
self.exit(changed=True)
elif not queue_group:
self.exit(
changed=False,
failed=True,
                message=('No Queue-Group with name or ID %s found') % (self.params['group_name'])
)
|
python
| 16 | 0.480791 | 118 | 35.142857 | 49 |
inline
|
package com.lothrazar.cyclic.util;
import com.lothrazar.cyclic.data.Const;
import net.minecraft.entity.player.PlayerEntity;
import net.minecraft.item.ItemStack;
import net.minecraft.item.Items;
import net.minecraft.nbt.CompoundNBT;
import net.minecraft.util.math.BlockPos;
public class UtilNBT {
public static ItemStack buildNamedPlayerSkull(PlayerEntity player) {
return buildNamedPlayerSkull(player.getDisplayName().getString());
}
public static ItemStack buildNamedPlayerSkull(String displayNameString) {
CompoundNBT t = new CompoundNBT();
t.putString(Const.SKULLOWNER, displayNameString);
return buildSkullFromTag(t);
}
public static ItemStack buildSkullFromTag(CompoundNBT player) {
ItemStack skull = new ItemStack(Items.PLAYER_HEAD);
skull.setTag(player);
return skull;
}
public static void setItemStackBlockPos(ItemStack item, BlockPos pos) {
if (pos == null || item.isEmpty()) {
return;
}
UtilNBT.setItemStackNBTVal(item, "xpos", pos.getX());
UtilNBT.setItemStackNBTVal(item, "ypos", pos.getY());
UtilNBT.setItemStackNBTVal(item, "zpos", pos.getZ());
}
public static void putBlockPos(CompoundNBT tag, BlockPos pos) {
tag.putInt("xpos", pos.getX());
tag.putInt("ypos", pos.getY());
tag.putInt("zpos", pos.getZ());
}
public static BlockPos getItemStackBlockPos(ItemStack item) {
if (item.isEmpty() || item.getTag() == null || !item.getTag().contains("xpos")) {
return null;
}
CompoundNBT tag = item.getOrCreateTag();
return getBlockPos(tag);
}
public static BlockPos getBlockPos(CompoundNBT tag) {
return new BlockPos(tag.getInt("xpos"), tag.getInt("ypos"), tag.getInt("zpos"));
}
public static void setItemStackNBTVal(ItemStack item, String prop, int value) {
if (item.isEmpty()) {
return;
}
item.getOrCreateTag().putInt(prop, value);
}
public static CompoundNBT getItemStackNBT(ItemStack held) {
return held.getOrCreateTag();
}
}
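// Usage sketch (hypothetical values; relies only on the helpers above):
// ItemStack stack = new ItemStack(Items.COMPASS);
// UtilNBT.setItemStackBlockPos(stack, new BlockPos(10, 64, -3));
// BlockPos saved = UtilNBT.getItemStackBlockPos(stack); // null if the stack was never stamped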
|
java
| 12 | 0.709355 | 85 | 29.753846 | 65 |
starcoderdata
|
func (q *PostingsListCache) emitCacheGetMetrics(patternType PatternType, hit bool) {
var method *postingsListCacheMethodMetrics
switch patternType {
case PatternTypeRegexp:
method = q.metrics.regexp
case PatternTypeTerm:
method = q.metrics.term
case PatternTypeField:
method = q.metrics.field
case PatternTypeSearch:
method = q.metrics.search
default:
method = q.metrics.unknown // should never happen
}
if hit {
method.hits.Inc(1)
} else {
method.misses.Inc(1)
}
}
|
go
| 9 | 0.75102 | 84 | 23.55 | 20 |
inline
|
<?php
namespace PSFS\Command;
use Composer\Console\Application;
use PSFS\base\Router;
use PSFS\base\types\helpers\DeployHelper;
use PSFS\base\types\helpers\GeneratorHelper;
use Symfony\Component\Console\Input\InputArgument;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
if (!isset($console)) {
$console = new Application();
}
$console
->register('psfs:deploy:project')
->setDefinition(array(
new InputArgument('path', InputArgument::OPTIONAL, t('Path en el que crear el Document Root')),
))
->setDescription(t('Comando de despliegue de proyectos basados en PSFS'))
->setCode(function (InputInterface $input, OutputInterface $output) {
// Creates the html path
$path = $input->getArgument('path');
if (empty($path)) {
$path = WEB_DIR;
}
GeneratorHelper::createRoot($path, $output);
$output->writeln(str_replace('%path', $path, t("Document root re-generado en %path")));
$version = DeployHelper::updateCacheVar();
$output->writeln(str_replace('%version', $version, t("Versión de cache actualizada a %version")));
$router = Router::getInstance();
$router->hydrateRouting();
$router->simpatize();
$output->writeln(t("Rutas del proyecto generadas con éxito"));
});
|
php
| 18 | 0.667396 | 106 | 34.153846 | 39 |
starcoderdata
|
package edu.osu.cse.pa.dsmodels;
import edu.osu.cse.pa.spg.AbstractSPGEdge;
public class NumberedFldPair extends FldPair {
private int ctxHash;
public NumberedFldPair(AbstractSPGEdge edge, boolean isBar, int ctxHash) {
super(edge, isBar);
this.ctxHash = ctxHash;
}
public int getCtxHash() {
return ctxHash;
}
}
|
java
| 8 | 0.739394 | 75 | 18.411765 | 17 |
starcoderdata
|
using System;
using System.Collections.Generic;
namespace MatrixDotNet.Extensions.Sorting
{
public static class MatrixSortExtension
{
        public static void Sort<T>(this Matrix<T> matrix)
where T : unmanaged
{
Array.Sort(matrix._Matrix);
}
        public static void Sort<T>(this Matrix<T> matrix, IComparer<T> comparer)
where T : unmanaged
{
Array.Sort(matrix._Matrix, comparer);
}
        public static void SortByRows<T>(this Matrix<T> matrix)
where T : unmanaged
{
for (int i = 0; i < matrix.Rows; i++)
{
Array.Sort(matrix._Matrix, i * matrix.Columns, matrix.Columns);
}
}
        public static void SortByRows<T>(this Matrix<T> matrix, IComparer<T> comparer)
where T : unmanaged
{
for (int i = 0; i < matrix.Rows; i++)
{
Array.Sort(matrix._Matrix, i * matrix.Columns, matrix.Columns, comparer);
}
}
        public static void SortByColumns<T>(this Matrix<T> matrix)
            where T : unmanaged
        {
            matrix.SortByColumns(Comparer<T>.Default);
        }
        public static void SortByColumns<T>(this Matrix<T> matrix, IComparer<T> comparer)
where T : unmanaged
{
for (int column = 0; column < matrix.Columns; column++)
{
for (int i = 0; i < matrix.Rows - 1; i++)
{
for (int j = i + 1; j > 0; j--)
{
if (comparer.Compare(matrix[j - 1, column], matrix[j, column]) > 0)
{
// swap
(matrix[j - 1, column], matrix[j, column]) =
(matrix[j, column], matrix[j - 1, column]);
}
}
}
}
}
}
}
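// Usage sketch (assumes a MatrixDotNet Matrix<int> built elsewhere; the exact constructor
// is not shown in this file):
// var matrix = new Matrix<int>(new[,] { { 3, 1 }, { 4, 2 } });
// matrix.SortByRows();                         // each row sorted independently
// matrix.SortByColumns(Comparer<int>.Default); // per-column insertion sort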
|
c#
| 23 | 0.45927 | 91 | 30.28125 | 64 |
starcoderdata
|
/* This is free and unencumbered software released into the public domain.
*/
package maluach;
import javax.microedition.lcdui.Command;
import javax.microedition.lcdui.Displayable;
import javax.microedition.lcdui.Form;
import javax.microedition.lcdui.TextField;
/**
*
* @author orr
*/
public class IntSelect extends Form implements ScreenView,DisplaySelect
{
public static interface ValueListen
{
public void ValueChanged(int value);
}
public static final class IntValue
{
int m_value;
ValueListen m_vl;
public IntValue(ValueListen vl)
{
m_value=0;
m_vl=vl;
}
public IntValue(int new_value,ValueListen vl)
{
m_value=new_value;
m_vl=vl;
}
void setValue(int new_value)
{
m_value=new_value;
if (m_vl!=null)
m_vl.ValueChanged(m_value);
}
int value()
{
return m_value;
}
}
private TextField tf_int;
IntValue pval;
static int m_value;
static boolean m_selected;
public IntSelect(String title,int maxSize,IntValue val)
{
super(title);
InitCommands();
setCommandListener(maluach.getInstance());
String vstr="";
if (val!=null)
vstr=Integer.toString(val.value());
tf_int = new TextField("", vstr, maxSize, TextField.NUMERIC);
tf_int.setLayout(TextField.LAYOUT_RIGHT);
super.append(tf_int);
pval=val;
}
private void InitCommands()
{
addCommand(CommandPool.getC_back());
addCommand(CommandPool.getC_select());
}
public void Select()
{
if (tf_int.getString().length()>0)
{
m_value=Integer.parseInt(tf_int.getString());
if (pval!=null)
pval.setValue(m_value);
m_selected=true;
}
}
public static boolean selected()
{
return m_selected;
}
public static int value()
{
return m_value;
}
public void OnShow(Object param)
{
m_selected=false;
}
}
|
java
| 13 | 0.559725 | 74 | 21.295918 | 98 |
starcoderdata
|
import pytest
import unittest
from data_pyetl.connectors import DB_Connector
class TestConnectors(unittest.TestCase):
def test_error_on_init_data_source(self):
ds_con = ["Not a string"]
credentials = {
"host": "host",
"db": "db",
"username": "username",
"pwd": "
}
with pytest.raises(Exception, match=r"Data source engine must be a string"):
connector = DB_Connector(ds_con, credentials)
def test_error_on_init_credentials(self):
ds_con = "SQL"
credentials = ["Not a dict"]
with pytest.raises(Exception, match=r"The variable 'redentials' must be a dict of the data source credentials"):
connector = DB_Connector(ds_con, credentials)
def test_instance_created(self):
ds_con = "SQL"
credentials = {
"host": "host",
"db": "db",
"username": "username",
"pwd": "
}
connector = DB_Connector(ds_con, credentials)
self.assertIsInstance(connector, DB_Connector)
def test_create_sqlserver_connector(self):
ds_con = "SQL"
credentials = {
"host": "host",
"db": "db",
"username": "username",
"pwd": "
}
connector = DB_Connector(ds_con, credentials)
engine = connector.create_data_source_connection()
self.assertEqual(engine.name, "mssql")
def test_create_mysql_connector(self):
ds_con = "MySQL"
credentials = {
"host": "host",
"db": "db",
"username": "username",
"pwd": "
}
connector = DB_Connector(ds_con, credentials)
engine = connector.create_data_source_connection()
self.assertEqual(engine.name, "mysql")
def test_create_postgres_connector(self):
ds_con = "Postgres"
credentials = {
"host": "host",
"db": "db",
"username": "username",
"pwd": "
"port": "1234"
}
connector = DB_Connector(ds_con, credentials)
engine = connector.create_data_source_connection()
self.assertEqual(engine.name, "postgresql")
def test_create_firebird_connector(self):
ds_con = "Firebird"
credentials = {
"host": "host",
"db": "db",
"username": "username",
"pwd": "
"port": "1234"
}
connector = DB_Connector(ds_con, credentials)
engine = connector.create_data_source_connection()
self.assertEqual(engine.name, "firebird")
def test_create_oracle_connector(self):
ds_con = "Oracle"
credentials = {
"host": "host",
"db": "db",
"username": "username",
"pwd": "
"port": "1234",
"service_name": "name1"
}
connector = DB_Connector(ds_con, credentials)
engine = connector.create_data_source_connection()
self.assertEqual(engine.name, "oracle")
def test_create_none_connector(self):
ds_con = "None"
credentials = {
"host": "host",
"db": "db",
"username": "username",
"pwd": "
"port": "1234"
}
connector = DB_Connector(ds_con, credentials)
engine = connector.create_data_source_connection()
self.assertEqual(engine, None)
|
python
| 12 | 0.532561 | 120 | 28.950413 | 121 |
starcoderdata
|
package com.dxr.apply.service.api;
import com.dawnwing.framework.core.ServiceException;
import com.dxr.system.entity.SystemConstant;
/**
* @description:
* @author: w.xL
* @date: 2018-4-30
*/
public interface IOriginType {
/**
     * Fetch origin-type page data in EasyUI grid format
* @param page
* @param rows
* @param systemConstant
* @return
* @throws ServiceException
*/
public String getOriginTypePage(int page, int rows, SystemConstant constant)
throws ServiceException;
/**
     * Save a constant
* @param constant
* @return
* @throws ServiceException
*/
public String saveOriginType(SystemConstant constant)
throws ServiceException;
/**
     * Update a constant
* @param constant
* @return
* @throws ServiceException
*/
public String updateOriginType(SystemConstant constant)
throws ServiceException;
/**
     * Delete a constant
* @param constantId
* @return
* @throws ServiceException
*/
public String deleteOriginType(String constantId) throws ServiceException;
/**
     * Fetch the details of a constant
* @param constantId
* @return
* @throws ServiceException
*/
public String getDetails(String constantId) throws ServiceException;
/**
     * Fetch the data for the origin-type dropdown
* @return
* @throws ServiceException
*/
public String getOriginTypeCommbox() throws ServiceException;
}
|
java
| 7 | 0.634507 | 80 | 21.1875 | 64 |
starcoderdata
|
from controllers.type_result import Error
def unknownError(err):
return Error(10001,"Unknown error",err)
def invalidParamError(field,condition,err):
return Error(10007, "Invalid field(%s: %s)"%(field, condition), err)
def parameterParsingError(err):
return Error(10008, "Parameter parsing error", err)
def missRequiredParamError(v):
return Error(10009,"'%s' is required parameter" % (v), None)
def notFoundError():
return Error(10010, "Resource is not found", None)
def notAuthorizedError():
return Error(10011, "Resource is not authorized", None)
def notAuthorizedActionError():
return Error(10012, "Action is not authorized", None)
def statusError(v):
return Error(10013, "'%s', Status not Allowed"% (v), None)
def notUpdatedError():
return Error(10014, "Resource is not updated", None)
def notDeletedError():
return Error(10015, "Resource is not deleted", None)
def notCreatedError():
return Error(10016, "Resource is not created", None)
def invalidFieldError(field):
    return Error(10018, "Invalid fields [ %s ]" % field, None)
# print(statusError(111).code)
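# Usage sketch: controller code can return these helpers directly, e.g.
#   return missRequiredParamError('user_id')  # -> Error(10009, "'user_id' is required parameter", None)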
|
python
| 8 | 0.727354 | 72 | 25.571429 | 42 |
starcoderdata
|
<?php
require_once("conexion/conexion.php");
$cnn = new conexion();
$conn=$cnn->conectar();
$sql = "SELECT usuario_asistencia.idAsistencia, usuario.NombreUsuario, puntosdeventa.NombrePdV, date_format(usuario_asistencia.FechaRegistro, '%H:%i:%s') as Hora, "
. "(usuario_asistencia.CantGastosMovim + usuario_asistencia.CantGastosMovimTaxi + usuario_asistencia.CantGastosAlim + usuario_asistencia.CantGastosHosped + usuario_asistencia.CantGastosVario) as Gastos,"
. "date_format(usuario_asistencia.FechaRegistro, '%d-%b-%Y') as FechaRegistro, usuario_asistencia.Observacion "
. "FROM "
. "usuario_asistencia INNER JOIN "
. "usuario ON usuario.idUsuario = usuario_asistencia.idUsuario LEFT JOIN "
. "puntosdeventa ON puntosdeventa.IdPdV = usuario_asistencia.IdPdV "
. "WHERE usuario_asistencia.idUsuario = " . $_POST['IdSupervisor'] . " AND "
. "date_format(usuario_asistencia.FechaRegistro, '%Y-%m-%d') = date_format('" . $_POST['Fecha'] . "', '%Y-%m-%d')";
$resAsis = mysqli_query($conn, $sql);
$CADENA = "<ul class='timeline'>
<li class='time-label'>";
while ($row = mysqli_fetch_array($resAsis)) {
$CADENA = $CADENA . "<span class='bg-yellow'>". $row['FechaRegistro']."
<i class='fa fa-arrow-right bg-blue'>
<div class='timeline-item'>
<span class='time'>
<i class='fa fa-clock-o'> . $row['Hora'] . "
<h3 class='timeline-header'>
<i class='fa fa-angle-double-right'>
" . utf8_encode($row['NombrePdV']) . "
<div class='timeline-body'>
. utf8_encode($row['Observacion']) . "
}
$CADENA = $CADENA . "
$arrayName = array('mensaje' => $CADENA);
echo json_encode($arrayName);
?>
|
php
| 16 | 0.647619 | 211 | 36.208333 | 48 |
starcoderdata
|
export {
createServerApi,
ServerApi
}
from './api'
export {
createServer,
GraphQLServer
}
from './server'
|
javascript
| 4 | 0.729412 | 54 | 13.166667 | 12 |
starcoderdata
|
def test_read_yaml_toml_json_config(dl1_image_file, config_files):
"""check that we can read multiple formats of config file"""
tool = ProcessorTool()
for config_base in config_files:
config = resource_file(config_base)
tool.load_config_file(config)
tool.config.EventSource.input_url = dl1_image_file
tool.config.DataWriter.overwrite = True
tool.setup()
assert (
tool.get_current_config()["ProcessorTool"]["DataWriter"]["contact_info"].name
== "YOUR-NAME-HERE"
)
|
python
| 12 | 0.664773 | 85 | 34.266667 | 15 |
inline
|
#pragma once
#ifndef TARGETSELECTOR
#define TARGETSELECTOR
#include "LBuffInstance.h"
class TargetSelector : public pComponent
{
public:
bool bEnabled = true;
bool bInited = false;
Obj_AI_Base * target=nullptr;
Obj_AI_Base * forcetarget=nullptr;
Obj_AI_Base*localplayer = nullptr;
TargetSelector() { strcpy(classname, "TargetSelector"); strcpy(version, "0.0.1"); };
~TargetSelector() {};
void onProcessSpell(SpellData*spelldata, SpellCastInfo*spellcastinfo)
{
}
void onStart()
{
target = nullptr;
forcetarget = nullptr;
bInited = true;
ENGINE_MSG("Loaded Component: %s : Version: %s\n", classname, version);
}
void onUpdate()
{
if (!bInited)
return;
if (!bEnabled)
return;
localplayer = ObjectManager::GetPlayer();
if (forcetarget != nullptr)
if (!forcetarget->isValid() || !*forcetarget->GetVisible())
				forcetarget = nullptr; // null the forced target if it is dead or no longer visible
if (target != nullptr)
if (!target->isValid() || !*target->GetVisible())
				target = nullptr; // null the target if it is dead or no longer visible
}
void onRender()
{
if (!bInited)
return;
if (!bEnabled)
return;
if (!localplayer)
return;
auto ttr = (Obj_AI_Base*)ObjectManager::GetNearMouseTarget();
if (ttr != nullptr && forcetarget == nullptr)
target = ttr;
else
target = nullptr;
if (forcetarget != nullptr)
{
target = forcetarget;
}
if (GetAsyncKeyState(0x1))
{
auto obj = ObjectManager::GetUnderMouseObj();
if (obj)
{
if(reinterpret_cast && *reinterpret_cast
forcetarget = (Obj_AI_Base*)obj;
}
}
else if (GetAsyncKeyState(VK_CAPITAL))
{
forcetarget = nullptr;
}
}
void onMenu()
{
if (!bInited)
return;
if (ImGui::TreeNode("TargetSelector"))
{
ui::Checkbox("Enabled", &bEnabled);
ImGui::TreePop();
}
}
};
#endif
|
c++
| 21 | 0.647331 | 108 | 16.87156 | 109 |
starcoderdata
|
// pages/index/index.js
const shiftLimit = { x: 60, y: 60 } // displacement thresholds that trigger a category switch when swiping
const dftImg = '/images/default-img.png' // fallback image used when a news item has no picture
var touchStart // coordinates recorded when a touch starts
Page({
/**
   * Initial page data
*/
data: {
    // news categories
newsType: [
{ type: 'gn', name: '国内' },
{ type: 'gj', name: '国际' },
{ type: 'cj', name: '财经' },
{ type: 'yl', name: '娱乐' },
{ type: 'ty', name: '体育' },
{ type: 'other', name: '其他' },
],
    currentNewsType: 'gn', // currently active news category ('gn' = domestic, the initial default)
    newsList: [], // news list
},
/**
   * Lifecycle hook -- page load
*/
onLoad: function (options) {
this.getNewsList()
},
/**
   * Page event handler -- user pull-down
*/
onPullDownRefresh: function () {
    // refresh the news list on pull-down (the callback ends the pull-down animation once loading finishes)
this.getNewsList(() => {
wx.stopPullDownRefresh()
})
},
/**
   * Handler for taps on the news-category tabs
*/
onTapNewsList(event) {
this.setData({
currentNewsType: event.currentTarget.dataset.type,
})
this.getNewsList()
},
/**
   * Fetch the news list
*/
getNewsList(callback) {
wx.request({
url: 'https://test-miniprogram.com/api/news/list',
data: {
type: this.data.currentNewsType,
},
success: res => {
        // take the news list out of the response
let newsList = res.data.result
        // post-process the news list
for (let i = 0; i < newsList.length; i++) {
          // reformat the news timestamp returned by the API
let newsDate = newsList[i].date.substr(0, 10)
let newsTime = newsList[i].date.substr(11, 5)
newsList[i].date = `${newsDate} ${newsTime}`
          // fall back to the default image when the API returns none
if (newsList[i].firstImage == '')
newsList[i].firstImage = dftImg
}
this.setData({
newsList: newsList
})
},
complete: res => {
callback && callback()
},
})
},
/**
   * Navigate to the news detail page on tap
*/
onTapNewsDetial(event) {
let newsId = event.currentTarget.dataset.newsid
console.log(newsId)
wx.navigateTo({
url: `/pages/detial/detial?newsId=${newsId}`,
})
},
/**
   * Switch news category by swiping
*/
onSwitchTypeStart(event) {
    // record the coordinates where the touch started
touchStart = event.changedTouches[0]
},
onSwitchTypeEnd(event) {
    // coordinates at touch start and touch end
let startX = touchStart.pageX
let startY = touchStart.pageY
let endX = event.changedTouches[0].pageX
let endY = event.changedTouches[0].pageY
    // horizontal and vertical displacement
let shiftX = endX - startX
let shiftY = Math.abs(endY - startY)
    // index of the current news category in the category list
let typeIndex = this.getCurrentTypeIdx()
    // decide whether the displacement triggers a switch
if (shiftY < shiftLimit.y && shiftX > shiftLimit.x) {
      // left-swipe switch: move to the previous category
typeIndex -= 1
      // check whether we are already at the leftmost category
if (typeIndex < 0)
typeIndex = 0
} else if (shiftY < shiftLimit.y && shiftX < -shiftLimit.x) {
      // right-swipe switch: move to the next category
typeIndex += 1
      // check whether we are already at the rightmost category
if (typeIndex >= this.data.newsType.length)
typeIndex = this.data.newsType.length - 1
}
    // news category after the switch
let chengedType = this.data.newsType[typeIndex].type
    // only reload when the category actually changed
if (chengedType != this.data.currentNewsType) {
this.setData({
currentNewsType: chengedType
})
this.getNewsList()
}
},
/**
   * Get the index of the current news category in the category list
*/
getCurrentTypeIdx() {
let currentType = this.data.currentNewsType
let typeList = this.data.newsType
    // use Array.findIndex to locate the current category in the category list
let idx = typeList.findIndex(function(tmpType) {
return tmpType.type == currentType
})
return idx
}
})
|
javascript
| 17 | 0.567982 | 65 | 21.025 | 160 |
starcoderdata
|
private void unitAttack(Unit enemy, Unit attacker, List<Unit> deadUnits, List<Unit> units,
List<Square> area) {
enemy.setHp(enemy.getHp() - attacker.getAttackPower());
if (enemy.getHp() <= 0) {
deadUnits.add(enemy);
// we need to destroy the Unit
units.removeIf(u -> u.getPosition().equals(enemy.getPosition()));
// update the unit position in the map area to '.'
area.stream()
.filter(s -> s.getPosition().equals(enemy.getPosition()))
.forEach(s -> s.setSymbol('.'));
}
}
|
java
| 14 | 0.612546 | 90 | 40.769231 | 13 |
inline
|
const { Command } = require('discord.js-commando');
const Discord = require('discord.js');
const { RichEmbed } = require('discord.js');
module.exports = class TavsiyeCommand extends Command {
constructor(client) {
super(client, {
name: 'kapat',
group: 'diğer',
memberName: 'kapat',
description: 'Bulunduğunuz sunucunun davet linkini verir.',
throttling: {
usages: 1,
duration: 5
},
});
}
async run(message) {
if (message.content.toLowerCase().startsWith(`${message.guild.commandPrefix}` + `kapat`)) {
if (!message.channel.name.startsWith(`destek-`)) return message.channel.send(`Bu komut sadece Destek Talebi kanallarında kullanılabilir.`);
const embed = new Discord.RichEmbed()
.setColor("RANDOM")
.setAuthor(`Destek Talebi Kapatma İşlemi`)
.setDescription(`Destek talebini kapatmayı onaylamak için, \n10 saniye içinde \`evet\` yazınız.`)
.setFooter(`${this.client.user.username} | Destek Sistemi`)
.setTimestamp()
message.channel.send({embed})
.then((m) => {
message.channel.awaitMessages(response => response.content === 'evet', {
max: 1,
time: 10000,
errors: ['time'],
})
.then((collected) => {
message.channel.delete();
})
.catch(() => {
m.edit('Destek talebi kapatma isteği zaman aşımına uğradı.').then(m2 => {
m2.delete();
}, 3000);
});
});
}
}
}
|
javascript
| 27 | 0.538835 | 147 | 34.085106 | 47 |
starcoderdata
|
def save_nnet(self, outpath: str, mins: Iterable[float], maxs: Iterable[float]):
""" Output the current parameters to a file, in the same format as NNET files.
Following the code (already validated) in NNET.
:param mins: lower bounds for input vector
:param maxs: upper bounds for input vector
"""
with open(outpath, 'w') as f:
# headers
timestr = datetime.datetime.now().strftime('%m/%d/%Y %H:%M:%S')
f.writelines([
'// The contents of this file are licensed under the Creative Commons\n',
'// Attribution 4.0 International License: https://creativecommons.org/licenses/by/4.0/\n',
'// Neural Network File Format by Kyle Julian, Stanford 2016 (generated on %s)\n' % timestr
])
def _write_comma_line(vs):
""" Write a list of values into file ending with \n, each one followed by a comma.
:param vs: a list of values
"""
if isinstance(vs, Tensor):
# otherwise, enumeration will output strings like tensor(1.0)
vs = vs.numpy()
for v in vs:
f.write(str(v) + ',')
f.write('\n')
return
# line 1 - basics
max_hidden = 0 if len(self.hidden_sizes) == 0 else max(self.hidden_sizes)
max_layer_size = max(self.input_size, max_hidden, self.output_size)
_write_comma_line([self.n_layers, self.input_size, self.output_size, max_layer_size])
# line 2 - layer sizes
layer_sizes = [self.input_size] + self.hidden_sizes + [self.output_size]
_write_comma_line(layer_sizes)
# line 3 - symmetric
f.write('0,\n')
# line 4 - mins of input
_write_comma_line(mins)
# line 5 - maxs of input
_write_comma_line(maxs)
# line 6 - means
_write_comma_line(self.means)
# line 7 - ranges
_write_comma_line(self.ranges)
# writing parameters
for linear in self.all_linears:
in_size = linear.in_features
out_size = linear.out_features
# (1) write "weights"
w = linear.weight.data
for i in range(out_size):
for j in range(in_size):
f.write('%e,' % w[i][j])
f.write('\n')
# (2) write "biases"
b = linear.bias.data
for i in range(out_size):
# only 1 item for each
f.write('%e,\n' % b[i])
return
|
python
| 17 | 0.497469 | 107 | 38.528571 | 70 |
inline
|
import pytest
from pytest import approx
from experiments.evaluation import calculate_metrics
from models.linear import LinearGenModel
from data.synthetic import generate_wty_linear_multi_w_data
from causal_estimators.ipw_estimator import IPWEstimator
from causal_estimators.standardization_estimator import StandardizationEstimator
from utils import class_name
ATE = 5
N = 50
@pytest.fixture('module')
def linear_gen_model():
w, t, y = generate_wty_linear_multi_w_data(n=N, wdim=5, binary_treatment=True, delta=ATE, data_format='numpy')
return LinearGenModel(w, t, y, binary_treatment=True)
@pytest.fixture('module', params=[IPWEstimator(), StandardizationEstimator()], ids=class_name)
def estimator(request):
return request.param
def test_ate_metrics(linear_gen_model, estimator):
metrics = calculate_metrics(gen_model=linear_gen_model, estimator=estimator, n_seeds=10, conf_ints=False)
assert metrics['ate_squared_bias'] + metrics['ate_variance'] == approx(metrics['ate_mse'])
assert metrics['ate_bias'] == approx(0, abs=1)
def test_mean_ite_metrics(linear_gen_model):
metrics = calculate_metrics(gen_model=linear_gen_model, estimator=StandardizationEstimator(),
n_seeds=10, conf_ints=False)
assert metrics['mean_ite_abs_bias'] == approx(0, abs=1.3)
assert metrics['mean_ite_mse'] == approx(metrics['mean_pehe_squared'])
def test_vector_ite_metrics(linear_gen_model):
metrics = calculate_metrics(gen_model=linear_gen_model, estimator=StandardizationEstimator(),
n_seeds=10, conf_ints=False, return_ite_vectors=True)
assert metrics['ite_squared_bias'] + metrics['ite_variance'] == approx(metrics['ite_mse'])
|
python
| 10 | 0.725943 | 114 | 40.325581 | 43 |
starcoderdata
|
package process
import (
"context"
"fmt"
"math/rand"
"sync"
"sync/atomic"
"time"
"github.com/DataDog/datadog-agent/pkg/aggregator"
"github.com/DataDog/datadog-agent/pkg/autodiscovery/integration"
"github.com/DataDog/datadog-agent/pkg/collector/check"
core "github.com/DataDog/datadog-agent/pkg/collector/corechecks"
"github.com/DataDog/datadog-agent/pkg/process/config"
model "github.com/n9e/agent-payload/process"
"github.com/n9e/n9e-agentd/pkg/util"
"github.com/n9e/n9e-agentd/plugins/proc/checks"
"k8s.io/klog/v2"
"sigs.k8s.io/yaml"
)
const checkName = "proc"
var collector *Collector
type InstanceConfig struct {
checks.ProcessFilter `json:",inline"`
}
type checkConfig struct {
InstanceConfig
}
func (p checkConfig) String() string {
return util.Prettify(p)
}
func defaultInstanceConfig() InstanceConfig {
return InstanceConfig{}
}
func buildConfig(rawInstance integration.Data, _ integration.Data) (*checkConfig, error) {
instance := defaultInstanceConfig()
if err := yaml.Unmarshal(rawInstance, &instance); err != nil {
return nil, err
}
return &checkConfig{
InstanceConfig: instance,
}, nil
}
// Check doesn't need additional fields
type Check struct {
core.CheckBase
*Collector
filter *checks.ProcessFilter
}
// Run executes the check
func (c *Check) Run() error {
klog.V(6).Infof("entering Run()")
sender, err := aggregator.GetSender(c.ID())
if err != nil {
return err
}
procs := c.process.GetProcs()
stats := c.rtProcess.GetStats()
for _, pid := range c.filter.Pids {
proc, ok := procs[pid]
if !ok {
continue
}
stat, ok := stats[pid]
if !ok {
continue
}
collectProc(sender, proc, stat, []string{"target:" + c.filter.Target})
}
sender.Commit()
return nil
}
func collectProc(sender aggregator.Sender, proc *model.Process, stat *model.ProcessStat, tags []string) {
sender.Count("proc.num", 1, "", tags)
// uptime
sender.Gauge("proc.uptime", float64(time.Now().Unix()-stat.CreateTime/1000), "", tags)
sender.Gauge("proc.createtime", float64(stat.CreateTime)/1000, "", tags)
// fd
sender.Count("proc.open_fd_count", float64(stat.OpenFdCount), "", tags)
if mem := stat.Memory; mem != nil {
sender.Count("proc.mem.rss", float64(mem.Rss), "", tags)
sender.Count("proc.mem.vms", float64(mem.Vms), "", tags)
sender.Count("proc.mem.swap", float64(mem.Swap), "", tags)
sender.Count("proc.mem.shared", float64(mem.Shared), "", tags)
sender.Count("proc.mem.text", float64(mem.Text), "", tags)
sender.Count("proc.mem.lib", float64(mem.Lib), "", tags)
sender.Count("proc.mem.data", float64(mem.Data), "", tags)
sender.Count("proc.mem.dirty", float64(mem.Dirty), "", tags)
}
if cpu := stat.Cpu; cpu != nil {
sender.Count("proc.cpu.total", float64(cpu.TotalPct), "", tags)
sender.Count("proc.cpu.user", float64(cpu.UserPct), "", tags)
sender.Count("proc.cpu.sys", float64(cpu.SystemPct), "", tags)
sender.Count("proc.cpu.threads", float64(cpu.NumThreads), "", tags)
}
if io := stat.IoStat; io != nil {
sender.Count("proc.io.read_rate", float64(io.ReadRate), "", tags)
sender.Count("proc.io.write_rate", float64(io.WriteRate), "", tags)
sender.Count("proc.io.readbytes_rate", float64(io.ReadBytesRate), "", tags)
sender.Count("proc.io.writebytes_rate", float64(io.WriteBytesRate), "", tags)
}
if net := stat.Networks; net != nil {
sender.Count("proc.net.conn_rate", float64(net.ConnectionRate), "", tags)
sender.Count("proc.net.bytes_rate", float64(net.BytesRate), "", tags)
}
}
func (c *Check) Cancel() {
defer c.CheckBase.Cancel()
c.process.DelFilter(c.filter)
}
// Configure the Prom check
func (c *Check) Configure(rawInstance integration.Data, rawInitConfig integration.Data, source string) (err error) {
if collector == nil {
if collector, err = initCollector(); err != nil {
return err
}
}
c.Collector = collector
// Must be called before c.CommonConfigure
c.BuildID(rawInstance, rawInitConfig)
if err = c.CommonConfigure(rawInstance, source); err != nil {
return fmt.Errorf("common configure failed: %s", err)
}
config, err := buildConfig(rawInstance, rawInitConfig)
if err != nil {
return fmt.Errorf("build config failed: %s", err)
}
c.filter = &config.ProcessFilter
c.process.AddFilter(c.filter)
return nil
}
func checkFactory() check.Check {
return &Check{
CheckBase: core.NewCheckBase(checkName),
}
}
type Collector struct {
groupID int32
rtIntervalCh chan time.Duration
cfg *config.AgentConfig
// counters for each type of check
runCounters sync.Map
enabledChecks []checks.Check
// Controls the real-time interval, can change live.
realTimeInterval time.Duration
ctx context.Context
cancel context.CancelFunc
process *checks.ProcessCheck
rtProcess *checks.RTProcessCheck
}
func initCollector() (*Collector, error) {
c, err := newCollector(config.NewDefaultAgentConfig(false))
if err != nil {
return nil, err
}
c.run()
return c, nil
}
// newCollector creates a new Collector
func newCollector(cfg *config.AgentConfig) (*Collector, error) {
sysInfo, err := checks.CollectSystemInfo(cfg)
if err != nil {
return nil, err
}
for _, c := range checks.All {
c.Init(cfg, sysInfo)
}
ctx, cancel := context.WithCancel(context.Background())
return &Collector{
rtIntervalCh: make(chan time.Duration),
cfg: cfg,
groupID: rand.Int31(),
enabledChecks: checks.All,
// Defaults for real-time on start
realTimeInterval: 2 * time.Second,
ctx: ctx,
cancel: cancel,
process: checks.Process,
rtProcess: checks.RTProcess,
}, nil
}
func (p *Collector) run() {
for _, c := range p.enabledChecks {
go func(c checks.Check) {
klog.Infof("process run %s", c.Name())
if !c.RealTime() {
p.runCheck(c)
}
ticker := time.NewTicker(p.cfg.CheckInterval(c.Name()))
for {
select {
case <-ticker.C:
p.runCheck(c)
case <-p.ctx.Done():
return
}
}
}(c)
}
}
func (p *Collector) runCheck(c checks.Check) {
klog.V(11).Infof("process run check %s", c.Name())
messages, err := c.Run(p.cfg, atomic.AddInt32(&p.groupID, 1))
if err != nil {
klog.Errorf("unable to run check %s %s", c.Name(), err)
return
}
for _, s := range messages {
klog.V(6).Infof("process run check %s %s", c.Name(), s.String())
}
}
func init() {
core.RegisterCheck(checkName, checkFactory)
}
|
go
| 18 | 0.678116 | 116 | 23.557252 | 262 |
starcoderdata
|
import React from 'react';
import {
View,
StatusBar,
Image,
Text,
TouchableOpacity,
Linking,
} from 'react-native';
import logo from '../../assets/images/logo_grey.png';
import tank from '../../assets/images/tank_white.png';
import BoxBackground from '../../components/BoxBackground/index';
import {colors} from '../../styles';
import styles from './styles';
function ScreenAbout() {
const viewOfContent = [
<View style={styles.viewBoxContent}>
<Text style={styles.textContent}>
O aplicativo “Computação Plugada – Pixel se baseia na atividade
“Representação de Imagens” do livro de e Fellows, M. (2011), “Computer Science Unplugged – Ensinando
Ciência da Computação sem o uso do Computador”. Tradução de Luciano
        2011.
      </Text>
    </View>,
<View style={styles.viewBoxContent}>
<Text style={styles.textContent}>
Este livro apresenta atividades práticas, lúdicas e acessíveis sobre
diversos temas da ciência da computação. O mesmo engloba técnicas fáceis
para seu uso, tornando-se disponível para todos. Sua prática é fácil não
só em salas de aula mas também fora, podendo ser administrado por
        professores e compreendido pelas crianças.
      </Text>
    </View>,
<View style={styles.viewBoxContent}>
<Text style={styles.textContent}>
O aplicativo aqui apresentado busca simplificar ainda mais a aplicação
das atividades propostas sem a necessidade de produção de materiais
adicionais. O aplicativo é uma grande inovação para as técnicas de
aprendizagem, apresentando temas complexos de uma forma elementar para o
        conhecimento.
      </Text>
    </View>,
];
return (
<View style={styles.container}>
<StatusBar
barStyle="light-content"
backgroundColor={colors.colorPrimary}
/>
<Image source={logo} style={styles.logo} />
<BoxBackground content={viewOfContent} />
<Image source={tank} style={styles.tankTetris} />
<Text style={styles.credits}>
Desenvolvido e mantido pela equipe do projeto Computação Plugada da
        UFPB campus IV e colaboradores de forma open source.
      </Text>
<TouchableOpacity
onPress={
() =>
Linking.openURL(
'https://github.com/pluggedcomputing/pixel/blob/develop/LICENSE',
)
// eslint-disable-next-line react/jsx-curly-newline
}>
<Text style={[styles.credits, {textDecorationLine: 'underline'}]}>
          License MIT {new Date().getFullYear()}
        </Text>
      </TouchableOpacity>
    </View>
);
}
export default ScreenAbout;
|
javascript
| 15 | 0.644178 | 116 | 34.805195 | 77 |
starcoderdata
|
import React from 'react';
import MediaList from './components/MediaList';
import MediaUploader from './components/MediaUpload';
function Media() {
return (
<>
<MediaUploader />
      <MediaList />
    </>
);
}
export default Media;
|
javascript
| 7 | 0.664286 | 53 | 16.5 | 16 |
starcoderdata
|
inline UINT32 NES_N106::get_phase (int channel)
{
// 24-bit phase stored in channel regs 1/3/5
channel = channel << 3;
return (reg[0x41 + channel] )
+ (reg[0x43 + channel] << 8 )
+ (reg[0x45 + channel] << 16);
}
|
c++
| 11 | 0.533597 | 48 | 29.875 | 8 |
inline
|
<?php
namespace App\Http\Controllers\API;
use Illuminate\Http\Request;
use App\Http\Controllers\Controller;
use App\LichSuMuaCredit;
use App\NguoiChoi;
class LichSuMuaCreditController extends Controller
{
public function listHistoryBuyCredit()
{
$lichSuMuaCredits = LichSuMuaCredit::all();
if ($lichSuMuaCredits != null) {
$result = [
'success' => true,
'message' => "Lấy danh sách lịch sử mua credit thành công",
'data' => $lichSuMuaCredits
];
return response()->json($result);
}
return response()->json([
'success' => false,
'message' => "Lấy danh sách lịch sử mua credit thất bại"
]);
}
    // to test in Postman with query parameters, change $id to Request $request and read $request->id
public function getHistoryBuyCreditPathById($id)
{
        // id passed via the route (path parameter)
$lichSuMuaCredits = LichSuMuaCredit::find($id);
if ($lichSuMuaCredits == null) {
return response()->json([
'success' => false,
'message' => "Lấy lịch sử mua credit theo id thất bại"
]);
}
return response()->json([
'success' => true,
'message' => "Lấy lịch sử mua credit theo id thành công",
'data' => $lichSuMuaCredits
]);
}
    // this part is buggy
    public function updateHistoryBuyCreditById(Request $request)
    {
        $nguoiChois = NguoiChoi::find($request->id);
$nguoiChois->credit = $nguoiChois->credit + $request->credit;
$nguoiChois->save();
//
$lichSuMuaCredits = new LichSuMuaCredit();
$lichSuMuaCredits->nguoi_choi_id = $request->id;
$lichSuMuaCredits->goi_credit_id = $request->goi_credit_id;
$lichSuMuaCredits->credit = $request->credit;
$lichSuMuaCredits->so_tien = $request->so_tien;
$lichSuMuaCredits->save();
return response()->json([
'success' => true,
'message' => 'Mua gói credit thành công',
]);
}
//
public function getHistoryBuyCreditByIdQueryString(Request $request)
{
        // id passed via the query string
        $lichSuMuaCredits = LichSuMuaCredit::where('id', $request->id)->get();
if ($lichSuMuaCredits == null) {
return response()->json([
'success' => false,
'message' => "Lấy lịch sử mua credit theo id thất bại"
]);
}
return response()->json([
'success' => true,
'message' => "Lấy lịch sử mua credit theo id thành công",
'data' => $lichSuMuaCredits
]);
}
//
}
|
php
| 14 | 0.553329 | 115 | 32.662651 | 83 |
starcoderdata
|
import React from "react";
import PropTypes from "prop-types";
import { Link } from "gatsby";
const MarkdownLinkRender = ({ href, children }) => {
if (href && href[0] === "/") {
return (
<Link
to={href}
className="text-lg text-blue-700 hover:text-blue-800 hover:underline"
>
        {children}
      </Link>
);
}
return (
<a
target="_blank"
rel="noopener noreferrer"
href={href}
className="text-lg text-blue-700 hover:text-blue-800 hover:underline"
>
      {children}
    </a>
);
};
MarkdownLinkRender.propTypes = {
href: PropTypes.string.isRequired,
children: PropTypes.array.isRequired,
};
export default MarkdownLinkRender;
|
javascript
| 13 | 0.604225 | 77 | 20.515152 | 33 |
starcoderdata
|
template <bool defaultMayEq>
class CombinedScalar : public virtual Scalar, public CombinedMemLocObject<defaultMayEq>
{
/*private:
CombinedScalar(const std::list<MemLocObjectPtr>& memLocs) : CombinedMemLocObject<defaultMayEq>(memLocs) {}
public:
// Creates a new CombinedScalar when called by CombinedMemLocObject::create. It is assumed that all the
// sub-objects have type Scalar.
static boost::shared_ptr<CombinedScalar<defaultMayEq> > create(const std::list<MemLocObjectPtr>& memLocs, PartEdgePtr pedge)
{ return boost::make_shared<CombinedScalar<defaultMayEq> >(memLocs); }*/
public:
CombinedScalar(const std::list<MemLocObjectPtr>& memLocs);
std::string str(std::string indent); // pretty print for the object
std::string strp(PartEdgePtr pedge, std::string indent="");
}
|
c
| 10 | 0.746799 | 126 | 43 | 16 |
inline
|
package org.xlb.publish.svn.impl.service;
import org.xlb.publish.svn.inf.service.ISvnService;
/**
 * Pooled implementation of {@link ISvnService}
*
* @author Allen
 * @date 2016-08-08
*/
public class SvnServicePoolImpl extends SvnCommonImpl implements ISvnService {
@Override
public void createSVNRepository() {
// TODO Auto-generated method stub
}
@Override
public void closeRepo() {
// TODO Auto-generated method stub
}
@Override
public void createSVNClientManager() {
// TODO Auto-generated method stub
}
}
|
java
| 8 | 0.726744 | 78 | 16.2 | 30 |
starcoderdata
|
using Microsoft.AspNetCore.Builder;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using UserMicroservice.Data;
namespace UserMicroservice.Helpers
{
public class PrepareDatabase
{
public static void PreparePopulation(IApplicationBuilder app)
{
using (var serviceScope = app.ApplicationServices.CreateScope())
{
                SeedData(serviceScope.ServiceProvider.GetService<UserContext>());
}
}
public static void SeedData(UserContext context)
{
System.Console.WriteLine("Appling Migrations To User database...");
context.Database.Migrate();
}
}
}
|
c#
| 17 | 0.709091 | 124 | 29.16129 | 31 |
starcoderdata
|
class Mutex : public Reasons
{
friend class Condition;
private:
pthread_mutex_t mlock;
static pthread_mutexattr_t attr;
#ifdef DEV_BUILD
int lockCount;
#endif
private:
void init()
{
#ifdef DEV_BUILD
lockCount = 0;
#endif
int rc = pthread_mutex_init(&mlock, &attr);
if (rc)
system_call_failed::raise("pthread_mutex_init", rc);
}
public:
Mutex() { init(); }
explicit Mutex(MemoryPool&) { init(); }
~Mutex()
{
fb_assert(lockCount == 0);
int rc = pthread_mutex_destroy(&mlock);
if (rc)
system_call_failed::raise("pthread_mutex_destroy", rc);
}
void enter(const char* aReason)
{
int rc = pthread_mutex_lock(&mlock);
if (rc)
system_call_failed::raise("pthread_mutex_lock", rc);
reason(aReason);
#ifdef DEV_BUILD
++lockCount;
#endif
}
bool tryEnter(const char* aReason)
{
int rc = pthread_mutex_trylock(&mlock);
if (rc == EBUSY)
return false;
if (rc)
system_call_failed::raise("pthread_mutex_trylock", rc);
#ifdef DEV_BUILD
reason(aReason);
++lockCount;
#endif
return true;
}
void leave()
{
#ifdef DEV_BUILD
fb_assert(lockCount > 0);
--lockCount;
#endif
int rc = pthread_mutex_unlock(&mlock);
if (rc)
{
#ifdef DEV_BUILD
++lockCount;
#endif
system_call_failed::raise("pthread_mutex_unlock", rc);
}
}
#ifdef DEV_BUILD
bool locked()
{
// first of all try to enter the mutex
// this will help to make sure it's not locked by other thread
if (!tryEnter(FB_FUNCTION))
{
return false;
}
// make sure mutex was already locked
bool rc = lockCount > 1;
// leave to release lock, done by us in tryEnter
leave();
return rc;
}
#endif
public:
static void initMutexes();
private:
// Forbid copying
Mutex(const Mutex&);
Mutex& operator=(const Mutex&);
}
|
c
| 12 | 0.65812 | 64 | 16.56 | 100 |
inline
|
def getPrice(ticker):
'''This sub-function will take as input the stock symbol as a string and
returns the current price, as a float.'''
websiteName = 'https://finance.yahoo.com/quote/{tic}?p={tic}'.format(tic=ticker.upper())
htmlPage = urlopen(websiteName)
htmlText = htmlPage.read().decode('utf-8')
lookForStart = 'Price":{"raw":'
lookForEnd = ',"fmt":'
lookFor = lookForStart+'.*?'+lookForEnd
#here's a clip from manual searching:
'''Price":{"raw":47.785,"fmt":"47.78"}'''
#Find the text
priceSpot = re.search(lookFor,htmlText,re.IGNORECASE)
price = priceSpot.group()
#Remove the idenifiers
price = re.sub(lookForStart,'',price)
price = re.sub(lookForEnd,'',price)
price = float(price)
return price
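# Usage sketch (live network call; assumes Yahoo still embeds the "Price":{"raw":...}
# blob that this scraper depends on):
# price = getPrice('msft')
# print('MSFT last price: %.2f' % price)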
|
python
| 10 | 0.643678 | 92 | 36.333333 | 21 |
inline
|
def test_filter_with_or_logic(self):
"""
Test if data provider correctly applies OR filters
"""
for i in range(10):
DummyModel.objects.create(
name="%idummy%i" % ((i%5), i),
number = i,
description="Some Dummy Description"
)
querystring_data = {
'skip': 0,
'take': 5,
'pageSize': 5,
'page': 1,
'filter': {
'logic': 'or',
'filters': [
{'operator': 'startswith', 'field': 'name', 'value': '1du'},
{'operator': 'gt', 'field': 'number', 'value': 7}
]
}
}
request = self.factory.get(
"/?%s" % builder.build(querystring_data),
HTTP_ACCEPT_ENCODING='application/json'
)
response = self.view(request)
json_response = json.loads(response.content)
self.assertEquals(json_response['result'], 1)
self.assertLessEqual(len(json_response['payload']), 5)
self.assertTrue(json_response.has_key('count'))
self.assertEquals(json_response['count'], 4) # i= 1,6, 8, 9
for item in json_response['payload']:
if(item['fields']['name'].lower()[:3] == '1du'):
self.assertEqual(item['fields']['name'].lower()[:3], '1du')
elif(item['fields']['number']>7):
self.assertGreater(item['fields']['number'], 7)
else:
self.fail()
|
python
| 16 | 0.60396 | 65 | 26.568182 | 44 |
inline
|
using System.Collections.Generic;
using Autodesk.Revit.DB;
namespace CarbonEmissionTool.Models
{
public class CurveLoopUtils
{
        /// <summary>
        /// Creates a single-item list of <see cref="CurveLoop"/>s from a rectangle definition (origin x/y plus width and height).
        /// </summary>
        public static List<CurveLoop> GenerateCurveLoop(Dictionary<string, object> rectangle, double width, double height, double smallCurveTolerance, out XYZ ptTopLeft)
{
XYZ origin = new XYZ((double)rectangle["x"], (double)rectangle["y"], 0.0);
Transform transformX = Transform.CreateTranslation(new XYZ(width, 0.0, 0.0));
Transform transformY = Transform.CreateTranslation(new XYZ(0.0, height, 0.0));
XYZ ptBottomRight = transformX.OfPoint(origin);
XYZ ptTopRight = transformY.OfPoint(ptBottomRight);
ptTopLeft = new XYZ(origin.X, ptTopRight.Y, 0.0);
            List<XYZ> cornerPoints = new List<XYZ> { origin, ptBottomRight, ptTopRight, ptTopLeft };
if ((origin.DistanceTo(ptTopLeft) <= smallCurveTolerance) | (origin.DistanceTo(ptBottomRight) <= smallCurveTolerance))
return null;
CurveLoop curveLoop = new CurveLoop();
for (int i = 0; i < cornerPoints.Count; i++)
{
XYZ ptStart = cornerPoints[i];
XYZ ptEnd = cornerPoints[i + 1 == cornerPoints.Count ? 0 : i + 1];
Line lnEdge = Line.CreateBound(ptStart, ptEnd);
curveLoop.Append(lnEdge);
}
            return new List<CurveLoop> { curveLoop };
}
}
}
|
c#
| 19 | 0.619188 | 169 | 39.47619 | 42 |
starcoderdata
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = "xh"
# Date: 2019/11/15
ip_a = '''
eth0 Link encap:Ethernet HWaddr 00:0C:29:20:5D:1A
inet addr:10.0.0.200 Bcast:10.0.0.255 Mask:255.255.255.0
inet6 addr: fe80::20c:29ff:fe20:5d1a/64 Scope:Link
UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1
RX packets:1678 errors:0 dropped:0 overruns:0 frame:0
TX packets:918 errors:0 dropped:0 overruns:0 carrier:0
collisions:0 txqueuelen:1000
RX bytes:150792 (147.2 KiB) TX bytes:72371 (70.6 KiB)
eth1 Link encap:Ethernet HWaddr 00:0C:29:20:5D:24
inet addr:172.16.1.200 Bcast:172.16.1.255 Mask:255.255.255.0
inet6 addr: fe80::20c:29ff:fe20:5d24/64 Scope:Link
UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1
RX packets:0 errors:0 dropped:0 overruns:0 frame:0
TX packets:24 errors:0 dropped:0 overruns:0 carrier:0
collisions:0 txqueuelen:1000
RX bytes:0 (0.0 b) TX bytes:1656 (1.6 KiB)
lo Link encap:Local Loopback
inet addr:127.0.0.1 Mask:255.0.0.0
inet6 addr: ::1/128 Scope:Host
UP LOOPBACK RUNNING MTU:65536 Metric:1
RX packets:245 errors:0 dropped:0 overruns:0 frame:0
TX packets:245 errors:0 dropped:0 overruns:0 carrier:0
collisions:0 txqueuelen:0
RX bytes:22839 (22.3 KiB) TX bytes:22839 (22.3 KiB)
'''
def nicinfo():
raw_data = ip_a.strip().split("\n")
# print("raw_data", raw_data)
nic_dict = {}
next_ip_line = False
last_mac_addr = None
for line in raw_data:
if next_ip_line:
next_ip_line = False
nic_name = last_mac_addr.split()[0]
mac_addr = last_mac_addr.split("HWaddr")[1].strip()
row_ip_addr = line.split("inet addr:")
# print("row_ip", row_ip_addr)
row_bcast = line.split("Bcast:")
row_netmask = line.split("Mask:")
# print(len(row_ip_addr))
if len(row_ip_addr) > 1:
ip_addr = row_ip_addr[1].split()[0]
bcast = row_bcast[1].split()[0]
netmask = row_netmask[1].split()[0]
else:
ip_addr = None
bcast = None
netmask = None
if mac_addr not in nic_dict:
nic_dict[mac_addr] = {
"nic_name": nic_name,
"mac_addr": mac_addr,
"ip_addr": ip_addr,
"bcast": bcast,
"netmask": netmask,
"bonding": 0,
"model": "unkown"
}
# print("nic_dict", nic_dict)
else:
                # this MAC address was already seen, so record this NIC as a bonded interface
if "%s_bonding_addr" % (mac_addr) not in nic_dict:
random_mac_addr = "%s_bonding_addr" % mac_addr
else:
random_mac_addr = "%s_bonding_addr2" % mac_addr
nic_dict[random_mac_addr] = {
"nic_name": nic_name,
"mac_addr": random_mac_addr,
"ip_addr": ip_addr,
"bcast": bcast,
"netmask": netmask,
"bonding": 1,
"model": "unkown"
}
if "HWaddr" in line:
next_ip_line = True
last_mac_addr = line
nic_list = []
for k, v in nic_dict.items():
print(k, v)
nic_list.append(v)
return {"nic": nic_list}
print("nicinfo", nicinfo())
|
python
| 17 | 0.500547 | 72 | 37.87234 | 94 |
starcoderdata
|
import React from 'react';
import { Grid, Typography } from '@material-ui/core';
import styled from 'styled-components';
import { useTranslation } from 'react-i18next';
import { DC2019Result } from '@/data/ElectionResults';
import VoteVsSeatChart from '@/components/charts/VoteVsSeat';
import { calculateSeatBoxForPrimary } from '@/utils';
import { withLanguage, getLocalizedPath } from '@/utils/i18n';
import { Link, navigate, useStaticQuery, graphql } from 'gatsby';
import { PeopleBox } from '@/components/People';
import ResponsiveSections from '@/components/ResponsiveSections';
import List from '@/components/List';
import SEO from '@/components/seo';
import { CompactImageLinkBox } from '@/components/LinkBox';
import { useTheme } from '@material-ui/core/styles';
const Nav = styled.div`
padding-bottom: ${props => props.theme.spacing(1)}px;
overflow-x: auto;
white-space: nowrap;
.nav-link {
color: ${props => props.theme.palette.text.primary};
text-decoration: none;
font-size: 14px;
margin-bottom: ${props => props.theme.spacing(1)}px;
margin-right: ${props => props.theme.spacing(1)}px;
padding: 3px 8px;
}
.active {
font-weight: 700;
color: #ffffff;
background: ${props => props.theme.palette.secondary.main};
border-radius: 5px;
}
`;
const Header = styled(Grid)`
margin-bottom: ${props => props.theme.spacing(2)}px;
.title-box {
margin-right: ${props => props.theme.spacing(3)}px;
}
}
.title {
font-size: 24px;
font-weight: 700;
}
`;
const CandidatesWrapper = styled.div`
margin-top: ${props => props.theme.spacing(2)}px;
display: grid;
grid-row-gap: ${props => props.theme.spacing(1)}px;
grid-column-gap: ${props => props.theme.spacing(1.5)}px;
grid-template-columns: repeat(2, 1fr);
${props => props.theme.breakpoints.up('sm')} {
grid-template-columns: repeat(4, 1fr);
}
${props => props.theme.breakpoints.up('md')} {
grid-template-columns: repeat(6, 1fr);
}
.candi-box {
display: flex;
align-items: center;
}
`;
const PrimaryTemplate = ({
pageContext: { uri, allConstituencies, constituency, candidates, assets },
}) => {
const { t, i18n } = useTranslation();
const { site } = useStaticQuery(
graphql`
query {
site {
siteMetadata {
siteUrl
}
}
}
`
);
const theme = useTheme();
const sections = [];
if (assets && assets.filter(asset => asset.type === 'youtube').length) {
sections.push({
name: 'election_forum',
title: t('election_forum'),
content: (
{assets.map(asset => (
<CompactImageLinkBox
key={asset.id}
onClick={() => {
window.open(
`https://www.youtube.com/watch?v=${asset.asset_id}`,
'_blank'
);
}}
image={(
<img
style={{
height: '100%',
}}
src={`https://i.ytimg.com/vi/${asset.asset_id}/hqdefault.jpg`}
alt={asset.title}
/>
)}
title={asset.title}
subTitle={asset.channel}
/>
))}
),
});
}
return (
<>
<SEO
uri={uri}
titleOveride={`${t('primary.title')} | ${withLanguage(
i18n,
constituency,
'name'
)}`}
// TODO: duplicated entries, filter out in SEO later?
meta={[
{
property: 'og:title',
content: `${t('primary.title')} | ${withLanguage(
i18n,
constituency,
'name'
)}`,
},
{
property: 'og:description',
content: withLanguage(i18n, constituency, 'primary_description'),
},
]}
/>
<Nav theme={theme}>
{allConstituencies.map(c => (
<Link
key={c.node.key}
className={`nav-link ${
c.node.key === constituency.key ? 'active' : ''
}`}
to={getLocalizedPath(i18n, `/primary/${c.node.key}`)}
>
{withLanguage(i18n, c.node, 'alias')}
))}
<Header container theme={theme}>
<Grid item>
<Grid
container
direction="column"
justify="center"
className="title-box"
>
<div className="title">
{withLanguage(i18n, constituency, 'name')}
<Typography variant="body2" color="textSecondary">
{withLanguage(i18n, constituency, 'primary_rule')}
<Typography variant="body2" color="textSecondary">
{t('list_count', { list_count: candidates.length })}
<Grid item>
{DC2019Result[constituency.key] && (
<VoteVsSeatChart
title={{
vote: t('dc2019_demo_beijing_ratio'),
seat:
Number(constituency.target) > 0
? t('demo_target', { target: constituency.target })
: t('demo_target_not_decide'),
}}
votes={DC2019Result[constituency.key].votes}
seats={calculateSeatBoxForPrimary(constituency)}
/>
)}
{withLanguage(i18n, constituency, 'primary_description') && (
<Typography
className="block"
variant="body2"
dangerouslySetInnerHTML={{
__html: withLanguage(i18n, constituency, 'primary_description'),
}}
/>
)}
<CandidatesWrapper theme={theme}>
{candidates
.sort((a, b) => {
if (a.node.primary_list_no && b.node.primary_list_no) {
if (
Number(a.node.primary_list_no) > Number(b.node.primary_list_no)
) {
return 1;
}
return -1;
}
if (a.node.name_en > b.node.name_en) {
return 1;
}
return -1;
})
.map(c => (
<Grid
item
key={withLanguage(i18n, c.node, 'name')}
className="clickable"
>
<PeopleBox
item
imgUrl={`${site.siteMetadata.siteUrl}/images/avatars/${c.node.uuid}.png`}
key={c.node.name_zh}
info={c.node}
name={withLanguage(i18n, c.node, 'name')}
subText={
withLanguage(i18n, c.node, 'title') &&
withLanguage(i18n, c.node, 'title')
.split(/[,、,]+/)
.shift()
}
onClick={() => {
navigate(
getLocalizedPath(
i18n,
`/profile/${c.node.uuid}/${c.node.name_zh}`
)
);
}}
/>
))}
<ResponsiveSections
sections={sections}
pageName={`primary_${constituency.name_zh}`}
/>
);
};
export default PrimaryTemplate;
|
javascript
| 30 | 0.490957 | 89 | 27.484848 | 264 |
starcoderdata
|
gboolean
cmd_script(int argc, char** argv)
{
if(argc < 1)
return TRUE;
char* path = argv[0];
char* content = read_file(path);
if(!content)
{
gchar* message = g_strdup_printf("Could not open or read file '%s'", path);
notify(ERROR, message);
g_free(message);
return FALSE;
}
/* search for existing script to overwrite or reread it */
ScriptList* sl = Jumanji.Global.scripts;
while(sl && sl->next != NULL)
{
if(!strcmp(sl->path, path))
{
sl->path = path;
sl->content = content;
return TRUE;
}
sl = sl->next;
}
/* load new script */
ScriptList* entry = malloc(sizeof(ScriptList));
if(!entry)
out_of_memory();
entry->path = path;
entry->content = content;
entry->next = NULL;
/* append to list */
if(!Jumanji.Global.scripts)
Jumanji.Global.scripts = entry;
if(sl)
sl->next = entry;
return TRUE;
}
|
c
| 11 | 0.585233 | 79 | 17.816327 | 49 |
inline
|
@Override
public void delete(BulkActionContext bac, boolean deleteNext) {
if (deleteNext) {
// Recursively delete all versions of the container
for (NodeGraphFieldContainer next : getNextVersions()) {
next.delete(bac);
}
}
// Invoke common field removal operations
super.delete(bac);
// TODO delete linked aggregation nodes for node lists etc
for (BinaryGraphField binaryField : outE(HAS_FIELD).has(BinaryGraphFieldImpl.class).frameExplicit(BinaryGraphFieldImpl.class)) {
binaryField.removeField(bac, this);
}
for (MicronodeGraphField micronodeField : outE(HAS_FIELD).has(MicronodeGraphFieldImpl.class).frameExplicit(MicronodeGraphFieldImpl.class)) {
micronodeField.removeField(bac, this);
}
// We don't need to handle node fields since those are only edges and will automatically be removed
// Delete the container from all branches and types
getBranchTypes().forEach(tuple -> {
String branchUuid = tuple.v1();
ContainerType type = tuple.v2();
if (type != ContainerType.INITIAL) {
bac.batch().delete(this, branchUuid, type, false);
}
});
getElement().remove();
bac.inc();
}
|
java
| 14 | 0.727986 | 142 | 31.8 | 35 |
inline
|
# -*- coding: utf-8 -*-
#
# Copyright (c) nexB Inc. and others. All rights reserved.
# ScanCode is a trademark of nexB Inc.
# SPDX-License-Identifier: Apache-2.0
# See http://www.apache.org/licenses/LICENSE-2.0 for the license text.
# See https://github.com/nexB/scancode-plugins for support or download.
# See https://aboutcode.org for more information about nexB OSS projects.
#
import re
import logging
from cluecode import finder
LOG = logging.getLogger(__name__)
# TODO: beef up.
# add detailed annotation for each of the common MODULE_XXX macros
# add support for checking the GPL symbols GPLONLY and so on
# add support for finding the init_module and module_init functions defs
# add separate support for finding all linux includes
LKM_REGEXES = [
# ('lkm-header-include', 'include[^\n]*
    ('lkm-header-include', 'include[^\n]*<linux/module\.h>'),
('lkm-make-flag', '\-DMODULE'),
('lkm-make-flag', '\_\_KERNEL\_\_'),
('lkm-license', 'MODULE_LICENSE.*\("(.*)"\);'),
('lkm-symbol', 'EXPORT_SYMBOL.*\("(.*)"\);'),
('lkm-symbol-gpl', 'EXPORT_SYMBOL_GPL.*\("(.*)"\);'),
]
def lkm_patterns():
return [(key, re.compile(regex),) for key, regex in LKM_REGEXES]
def find_lkms(location):
"""
Yield possible LKM-related clues found in file at location.
"""
matches = finder.find(location, lkm_patterns())
matches = finder.apply_filters(matches, finder.unique_filter)
for key, lkm_clue, _line, _lineno in matches:
yield key, lkm_clue
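# Usage sketch ('/path/to/driver.c' is a placeholder for any readable source file):
# for clue_key, clue_value in find_lkms('/path/to/driver.c'):
#     print(clue_key, clue_value)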
|
python
| 9 | 0.674751 | 85 | 33.212766 | 47 |
starcoderdata
|
FSlateEditorStyle::FStyle::FStyle( const TWeakObjectPtr< UEditorStyleSettings >& InSettings )
: FSlateStyleSet("EditorStyle")
// Note, these sizes are in Slate Units.
// Slate Units do NOT have to map to pixels.
, Icon7x16(7.0f, 16.0f)
, Icon8x4(8.0f, 4.0f)
, Icon16x4(16.0f, 4.0f)
, Icon8x8(8.0f, 8.0f)
, Icon10x10(10.0f, 10.0f)
, Icon12x12(12.0f, 12.0f)
, Icon12x16(12.0f, 16.0f)
, Icon14x14(14.0f, 14.0f)
, Icon16x16(16.0f, 16.0f)
, Icon16x20(16.0f, 20.0f)
, Icon20x20(20.0f, 20.0f)
, Icon22x22(22.0f, 22.0f)
, Icon24x24(24.0f, 24.0f)
, Icon25x25(25.0f, 25.0f)
, Icon32x32(32.0f, 32.0f)
, Icon40x40(40.0f, 40.0f)
, Icon48x48(48.0f, 48.0f)
, Icon64x64(64.0f, 64.0f)
, Icon36x24(36.0f, 24.0f)
, Icon128x128(128.0f, 128.0f)
// These are the colors that are updated by the user style customizations
, DefaultForeground_LinearRef( MakeShareable( new FLinearColor( 0.72f, 0.72f, 0.72f, 1.f ) ) )
, InvertedForeground_LinearRef( MakeShareable( new FLinearColor( 0, 0, 0 ) ) )
, SelectorColor_LinearRef( MakeShareable( new FLinearColor( 0.701f, 0.225f, 0.003f ) ) )
, SelectionColor_LinearRef( MakeShareable( new FLinearColor( 0.728f, 0.364f, 0.003f ) ) )
, SelectionColor_Subdued_LinearRef( MakeShareable( new FLinearColor( 0.807f, 0.596f, 0.388f ) ) )
, SelectionColor_Inactive_LinearRef( MakeShareable( new FLinearColor( 0.25f, 0.25f, 0.25f ) ) )
, SelectionColor_Pressed_LinearRef( MakeShareable( new FLinearColor( 0.701f, 0.225f, 0.003f ) ) )
// These are the Slate colors which reference those above; these are the colors to put into the style
, DefaultForeground( DefaultForeground_LinearRef )
, InvertedForeground( InvertedForeground_LinearRef )
, SelectorColor( SelectorColor_LinearRef )
, SelectionColor( SelectionColor_LinearRef )
, SelectionColor_Subdued( SelectionColor_Subdued_LinearRef )
, SelectionColor_Inactive( SelectionColor_Inactive_LinearRef )
, SelectionColor_Pressed( SelectionColor_Pressed_LinearRef )
, InheritedFromBlueprintTextColor(FLinearColor(0.25f, 0.5f, 1.0f))
, Settings( InSettings )
{
}
|
c++
| 9 | 0.724322 | 102 | 40.3 | 50 |
inline
|
import numpy as np
import matplotlib.pyplot as plt
import copy
from AEC import *
from Experiment import *
from Filter_Rect_LinSpaced import *
from Filter_Rect_LogSpaced_AEC import *
from numpy.linalg import *
from random import sample
from time import time
class AEC_Badel(AEC) :
def __init__(self, dt):
# Define variables for optimal linear filter K_opt
self.K_opt = Filter_Rect_LogSpaced_AEC(length=150.0, binsize_lb=dt, binsize_ub=5.0, slope=10.0, clamp_period=0.5)
self.K_opt_all = [] # List of K_opt, store bootstrap repetitions
# Define variables for electrode filter
self.K_e = Filter_Rect_LinSpaced()
self.K_e_all = [] # List of K_e, store bootstrap repetitions
# Meta parameters used in AEC = Step 1 (compute optimal linear filter)
self.p_nbRep = 15 # nb of times the filer is estimated (each time resampling from available data)
self.p_pctPoints = 0.8 # between 0 and 1, fraction of datapoints in subthreshold recording used in bootstrap at each repetition
# Meta parameters used in AEC = Step 2 (estimation of Ke given Kopt)
self.p_Ke_l = 7.0 # ms, length of the electrode filter Ke
self.p_b0 = [15.0] # MOhm/ms, initial condition for exp fit on the tail of Kopt (amplitude)
self.p_tau0 = [30.0] # ms, initial condition for exp fit on the tail of Kopt (timescale)
self.p_expFitRange = [3.0, 50.0] # ms, range were to perform exp fit on the tail of K_opt
self.p_derivative_flag = False
##############################################################################################
# ABSTRACT METHODS FROM AEC THAT HAVE TO BE IMPLEMENTED
##############################################################################################
def performAEC(self, experiment):
print "\nPERFORM ACTIVE ELECTRODE COMPENSATION (Badel method)..."
# Estimate electrode filter
self.computeElectrodeFilter(experiment)
# Estimate electrode filter
self.compensateAllTraces(experiment)
##############################################################################################
# ESTIMATE ELECTRODE FILTER
# This function implements two steps:
# Step 1: compute optimal linear filter
# Step 2: compute electrode filter
##############################################################################################
def computeElectrodeFilter(self, expr) :
"""
Extract the optimal linter filter between I and V_rec.
The regression is performed using the tempral derivative of the signals (see Badel et al 2008).
To speed up, the optimal linear filter is expanded in rectangular basis functions.
"""
print "\nEstimate electrode properties..."
dt = expr.dt
# estimate optimal linear filter on I_dot - V_dot
if self.p_derivative_flag :
# Compute temporal derivative of the signal
V_dot = np.diff(expr.AEC_trace.V_rec)/dt
I_dot = np.diff(expr.AEC_trace.I)/dt
# estimate optimal linear filter on I - V
else :
# Just remove mean from signals (do not use derivative)
V_dot = expr.AEC_trace.V_rec - np.mean(expr.AEC_trace.V_rec)
I_dot = expr.AEC_trace.I - np.mean(expr.AEC_trace.I)
# Get ROI indices and remove initial part
ROI_selection = expr.AEC_trace.getROI_cutInitialSegments(self.K_opt.getLength())
ROI_selection = ROI_selection[:-1]
ROI_selection_l = len(ROI_selection)
# Build full X matrix for linear regression
X = self.K_opt.convolution_ContinuousSignal_basisfunctions(I_dot, dt)
nbPoints = int(self.p_pctPoints*ROI_selection_l)
# Estimate electrode filter on multiple repetitions by bootstrap
for rep in np.arange(self.p_nbRep) :
############################################
# ESTIMATE OPTIMAL LINEAR FILETR K_opt
############################################
# Sample datapoints from ROI
ROI_selection_sampled = sample(ROI_selection, nbPoints)
Y = np.array(V_dot[ROI_selection_sampled])
X_tmp = X[ROI_selection_sampled, :]
# Compute optimal linear filter
XTX = np.dot(np.transpose(X_tmp), X_tmp)
XTX_inv = inv(XTX)
XTY = np.dot(np.transpose(X_tmp), Y)
K_opt_coeff = np.dot(XTX_inv, XTY)
K_opt_coeff = K_opt_coeff.flatten()
############################################
# ESTIMATE ELECTRODE FILETR K_e
############################################
# Define K_opt
K_opt_tmp = copy.deepcopy(self.K_opt)
K_opt_tmp.setFilter_Coefficients(K_opt_coeff)
self.K_opt_all.append(K_opt_tmp)
# Fit exponential on tail of K_opt
(t,K_opt_tmp_interpol) = K_opt_tmp.getInterpolatedFilter(dt)
(K_opt_tmp_expfit_t, K_opt_tmp_expfit) = K_opt_tmp.fitSumOfExponentials(len(self.p_b0), self.p_b0, self.p_tau0, ROI=self.p_expFitRange, dt=dt)
# Generate electrode filter
Ke_coeff_tmp = (K_opt_tmp_interpol - K_opt_tmp_expfit)[ : int(self.p_Ke_l/dt) ]
Ke_tmp = Filter_Rect_LinSpaced(length=self.p_Ke_l, nbBins=len(Ke_coeff_tmp))
Ke_tmp.setFilter_Coefficients(Ke_coeff_tmp)
(Ke_tmp_expfit_t, Ke_tmp_expfit) = Ke_tmp.fitSumOfExponentials(1, [60.0], [0.5], ROI=[0.0,7.0], dt=dt)
self.K_e_all.append(Ke_tmp)
print "Repetition ", (rep+1), " R_e (MOhm) = %0.2f, " % (Ke_tmp.computeIntegral(dt))
# Average filters obtained through bootstrap
self.K_opt = Filter.averageFilters(self.K_opt_all)
self.K_e = Filter.averageFilters(self.K_e_all)
print "Done!"
##############################################################################################
# FUCTIONS TO APPLY AEC TO ALL TRACES IN THE EXPERIMENT
##############################################################################################
def compensateAllTraces(self, expr) :
print "\nCompensate experiment"
print "AEC trace..."
self.deconvolveTrace(expr.AEC_trace)
print "Training set..."
for tr in expr.trainingset_traces :
self.deconvolveTrace(tr)
print "Test set..."
for tr in expr.testset_traces :
self.deconvolveTrace(tr)
print "Done!"
def deconvolveTrace(self, trace):
V_e = self.K_e.convolution_ContinuousSignal(trace.I, trace.dt)
V_aec = trace.V_rec - V_e
trace.V = V_aec
trace.AEC_flag = True
trace.detectSpikesWithDerivative(threshold=15)
#trace.detectSpikes()
#####################################################################################
# FUNCTIONS FOR PLOTTING
#####################################################################################
def plot(self):
# Plot optimal linear filter K_opt
Filter.plotAverageFilter(self.K_opt_all, 0.05, loglog=False, label_x='Time (ms)', label_y='Optimal linear filter (MOhm/ms)')
# Plot optimal linear filter K_e
Filter.plotAverageFilter(self.K_e_all, 0.05, label_x='Time (ms)', label_y='Electrode filter (MOhm/ms)')
plt.show()
def plotKopt(self):
# Plot optimal linear filter K_opt
Filter.plotAverageFilter(self.K_opt_all, 0.05, loglog=False, label_x='Time (ms)', label_y='Optimal linear filter (MOhm/ms)')
plt.show()
def plotKe(self):
# Plot optimal linear filter K_e
Filter.plotAverageFilter(self.K_e_all, 0.05, label_x='Time (ms)', label_y='Electrode filter (MOhm/ms)', plot_expfit=False)
plt.show()
|
python
| 15 | 0.503977 | 154 | 39.909091 | 209 |
starcoderdata
|
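The heart of computeElectrodeFilter() above is an ordinary least-squares solve of V_dot ≈ X·k over a convolution design matrix built from the injected current. The self-contained numpy sketch below reproduces just that normal-equation step on synthetic data; the 5-tap kernel, noise level and signal length are made up and carry no physiological meaning.

# Stand-alone illustration of the normal-equation solve used above:
# k = (X^T X)^{-1} X^T Y, with X holding delayed copies of the input current.
import numpy as np

rng = np.random.default_rng(0)
n = 5000
I_inj = rng.standard_normal(n)                      # synthetic injected current
true_k = np.array([4.0, 2.0, 1.0, 0.5, 0.25])       # made-up 5-tap kernel

# Design matrix: column j is the current delayed by j samples (rectangular basis).
X = np.column_stack([np.concatenate([np.zeros(j), I_inj[:n - j]])
                     for j in range(len(true_k))])
V_dot = X @ true_k + 0.01 * rng.standard_normal(n)  # noisy "recorded" signal

# Same algebra as in computeElectrodeFilter(), written with a linear solve.
k_est = np.linalg.solve(X.T @ X, X.T @ V_dot)
print(np.round(k_est, 3))                           # close to true_k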
import api
import requests
import re
import urllib.parse
def talk_to_ed(message):
print(urllib.parse.quote_plus(message))
ed_endpoint = f"https://in.bot/api/bot_gateway?pure=1&js=0&bot_id=133&bot_token=
try:
response = requests.get(ed_endpoint).text
print(response)
        # Strip anchor markup left in the gateway response (closing-tag literal reconstructed).
        response = re.sub(r'<a href=(.*)\">', '', response).replace("</a>", "")
return response
except Exception as e:
return f"ops o ed moreu........ {e}"
def run_ed(msg):
chat = msg["chat"]["id"]
msg_text = msg["text"]
pattern = re.compile("^(?:[Ee]d,? (.*))|(?:(.*),? [Ee]d\??)$")
match = pattern.search(msg_text)
if match:
if match.group(1) is not None:
response = talk_to_ed(match.group(1))
elif match.group(2) is not None:
response = talk_to_ed(match.group(2))
api.send_message(chat, response)
|
python
| 14 | 0.595308 | 142 | 29.088235 | 34 |
starcoderdata
|
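The trigger logic in run_ed() hinges on a single regular expression that accepts either an "ed, ..." prefix or a "..., ed" suffix. A small self-contained check of that pattern is sketched below; the example strings are invented and no Telegram or HTTP call is made.

# Stand-alone check of the trigger regex used by run_ed(); example texts are made up.
import re

pattern = re.compile(r"^(?:[Ee]d,? (.*))|(?:(.*),? [Ee]d\??)$")

for text in ["ed, how are you?", "what's new, ed?", "no trigger here"]:
    match = pattern.search(text)
    if match:
        # Group 1 is filled for the prefix form, group 2 for the suffix form.
        print(text, "->", match.group(1) or match.group(2))
    else:
        print(text, "-> no trigger")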
<form class="required clear" action="<?php echo urlPath(sketch("menu_guid")); ?>?blogcat=<?php echo $i; ?>" method="post">
<ul class="forms">
<div style="position:relative;">
        <label>your comment here...</label>
        <textarea name="edit" class="required rich:true" cols="50" rows="3"></textarea>
<input type="hidden" name="addcomment" value="addcomment">
<input type="hidden" value="<?php echo $i; ?>" class="required integer" name="blog">
<input type="hidden" name="token" value="<?php helper("session"); $tok = md5(rand()); sessionSet("token",$tok,false); echo sessionGet("token"); ?>" class="required"/>
<input type="hidden" name="blogcat" value="<?php echo $i; ?>" />
<input type="hidden" name="under" value="<?php echo $under; ?>"/>
<button type="submit" class="clear positive"><span class="icons check"> Comment
|
php
| 9 | 0.626531 | 170 | 50.631579 | 19 |
starcoderdata
|