| code (string, 0 to 29.6k chars) | language (string, 9 classes) | AST_depth (int64, 3 to 30) | alphanumeric_fraction (float64, 0.2 to 0.86) | max_line_length (int64, 13 to 399) | avg_line_length (float64, 5.02 to 139) | num_lines (int64, 7 to 299) | source (string, 4 classes) |
---|---|---|---|---|---|---|---|
using System;
using System.Collections.Generic;
using System.IO;
namespace Qart.Core.DataStore
{
public class ExtendedDataStore : IDataStore
{
private readonly IDataStore _dataStore;
private readonly Func<IDataStore, string, IDataStore, Stream> _streamFunc;
public ExtendedDataStore(IDataStore dataStore)
{
_dataStore = dataStore;
}
public ExtendedDataStore(IDataStore dataStore, Func<IDataStore, string, IDataStore, Stream> streamFunc)
{
_dataStore = dataStore;
_streamFunc = streamFunc;
}
public Stream GetReadStream(string itemId)
{
if (_dataStore.Contains(itemId))
{
return _dataStore.GetReadStream(itemId);
}
string itemRef = GetItemRef(itemId);
if (_dataStore.Contains(itemRef))
{
string target = CombinePaths(Path.GetDirectoryName(itemId), string.Empty, _dataStore.GetContent(itemRef));
return GetReadStream(target);
}
string func = GetItemFunc(itemId);
if (_dataStore.Contains(func))
{
return _streamFunc(_dataStore, func, this);
}
itemRef = GetRedirectedItemId(itemId);
if (itemRef != null && Contains(itemRef))
{
return GetReadStream(itemRef);
}
return null;
}
public Stream GetWriteStream(string itemId)
{
return _dataStore.GetWriteStream(itemId);
}
public bool Contains(string itemId)
{
if (_dataStore.Contains(itemId) || _dataStore.Contains(GetItemRef(itemId)))
return true;
var redirectedId = GetRedirectedItemId(itemId);
if (redirectedId != null && itemId != redirectedId)
{
return Contains(redirectedId);
}
string func = GetItemFunc(itemId);
return _dataStore.Contains(func);
}
public IEnumerable<string> GetItemIds(string tag)
{
//TODO
return _dataStore.GetItemIds(tag);
}
public IEnumerable<string> GetItemGroups(string group)
{
//TODO
return _dataStore.GetItemGroups(group);
}
private string GetRedirectedItemId(string itemId)
{
//TODO redirection logic should be rewritten as it won't work with items in subfolders
//first redirection point should be found and then itemId should be rewriten in accordance to redirection
var groupId = Path.GetDirectoryName(itemId);
var referenceItemId = Path.Combine(groupId, ".ref");
if (_dataStore.Contains(referenceItemId))
{
var reference = _dataStore.GetContent(referenceItemId).Trim();
return CombinePaths(groupId, reference, Path.GetFileName(itemId));
}
return null;
}
private string CombinePaths(string currentGroupId, string redirectedGroupId, string itemId)
{
var redirectedItemId = Path.Combine(redirectedGroupId, itemId);
if (!Path.IsPathRooted(redirectedItemId))
{
redirectedItemId = Path.Combine(currentGroupId, redirectedGroupId, itemId);
}
return redirectedItemId;
}
private string GetItemRef(string name)
{
return name + ".ref";
}
private string GetItemFunc(string name)
{
return name + ".transform";
}
}
}
|
c#
| 19 | 0.567918 | 122 | 29.858333 | 120 |
starcoderdata
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
def Graph1(filepath):
data1 = pd.read_csv(filepath)
data1_clean = (data1
.dropna(axis=0)
.sort_values(by = "city", ascending = True)
.rename({'park.name': "Park Name", 'park.alias': "Park Alias", 'city': "City", 'state': "State"}, axis = 1)
.reset_index()
.drop(['index'], axis = 1)
.drop(["Country"], axis = 1)
)
labels = ['OH', 'CA', 'IL', 'NY', 'PA', 'MA', 'MO', 'MI', 'IN', 'TX', 'QC', 'MD', 'IA', 'FL', 'CT', 'AZ', 'WA', 'ON', 'DC']
new_df = data1_clean.loc[data1_clean['State'].isin(labels)]
return new_df
def Graph2(filepath):
fielding = pd.read_csv(filepath)
fielding1_clean = (fielding
.sort_values(by = "G", ascending = False)
[fielding["yearID"] > 1999]
.drop(["stint", "WP", "GS", "ZR", "lgID", "E", "DP", "InnOuts", "PO", "A", "PB", "SB", "CS", "yearID", "POS"], axis =1)
.rename({'playerID': "Player ID", 'yearID': "Year", 'teamID': "Team", 'G': "Games Played"}, axis = 1)
.reset_index().drop(['index'], axis = 1)
.dropna(axis = 0)
.loc[lambda df: df["Games Played"] > 150]  # filter on the renamed column of the cleaned frame
)
labels = ['PHI', 'SEA', 'ATL', 'BAL', 'LAN', 'NYA', 'CHN', 'KCA', 'OAK', 'FLO', 'COL', 'TEX', 'MIL', 'CIN', 'ARI', 'SDN', 'HOU', 'TOR', 'CHA', 'DET', 'BOS', 'WAS', 'PIT', 'SLN', 'NYN', 'CLE', 'MON', 'SFN', 'ANA', 'LAA', 'MIA', 'TBA', 'MIN']
Team_clean = fielding1_clean.loc[fielding1_clean['Team'].isin(labels)]
return Team_clean
def Graph3(filepath):
fielding = pd.read_csv(filepath)
fielding2_clean = (fielding
[fielding["yearID"] > 1999]
.sort_values(by = "PO", ascending = False)
.drop(["stint","SB","teamID",'yearID',"WP","GS","PB","WP","SB","CS","ZR","A","E","DP","InnOuts","G","lgID"], axis = 1)
.rename({'playerID': "Player ID", 'yearID': "Year", 'lgID': "League", 'G': "Games Played"}, axis = 1)
.reset_index()
.drop(['index'], axis = 1)
.loc[lambda df: df["PO"] > 0]  # filter on the cleaned frame itself
)
return fielding2_clean
|
python
| 22 | 0.455123 | 298 | 45.929825 | 57 |
starcoderdata
|
<?php
namespace app\controllers;
use app\models\Bill;
use Yii;
use yii\filters\AccessControl;
use yii\web\Controller;
use yii\filters\VerbFilter;
use app\models\LoginForm;
use app\models\ContactForm;
use yii\web\NotFoundHttpException;
class BillController extends Controller
{
public function behaviors()
{
return [
'verbs' => [
'class' => VerbFilter::className(),
'actions' => [
'delete' => ['post'],
],
],
'access' => [
'class' => AccessControl::className(),
'rules' => [
[
'allow' => true,
'actions' => ['index', 'view', 'create', 'update', 'delete', 'prices'],
'roles' => ['@'],
],
],
],
];
}
public function actionIndex()
{
$datas = Yii::$app->db->createCommand('SELECT * FROM warehouse LEFT JOIN unit ON warehouse.`idunit`=unit.`id` ')
->queryAll();
return $this->render('index', [
'datas' => $datas
]);
}
public function actionCreate()
{
$comboboxdata = Yii::$app->db->createCommand('SELECT * FROM unit')
->queryAll();
if (Yii::$app->request->post()) {
$idchungtu = $_REQUEST['idchungtu'];
$idproduct = $_REQUEST['idproduct'];
$productname = $_REQUEST['productname'];
$unit = $_REQUEST['unit'];
$count = $_REQUEST['count'];
$price = $_REQUEST['price'];
$cost = $_REQUEST['cost'];
$idnhacungcap = $_REQUEST['idnhacungcap'];
$note = $_REQUEST['note'];
$date = $_REQUEST['date'];
$sql = "insert into warehouse (idchungtu,idproduct,productname,unit,count,price,cost,idnhacungcap,note,date)
values (:idchungtu,:idproduct,:productname,:unit,:idnhacungcap,:note,:date)";
$parameters = array("idchungtu" => $idchungtu, "idproduct" => $idproduct, "productname" => $productname,
"unit" => $unit, "count" => $count, "price" => $price, "cost" => $cost, "idnhacungcap" => $idnhacungcap, "note" => $note, "date" => $date);
// Yii::$app->db->createCommand($sql)->insert($parameters)->execute();
Yii::$app->db->createCommand()->insert('warehouse', $parameters)->execute();
return $this->redirect(['/bill/index']);
}
return $this->render('create', [
'comboboxdata' => $comboboxdata
]);
}
public function actionUpdate()
{
$comboboxdata = Yii::$app->db->createCommand('SELECT * FROM unit')
->queryAll();
if (Yii::$app->request->post()) {
$idwarehouse=$_REQUEST['idwarehouse'];
$idchungtu = $_REQUEST['idchungtu'];
$idproduct = $_REQUEST['idproduct'];
$productname = $_REQUEST['productname'];
$unit = $_REQUEST['unit'];
$count = $_REQUEST['count'];
$price = $_REQUEST['price'];
$cost = $_REQUEST['cost'];
$idnhacungcap = $_REQUEST['idnhacungcap'];
$note = $_REQUEST['note'];
$date = $_REQUEST['date'];
$sql = "UPDATE warehouse set (idchungtu=".'$idchungtu'.") where idwarehouse=". $_REQUEST['idwarehouse'];
$parameters = array("idchungtu" => $idchungtu, "idproduct" => $idproduct, "productname" => $productname,
"unit" => $unit, "count" => $count, "price" => $price, "cost" => $cost, "idnhacungcap" => $idnhacungcap, "date" => $date);
// Yii::$app->db->createCommand($sql)->insert($parameters)->execute();
Yii::$app->db->createCommand()->update('warehouse', $parameters)->execute();
return $this->redirect(['/bill/index']);
}
$objWarehouse = $this->findModel($_REQUEST["id"]);
return $this->render('update', [
'comboboxdata' => $comboboxdata, 'objWarehouse' => $objWarehouse
]);
}
public function actionDelete($id)
{
$this->findModel($id)->delete();
return $this->redirect(['index']);
}
protected function findModel($id)
{
if (($model = Bill::findOne($id)) !== null) {
return $model;
} else {
throw new NotFoundHttpException('The requested page does not exist.');
}
}
}
|
php
| 17 | 0.505319 | 155 | 32.183824 | 136 |
starcoderdata
|
from typing import Optional
from urllib.parse import urlencode
import requests
from .errors import OneSocialOAuthError
class TokenGrant:
"""
An object holding the access token issued by OneSocial and its parameters.
access_token - the access token, str.
token_type - the token type, str.
expires_in - token lifetime in seconds from the moment of issue, int.
"""
def __init__(self, *, access_token, token_type, expires_in):
self.access_token = access_token
self.token_type = token_type
self.expires_in = expires_in
class OAuth:
"""
The OAuth class provides access to the OneSocial social-login API.
Full documentation: https://onesocial.dev/panel/docs/sociallogin/
"""
CODE = 'code'
TOKEN = 'token'
def __init__(self, *, client_id=None, client_secret=None):
"""
The Client ID and Client Secret can be obtained on the social-login
settings page: https://onesocial.dev/panel/oauthkeys/index/
"""
self.client_id = client_id
self.client_secret = client_secret
def init(
self, *,
network: str = None,
response_type: str = None,
redirect_uri: str = None,
state: Optional[str] = None,
) -> str:
"""
The first step of the user authentication flow. This method returns
the URL of the page the user should be redirected to in order to
start authentication.
network - social network identifier, str.
See the list of supported networks:
https://onesocial.dev/panel/docs/sociallogin/#networks
response_type - OAuth.CODE or OAuth.TOKEN.
This parameter selects the OAuth mode. Websites should use
OAuth.CODE. See the description of the differences between the two
modes:
https://onesocial.dev/panel/docs/sociallogin/#modes
redirect_uri - the URI the user is redirected to after authorization,
str. The domain of this URI must be listed among the domains in the
settings of the Client ID / Client Secret key pair.
state - any arbitrary string, optional, str. OneSocial will copy it
into redirect_uri.
Returns the URI of the authentication start page, str.
"""
url = 'https://onesocial.dev/api/sociallogin/init/{}/'.format(network)
query = {
'client_id': self.client_id,
'response_type': response_type,
'redirect_uri': redirect_uri,
}
if state:
query['state'] = state
return url + '?' + urlencode(query)
def token(
self, *,
code: str = None,
redirect_uri: str = None,
) -> TokenGrant:
"""
Requests an access token using an authorization code.
code - the authorization code, str.
redirect_uri - the Redirect URI that was passed to the init method
when authentication was started.
Returns a TokenGrant object.
"""
resp = requests.post('https://onesocial.dev/api/sociallogin/token/', {
'grant_type': 'authorization_code',
'code': code,
'redirect_uri': redirect_uri,
'client_secret': self.client_secret,
})
if resp.status_code < 200 or resp.status_code > 299:
try:
resp_json = resp.json()
error = resp_json['error']
error_description = resp_json['error_description']
except ValueError:
error = None
error_description = resp.text
raise OneSocialOAuthError(error_description, code=error)
resp_json = resp.json()
return TokenGrant(
access_token=resp_json['access_token'],
token_type=resp_json['token_type'],
expires_in=resp_json['expires_in'],
)
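# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal, hypothetical flow using the OAuth helper above. The client id/secret,
# redirect URI, network id and received_code are placeholders.
#
# oauth = OAuth(client_id="my-client-id", client_secret="my-client-secret")
# start_url = oauth.init(
#     network="vk",
#     response_type=OAuth.CODE,
#     redirect_uri="https://example.com/callback",
#     state="opaque-anti-csrf-value",
# )
# # Redirect the user to start_url; OneSocial later calls back with ?code=...
# grant = oauth.token(code=received_code, redirect_uri="https://example.com/callback")
# print(grant.access_token, grant.token_type, grant.expires_in)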
|
python
| 13 | 0.5915 | 79 | 32.057851 | 121 |
starcoderdata
|
/* Copyright (c) 2014 Oberon microsystems, Inc. (Switzerland)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
// Originally developed for the book
// "Getting Started with the Internet of Things", by
// Copyright 2011 Inc., 978-1-4493-9357-1.
//
// Version 4.3, for the .NET Micro Framework release 4.3.
//
// Internal abstractions, not documented.
using System.Diagnostics.Contracts;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Threading;
namespace Oberon.Net.SocketStreams
{
public class SocketServerStreamProvider : IServerStreamProvider
{
public bool IsOpen { get; private set; }
public string LocalHostName { get; private set; }
public int LocalPort { get; set; }
public string LocalUrl { get; private set; }
// configuration properties specific to sockets (usually Ethernet sockets)
public int Backlog { get; set; }
const int dhcpTimeout = 10; // number of seconds we grant the DHCP server to provide an IP address to this board
Socket listener;
public SocketServerStreamProvider(int localPort)
{
Contract.Requires(localPort >= 0);
Contract.Requires(localPort <= 65535);
IsOpen = false;
LocalHostName = null;
LocalPort = localPort;
Backlog = 0;
Open(); // Added
}
void Open()
{
Contract.Requires(!IsOpen);
Contract.Requires(LocalPort >= 0);
Contract.Requires(LocalPort <= 65535);
if (Backlog <= 0) { Backlog = 4; }
Contract.Assert(listener == null);
try
{
var elapsedSeconds = 0;
IPAddress myAdr = IPAddress.GetDefaultLocalAddress();
while ((myAdr == IPAddress.Any) && (elapsedSeconds != dhcpTimeout)) // only necessary for lwIP stack?
{
Thread.Sleep(1000); // wait until local address is set
elapsedSeconds = elapsedSeconds + 1;
myAdr = IPAddress.GetDefaultLocalAddress();
}
if (myAdr == IPAddress.Any)
{
throw new IOException("timeout waiting for IP address from DHCP server - Ethernet cable not plugged in?");
}
LocalHostName = myAdr.ToString();
LocalUrl = "http://" + LocalHostName;
if (LocalPort != 80)
{
LocalUrl = LocalUrl + ':' + LocalPort;
}
listener = new Socket(AddressFamily.InterNetwork,
SocketType.Stream, ProtocolType.Tcp);
listener.SetSocketOption(SocketOptionLevel.Tcp, SocketOptionName.NoDelay, true);
listener.Bind(new IPEndPoint(IPAddress.Any, LocalPort));
listener.Listen(Backlog);
IsOpen = true;
}
catch (SocketException e)
{
if (listener != null) { Dispose(); }
listener = null;
throw new IOException("socket error " + e.ErrorCode, e);
}
}
public void Dispose()
{
if (IsOpen)
{
IsOpen = false;
Contract.Assert(listener != null);
listener.Close();
listener = null;
LocalHostName = null;
}
}
public Stream Accept()
{
if (!IsOpen)
{
Open();
}
try
{
Socket s = listener.Accept();
// Socket.Accept may throw an exception: SocketException,
// ObjectDisposedException, or InvalidOperationException
return new SocketStream(s);
}
catch (SocketException e) // this may happen, treat as expected error
{
throw new IOException("socket error " + e.ErrorCode, e);
}
}
}
}
|
c#
| 17 | 0.532208 | 126 | 35.186047 | 129 |
starcoderdata
|
public void testLockFreedDuringStoreError() throws Exception {
final LuceneIndex index = LuceneIndex.create(indexFolder, new KeywordAnalyzer());
final Collection<? extends Integer> dataSet = generateDataSet(1000);
final Logger log = Logger.getLogger(LuceneIndex.class.getName());
final TestHandler handler = new TestHandler(
new Runnable() {
@Override
public void run() {
//Break index a bit ;-)
for (File f : indexFolder.listFiles()) {
if (f.getName().startsWith("nb-lock")) { //NOI18N
continue;
}
f.delete();
}
}
});
log.setLevel(Level.FINE);
log.addHandler(handler);
boolean success = false;
try {
index.store(
dataSet,
Collections.<String>emptySet(),
new Convertor<Integer, Document>() {
@Override
public Document convert(Integer p) {
final Document doc = new Document();
doc.add(new Field(
"val", //NOI18N
Integer.toString(p),
Field.Store.YES,
Field.Index.ANALYZED_NO_NORMS));
return doc;
}
},
new Convertor<String, Query>() {
@Override
public Query convert(String p) {
throw new UnsupportedOperationException();
}
},
true);
success = true;
} catch (Throwable t) {
//Ignore - should be thrown and success should be false
} finally {
log.removeHandler(handler);
}
assertFalse(success);
success = false;
try {
index.store(
dataSet,
Collections.<String>emptySet(),
new Convertor<Integer, Document>() {
@Override
public Document convert(Integer p) {
final Document doc = new Document();
doc.add(new Field(
"val", //NOI18N
Integer.toString(p),
Field.Store.YES,
Field.Index.ANALYZED_NO_NORMS));
return doc;
}
},
new Convertor<String, Query>() {
@Override
public Query convert(String p) {
throw new UnsupportedOperationException();
}
},
true);
success = true;
} catch (Throwable t) {
//Should not be thrown and success should be true
t.printStackTrace();
}
assertTrue(success);
}
|
java
| 19 | 0.397693 | 89 | 39.679012 | 81 |
inline
|
/*
* Anarres C Preprocessor
* Copyright (c) 2007-2008, Shevek
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.anarres.cpp;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import static org.anarres.cpp.Token.*;
/**
* A Reader wrapper around the Preprocessor.
*
* This is a utility class to provide a transparent {@link Reader}
* which preprocesses the input text.
*
* @see Preprocessor
* @see Reader
*/
public class CppReader extends Reader {
private Preprocessor cpp;
private String token;
private int idx;
public CppReader(final Reader r) {
cpp = new Preprocessor(new LexerSource(r, true) {
@Override
public String getName() {
return "<CppReader Input@" +
System.identityHashCode(r) + ">";
}
});
token = "";
idx = 0;
}
public CppReader(Preprocessor p) {
cpp = p;
token = "";
idx = 0;
}
/**
* Returns the Preprocessor used by this CppReader.
*/
public Preprocessor getPreprocessor() {
return cpp;
}
/**
* Defines the given name as a macro.
*
* This is a convenience method.
*/
public void addMacro(String name)
throws LexerException {
cpp.addMacro(name);
}
/**
* Defines the given name as a macro.
*
* This is a convenience method.
*/
public void addMacro(String name, String value)
throws LexerException {
cpp.addMacro(name, value);
}
private boolean refill()
throws IOException {
try {
assert cpp != null : "cpp is null : was it closed?";
if (token == null)
return false;
while (idx >= token.length()) {
Token tok = cpp.token();
switch (tok.getType()) {
case EOF:
token = null;
return false;
case CCOMMENT:
case CPPCOMMENT:
if (!cpp.getFeature(Feature.KEEPCOMMENTS)) {
token = " ";
break;
}
default:
token = tok.getText();
break;
}
idx = 0;
}
return true;
}
catch (LexerException e) {
/* Never happens.
if (e.getCause() instanceof IOException)
throw (IOException)e.getCause();
*/
IOException ie = new IOException(String.valueOf(e));
ie.initCause(e);
throw ie;
}
}
public int read()
throws IOException {
if (!refill())
return -1;
return token.charAt(idx++);
}
/* XXX Very slow and inefficient. */
public int read(char cbuf[], int off, int len)
throws IOException {
if (token == null)
return -1;
for (int i = 0; i < len; i++) {
int ch = read();
if (ch == -1)
return i;
cbuf[off + i] = (char)ch;
}
return len;
}
public void close()
throws IOException {
if (cpp != null) {
cpp.close();
cpp = null;
}
token = null;
}
}
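// --- Usage sketch (added for illustration; not part of the original file) ---
// A minimal, hypothetical example of reading preprocessed text through CppReader;
// the input text and macro are placeholders.
//
//   Reader in = new CppReader(new java.io.StringReader("#define ANSWER 42\nANSWER\n"));
//   StringBuilder out = new StringBuilder();
//   int ch;
//   while ((ch = in.read()) != -1) {
//       out.append((char) ch);
//   }
//   in.close();
//   // `out` now holds the input with ANSWER expanded by the preprocessor.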
|
java
| 17 | 0.63207 | 70 | 19.896104 | 154 |
starcoderdata
|
////////////////////////////////////////////////////////////
// from SFML vector
////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T>::Size2() :
mWidth(0),
mHeight(0)
{
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T>::Size2(T width, T height) :
mWidth(width),
mHeight(height)
{
}
////////////////////////////////////////////////////////////
template <typename T>
template <typename U>
inline Size2<T>::Size2(const Size2<U>& vector) :
mWidth(static_cast<T>(vector.mWidth)),
mHeight(static_cast<T>(vector.mHeight))
{
}
template <typename T>
inline float Size2<T>::magnitude() const // NOTE: function name assumed; the original identifier was lost
{
return sqrt(sgrMagnitude());
}
template <typename T>
inline float Size2<T>::sgrMagnitude() const
{
return mWidth * mWidth + mHeight * mHeight;
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T> operator -(const Size2<T>& right)
{
return Size2<T>(-right.mWidth, -right.mHeight);
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T>& operator +=(Size2<T>& left, const Size2<T>& right)
{
left.mWidth += right.mWidth;
left.mHeight += right.mHeight;
return left;
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T>& operator -=(Size2<T>& left, const Size2<T>& right)
{
left.mWidth -= right.mWidth;
left.mHeight -= right.mHeight;
return left;
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T> operator +(const Size2<T>& left, const Size2<T>& right)
{
return Size2<T>(left.mWidth + right.mWidth, left.mHeight + right.mHeight);
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T> operator -(const Size2<T>& left, const Size2<T>& right)
{
return Size2<T>(left.mWidth - right.mWidth, left.mHeight - right.mHeight);
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T> operator *(const Size2<T>& left, T right)
{
return Size2<T>(left.mWidth * right, left.mHeight * right);
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T> operator *(T left, const Size2<T>& right)
{
return Size2<T>(right.mWidth * left, right.mHeight * left);
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T>& operator *=(Size2<T>& left, T right)
{
left.mWidth *= right;
left.mHeight *= right;
return left;
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T> operator /(const Size2<T>& left, T right)
{
return Size2<T>(left.mWidth / right, left.mHeight / right);
}
////////////////////////////////////////////////////////////
template <typename T>
inline Size2<T>& operator /=(Size2<T>& left, T right)
{
left.mWidth /= right;
left.mHeight /= right;
return left;
}
////////////////////////////////////////////////////////////
template <typename T>
inline bool operator ==(const Size2<T>& left, const Size2<T>& right)
{
return (left.mWidth == right.mWidth) && (left.mHeight == right.mHeight);
}
////////////////////////////////////////////////////////////
template <typename T>
inline bool operator !=(const Size2<T>& left, const Size2<T>& right)
{
return (left.mWidth != right.mWidth) || (left.mHeight != right.mHeight);
}
|
c++
| 11 | 0.476297 | 78 | 22.592105 | 152 |
starcoderdata
|
void Game::ProcessInput(LONGLONG _gameTime)
{
WindowMessage* windowMessageArray = nullptr;
// Dequeues all windows messages from the queue at once.
// lock
size_t messageCount = m_WindowMessageQueue.size();
if (messageCount != 0)
{
windowMessageArray = new WindowMessage[messageCount];
auto iter = m_WindowMessageQueue.begin();
std::copy_n(iter, messageCount, windowMessageArray);
m_WindowMessageQueue.clear();
}
// unlock
// Process each window message.
for (size_t i = 0; i < messageCount; ++i)
{
WindowMessage& windowMessage = windowMessageArray[i];
ProcessWindowMessage(_gameTime, windowMessage);
}
if (windowMessageArray != nullptr)
delete[] windowMessageArray;
}
|
c++
| 9 | 0.732475 | 57 | 24.925926 | 27 |
inline
|
require('../css/image-view.css');
var React = require('react');
var util = require('./util');
var ImageView = React.createClass({
shouldComponentUpdate: function(nextProps) {
var hide = util.getBoolean(this.props.hide);
return hide != util.getBoolean(nextProps.hide) || !hide;
},
preview: function() {
util.openPreview(this.props.data);
},
render: function() {
var props = this.props;
return (
<div className={'fill w-image-view' + (props.hide ? ' hide' : '')}>
<img src={props.imgSrc || undefined} />
{props.data ? <a href="javascript:;" onClick={this.preview}>
Click here to preview page in new window
</a> : undefined }
</div>
);
}
});
module.exports = ImageView;
|
javascript
| 19 | 0.60719 | 73 | 27.884615 | 26 |
starcoderdata
|
// Copyright (c) SimpleIdServer. All rights reserved.
// Licensed under the Apache License, Version 2.0. See LICENSE in the project root for license information.
using SimpleIdServer.Scim.Infrastructure.ValueProviders;
namespace Microsoft.AspNetCore.Mvc
{
public static class MvcOptionsExtensions
{
public static MvcOptions AddSCIMValueProviders(this MvcOptions mvcOptions)
{
mvcOptions.ValueProviderFactories.Insert(0, new SeparatedQueryStringValueProviderFactory(","));
return mvcOptions;
}
}
}
|
c#
| 15 | 0.741117 | 107 | 35.9375 | 16 |
starcoderdata
|
def pathToClosestPt(): # determine path to closest point
global currentX
global currentY
global unexploredPts
min_length = 100 # dummy starting value
path = [] # empty list to store path
for pt in unexploredPts: # calculate distance to each unexplored point
candidate = findPath([currentX, currentY], pt)
length = len(candidate)
if length > 1 and length < min_length:
# keep the shortest path found so far (closest point)
min_length = length
path = candidate
return path
|
python
| 12 | 0.641075 | 74 | 36.285714 | 14 |
inline
|
from django.shortcuts import render, redirect
from quiz_app.models import get_default_background, Team
from spell_bee.models import *
QUIZ = QuizInfo.objects.all()[0]
QUESTIONS = SpellBeeQuestion.objects.all()
def sb_quiz_info(request):
print(QUIZ.get_background_image_url())
context = {
'quiz': QUIZ,
'is_sb': True,
'questions': QUESTIONS,
'background_img': QUIZ.get_background_image_url()
}
return render(request, 'sb_info.html', context)
def sb_question(request):
print("SEE MEE")
questions = list(QUESTIONS)
teams = Team.objects.all()
background_image = QUIZ.get_background_image_url()
media_asset = None
try:
get = request.GET
except:
pass
try:
q_number = get["qid"]
except:
if questions:
q_number = questions[0].id
else:
q_number = None
if q_number:
try:
question = QUESTIONS.get(question_number=q_number)
except:
return redirect("/quiz")
else:
question = "No questions"
if not isinstance(question, str):
# Select team
number_of_teams = len(teams)
q_num = question.question_number
if q_num <= number_of_teams:
team = teams[q_num - 1]
else:
team = teams[(q_num - 1) % number_of_teams]
else:
team = ""
context = {
'quiz': QUIZ,
'question': question,
'time': QUIZ.get_time(),
'teams': ",".join([t.name for t in teams]),
'team': team,
'increment': QUIZ.get_increment(),
'decrement': QUIZ.get_decrement(),
'background_img': background_image
}
return render(request, 'sb_question.html', context)
|
python
| 14 | 0.567873 | 62 | 23.901408 | 71 |
starcoderdata
|
// Copyright © All Rights Reserved. This source is subject to the MIT license. Please see license.md for more information.
namespace MediaLibrary
{
using System.Diagnostics;
using System.Windows.Forms;
public partial class AboutForm : Form
{
public AboutForm()
{
this.InitializeComponent();
}
private void IconAttributionLink_LinkClicked(object sender, LinkLabelLinkClickedEventArgs e)
{
Process.Start(@"https://www.streamlineicons.com/");
}
}
}
|
c#
| 11 | 0.648452 | 130 | 26.45 | 20 |
starcoderdata
|
import itertools
def sort_and_group(iterable, key):
"""Sort an iterable and group the items by the given key func"""
groups = [
(k, list(g)) for k, g in
itertools.groupby(sorted(iterable, key=key), key=key)
]
return groups
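# Usage sketch (added for illustration; hypothetical data): grouping integers by parity.
# >>> sort_and_group([1, 2, 3, 4, 5], key=lambda x: x % 2)
# [(0, [2, 4]), (1, [1, 3, 5])]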
|
python
| 12 | 0.616034 | 68 | 33 | 7 |
inline
|
const compose = require('./methods/compose');
const concatAll = require('./methods/concatAll');
const filter = require('./methods/filter');
const forEach = require('./methods/forEach');
const isObject = require('./internal/isObject');
const map = require('./methods/map');
const reduce = require('./methods/reduce');
const zipWith = require('./methods/zipWith');
(function go() {
const Go = {
compose,
concatAll,
filter,
forEach,
map,
reduce,
zipWith,
};
if (isObject(exports)) {
module.exports = Go;
} else {
this.Go = Go;
}
}());
|
javascript
| 12 | 0.62931 | 49 | 21.307692 | 26 |
starcoderdata
|
@Test
public void testStreamReceiverConfiguresSessionCapacityLowerThanMaxFrameSize() throws Exception {
// Read buffer is always halved by connection when creating new session for the stream
// unless it falls at the max frame size value which means only one is possible, in this
// case the user configured session window lower than max frame size and the client auto
// adjusts that to one frame.
doTestStreamReceiverSessionCapacity(100_000, 50_000, 1);
}
|
java
| 6 | 0.732673 | 101 | 62.25 | 8 |
inline
|
/****************************************************************************
* arch/risc-v/src/mpfs/hardware/mpfs_dma.h
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
#ifndef __ARCH_RISCV_SRC_MPFS_HARDWARE_MPFS_DMA_H
#define __ARCH_RISCV_SRC_MPFS_HARDWARE_MPFS_DMA_H
/****************************************************************************
* Included Files
****************************************************************************/
#include <stdint.h>   /* uint64_t */
#include "hardware/mpfs_memorymap.h"
/****************************************************************************
* Pre-processor Definitions
****************************************************************************/
/* Register offsets *********************************************************/
#define MPFS_DMA_CONTROL_OFFSET 0x0000 /* Channel control register */
#define MPFS_DMA_NEXT_CONFIG_OFFSET 0x0004 /* Next transfer type */
#define MPFS_DMA_NEXT_BYTES_OFFSET 0x0008 /* Number of bytes to move */
#define MPFS_DMA_NEXT_DESTINATION_OFFSET 0x0010 /* Destination start address */
#define MPFS_DMA_NEXT_SOURCE_OFFSET 0x0018 /* Source start address */
#define MPFS_DMA_EXEC_CONFIG_OFFSET 0x0104 /* Active transfer type */
#define MPFS_DMA_EXEC_BYTES_OFFSET 0x0108 /* Number of bytes remaining */
#define MPFS_DMA_EXEC_DESTINATION_OFFSET 0x0110 /* Destination current address */
#define MPFS_DMA_EXEC_SOURCE_OFFSET 0x0118 /* Source current address */
#define MPFS_DMA_CHANNEL_OFFSET 0x1000 /* Offset to channels */
#define MPFS_DMA_REG_OFFSET(x) \
(uint64_t)(MPFS_PDMA_BASE + (MPFS_DMA_CHANNEL_OFFSET * (x)))
/* Register bit field definitions *******************************************/
/* Control register */
/* Indicates that the channel is in use. Setting this bit clears all of the
* channel’s Next registers (NextConfig, NextBytes, NextDestination, and
* NextSource). This bit can only be cleared when run (CR bit 0) is low.
*/
#define DMA_CONTROL_CLAIM_SHIFT (0) /* Bit: 0: claim */
#define DMA_CONTROL_CLAIM_MASK (1 << DMA_CONTROL_CLAIM_SHIFT)
# define DMA_CONTROL_CLAIM (0 << DMA_CONTROL_CLAIM_SHIFT)
/* Setting this bit starts a DMA transfer by copying the Next registers
* into their Exec counterparts.
*/
#define DMA_CONTROL_RUN_SHIFT (1) /* Bit: 1: run */
#define DMA_CONTROL_RUN_MASK (1 << DMA_CONTROL_RUN_SHIFT)
# define DMA_CONTROL_RUN (1 << DMA_CONTROL_RUN_SHIFT)
/* Setting this bit will trigger the channel’s Error interrupt once
* a transfer error occurs.
*/
#define DMA_CONTROL_DONEIE_SHIFT (14) /* Bit: 14: Done Irq enable */
#define DMA_CONTROL_DONEIE_MASK (1 << DMA_CONTROL_DONEIE_SHIFT)
# define DMA_CONTROL_DONEIE (1 << DMA_CONTROL_DONEIE_SHIFT)
/* Setting this bit will trigger the channel’s Done interrupt once
* a transfer is complete.
*/
#define DMA_CONTROL_ERRORIE_SHIFT (15) /* Bit: 15: Error Irq enable */
#define DMA_CONTROL_ERRORIE_MASK (1 << DMA_CONTROL_ERRORIE_SHIFT)
# define DMA_CONTROL_ERRORIE (1 << DMA_CONTROL_ERRORIE_SHIFT)
/* Indicates that a transfer has completed since the channel was claimed */
#define DMA_CONTROL_DONE_SHIFT (30) /* Bit: 30: Done */
#define DMA_CONTROL_DONE_MASK (1 << DMA_CONTROL_DONE_SHIFT)
# define DMA_CONTROL_DONE (1 << DMA_CONTROL_DONE_SHIFT)
/* Indicates that a transfer error has occurred since the channel
* was claimed
*/
#define DMA_CONTROL_ERROR_SHIFT (31) /* Bit: 31: Error */
#define DMA_CONTROL_ERROR_MASK (1 << DMA_CONTROL_ERROR_SHIFT)
# define DMA_CONTROL_ERROR (1 << DMA_CONTROL_ERROR_SHIFT)
/* Channel Next Configuration Register */
/* If set, the Exec registers are reloaded from the Next registers once a
* transfer is complete. The repeat bit must be cleared by software
* for the sequence to stop
*/
#define DMA_NEXT_CONFIG_REPEAT_SHIFT (2) /* Bit: 2: repeat */
#define DMA_NEXT_CONFIG_REPEAT_MASK (1 << DMA_NEXT_CONFIG_REPEAT_SHIFT)
# define DMA_NEXT_CONFIG_REPEAT (1 << DMA_NEXT_CONFIG_REPEAT_SHIFT)
/* Enforces strict ordering by only allowing one of each transfer type
* in-flight at a time
*/
#define DMA_NEXT_CONFIG_ORDER_SHIFT (3) /* Bit: 3: order */
#define DMA_NEXT_CONFIG_ORDER_MASK (1 << DMA_NEXT_CONFIG_ORDER_SHIFT)
# define DMA_NEXT_CONFIG_ORDER (1 << DMA_NEXT_CONFIG_ORDER_SHIFT)
/* WSIZE and RSIZE. Base 2 Logarithm of DMA transaction sizes.
* Example: 0 is 1 byte, 3 is 8 bytes, 5 is 32 bytes
* These fields are WARL (Write-Any Read-Legal), so the actual size used
* can be determined by reading the field after writing the requested size.
* */
#define DMA_NEXT_CONFIG_WSIZE_SHIFT (24) /* Bits: 24-27: write size */
#define DMA_NEXT_CONFIG_WSIZE_MASK (15 << DMA_NEXT_CONFIG_WSIZE_SHIFT)
# define DMA_NEXT_CONFIG_WSIZE(x) (x << DMA_NEXT_CONFIG_WSIZE_SHIFT)
#define DMA_NEXT_CONFIG_RSIZE_SHIFT (28) /* Bits: 28-31: read size */
#define DMA_NEXT_CONFIG_RSIZE_MASK (15 << DMA_NEXT_CONFIG_RSIZE_SHIFT)
# define DMA_NEXT_CONFIG_RSIZE(x) (x << DMA_NEXT_CONFIG_RSIZE_SHIFT)
#endif /* __ARCH_RISCV_SRC_MPFS_HARDWARE_MPFS_DMA_H */
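/* Usage sketch (added for illustration; not part of the original header).
 * Programming a single transfer on channel 0 with the definitions above;
 * putreg32()/putreg64() are assumed NuttX-style register accessors, and
 * src/dest/nbytes are placeholder values.
 *
 *   uintptr_t base = MPFS_DMA_REG_OFFSET(0);
 *   putreg32(DMA_CONTROL_CLAIM_MASK, base + MPFS_DMA_CONTROL_OFFSET);
 *   putreg64(nbytes, base + MPFS_DMA_NEXT_BYTES_OFFSET);
 *   putreg64(dest, base + MPFS_DMA_NEXT_DESTINATION_OFFSET);
 *   putreg64(src, base + MPFS_DMA_NEXT_SOURCE_OFFSET);
 *   putreg32(DMA_NEXT_CONFIG_WSIZE(3) | DMA_NEXT_CONFIG_RSIZE(3),
 *            base + MPFS_DMA_NEXT_CONFIG_OFFSET);
 *   putreg32(DMA_CONTROL_CLAIM_MASK | DMA_CONTROL_RUN | DMA_CONTROL_DONEIE |
 *            DMA_CONTROL_ERRORIE, base + MPFS_DMA_CONTROL_OFFSET);
 */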
|
c
| 8 | 0.634771 | 81 | 43.167883 | 137 |
starcoderdata
|
from django.urls import path
from django.conf.urls import include
from django.views.generic import RedirectView
from . import views
from rest_framework import routers
router = routers.DefaultRouter()
router.register("books", views.BookViewSet)
router.register("authors", views.AuthorViewSet)
app_name = "books"
urlpatterns = [
path("api/", include(router.urls)),
path("", RedirectView.as_view(url="/books/")),
path("books/", views.book_list, name="books"),
path("books/ views.book_info, name="book_info"),
path("authors/", views.author_list, name="authors"),
path("authors/ views.author_info, name="author_info"),
]
|
python
| 10 | 0.712676 | 76 | 31.272727 | 22 |
starcoderdata
|
private void setPaths( EvaluationConf conf ) throws Exception
{
String file_path = conf.getConfiguration().get( "sensibility_path" );
String result_path = conf.getConfiguration().get( "sensibility_result_path" );
projectPath = EvaluationManager.PROJECT_PATH + "/" + conf.getUserId() + "/";
filePath = file_path;
// ------------- SAVE IN ANOTHER PLACE
File f = new File( projectPath + "/" + result_path );
if( !f.exists() )
{
f.mkdirs();
}
String file_real_path = projectPath + "/" + result_path + "/sensibility.xml";
f = new File( file_real_path );
if( !f.exists() )
{
sensibility = SensibilityFile.open( projectPath + file_path );
SensibilityFile.save( file_path , sensibility );
}
else
{
sensibility = SensibilityFile.open( file_real_path );
}
SensibilityValidate validate = new SensibilityValidate( sensibility );
validate.validate();
file_real_path = null;
f = null;
}
|
java
| 10 | 0.525365 | 86 | 32.257143 | 35 |
inline
|
import torch
from torch import nn
from .Atss_head import ATSSLoss
from utils import boxlist_iou,BoxList
from utils import (
GIoULoss,
SigmoidFocalLoss,
SmoothL1loss_with_weight,
concat_box_prediction_layers,
get_num_gpus,
reduce_sum,
cat_boxlist,
)
class ATSSLoss_IOU(ATSSLoss):
def __init__(self, gamma, alpha, fg_iou_threshold, bg_iou_threshold, positive_type,
reg_loss_weight, angle_loss_weight, cls_loss_weight,
top_k, box_coder):
super(ATSSLoss_IOU, self).__init__(gamma, alpha, fg_iou_threshold, bg_iou_threshold, positive_type,
reg_loss_weight, angle_loss_weight, cls_loss_weight,
top_k, box_coder)
self.iou_loss_func = nn.BCEWithLogitsLoss(reduction="sum")
self.reg_loss_func = SmoothL1loss_with_weight()
self.angle_loss_func = nn.CrossEntropyLoss(reduction='sum')
# IOU aware
def compute_iou_targets(self, reg_targets, angle_targets, anchors):
assert reg_targets.shape[0] == angle_targets.shape[0] == anchors.shape[0]
reg_targets_with_angel = torch.cat([reg_targets.view(-1,4), angle_targets.view(-1, 1)],dim=-1)
gts = self.box_coder.decode(reg_targets_with_angel, anchors)
gts = BoxList(gts, image_size=(1024, 1024), mode='xywha_d')
anchors_box = BoxList(anchors, image_size=(1024,1024), mode='xywha_d')
ious = boxlist_iou(gts, anchors_box)
index = torch.linspace(0, anchors.shape[0]-1, anchors.shape[0]).to(torch.long).view(1, -1).to(ious.device)
ious = ious.gather(0, index).view(-1)
return ious
def __call__(self, box_cls, box_regression, iou, angle, targets, anchors):
'''
box_cls: list(tensor), tensor shape (N, class_num, H, W), classification branch output for every feature level;
N is the batch size.
box_regression: list(tensor), tensor shape (N, 4, H, W), localization branch output for every feature level.
iou: list(tensor), tensor shape (N, 1, H, W), IoU branch output for every feature level.
angle: list(tensor), tensor shape (N, 90, H, W), angle branch output for every feature level.
targets: list(BoxList), ground-truth objects, one BoxList per image.
anchors: list(list), [image_1_anchors, ..., image_N_anchors];
image_i_anchors: [level_1_anchor, ..., level_n_anchor],
level_i_anchor: BoxList.
'''
labels, reg_targets, weights_label = self.assigner(targets, anchors)
# prepare prediction
N = len(labels)
box_cls_flatten, box_regression_flatten = concat_box_prediction_layers(box_cls, box_regression)
iou_flatten = [ct.permute(0, 2, 3, 1).reshape(N, -1, 1) for ct in iou]
iou_flatten = torch.cat(iou_flatten, dim=1).reshape(-1)
angle_flatten = [an.permute(0, 2, 3, 1).reshape(N, -1, 90) for an in angle]
angle_flatten = torch.cat(angle_flatten, dim=1).reshape(-1, 90)
# prepare ground truth
labels_flatten = torch.cat(labels, dim=0)
reg_targets_flatten = torch.cat([reg_target[:, :4] for reg_target in reg_targets], dim=0)
angel_targets_flatten = torch.cat([reg_target[:, 4] for reg_target in reg_targets], dim=0)
weights_label_flatten = torch.cat(weights_label, dim=0)
# prepare anchors
anchors_flatten = torch.cat([cat_boxlist(anchors_per_image).bbox for anchors_per_image in anchors], dim=0)
pos_inds = torch.nonzero(labels_flatten > 0).squeeze(1)
num_gpus = get_num_gpus()
total_num_pos = reduce_sum(pos_inds.new_tensor([pos_inds.numel()])).item()
num_pos_avg_per_gpu = max(total_num_pos / float(num_gpus), 1.0)
cls_loss = self.cls_loss_func(box_cls_flatten, labels_flatten.int(),
weights_label_flatten) / num_pos_avg_per_gpu
if pos_inds.numel() > 0:
anchors_flatten = anchors_flatten[pos_inds]
# prepare positive sample matched gt
reg_targets_flatten = reg_targets_flatten[pos_inds]
angel_targets_flatten = angel_targets_flatten[pos_inds]
iou_targets = self.compute_iou_targets(reg_targets_flatten, angel_targets_flatten, anchors_flatten)
weights_label_flatten = weights_label_flatten[pos_inds]
# prepare positive sample prediction
box_regression_flatten = box_regression_flatten[pos_inds]
iou_flatten = iou_flatten[pos_inds]
angle_flatten = angle_flatten[pos_inds]
sum_iou_targets_avg_per_gpu= reduce_sum(iou_targets.sum()).item() / float(num_gpus)
# attention here
reg_loss = self.reg_loss_func(box_regression_flatten, reg_targets_flatten,
weights_label_flatten*iou_targets) \
/ sum_iou_targets_avg_per_gpu
# reg_loss = self.reg_loss_func(box_regression_flatten, reg_targets_flatten, anchors_flatten,
# weight=iou_targets*weights_label_flatten) \
# /sum_iou_targets_avg_per_gpu
iou_loss = self.iou_loss_func(iou_flatten, iou_targets) / num_pos_avg_per_gpu
angle_loss = self.angle_loss_func(angle_flatten, angel_targets_flatten.to(torch.long)) / num_pos_avg_per_gpu
else:
reg_loss = torch.tensor([0]).to(torch.float32)
iou_loss = reg_loss * 0
angle_loss = reg_loss * 0
return cls_loss * self.cls_loss_weight, reg_loss * self.reg_loss_weight, iou_loss, angle_loss * self.angle_loss_weight
|
python
| 18 | 0.622778 | 126 | 49.732143 | 112 |
starcoderdata
|
#ifndef GETTER_HPP
#define GETTER_HPP
#include
#include
#include
#include
#include
#include "beastly_connection.hpp"
#include "url_parser.hpp"
class getter
{
public:
constexpr static size_t number_of_threads=2;
getter();
~getter();
std::unique_ptr make_connection(parsed_url uri);
template<class Callable>
void post(Callable && c) {
boost::asio::post(ioc.get_executor(), c);
}
[[nodiscard]] std::future get(std::string url,
std::function<void(size_t, size_t)> progress_handler,
std::function<void(boost::beast::error_code const &, size_t, beastly_connection &)> completion_handler);
private:
void coro_download(std::string url,
std::function<void(size_t, size_t)> progress_handler,
std::function<void(boost::beast::error_code const &, size_t, beastly_connection &)> completion_handler,
std::promise future,
boost::asio::yield_context yield_ctx);
boost::asio::io_context ioc;
boost::asio::ip::tcp::resolver resolver;
boost::asio::executor_work_guard guard;
std::unique_ptr networking_threads;
};
#endif // GETTER_HPP
|
c++
| 17 | 0.635432 | 126 | 28.06383 | 47 |
starcoderdata
|
/**
* Imports
*/
import trigger from '@f/trigger-event'
import element from 'vdux/element'
import CSSEmulator from '../src'
import vdux from 'vdux/dom'
import test from 'tape'
/**
* Tests
*/
test('hover should work', t => {
const {render, subscribe} = vdux()
const off = subscribe(a => a)
let node
t.plan(2)
node = render(<CSSEmulator onHoverChange={hover => t.ok(hover)} />)
trigger(node, 'mouseenter')
render(<CSSEmulator onHoverChange={hover => t.ok(hover)} />)
node = render(<CSSEmulator onHoverChange={hover => t.notOk(hover)} />)
trigger(node, 'mouseleave')
node = render(<CSSEmulator onHoverChange={hover => t.notOk(hover)} />)
off()
t.end()
})
test('active should work', t => {
const {render, subscribe} = vdux()
const off = subscribe(a => a)
let node
t.plan(2)
node = render(<CSSEmulator onActiveChange={active => t.ok(active)} />)
trigger(node, 'mousedown')
node = render(<CSSEmulator onActiveChange={active => t.ok(active)} />)
node = render(<CSSEmulator onActiveChange={active => t.notOk(active)} />)
trigger(node, 'mouseup', {bubbles: true})
node = render(<CSSEmulator onActiveChange={active => t.notOk(active)} />)
off()
t.end()
})
test('focus should work', t => {
const {render, subscribe} = vdux()
const off = subscribe(a => a)
let node
t.plan(2)
node = render(<CSSEmulator onFocusChange={focus => t.ok(focus)} />)
trigger(node, 'focus')
node = render(<CSSEmulator onFocusChange={focus => t.ok(focus)} />)
node = render(<CSSEmulator onFocusChange={focus => t.notOk(focus)} />)
trigger(node, 'blur')
node = render(<CSSEmulator onFocusChange={focus => t.notOk(focus)} />)
off()
t.end()
})
test('linger should work', t => {
const {render, subscribe} = vdux()
const off = subscribe(a => a)
let node
t.plan(2)
node = render(<CSSEmulator onLingerChange={linger => t.notOk(linger)} />)
trigger(node, 'mouseenter')
node = render(<CSSEmulator onLingerChange={linger => t.notOk(linger)} />)
setTimeout(() => {
node = render(<CSSEmulator onLingerChange={linger => t.ok(linger)} />)
trigger(node, 'mouseleave')
node = render(<CSSEmulator onLingerChange={linger => t.notOk(linger)} />)
off()
t.end()
}, 500)
})
test('should still allow normal event handlers to work', t => {
const {render, subscribe} = vdux()
const off = subscribe(a => a)
let node
t.plan(2)
node = render(<CSSEmulator onFocus={() => t.pass()} onFocusChange={focus => t.ok(focus)} />)
trigger(node, 'focus')
node = render(<CSSEmulator onFocus={() => t.pass()} onFocusChange={focus => t.ok(focus)} />)
off()
t.end()
})
|
javascript
| 21 | 0.637911 | 94 | 24.660194 | 103 |
starcoderdata
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
using Newtonsoft.Json;
using Greenergy.Tesla.API;
namespace Greenergy.TeslaTools
{
public class TeslaOwner
{
public string Email { get; set; }
string AccessToken { get; set; }
public HttpClient Client { get; set; }
public TeslaOwner(string email)
{
Email = email;
Client = TeslaAPI.GetClient();
}
public TeslaOwner(string email, string accessToken) : this(email)
{
AccessToken = accessToken;
Client.DefaultRequestHeaders.Add("Authorization", "Bearer " + AccessToken);
}
public async Task<string> AuthenticateAsync(string password)
{
var request = JsonConvert.SerializeObject(new
{
grant_type = "password",
client_id = TeslaAPI.TESLA_CLIENT_ID,
client_secret = TeslaAPI.TESLA_CLIENT_SECRET,
email = Email,
password
});
var response = await Client.PostAsync(
TeslaAPI.OAUTH_TOKEN_PATH,
new StringContent(
request,
Encoding.UTF8,
"application/json"
));
if (response.StatusCode != HttpStatusCode.OK)
{
throw new Exception($"Tesla login failed. StatusCode={response.StatusCode}, Reason={response.ReasonPhrase}");
}
var token = JsonConvert.DeserializeObject
await response.Content.ReadAsStringAsync()
);
AccessToken = token.AccessToken;
Client.DefaultRequestHeaders.Add("Authorization", "Bearer " + AccessToken);
return AccessToken;
// System.Console.WriteLine($"Got token {AccessToken}. Expires on {TokenExpiry.ToString("o")}");
}
public async Task<List<TeslaVehicle>> GetVehiclesAsync()
{
var response = await Client.GetAsync(TeslaAPI.VEHICLES_API_BASE_URI);
if (response.StatusCode != HttpStatusCode.OK)
{
throw new Exception($"Failed to get Tesla Vehicles. StatusCode={response.StatusCode}, Reason={response.ReasonPhrase}");
}
var vehiclesDTO = JsonConvert.DeserializeObject
await response.Content.ReadAsStringAsync()
);
var vehicles = vehiclesDTO.Vehicles
.Select(v => new TeslaVehicle
{
Owner = this,
Id = v.Id,
VIN = v.VIN,
DisplayName = v.DisplayName //,
// InService = v.InService,
// State = v.State
})
.ToList();
return vehicles;
}
}
}
|
c#
| 21 | 0.544283 | 135 | 31.902174 | 92 |
starcoderdata
|
@RequestMapping(value = "/expression", method = RequestMethod.GET)
public String expression(Model model,
@RequestParam(required = false) String lastSpel,
@RequestParam(required = false) String result,
@RequestParam(required = false) String error) {
// Add the flash attributes to the model
model.addAttribute("lastSpel", lastSpel);
model.addAttribute("result", result);
model.addAttribute("error", error);
return "expression";
}
|
java
| 8 | 0.587814 | 77 | 49.818182 | 11 |
inline
|
package com.btrapp.jklarfreader.objects;
/**
* An exception for when something's wrong with the Klarf format itself (like a list length is
* wrong)
*
* @see com.btrapp.jklarfreader.objects.KlarfContentException for when the format is correct but the
* contents don't follow a business logic rule (like if you say ResultTimestamp is expected to
* be a String of 2 items, but only 1 is present)
* @author btrapp
*/
public class KlarfException extends Exception {
private static final long serialVersionUID = -6451200672730727155L;
public enum ExceptionCode {
GenericError,
ListFormat,
NumberFormat,
UnsupportedKlarfVersion
}
private ExceptionCode code = ExceptionCode.GenericError;
private int lineNumber = -1;
public KlarfException(String msg) {
this(msg, null, ExceptionCode.GenericError);
}
public KlarfException(String msg, KlarfTokenizer kt, ExceptionCode code) {
super(
msg
+ ((kt == null)
? ""
: " (At line " + kt.getLineNumber() + ": " + kt.getCurrentLine() + ")"));
this.code = code;
this.lineNumber = (kt == null) ? -1 : kt.getLineNumber();
}
public ExceptionCode getCode() {
return code;
}
public int getLineNumber() {
return lineNumber;
}
}
|
java
| 16 | 0.676101 | 100 | 26.652174 | 46 |
starcoderdata
|
import React from 'react';
import './SliderArrow.scss';
function SliderArrow(props) {
const sliderArrowClassName = `slider-arrow slider-arrow-direction-${props.direction}`;
return (
<button type="button" className={sliderArrowClassName} onClick={props.onClick}>
<svg viewBox="0 0 24 24">
<path d="M8.59,16.58L13.17,12L8.59,7.41L10,6L16,12L10,18L8.59,16.58Z" />
);
}
export default SliderArrow;
|
javascript
| 10 | 0.67706 | 88 | 25.411765 | 17 |
starcoderdata
|
void Engine::InitRender()
{
// set firts line to draw to
m_CurrLine = 20;
// set pixel buffer address of first pixel
m_PPos = 20 * m_Width;
// screen plane in world space coordinates
m_WX1 = -4, m_WX2 = 4, m_WY1 = m_SY = 3, m_WY2 = -3;
// calculate deltas for interpolation
m_DX = (m_WX2 - m_WX1) / m_Width;
m_DY = (m_WY2 - m_WY1) / m_Height;
m_SY += 20 * m_DY;
// allocate space to store pointers to primitives for previous line
m_LastRow = new Primitive*[m_Width];
memset( m_LastRow, 0, m_Width * 4 );
}
|
c++
| 9 | 0.621013 | 68 | 31.4375 | 16 |
inline
|
void CalendarEvent::RemoveAllInvite(ObjectGuid const& removerGuid)
{
// build mail title
std::ostringstream title;
title << removerGuid << ':' << Title;
// build mail body
std::ostringstream body;
body << secsToTimeBitFields(time(NULL));
// creating mail draft
MailDraft draft(title.str(), body.str());
CalendarInviteMap::iterator itr = m_Invitee.begin();
while (itr != m_Invitee.end())
{
if (removerGuid != itr->second->InviteeGuid)
draft.SendMailTo(MailReceiver(itr->second->InviteeGuid), this, MAIL_CHECK_MASK_COPIED);
RemoveInviteByItr(itr++);
}
}
|
c++
| 13 | 0.646032 | 99 | 29.047619 | 21 |
inline
|
define(['./FS'], function (FS) {
'use strict';
let fs = new FS();
const _fsMock = {};
const fsProto = FS.prototype;
const keys = Object.getOwnPropertyNames(fsProto);
keys.forEach(key => {
if (typeof fs[key] === 'function') {
_fsMock[key] = function () {
return fs[key].apply(fs, arguments);
};
} else {
_fsMock[key] = fs[key];
}
});
_fsMock['changeFSModule'] = function (newFs) {
fs = newFs;
};
_fsMock['getFSModule'] = function () {
return fs;
};
_fsMock['FS'] = FS;
_fsMock['Stats'] = FS.Stats;
return _fsMock;
});
|
javascript
| 21 | 0.484211 | 53 | 24.615385 | 26 |
starcoderdata
|
fn init_ctx(vendor: u16, product: u16, intf: ftdi::Interface) -> Result<FTx232H> {
let mut context = ftdi::Context::new();
context.set_interface(intf)?;
if context.usb_open(vendor, product).is_err() {
return Err(Error::new(ErrorKind::Other, "no FTDI device found"));
}
context.set_write_chunksize(1024);
context.set_read_chunksize(1024);
context.usb_reset()?;
context.set_latency_timer(5)?;
context.set_bitmode(0, BitMode::MPSSE)?;
context.usb_purge_buffers()?;
// clock settings:
// - disable DIV_5 => 60MHz
// - disable adaptive clocking
// - disable 3-phase clocking
context.write_all(&[MPSSECmd_H::DISABLE_DIV_5_CLK.into()])?;
context.write_all(&[MPSSECmd_H::DISABLE_ADAPTIVE_CLK.into()])?;
context.write_all(&[MPSSECmd_H::DISABLE_3_PHASE_CLK.into()])?;
// disable loopback
context.write_all(&[MPSSECmd::LOOPBACK_DISABLE.into()])?;
// FIXME: current approach is limited: fixed in/out pin configuration:
// - low bits: all outputs(0)
context.write_all(&[MPSSECmd::SET_BITS_LOW.into(), 0x0, 0b1111_1111])?;
// FIXME: current approach is limited: fixed in/out pin configuration:
// - high bits: all outputs(0)
context.write_all(&[MPSSECmd::SET_BITS_HIGH.into(), 0x0, 0b1111_1111])?;
let d = FTx232H {
mtx: Mutex::new(RefCell::new(context)),
loopback: false,
i2c: RefCell::new(None),
spi: RefCell::new(None),
pl0: RefCell::new(true),
pl1: RefCell::new(true),
pl2: RefCell::new(true),
pl3: RefCell::new(true),
ph0: RefCell::new(true),
ph1: RefCell::new(true),
ph2: RefCell::new(true),
ph3: RefCell::new(true),
ph4: RefCell::new(true),
ph5: RefCell::new(true),
ph6: RefCell::new(true),
ph7: RefCell::new(true),
};
Ok(d)
}
|
rust
| 13 | 0.551624 | 82 | 33.983051 | 59 |
inline
|
const { google } = require("googleapis");
const nodemailer = require("nodemailer");
function newTransporter() {
const OAuth2 = google.auth.OAuth2;
const oauth2Client = new OAuth2(
process.env.EMAIL_CLIENT_ID,
process.env.EMAIL_CLIENT_SECRET,
process.env.EMAIL_REDIRECT_URL
);
oauth2Client.setCredentials({
refresh_token: process.env.EMAIL_REFRESH_TOKEN
});
const accessToken = oauth2Client.getAccessToken();
return nodemailer.createTransport({
service: process.env.EMAIL_SERVICE,
auth: {
type: "OAuth2",
user: process.env.EMAIL_USER,
clientId: process.env.EMAIL_CLIENT_ID,
clientSecret: process.env.EMAIL_CLIENT_SECRET,
refreshToken: process.env.EMAIL_REFRESH_TOKEN,
accessToken: accessToken
}
});
}
module.exports = newTransporter;
|
javascript
| 15 | 0.705678 | 52 | 24.382353 | 34 |
starcoderdata
|
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Http\Requests;
use App\Http\Controllers\Controller;
class BlogController extends Controller
{
public function template()
{
return view('template');
}
public function index($pagina)
{
switch ($pagina) {
case 'mundo':
$noticia_m1 = [
'titulo'=>'Dezenas de migrantes africanos pulam cerca rumo à Espanha',
'mensagem'=>'Cerca de 300 pessoas superaram barreira de 6 m de altura em Melilla. Forças marroquinas contiveram 200, mas 100 entraram na Espanha.',
'link'=> 'http://g1.globo.com/mundo/noticia/2016/10/dezenas-de-migrantes-africanos-pulam-cerca-para-chegar-espanha.html'];
$noticia_m2 = [
'titulo'=>'Rei da Tailândia, Bhumibol Adulyadej, morre após 70 anos no poder',
'mensagem'=>'Monarca de 88 anos estava internado em hospital em Bangcoc.Status quase divino do rei faz da sucessão um tabu no país.',
'link'=>'http://g1.globo.com/mundo/noticia/2016/10/rei-da-tailandia-bhumibol-adulyadej-morre.html'];
$mundo = ['M1'=>$noticia_m1,'M2'=>$noticia_m2];
$noticias = ['Pagina'=> $pagina, 'Mundo'=>$mundo];
$view = 'paginas.' . $pagina;
return view($view, ['noticias'=>$noticias]);
break;
case 'politica':
$noticia_p1 = [
'titulo'=>'Temer recebe FHC no Jaburu e conversa sobre PEC do teto',
'mensagem'=>'Presidente ofereceu almoço a tucano em dia sem previsão de compromisso.
Proposta, aprovada em 1º turno na Câmara, limita despesas públicas.',
'link'=>'http://g1.globo.com/politica/noticia/2016/10/temer-recebe-fhc-no-jaburu-e-conversa-sobre-pec-do-teto.html'];
$noticia_p2 = [
'titulo'=>'Na quarta viagem internacional, Temer vai à Índia para cúpula do Brics',
'mensagem'=>'Presidente embarcará na madrugada desta sexta; ele também irá ao Japão.
Primeira-dama, deve compor comitiva brasileira na viagem.',
'link'=>'http://g1.globo.com/politica/noticia/2016/10/na-quarta-viagem-internacional-temer-vai-india-para-cupula-do-brics.html'];
$politica = ['P1'=>$noticia_p1, 'P2'=>$noticia_p2];
$noticias = ['Pagina'=> $pagina, 'Politica'=>$politica];
$view = 'paginas.' . $pagina;
return view($view, ['noticias'=>$noticias]);
break;
case 'carros':
$noticia_c1 = [
'titulo'=>'Camaro supera Mustang em vendas pela 1ª vez em 2 anos',
'mensagem'=>'Ford paralisa produção do Mustang. após emplacamentos caírem 32%.
Vitória do modelo Chevrolet em setembro é atribuída a descontos maiores.',
'link'=>'http://g1.globo.com/carros/noticia/2016/10/camaro-supera-mustang-em-vendas-pela-1-vez-em-2-anos.html'];
$noticia_c2 = [
'titulo'=>'BMW Série 5 chega à sétima geração mais leve e tecnológico',
'mensagem'=>'Sedã ficou até 100 kg mais leve que a geração anterior.
Série 5 ganha luxo e tecnologias do irmão maior, o Série 7',
'link'=>'http://g1.globo.com/carros/noticia/2016/10/bmw-serie-5-chega-setima-geracao-mais-leve-e-tecnologico.html'];
$carros = ['C1'=>$noticia_c1, 'C2'=>$noticia_c2];
$noticias = ['Pagina'=> $pagina, 'Carros' => $carros,];
$view = 'paginas.' . $pagina;
return view($view, ['noticias'=>$noticias]);
break;
default:
return view('errors.404', ['pagina'=>$pagina]);
}
}
}
|
php
| 15 | 0.575735 | 163 | 44.37931 | 87 |
starcoderdata
|
private void layoutMarkup(NodeImpl node) {
// This is the "inline" layout of an element.
// The difference with layoutChildren is that this
// method checks for padding and margin insets.
RenderState rs = node.getRenderState();
Insets marginInsets = null;
Insets paddingInsets = null;
if(rs != null) {
HtmlInsets mi = rs.getMarginInsets();
marginInsets = mi == null ? null : mi.getSimpleAWTInsets(this.availContentWidth, this.availContentHeight);
HtmlInsets pi = rs.getPaddingInsets();
paddingInsets = pi == null ? null : pi.getSimpleAWTInsets(this.availContentWidth, this.availContentHeight);
}
int leftSpacing = 0;
int rightSpacing = 0;
if(marginInsets != null) {
leftSpacing += marginInsets.left;
rightSpacing += marginInsets.right;
}
if(paddingInsets != null) {
leftSpacing += paddingInsets.left;
rightSpacing += paddingInsets.right;
}
if(leftSpacing > 0) {
RLine line = this.currentLine;
line.addSpacing(new RSpacing(node, this.container, leftSpacing, line.height));
}
    this.layoutChildren(node);
if(rightSpacing > 0) {
RLine line = this.currentLine;
line.addSpacing(new RSpacing(node, this.container, rightSpacing, line.height));
}
}
|
java
| 11 | 0.706601 | 110 | 36.212121 | 33 |
inline
|
#include "server_sync.h"
sem_t empty;
sem_t full;
int initSyncMechanisms(size_t num_threads)
{
logSyncMechSem(getLogfile(), MAIN_THREAD_ID, SYNC_OP_SEM_INIT, SYNC_ROLE_PRODUCER, 0, num_threads);
if(sem_init(&empty, 0, num_threads) != 0){
perror("Semaphore 'empty' error");
return -1;
}
logSyncMechSem(getLogfile(), MAIN_THREAD_ID, SYNC_OP_SEM_INIT, SYNC_ROLE_PRODUCER, 0, 0);
if(sem_init(&full, 0, 0) != 0){
perror("Semaphore 'full' error");
return -2;
}
return 0;
}
|
c
| 9 | 0.600368 | 103 | 19.923077 | 26 |
starcoderdata
|
namespace SimulatedTemperatureSensor
{
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
public class TaskTimer
{
readonly Action onTimer;
readonly TimeSpan timerPeriod;
readonly Action onError;
readonly ILogger logger;
public TaskTimer(Action onTimer,
TimeSpan timerPeriod,
ILogger logger,
Action onError = null)
{
this.timerPeriod = timerPeriod;
this.onTimer = onTimer;
this.logger = logger;
this.onError = onError;
}
public void Start(CancellationToken token)
{
Task elapsedTask = null;
elapsedTask = new Task((x) =>
{
OnTimer(elapsedTask, token);
}, token);
HandleError(elapsedTask, token);
elapsedTask.Start();
}
private void OnTimer(Task task, object objParam)
{
var start = DateTime.Now;
var token = (CancellationToken)objParam;
if (token.IsCancellationRequested)
{
logger.LogInformation("A cancellation has been requested.");
return;
}
onTimer();
var delay = timerPeriod - (DateTime.Now - start);
if (delay.Ticks > 0)
{
task = Task.Delay(delay);
}
HandleError(task.ContinueWith(OnTimer, token), token);
}
private void HandleError(Task task, CancellationToken token)
{
task.ContinueWith((e) =>
{
logger.LogError(
$"Exception when running timer callback: {e.Exception}");
onError?.Invoke();
if (!token.IsCancellationRequested)
task.ContinueWith(OnTimer, token);
}, TaskContinuationOptions.OnlyOnFaulted);
}
}
}
|
c#
| 20 | 0.526055 | 77 | 26.243243 | 74 |
starcoderdata
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.gsgp.nodes.functions;
import edu.gsgp.nodes.Node;
/**
* @author
* http://homepages.dcc.ufmg.br/~luizvbo/
*
* Copyright (C) 2014, Federal University of Minas Gerais, Belo Horizonte, Brazil
*/
public abstract class Function implements Node{
protected Node[] arguments;
protected Node parent = null;
protected int parentArgPosition;
public Function() {
arguments = new Node[getArity()];
}
/** Methods to be implemented in the subclasses. **/
@Override
public abstract double eval(double[] inputs);
@Override
public abstract int getNumNodes();
@Override
public abstract Node clone(Node parent);
@Override
public abstract int getArity();
public abstract Function softClone();
/**
* Return the argument at the given position
* @param index Position
* @return The Node argument
*/
@Override
public final Node getChild(int index) {
return arguments[index];
}
@Override
public final Node getParent() {
return parent;
}
@Override
public final void setParent(Node parent, int argPosition) {
this.parent = parent;
this.parentArgPosition = argPosition;
}
public final void addNode(Node newNode, int argPosition) {
arguments[argPosition] = newNode;
newNode.setParent(this, argPosition);
}
@Override
public final int getParentArgPosition() {
return parentArgPosition;
}
}
|
java
| 11 | 0.655738 | 81 | 24.507463 | 67 |
starcoderdata
|
private string GetButtonList(ToolBar toolBar)
{
string buttonList = "ToolBarButtons: ";
IEnumerator x = toolBar.Buttons.GetEnumerator();
// Enumerate through the collection of toolbar buttons.
while(x.MoveNext())
{
buttonList += ((ToolBarButton)x.Current).Text + " ";
}
return buttonList;
}
|
c#
| 13 | 0.678019 | 59 | 23.923077 | 13 |
inline
|
n , m = map(int, input().split())
ipy = []
for i in range(m):
p, y = map(int, input().split())
ipy.append([i, p, y])
ipy.sort(key=lambda x:(x[1], x[2]))
num = None
count = 1
for i in range(len(ipy)):
if num == None:
num = ipy[i][1]
if num != ipy[i][1]:
count = 1
num = ipy[i][1]
bangou = str(ipy[i][1]).zfill(6) + str(count).zfill(6)
ipy[i].append(bangou)
count += 1
ipy.sort(key=lambda x:x[0])
for i in range(len(ipy)):
print(ipy[i][3])
|
python
| 12 | 0.509921 | 58 | 15.833333 | 30 |
codenet
|
#ifndef ORVIBO_STATE_H
#define ORVIBO_STATE_H
enum orvibo_state {
ORVIBO_STATE_UNKNOWN,
ORVIBO_STATE_OFF,
ORVIBO_STATE_ON
};
const char *
orvibo_state_string(enum orvibo_state state);
#endif
|
c
| 8 | 0.746193 | 45 | 14.153846 | 13 |
starcoderdata
|
/*
* Copyright 2016-2018
* MATRIX Labs [http://creator.matrix.one]
* This file is part of MATRIX Creator HAL
*
* MATRIX Creator HAL is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see
*/
#include "cpp/driver/bus_direct.h"
// NOTE: the original header names were lost; the includes below are inferred
// from what this file uses (open/O_RDWR, ioctl + SPI_IOC_*, close, memset/memcpy,
// std::cerr, std::unique_lock/std::mutex, std::string, fixed-width integers).
#include <fcntl.h>
#include <stdint.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <linux/spi/spidev.h>
#include <iostream>
#include <mutex>
#include <string>
#include "cpp/driver/creator_memory_map.h"
namespace matrix_hal {
struct hardware_address {
uint8_t readnwrite : 1;
uint16_t reg : 15;
};
BusDirect::BusDirect()
: spi_fd_(0),
spi_mode_(3),
spi_bits_(8),
spi_speed_(15000000),
spi_delay_(0) {}
BusDirect::~BusDirect() {
if (spi_fd_) Close();
}
bool BusDirect::Init(std::string device_name) {
if (spi_fd_) Close();
if (device_name.size() == 0)
device_name_ = "/dev/spidev0.0";
else
device_name_ = device_name;
  std::unique_lock<std::mutex> lock(mutex_);
spi_fd_ = open(device_name_.c_str(), O_RDWR);
if (spi_fd_ < 0) {
return false;
}
if (ioctl(spi_fd_, SPI_IOC_WR_MODE, &spi_mode_) == -1) {
std::cerr << "can't set spi mode" << std::endl;
return false;
}
if (ioctl(spi_fd_, SPI_IOC_RD_MODE, &spi_mode_) == -1) {
std::cerr << "can't get spi mode" << std::endl;
return false;
}
/*
* bits per word
*/
if (ioctl(spi_fd_, SPI_IOC_WR_BITS_PER_WORD, &spi_bits_) == -1) {
std::cerr << "can't set bits per word" << std::endl;
return false;
}
if (ioctl(spi_fd_, SPI_IOC_RD_BITS_PER_WORD, &spi_bits_) == -1) {
std::cerr << "can't get bits per word" << std::endl;
return false;
}
/*
* max speed hz
*/
if (ioctl(spi_fd_, SPI_IOC_WR_MAX_SPEED_HZ, &spi_speed_) == -1) {
std::cerr << "can't set max speed Hz" << std::endl;
return false;
}
if (ioctl(spi_fd_, SPI_IOC_RD_MAX_SPEED_HZ, &spi_speed_) == -1) {
std::cerr << "can't get max speed Hz" << std::endl;
return false;
}
return true;
}
bool BusDirect::SpiTransfer(unsigned char *send_buffer,
unsigned char *receive_buffer, unsigned int size) {
spi_ioc_transfer tr;
memset(&tr, 0, sizeof(tr));
tr.tx_buf = (uint64_t)send_buffer;
tr.rx_buf = (uint64_t)receive_buffer;
tr.len = size;
tr.delay_usecs = spi_delay_;
tr.speed_hz = spi_speed_;
tr.bits_per_word = spi_bits_;
if (ioctl(spi_fd_, SPI_IOC_MESSAGE(1), &tr) < 1) {
std::cerr << "can't send spi message" << std::endl;
return false;
}
return true;
}
bool BusDirect::Read(uint16_t add, unsigned char *data, int length) {
  std::unique_lock<std::mutex> lock(mutex_);
hardware_address *hw_addr = reinterpret_cast<hardware_address *>(tx_buffer_);
hw_addr->reg = add;
hw_addr->readnwrite = 1;
if (SpiTransfer(tx_buffer_, rx_buffer_, length + 2)) {
memcpy(data, &rx_buffer_[2], length);
return true;
}
return false;
}
bool BusDirect::Write(uint16_t add, unsigned char *data, int length) {
  std::unique_lock<std::mutex> lock(mutex_);
hardware_address *hw_addr = reinterpret_cast<hardware_address *>(tx_buffer_);
hw_addr->reg = add;
hw_addr->readnwrite = 0;
memcpy(&tx_buffer_[2], data, length);
if (SpiTransfer(tx_buffer_, rx_buffer_, length + 2)) {
return true;
}
return false;
}
void BusDirect::Close(void) { close(spi_fd_); }
}; // namespace matrix_hal
|
c++
| 13 | 0.632096 | 79 | 25.422078 | 154 |
starcoderdata
|
using RiddleSolve.Model;
namespace RiddleSolve.Converters
{
public static class SideConverter
{
public static Position ToRelativePosition(this ITile.TileSide side)
=> side switch
{
ITile.TileSide.Left => (0, -1),
ITile.TileSide.Top => (-1, 0),
ITile.TileSide.Right => (0, 1),
ITile.TileSide.Bottom => (1, 0),
_ => (0, 0)
};
}
}
|
c#
| 12 | 0.496198 | 75 | 28.277778 | 18 |
starcoderdata
|
# Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2020 Dan
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see
import os
import random
import unittest
import tgcrypto
class TestIGE256Input(unittest.TestCase):
TYPE_ERROR_PATTERN = r"'\w+' does not support the buffer interface|a bytes-like object is required, not '\w+'"
def test_ige256_encrypt_invalid_args_count(self):
with self.assertRaisesRegex(TypeError, r"function takes exactly \d arguments \(\d given\)"):
tgcrypto.ige256_encrypt(os.urandom(16), os.urandom(32))
def test_ige256_encrypt_invalid_args_type(self):
with self.assertRaisesRegex(TypeError, self.TYPE_ERROR_PATTERN):
tgcrypto.ige256_encrypt(1, 2, 3)
def test_ige256_encrypt_empty_data(self):
with self.assertRaisesRegex(ValueError, r"Data must not be empty"):
tgcrypto.ige256_encrypt(b"", os.urandom(32), os.urandom(32))
def test_ige256_encrypt_invalid_key_size(self):
with self.assertRaisesRegex(ValueError, r"Key size must be exactly 32 bytes"):
tgcrypto.ige256_encrypt(os.urandom(16), os.urandom(31), os.urandom(32))
def test_ige256_encrypt_invalid_iv_size(self):
with self.assertRaisesRegex(ValueError, r"IV size must be exactly 32 bytes"):
tgcrypto.ige256_encrypt(os.urandom(16), os.urandom(32), os.urandom(31))
def test_ige256_decrypt_invalid_args_count(self):
with self.assertRaisesRegex(TypeError, r"function takes exactly \d arguments \(\d given\)"):
tgcrypto.ige256_decrypt(os.urandom(16), os.urandom(32))
def test_ige256_decrypt_invalid_args_type(self):
with self.assertRaisesRegex(TypeError, self.TYPE_ERROR_PATTERN):
tgcrypto.ige256_decrypt(1, 2, 3)
def test_ige256_decrypt_empty_data(self):
with self.assertRaisesRegex(ValueError, r"Data must not be empty"):
tgcrypto.ige256_decrypt(b"", os.urandom(32), os.urandom(32))
def test_ige256_decrypt_invalid_key_size(self):
with self.assertRaisesRegex(ValueError, r"Key size must be exactly 32 bytes"):
tgcrypto.ige256_decrypt(os.urandom(16), os.urandom(31), os.urandom(32))
def test_ige256_decrypt_invalid_iv_size(self):
with self.assertRaisesRegex(ValueError, r"IV size must be exactly 32 bytes"):
tgcrypto.ige256_decrypt(os.urandom(16), os.urandom(32), os.urandom(31))
class TestIGE256Random(unittest.TestCase):
DATA_CHUNK_MAX_SIZE = 64
KEY_SIZE = 32
IV_SIZE = 32
TESTS_AMOUNT = 500
TEMPLATE = """
def test_ige256_random_{mode1}_{count}(self):
data = {data}
key = {key}
iv = {iv}
a = tgcrypto.ige256_{mode1}(data, key, iv)
b = tgcrypto.ige256_{mode2}(a, key, iv)
self.assertEqual(data, b)
""".replace("\n ", "\n")
for count in range(TESTS_AMOUNT):
exec(
TEMPLATE.format(
mode1="encrypt",
mode2="decrypt",
count=count,
data=os.urandom(random.randint(1, DATA_CHUNK_MAX_SIZE) * 16),
key=os.urandom(KEY_SIZE),
iv=os.urandom(IV_SIZE),
)
)
for count in range(TESTS_AMOUNT):
exec(
TEMPLATE.format(
mode1="decrypt",
mode2="encrypt",
count=count,
data=os.urandom(random.randint(1, DATA_CHUNK_MAX_SIZE) * 16),
key=os.urandom(KEY_SIZE),
iv=os.urandom(IV_SIZE),
)
)
if __name__ == "__main__":
unittest.main()
|
python
| 16 | 0.639306 | 114 | 36.608696 | 115 |
starcoderdata
|
<?php
// hostname or ip of server (for local testing, localhost should work)
$dbServer='localhost';
// username and password to log onto db server (what you entered in Step 4)
$dbUser='jim17';
$dbPass=''; // password value was redacted in the source
// name of database (what you created in step 4)
$dbName='test';
$link = mysql_connect("$dbServer", "$dbUser", "$dbPass") or die("Could not connect");
print "Connected successfully
mysql_select_db("$dbName") or die("Could not select database");
print "Database selected successfully
// close connection
mysql_close($link);
?>
|
php
| 9 | 0.688776 | 89 | 28.4 | 20 |
starcoderdata
|
/**
* This Source Code Form is subject to the terms of the Mozilla Public License,
* v. 2.0. If a copy of the MPL was not distributed with this file, You can
* obtain one at http://mozilla.org/MPL/2.0/. OpenMRS is also distributed under
* the terms of the Healthcare Disclaimer located at http://openmrs.org/license.
*
* Copyright (C) OpenMRS Inc. OpenMRS is a registered trademark and the OpenMRS
* graphic logo is a trademark of OpenMRS Inc.
*/
package org.openmrs.module.immunizationapi.api.impl;
import org.openmrs.Patient;
import org.openmrs.api.APIException;
import org.openmrs.api.UserService;
import org.openmrs.api.context.Context;
import org.openmrs.api.impl.BaseOpenmrsService;
import org.openmrs.module.immunizationapi.AdministeredVaccine;
import org.openmrs.module.immunizationapi.SearchMode;
import org.openmrs.module.immunizationapi.VaccineConfiguration;
import org.openmrs.module.immunizationapi.api.ImmunizationAPIService;
import org.openmrs.module.immunizationapi.api.dao.AdministeredVaccineDao;
import org.openmrs.module.immunizationapi.api.dao.VaccineConfigurationDao;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
public class ImmunizationAPIServiceImpl extends BaseOpenmrsService implements ImmunizationAPIService {
@Autowired
private VaccineConfigurationDao vaccineConfigurationDao;
@Autowired
private AdministeredVaccineDao administeredVaccineDao;
UserService userService;
/**
* Injected in moduleApplicationContext.xml
*/
public void setUserService(UserService userService) {
this.userService = userService;
}
@Override
public VaccineConfiguration saveVaccineConfiguration(VaccineConfiguration vaccineConfiguration) throws APIException {
return vaccineConfigurationDao.saveOrUpdate(vaccineConfiguration);
}
@Override
public VaccineConfiguration retireVaccineConfiguration(VaccineConfiguration vaccineConfiguration, String reason)
throws APIException {
return vaccineConfigurationDao.saveOrUpdate(vaccineConfiguration);
}
@Override
public VaccineConfiguration unretireVaccineConfiguration(VaccineConfiguration vaccineConfiguration) throws APIException {
return vaccineConfigurationDao.saveOrUpdate(vaccineConfiguration);
}
@Override
public void purgeVaccineConfiguration(VaccineConfiguration vaccineConfiguration) throws APIException {
vaccineConfigurationDao.delete(vaccineConfiguration);
}
@Override
public VaccineConfiguration getVaccineConfigurationById(Integer id) throws APIException {
return vaccineConfigurationDao.getById(id);
}
@Override
public VaccineConfiguration getVaccineConfigurationByUuid(String uuid) throws APIException {
return vaccineConfigurationDao.getByUuid(uuid);
}
@Override
	public List<VaccineConfiguration> getAllVaccineConfigurations() throws APIException {
return vaccineConfigurationDao.getAll(false);
}
@Override
	public List<VaccineConfiguration> getAllVaccineConfigurations(boolean includeRetired) throws APIException {
return vaccineConfigurationDao.getAll(includeRetired);
}
@Override
	public List<VaccineConfiguration> getAllVaccineConfigurations(boolean includeRetired, Integer firstResult,
Integer maxResult) throws APIException {
return vaccineConfigurationDao.getAll(includeRetired, firstResult, maxResult);
}
@Override
public Integer getCountOfAllVaccineConfigurations() throws APIException {
return vaccineConfigurationDao.getAllCount(false);
}
@Override
public Integer getCountOfAllVaccineConfigurations(boolean includeRetired) throws APIException {
return vaccineConfigurationDao.getAllCount(includeRetired);
}
@Override
public Integer getCountOfSearchVaccineConfigurations(String searchText, SearchMode mode, boolean includeRetired,
Integer firstResut, Integer maxResults) throws APIException {
return vaccineConfigurationDao.getCountOfSearch(searchText, mode, includeRetired, firstResut, maxResults);
}
@Override
public Integer getCountOfSearchVaccineConfigurations(String searchText, SearchMode mode, Integer firstResut,
Integer maxResults) throws APIException {
return vaccineConfigurationDao.getCountOfSearch(searchText, mode, false, firstResut, maxResults);
}
@Override
public Integer getCountOfSearchVaccineConfigurations(String searchText, SearchMode mode, boolean includeRetired)
throws APIException {
return vaccineConfigurationDao.getCountOfSearch(searchText, mode, includeRetired, null, null);
}
@Override
public Integer getCountOfSearchVaccineConfigurations(String searchText, SearchMode mode) throws APIException {
return vaccineConfigurationDao.getCountOfSearch(searchText, mode, false, null, null);
}
@Override
	public List<VaccineConfiguration> searchVaccineConfigurations(String searchText, SearchMode mode,
boolean includeRetired, Integer firstResut, Integer maxResults) throws APIException {
return vaccineConfigurationDao.search(searchText, mode, includeRetired, firstResut, maxResults);
}
@Override
	public List<VaccineConfiguration> searchVaccineConfigurations(String searchText, SearchMode mode, Integer firstResut,
Integer maxResults) throws APIException {
return vaccineConfigurationDao.search(searchText, mode, false, firstResut, maxResults);
}
@Override
	public List<VaccineConfiguration> searchVaccineConfigurations(String searchText, Integer firstResut, Integer maxResults)
throws APIException {
return vaccineConfigurationDao.search(searchText, SearchMode.ANYWHERE, false, firstResut, maxResults);
}
@Override
	public List<VaccineConfiguration> searchVaccineConfigurations(String searchText, SearchMode mode, boolean includeRetired)
throws APIException {
return vaccineConfigurationDao.search(searchText, mode, false, null, null);
}
@Override
	public List<VaccineConfiguration> searchVaccineConfigurations(String searchText) throws APIException {
return vaccineConfigurationDao.search(searchText, SearchMode.ANYWHERE, false, null, null);
}
/***** AdministeredVaccine related stuff ***/
@Override
public AdministeredVaccine saveAdministeredVaccine(AdministeredVaccine administeredVaccine) throws APIException {
return administeredVaccineDao.saveOrUpdate(administeredVaccine);
}
@Override
public AdministeredVaccine voidAdministeredVaccine(AdministeredVaccine administeredVaccine, String reason)
throws APIException {
Context.getObsService().voidObs(administeredVaccine.getObs(), reason);
return administeredVaccineDao.saveOrUpdate(administeredVaccine);
}
@Override
public AdministeredVaccine unVoidAdministeredVaccine(AdministeredVaccine administeredVaccine) throws APIException {
		// Unvoid the associated obs.
Context.getObsService().unvoidObs(administeredVaccine.getObs());
return administeredVaccineDao.saveOrUpdate(administeredVaccine);
}
@Override
public void purgeAdministeredVaccine(AdministeredVaccine administeredVaccine) throws APIException {
administeredVaccineDao.delete(administeredVaccine);
}
@Override
public AdministeredVaccine getAdministeredVaccineById(Integer id) throws APIException {
return administeredVaccineDao.getById(id);
}
@Override
public AdministeredVaccine getAdministeredVaccineByUuid(String uuid) throws APIException {
return administeredVaccineDao.getByUuid(uuid);
}
@Override
	public List<AdministeredVaccine> getAllAdministeredVaccines() throws APIException {
return administeredVaccineDao.getAll(false);
}
@Override
	public List<AdministeredVaccine> getAllAdministeredVaccines(boolean includeVoided) throws APIException {
return administeredVaccineDao.getAll(includeVoided);
}
@Override
public Integer getCountOfAllAdministeredVaccines() throws APIException {
return administeredVaccineDao.getAllCount(false);
}
@Override
public Integer getCountOfAllAdministeredVaccines(boolean includeVoided) throws APIException {
return administeredVaccineDao.getAllCount(includeVoided);
}
@Override
	public List<AdministeredVaccine> getAdministeredVaccinesForPatient(Patient patient,
VaccineConfiguration vaccineConfiguration, Integer startIndex, Integer limit, boolean includeVoided) {
return administeredVaccineDao.getAdministeredVaccinesForPatient(patient, vaccineConfiguration, startIndex, limit,
includeVoided);
}
@Override
	public List<AdministeredVaccine> getAdministeredVaccinesForPatient(Patient patient) {
return administeredVaccineDao.getAdministeredVaccinesForPatient(patient, null, null, null, false);
}
@Override
	public List<AdministeredVaccine> getAdministeredVaccinesForPatient(Patient patient,
VaccineConfiguration vaccineConfiguration) {
return administeredVaccineDao.getAdministeredVaccinesForPatient(patient, vaccineConfiguration, null, null, false);
}
@Override
	public List<AdministeredVaccine> getAdministeredVaccinesForPatient(Patient patient, Integer startIndex, Integer limit) {
return administeredVaccineDao.getAdministeredVaccinesForPatient(patient, null, startIndex, limit, false);
}
@Override
	public List<AdministeredVaccine> getAdministeredVaccinesForPatient(Patient patient, Integer startIndex, Integer limit,
boolean includeVoided) {
return administeredVaccineDao.getAdministeredVaccinesForPatient(patient, null, startIndex, limit, includeVoided);
}
@Override
	public List<AdministeredVaccine> getAdministeredVaccinesForPatient(Patient patient, boolean includeVoided) {
return administeredVaccineDao.getAdministeredVaccinesForPatient(patient, null, null, null, includeVoided);
}
@Override
	public List<AdministeredVaccine> getAdministeredVaccinesForVaccineConfiguration(
VaccineConfiguration vaccineConfiguration, Integer startIndex, Integer limit, boolean includeVoided) {
return administeredVaccineDao.getAdministeredVaccineForVaccineConfiguration(vaccineConfiguration, startIndex, limit,
includeVoided);
}
}
|
java
| 10 | 0.823524 | 122 | 38.983936 | 249 |
starcoderdata
|
def split(xs, n):
k, m = divmod(len(xs), n)
return (xs[i * k + min(i, m):(i + 1) * k + min(i + 1, m)] for i in range(n))
def get_path_by_pattern(pattern: str, xs: range, rhs_char: str):
xs_rem = list(split(xs, 2))[pattern[0] == rhs_char]
return xs_rem[0] if len(pattern) == 1 else get_path_by_pattern(pattern[1:], xs_rem, rhs_char)
def get_row(pattern: str):
return get_path_by_pattern(pattern, range(0, 128), 'B')
def get_column(pattern: str):
return get_path_by_pattern(pattern, range(0, 8), 'R')
def get_seat_id(pattern: str):
return get_row(pattern[:7]) * 8 + get_column(pattern[-3:])
input_list = []
with open("input.txt", "r") as f:
input_list = f.read().splitlines()
# solution part 1
# print(max(map(get_seat_id, input_list)))
all_occupied_seats = set(map(get_seat_id, input_list))
empty_seats = set(range(0, max(all_occupied_seats))) - all_occupied_seats
for empty_seat in empty_seats:
if empty_seat+1 in all_occupied_seats and empty_seat-1 in all_occupied_seats:
print(empty_seat)
exit(0)
|
python
| 11 | 0.634652 | 97 | 33.258065 | 31 |
starcoderdata
|
package org.infinispan.client.hotrod;
import static org.infinispan.client.hotrod.test.HotRodClientTestingUtil.killRemoteCacheManager;
import static org.infinispan.client.hotrod.test.HotRodClientTestingUtil.killServers;
import static org.infinispan.server.hotrod.test.HotRodTestingUtil.hotRodCacheConfiguration;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertTrue;
import java.net.InetSocketAddress;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.pool.impl.GenericKeyedObjectPool;
import org.infinispan.Cache;
import org.infinispan.client.hotrod.configuration.ExhaustedAction;
import org.infinispan.client.hotrod.impl.transport.tcp.TcpTransportFactory;
import org.infinispan.client.hotrod.test.HotRodClientTestingUtil;
import org.infinispan.client.hotrod.test.InternalRemoteCacheManager;
import org.infinispan.commands.VisitableCommand;
import org.infinispan.context.InvocationContext;
import org.infinispan.interceptors.base.CommandInterceptor;
import org.infinispan.server.hotrod.HotRodServer;
import org.infinispan.test.MultipleCacheManagersTest;
import org.infinispan.test.fwk.TestCacheManagerFactory;
import org.infinispan.util.logging.Log;
import org.infinispan.util.logging.LogFactory;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.Test;
/**
* @author
* @since 4.1
*/
@Test(testName = "client.hotrod.ClientConnectionPoolingTest", groups="functional")
public class ClientConnectionPoolingTest extends MultipleCacheManagersTest {
private static final Log log = LogFactory.getLog(ClientConnectionPoolingTest.class);
Cache<String, String> c1;
Cache<String, String> c2;
private HotRodServer hotRodServer1;
private HotRodServer hotRodServer2;
RemoteCache<String, String> remoteCache;
private RemoteCacheManager remoteCacheManager;
private GenericKeyedObjectPool<?, ?> connectionPool;
private InetSocketAddress hrServ1Addr;
private InetSocketAddress hrServ2Addr;
private WorkerThread workerThread1;
private WorkerThread workerThread2;
private WorkerThread workerThread3;
private WorkerThread workerThread4;
private WorkerThread workerThread5;
private WorkerThread workerThread6;
@Override
protected void assertSupportedConfig() {
// No-op
}
@Override
protected void createCacheManagers() throws Throwable {
// The caches are not configured to form a cluster
// so the client will have to use round-robin for balancing.
// This means requests will alternate between server 1 and server 2.
c1 = TestCacheManagerFactory.createCacheManager(hotRodCacheConfiguration()).getCache();
c2 = TestCacheManagerFactory.createCacheManager(hotRodCacheConfiguration()).getCache();
registerCacheManager(c1.getCacheManager(), c2.getCacheManager());
hotRodServer1 = HotRodClientTestingUtil.startHotRodServer(c1.getCacheManager());
hotRodServer2 = HotRodClientTestingUtil.startHotRodServer(c2.getCacheManager());
String servers = HotRodClientTestingUtil.getServersString(hotRodServer1, hotRodServer2);
org.infinispan.client.hotrod.configuration.ConfigurationBuilder clientBuilder =
new org.infinispan.client.hotrod.configuration.ConfigurationBuilder();
clientBuilder
.connectionPool()
.maxActive(2)
.maxTotal(8)
.maxIdle(6)
.exhaustedAction(ExhaustedAction.WAIT)
.testOnBorrow(false)
.testOnReturn(false)
.timeBetweenEvictionRuns(-2)
.minEvictableIdleTime(7)
.testWhileIdle(true)
.minIdle(-5)
.lifo(true)
.addServers(servers);
remoteCacheManager = new InternalRemoteCacheManager(clientBuilder.build());
remoteCache = remoteCacheManager.getCache();
TcpTransportFactory tcpConnectionFactory = (TcpTransportFactory) ((InternalRemoteCacheManager) remoteCacheManager).getTransportFactory();
connectionPool = (GenericKeyedObjectPool<?, ?>) tcpConnectionFactory.getConnectionPool();
workerThread1 = new WorkerThread(remoteCache);
workerThread2 = new WorkerThread(remoteCache);
workerThread3 = new WorkerThread(remoteCache);
workerThread4 = new WorkerThread(remoteCache);
workerThread5 = new WorkerThread(remoteCache);
workerThread6 = new WorkerThread(remoteCache);
hrServ1Addr = new InetSocketAddress("localhost", hotRodServer1.getPort());
hrServ2Addr = new InetSocketAddress("localhost", hotRodServer2.getPort());
}
@AfterMethod
public void tearDown() throws ExecutionException, InterruptedException {
killServers(hotRodServer1, hotRodServer2);
workerThread1.stop();
workerThread2.stop();
workerThread3.stop();
workerThread4.stop();
workerThread5.stop();
workerThread6.stop();
workerThread1.awaitTermination();
workerThread2.awaitTermination();
workerThread3.awaitTermination();
workerThread4.awaitTermination();
workerThread5.awaitTermination();
workerThread6.awaitTermination();
killRemoteCacheManager(remoteCacheManager);
}
@Test
public void testPropsCorrectlySet() {
assertEquals(2, connectionPool.getMaxActive());
assertEquals(8, connectionPool.getMaxTotal());
assertEquals(6, connectionPool.getMaxIdle());
assertEquals(1, connectionPool.getWhenExhaustedAction());
assertFalse(connectionPool.getTestOnBorrow());
assertFalse(connectionPool.getTestOnReturn());
assertEquals(-2, connectionPool.getTimeBetweenEvictionRunsMillis());
assertEquals(7, connectionPool.getMinEvictableIdleTimeMillis());
assertTrue(connectionPool.getTestWhileIdle());
assertEquals(-5, connectionPool.getMinIdle());
assertTrue(connectionPool.getLifo());
}
public void testMaxActiveReached() throws Exception {
workerThread1.put("k1", "v1");
workerThread1.put("k2", "v2");
// verify that each cache got a request
assertEquals(1, c1.size());
assertEquals(1, c2.size());
assertEquals("v1", remoteCache.get("k1"));
assertEquals(1, c1.size());
assertEquals("v2", remoteCache.get("k2"));
assertEquals(1, c2.size());
// there should be no active connections to any server
assertEquals(0, connectionPool.getNumActive(hrServ1Addr));
assertEquals(0, connectionPool.getNumActive(hrServ2Addr));
assertEquals(1, connectionPool.getNumIdle(hrServ1Addr));
assertEquals(1, connectionPool.getNumIdle(hrServ2Addr));
// install an interceptor that will block all requests on the server until the allow() call
DelayTransportInterceptor dt1 = new DelayTransportInterceptor(true);
DelayTransportInterceptor dt2 = new DelayTransportInterceptor(true);
c1.getAdvancedCache().addInterceptor(dt1, 0);
c2.getAdvancedCache().addInterceptor(dt2, 0);
log.info("Cache operations blocked");
try {
// start one operation on each server, using the existing connections
workerThread1.putAsync("k3", "v3");
workerThread2.putAsync("k4", "v4");
log.info("Async calls for k3 and k4 is done.");
eventually(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
return 1 == connectionPool.getNumActive(hrServ1Addr) &&
1 == connectionPool.getNumActive(hrServ2Addr) &&
0 == connectionPool.getNumIdle(hrServ1Addr) &&
0 == connectionPool.getNumIdle(hrServ2Addr);
}
});
// another operation for each server, creating new connections
workerThread3.putAsync("k5", "v5");
workerThread4.putAsync("k6", "v6");
eventually(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
return 2 == connectionPool.getNumActive(hrServ1Addr) &&
2 == connectionPool.getNumActive(hrServ2Addr) &&
0 == connectionPool.getNumIdle(hrServ1Addr) &&
0 == connectionPool.getNumIdle(hrServ2Addr);
}
});
// we've reached the connection pool limit, the new operations will block
// until a connection is released
workerThread5.putAsync("k7", "v7");
workerThread6.putAsync("k8", "v8");
Thread.sleep(2000); //sleep a bit longer to make sure the async threads do their job
assertEquals(2, connectionPool.getNumActive(hrServ1Addr));
assertEquals(2, connectionPool.getNumActive(hrServ2Addr));
assertEquals(0, connectionPool.getNumIdle(hrServ1Addr));
assertEquals(0, connectionPool.getNumIdle(hrServ2Addr));
}
catch (Exception e) {
log.error(e);
} finally {
//now allow
dt1.allow();
dt2.allow();
}
// give the servers some time to process the operations
eventually(new Condition() {
@Override
public boolean isSatisfied() throws Exception {
return connectionPool.getNumActive() == 0;
}
}, 1000);
assertExistKeyValue("k3", "v3");
assertExistKeyValue("k4", "v4");
assertExistKeyValue("k5", "v5");
assertExistKeyValue("k6", "v6");
assertExistKeyValue("k7", "v7");
assertExistKeyValue("k8", "v8");
// all the connections have been released to the pool, but haven't been closed
assertEquals(0, connectionPool.getNumActive(hrServ1Addr));
assertEquals(0, connectionPool.getNumActive(hrServ2Addr));
assertEquals(2, connectionPool.getNumIdle(hrServ1Addr));
assertEquals(2, connectionPool.getNumIdle(hrServ2Addr));
}
private void assertExistKeyValue(String key, String value) throws InterruptedException {
boolean exists = false;
for (int i = 0; i < 10; i++) {
exists = value.equals(remoteCache.get(key)) || value.equals(remoteCache.get(key));
if (exists) break;
Thread.sleep(1000);
}
assertEquals("key value not found: (" + key + ", " + value + ")", true, exists);
}
public static class DelayTransportInterceptor extends CommandInterceptor {
private final ReentrantLock lock = new ReentrantLock();
public DelayTransportInterceptor(boolean lock) {
if (lock)
block();
}
@Override
protected Object handleDefault(InvocationContext ctx, VisitableCommand command) throws Throwable {
log.trace("Acquiring lock. " + lockInfo());
lock.lock();
try {
return super.handleDefault(ctx, command);
} finally {
log.trace("Done operation, releasing lock" + lockInfo());
lock.unlock();
}
}
private String lockInfo() {
return " Is locked? " + lock.isLocked() + ". Lock held by me? " + lock.isHeldByCurrentThread();
}
public void block() {
log.trace("block. " + lockInfo());
lock.lock();
}
public void allow() {
log.trace("allow." + lockInfo());
lock.unlock();
}
}
}
|
java
| 19 | 0.691881 | 143 | 38.024055 | 291 |
starcoderdata
|
/*
* Copyright (c) 2017 All rights reserved.
*
* Licensed under the MIT License. See LICENSE file in the project root for full license
* information.
*
* JUnit framework component copyright (c) 2002-2017 JUnit. All Rights Reserved. Licensed under
* Eclipse Public License - v 1.0. You may obtain a copy of the License at
* https://www.eclipse.org/legal/epl-v10.html.
*/
package com.bynder.sdk.util;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import com.bynder.sdk.model.Credentials;
import com.bynder.sdk.model.HttpConnectionSettings;
import com.bynder.sdk.query.ApiField;
import com.bynder.sdk.query.ConversionType;
import com.bynder.sdk.query.MetapropertyField;
import io.reactivex.Observable;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.InvalidParameterException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import retrofit2.Response;
import retrofit2.http.GET;
/**
* Tests the {@link Utils} class methods.
*/
public class UtilsTest {
/**
* Tests that {@link Utils#buildMapFromResponse(String)} correctly converts the API response to
* a {@link Map}.
*/
@Test
public void buildMapFromResponseTest() {
String keyA = "keyA";
String keyB = "keyB";
String valueA = "valueA";
String valueB = "valueB";
String response = keyA.concat(Utils.STR_EQUALS).concat(valueA).concat(Utils.STR_AND)
.concat(keyB).concat(Utils.STR_EQUALS).concat(valueB);
Map<String, String> map = Utils.buildMapFromResponse(response);
assertNotNull(map);
assertEquals(2, map.size());
assertEquals(valueA, map.get(keyA));
assertEquals(valueB, map.get(keyB));
response = keyA.concat(Utils.STR_EQUALS).concat(valueA);
map = Utils.buildMapFromResponse(response);
assertNotNull(map);
assertEquals(1, map.size());
assertEquals(valueA, map.get(keyA));
}
/**
* Tests that if the API response has invalid format the expected exception is thrown by
* {@link Utils#buildMapFromResponse(String)}.
*/
@Test(expected = InvalidParameterException.class)
public void buildMapFromResponseFailTest() {
String keyA = "keyA";
String keyB = "keyB";
String valueA = "valueA";
String response = keyA.concat(Utils.STR_EQUALS).concat(valueA).concat(Utils.STR_AND)
.concat(keyB).concat(Utils.STR_EQUALS);
Utils.buildMapFromResponse(response);
}
/**
* Tests that
* {@link Utils#createApiService(Class, URL, Credentials, HttpConnectionSettings)}
* correctly creates the API endpoints defined in a given interface.
*/
@Test
public void createApiServiceTest() throws MalformedURLException {
TestApi testApi = Utils
.createApiService(TestApi.class, new URL("https://example.bynder.com"),
new Credentials("consumerKey", "consumerSecret", "tokenKey", "tokenSecret"),
new HttpConnectionSettings());
assertNotNull(testApi);
assertNotNull(testApi.getTestMethod());
        assertTrue(testApi.getTestMethod() instanceof Observable);
}
/**
* Tests that {@link Utils#getApiParameters(Object)} returns only the parameters for the class
* fields that have the {@link ApiField} annotation.
*/
@Test
public void getApiParametersWithoutConversionTest()
throws IllegalArgumentException, IllegalAccessException {
Map<String, String> params = Utils.getApiParameters(new TestQuery("1", "2", "3"));
assertEquals(2, params.size());
assertEquals("1", params.get("field1"));
assertEquals("2", params.get("field2"));
}
/**
* Tests that {@link Utils#getApiParameters(Object)} correctly converts the fields that specify
* a {@link ConversionType} in the {@link ApiField} annotation.
*/
@Test
public void getApiParametersWithConversionTest()
throws IllegalArgumentException, IllegalAccessException {
Map<String, String> params = Utils.getApiParameters(
new TestConversionQuery(Arrays.asList("1", "2", "3"),
new MetapropertyField("1", Arrays.asList("1", "2")), true));
assertEquals(3, params.size());
assertEquals("1,2,3", params.get("listField"));
assertEquals("1,2", params.get("metapropertyField.1"));
assertEquals("1", params.get("booleanField"));
}
/**
* API interface only used for test purposes.
*/
private interface TestApi {
@GET("api/test/")
Observable getTestMethod();
}
/**
* Query class only used for test purposes.
*/
private class TestQuery {
/**
* Query field.
*/
@ApiField(name = "field1")
private final String field1;
/**
* Query field.
*/
@ApiField(name = "field2")
private final String field2;
/**
* Query field.
*/
private final String field3;
TestQuery(final String field1, final String field2, final String field3) {
this.field1 = field1;
this.field2 = field2;
this.field3 = field3;
}
}
/**
* Query class only used for test purposes.
*/
private class TestConversionQuery {
/**
* Query field.
*/
@ApiField(name = "listField", conversionType = ConversionType.LIST_FIELD)
        private final List<String> field1;
/**
* Query field.
*/
@ApiField(name = "metapropertyField", conversionType = ConversionType.METAPROPERTY_FIELD)
private final MetapropertyField field2;
/**
* Query field.
*/
@ApiField(name = "booleanField", conversionType = ConversionType.BOOLEAN_FIELD)
private final Boolean field3;
        TestConversionQuery(final List<String> field1, final MetapropertyField field2,
final Boolean field3) {
this.field1 = field1;
this.field2 = field2;
this.field3 = field3;
}
}
}
|
java
| 16 | 0.638638 | 99 | 32.062827 | 191 |
starcoderdata
|
<?php
/* @var $buyForm BuyVipStatusForm */
/* @var $model \frontend\models\forms\ObmenkaPayForm */
/* @var $action integer | null */
use frontend\models\forms\BuyVipStatusForm;
use yii\widgets\ActiveForm;
use yii\helpers\Html;
use frontend\widgets\UserSideBarWidget;
use yii\helpers\ArrayHelper;
$this->title = 'Оплата';
?>
<div class="row">
<?php if (!Yii::$app->user->isGuest) : ?>
<div class="col-3 filter-sidebar">
<?php echo UserSideBarWidget::Widget()?>
<?php endif; ?>
<div class="col-9">
<div class="col-12 col-xl-9 content">
<h1 class="mb-4"><?php echo $this->title ?>
<?php $form = ActiveForm::begin([
'id' => 'login-form',
'action' => '/vip/cust-pay',
'options' => ['class' => 'form-horizontal'],
]) ?>
<?= $form->field($model, 'sum')->hiddenInput(['value' => $buyForm->sum])->label(false) ?>
<?php if (isset($toUser)) : ?>
<?= $form->field($model, 'toUser')->hiddenInput(['value' => $toUser])->label(false) ?>
<?php endif; ?>
<?= $form->field($model, 'action')->hiddenInput(['value' => $action])->label(false) ?>
<?= $form->field($model, 'currency')
->radioList(ArrayHelper::map(\common\models\ObmenkaCurrency::find()->all(), 'id', 'name'),
[
'item' => function($index, $label, $name, $checked, $value) {
$chec = '';
                        $return = ''; // initial markup string was lost in the source; closed so the snippet parses
if ($index == 0) $chec = 'checked';
$return .= '<input '.$chec.' id="'.mb_strtolower($label).'_label-id" type="radio" name="' . $name . '" value="' . $value . '" tabindex="'.$index.'">';
$return .= '<label for="'.mb_strtolower($label).'_label-id" class="modal-radio '.mb_strtolower($label).'_label img-label-radio">';
                        $return .= ''; // inner markup string was lost in the source
                        $return .= '</label>';
return $return;
}
])
?>
<div class="form-group">
<?= Html::submitButton('Отправить', ['class' => 'type-btn']) ?>
<?php ActiveForm::end() ?>
|
php
| 23 | 0.438298 | 182 | 30.91358 | 81 |
starcoderdata
|
/*
* ============LICENSE_START==========================================
* org.onap.music
* ===================================================================
* Copyright (c) 2017 AT&T Intellectual Property
* Copyright (c) 2019 IBM Intellectual Property
* ===================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ============LICENSE_END=============================================
* ====================================================================
*/
package org.onap.music.eelf.logging.format;
/**
* @author inam
*
*/
public enum AppMessages {
/*
* 100-199 Security/Permission Related - Authentication problems
* [ERR100E] Missing Information
* [ERR101E] Authentication error occured
*
* 200-299 Availability/Timeout Related/IO - connectivity error - connection timeout
* [ERR200E] Connectivity
* [ERR201E] Host not available
* [ERR202E] Error while connecting to Cassandra cluster
* [ERR203E] IO Error has occured
* [ERR204E] Execution Interrupted
* [ERR205E] Session Expired
* [ERR206E] Cache not authenticated
*
*
* 300-399 Data Access/Integrity Related
* [ERR300E] Incorrect data
*
* 400-499 - Cassandra Query Related
* [ERR400E] Error while processing prepared query object
* [ERR401E] Executing Session Failure for Request
* [ERR402E] Ill formed queryObject for the request
* [ERR403E] Error processing Prepared Query Object
*
* 500-599 - Locking Related
* [ERR500E] Invalid lock
* [ERR501E] Locking Error has occured
* [ERR502E] Deprecated
* [ERR503E] Failed to aquire lock store handle
* [ERR504E] Failed to create Lock Reference
* [ERR505E] Lock does not exist
* [ERR506E] Failed to aquire lock
* [ERR507E] Lock not aquired
* [ERR508E] Lock state not set
* [ERR509E] Lock not destroyed
* [ERR510E] Lock not released
* [ERR511E] Lock not deleted
* [ERR512E] Deprecated
*
*
* 600 - 699 - Music Service Errors
* [ERR600E] Error initializing the cache
*
* 700-799 Schema Interface Type/Validation - received Pay-load checksum is
* invalid - received JSON is not valid
*
* 800-899 Business/Flow Processing Related - check out to service is not
* allowed - Roll-back is done - failed to generate heat file
*
*
* 900-999 Unknown Errors - Unexpected exception
* [ERR900E] Unexpected error occured
* [ERR901E] Number format exception
*
*
* 1000-1099 Reserved - do not use
*
*/
MISSINGINFO("[ERR100E]", "Missing Information ","Details: NA", "Please check application credentials and/or headers"),
AUTHENTICATIONERROR("[ERR101E]", "Authentication error occured ","Details: NA", "Please verify application credentials"),
CONNCECTIVITYERROR("[ERR200E]"," Connectivity error","Details: NA ","Please check connectivity to external resources"),
HOSTUNAVAILABLE("[ERR201E]","Host not available","Details: NA","Please verify the host details"),
CASSANDRACONNECTIVITY("[ERR202E]","Error while connecting to Cassandra cluster",""," Please check cassandra cluster details"),
IOERROR("[ERR203E]","IO Error has occured","","Please check IO"),
EXECUTIONINTERRUPTED("[ERR204E]"," Execution Interrupted","",""),
SESSIONEXPIRED("[ERR205E]"," Session Expired","","Session has expired."),
CACHEAUTHENTICATION("[ERR206E]","Cache not authenticated",""," Cache not authenticated"),
INCORRECTDATA("[ERR300E]"," Incorrect data",""," Please verify the request payload and try again"),
MULTIPLERECORDS("[ERR301E]"," Multiple records found",""," Please verify the request payload and try again"),
ALREADYEXIST("[ERR302E]"," Record already exist",""," Please verify the request payload and try again"),
MISSINGDATA("[ERR300E]"," Incorrect data",""," Please verify the request payload and try again"),
QUERYERROR("[ERR400E]","Error while processing prepared query object",""," Please verify the query"),
SESSIONFAILED("[ERR401E]","Executing Session Failure for Request","","Please verify the session and request"),
INVALIDLOCK("[ERR500E]"," Invalid lock or acquire failed",""," Lock is not valid to aquire"),
LOCKINGERROR("[ERR501E]"," Locking Error has occured",""," Locking Error has occured"),
LOCKHANDLE("[ERR503E]","Failed to aquire lock store handle",""," Failed to aquire lock store handle"),
CREATELOCK("[ERR504E]","Failed to aquire lock store handle ","","Failed to aquire lock store handle "),
LOCKSTATE("[ERR508E]"," Lock state not set",""," Lock state not set"),
DESTROYLOCK("[ERR509E]"," Lock not destroyed",""," Lock not destroyed"),
RELEASELOCK("[ERR510E]"," Lock not released",""," Lock not released"),
DELTELOCK("[ERR511E]",""," Lock not deleted "," Lock not deleted "),
CACHEERROR("[ERR600E]"," Error initializing the cache",""," Error initializing the cache"),
UNKNOWNERROR("[ERR900E]"," Unexpected error occured",""," Please check logs for details");
private ErrorTypes eType;
private ErrorSeverity alarmSeverity;
private ErrorSeverity errorSeverity;
private String errorCode;
private String errorDescription;
private String details;
private String resolution;
AppMessages(String errorCode, String errorDescription, String details,String resolution) {
this.errorCode = errorCode;
this.errorDescription = errorDescription;
this.details = details;
this.resolution = resolution;
}
public ErrorTypes getEType() {
return eType;
}
public ErrorSeverity getAlarmSeverity() {
return alarmSeverity;
}
public ErrorSeverity getErrorSeverity() {
return errorSeverity;
}
public void setDetails(String details){ this.details=details; }
public String getDetails() {
return this.details;
}
public void setResolution(String resolution){ this.resolution=resolution; }
public String getResolution() {
return this.resolution;
}
public void setErrorCode(String errorCode){ this.errorCode=errorCode; }
public String getErrorCode() {
return this.errorCode;
}
public void setErrorDescription(String errorDescription){ this.errorDescription=errorDescription; }
public String getErrorDescription() {
return this.errorDescription;
}
}
|
java
| 13 | 0.6436 | 130 | 37.153439 | 189 |
starcoderdata
|
#!/usr/bin/env python3
# See: https://github.com/pr3d4t0r/COVIDvu/blob/master/LICENSE
# vim: set fileencoding=utf-8:
from covidvu.pipeline.vucounty import processCounties
import json
import os
# +++ constants +++
TEST_SITE_RESOURCES = 'resources/test_pipeline'
TEST_COUNTY_CASES_CSBS_US_FILE = 'counties-US-CSBS.json'
TEST_COUNTY_CASES_US_FILE = 'bogus.json'
TEST_SITE_DATA = 'resources/test_pipeline'
# --- tests ---
def test_processCounties():
dataset = processCounties(
TEST_SITE_RESOURCES,
TEST_COUNTY_CASES_CSBS_US_FILE,
TEST_SITE_DATA,
TEST_COUNTY_CASES_US_FILE)
assert 'California' in dataset
assert 'San Francisco' in dataset['California']
assert dataset['California']['San Francisco']['confirmed'] > 0
def test_processCounties_JSON():
# It must follow test_processCounties()
testFileName = os.path.join(TEST_SITE_DATA, TEST_COUNTY_CASES_US_FILE)
dataset = json.loads(open(testFileName, 'r').read())
os.unlink(testFileName)
assert 'California' in dataset
assert 'San Francisco' in dataset['California']
assert dataset['California']['San Francisco']['confirmed'] > 0
|
python
| 12 | 0.674029 | 83 | 28.177778 | 45 |
starcoderdata
|
package fly.admin.repository;
import fly.admin.entity.model.AdminRolePermission;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.List;
public interface AdminRolePermissionRepository extends JpaRepository<AdminRolePermission, Integer> {
void deleteByRoleId(int roleId);
    List<AdminRolePermission> findByRoleId(Integer roleId);
    List<AdminRolePermission> findByRoleIdIn(List<Integer> roleIds);
}
|
java
| 8 | 0.818594 | 100 | 28.4 | 15 |
starcoderdata
|
<?php
namespace CaseStoreCaseStudyFieldTypeStringBundle\Repository;
use CaseStoreBundle\Entity\CaseStudy;
use CaseStoreBundle\Entity\CaseStudyFieldDefinition;
use Doctrine\ORM\EntityRepository;
use Doctrine\ORM\Mapping as ORM;
/**
* @license 3-clause BSD
* @link https://github.com/CaseStore/CaseStore-Core
*/
class CaseStudyFieldValueStringCacheRepository extends EntityRepository
{
}
|
php
| 6 | 0.815657 | 71 | 21 | 18 |
starcoderdata
|
def __init__(self, user_agent, consumer_key=None, consumer_secret=None, token=None, secret=None):
"""An interface to the Discogs API."""
self.user_agent = user_agent
self.verbose = False
self._fetcher = RequestsFetcher()
if consumer_key and consumer_secret:
self.set_consumer_key(consumer_key, consumer_secret)
if token and secret:
self.set_token(token, secret)
|
python
| 10 | 0.61991 | 97 | 43.3 | 10 |
inline
|
using System;
using System.Linq;
using FluentAssertions;
using JobTimer.Data.Access.JobTimer;
using JobTimer.Data.Model.JobTimer;
using JobTimer.Utils;
using NSubstitute;
using NUnit.Framework;
using Ploeh.AutoFixture;
namespace JobTimer.Data.Access.Test
{
[TestFixture]
public class TimerTest
{
private JobTimerDbContext _context;
Fixture _fixture;
private TimerAccess _sut;
private ITimeBuilder _timeBuilder;
[SetUp]
public void Setup()
{
_fixture = new Fixture();
            _timeBuilder = Substitute.For<ITimeBuilder>();
_context = new JobTimerDbContext();
_sut = new TimerAccess(_context, _timeBuilder);
}
[TearDown]
public void TearDown()
{
_context.Dispose();
}
[Test]
public async void timer_insert_and_delete()
{
var timerTypeAccess = new TimerTypeAccess(_context);
var item = new Timer
{
TimerTypeId = 2,
                UserName = _fixture.Create<string>(),
                // Date and Time element types below are assumed; the original generic arguments were lost
                Date = _fixture.Create<DateTime>(),
                Time = _fixture.Create<TimeSpan>()
};
await _sut.SaveAsync(item);
item.ID.Should().NotBe(0);
var loaded = await _sut.LoadAsync(item.ID);
var typeLoaded = await timerTypeAccess.LoadAsync(item.TimerTypeId);
loaded.UserName.Should().Be(item.UserName);
loaded.TimerType.Type.Should().Be(typeLoaded.Type);
loaded.Date.Should().Be(item.Date);
loaded.Time.Should().Be(item.Time);
var deletedCount = await _sut.DeleteAsync(loaded);
deletedCount.Should().BeGreaterOrEqualTo(1);
}
//[Test]
//public async void timer_get_at_least_one()
//{
// var hrs = await timerAccess.GetAllAsync();
// hrs.Count.Should().BeGreaterThan(0);
//}
}
}
|
c#
| 18 | 0.571224 | 79 | 27.753425 | 73 |
starcoderdata
|
#ifndef AOM_DSP_DAALA_TX_H_
#define AOM_DSP_DAALA_TX_H_
#include "aom_dsp/aom_dsp_common.h"
#include "av1/common/odintrin.h"
void daala_fdct4(const tran_low_t *input, tran_low_t *output);
void daala_idct4(const tran_low_t *input, tran_low_t *output);
void daala_fdst4(const tran_low_t *input, tran_low_t *output);
void daala_idst4(const tran_low_t *input, tran_low_t *output);
void daala_idtx4(const tran_low_t *input, tran_low_t *output);
void daala_fdct8(const tran_low_t *input, tran_low_t *output);
void daala_idct8(const tran_low_t *input, tran_low_t *output);
void daala_fdst8(const tran_low_t *input, tran_low_t *output);
void daala_idst8(const tran_low_t *input, tran_low_t *output);
void daala_idtx8(const tran_low_t *input, tran_low_t *output);
void daala_fdct16(const tran_low_t *input, tran_low_t *output);
void daala_idct16(const tran_low_t *input, tran_low_t *output);
void daala_fdst16(const tran_low_t *input, tran_low_t *output);
void daala_idst16(const tran_low_t *input, tran_low_t *output);
void daala_idtx16(const tran_low_t *input, tran_low_t *output);
void daala_fdct32(const tran_low_t *input, tran_low_t *output);
void daala_idct32(const tran_low_t *input, tran_low_t *output);
void daala_fdst32(const tran_low_t *input, tran_low_t *output);
void daala_idst32(const tran_low_t *input, tran_low_t *output);
void daala_idtx32(const tran_low_t *input, tran_low_t *output);
#if CONFIG_TX64X64
void daala_fdct64(const tran_low_t *input, tran_low_t *output);
void daala_idct64(const tran_low_t *input, tran_low_t *output);
void daala_fdst64(const tran_low_t *input, tran_low_t *output);
void daala_idst64(const tran_low_t *input, tran_low_t *output);
void daala_idtx64(const tran_low_t *input, tran_low_t *output);
#endif
void od_bin_fdct4(od_coeff y[4], const od_coeff *x, int xstride);
void od_bin_idct4(od_coeff *x, int xstride, const od_coeff y[4]);
void od_bin_fdst4(od_coeff y[4], const od_coeff *x, int xstride);
void od_bin_idst4(od_coeff *x, int xstride, const od_coeff y[4]);
void od_bin_fidtx4(od_coeff y[4], const od_coeff *x, int xstride);
void od_bin_iidtx4(od_coeff *x, int xstride, const od_coeff y[4]);
void od_bin_fdct8(od_coeff y[8], const od_coeff *x, int xstride);
void od_bin_idct8(od_coeff *x, int xstride, const od_coeff y[8]);
void od_bin_fdst8(od_coeff y[8], const od_coeff *x, int xstride);
void od_bin_idst8(od_coeff *x, int xstride, const od_coeff y[8]);
void od_bin_fidtx8(od_coeff y[8], const od_coeff *x, int xstride);
void od_bin_iidtx8(od_coeff *x, int xstride, const od_coeff y[8]);
void od_bin_fdct16(od_coeff y[16], const od_coeff *x, int xstride);
void od_bin_idct16(od_coeff *x, int xstride, const od_coeff y[16]);
void od_bin_fdst16(od_coeff y[16], const od_coeff *x, int xstride);
void od_bin_idst16(od_coeff *x, int xstride, const od_coeff y[16]);
void od_bin_fidtx16(od_coeff y[16], const od_coeff *x, int xstride);
void od_bin_iidtx16(od_coeff *x, int xstride, const od_coeff y[16]);
void od_bin_fdct32(od_coeff y[32], const od_coeff *x, int xstride);
void od_bin_idct32(od_coeff *x, int xstride, const od_coeff y[32]);
void od_bin_fdst32(od_coeff y[32], const od_coeff *x, int xstride);
void od_bin_idst32(od_coeff *x, int xstride, const od_coeff y[32]);
void od_bin_fidtx32(od_coeff y[32], const od_coeff *x, int xstride);
void od_bin_iidtx32(od_coeff *x, int xstride, const od_coeff y[32]);
#if CONFIG_TX64X64
void od_bin_fdct64(od_coeff y[64], const od_coeff *x, int xstride);
void od_bin_idct64(od_coeff *x, int xstride, const od_coeff y[64]);
void od_bin_fidtx64(od_coeff y[64], const od_coeff *x, int xstride);
void od_bin_iidtx64(od_coeff *x, int xstride, const od_coeff y[64]);
#endif
#endif
|
c
| 8 | 0.71748 | 68 | 55.415385 | 65 |
starcoderdata
|
import unittest
from colours import *
class ColourTestCase(unittest.TestCase):
def test_colours_are_red_green_blue_tuples(self):
# Arrange
# Act
c = Colour(-0.5, 0.4, 1.7)
# Assert
self.assertEqual(c.red, -0.5)
self.assertEqual(c.green, 0.4)
self.assertEqual(c.blue, 1.7)
def test_adding_colours(self):
# Arrange
c1 = Colour(0.9, 0.6, 0.75)
c2 = Colour(0.7, 0.1, 0.25)
expected = Colour(1.6, 0.7, 1.0)
# Act
c3 = c1 + c2
# Assert
self.assertEqual(c3, expected)
def test_subtracting_colours(self):
# Arrange
c1 = Colour(0.9, 0.6, 0.75)
c2 = Colour(0.7, 0.1, 0.25)
expected = Colour(0.2, 0.5, 0.5)
# Act
c3 = c1 - c2
# Assert
self.assertEqual(c3, expected)
def test_multiplying_colour_by_a_scalar(self):
# Arrange
c = Colour(0.2, 0.3, 0.4)
expected = Colour(0.4, 0.6, 0.8)
# Act
c2 = c * 2
# Assert
self.assertEqual(c2, expected)
def test_multiplying_colours(self):
# Arrange
c1 = Colour(1, 0.2, 0.4)
c2 = Colour(0.9, 1, 0.1)
expected = Colour(0.9, 0.2, 0.04)
# Act
c3 = c1 * c2
# Assert
self.assertEqual(c3, expected)
if __name__ == '__main__':
unittest.main()
|
python
| 10 | 0.502845 | 53 | 20.30303 | 66 |
starcoderdata
|
const projects = (process.env.JEST_PROJECT || "unit,int").split(",");
module.exports = {
projects: [
{
"displayName": "unit",
"moduleFileExtensions": [
"js",
"json",
"ts",
"vue",
],
"rootDir": ".",
"testRegex": "\\.(spec).(t|j)s$",
"moduleNameMapper": {
"^~/(.*)$": "
"^~~/(.*)$": "
},
"transform": {
"^.+\\.(t|j)s$": "ts-jest",
".*\\.(vue)$": "vue-jest",
},
"snapshotSerializers": [
"jest-serializer-vue",
],
"testURL": "http://localhost:3000/",
"testEnvironment": "jsdom",
},
{
"displayName": "int",
"moduleFileExtensions": [
"js",
"json",
"ts",
"vue",
],
"rootDir": ".",
"testRegex": "\\.(int-spec).(t|j)s$",
"moduleNameMapper": {
"^~/(.*)$": "
"^~~/(.*)$": "
},
"transform": {
"^.+\\.(t|j)s$": "ts-jest",
".*\\.(vue)$": "vue-jest",
},
"snapshotSerializers": [
"jest-serializer-vue",
],
"testURL": "http://localhost:3000/",
"testEnvironment": "jsdom",
},
{
"displayName": "e2e",
"moduleFileExtensions": [
"js",
"json",
"ts",
"vue",
],
"preset": "jest-puppeteer",
"rootDir": ".",
"testRegex": "\\.e2e-spec.(t|j)s$",
"moduleNameMapper": {
"^~/(.*)$": "
"^~~/(.*)$": "
},
"transform": {
"^.+\\.(t|j)s$": "ts-jest",
".*\\.(vue)$": "vue-jest",
},
"snapshotSerializers": [
"jest-serializer-vue",
],
"testURL": "http://localhost:3000/",
},
{
"displayName": "p2p",
"moduleFileExtensions": [
"js",
"json",
"ts",
"vue",
],
"preset": "jest-puppeteer",
"rootDir": ".",
"testRegex": "\\.p2p-spec.(t|j)s$",
"moduleNameMapper": {
"^~/(.*)$": "
"^~~/(.*)$": "
},
"transform": {
"^.+\\.(t|j)s$": "ts-jest",
".*\\.(vue)$": "vue-jest",
},
"snapshotSerializers": [
"jest-serializer-vue",
],
"testURL": "http://localhost:3000/",
},
].filter(p => projects.includes(p.displayName)),
"collectCoverage": true,
"coverageDirectory": "./coverage",
"collectCoverageFrom": [
"
"
],
"reporters": ["default", "jest-sonar"],
};
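// Usage sketch (the commands below are illustrative, not taken from this repo's scripts):
//   JEST_PROJECT=unit npx jest       -> runs only the "unit" project
//   JEST_PROJECT=int,e2e npx jest    -> runs the "int" and "e2e" projects
//   npx jest                         -> falls back to the default "unit,int" set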
| javascript | 13 | 0.402687 | 69 | 22.899083 | 109 | starcoderdata |
using System;
using System.Collections.Generic;
using RestSharp;
using Org.OpenAPITools.Client;
using com.spoonacular.client.model;
namespace com.spoonacular
{
///
/// Represents a collection of functions to interact with the API endpoints
///
public interface IWineApi
{
///
/// Dish Pairing for Wine Find a dish that goes well with a given wine.
///
/// <param name="wine">The type of wine that should be paired, e.g. \"merlot\", \"riesling\", or \"malbec\".
///
InlineResponse20044 GetDishPairingForWine (string wine);
///
/// Wine Description Get a simple description of a certain wine, e.g. \"malbec\", \"riesling\", or \"merlot\".
///
/// <param name="wine">The name of the wine that should be paired, e.g. \"merlot\", \"riesling\", or \"malbec\".
///
InlineResponse20046 GetWineDescription (string wine);
///
/// Wine Pairing Find a wine that goes well with a food. Food can be a dish name (\"steak\"), an ingredient name (\"salmon\"), or a cuisine (\"italian\").
///
/// <param name="food">The food to get a pairing for. This can be a dish (\"steak\"), an ingredient (\"salmon\"), or a cuisine (\"italian\").
/// <param name="maxPrice">The maximum price for the specific wine recommendation in USD.
///
InlineResponse20045 GetWinePairing (string food, decimal? maxPrice);
///
/// Wine Recommendation Get a specific wine recommendation (concrete product) for a given wine type, e.g. \"merlot\".
///
/// <param name="wine">The type of wine to get a specific product recommendation for.
/// <param name="maxPrice">The maximum price for the specific wine recommendation in USD.
/// <param name="minRating">The minimum rating of the recommended wine between 0 and 1. For example, 0.8 equals 4 out of 5 stars.
/// <param name="number">The number of wine recommendations expected (between 1 and 100).
///
InlineResponse20047 GetWineRecommendation (string wine, decimal? maxPrice, decimal? minRating, decimal? number);
}
///
/// Represents a collection of functions to interact with the API endpoints
///
public class WineApi : IWineApi
{
///
/// Initializes a new instance of the <see cref="WineApi"/> class.
///
/// <param name="apiClient"> an instance of ApiClient (optional)
///
public WineApi(ApiClient apiClient = null)
{
if (apiClient == null) // use the default one in Configuration
this.ApiClient = Configuration.DefaultApiClient;
else
this.ApiClient = apiClient;
}
///
/// Initializes a new instance of the <see cref="WineApi"/> class.
///
///
public WineApi(String basePath)
{
this.ApiClient = new ApiClient(basePath);
}
///
/// Sets the base path of the API client.
///
/// <param name="basePath">The base path
/// base path
public void SetBasePath(String basePath)
{
this.ApiClient.BasePath = basePath;
}
///
/// Gets the base path of the API client.
///
/// <param name="basePath">The base path
/// base path
public String GetBasePath(String basePath)
{
return this.ApiClient.BasePath;
}
///
/// Gets or sets the API client.
///
/// instance of the ApiClient
public ApiClient ApiClient {get; set;}
///
/// Dish Pairing for Wine Find a dish that goes well with a given wine.
///
/// <param name="wine">The type of wine that should be paired, e.g. \"merlot\", \"riesling\", or \"malbec\".
///
public InlineResponse20044 GetDishPairingForWine (string wine)
{
// verify the required parameter 'wine' is set
if (wine == null) throw new ApiException(400, "Missing required parameter 'wine' when calling GetDishPairingForWine");
var path = "/food/wine/dishes";
path = path.Replace("{format}", "json");
var queryParams = new Dictionary<String, String>();
var headerParams = new Dictionary<String, String>();
var formParams = new Dictionary<String, String>();
var fileParams = new Dictionary<String, FileParameter>();
String postBody = null;
if (wine != null) queryParams.Add("wine", ApiClient.ParameterToString(wine)); // query parameter
// authentication setting, if any
String[] authSettings = new String[] { "apiKeyScheme" };
// make the HTTP request
IRestResponse response = (IRestResponse) ApiClient.CallApi(path, Method.GET, queryParams, postBody, headerParams, formParams, fileParams, authSettings);
if (((int)response.StatusCode) >= 400)
throw new ApiException ((int)response.StatusCode, "Error calling GetDishPairingForWine: " + response.Content, response.Content);
else if (((int)response.StatusCode) == 0)
throw new ApiException ((int)response.StatusCode, "Error calling GetDishPairingForWine: " + response.ErrorMessage, response.ErrorMessage);
return (InlineResponse20044) ApiClient.Deserialize(response.Content, typeof(InlineResponse20044), response.Headers);
}
///
/// Wine Description Get a simple description of a certain wine, e.g. \"malbec\", \"riesling\", or \"merlot\".
///
/// <param name="wine">The name of the wine that should be paired, e.g. \"merlot\", \"riesling\", or \"malbec\".
///
public InlineResponse20046 GetWineDescription (string wine)
{
// verify the required parameter 'wine' is set
if (wine == null) throw new ApiException(400, "Missing required parameter 'wine' when calling GetWineDescription");
var path = "/food/wine/description";
path = path.Replace("{format}", "json");
var queryParams = new Dictionary<String, String>();
var headerParams = new Dictionary<String, String>();
var formParams = new Dictionary<String, String>();
var fileParams = new Dictionary<String, FileParameter>();
String postBody = null;
if (wine != null) queryParams.Add("wine", ApiClient.ParameterToString(wine)); // query parameter
// authentication setting, if any
String[] authSettings = new String[] { "apiKeyScheme" };
// make the HTTP request
IRestResponse response = (IRestResponse) ApiClient.CallApi(path, Method.GET, queryParams, postBody, headerParams, formParams, fileParams, authSettings);
if (((int)response.StatusCode) >= 400)
throw new ApiException ((int)response.StatusCode, "Error calling GetWineDescription: " + response.Content, response.Content);
else if (((int)response.StatusCode) == 0)
throw new ApiException ((int)response.StatusCode, "Error calling GetWineDescription: " + response.ErrorMessage, response.ErrorMessage);
return (InlineResponse20046) ApiClient.Deserialize(response.Content, typeof(InlineResponse20046), response.Headers);
}
///
/// Wine Pairing Find a wine that goes well with a food. Food can be a dish name (\"steak\"), an ingredient name (\"salmon\"), or a cuisine (\"italian\").
///
/// <param name="food">The food to get a pairing for. This can be a dish (\"steak\"), an ingredient (\"salmon\"), or a cuisine (\"italian\").
/// <param name="maxPrice">The maximum price for the specific wine recommendation in USD.
///
public InlineResponse20045 GetWinePairing (string food, decimal? maxPrice)
{
// verify the required parameter 'food' is set
if (food == null) throw new ApiException(400, "Missing required parameter 'food' when calling GetWinePairing");
var path = "/food/wine/pairing";
path = path.Replace("{format}", "json");
var queryParams = new Dictionary<String, String>();
var headerParams = new Dictionary<String, String>();
var formParams = new Dictionary<String, String>();
var fileParams = new Dictionary<String, FileParameter>();
String postBody = null;
if (food != null) queryParams.Add("food", ApiClient.ParameterToString(food)); // query parameter
if (maxPrice != null) queryParams.Add("maxPrice", ApiClient.ParameterToString(maxPrice)); // query parameter
// authentication setting, if any
String[] authSettings = new String[] { "apiKeyScheme" };
// make the HTTP request
IRestResponse response = (IRestResponse) ApiClient.CallApi(path, Method.GET, queryParams, postBody, headerParams, formParams, fileParams, authSettings);
if (((int)response.StatusCode) >= 400)
throw new ApiException ((int)response.StatusCode, "Error calling GetWinePairing: " + response.Content, response.Content);
else if (((int)response.StatusCode) == 0)
throw new ApiException ((int)response.StatusCode, "Error calling GetWinePairing: " + response.ErrorMessage, response.ErrorMessage);
return (InlineResponse20045) ApiClient.Deserialize(response.Content, typeof(InlineResponse20045), response.Headers);
}
///
/// Wine Recommendation Get a specific wine recommendation (concrete product) for a given wine type, e.g. \"merlot\".
///
/// <param name="wine">The type of wine to get a specific product recommendation for.
/// <param name="maxPrice">The maximum price for the specific wine recommendation in USD.
/// <param name="minRating">The minimum rating of the recommended wine between 0 and 1. For example, 0.8 equals 4 out of 5 stars.
/// <param name="number">The number of wine recommendations expected (between 1 and 100).
///
public InlineResponse20047 GetWineRecommendation (string wine, decimal? maxPrice, decimal? minRating, decimal? number)
{
// verify the required parameter 'wine' is set
if (wine == null) throw new ApiException(400, "Missing required parameter 'wine' when calling GetWineRecommendation");
var path = "/food/wine/recommendation";
path = path.Replace("{format}", "json");
var queryParams = new Dictionary<String, String>();
var headerParams = new Dictionary<String, String>();
var formParams = new Dictionary<String, String>();
var fileParams = new Dictionary<String, FileParameter>();
String postBody = null;
if (wine != null) queryParams.Add("wine", ApiClient.ParameterToString(wine)); // query parameter
if (maxPrice != null) queryParams.Add("maxPrice", ApiClient.ParameterToString(maxPrice)); // query parameter
if (minRating != null) queryParams.Add("minRating", ApiClient.ParameterToString(minRating)); // query parameter
if (number != null) queryParams.Add("number", ApiClient.ParameterToString(number)); // query parameter
// authentication setting, if any
String[] authSettings = new String[] { "apiKeyScheme" };
// make the HTTP request
IRestResponse response = (IRestResponse) ApiClient.CallApi(path, Method.GET, queryParams, postBody, headerParams, formParams, fileParams, authSettings);
if (((int)response.StatusCode) >= 400)
throw new ApiException ((int)response.StatusCode, "Error calling GetWineRecommendation: " + response.Content, response.Content);
else if (((int)response.StatusCode) == 0)
throw new ApiException ((int)response.StatusCode, "Error calling GetWineRecommendation: " + response.ErrorMessage, response.ErrorMessage);
return (InlineResponse20047) ApiClient.Deserialize(response.Content, typeof(InlineResponse20047), response.Headers);
}
}
}
| c# | 16 | 0.612656 | 192 | 54.059055 | 254 | starcoderdata |
// import api from '../lib/requestsManager'
import places from '../data/places_1.json'
// To experiment in console.
window.places = places
const actions = {
setCurrentPlace: (place) => {
return dispatch => {
dispatch({
type: 'SET_CURRENT_PLACE',
place
})
}
},
fetchAllPlaces: () => {
return (dispatch) => {
dispatch({
type: 'FETCH_ALL_PLACES_DONE',
places: places,
})
}
},
// fetchAllPlaces: () => {
// return (dispatch) => {
// dispatch({
// type: 'SET_IS_FETCHING_PLACES',
// })
// return (
// api.fetchAllPlaces()
// .then(res =>
// dispatch({
// type: 'FETCH_ALL_PLACES_DONE',
// places: res.data,
// })
// )
// .catch(error =>
// dispatch({
// type: 'FETCH_ALL_PLACES_FAIL',
// error,
// })
// )
// )
// }
// },
// fetchPlacesByKeyword: (q) => {
// return (dispatch) => {
// dispatch({
// type: 'SET_IS_FETCHING_PLACES',
// })
// return (
// api.fetchPlacesByKeyword(q)
// .then(res =>
// dispatch({
// type: 'FETCH_PLACES_BY_KEYWORD_DONE',
// places: res.data,
// })
// )
// .catch(error =>
// dispatch({
// type: 'FETCH_PLACES_BY_KEYWORD_FAIL',
// error,
// })
// )
// )
// }
// },
// fetchPlaceById: () => {
// return (dispatch) => {
// dispatch({
// type: 'SET_IS_FETCHING_PLACES',
// })
// return (
// api.fetchPlaceById()
// .then(res =>
// dispatch({
// type: 'FETCH_PLACE_BY_ID_DONE',
// places: res.data,
// })
// )
// .catch(error =>
// dispatch({
// type: 'FETCH_PLACE_BY_ID_FAIL',
// error,
// })
// )
// )
// }
// },
}
export default actions
| javascript | 16 | 0.385801 | 58 | 22.271739 | 92 | starcoderdata |
"""Exceptions for dingz API client."""
class DingzError(Exception):
"""General dingz exception occurred."""
pass
class DingzConnectionError(DingzError):
"""When a connection error is encountered."""
pass
class DingzNoDataAvailable(DingzError):
"""When no data is available."""
pass
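# Illustrative call site (hypothetical; fetch_device_state and log are not part of this module):
# try:
#     data = fetch_device_state()
# except DingzConnectionError as err:
#     log.warning("dingz unreachable: %s", err)
# except DingzNoDataAvailable:
#     data = None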
| python | 5 | 0.684848 | 49 | 15.5 | 20 | starcoderdata |
//#include "rosic_ConvolverPartitioned.h"
//using namespace rosic;
//-----------------------------------------------------------------------------------------------------------------------------------------
// construction/destruction:
ConvolverPartitioned::ConvolverPartitioned()
{
mutex.lock();
M = 0;
fftConvolvers = NULL;
numFftConvolvers = 0;
mutex.unlock();
}
ConvolverPartitioned::~ConvolverPartitioned()
{
mutex.lock();
if( fftConvolvers != NULL ) delete[] fftConvolvers;
mutex.unlock();
}
//-----------------------------------------------------------------------------------------------------------------------------------------
// setup:
void ConvolverPartitioned::setImpulseResponse(double *newImpulseResponse, int newLength)
{
mutex.lock();
if( newLength < 0 )
{
DEBUG_BREAK;
mutex.unlock();
return;
}
if( newLength != M )
{
M = newLength;
directConvolver.setImpulseResponse(newImpulseResponse, RAPT::rsMin(newLength, directConvolutionLength));
int accu = directConvolutionLength;
int currentLength = directConvolutionLength;
numFftConvolvers = 0;
while( newLength > accu )
{
numFftConvolvers += 1;
accu += currentLength;
currentLength *= 2;
}
if( fftConvolvers != NULL )
delete[] fftConvolvers;
fftConvolvers = new ConvolverFFT[numFftConvolvers];
int currentStart = directConvolutionLength;
currentLength = directConvolutionLength;
for(int c=0; c<numFftConvolvers; c++)
{
if( c == numFftConvolvers-1 )
{
// last block might be shorter than currentLength, so we pass a zero-padded version:
double *finalBlock = new double[currentLength];
int finalLength = newLength-currentStart; // length of non-zero part
int k;
for(k=0; k<finalLength; k++)
finalBlock[k] = newImpulseResponse[currentStart+k];
for(k=finalLength; k<currentLength; k++)
finalBlock[k] = 0.0;
fftConvolvers[c].setImpulseResponse(finalBlock, currentLength);
delete[] finalBlock;
}
else
fftConvolvers[c].setImpulseResponse(&(newImpulseResponse[currentStart]), currentLength);
currentStart += currentLength;
currentLength *= 2;
}
}
mutex.unlock();
}
//-----------------------------------------------------------------------------------------------------------------------------------------
// others:
void ConvolverPartitioned::clearImpulseResponse()
{
mutex.lock();
directConvolver.clearImpulseResponse();
for(int c=0; c<numFftConvolvers; c++)
fftConvolvers[c].clearImpulseResponse();
mutex.unlock();
}
void ConvolverPartitioned::clearInputBuffers()
{
mutex.lock();
directConvolver.clearInputBuffer();
for(int c=0; c<numFftConvolvers; c++)
fftConvolvers[c].clearInputBuffer();
mutex.unlock();
}
| c++ | 16 | 0.566127 | 139 | 26.192661 | 109 | starcoderdata |
import dotenv from 'dotenv'
dotenv.config({ path: '.env' })
dotenv.config({ path: '.env.local' })
export default {
// Global page headers (https://go.nuxtjs.dev/config-head)
publicRuntimeConfig: {
EDIT_API_URL: process.env.EDIT_API_URL,
EDIT_GITHUB_URL: process.env.EDIT_GITHUB_URL,
ALGOLIA_APP_ID: process.env.ALGOLIA_APP_ID,
ALGOLIA_API_PUBLIC_KEY: process.env.ALGOLIA_API_PUBLIC_KEY,
},
server: {
port: 3000, // default: 3000
host: '0.0.0.0', // default: localhost
},
target: 'static',
head: {
title: 'dev-portal',
meta: [
{ charset: 'utf-8' },
{ name: 'viewport', content: 'width=device-width, initial-scale=1' },
{ hid: 'description', name: 'description', property: '', content: '' },
{
hid: 'og:image',
property: 'og:image',
content: '/OG-image.png',
},
],
link: [
{
rel: 'canonical',
href: 'https://developers.chargetrip.com',
},
{ rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' },
{
rel: 'stylesheet',
href: 'https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700&family=Roboto+Mono:wght@400;500&display=swap',
},
],
script: [
{
src: 'https://cdn.usefathom.com/script.js',
defer: true,
'data-site': process.env.FATHOM_ID,
},
{
src: 'https://embed.small.chat/T33286SKGGSBRX0P8T.js',
defer: true,
},
],
},
router: {
base: '/',
extendRoutes(routes) {
routes.unshift({
path: '/',
name: 'home',
component: 'pages/_slug.vue',
})
routes.push({
path: '*',
name: `catch-all`,
component: 'pages/_slug.vue',
})
},
},
// Global CSS (https://go.nuxtjs.dev/config-css)
css: [
'~/assets/styles/index.scss',
'@chargetrip/internal-vue-components/dist/components.css',
'@chargetrip/internal-vue-components/src/assets/styles/icons.scss',
'@chargetrip/internal-vue-components/src/assets/styles/defaults.scss',
],
content: {
editor: '~/components/Editor.vue',
markdown: {
prism: {
theme: false,
},
},
},
// Plugins to run before rendering page (https://go.nuxtjs.dev/config-plugins)
plugins: [
'~/plugins/globals.ts',
'~/plugins/axios.ts',
'~/utilities/directives.ts',
'~/utilities/filters.ts',
],
// Auto import components (https://go.nuxtjs.dev/config-components)
components: true,
// Modules for dev and build (recommended) (https://go.nuxtjs.dev/config-modules)
buildModules: [
'@nuxt/image',
// https://go.nuxtjs.dev/typescript
'@nuxt/typescript-build',
// https://go.nuxtjs.dev/stylelint
'@nuxtjs/stylelint-module',
// https://go.nuxtjs.dev/tailwindcss
'@nuxtjs/pwa',
'@nuxtjs/tailwindcss',
'@nuxtjs/axios',
'@nuxt/content',
[
'@chargetrip/internal-vue-components/src/modules/file-upload',
{
CLOUDINARY_API_KEY: process.env.CLOUDINARY_API_KEY,
CLOUDINARY_API_SECRET: process.env.CLOUDINARY_API_SECRET,
ALLOWED_FORMATS: process.env.ALLOWED_FORMATS,
CLOUDINARY_CLOUD_NAME: process.env.CLOUDINARY_CLOUD_NAME,
CLOUDINARY_FOLDER: process.env.CLOUDINARY_FOLDER,
},
],
],
image: {
cloudinary: {
baseURL: `${process.env.CLOUDINARY_URL}/image/upload/`,
},
},
tailwindcss: {
jit: true,
},
// Modules (https://go.nuxtjs.dev/config-modules)
modules: [],
// Build Configuration (https://go.nuxtjs.dev/config-build)
build: {
babel: {
plugins: [
['@babel/plugin-proposal-decorators', { legacy: true }],
['@babel/plugin-proposal-private-methods', { loose: true }],
],
},
extend(config) {
config.node = {
fs: 'empty',
}
},
},
}
| javascript | 13 | 0.57817 | 125 | 26.35461 | 141 | starcoderdata |
'use strict';
class Cookies {
static getCookie(name){
let cookies = document.cookie.split(";");
for(let i in cookies) {
let cname = cookies[i].trim().split("=")[0];
if(cname == name){
return cookies[i].trim().slice(name.length + 1);
}
}
return "";
}
static setCookie(name, value, data = {SameSite: "Strict"}) {
let extra = "";
for(let key in data)
{
extra += "; " + key + "=" + data[key];
}
document.cookie = name + "=" + value + extra;
}
static setYearCookie(name, value) {
var date = new Date(Date.now());
date.setFullYear(date.getFullYear() + 1);
Cookies.setCookie(name, value, {SameSite: "Strict", expires: date.toUTCString()});
}
static removeCookie(name) {
var date = new Date(0);
Cookies.setCookie(name, "", {SameSite: "Strict", expires: date.toUTCString()});
}
}
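// Illustrative usage (cookie name and value are hypothetical):
// Cookies.setYearCookie("theme", "dark");   // persists for one year
// Cookies.getCookie("theme");               // -> "dark"
// Cookies.removeCookie("theme");            // expires the cookie immediately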
| javascript | 17 | 0.622877 | 84 | 22.864865 | 37 | starcoderdata |
public override T Deserialize<T>(string data, bool encrypted = false)
{
            // NOTE: the 'encrypted' flag is currently ignored; if honoured, the payload would need to be decrypted before decoding
var bytes = System.Text.Encoding.UTF8.GetBytes(data);
return SerializationUtility.DeserializeValue<T>(bytes, m_dataFormat);
}
| c# | 13 | 0.639286 | 81 | 39.142857 | 7 | inline |
package com.brein.api;
/**
* Exception in case of wrong configuration
*/
public class BreinInvalidConfigurationException extends RuntimeException {
public BreinInvalidConfigurationException(final Throwable e) {
super(e);
}
public BreinInvalidConfigurationException(final String msg) {
super(msg);
}
public BreinInvalidConfigurationException(final String msg, final Exception cause) {
super(msg, cause);
}
}
| java | 7 | 0.714286 | 88 | 22.1 | 20 | starcoderdata |
package cn.originx.stellaris;
import io.vertx.tp.jet.atom.JtApp;
import io.vertx.up.commune.config.Database;
/**
* @author <a href="http://www.origin-x.cn">Lang
*/
public interface OkX {
    // ---------------- Base environment ---------------------
/**
     * Reads the database configuration object.
*
* @return {@link Database}
*/
Database configDatabase();
/**
     * Reads the application configuration object.
*
* @return {@link JtApp}
*/
JtApp configApp();
}
| java | 10 | 0.556405 | 72 | 19.115385 | 26 | starcoderdata |
using System;
using System.Collections;
using System.Collections.Generic;
public class TimeHelper {
public static DateTime dt1970 = new DateTime(1970, 1, 1);
public static double ConvertTime(long timeStamp, string type = "second")
{
double result = 0;
TimeSpan time = TimeSpan.FromMilliseconds(timeStamp);
switch (type)
{
case "minute":
result = time.TotalMinutes;
break;
case "hour":
result = time.TotalHours;
break;
case "day":
result = time.TotalDays;
break;
default:
result = time.TotalSeconds;
break;
}
return result;
}
public static string ConverSecondToText(int second)
{
int hour = second / 3600;
int minutes = (second % 3600) / 60;
int seconds = (second % 3600) % 60;
string hourStr = "00";
string minStr = "00";
string secondStr = "00";
if (hour < 10) hourStr = "0" + hour;
else hourStr = hour.ToString();
if (minutes < 10) minStr = "0" + minutes;
else minStr = minutes.ToString();
if (seconds < 10) secondStr = "0" + seconds;
else secondStr = seconds.ToString();
if (hour > 0)
return hourStr + " : " + minStr + " : " + secondStr;
else
return minStr + " : " + secondStr;
}
}
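// Illustrative usage (values are hypothetical):
// TimeHelper.ConvertTime(90000, "minute");   // 90,000 ms -> 1.5
// TimeHelper.ConverSecondToText(3671);       // -> "01 : 01 : 11"
// TimeHelper.ConverSecondToText(135);        // -> "02 : 15" (hours omitted when zero)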
| c# | 12 | 0.513514 | 76 | 27.461538 | 52 | starcoderdata |
@Override
protected void onStart(Session session, SessionContext context) {
super.onStart(session, context);
// initialize session as API session if this is an API request
if (mPathMatcher.matches(ApiAuthenticationFilter.PATH,
ChatNoirServlet.getStrippedRequestURI(WebUtils.getHttpRequest(context)))) {
initApiSession(session);
}
}
| java | 11 | 0.68 | 91 | 39.1 | 10 | inline |
var sinon = require('sinon');
var find = sinon.spy(function (collectionName, options, cb) {
if (WOLFPACK.errors) {
return cb(WOLFPACK.errors);
}
if (WOLFPACK.results.find) {
return cb(null, WOLFPACK.results.find);
}
return cb(null, []);
});
var create = sinon.spy(function (collectionName, values, cb) {
if (WOLFPACK.errors) {
return cb(WOLFPACK.errors);
}
if (WOLFPACK.results.create) {
return cb(null, WOLFPACK.results.create);
}
return cb(null, values);
});
var update = sinon.spy(function (collectionName, options, values, cb) {
if (WOLFPACK.errors) {
return cb(WOLFPACK.errors);
}
if (WOLFPACK.results.update) {
return cb(null, WOLFPACK.results.update);
}
return cb(null, values);
});
var destroy = sinon.spy(function (collectionName, options, cb) {
if (WOLFPACK.errors) {
return cb(WOLFPACK.errors);
}
return cb();
});
// Set pointers for CRUD operations
WOLFPACK.CRUD = {
find: find,
create: create,
update: update,
destroy: destroy
};
module.exports = (function () {
/**
* WolfpackAdapter
*
* @module :: Adapter
* @description :: An adapter for db-less testings
* @docs :: http://github.com/fdvj/wolfpack
*
* @syncable :: false
* @schema :: false
*/
var _modelReferences = {};
var Adapter = {
syncable: false,
defaults: {
schema: false
},
/**
* registerCollection() is run multiple times (once for each model, aka collection)
* before the server ever starts. It allows us to register our models with the
* underlying adapter interface. (don't forget to cb() when you're done!)
*/
registerCollection: function (collection, cb) {
// Keep a reference to this collection
_modelReferences[collection.identity] = collection;
cb();
},
find: find,
create: create,
update: update,
destroy: destroy
};
return Adapter;
})();
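// Illustrative test usage (the require path and result values are hypothetical):
// const adapter = require('./adapter');
// WOLFPACK.results.find = [{ id: 1 }];
// adapter.find('user', {}, (err, rows) => { /* rows === [{ id: 1 }] */ });
// WOLFPACK.CRUD.find.calledOnce;   // the sinon spies are exposed for assertions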
| javascript | 17 | 0.624171 | 87 | 19.427083 | 96 | starcoderdata |
/**
* Copyright 2016 Neeve Research, LLC
*
* This product includes software developed at Neeve Research, LLC
* (http://www.neeveresearch.com/) as well as software licenced to
* Neeve Research, LLC under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership.
*
* Neeve Research licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.neeve.toa.spi;
import java.util.Properties;
import com.neeve.sma.MessageChannel;
import com.neeve.toa.TopicOrientedApplication;
import com.neeve.toa.service.ToaService;
import com.neeve.toa.service.ToaServiceChannel;
/**
* Called by a Topic Oriented Application at the time it is configured which allows all or a portion
* of a channel's dynamic key parts to be determined statically at configuration time by returning a
* initial Key Resolution Table (KRT).
*
* @see TopicOrientedApplication TopicOrientedApplication messaging configuration
*/
public interface ChannelInitialKeyResolutionTableProvider {
/**
* Called by a Topic Oriented Application at the time it is configured which allows all or a portion
* of a channel's dynamic key parts to be determined statically at configuration time by returning a
* initial Key Resolution Table (KRT).
*
* Example:
*
*
* Given:
*
* channel, channel1 with a configured key of: ORDERS/${Region}/${Product}
* initial KRT returned by this method of: {"Region": "US", "HostName": MyPC}
*
*
     * With the above KRT, the channel would be initialized with a key of
     * ORDERS/US/${Product}. The dynamic 'Region' portion of the
* key has become static while the 'Product' portion remains dynamic and
* eligible for substitution with a runtime KRT or from values reflected
* from a message reflector.
*
*
*
* The returned key resolution table is not used for individual send calls, if
* the channel key still contains dynamic portions then dynamic key resolution
* can be done on a per send basis using either the message's message reflector
* or a key resolution table provide as a argument to the send call.
*
* If more than one service share the same channel on the same bus, they will
* share the same channel key; at this time it is not possible to perform individual
* channel key resolution on a per service basis. In this sense the initial channel key
* resolution is global to a channel name. The serviceName is provided here as a hint
* to assist the application in locating a key resolution table for a channel.
*
* Initial Key Resolution adheres to the key resolution properties:
*
* {@link MessageChannel#PROP_TREAT_EMPTY_KEY_FIELD_AS_NULL} if set to true, then
* initial channel key resolution will ignore values that are 0 length strings.
* Otherwise if {@link MessageChannel#PROP_ALLOW_EMPTY_KEY_FIELD} is set then
* initial channel key resolution will fail with a ToaException if the key resolution
* table contains any value that are 0 length Strings.
*
*
* @param service The service name.
* @param channel The channel for which to perform key resolution
* @return A key resolution table to substitute some or all of the configured channel key.
*
* @see TopicOrientedApplication TopicOrientedApplication messaging configuration
*/
public Properties getInitialChannelKeyResolutionTable(ToaService service, ToaServiceChannel channel);
}
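// Illustrative provider matching the Region/Product example above
// (the class name is hypothetical):
// public class RegionKrtProvider implements ChannelInitialKeyResolutionTableProvider {
//     @Override
//     public Properties getInitialChannelKeyResolutionTable(ToaService service, ToaServiceChannel channel) {
//         Properties krt = new Properties();
//         krt.setProperty("Region", "US");   // fixes ORDERS/${Region}/${Product} to ORDERS/US/${Product}
//         return krt;
//     }
// }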
| java | 6 | 0.722469 | 105 | 46.522222 | 90 | starcoderdata |
# from data_collect import DataCollect
# from data_transform import DataTransform
# from data_insert import DataInsert
#
#
# if __name__ == '__main__':
#
# # Data Collect from H&M
# url = 'https://www2.hm.com/en_us/men/products/jeans.html'
#
# data_collect = DataCollect(url)
#
# page = data_collect.api_request()
#
# soup = data_collect.beautifulsoup_object(page)
#
# page_number = data_collect.number_of_products(soup)
#
# data_showroom = data_collect.showroom_products(page_number)
#
# data_color = data_collect.product_color(data_showroom)
#
# data_attributes = data_collect.product_attributes(data_color)
#
# data_collect = data_collect.join_data(data_showroom, data_color, data_attributes)
#
# # print( data_showroom.shape )
# # print( data_color.shape )
# # print( data_attributes.shape )
# # print( data_raw.shape )
# # print( data_raw )
# # print( data_raw.isna().sum() )
# #
# # data_showroom = pd.read_csv('../Datasets/data_showroom')
# # data_color = pd.read_csv('../Datasets/data_color')
# # data_attributes = pd.read_csv('../Datasets/data_attributes')
#
# # Data Transform
#
# # Data Showroom Transform
# showroom_transform = DataTransform(data_showroom)
#
# data_showroom_processed = showroom_transform.showroom_transform()
#
# data_showroom_processed.to_csv(
# '../Datasets/data_showroom_processed.csv', index=False
# )
#
# print( 'data_showroom_processed finished' )
#
# # Data Color Transform
# color_transform = DataTransform(data_color)
#
# data_color_processed = color_transform.color_transform()
#
# data_color_processed.to_csv(
# '../Datasets/data_color_processed.csv', index=False
# )
#
# print( 'data_color_processed finished' )
#
# # Data Composition Feature
# composition_transform = DataTransform(data_attributes)
#
# data_composition_feature = composition_transform.composition_feature()
#
# data_composition_feature.to_csv(
# '../Datasets/data_composition_feature.csv', index=False
# )
#
# print( 'data_composition_feature finished' )
#
# # Data Material Feature
# material_transform = DataTransform(data_composition_feature)
#
# data_material_feature = material_transform.material_feature()
#
# data_material_feature.to_csv(
# '../Datasets/data_material_feature.csv', index=False
# )
#
# print( 'data_material_feature finished' )
#
# # Data Attributes Transform
# attributes_transform = DataTransform(data_material_feature)
#
# data_attributes_processed = attributes_transform.attributes_transform()
#
# data_attributes_processed.to_csv(
# '../Datasets/data_attributes_processed.csv', index=False
# )
#
# print( 'data_attributes_processed finished' )
#
# # DATA LOAD
#
# # Data Showroom Insertion
# # with DataInsert(data_showroom_processed,
# # table_name='showroom_table') as showroom_insertion:
# # showroom_insertion.insert_data()
#
# showroom_insertion = DataInsert(data_showroom_processed, table_name='showroom_table')
#
# showroom_insertion.insert_data()
#
# # Data Color Insertion
# with DataInsert(data_color_processed,
# table_name='color_table') as color_insertion:
# color_insertion.insert_data()
#
# # color_insertion = DataInsert(data_color_processed, table_name='data_color')
# # color_insertion.insert_data()
#
# # Data Attributes Insertion
# with DataInsert(data_attributes_processed,
# table_name='attributes_table') as attributes_insertion:
# attributes_insertion.insert_data()
#
# # attributes_insertion = DataInsert(data_attributes_processed, table_name='data_attributes')
# # attributes_insertion.insert_data()
#
#
#
#
#
#
#
#
#
#
#
#
#
#
#
from Model.data_collect import DataCollect
from Model.data_transform import DataTransform
from Model.data_insert import DataInsert
def data_extraction(url):
data_collect = DataCollect(url)
#page = data_collect.api_request()
#soup = data_collect.beautifulsoup_object(page)
#page_number = data_collect.number_of_products(soup)
data_showroom = data_collect.showroom_products()
data_color = data_collect.product_color(data_showroom)
data_attributes = data_collect.product_attributes(data_color)
data_collect = data_collect.join_data(data_showroom, data_color, data_attributes)
return data_showroom, data_color, data_attributes
def data_transform(data_showroom, data_color, data_attributes):
# Data Showroom Transform
showroom_transform = DataTransform(data_showroom)
data_showroom_processed = showroom_transform.showroom_transform()
data_showroom_processed.to_csv(
'../Datasets/data_showroom_processed.csv', index=False
)
print('data_showroom_processed finished')
# Data Color Transform
color_transform = DataTransform(data_color)
data_color_processed = color_transform.color_transform()
data_color_processed.to_csv(
'../Datasets/data_color_processed.csv', index=False
)
print('data_color_processed finished')
# Data Composition Feature
composition_transform = DataTransform(data_attributes)
data_composition_feature = composition_transform.composition_feature()
data_composition_feature.to_csv(
'../Datasets/data_composition_feature.csv', index=False
)
print('data_composition_feature finished')
## data material feature
material_transform = DataTransform(data_composition_feature)
data_material_feature = material_transform.material_feature()
data_material_feature.to_csv(
'../Datasets/data_material_feature.csv', index=False
)
print('data_material_feature finished')
## data attributes transform
attributes_transform = DataTransform(data_material_feature)
data_attributes_processed = attributes_transform.attributes_transform()
data_attributes_processed.to_csv(
'../Datasets/data_attributes_processed.csv', index=False
)
print('data_attributes_processed finished')
return data_showroom_processed, data_color_processed, data_attributes_processed
def data_insert(data, table_name):
with DataInsert(data,
table_name=table_name) as data_insertion:
data_insertion.insert_data()
return None
if __name__ == '__main__':
# extraction
url = 'https://www2.hm.com/en_us/men/products/jeans.html'
showroom_collect, color_collect, attributes_collect = data_extraction(url=url)
# transform
showroom_process, color_process, attributes_process = data_transform(showroom_collect, color_collect,
attributes_collect)
# load
data_insert(showroom_process, table_name='showroom_table')
data_insert(color_process, table_name='color_table')
data_insert(attributes_process, table_name='attributes_table')
| python | 10 | 0.672918 | 105 | 27.962963 | 243 | starcoderdata |
def test__get_braindump(self):
# create a fake temporary submit dir and braindump.yml file
with TemporaryDirectory() as td:
bd_path = Path(td) / "braindump.yml"
with bd_path.open("w+") as bd_file:
yaml.dump({"user": "ryan", "submit_dir": "/submit_dir"}, bd_file)
bd_file.seek(0)
bd = Workflow._get_braindump(bd_path.parent)
assert bd.user == "ryan"
assert bd.submit_dir == Path("/submit_dir")
| python | 13 | 0.536062 | 81 | 45.727273 | 11 | inline |
"""
Test turning a bag into other forms.
"""
import sys
sys.path.append('.')
import simplejson
from tiddlyweb.serializer import Serializer
from tiddlyweb.model.bag import Bag
from tiddlyweb.config import config
from fixtures import bagfour
expected_string = """TiddlerOne
TiddlerTwo
TiddlerThree"""
expected_revbag_string = """TiddlerOne:0
TiddlerTwo:0
TiddlerThree:0"""
expected_html_string = """<ul id="tiddlers" class="listing">
href="/bags/bagfour/tiddlers/TiddlerOne">TiddlerOne
href="/bags/bagfour/tiddlers/TiddlerTwo">TiddlerTwo
href="/bags/bagfour/tiddlers/TiddlerThree">TiddlerThree
prefix_expected_html_string = """<ul id="tiddlers" class="listing">
href="/salacious/bags/bagfour/tiddlers/TiddlerOne">TiddlerOne
href="/salacious/bags/bagfour/tiddlers/TiddlerTwo">TiddlerTwo
href="/salacious/bags/bagfour/tiddlers/TiddlerThree">TiddlerThree
expected_html_revbag_string = """<ul id="tiddlers" class="listing">
href="/bags/bagfour/tiddlers/TiddlerOne/revisions/0">TiddlerOne:0
href="/bags/bagfour/tiddlers/TiddlerTwo/revisions/0">TiddlerTwo:0
href="/bags/bagfour/tiddlers/TiddlerThree/revisions/0">TiddlerThree:0
expected_bag_html = """
<div id="bagdesc" class="description">a tasty little bag
<div class="tiddlerslink"><a href="bagfour/tiddlers">Tiddlers in Bag bagfour
"""
def setup_module(module):
module.serializer = Serializer('text')
def test_generate_json():
serializer = Serializer('json')
bagfour.desc = 'a tasty little bag'
bagfour.policy.manage = ['NONE']
serializer.object = bagfour
string = serializer.to_string()
json = simplejson.loads(string)
assert json['policy']['manage'] == ['NONE']
assert json['desc'] == 'a tasty little bag'
def test_generated_string():
string = serializer.list_tiddlers(bagfour)
assert string == expected_string
def test_generated_string_with_revbag():
bagfour.revbag = True
string = serializer.list_tiddlers(bagfour)
assert string == expected_revbag_string
bagfour.revbag = False
def test_generated_html():
html_serializer = Serializer('html')
string = html_serializer.list_tiddlers(bagfour)
assert expected_html_string in string
def test_generated_html_with_prefix():
new_config = config.copy()
new_config['server_prefix'] = '/salacious'
environ = {'tiddlyweb.config': new_config}
html_serializer = Serializer('html', environ)
string = html_serializer.list_tiddlers(bagfour)
assert prefix_expected_html_string in string
def test_generated_wiki():
environ = {'tiddlyweb.config': config}
wiki_serializer = Serializer('wiki', environ)
# work around a limitation in the serializations
# when store is not set, we assume the bag has not been reified
string = wiki_serializer.list_tiddlers(bagfour)
assert '<div title="TiddlerOne' in string
assert '<div title="TiddlerTwo' in string
assert '<div title="TiddlerThree' in string
def test_generated_html_with_revbag():
html_serializer = Serializer('html')
bagfour.revbag = True
string = html_serializer.list_tiddlers(bagfour)
assert expected_html_revbag_string in string
bagfour.revbag = False
def test_json_to_bag():
serializer = Serializer('json')
json_string = simplejson.dumps(dict(policy=dict(read=['user1'], manage=['NONE']), desc='simply the best'))
newbag = Bag('bagho')
serializer.object = newbag
serializer.from_string(json_string)
assert newbag.name == 'bagho'
assert newbag.policy.read == ['user1']
assert newbag.policy.manage == ['NONE']
assert newbag.desc == 'simply the best'
def test_bag_to_html():
serializer = Serializer('html')
serializer.object = bagfour
html = serializer.to_string()
assert html == expected_bag_html
def test_text_list():
serializer = Serializer('text')
bags = [Bag('bag' + str(name)) for name in xrange(2)]
string = serializer.list_bags(bags)
assert string == 'bag0\nbag1'
def test_html_list():
serializer = Serializer('html')
bags = [Bag('bag' + str(name)) for name in xrange(2)]
string = serializer.list_bags(bags)
assert 'href="bags/bag0' in string
assert 'href="bags/bag1' in string
| python | 15 | 0.698032 | 110 | 29.701389 | 144 | starcoderdata |
func (f *FlagSet) Parse(arguments []string, options ...ParseOption) error {
f.parsed = true
f.args = arguments
for {
seen, err := f.parseOne()
if seen {
continue
}
if err == nil {
break
}
switch f.errorHandling {
case ContinueOnError:
return err
case ExitOnError:
os.Exit(2)
case PanicOnError:
panic(err)
}
}
// now run our parse options
for _, opt := range options {
if err := opt(f); err != nil {
switch f.errorHandling {
case ContinueOnError:
return err
case ExitOnError:
os.Exit(2)
case PanicOnError:
panic(err)
}
}
}
// if anything is still default, check if it should be set by ENV
for _, flag := range sortFlags(f.formal) {
if flag.SetBy != FlagSetByDefault || flag.NameInEnv == "" {
continue
}
value, ok := os.LookupEnv(flag.NameInEnv)
if !ok {
continue
}
//set the value
if err := f.set(flag, value); err != nil {
return f.failfNoUsage("invalid value %q for envvar %s: %v", value, flag.NameInEnv, err)
}
flag.SetBy = FlagSetByEnvVar
}
if f.configFile == nil {
return nil
}
// handle the config file after args and env since the filename may be set
// by either of those places
if err := f.configFile.Open(); err != nil {
if os.IsNotExist(err) {
return f.failfNoUsage("missing config file '%v'", f.configFile.FileName())
}
return f.failfNoUsage("error opening config file %v: %v", f.configFile.FileName(), err)
}
defer f.configFile.Close()
// now get our values from config file
for _, flag := range sortFlags(f.formal) {
if flag.SetBy < FlagSetByConfigFile && flag.NameInConfigFile != "" {
value, err := f.configFile.ConfigValue(flag.NameInConfigFile)
if err == ErrNoValue {
continue
} else if err != nil {
return f.failfNoUsage("error retrieving value for config file entry %s from file %s: %v", flag.NameInConfigFile, f.configFile.FileName(), err)
}
if err := f.set(flag, value); err != nil {
return f.failfNoUsage("invalid value %q for config file entry %s from file %s: %v", value, flag.NameInConfigFile, f.configFile.FileName(), err)
}
flag.SetBy = FlagSetByConfigFile
}
}
return nil
}
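// Precedence implied by Parse for a flag registered with both NameInEnv and
// NameInConfigFile (a summary of the code above, not additional behaviour):
//   1. command-line argument    (parsed first)
//   2. environment variable     (applied only while SetBy == FlagSetByDefault)
//   3. config file entry        (applied only while SetBy < FlagSetByConfigFile)
//   4. the flag's declared default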
| go | 17 | 0.653917 | 147 | 24.244186 | 86 | inline |
#include
#include
#include
// Switch switchArray[N_SWITCHES];
Switches::Switches()
{
/*
for (int i = 0; i < N_SWITCHES; i++)
{
pinMode(switchArray[i].channel(), INPUT);
pinMode(switchArray[i].channel(), INPUT_PULLDOWN);
}
*/
}
void Switches::init()
{
switchArray[SWITCH_SF1] = Switch("sf1", 12, PUSH); // S1
switchArray[SWITCH_SF2] = Switch("sf2", 19, PUSH); // S2
switchArray[SWITCH_SF3] = Switch("sf3", 5, PUSH); // S3
switchArray[SWITCH_VOICED] = Switch("voiced", 14, PUSH); // S7
switchArray[SWITCH_ASPIRATED] = Switch("aspirated", 17, PUSH); // S5
switchArray[SWITCH_NASAL] = Switch("nasal", 23, PUSH); // S6
switchArray[SWITCH_DESTRESS] = Switch("destressed", 18, PUSH); // S4
switchArray[SWITCH_STRESS] = Switch("stressed", 13, PUSH); // S0
switchArray[TOGGLE_HOLD] = Switch("hold", 16, TOGGLE); // T0
switchArray[TOGGLE_CREAK] = Switch("creak", 4, TOGGLE); // T1
switchArray[TOGGLE_SING] = Switch("sing", 2, TOGGLE); // T2
switchArray[TOGGLE_SHOUT] = Switch("shout", 15, TOGGLE); // T3
for (int i = 0; i < N_SWITCHES; i++)
{
pinMode(switchArray[i].channel(), INPUT);
pinMode(switchArray[i].channel(), INPUT_PULLDOWN);
switchArray[i].on_ = false;
}
}
Switch& Switches::getSwitch(int switchN)
{
return switchArray[switchN];
}
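// Illustrative usage in an Arduino-style sketch (setup/loop shown for context only):
// Switches switches;
// void setup() { switches.init(); }
// void loop()  { bool hold = digitalRead(switches.getSwitch(TOGGLE_HOLD).channel()); }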
| c++ | 12 | 0.593813 | 73 | 30.659574 | 47 | starcoderdata |
package com.tyrfing.games.tyrlib3.model.game.stats;
import java.util.Map;
public interface IStatHolder<S extends Number> {
public Map<Stat, S> getStats();
}
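// Minimal illustrative implementation (class name and backing map are hypothetical):
// public class UnitStats implements IStatHolder<Integer> {
//     private final Map<Stat, Integer> stats = new HashMap<>();
//     @Override
//     public Map<Stat, Integer> getStats() { return stats; }
// }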
| java | 5 | 0.769784 | 110 | 32.875 | 8 | starcoderdata |
package edu.fiuba.algo3.modelo;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.IOException;
import org.junit.jupiter.api.Test;
public class PartidaTest {
@Test
public void testPaisesRepartidosCorrectamenteEntreDosJugadores() throws Throwable{
Jugador jugador1 = new Jugador("Santiago", 1);
Jugador jugador2 = new Jugador("Julio", 2);
Partida unaPartida = new Partida();
unaPartida.crearMapa();
unaPartida.agregarJugador(jugador1);
unaPartida.agregarJugador(jugador2);
unaPartida.jugadorInicial(0);
unaPartida.iniciarPartida();
for(Pais pais: unaPartida.obtenerMapa().obtenerPaises()){
System.out.println(pais.obtenerNombrePais()+"-"+pais.obtenerJugadorEnControl().obtenerNombre());
}
}
@Test
public void testPaisesRepartidosCorrectamenteEntreTresJugadores() throws Throwable{
Jugador jugador1 = new Jugador("Santiago", 1);
Jugador jugador2 = new Jugador("Julio", 2);
Jugador jugador3 = new Jugador("Pablo", 3);
Partida unaPartida = new Partida();
unaPartida.crearMapa();
unaPartida.agregarJugador(jugador1);
unaPartida.agregarJugador(jugador2);
unaPartida.agregarJugador(jugador3);
unaPartida.jugadorInicial(0);
unaPartida.iniciarPartida();
for(Pais pais: unaPartida.obtenerMapa().obtenerPaises()){
System.out.println(pais.obtenerNombrePais()+"-"+pais.obtenerJugadorEnControl().obtenerNombre());
}
}
@Test
public void testRondaDeColocacion() throws Throwable{
Jugador jugador1 = new Jugador("Santiago", 1);
Jugador jugador2 = new Jugador("Julio", 2);
Partida unaPartida = new Partida();
unaPartida.crearMapa();
unaPartida.agregarJugador(jugador1);
unaPartida.agregarJugador(jugador2);
unaPartida.jugadorInicial(0);
unaPartida.iniciarPartida();
//___________________________________//
MovimientoColocacion movimiento = new MovimientoColocacion();
Pais destino = unaPartida.obtenerUnPais("Terranova");
movimiento.destinoPais(destino);
movimiento.numeroTropas(3);
unaPartida.ejecutarMovimiento(movimiento);
assertEquals(4, destino.obtenerNumeroTotalDeTropas());
// Jugador 1 ya coloco todas las tropas que le correspondia...
assertSame(jugador2, unaPartida.obtenerJugadorActual());
//___________________________________//
movimiento = new MovimientoColocacion();
destino = unaPartida.obtenerUnPais("Japon");
movimiento.destinoPais(destino);
movimiento.numeroTropas(2);
unaPartida.ejecutarMovimiento(movimiento);
assertEquals(3, destino.obtenerNumeroTotalDeTropas());
// Sigue jugando jugador 2 tiene una tropa mas para colocar...
assertSame(jugador2, unaPartida.obtenerJugadorActual());
//___________________________________//
movimiento = new MovimientoColocacion();
destino = unaPartida.obtenerUnPais("Rusia");
movimiento.destinoPais(destino);
movimiento.numeroTropas(1);
unaPartida.ejecutarMovimiento(movimiento);
assertEquals(2, destino.obtenerNumeroTotalDeTropas());
// Ahora le toca a jugador 1
assertSame(jugador1, unaPartida.obtenerJugadorActual());
//___________________________________//
movimiento = new MovimientoColocacion();
destino = unaPartida.obtenerUnPais("Terranova");
movimiento.destinoPais(destino);
movimiento.numeroTropas(4);
unaPartida.ejecutarMovimiento(movimiento);
assertEquals(8, destino.obtenerNumeroTotalDeTropas());
// Sigue jugando jugador1, puede colocar una tropa mas todavia...
assertSame(jugador1, unaPartida.obtenerJugadorActual());
}
@Test
public void paisesLimitrofes() throws Throwable{
Jugador jugador1 = new Jugador("Santiago", 1);
Jugador jugador2 = new Jugador("Julio", 2);
Partida unaPartida = new Partida();
unaPartida.crearMapa();
unaPartida.agregarJugador(jugador1);
unaPartida.agregarJugador(jugador2);
unaPartida.jugadorInicial(0);
unaPartida.iniciarPartida();
assertEquals(4, unaPartida.obtenerPaisesLimitrofesDe("Argentina").size());
}
@Test
public void testObjetivosCumplidos() throws Throwable{
Jugador jugador1 = new Jugador("Santiago", 1);
Jugador jugador2 = new Jugador("Julio", 2);
Partida unaPartida = new Partida();
unaPartida.crearMapa();
unaPartida.agregarJugador(jugador1);
unaPartida.agregarJugador(jugador2);
unaPartida.jugadorInicial(0);
unaPartida.iniciarPartida();
for(Pais pais: unaPartida.obtenerPaises()){
System.out.println(pais.obtenerNombrePais()+" "+pais.obtenerJugadorEnControl().obtenerNombre());
}
Continente americaDelSur = unaPartida.obtenerMapa().obtenerUnContinente("America Del Sur");
ObjetivoConquista conquistar4DeAmericaDelSur = new ObjetivoConquista();
conquistar4DeAmericaDelSur.agregarContinenteAConquistar(
americaDelSur,
4
);
conquistar4DeAmericaDelSur.setJugador(jugador1);
assertTrue(conquistar4DeAmericaDelSur.logroElObjetivo(unaPartida.obtenerMapa()));
}
@Test
public void testMazo() throws IOException{
Partida unaPartida = new Partida();
unaPartida.cargarMazo();
MazoPaises mazo = unaPartida.obtenerMazo();
assertEquals(50, mazo.numeroDeCartas());
CartaPais carta = mazo.obtenerSiguienteCarta();
assertEquals(49, mazo.numeroDeCartas());
mazo.agregarAlFondo(carta);
assertEquals(50, mazo.numeroDeCartas());
}
}
| java | 12 | 0.659547 | 108 | 36.228916 | 166 | starcoderdata |
var $day1table = $('#table1');
var $day2table = $('#table2');
var $day3table = $('#table3');
$(function () {
var day1data = [
{
"date": "DAY 1 27th June 2016",
"time": "8:00 - 8:30",
"activity": "Arrival and Registration",
"facilitator": "AIR Lab"
},
{
"date": "",
"time": "8:30 - 9:00",
"activity": "Opening remarks",
"facilitator": "Dean, School of Computing & IT, Makerere University Prof.
},
{
"date": "",
"time": "",
"activity": '<h5 class="text-center">SESSION 1 (Machine Learning & Data Science)
"facilitator": ""
},
{
"date": "",
"time": "9:00 - 10:00",
"activity": '<p class="text-warning">Lecture 1 : Introduction to Data Science and Machine Learning
"facilitator": 'Neil Lawrence <a href="#">Slides <a href="#">Other References
},
{
"date": "",
"time": "10:00 - 10:30",
"activity": "Break",
"facilitator": ""
},
{
"date": "",
"time": "10:30 - 12:00",
"activity": "Lecture 1 Practice Session",
"facilitator": 'All facilitators <a href="#">Code <a href="#">Dataset
},
{
"date": "",
"time": "",
"activity": '<h5 class="text-center">SESSION 2 (Machine Learning)
"facilitator": ""
},
{
"date": "",
"time": "12:00 - 13:00",
"activity": "Lecture 2",
"facilitator": "
},
{
"date": "",
"time": "13:00 - 14:00",
"activity": "Lunch",
"facilitator": ""
},
{
"date": "",
"time": "14:00 - 15:20",
"activity": "Lecture 2 Practice Session",
"facilitator": "All facilitators"
},
{
"date": "",
"time": "15:20 - 15:30",
"activity": "Break",
"facilitator": ""
},
{
"date": "",
"time": "",
"activity": '<h5 class="text-center">SESSION 3 (Data Science)
"facilitator": ""
},
{
"date": "",
"time": "15:30 - 16:30",
"activity": "Lecture 3",
"facilitator": "
},
{
"date": "",
"time": "16:30 - 18:00",
"activity": "Lecture 3 Practice Session",
"facilitator": "All facilitators"
}
];
$day1table.bootstrapTable({data: day1data});
});
$(function () {
var day2data = [
{
"date": "DAY 2 28th June 2016",
"time": "",
"activity": '<h5 class="text-center">SESSION 4 (Data Science)
"facilitator": ""
},
{
"date": "",
"time": "9:00 - 10:00",
"activity": '<p class="text-warning">Lecture 4 : Data wrangling with Pandas
"facilitator": ' <a href="#">Notebook <a href="#">Other References
},
{
"date": "",
"time": "10:00 - 10:30",
"activity": "Break",
"facilitator": ""
},
{
"date": "",
"time": "10:30 - 12:00",
"activity": "Lecture 4 Practice Session",
"facilitator": 'All facilitators <a href="#">Code <a href="#">Dataset
},
{
"date": "",
"time": "",
"activity": '<h5 class="text-center">SESSION 5 (Machine Learning)
"facilitator": ""
},
{
"date": "",
"time": "12:00 - 13:00",
"activity": "Lecture 5",
"facilitator": "
},
{
"date": "",
"time": "13:00 - 14:00",
"activity": "Lunch",
"facilitator": ""
},
{
"date": "",
"time": "14:00 - 15:20",
"activity": "Lecture 5 Practice Session",
"facilitator": "All facilitators"
},
{
"date": "",
"time": "15:20 - 15:30",
"activity": "Break",
"facilitator": ""
},
{
"date": "",
"time": "",
"activity": '<h5 class="text-center">SESSION 6 (Data Science)
"facilitator": ""
},
{
"date": "",
"time": "15:30 - 16:30",
"activity": "Lecture 6",
"facilitator": "
},
{
"date": "",
"time": "16:30 - 18:00",
"activity": "Lecture 6 Practice Session",
"facilitator": "All facilitators"
}
];
$day2table.bootstrapTable({data: day2data});
});
$(function () {
var day3data = [
{
"date": "DAY 3 29th June 2016",
"time": "",
"activity": '<h5 class="text-center">SESSION 7 (Data Science)
"facilitator": ""
},
{
"date": "",
"time": "9:00 - 10:00",
"activity": '<p class="text-warning">Lecture 7
"facilitator": ' <a href="#">Slides <a href="#">Other References
},
{
"date": "",
"time": "10:00 - 10:30",
"activity": "Break",
"facilitator": ""
},
{
"date": "",
"time": "10:30 - 12:00",
"activity": "Lecture 7 Practice Session",
"facilitator": 'All facilitators <a href="#">Code <a href="#">Dataset
},
{
"date": "",
"time": "",
"activity": '<h5 class="text-center">SESSION 8 (Machine Learning)
"facilitator": ""
},
{
"date": "",
"time": "12:00 - 13:00",
"activity": "Model Selection",
"facilitator": "
},
{
"date": "",
"time": "13:00 - 14:00",
"activity": "Lunch",
"facilitator": ""
},
{
"date": "",
"time": "14:00 - 15:20",
"activity": "Lecture 8 Practice Session",
"facilitator": "All facilitators"
},
{
"date": "",
"time": "15:20 - 15:30",
"activity": "Break",
"facilitator": ""
},
{
"date": "",
"time": "",
"activity": '<h5 class="text-center">SESSION 9 (Data Science)
"facilitator": ""
},
{
"date": "",
"time": "15:30 - 16:30",
"activity": "Lecture 9",
"facilitator": "IBM Research Nairobi"
},
{
"date": "",
"time": "16:30 - 18:00",
"activity": "Lecture 9 Practice Session",
"facilitator": "All facilitators"
},
{
"date": "END",
"time": "18:00 - 19:30",
"activity": '<h4 class="text-center text-danger">Cocktail
"facilitator": "
}
];
$day3table.bootstrapTable({data: day3data});
});
| javascript | 12 | 0.340692 | 130 | 32.709302 | 258 | starcoderdata |
package se.kth.jpf_visual;
import java.util.ArrayList;
import java.util.List;
public class TextLineList {
	List<TextLine> list = new ArrayList<>();
	boolean noSrc = false;
	public TextLineList(List<TextLine> l) {
this.list = l;
}
public void setNoSrc(boolean b) {
this.noSrc = b;
}
public boolean isNoSrc() {
return noSrc;
}
	public List<TextLine> getList() {
return new ArrayList<>(list);
}
public int getHeight() {
return list.size();
}
public TextLine getTextLine(int idx){
return list.get(idx);
}
}
| java | 9 | 0.679775 | 41 | 15.181818 | 33 | starcoderdata |
/** //
* Copyright (c) 2013-2017, The Kovri I2P Router Project //
* //
* All rights reserved. //
* //
* Redistribution and use in source and binary forms, with or without modification, are //
* permitted provided that the following conditions are met: //
* //
* 1. Redistributions of source code must retain the above copyright notice, this list of //
* conditions and the following disclaimer. //
* //
* 2. Redistributions in binary form must reproduce the above copyright notice, this list //
* of conditions and the following disclaimer in the documentation and/or other //
* materials provided with the distribution. //
* //
* 3. Neither the name of the copyright holder nor the names of its contributors may be //
* used to endorse or promote products derived from this software without specific //
* prior written permission. //
* //
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY //
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF //
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL //
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, //
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, //
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS //
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, //
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF //
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. //
*/
#ifndef SRC_CORE_UTIL_LOG_H_
#define SRC_CORE_UTIL_LOG_H_
#include
#include
#include
#include
#include
#include
#include
#include "core/util/byte_stream.h"
// TODO(anonimal):
// Boost.Log uses an "application-wide singleton" (note: our logger/sink setup applies globally from instance configuration)
// As a result, logging will not work when in daemon mode. http://www.boost.org/doc/libs/1_63_0/libs/log/doc/html/log/rationale/fork_support.html
// We've worked around this problem in the past by using some very gross hacking but we may be able to apply a cleaner work-around so we can set this up entirely in the core namespace
// (we could create an (inheritable?) logging class with overloaded stream operator and adjust our logging initialization and macro accordingly, or consider other options)
// Also note that a singleton will effect having multiple logging library options (there's no need to do that though when we have huge flexibility with sinks)
BOOST_LOG_GLOBAL_LOGGER(g_Logger, boost::log::sources::severity_logger_mt<boost::log::trivial::severity_level>)
#define LOG(severity) BOOST_LOG_SEV(g_Logger::get(), boost::log::trivial::severity)
namespace kovri
{
namespace core
{
/// @details This configures/sets up the global logger.
/// @param Parsed kovri variable map
/// @warning Kovri config must first be parsed
void SetupLogging(
const boost::program_options::variables_map& parsed_kovri_config);
/// @brief Log source and destination of a network request or response
/// @param boost::network::http::client:: request or response
/// @return human readable string
template <typename Type>
std::string LogNetEndpointsToString(const Type& req)
{
std::stringstream ss;
ss << "Source : \""
<< static_cast
<< "\" Dest : \""
<< static_cast
<< "\"";
return ss.str();
}
/// @brief Log headers of a network request or response
/// @param boost::network::http::client:: request or response
/// @return human readable string
template <typename Type>
std::string LogNetHeaderToString(const Type& req)
{
std::stringstream ss;
ss << "Headers : ";
for (auto const& header : req.headers())
ss << "\"" << header.first << "\" : \"" << header.second << "\" | ";
return ss.str();
}
/// @brief Log body of a network request or response
/// @param boost::network::http::client:: request or response
/// @return human readable string
template <typename Type>
std::string LogNetBodyToString(const Type& req)
{
std::stringstream ss;
  std::string body(static_cast<std::string>(req.body()));  // assumption: body accessor lost in extraction
ss << "Body : "
<< kovri::core::GetFormattedHex(
reinterpret_cast<const uint8_t*>(body.data()), body.length());
return ss.str();
}
/// @brief Log entire message (endpoints + headers + body)
/// @param boost::network::http::client:: request or response
/// @return human readable string
template <typename Type>
std::string LogNetMessageToString(const Type& req)
{
std::stringstream ss;
ss << LogNetEndpointsToString(req) << std::endl
<< LogNetHeaderToString(req) << std::endl
<< LogNetBodyToString(req);
return ss.str();
}
} // namespace core
} // namespace kovri
#endif // SRC_CORE_UTIL_LOG_H_
|
c
| 16 | 0.598359 | 183 | 50.380165 | 121 |
starcoderdata
|
<?php
namespace App;
use DB;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Foundation\Auth\User as Authenticatable;
use Illuminate\Notifications\Notifiable;
class User extends Authenticatable
{
use Notifiable;
protected $table = 'user';
const CREATED_AT = 'date_created';
const UPDATED_AT = 'date_updated';
public $fillable = [
'first_name',
'last_name',
'description',
'picture',
'id_department',
'email',
'external',
'id_status',
'password',
'token',
'is_contactable',
];
public $timestamps = true;
/**
* Define one-to-many relation between user and its userElements
* @return \Illuminate\Database\Eloquent\Relations\HasMany
*/
public function userElements()
{
return $this->hasMany(UserElement::class, 'id_user');
}
// Get all elements shared by the user
public function elementShared()
{
        $elements = DB::select(DB::raw('SELECT * FROM element
                                        WHERE id IN (SELECT id_element
                                                     FROM user_element
                                                     WHERE is_exchangeable = 1
                                                     AND id_user = ' . $this->id . ')'));
// Array to Model
$elements = Element::hydrate($elements);
return $elements;
}
/**
* Define one-to-one relation between user and warnings that he report
* @return \Illuminate\Database\Eloquent\Relations\HasOne
*/
public function warningReported()
{
return $this->hasOne(Warning::class, 'id_whistleblower');
}
// Get all rooms in which the user participates
    // Use it like this: $user->rooms() (not $user->rooms)
public function rooms()
{
        $rooms = DB::select('SELECT * FROM room
                            WHERE id IN (SELECT id_room
                                         FROM user_room
                                         WHERE id_user = ' . $this->id . ')');
$rooms = Element::hydrate($rooms);
return $rooms;
}
/**
* Define one-to-one relation between user and department
* @return \Illuminate\Database\Eloquent\Relations\belongsTo
*/
public function department()
{
return $this->belongsTo(Department::class, 'id_department');
}
/**
* Define one-to-one relation between user and status
* @return \Illuminate\Database\Eloquent\Relations\belongsTo
*/
public function status()
{
return $this->belongsTo(Status::class, 'id_status');
}
// public function getEmailAttribute($value){return $value;}
// public function getTokenAttribute($value){return $value;}
// public function getPictureAttribute($value){return $value;}
// public function getIs_contactableAttribute($value){return $value;}
// public function setEmailAttribute($value){$this->attributes['email'] = $value;}
// public function setTokenAttribute($value){$this->attributes['token'] = $value;}
// public function setPictureAttribute($value){$this->attributes['picture'] = $value;}
// public function setIs_contactableAttribute($value){$this->attributes['is_contactable'] = $value;}
}
|
php
| 17 | 0.560456 | 104 | 31.301887 | 106 |
starcoderdata
|
pub fn default_home_path(chain: &ChainTypes) -> Result<PathBuf, Error> {
// Desktop OS case. Home dir does exist
#[cfg(not(target_os = "android"))]
let mut path = match dirs::home_dir() {
Some(home) => home,
None => std::env::current_dir()?,
};
// Android doesn't have Home dir. binary dir will be used instead of home dir
#[cfg(target_os = "android")]
panic!("Home path doesn't exist under Android");
path.push(WALLET713_HOME);
path.push(chain.shortname());
std::fs::create_dir_all(path.as_path())?;
Ok(path)
}
|
rust
| 11 | 0.552923 | 85 | 36.294118 | 17 |
inline
|
/*
* Copyright (C) 2005-2010 MaNGOS
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "CoordModelMapping.h"
#include <string.h>
#include <stdio.h>
using namespace G3D;
namespace VMAP
{
//============================================================
//============================================================
void CMappingEntry::addFilename(char *pName)
{
std::string name = std::string(pName);
if(!iFilenames.contains(name))
iFilenames.append(std::string(pName));
}
//============================================================
const std::string CMappingEntry::getKeyString() const
{
return(CMappingEntry::getKeyString(iMapId,xPos, yPos));
}
const std::string CMappingEntry::getKeyString( unsigned int pMapId, int pXPos, int pYPos )
{
char b[100];
sprintf(b,"%03u_%d_%d", pMapId, pXPos, pYPos);
return(std::string(b));
}
//============================================================
//============================================================
//============================================================
CoordModelMapping::~CoordModelMapping()
{
        Array<std::string> keys = iMapObjectFiles.getKeys();
for (int k = 0; k < keys.length(); k++)
{
CMappingEntry *value = getCMappingEntry(keys[k]);
if(value != 0)
{
iMapObjectFiles.remove(keys[k]);
delete value;
}
}
}
//============================================================
int findPosChar(const char *namebuffer, char pSearch, int pCount)
{
int result = -1;
int pos=0;
while(namebuffer[pos] != 0)
{
if(namebuffer[pos] == pSearch)
{
--pCount;
}
if(pCount == 0)
{
result = pos;
break;
}
++pos;
}
return result;
}
//============================================================
bool CoordModelMapping::readCoordinateMapping(const std::string& pDirectoryFileName)
{
FILE *f = fopen(pDirectoryFileName.c_str(), "rb");
if(!f)
{
printf("ERROR: Can't open file: %s\n",pDirectoryFileName.c_str());
return false;
}
char buffer[500+1];
CMappingEntry* cMappingEntry;
while(fgets(buffer, 500, f))
{
//char namebuffer[500];
char positionbuffer[500];
int xpos, ypos, noVec;
float scale;
xpos = ypos = noVec = 0;
//sscanf(buffer, "%d %d %s %s %f %d", &xpos, &ypos, namebuffer,positionbuffer, &scale, &noVec);
// this is ugly, but the format has no read delimiter and a space could be in the first part of the name
int nameStart = findPosChar(buffer, ' ', 2);// find the 2. space
if(nameStart > -1 && (iFilterMethod == NULL || (*iFilterMethod)(buffer)))
{
++nameStart;
// find the 1. / (now a space only can be found at the end of the name)
int nameEnd = nameStart + findPosChar(&buffer[nameStart], '/', 1);
// find the 1. space (after the name)
nameEnd += findPosChar(&buffer[nameEnd], ' ', 1);
buffer[nameEnd] = 0; // terminate the name
sscanf(buffer, "%d %d", &xpos, &ypos);
sscanf(&buffer[nameEnd+1], "%s %f %d", positionbuffer, &scale, &noVec);
unsigned int mapId = getMapIdFromFilename(std::string(&buffer[nameStart]));
if(!iMapIds.contains(mapId))
{
iMapIds.append(mapId);
printf("Coords for map %u...\n",mapId);
}
if(!isWorldAreaMap(mapId))
{
xpos = 0; // store all files under the groupKey
ypos = 0;
}
std::string key = CMappingEntry::getKeyString(mapId, xpos, ypos);
cMappingEntry = getCMappingEntry(key);
if(cMappingEntry == 0)
{
cMappingEntry = new CMappingEntry(mapId, xpos, ypos);
addCMappingEntry(cMappingEntry);
}
char namebuffer2[500];
sprintf(namebuffer2, "%d %s#%s_%f", noVec, &buffer[nameStart], positionbuffer, scale);
cMappingEntry->addFilename(namebuffer2);
//break;
}
}
fclose(f);
return true;
}
//============================================================
const NameCollection CoordModelMapping::getFilenamesForCoordinate(unsigned int pMapId, int xPos, int yPos)
{
NameCollection result;
        Array<std::string> rawNames;
CMappingEntry *entry = getCMappingEntry(CMappingEntry::getKeyString(pMapId, xPos, yPos));
if(entry != 0)
{
rawNames = entry->getFilenames();
int pos = 0;
while(pos < rawNames.size())
{
char namebuffer[500];
int noVerc;
int startName = findPosChar(rawNames[pos].c_str(), ' ', 1) + 1;
int endName = (int) rawNames[pos].length();
sscanf(rawNames[pos].c_str(), "%d", &noVerc);
memcpy(namebuffer, &rawNames[pos].c_str()[startName], endName-startName);
namebuffer[endName-startName] = 0;
sscanf(rawNames[pos].c_str(), "%d", &noVerc);
std::string modelPosFileName = std::string(namebuffer);
if(noVerc > MIN_VERTICES_FOR_OWN_CONTAINER_FILE)
{
result.appendToSingle(modelPosFileName);
}
else
{
result.appendToMain(modelPosFileName);
}
++pos;
}
}
return result;
}
//=================================================================
}
|
c++
| 18 | 0.476095 | 116 | 34.176768 | 198 |
starcoderdata
|
@GET
@Path("{shipmentId}")
@Produces(MediaType.APPLICATION_JSON)
public Response getTrackingList (@PathParam("shipmentId") String shipmentId) {
// request list of shipments
List<Tracking> trackingList = trackingService.getTrackingList(shipmentId);
// send response to client
return Response.ok(trackingList).build();
}
|
java
| 8 | 0.694215 | 82 | 39.444444 | 9 |
inline
|
<?php
/*
* This file is part of the Pushover package.
*
* (c)
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Api\Message;
use PHPUnit\Framework\TestCase;
use Serhiy\Pushover\Api\Message\Attachment;
use Serhiy\Pushover\Exception\InvalidArgumentException;
/**
* @author
*/
class AttachmentTest extends TestCase
{
public function testCanBeCreated()
{
$attachment = new Attachment('/images/test.jpeg', Attachment::MIME_TYPE_JPEG);
$this->assertInstanceOf(Attachment::class, $attachment);
return $attachment;
}
public function testCannotBeCreatedWithInvalidMimeType()
{
$this->expectException(InvalidArgumentException::class);
new Attachment('/images/test.jpeg', 'image/invalid');
}
public function testCannotBeCreatedWithInvalidExtension()
{
$this->expectException(InvalidArgumentException::class);
new Attachment('/images/test.invalid', Attachment::MIME_TYPE_JPEG);
}
/**
* @depends testCanBeCreated
* @param Attachment $attachment
*/
public function testGetMimeType(Attachment $attachment)
{
$this->assertEquals(Attachment::MIME_TYPE_JPEG, $attachment->getMimeType());
}
/**
* @depends testCanBeCreated
* @param Attachment $attachment
*/
public function testGetFilename(Attachment $attachment)
{
$this->assertEquals('/images/test.jpeg', $attachment->getFilename());
}
/**
* @depends testCanBeCreated
* @param Attachment $attachment
*/
public function testSetMimeType(Attachment $attachment)
{
$attachment->setMimeType(Attachment::MIME_TYPE_JPEG);
$this->assertEquals(Attachment::MIME_TYPE_JPEG, $attachment->getMimeType());
$this->expectException(InvalidArgumentException::class);
$attachment->setMimeType('image/invalid');
}
/**
* @depends testCanBeCreated
* @param Attachment $attachment
*/
public function testSetFilename(Attachment $attachment)
{
$attachment->setMimeType(Attachment::MIME_TYPE_JPEG);
$this->assertEquals(Attachment::MIME_TYPE_JPEG, $attachment->getMimeType());
$this->expectException(InvalidArgumentException::class);
$attachment->setMimeType('image/invalid');
}
/**
* @depends testCanBeCreated
* @param Attachment $attachment
*/
public function testGetSupportedAttachmentTypes(Attachment $attachment)
{
$supportedAttachmentsTypes = new \ReflectionClass(Attachment::class);
$this->assertEquals($supportedAttachmentsTypes->getConstants(), $attachment->getSupportedAttachmentTypes());
}
/**
* @depends testCanBeCreated
* @param Attachment $attachment
*/
public function testGetSupportedAttachmentExtensions(Attachment $attachment)
{
$supportedAttachmentExtensions = array(
'bmp', 'gif', 'ico', 'jpeg', 'jpg', 'png', 'svg', 'tif', 'tiff', 'webp'
);
$this->assertEquals($supportedAttachmentExtensions, $attachment->getSupportedAttachmentExtensions());
}
}
|
php
| 11 | 0.671393 | 116 | 27.964286 | 112 |
starcoderdata
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-07 11:43
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Area',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('floor', models.CharField(blank=True, max_length=255, null=True)),
('description', models.CharField(blank=True, max_length=255, null=True)),
('area', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
('price', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
('value', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
],
),
migrations.CreateModel(
name='Property',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('assessment_reference', models.CharField(blank=True, max_length=32, null=True)),
('uarn', models.CharField(max_length=100, unique=True)),
('ba_code', models.CharField(blank=True, max_length=255, null=True)),
('firm_name', models.CharField(blank=True, max_length=255, null=True)),
('number_or_name', models.CharField(blank=True, max_length=255, null=True)),
('sub_street_1', models.CharField(blank=True, max_length=255, null=True)),
('sub_street_2', models.CharField(blank=True, max_length=255, null=True)),
('sub_street_3', models.CharField(blank=True, max_length=255, null=True)),
('street', models.CharField(blank=True, max_length=255, null=True)),
('town', models.CharField(blank=True, max_length=255, null=True)),
('postal_district', models.CharField(blank=True, max_length=255, null=True)),
('county', models.CharField(blank=True, max_length=255, null=True)),
('postcode', models.CharField(blank=True, max_length=255, null=True)),
('scheme_ref', models.CharField(blank=True, max_length=255, null=True)),
('primary_description', models.CharField(blank=True, max_length=255, null=True)),
('total_area', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
('subtotal', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
('total_value', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
('adopted_rv', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
('list_year', models.IntegerField(null=True)),
('ba_name', models.CharField(blank=True, max_length=255, null=True)),
('ba_reference_number', models.CharField(blank=True, max_length=255, null=True)),
('vo_ref', models.CharField(blank=True, max_length=255, null=True)),
('from_date', models.CharField(blank=True, max_length=255, null=True)),
('to_date', models.CharField(blank=True, max_length=255, null=True)),
('scat_code_only', models.CharField(blank=True, max_length=255, null=True)),
('unit_of_measurement', models.CharField(blank=True, max_length=255, null=True)),
('unadjusted_price', models.DecimalField(decimal_places=2, max_digits=16, null=True)),
],
),
migrations.AddField(
model_name='area',
name='property',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='voa.Property'),
),
]
|
python
| 16 | 0.593989 | 114 | 57.597015 | 67 |
starcoderdata
|
namespace WebEid.Security.Tests.Validator
{
using System;
using Exceptions;
using NUnit.Framework;
using TestUtils;
[TestFixture]
public class AuthTokenValidatorOriginTests : AbstractTestWithMockedDateValidatorAndCorrectNonce
{
[Test]
public void ValidateOriginMismatchFailure()
{
this.Validator = AuthTokenValidators.GetAuthTokenValidator("https://mismatch.ee", this.Cache);
Assert.ThrowsAsync () => await this.Validator.Validate(Tokens.SignedTest));
}
[Test]
public void TestOriginMissing()
{
Assert.ThrowsAsync () => await this.Validator.Validate(Tokens.OriginMissing))
.HasMessageStartingWith("aud field must be present in authentication token body and must be an array");
}
[Test]
public void TestOriginEmpty()
{
Assert.ThrowsAsync () => await this.Validator.Validate(Tokens.OriginEmpty))
.HasMessageStartingWith("origin from aud field must not be empty");
}
[Test]
public void TestOriginNotString()
{
Assert.ThrowsAsync () => await this.Validator.Validate(Tokens.OriginNotString));
}
[Test]
public void TestValidatorOriginNotUrl()
{
Assert.Throws =>
AuthTokenValidators.GetAuthTokenValidator("not-url", this.Cache));
}
[Test]
public void TestTokenOriginNotUrl()
{
Assert.ThrowsAsync () => await this.Validator.Validate(Tokens.OriginNotUrl));
}
[Test]
public void TestValidatorOriginExcessiveElements()
{
Assert.Throws =>
AuthTokenValidators.GetAuthTokenValidator("https://ria.ee/excessive-element", this.Cache))
.HasMessageStartingWith("Origin URI must only contain the HTTPS scheme, host and optional port component");
}
[Test]
public void TestTokenOriginExcessiveElements()
{
var validator = AuthTokenValidators.GetAuthTokenValidator("https://ria.ee", this.Cache);
Assert.ThrowsAsync () => await validator.Validate(Tokens.OriginUrlWithExcessiveElements));
}
[Test]
public void TestValidatorOriginNotHttps()
{
Assert.Throws => AuthTokenValidators.GetAuthTokenValidator("http://ria.ee", this.Cache));
}
[Test]
public void TestTokenOriginNotHttps()
{
var validator = AuthTokenValidators.GetAuthTokenValidator("https://ria.ee", this.Cache);
Assert.ThrowsAsync () => await validator.Validate(Tokens.OriginValidUrlNotHttps));
}
[Test]
public void TestValidatorOriginNotValidUrl()
{
Assert.Throws =>
AuthTokenValidators.GetAuthTokenValidator("ria://ria.ee", this.Cache));
}
[Test]
public void TestValidatorOriginNotValidSyntax()
{
Assert.Throws =>
AuthTokenValidators.GetAuthTokenValidator("https:///ria.ee", this.Cache));
}
}
}
|
c#
| 19 | 0.63435 | 133 | 36.376344 | 93 |
starcoderdata
|
import Model, { hasMany, belongsTo, attr } from '@ember-data/model';
export default class IlmSession extends Model {
@attr('number')
hours;
@attr('date')
dueDate;
@belongsTo('session', { async: true })
session;
@hasMany('learner-group', { async: true })
learnerGroups;
@hasMany('instructor-group', { async: true })
instructorGroups;
@hasMany('user', {
async: true,
inverse: 'instructorIlmSessions',
})
instructors;
@hasMany('user', {
async: true,
inverse: 'learnerIlmSessions',
})
learners;
}
|
javascript
| 8 | 0.641208 | 68 | 17.16129 | 31 |
starcoderdata
|
import pickle
from os import path
from src.features import build_features
from src.config import config
import torch
from torch.utils.data import Dataset
class FlickrDataset(Dataset):
img_caption_dict = {}
image_ids = []
word_to_int_map = {}
int_to_word_map = {}
def __init__(self, file_name, dtype="train"):
# read image ids
self.images_directory = config.IMAGES_DIRECTORY
with open(file_name, "r") as f:
self.image_ids = f.read().strip().splitlines()
img_caption_dict = build_features.load_img_caption_data(
config.CAPTIONS_TOKENS_FILE)
self.img_caption_dict = img_caption_dict
# create word mappings
word_mapping_fname = config.FEATURES_DIRECTORY / "word_mappings.pkl"
if path.exists(word_mapping_fname):
print("loading word maps")
(word_to_int_map, int_to_word_map) = pickle.load(open(
word_mapping_fname, "rb"))
self.word_to_int_map = word_to_int_map
self.int_to_word_map = int_to_word_map
else:
# create vocabulary
print("Creating word maps")
# merge caption text data
text_data = " ".join([" ".join(txt) for txt in
img_caption_dict.values()])
# create word to int mappings
word_to_int_map, int_to_word_map = build_features.create_word_mappings(
text_data)
self.word_to_int_map = word_to_int_map
self.int_to_word_map = int_to_word_map
pickle.dump((word_to_int_map, int_to_word_map),
open(word_mapping_fname, "wb"))
# extract image features
fname = dtype + "_image_features.pt"
img_features_fname = config.FEATURES_DIRECTORY / fname
if path.exists(img_features_fname):
print("loading image features")
# load features
self.image_features = torch.load(img_features_fname)
else:
image_features = build_features.extract_image_features(
self.images_directory,
self.image_ids)
self.image_features = image_features
# save features
torch.save(image_features, img_features_fname)
def __getitem__(self, index):
img_fname = self.image_ids[index]
# load image features
if(img_fname in self.image_features.keys()):
img_tensor = self.image_features[img_fname]
else:
            img_features = build_features.extract_image_features(
                self.images_directory,
                img_fname)
img_tensor = img_features[img_fname]
# load caption features
caption_txt = self.img_caption_dict[img_fname]
txt_tensor = torch.tensor(build_features.convert_text_to_int(
caption_txt[1], self.word_to_int_map))
return img_tensor, txt_tensor
def __len__(self):
return len(self.img_caption_dict)
if __name__ == "__main__":
training_dataset = FlickrDataset(file_name=config.CAPTIONS_TRAIN_FILE, dtype="train")
validation_dataset = FlickrDataset(file_name=config.CAPTIONS_VALIDATION_FILE, dtype="valid")
test_dataset = FlickrDataset(file_name=config.CAPTIONS_TEST_FILE, dtype="test")
print(test_dataset[0])
|
python
| 16 | 0.532499 | 96 | 29.778689 | 122 |
starcoderdata
|
package com.alperez.bt_microphone.bluetoorh.connector.sound;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.media.AudioTrack;
import com.alperez.bt_microphone.bluetoorh.BtUtils;
import com.alperez.bt_microphone.bluetoorh.connector.OnConnectionStatusListener;
import com.alperez.bt_microphone.utils.ThreadLog;
import java.io.IOException;
import java.io.InputStream;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Created by stanislav.perchenko on 3/19/2017.
*/
public class BtSoundPlayerImpl implements BtSoundPlayer {
public static final String TAG = "BtSoundPlayer";
private static final int MAX_FRAME_SIZE = 512;
private SoundLevelMeter soundLevelMeter;
private BluetoothDevice device;
private UUID serviceUUID;
private AudioTrack aTrack;
private Thread workThread;
private volatile boolean released;
/**
* Whether playback enabled or disabled by a client code
*/
private final AtomicBoolean playEnabled = new AtomicBoolean(false);
/**
* Whether audio player really plays sound back
*/
private final AtomicBoolean trackPlaying = new AtomicBoolean(false);
/**
* Whether Bluetooth connection established or not
*/
private final AtomicBoolean connected = new AtomicBoolean(false);
public BtSoundPlayerImpl(BluetoothDevice device, UUID serviceUUID, AudioTrack aTrack, SoundLevelMeter soundLevelMeter) {
this.device = device;
this.serviceUUID = serviceUUID;
this.aTrack = aTrack;
this.soundLevelMeter = soundLevelMeter;
workThread = new Thread(() -> workerMethodFinalized(), "bt-player");
workThread.start();
}
@Override
public void setOnPlayerPerformanceListener(OnPlayerPerformanceListener l) {
synchronized (playerPerformanceListenerLock) {
playerPerformanceListener = l;
}
}
@Override
public void setOnConnectionStatusListener(OnConnectionStatusListener l) {
synchronized (connStatusListenerLock) {
connStatusListener = l;
}
}
@Override
public void play() {
if (released) throw new IllegalStateException("Already released");
playEnabled.set(true);
}
@Override
public void pause() {
if (released) throw new IllegalStateException("Already released");
playEnabled.set(false);
}
@Override
public void release() {
if (!released) {
playEnabled.set(false);
released = true;
workThread.interrupt();
workThread = null;
connected.set(false);
BtUtils.silentlyCloseCloseable(soc);
}
}
@Override
public boolean isConnected() {
return connected.get();
}
@Override
public boolean isPlaying() {
return playEnabled.get();
}
@Override
public boolean isAudioTrackPlaying() {
return trackPlaying.get();
}
/**********************************************************************************************/
private void workerMethodFinalized() {
try {
workerMethodNonFinalized();
} finally {
//---- Close and release all resources ----
BtUtils.silentlyCloseCloseable(iStream);
BtUtils.silentlyCloseCloseable(soc);
aTrack.pause();
trackPlaying.set(false);
aTrack.flush();
aTrack.release();
}
}
private BluetoothSocket soc = null;
private InputStream iStream = null;
private int nReconnects;
private int nConnFailed;
private void workerMethodNonFinalized() {
while (!released) {
while (!connected.get() && !released) {
try {
connect();
connected.set(true);
nConnFailed = 0;
notifyConnectionEstablished(++ nReconnects);
} catch (IOException e) {
if (!released) {
notifyConnectionAttemptFailed(++ nConnFailed);
}
}
}
if (released) {
return;
}
try {
int cntrPrePlayBytes = 0;
byte[] buffer = new byte[MAX_FRAME_SIZE];
while (true) {
int nBytes = iStream.read(buffer);
ThreadLog.d(TAG, nBytes+" bytes got");
notifyBytesReceived(nBytes);
if (playEnabled.get()) {
//--- Mode PLAY ---
aTrack.write(buffer, 0, Math.round(nBytes));
soundLevelMeter.submitSamples(buffer, 0, nBytes);
ThreadLog.d(TAG, nBytes+" bytes writen ~~~~~~");
if (!trackPlaying.get()) {
cntrPrePlayBytes += nBytes;
if (cntrPrePlayBytes >= 1024) {
ThreadLog.d(TAG, "-------> Track start");
aTrack.play();
trackPlaying.set(true);
notifyBytesPlayed(cntrPrePlayBytes);
cntrPrePlayBytes = 0;
}
} else {
notifyBytesPlayed(nBytes);
}
} else {
//--- Mode PAUSE ---
if (trackPlaying.get()) {
ThreadLog.d(TAG, "-------> Track pause");
aTrack.pause();
trackPlaying.set(false);
}
}
if (released) return;
}
} catch(IOException e) {
if (trackPlaying.get()) {
ThreadLog.d(TAG, "-------> Track stop");
aTrack.stop();
trackPlaying.set(false);
}
connected.set(false);
notifyConnectionBroken(e);
BtUtils.silentlyCloseCloseable(iStream);
BtUtils.silentlyCloseCloseable(soc);
}
} // TOP while(!released)
}
private void connect() throws IOException {
soc = device.createRfcommSocketToServiceRecord(serviceUUID);
soc.connect(); // Do the actual connecting job!!!!
iStream = soc.getInputStream();
}
/****************** Notify client code via listeners ****************************************/
private final Object playerPerformanceListenerLock = new Object();
private volatile OnPlayerPerformanceListener playerPerformanceListener;
private final Object connStatusListenerLock = new Object();
private volatile OnConnectionStatusListener connStatusListener;
private void notifyBytesReceived(int nBytes) {
if (playerPerformanceListener != null) {
synchronized (playerPerformanceListenerLock) {
if (playerPerformanceListener != null) {
playerPerformanceListener.onBytesReceived(nBytes);
}
}
}
}
private void notifyBytesPlayed(int nBytes) {
if (playerPerformanceListener != null) {
synchronized (playerPerformanceListenerLock) {
if (playerPerformanceListener != null) {
playerPerformanceListener.onBytesPlayed(nBytes);
}
}
}
}
private void notifyConnectionEstablished(int numReconnects) {
if (connStatusListener != null) {
synchronized (connStatusListenerLock) {
if (connStatusListener != null) {
connStatusListener.onConnectionRestorted(numReconnects);
}
}
}
}
private void notifyConnectionAttemptFailed(int nTry) {
if (connStatusListener != null) {
synchronized (connStatusListenerLock) {
if (connStatusListener != null) {
connStatusListener.onConnectionAttemptFailed(nTry);
}
}
}
}
private void notifyConnectionBroken(Throwable reason) {
if (connStatusListener != null) {
synchronized (connStatusListenerLock) {
if (connStatusListener != null) {
connStatusListener.onConnectionBroken(Thread.currentThread().getName(), reason);
}
}
}
}
}
|
java
| 21 | 0.541934 | 124 | 28.890411 | 292 |
starcoderdata
|
import requests
import sys
import urllib.request
from math import *
import time
from geopy.geocoders import Nominatim
# gn = geocoders.GeoNames(username = "")
geolocator = Nominatim()
def greatCircleDistance(lat1, lon1, lat2, lon2):
def haversin(x):
return sin(x/2)**2
return 2 * asin(sqrt(haversin(lat2-lat1) + cos(lat1) * cos(lat2) * haversin(lon2-lon1))) * 6371 * 0.02
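# NOTE: the haversine formula above expects latitudes/longitudes in radians; the
# geocoding responses used below are in degrees, so convert them (e.g. math.radians)
# for true distances. 6371 is the Earth radius in km; the extra 0.02 factor appears
# to be an application-specific scaling.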
d = {}
# mydict = {'carl':40, 'alan':2, 'bob':1, 'danny':3}
# for key, value in sorted(mydict.items(), key=lambda kv: (-kv[1], kv[0])):
# print("{}: {}".format(key, value))
addr1 = ""
city = ""
with open('textfiles/location1.txt', 'r') as file1:
lines = file1.readlines()
addr1 = lines[0]
city = lines[1]
response = requests.get('https://maps.googleapis.com/maps/api/geocode/json?address={}'.format(addr1))
resp_json_payload = response.json()
lat1 = resp_json_payload['results'][0]['geometry']['location']['lat']
long1 = resp_json_payload['results'][0]['geometry']['location']['lng']
# location = geolocator.geocode("addr1")
# print(location.raw)
# print(addr1)
# lat1 = gn.geocode(addr1).latitude
# long1 = gn.geocode(addr1).longitude
print(str(lat1) + "," + str(long1))
last = 0
current = 0
done = False
while done != True:
time.sleep(2)
try:
with open('textfiles/namelist.txt', 'r') as file2:
lines = file2.readlines()
for i in range (last, len(lines)):
current = i
name = lines[i]
addr2 = urllib.request.quote(name, city)
response = requests.get('https://maps.googleapis.com/maps/api/geocode/json?address={}'.format(addr2))
resp_json_payload = response.json()
lat2 = resp_json_payload['results'][0]['geometry']['location']['lat']
long2 = resp_json_payload['results'][0]['geometry']['location']['lng']
# print(str(lat2) + "," + str(long2))
dist = greatCircleDistance(lat1, long1, lat2, long2)
d[name] = dist
if i >= len(lines) - 1:
done = True
except:
e = sys.exc_info()[0]
print("Failed... trying again: " + str(e))
last = current
for key, value in sorted(d.items(), key=lambda kv: (-kv[1], kv[0])):
print("{}: {}".format(key, value))
# print(str(lat1) + "," + str(long1))
# mbox.showinfo('my app','Info: ' + str(resp_json_payload['results'][0]['geometry']['location']))
|
python
| 17 | 0.648637 | 105 | 29.421053 | 76 |
starcoderdata
|
public static OPCPackage Create(string path)
{
if (new DirectoryInfo(path).Exists)
throw new ArgumentException("file");
if (File.Exists(path))
{
throw new InvalidOperationException(
"This package (or file) already exists : use the open() method or delete the file.");
}
// Creates a new package
OPCPackage pkg = null;
pkg = new ZipPackage();
pkg.originalPackagePath = (new FileInfo(path)).Name;
ConfigurePackage(pkg);
return pkg;
}
|
c#
| 11 | 0.519169 | 109 | 32 | 19 |
inline
|
const fs = require("fs").promises;
const { getTheme } = require("./theme");
(async () => {
try {
const dist = "./dist";
// create the dist folder
await fs.mkdir(dist, { recursive: true });
// get the theme
const theme = getTheme();
// write the theme file
await fs.writeFile(
`${dist}/flat-dark-color-theme.json`,
JSON.stringify(theme, null, 2)
);
} catch (err) {
console.log(err);
process.exit(1);
}
})();
|
javascript
| 14 | 0.557447 | 46 | 18.583333 | 24 |
starcoderdata
|
pub fn schedule_task(&self, region_id: u64, msg: Msg) {
let reg = match self.try_send(region_id, msg) {
Either::Left(Ok(())) => return,
Either::Left(Err(TrySendError::Disconnected(msg))) | Either::Right(msg) => match msg {
Msg::Registration(reg) => reg,
Msg::Proposal(props) => {
info!(
"target region is not found, drop proposals";
"region_id" => region_id
);
for p in props.props {
let cmd = PendingCmd::new(p.index, p.term, p.cb);
notify_region_removed(props.region_id, props.id, cmd);
}
return;
}
Msg::Apply { .. } | Msg::Destroy(_) | Msg::LogsUpToDate(_) => {
info!(
"target region is not found, drop messages";
"region_id" => region_id
);
return;
}
Msg::Snapshot(_) => {
warn!(
"region is removed before taking snapshot, are we shutting down?";
"region_id" => region_id
);
return;
}
Msg::CatchUpLogs(cul) => panic!(
"[region {}] is removed before merged, failed to schedule {:?}",
region_id, cul.merge
),
#[cfg(test)]
Msg::Validate(_, _) => return,
},
Either::Left(Err(TrySendError::Full(_))) => unreachable!(),
};
// Messages in one region are sent in sequence, so there is no race here.
// However, this can't be handled inside control fsm, as messages can be
// queued inside both queue of control fsm and normal fsm, which can reorder
// messages.
let (sender, apply_fsm) = ApplyFsm::from_registration(reg);
let mailbox = BasicMailbox::new(sender, apply_fsm);
self.register(region_id, mailbox);
}
|
rust
| 18 | 0.442334 | 98 | 44 | 48 |
inline
|
@SuppressWarnings("serial")
@Before
public void before() throws Exception {
semphoreManager.releaseAllLocksAsAdmin(new UserInfo(true));
NewUser user = new NewUser();
user.setEmail(UUID.randomUUID().toString() + "@test.com");
user.setUserName(UUID.randomUUID().toString());
fromUserInfo = userManager.getUserInfo(userManager.createUser(user));
user = new NewUser();
user.setEmail(UUID.randomUUID().toString() + "@test.com");
user.setUserName(UUID.randomUUID().toString());
toUserInfo = userManager.getUserInfo(userManager.createUser(user));
adminUserInfo = userManager.getUserInfo(BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId());
final URL url = MessageToUserWorkerIntegrationTest.class.getClassLoader().getResource("Message.txt");
FileItemStream fis = new FileItemStream() {
@Override
public InputStream openStream() throws IOException {
return url.openStream();
}
@Override
public String getContentType() {
return "application/text";
}
@Override
public String getName() {
return "Message.txt";
}
@Override
public String getFieldName() {
return "none";
}
@Override
public boolean isFormField() {
return false;
}
};
S3FileHandle handle = fileHandleManager.createCompressedFileFromString(fromUserInfo.getId().toString(), new Date(), "my dog has fleas");
fileHandleId = handle.getId();
message = new MessageToUser();
message.setFileHandleId(fileHandleId);
message.setRecipients(new HashSet<String>() {
{
add(toUserInfo.getId().toString());
// Note: this causes the worker to send a delivery failure notification too
// Which can be visually confirmed by the tester (appears in STDOUT)
add(BOOTSTRAP_PRINCIPAL.AUTHENTICATED_USERS_GROUP.getPrincipalId().toString());
}
});
message = messageManager.createMessage(fromUserInfo, message);
}
|
java
| 15 | 0.717161 | 138 | 30.483333 | 60 |
inline
|
/**
* rule.go - access rule
*
* @author
*/
package access
import (
"errors"
"net"
"strings"
)
/**
* AccessRule defines order (access, deny)
* and IP or Network
*/
type AccessRule struct {
Allow bool
IsNetwork bool
Ip *net.IP
Network *net.IPNet
}
/**
* Parses string to AccessRule
*/
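// Example inputs: "allow 192.168.0.0/16", "deny 10.0.0.1"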
func ParseAccessRule(rule string) (*AccessRule, error) {
parts := strings.Split(rule, " ")
if len(parts) != 2 {
return nil, errors.New("Bad access rule format: " + rule)
}
r := parts[0]
cidrOrIp := parts[1]
if r != "allow" && r != "deny" {
return nil, errors.New("Cant parse rule definition " + rule)
}
// try check if cidrOrIp is ip and handle
ipShould := net.ParseIP(cidrOrIp)
if ipShould != nil {
return &AccessRule{
Allow: r == "allow",
Ip: &ipShould,
IsNetwork: false,
Network: nil,
}, nil
}
_, ipNetShould, _ := net.ParseCIDR(cidrOrIp)
if ipNetShould != nil {
return &AccessRule{
Allow: r == "allow",
Ip: nil,
IsNetwork: true,
Network: ipNetShould,
}, nil
}
return nil, errors.New("Cant parse acces rule target, not an ip or cidr: " + cidrOrIp)
}
/**
* Checks if ip matches access rule
*/
func (this *AccessRule) Matches(ip *net.IP) bool {
switch this.IsNetwork {
case true:
return this.Network.Contains(*ip)
case false:
return (*this.Ip).Equal(*ip)
}
return false
}
/**
* Checks is it's allow or deny rule
*/
func (this *AccessRule) Allows() bool {
return this.Allow
}
|
go
| 14 | 0.620712 | 87 | 16.166667 | 90 |
starcoderdata
|
package com.taotao.cloud.java.javaee.s1.c5_springmvc.p3.java;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpSession;
@RestController
@RequestMapping("/origin")
@CrossOrigin("http://localhost:8989")
public class OriginController {
@RequestMapping("/test1")
public String test1(HttpSession session){
System.out.println("test1~~");
session.setAttribute("name","shine");
return "ok";
}
@RequestMapping("/test2")
public String test2(HttpSession session){
System.out.println("test2~~");
String name = (String)session.getAttribute("name");
System.out.println("name:"+name);
return "ok";
}
}
|
java
| 11 | 0.714286 | 62 | 29.655172 | 29 |
starcoderdata
|
import patch from './patch';
import patchAttrs from './patchAttrs';
export default function patchNodes (oldNode, newNode) {
if (newNode.isEqualNode(oldNode)) {
// stop merging nodes that are equal
return true;
}
if (newNode.nodeType === 1 && newNode.nodeName === oldNode.nodeName) {
// merge the attributes of elements with matching tags
patchAttrs(newNode, oldNode);
if (newNode.isEqualNode(oldNode)) {
// stop merging nodes that are equal after merging attributes
return true;
}
// merge the remaining children
return patch(oldNode, newNode);
} else if (newNode.nodeType === 3 || newNode.nodeType === 8) {
if (oldNode.nodeValue !== newNode.nodeValue) {
oldNode.nodeValue = newNode.nodeValue;
}
return true;
}
}
|
javascript
| 13 | 0.698939 | 71 | 25.928571 | 28 |
starcoderdata
|
#ifndef OPTIONPANE_H_INCLUDED
#define OPTIONPANE_H_INCLUDED
#include <gtk/gtk.h>
// Create the status panel
GtkDrawingArea* build_option_pane();
// Mouse move event
void option_pane_move(double x,double y);
// Mouse click event
void option_pane_click(double x,double y);
// Release memory after the application exits
void option_pane_release();
#endif // OPTIONPANE_H_INCLUDED
|
c
| 6 | 0.766484 | 53 | 18.157895 | 19 |
starcoderdata
|