| code (string, 0–29.6k chars) | language (string, 9 classes) | AST_depth (int64, 3–30) | alphanumeric_fraction (float64, 0.2–0.86) | max_line_length (int64, 13–399) | avg_line_length (float64, 5.02–139) | num_lines (int64, 7–299) | source (string, 4 classes) |
|---|---|---|---|---|---|---|---|
/*global define, JSON*/
define('controllers/authController', {
init: function ($, routes, viewEngine) {
"use strict";
// GET /#/login
// login
routes.get(/^\/#\/login\/?/i, function (context) {
viewEngine.setView({
template: 't-login',
data: { }
});
});
// GET /login
// login
routes.get(/^\/login\/?/i, function (context) {
viewEngine.setView({
template: 't-login',
data: {}
});
});
// POST /login
// login
routes.post(/^\/login\/?/i, function (context) {
return true; // ignore
});
}
});
| javascript | 16 | 0.427441 | 58 | 23.451613 | 31 | starcoderdata |
/*
PsychToolbox3/Source/OSX/PsychHID/PsychHIDStandardInterfaces.h
PROJECTS: PsychHID.
PLATFORMS: OSX
AUTHORS:
rpw
mk
HISTORY:
19.08.2007 rpw Created. Original implementation used before Psychtoolbox 3.0.12.
04.10.2014 mk Refactored and almost completely rewritten for PTB 3.0.12.
TO DO:
*/
#ifndef PSYCH_IS_INCLUDED_PsychHIDStandardIntefaces
#define PSYCH_IS_INCLUDED_PsychHIDStandardIntefaces
#include "PsychHID.h"
#include
#endif
| c | 4 | 0.676573 | 93 | 18.724138 | 29 | starcoderdata |
import boto3


def get_security_groups(VpcId):
"""
Create master and slave security groups for EMR on VpcId
    :param VpcId: ID of the VPC in which to create (or reuse) the groups
    :return: dict with the master and slave security group IDs
"""
ec2 = boto3.resource('ec2')
    # Reuse existing security groups when present, since duplicates can't be created
security_groups = ec2.security_groups.filter(Filters=[{'Name': 'vpc-id', 'Values': [VpcId]}])
master = next((s for s in security_groups if s.group_name == 'EMR-master'), None)
if not master:
master = ec2.create_security_group(GroupName='EMR-master',
Description='Master group for Elastic MapReduce',
                                           VpcId=VpcId)
print('Security Group Created %s in vpc %s.' % (master.id, VpcId))
slave = next((s for s in security_groups if s.group_name == 'EMR-slave'), None)
if not slave:
slave = ec2.create_security_group(GroupName='EMR-slave',
Description='Slave group for Elastic MapReduce',
VpcId=VpcId)
print('Security Group Created %s in vpc %s.' % (slave.id, VpcId))
# IP permissions are the same for master and slave
ip_permissions = [
# permit SSH
{
'IpProtocol': 'tcp',
'FromPort': 22,
'ToPort': 22,
'IpRanges': [{'CidrIp': '0.0.0.0/0'}],
'Ipv6Ranges': [{'CidrIpv6': '::/0'}]
},
        # permit all traffic between members of the master and slave groups
{
'IpProtocol': '-1',
'FromPort': -1,
'ToPort': -1,
'UserIdGroupPairs': [{'GroupId': master.id}, {'GroupId': slave.id}]
}
]
for group in [master, slave]:
group.revoke_ingress(IpPermissions=ip_permissions)
data = group.authorize_ingress(IpPermissions=ip_permissions)
print('Ingress Successfully Set %s' % data)
    return {'masterSecurityGroup': master.id, 'slaveSecurityGroup': slave.id}
| python | 12 | 0.544186 | 97 | 37.72 | 50 | inline |
package com.inventory.postgre.dao;
import java.util.List;
import com.inventory.postgre.entity.Supplying;
public interface SupplyingDao {
void insertSupplying(Supplying supp);
void updateSupplying(Supplying supp);
    List<Supplying> findSupplierLogs();
}
| java | 6 | 0.772388 | 46 | 13.888889 | 18 | starcoderdata |
package Controller.Chat;
import Model.Chat.Message;
import Model.Library.GSON;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@WebServlet(name = "LoadChatServlet", urlPatterns = "/loadChat")
public class LoadChatServlet extends HttpServlet {
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String chatID = request.getParameter("chatID");
String messID = request.getParameter("messID");
String json = GSON.toJson(Message.loadChat(chatID, messID));
response.setContentType("text/html; charset=UTF-8");
response.setCharacterEncoding("UTF-8");
response.getWriter().write(json);
}
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
}
}
| java | 11 | 0.766184 | 122 | 35.964286 | 28 | starcoderdata |
void gmmTrainVB(t_Cluster *ptCluster, t_Data *ptData)
{
int i = 0, k = 0,nIter = 0;
int nN = ptData->nN, nK = ptCluster->nK;
/*change in log-likelihood*/
double dLastVBL = 0.0, dDelta = DBL_MAX;
double **aadZ = ptCluster->aadZ;
int nMaxIter = ptCluster->nMaxIter;
double dEpsilon = ptCluster->dEpsilon;
FILE *ofp = NULL;
if(ptCluster->szCOutFile){
ofp = fopen(ptCluster->szCOutFile,"w");
if(!ofp){
fprintf(stderr, "Failed to open file %s in gmmTrainVB\n",ptCluster->szCOutFile);
fflush(stderr);
}
}
/*calculate data likelihood*/
calcZ(ptCluster,ptData);
ptCluster->dVBL = calcVBL(ptCluster);
while(nIter < nMaxIter && dDelta > dEpsilon){
/*update parameter estimates*/
performMStep(ptCluster, ptData);
/*calculate responsibilities*/
calcZ(ptCluster,ptData);
dLastVBL = ptCluster->dVBL;
ptCluster->dVBL = calcVBL(ptCluster);
dDelta = fabs(ptCluster->dVBL - dLastVBL);
if(ofp){
fprintf(ofp,"%d,%f,%f,",nIter, ptCluster->dVBL, dDelta);
for(k = 0; k < nK-1; k++){
fprintf(ofp,"%f,",ptCluster->adPi[k]);
}
fprintf(ofp,"%f\n",ptCluster->adPi[nK - 1]);
fflush(ofp);
}
nIter++;
}
if(ofp){
fclose(ofp);
}
/*assign to best clusters*/
for(i = 0; i < nN; i++){
double dMaxZ = aadZ[i][0];
int nMaxK = 0;
for(k = 1; k < nK; k++){
if(aadZ[i][k] > dMaxZ){
nMaxK = k;
dMaxZ = aadZ[i][k];
}
}
ptCluster->anMaxZ[i] = nMaxK;
}
return;
}
| c | 14 | 0.594631 | 82 | 21.938462 | 65 | inline |
import styled from 'styled-components';
export const SectionWrapper = styled.section`
min-height: ${props => props.minHeight};
height: 100%;
width: 100%;
padding: 2rem ${props => props.paddingLeftRight || '0'};
@media (max-width: 750px) {
padding: 1rem 0;
}
`;
export const SectionTitleContent = styled.h2`
padding: 2rem ${props => props.paddingLeftRight || '0'};
font-size: ${props => props.fontSize || '1rem'};
color: ${props => props.fontColor || 'black'};
text-transform: uppercase;
margin-bottom: 1rem;
`;
export const SectionSubTitleContent = styled.h3`
color: ${props => props.fontColor || 'black'};
font-size: ${props => props.fontSize || '1rem'};
margin-bottom: 1rem;
`;
export const SectionLogoContent = styled.h3`
color: ${props => props.fontColor || 'black'};
font-size: ${props => props.fontSize || '1rem'};
margin-top: 5rem;
margin-bottom: 5rem;
`;
export const SectionSpanBoldContent = styled.span`
font-weight: bold;
font-size: ${props => props.fontSize || '1rem'};
margin-bottom: 1rem;
`;
export const SectionFreeWrapper = styled.section`
min-height: ${props => props.minHeight};
height: 100%;
width: 100%;
padding: 5rem ${props => props.paddingRight || '0'} 5rem ${props => props.paddingRight || '0'}; //top right bottom left
@media (max-width: 750px) {
padding: 1rem 0;
}
`;
export const SectionPContent = styled.p`
color: ${props => props.fontColor || 'black'};
margin-bottom: 1rem;
font-size: ${props => props.fontSize || '1rem'};
`;
export const Ul = styled.ul`
list-style-type: none;
`;
| javascript | 6 | 0.660606 | 122 | 26.516667 | 60 | starcoderdata |
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Text;
namespace DiscordAssistant.Models
{
public abstract class JenkinsObject
{
public string _class { get; set; }
[JsonIgnore]
public abstract string ClassName { get; }
}
}
| c# | 8 | 0.680135 | 49 | 18.8 | 15 | starcoderdata |
def as_short_dict(self) -> dict[str, Any]:
"""Return a brief dictionary version of this ActionTrace."""
last_step = None
if self._trace:
last_step = list(self._trace)[-1]
result = {
"last_step": last_step,
"run_id": self.run_id,
"state": self._state,
"script_execution": self._script_execution,
"timestamp": {
"start": self._timestamp_start,
"finish": self._timestamp_finish,
},
"domain": self.key[0],
"item_id": self.key[1],
}
if self._error is not None:
result["error"] = str(self._error)
        return result
| python | 11 | 0.47905 | 68 | 28.875 | 24 | inline |
#include "hls_target.h"
#include "Linebuffer.h"
#include "halide_math.h"
void hls_target(
hls::stream<AxiPackedStencil<uint8_t, 1, 1> > &hw_input,
hls::stream<AxiPackedStencil<uint8_t, 1, 1> > &hw_output)
{
#pragma HLS DATAFLOW
#pragma HLS INLINE region
#pragma HLS INTERFACE ap_hs port=hw_input
#pragma HLS INTERFACE ap_hs port=hw_output
// alias the arguments
hls::stream<AxiPackedStencil<uint8_t, 1, 1> > &_hw_input_stencil_update_stream = hw_input;
hls::stream<AxiPackedStencil<uint8_t, 1, 1> > &_hw_output_stencil_stream = hw_output;
uint8_t _p2_kernela0[9];
// produce p2:kernel
uint8_t _290 = (uint8_t)(0);
_p2_kernela0[0] = _290;
uint8_t _291 = (uint8_t)(0);
_p2_kernela0[1] = _291;
uint8_t _292 = (uint8_t)(0);
_p2_kernela0[2] = _292;
uint8_t _293 = (uint8_t)(0);
_p2_kernela0[3] = _293;
uint8_t _294 = (uint8_t)(0);
_p2_kernela0[4] = _294;
uint8_t _295 = (uint8_t)(0);
_p2_kernela0[5] = _295;
uint8_t _296 = (uint8_t)(0);
_p2_kernela0[6] = _296;
uint8_t _297 = (uint8_t)(0);
_p2_kernela0[7] = _297;
uint8_t _298 = (uint8_t)(0);
_p2_kernela0[8] = _298;
uint8_t _299 = (uint8_t)(0);
_p2_kernela0[0] = _299;
uint8_t _300 = (uint8_t)(1);
_p2_kernela0[1] = _300;
uint8_t _301 = (uint8_t)(0);
_p2_kernela0[2] = _301;
uint8_t _302 = (uint8_t)(1);
_p2_kernela0[3] = _302;
uint8_t _303 = (uint8_t)(2);
_p2_kernela0[4] = _303;
uint8_t _304 = (uint8_t)(1);
_p2_kernela0[5] = _304;
uint8_t _305 = (uint8_t)(0);
_p2_kernela0[6] = _305;
uint8_t _306 = (uint8_t)(1);
_p2_kernela0[7] = _306;
uint8_t _307 = (uint8_t)(0);
_p2_kernela0[8] = _307;
// consume p2:kernel
hls::stream<PackedStencil<uint8_t, 3, 3> > _hw_input_stencil_stream;
#pragma HLS STREAM variable=_hw_input_stencil_stream depth=1
#pragma HLS RESOURCE variable=_hw_input_stencil_stream core=FIFO_SRL
linebuffer<4, 4>(_hw_input_stencil_update_stream, _hw_input_stencil_stream);
(void)0;
// dispatch_stream(_hw_input_stencil_stream, 2, 3, 1, 4, 3, 1, 4, 1, "hw_output", 0, 0, 4, 0, 4);
hls::stream<PackedStencil<uint8_t, 3, 3> > &_hw_input_stencil_stream_to_hw_output = _hw_input_stencil_stream;
(void)0;
// produce hw_output.stencil.stream
for (int _hw_output_y___scan_dim_1 = 0; _hw_output_y___scan_dim_1 < 0 + 2; _hw_output_y___scan_dim_1++)
{
for (int _hw_output_x___scan_dim_0 = 0; _hw_output_x___scan_dim_0 < 0 + 2; _hw_output_x___scan_dim_0++)
{
#pragma HLS PIPELINE II=1
Stencil<uint8_t, 3, 3> _hw_input_stencil;
#pragma HLS ARRAY_PARTITION variable=_hw_input_stencil.value complete dim=0
_hw_input_stencil = _hw_input_stencil_stream_to_hw_output.read();
(void)0;
Stencil<uint8_t, 1, 1> _hw_output_stencil;
#pragma HLS ARRAY_PARTITION variable=_hw_output_stencil.value complete dim=0
int32_t _mula1[1];
_mula1[0] = 0;
int32_t _308 = _mula1[0];
uint8_t _309 = _hw_input_stencil(0, 0);
uint8_t _310 = _p2_kernela0[0];
uint8_t _311 = _309 << _310;
int32_t _312 = (int32_t)(_311);
int32_t _313 = _308 + _312;
_mula1[0] = _313;
int32_t _314 = _mula1[0];
uint8_t _315 = _hw_input_stencil(1, 0);
uint8_t _316 = _p2_kernela0[1];
uint8_t _317 = _315 << _316;
int32_t _318 = (int32_t)(_317);
int32_t _319 = _314 + _318;
_mula1[0] = _319;
int32_t _320 = _mula1[0];
uint8_t _321 = _hw_input_stencil(2, 0);
uint8_t _322 = _p2_kernela0[2];
uint8_t _323 = _321 << _322;
int32_t _324 = (int32_t)(_323);
int32_t _325 = _320 + _324;
_mula1[0] = _325;
int32_t _326 = _mula1[0];
uint8_t _327 = _hw_input_stencil(0, 1);
uint8_t _328 = _p2_kernela0[3];
uint8_t _329 = _327 << _328;
int32_t _330 = (int32_t)(_329);
int32_t _331 = _326 + _330;
_mula1[0] = _331;
int32_t _332 = _mula1[0];
uint8_t _333 = _hw_input_stencil(1, 1);
uint8_t _334 = _p2_kernela0[4];
uint8_t _335 = _333 << _334;
int32_t _336 = (int32_t)(_335);
int32_t _337 = _332 + _336;
_mula1[0] = _337;
int32_t _338 = _mula1[0];
uint8_t _339 = _hw_input_stencil(2, 1);
uint8_t _340 = _p2_kernela0[5];
uint8_t _341 = _339 << _340;
int32_t _342 = (int32_t)(_341);
int32_t _343 = _338 + _342;
_mula1[0] = _343;
int32_t _344 = _mula1[0];
uint8_t _345 = _hw_input_stencil(0, 2);
uint8_t _346 = _p2_kernela0[6];
uint8_t _347 = _345 << _346;
int32_t _348 = (int32_t)(_347);
int32_t _349 = _344 + _348;
_mula1[0] = _349;
int32_t _350 = _mula1[0];
uint8_t _351 = _hw_input_stencil(1, 2);
uint8_t _352 = _p2_kernela0[7];
uint8_t _353 = _351 << _352;
int32_t _354 = (int32_t)(_353);
int32_t _355 = _350 + _354;
_mula1[0] = _355;
int32_t _356 = _mula1[0];
uint8_t _357 = _hw_input_stencil(2, 2);
uint8_t _358 = _p2_kernela0[8];
uint8_t _359 = _357 << _358;
int32_t _360 = (int32_t)(_359);
int32_t _361 = _356 + _360;
_mula1[0] = _361;
int32_t _362 = _mula1[0];
int32_t _363 = _362 >> 4;
uint8_t _364 = (uint8_t)(_363);
_hw_output_stencil(0, 0) = _364;
AxiPackedStencil<uint8_t, 1, 1> _hw_output_stencil_packed = _hw_output_stencil;
if (_hw_output_x___scan_dim_0 == 1 && _hw_output_y___scan_dim_1 == 1) {
_hw_output_stencil_packed.last = 1;
} else {
_hw_output_stencil_packed.last = 0;
}
_hw_output_stencil_stream.write(_hw_output_stencil_packed);
(void)0;
} // for _hw_output_x___scan_dim_0
} // for _hw_output_y___scan_dim_1
} // kernel hls_target_hls_target
| c++ | 13 | 0.58818 | 110 | 32.31677 | 161 | starcoderdata |
#ifndef _BBOX_H_
#define _BBOX_H_
#include "TecLibs/Tec3D.h"
class BBox
{
public:
struct PreInit {};
BBox() {;}
BBox(const Point3D &P) : m_Min(P), m_Max(P){;}
BBox(const Point3D &PMin, const Point3D &PMax) : m_Min(PMin), m_Max(PMax){;}
BBox(const Point3D* &Array, int Count)
{
Init(Array,Count);
}
BBox(const BBox & bb) : m_Min(bb.m_Min), m_Max(bb.m_Max) { ; }
BBox(PreInit p) : m_Min{ v3f(FLT_MAX, FLT_MAX, FLT_MAX) }, m_Max{ v3f(-FLT_MAX, -FLT_MAX, -FLT_MAX) } {}
void Init(const Point3D &P)
{
m_Min = P;
m_Max = P;
}
void Init(const Point3D* Array, int Count)
{
int i;
Init(Array[0]);
for (i=1;i<Count;i++) Update(Array[i]);
}
void Init(const BBox &pBBox)
{
m_Min = pBBox.m_Min;
m_Max = pBBox.m_Max;
}
inline Float& operator[] (const int nIndex)
{
return *((&m_Min[0])+nIndex);
}
void SetEmpty(void)
{
m_Min.x = m_Min.y = m_Min.z = Float_Max;
m_Max.x = m_Max.y = m_Max.z = -Float_Max;
}
bool IsEmpty(void) const
{
return m_Max.x<m_Min.x || m_Max.y<m_Min.y || m_Max.z<m_Min.z ;
}
Point3D Center() const
{
return (m_Min+m_Max)*0.5f;
}
v3f Size() const
{
return m_Max - m_Min;
}
void Update(const Point3D &P)
{
if (P.x<m_Min.x) m_Min.x = P.x;
if (P.y<m_Min.y) m_Min.y = P.y;
if (P.z<m_Min.z) m_Min.z = P.z;
if (P.x>m_Max.x) m_Max.x = P.x;
if (P.y>m_Max.y) m_Max.y = P.y;
if (P.z>m_Max.z) m_Max.z = P.z;
}
void Update(const Point3D* Array, int Count)
{
int i;
for (i=0;i<Count;i++) Update(Array[i]);
}
void Update(const BBox &pBBox)
{
if (pBBox.m_Min.x<m_Min.x) m_Min.x = pBBox.m_Min.x;
if (pBBox.m_Min.y<m_Min.y) m_Min.y = pBBox.m_Min.y;
if (pBBox.m_Min.z<m_Min.z) m_Min.z = pBBox.m_Min.z;
if (pBBox.m_Max.x>m_Max.x) m_Max.x = pBBox.m_Max.x;
if (pBBox.m_Max.y>m_Max.y) m_Max.y = pBBox.m_Max.y;
if (pBBox.m_Max.z>m_Max.z) m_Max.z = pBBox.m_Max.z;
}
void ConvertToPoint(Point3D* Array) const
{
Array[0].x = m_Min.x;
Array[0].y = m_Min.y;
Array[0].z = m_Min.z;
Array[1].x = m_Max.x;
Array[1].y = m_Min.y;
Array[1].z = m_Min.z;
Array[2].x = m_Max.x;
Array[2].y = m_Max.y;
Array[2].z = m_Min.z;
Array[3].x = m_Min.x;
Array[3].y = m_Max.y;
Array[3].z = m_Min.z;
Array[4].x = m_Min.x;
Array[4].y = m_Min.y;
Array[4].z = m_Max.z;
Array[5].x = m_Max.x;
Array[5].y = m_Min.y;
Array[5].z = m_Max.z;
Array[6].x = m_Max.x;
Array[6].y = m_Max.y;
Array[6].z = m_Max.z;
Array[7].x = m_Min.x;
Array[7].y = m_Max.y;
Array[7].z = m_Max.z;
}
bool IsIn(const Point3D &P) const
{
return (
P.x>=m_Min.x && P.x<=m_Max.x
&& P.y>=m_Min.y && P.y<=m_Max.y
&& P.z>=m_Min.z && P.z<=m_Max.z);
}
bool IsIn(const BBox &pBBox) const
{
return (
pBBox.m_Min.x>=m_Min.x && pBBox.m_Max.x<=m_Max.x &&
pBBox.m_Min.y>=m_Min.y && pBBox.m_Max.y<=m_Max.y &&
pBBox.m_Min.z>=m_Min.z && pBBox.m_Max.z<=m_Max.z );
}
bool IsInWithAxisMask(const BBox &pBBox,unsigned int axisMask) const
{
return (
((pBBox.m_Min.x>=m_Min.x && pBBox.m_Max.x<=m_Max.x)||((axisMask&1))) &&
((pBBox.m_Min.y>=m_Min.y && pBBox.m_Max.y<=m_Max.y)||((axisMask&2))) &&
((pBBox.m_Min.z>=m_Min.z && pBBox.m_Max.z<=m_Max.z)||((axisMask&4))));
}
bool DoesIntersect(const BBox &Other) const
{
return (! (Other.m_Max.x < m_Min.x || Other.m_Min.x > m_Max.x ) ) &&
(! (Other.m_Max.y < m_Min.y || Other.m_Min.y > m_Max.y ) ) &&
(! (Other.m_Max.z < m_Min.z || Other.m_Min.z > m_Max.z ) );
}
bool operator==(const BBox& Other) const
{
if (IsEmpty()) return Other.IsEmpty();
else return (m_Min == Other.m_Min && m_Max ==Other.m_Max);
}
bool operator!=(const BBox& Other) const
{ return !operator==(Other); }
BBox& operator=(const BBox &bb)
{
m_Min = bb.m_Min;
m_Max = bb.m_Max;
return *this;
}
Float SquaredDistance(const Point3D& p) const
{
Point3D dmax(p);
dmax -= m_Max;
Point3D dmin(m_Min);
dmin -= p;
dmax.x = ((dmax.x) > 0.0f) ? (dmax.x) : 0.0f;
dmax.x = ((dmin.x) > dmax.x) ? (dmin.x) : dmax.x;
dmax.y = ((dmax.y) > 0.0f) ? (dmax.y) : 0.0f;
dmax.y = ((dmin.y) > dmax.y) ? (dmin.y) : dmax.y;
dmax.z = ((dmax.z) > 0.0f) ? (dmax.z) : 0.0f;
dmax.z = ((dmin.z) > dmax.z) ? (dmin.z) : dmax.z;
return dmax.x*dmax.x + dmax.y*dmax.y + dmax.z*dmax.z;
}
Float Distance(const Point3D& p) const
{
return sqrtF(SquaredDistance(p));
}
union {
struct
{
Point3D m_Min, m_Max;
};
Point3D m_MinMax[2];
};
};
#endif
| c | 17 | 0.562026 | 105 | 21.867347 | 196 | starcoderdata |
public class LongestSubstringWithoutRepeatingCharacters {
public int lengthOfLongestSubstring(String s) {
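        // Sliding window: head marks the start of the current substring without repeats,
        // and lastIndex[c] records the most recent position at which character c was seen.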
if (s == null)
return 0;
int longest = 0;
int head = 0;
int[] lastIndex = new int[128];
for (int i = 0; i < lastIndex.length; ++i) {
lastIndex[i] = -1;
}
for (int tail = 0; tail < s.length(); ++tail) {
if (lastIndex[s.charAt(tail)] >= head) {
longest = Math.max(longest, tail - head);
head = lastIndex[s.charAt(tail)] + 1;
}
lastIndex[s.charAt(tail)] = tail;
}
return Math.max(longest, s.length() - head);
}
}
| java | 15 | 0.5 | 57 | 33.4 | 20 | starcoderdata |
<?php
namespace Devless\RulesEngine;
use App\Helpers\Helper;
use App\Helpers\ActionClass;
trait actions
{
/**
* check if on intended table.
*
* @param string $expectedTableName
*
* @return mixed|string
*/
public function onTable()
{
$expectedTableNames = func_get_args();
if (!$this->execOrNot) {
return $this;
}
$this->tableName = (is_array($this->tableName)) ? $this->tableName[0] : $this->tableName;
$this->execOrNot = (in_array($this->tableName , $expectedTableNames));
return $this;
}
/**
* Stop execcution with an exception.
*
* @param null $msg
*
* @return mixed|string
*/
public function succeedWith($msg = null)
{
if (!$this->execOrNot) {
return $this;
}
$evaluator = function () use ($msg) {
return Helper::interrupt(1000, $msg);
};
return $this->executor($evaluator);
}
/**
* Stop execution with an exception.
*
* @param null $msg
*
* @return mixed|string
*/
public function failWith($msg = null)
{
if (!$this->execOrNot) {
return $this;
}
$evaluator = function () use ($msg) {
return Helper::interrupt(1001, $msg);
};
return $this->executor($evaluator);
}
/**
* Call on an ActionClass.
*
* @param $service
* @param $method
* @param null $params
*
* @return mixed|string
*/
public function run($service, $method, $params = null, $remoteUrl = null, $token = null)
{
if (!$this->execOrNot) {
return $this;
}
$evaluator = function () use ($service, $method, $params, $remoteUrl, $token) {
if ($remoteUrl && $token) {
$this->results = ActionClass::remoteExecute($service, $method, $params, $remoteUrl, $token);
} else {
$this->results = ActionClass::execute($service, $method, $params);
}
$this->answered = true;
return true;
};
return $this->executor($evaluator);
}
/**
* Get results variable and set to variable.
*
* @param $input_var
*
* @return $this
*/
public function getRunResult(&$input_var)
{
if (!$this->execOrNot) {
return $this;
}
$input_var = $this->results;
return $this;
}
}
| php | 18 | 0.504341 | 108 | 22.036364 | 110 | starcoderdata |
def test_fancy_assignment(self):
a = np.zeros((4, 4, 4), 'd')
twoByTwo = np.ones((2, 2), 'd')
#NOTEs from commit message motivating why we need this:
# a = np.zeros((3,3,3))
# a[:,1:2,1:3].shape == (3,1,2) # good!
# a[0,:,1:3].shape == (3,2) #good!
# a[0,:,[1,2]].shape == (2,3) # ?? (broacasting ':' makes this like a[0,[1,2]])
# a[:,[1,2],[1,2]].shape == (3,2) # ?? not (3,2,2) b/c lists broadcast
# a[:,[1],[1,2]].shape == (3,2) # ?? not (3,1,2) b/c lists broadcast
# a[:,[1,2],[0,1,2]].shape == ERROR b/c [1,2] can't broadcast to [0,1,2]!
#simple integer indices
mt._fas(a, (0, 0, 0), 4.5) # a[0,0,0] = 4.5
self.assertAlmostEqual(a[0, 0, 0], 4.5)
mt._fas(a, (0, 0, 0), 4.5, add=True) # a[0,0,0] += 4.5
self.assertAlmostEqual(a[0, 0, 0], 9.0)
#still simple: mix of slices and integers
mt._fas(a, (slice(0, 2), slice(0, 2), 0), twoByTwo) # a[0:2,0:2,0] = twoByTwo
self.assertArraysAlmostEqual(a[0:2, 0:2, 0], twoByTwo)
#complex case: some/all indices are integer arrays
mt._fas(a, ([0, 1], [0, 1], 0), twoByTwo[:, :]) # a[0:2,0:2,0] = twoByTwo - but a[[0,1],[0,1],0] wouldn't do this!
self.assertArraysAlmostEqual(a[0:2, 0:2, 0], twoByTwo)
mt._fas(a, ([0, 1], [0, 1], 0), twoByTwo[:, :], add=True) # a[0:2,0:2,0] = twoByTwo - but a[[0,1],[0,1],0] wouldn't do this!
self.assertArraysAlmostEqual(a[0:2, 0:2, 0], 2 * twoByTwo)
# Fancy indexing (without assignment)
self.assertEqual(mt._findx(a, (0, 0, 0)).shape, ()) # (1,1,1))
self.assertEqual(mt._findx(a, (slice(0, 2), slice(0, 2), slice(0, 2))).shape, (2, 2, 2))
self.assertEqual(mt._findx(a, (slice(0, 2), slice(0, 2), 0)).shape, (2, 2))
self.assertEqual(mt._findx(a, ([0, 1], [0, 1], 0)).shape, (2, 2))
        self.assertEqual(mt._findx(a, ([], [0, 1], 0)).shape, (0, 2))
| python | 12 | 0.503564 | 133 | 52.108108 | 37 | inline |
sail_status_t jpeg_private_fetch_resolution(struct jpeg_decompress_struct *decompress_context, struct sail_resolution **resolution) {
SAIL_CHECK_PTR(resolution);
/* Resolution information is not valid. */
if (decompress_context->X_density == 0 && decompress_context->Y_density == 0) {
return SAIL_OK;
}
SAIL_TRY(sail_alloc_resolution(resolution));
switch (decompress_context->density_unit) {
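        /* JFIF density units: 1 = dots per inch, 2 = dots per centimeter. */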
case 1: {
(*resolution)->unit = SAIL_RESOLUTION_UNIT_INCH;
break;
}
case 2: {
(*resolution)->unit = SAIL_RESOLUTION_UNIT_CENTIMETER;
break;
}
}
(*resolution)->x = (float)decompress_context->X_density;
(*resolution)->y = (float)decompress_context->Y_density;
return SAIL_OK;
}
| c | 12 | 0.612984 | 133 | 28.703704 | 27 | inline |
static unsafe void _Move(Window t, int x, int y, in RECT r, bool andSize) {
var wstate = t.WindowState;
if (t.IsLoaded) {
var w = t.Hwnd();
if (w.Is0) throw new ObjectDisposedException("Window");
if (wstate != WindowState.Normal) t.WindowState = WindowState.Normal;
if (andSize) w.MoveL(r); else w.MoveL(x, y);
} else {
//tested: don't need this for Popup. Its PlacementRectangle can use physical pixels.
t.WindowStartupLocation = WindowStartupLocation.Manual;
if (wstate == WindowState.Minimized) t.ShowActivated = false;
WindowsHook.ThreadCbt(k => {
if (k.code == HookData.CbtEvent.CREATEWND) {
var c = k.CreationInfo->lpcs;
if (!c->style.Has(WS.CHILD)) {
var name = c->Name;
if (name.Length > 25 && name.StartsWith("m8KFOuCJOUmjziONcXEi3A ")) {
k.hook.Dispose();
var s = name[23..].ToString();
if (name[^1] == ';') {
c->x = s.ToInt(0, out int e); c->y = s.ToInt(e);
} else if (RECT.TryParse(s, out var r)) {
c->x = r.left; c->y = r.top; c->cx = r.Width; c->cy = r.Height;
}
}
}
} else { //didn't detect the window? Because unhooks when detects.
Debug_.Print($"{k.code} {k.Hwnd}");
//Debug_.PrintIf(k.code != HookData.CbtEvent.SETFOCUS, $"{k.code} {k.Hwnd}"); //sometimes SETFOCUS before CREATEWND, and it is bad
}
return false;
});
t.Left = double.NaN;
t.Top = double.NaN;
if (andSize) {
t.Width = double.NaN;
t.Height = double.NaN;
}
//temporarily change Title. I didn't find other ways to recognize the window in the hook proc. Also in title we can pass r or x y.
string title = t.Title, s;
if (andSize) s = "m8KFOuCJOUmjziONcXEi3A " + r.ToStringSimple(); else s = $"m8KFOuCJOUmjziONcXEi3A {x} {y};";
t.Title = s;
//Need to restore Title ASAP.
// In CBT hook cannot change window name in any way.
// The first opportunity is WM_CREATE, it's before WPF events, but cannot set Title property there.
// The sequence of .NET events depends on Window properties etc:
// Default: IsVisibleChanged, HwndSource.AddSourceChangedHandler, SourceInitialized, Loaded. And several unreliable events inbetween.
// SizeToContent: IsVisibleChanged, HwndSource.AddSourceChangedHandler, Loaded, SourceInitialized.
// WindowInteropHelper(w).EnsureHandle(): SourceInitialized (before ShowX), IsVisibleChanged, HwndSource.AddSourceChangedHandler, Loaded.
// Window without controls: Initialized, ....
SourceChangedEventHandler eh = null;
eh = (_, _) => {
HwndSource.RemoveSourceChangedHandler(t, eh);
t.Title = title;
//if (wstate == WindowState.Normal && !t.ShowActivated) t.Hwnd().ZorderTop(); //it seems don't need it
};
HwndSource.AddSourceChangedHandler(t, eh);
}
}
| c# | 28 | 0.640773 | 142 | 44.174603 | 63 | inline |
private bool TryReleaseLeaseIfHeld(ref MasterLeaseMetadata metadata)
{
var now = _clock.UtcNow;
var lease = metadata.Lease;
bool isMaster = IsCurrentMaster(lease);
if (!isMaster)
{
// Does not hold the master lease, so cannot release.
return false;
}
metadata = metadata with
{
Lease = new MasterLease()
{
CreationTimeUtc = lease!.CreationTimeUtc,
LastUpdateTimeUtc = now,
LeaseExpiryTimeUtc = DateTime.MinValue,
Master = _primaryMachineLocation
}
};
return true;
}
| c# | 14 | 0.456743 | 69 | 30.833333 | 24 | inline |
def change_node(self, index: int, p_leaf: float, new_node: str = None) -> None:
"""Change node at index."""
if self.behaviors.is_up_node(self.bt[index]):
return
if new_node is None:
new_node = self.random_node(p_leaf)
# Change control node to leaf node, remove whole subtree
if self.behaviors.is_control_node(self.bt[index]) and\
self.behaviors.is_leaf_node(new_node):
self.delete_node(index)
self.bt.insert(index, new_node)
# Change leaf node to control node. Add up and extra condition/behavior node child
elif self.behaviors.is_control_node(new_node) and\
self.behaviors.is_leaf_node(self.bt[index]):
old_node = self.bt[index]
self.bt[index] = new_node
if self.behaviors.is_behavior_node(old_node):
self.bt.insert(index + 1, self.behaviors.get_random_leaf_node())
self.bt.insert(index + 2, old_node)
else: #condition node
self.bt.insert(index + 1, old_node)
self.bt.insert(index + 2, self.behaviors.get_random_behavior_node())
self.bt.insert(index + 3, self.behaviors.get_up_node())
else:
            self.bt[index] = new_node
| python | 15 | 0.582043 | 90 | 45.178571 | 28 | inline |
bool IMEDispatcher::attachDelegateWithIME(IMEDelegate * delegate)
{
bool ret = false;
do
{
CC_BREAK_IF(! _impl || ! delegate);
DelegateIter end = _impl->_delegateList.end();
DelegateIter iter = _impl->findDelegate(delegate);
// if pDelegate is not in delegate list, return
CC_BREAK_IF(end == iter);
if (_impl->_delegateWithIme)
{
if (_impl->_delegateWithIme != delegate)
{
// if old delegate canDetachWithIME return false
// or pDelegate canAttachWithIME return false,
// do nothing.
CC_BREAK_IF(!_impl->_delegateWithIme->canDetachWithIME()
|| !delegate->canAttachWithIME());
// detach first
IMEDelegate * oldDelegate = _impl->_delegateWithIme;
_impl->_delegateWithIme = 0;
oldDelegate->didDetachWithIME();
_impl->_delegateWithIme = *iter;
delegate->didAttachWithIME();
}
ret = true;
break;
}
// delegate hasn't attached to IME yet
CC_BREAK_IF(! delegate->canAttachWithIME());
_impl->_delegateWithIme = *iter;
delegate->didAttachWithIME();
ret = true;
} while (0);
return ret;
}
| c++ | 17 | 0.51076 | 72 | 29.727273 | 44 | inline |
namespace GameServer.Repositories
{
public interface IItemRepository
{
}
}
| c# | 5 | 0.685393 | 36 | 11.714286 | 7 | starcoderdata |
package com.activequant.backtesting;
import com.activequant.domainmodel.TimeStamp;
import com.activequant.domainmodel.streaming.StreamEventIterator;
import com.activequant.domainmodel.streaming.TimeStreamEvent;
/**
*
* @author GhostRider
*
*/
public class TradingTimeStreamIterator extends StreamEventIterator<TimeStreamEvent> {
// time in nanoseconds.
private long endTime, currentTime;
private final long step;
public TradingTimeStreamIterator(TimeStamp startTime, TimeStamp endTime, long stepWidthInNanoS){
step = stepWidthInNanoS;
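        // start one step before startTime so the first call to next() returns startTime itself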
this.currentTime = startTime.getNanoseconds() - step;
this.endTime = endTime.getNanoseconds();
}
@Override
public boolean hasNext() {
return currentTime < endTime;
}
@Override
public TimeStreamEvent next() {
currentTime = currentTime + step;
TimeStreamEvent event = new TimeStreamEvent(new TimeStamp(currentTime));
//
return event;
}
}
| java | 11 | 0.769314 | 97 | 23.184211 | 38 | starcoderdata |
package me.gingerninja.authenticator.data.adapter;
import android.animation.ObjectAnimator;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.ViewGroup;
import androidx.annotation.NonNull;
import androidx.annotation.Size;
import androidx.databinding.DataBindingUtil;
import androidx.recyclerview.widget.RecyclerView;
import com.google.android.material.card.MaterialCardView;
import me.gingerninja.authenticator.R;
import me.gingerninja.authenticator.data.db.entity.Label;
import me.gingerninja.authenticator.databinding.LabelListItemColorBinding;
import me.gingerninja.authenticator.ui.label.LabelListItemViewModel;
public class LabelListIteratorAdapter extends BaseIteratorAdapter<BindingViewHolder, Label> implements LabelListItemViewModel.LabelMenuItemClickListener {
private LabelListItemViewModel.LabelMenuItemClickListener menuItemClickListener;
private boolean dragEnabled = false;
private int moveFrom = -1, moveTo = -1;
public LabelListIteratorAdapter() {
setHasStableIds(true);
}
@Override
protected long getItemId(Label item) {
return item.getId();
}
@Override
public long getItemId(int position) {
return super.getItemId(getAdjustedPosition(position));
}
@Override
public Label getItem(int position) {
return super.getItem(getAdjustedPosition(position));
}
@NonNull
@Override
public BindingViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
return new BindingViewHolder<>(DataBindingUtil.inflate(LayoutInflater.from(parent.getContext()), R.layout.label_list_item_color, parent, false));
}
@Override
public void onBindViewHolder(@NonNull BindingViewHolder holder, int position) {
LabelListItemColorBinding binding = (LabelListItemColorBinding) holder.getBinding();
Label label = getItem(getAdjustedPosition(position));
LabelListItemViewModel oldViewModel = binding.getViewModel();
if (oldViewModel != null) {
Label oldLabel = oldViewModel.getLabel();
if (label.equals(oldLabel)) {
oldViewModel.setMode(dragEnabled ? LabelListItemViewModel.Mode.DRAG : LabelListItemViewModel.Mode.IDLE);
return;
}
}
LabelListItemViewModel viewModel = new LabelListItemViewModel(label, holder.itemView);
viewModel.setMenuItemClickListener(this);
viewModel.setMode(dragEnabled ? LabelListItemViewModel.Mode.DRAG : LabelListItemViewModel.Mode.IDLE);
binding.setViewModel(viewModel);
}
public void setMenuItemClickListener(LabelListItemViewModel.LabelMenuItemClickListener menuItemClickListener) {
this.menuItemClickListener = menuItemClickListener;
}
@Override
public void onLabelMenuItemClicked(MenuItem item, Label label) {
if (menuItemClickListener != null) {
menuItemClickListener.onLabelMenuItemClicked(item, label);
}
}
/**
     * Returns a two-element array containing the from and to positions, respectively. It also resets
     * the values to their initial state.
     *
     * @return a two-element array containing the from and to positions, respectively
*/
@Size(2)
public int[] getMovementAndReset() {
int[] ret = new int[]{moveFrom, moveTo};
moveFrom = -1;
moveTo = -1;
return ret;
}
/**
* Converts the static iterator position to a dynamic position that is adjusted according to the
* move factors ({@link #moveFrom} and {@link #moveTo}) set by {@link #onItemMove(int, int)}
* when the user drags a list item to a new position.
*
* @param position the position to be converted
* @return Returns the dynamic position of the item.
* @see #onItemMove(int, int)
*/
private int getAdjustedPosition(int position) {
if (moveFrom < 0 || moveFrom == moveTo) {
return position;
} else {
int min = Math.min(moveFrom, moveTo);
int max = Math.max(moveFrom, moveTo);
if (position == moveTo) {
return moveFrom;
} else if (position >= min && position <= max) {
if (moveFrom < moveTo) {
return position + 1;
} else {
return position - 1;
}
} else {
return position;
}
}
}
public boolean onItemMove(int fromPosition, int toPosition) {
if (moveFrom < 0) {
moveFrom = fromPosition;
}
moveTo = toPosition;
/*if (fromPosition < toPosition) {
for (int i = fromPosition; i < toPosition; i++) {
//Collections.swap(accountList, i, i + 1);
int tmp = positions[i];
positions[i] = positions[i + 1];
positions[i + 1] = tmp;
}
} else {
for (int i = fromPosition; i > toPosition; i--) {
//Collections.swap(accountList, i, i - 1);
int tmp = positions[i];
positions[i] = positions[i - 1];
positions[i - 1] = tmp;
}
}*/
notifyItemMoved(fromPosition, toPosition); // FIXME the backing iterator does not represent the change so it will not work properly
return true;
}
public void onItemDrag(RecyclerView.ViewHolder viewHolder, boolean isDragging) {
if (viewHolder == null) {
return;
}
//int viewType = viewHolder.getItemViewType();
//BindingViewHolder holder = (BindingViewHolder) viewHolder;
//ViewDataBinding binding = holder.getBinding();
MaterialCardView cardView = viewHolder.itemView.findViewById(R.id.card);
if (cardView != null) {
cardView.clearAnimation();
float targetElevation = cardView.getResources().getDimension(isDragging ? R.dimen.account_list_card_elevation_dragging : R.dimen.account_list_card_elevation_normal);
ObjectAnimator animator = ObjectAnimator.ofFloat(cardView, "cardElevation", cardView.getCardElevation(), targetElevation);
animator.start();
/*int originalPadding = cardView.getResources().getDimensionPixelSize(R.dimen.account_list_card_padding_normal);
int draggingPadding = cardView.getResources().getDimensionPixelSize(R.dimen.account_list_card_padding_dragging);
int targetPadding = isDragging ? draggingPadding : originalPadding;
int extTargetPadding = isDragging ? originalPadding - draggingPadding : 0;
//int originalPadding = cardView.getResources().getDimensionPixelSize(R.dimen.account_list_card_padding_normal);
ValueAnimator paddingAnimator = ValueAnimator.ofInt(viewHolder.itemView.getPaddingStart(), targetPadding);
paddingAnimator.addUpdateListener(valueAnimator -> {
int p = (int) valueAnimator.getAnimatedValue();
viewHolder.itemView.setPaddingRelative(p, p, p, p);
});
ValueAnimator paddingAnimator2 = ValueAnimator.ofInt(cardView.getContentPaddingTop(), extTargetPadding);
paddingAnimator2.addUpdateListener(valueAnimator -> {
int p = (int) valueAnimator.getAnimatedValue();
cardView.setContentPadding(p, p, p, p);
});
AnimatorSet set = new AnimatorSet();
set.playTogether(animator, paddingAnimator, paddingAnimator2);
set.start();*/
}
/*switch (viewType) {
case AccountListIteratorAdapter.TYPE_ACCOUNT_TOTP:
AccountListItemTotpViewModel viewModel = ((AccountListItemTotpBinding) binding).getViewModel();
if (viewModel != null) {
viewModel.setMode(isDragging ? AccountListItemViewModel.MODE_DRAG : AccountListItemViewModel.MODE_IDLE);
}
break;
case AccountListIteratorAdapter.TYPE_ACCOUNT_HOTP:
AccountListItemHotpViewModel hotpViewModel = ((AccountListItemHotpBinding) binding).getViewModel();
if (hotpViewModel != null) {
hotpViewModel.setMode(isDragging ? AccountListItemViewModel.MODE_DRAG : AccountListItemViewModel.MODE_IDLE);
}
break;
}*/
}
public void setDragEnabled(boolean enabled) {
if (dragEnabled != enabled) {
dragEnabled = enabled;
notifyDataSetChanged();
}
}
}
| java | 13 | 0.648923 | 177 | 38.696833 | 221 | starcoderdata |
import Serializable from './Serializable'
import Arrays from 'utils/Arrays'
import Workflow from './Workflow'
export default class User extends Serializable {
static DEFAULT_ADMIN_NAME = 'admin'
static DEFAULT_PUBLIC_GROUP_NAME = 'default_public_group'
static ROLE = {
VIEW: 'VIEW',
ENTRY_LIMITED: 'ENTRY_LIMITED',
ENTRY: 'ENTRY',
CLEANSING: 'CLEANSING',
ANALYSIS: 'ANALYSIS',
DESIGN: 'DESIGN',
ADMIN: 'ADMIN',
}
static ROLES_HIERARCHY = [
User.ROLE.VIEW,
User.ROLE.ENTRY_LIMITED,
User.ROLE.ENTRY,
User.ROLE.CLEANSING,
User.ROLE.ANALYSIS,
User.ROLE.DESIGN,
User.ROLE.ADMIN,
]
static ROLE_IN_GROUP = {
OWNER: 'OWNER',
ADMINISTRATOR: 'ADMINISTRATOR',
SUPERVISOR: 'SUPERVISOR',
OPERATOR: 'OPERATOR',
VIEWER: 'VIEWER',
DATA_ANALYZER: 'DATA_ANALYZER',
}
static USER_GROUP_JOIN_STATUS = {
ACCEPTED: 'ACCEPTED',
PENDING: 'PENDING',
REJECTED: 'REJECTED',
}
enabled
id
role
username
constructor(jsonData) {
super()
this.fillFromJSON(jsonData)
}
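  // Index of the user's highest role within ROLES_HIERARCHY (-1 if the user has none).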
_calculateHighestRoleIndex() {
let max = -1
this.roles.forEach((role) => {
const index = User.ROLES_HIERARCHY.indexOf(role)
if (index > max) {
max = index
}
})
return max
}
_hasAtLeastRole(role) {
const highestIndex = this._calculateHighestRoleIndex()
const index = User.ROLES_HIERARCHY.indexOf(role)
return highestIndex >= index
}
get canAccessUsersManagement() {
return this.role === User.ROLE.ADMIN
}
canCreateRecords(roleInSurveyGroup) {
switch (this.role) {
case User.ROLE.VIEW:
case User.ROLE.ENTRY_LIMITED:
return false
default:
return this.canEditRecords(roleInSurveyGroup)
}
}
canEditRecords(roleInSurveyGroup) {
switch (this.role) {
case User.ROLE.VIEW:
return false
default:
if (roleInSurveyGroup === null) {
return false
}
switch (roleInSurveyGroup) {
case User.ROLE_IN_GROUP.OWNER:
case User.ROLE_IN_GROUP.ADMINISTRATOR:
case User.ROLE_IN_GROUP.SUPERVISOR:
case User.ROLE_IN_GROUP.DATA_ANALYZER:
case User.ROLE_IN_GROUP.OPERATOR:
return true
default:
return false
}
}
}
canEditRecord(record) {
const { step } = record
switch (step) {
case Workflow.STEPS.entry:
return (
this._hasAtLeastRole(User.ROLE.ENTRY) || (this.role === User.ROLE.ENTRY_LIMITED && record.ownerId === this.id)
)
case Workflow.STEPS.cleansing:
return this._hasAtLeastRole(User.ROLE.CLEANSING)
case Workflow.STEPS.analysis:
default:
return false
}
}
canDeleteRecords(roleInSurveyGroup, records) {
const canDeleteRecordsInGeneral = this.canCreateRecords(roleInSurveyGroup)
if (!canDeleteRecordsInGeneral) {
return false
}
switch (this.role) {
case User.ROLE.ENTRY:
return !Arrays.contains(records, (r) => r.step !== Workflow.STEPS.entry || r.ownerId !== this.id)
default:
return true
}
}
canImportRecords(roleInSurveyGroup) {
if (roleInSurveyGroup === null) {
return false
}
switch (roleInSurveyGroup) {
case User.ROLE_IN_GROUP.OWNER:
case User.ROLE_IN_GROUP.ADMINISTRATOR:
case User.ROLE_IN_GROUP.SUPERVISOR:
case User.ROLE_IN_GROUP.DATA_ANALYZER:
return true
default:
return false
}
}
canEditQualifier(roleInSurvey) {
return (
this.role === User.ROLE.ADMIN ||
roleInSurvey === User.ROLE_IN_GROUP.ADMINISTRATOR ||
roleInSurvey === User.ROLE_IN_GROUP.OWNER
)
}
canFilterRecordsBySummaryAttribute(attr, roleInSurvey) {
const rootEntityDef = attr.rootEntity
const isQualifier = rootEntityDef.qualifierAttributeDefinitions.find((qDef) => qDef.name === attr.name) != null
return !isQualifier || this.canEditQualifier(roleInSurvey)
}
canEditRecordAttribute({ record, attributeDefinition }) {
const { survey } = record
const { userInGroupRole } = survey
const { calculated, qualifier } = attributeDefinition
return this.canEditRecord(record) && !calculated && (!qualifier || this.canEditQualifier(userInGroupRole))
}
canUnlockRecords() {
return this.role === User.ROLE.ADMIN
}
canEditNotOwnedRecords() {
return this.role === User.ROLE.ADMIN
}
canEditOnlyOwnedRecords() {
return this.role === User.ROLE.ENTRY_LIMITED
}
canPromoteRecordWithErrors(roleInSurveyGroup) {
return this.canChangeRecordOwner(roleInSurveyGroup)
}
canPromoteRecordsInBulk(roleInSurveyGroup) {
return this.canPromoteRecordWithErrors(roleInSurveyGroup)
}
canDemoteRecordsInBulk(roleInSurveyGroup) {
return this.canPromoteRecordWithErrors(roleInSurveyGroup)
}
canChangeRecordOwner(roleInSurveyGroup) {
const mainRole = this.role
switch (mainRole) {
case User.ROLE.ADMIN:
return true
case User.ROLE.VIEW:
case User.ROLE.ENTRY:
case User.ROLE.ENTRY_LIMITED:
return false
default:
if (roleInSurveyGroup === null) {
return false
}
switch (roleInSurveyGroup) {
case User.ROLE_IN_GROUP.OWNER:
case User.ROLE_IN_GROUP.ADMINISTRATOR:
case User.ROLE_IN_GROUP.SUPERVISOR:
case User.ROLE_IN_GROUP.DATA_ANALYZER:
return true
default:
return false
}
}
}
get canAccessDashboard() {
return this.role !== User.ROLE.ENTRY_LIMITED
}
get canAccessMap() {
return this.canAccessDashboard
}
get canAccessSaiku() {
return this.canAccessDataCleansing
}
get canAccessDataCleansing() {
switch (this.role) {
case User.ROLE.CLEANSING:
case User.ROLE.ANALYSIS:
case User.ROLE.DESIGN:
case User.ROLE.ADMIN:
return true
default:
return false
}
}
get canAccessBackupRestore() {
return this.role === User.ROLE.ADMIN
}
get canAccessSurveyDesigner() {
switch (this.role) {
case User.ROLE.ADMIN:
case User.ROLE.DESIGN:
return true
default:
return false
}
}
canChangeSurveyUserGroup(roleInSurveyGroup) {
switch (this.role) {
case User.ROLE.ADMIN:
return true
case User.ROLE.DESIGN:
if (roleInSurveyGroup === null) {
return false
}
switch (roleInSurveyGroup) {
case User.ROLE_IN_GROUP.OWNER:
case User.ROLE_IN_GROUP.ADMINISTRATOR:
return true
default:
return false
}
default:
return false
}
}
}
| javascript | 18 | 0.631104 | 120 | 23.397112 | 277 | starcoderdata |
package be.lycoops.vincent.iv.calculator.output;
import com.airhacks.afterburner.views.FXMLView;
public class OutputView extends FXMLView {
}
| java | 4 | 0.823529 | 48 | 25.714286 | 7 | starcoderdata |
/* eslint-disable no-unused-vars */
import ScrollingCamaraInX from '../cameras/scrolling_camara_in_x';
import AntEnemy from '../sprites/ant_enemy';
import EnemyFactory from '../sprites/enemy_factory';
import Player from '../sprites/player';
import Scene from './scene';
export default class LevelOneScene extends Scene
{
constructor() {
super('Primer Nivel')
}
preload() {
this.load.image('bg', 'assets/fondo-infinito.jpg');
this.load.image('goal', 'assets/meta.png');
this.load.spritesheet(
'player',
'assets/player.png',
{frameWidth: 180, frameHeight: 180}
);
this.load.spritesheet(
'ant',
'assets/hormiga.png',
{frameWidth: 192, frameHeight: 96}
);
this.load.spritesheet(
'caterpillar',
'assets/oruga.png',
{frameWidth: 96, frameHeight: 192}
);
this.load.spritesheet(
'wasp',
'assets/avispa.png',
{frameWidth: 128, frameHeight: 128}
);
this.load.spritesheet(
'crash',
'assets/crash.png',
{frameWidth: 199, frameHeight: 200}
);
this.load.image('tileset', './assets/tiles.png');
this.load.tilemapTiledJSON('lvl-1', 'assets/map.json');
}
create() {
this.add.sprite(480, 320, 'bg').setScrollFactor(0);
const map = this.make.tilemap({key: 'lvl-1'});
const tiles = map.addTilesetImage('tiles', 'tileset');
const bg = map.createLayer('backgroundLayer',tiles);
this.collisionLayer = map.createLayer('collisionLayer', tiles);
this.collisionLayer.setCollisionByExclusion([-1]);
let playerFormTiled = this.findObjectsByType(
"player",
map.getObjectLayer("objectsLayer"),
map.tileHeight
);
this.player = new Player(this, playerFormTiled[0].x, playerFormTiled[0].y, "player");
const ground = map.createLayer("hierbaLayer", tiles).setDepth(100);
const enemies = EnemyFactory.create(
this,
map.getObjectLayer("objectsLayer"),
this.physics.add.group({
classType: AntEnemy,
runChildUpdate: true
}),
map.tileHeight
);
this.physics.add.overlap(this.player, enemies, this.player.checkEnemyCollision, null, this.player);
this.scrollingCamera = new ScrollingCamaraInX(this, 3520, 640, this.player.x, 412);
}
update() {
this.player.update();
this.scrollingCamera.update(this.player.x);
}
/**
* @param {string} type
* @param {Phaser.Tilemaps.ObjectLayer} objectsLayer
* @param {number} tileHeight
* @returns {Phaser.Types.Tilemaps.TiledObject[]}
*/
findObjectsByType(type, objectsLayer, tileHeight) {
const result = [];
objectsLayer.objects.forEach((element) => {
if(element.type === type) {
element.y -= tileHeight;
result.push(element);
}
});
return result;
}
}
| javascript | 30 | 0.638328 | 103 | 26.596154 | 104 | starcoderdata |
package main;
import java.io.Serializable;
@SuppressWarnings("serial")
public class Message implements Serializable {
public String methodName;
public int arg1;
public int arg2;
public Message(String methodName, int arg1, int arg2)
{
this.methodName = methodName;
this.arg1 = arg1;
this.arg2 = arg2;
}
@Override
public int hashCode() {
return super.hashCode();
}
@Override
public boolean equals(Object obj) {
return super.equals(obj);
}
@Override
protected Object clone() throws CloneNotSupportedException {
return super.clone();
}
}
| java | 6 | 0.680303 | 64 | 17.857143 | 35 | starcoderdata |
// Copyright (c) 2022 Samsung Research America
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef NAV2_REGULATED_PURE_PURSUIT_CONTROLLER__PARAMETER_HANDLER_HPP_
#define NAV2_REGULATED_PURE_PURSUIT_CONTROLLER__PARAMETER_HANDLER_HPP_
#include <string>
#include <vector>
#include <memory>
#include <algorithm>
#include <mutex>
#include "rclcpp/rclcpp.hpp"
#include "rclcpp_lifecycle/lifecycle_node.hpp"
#include "nav2_util/odometry_utils.hpp"
#include "nav2_util/geometry_utils.hpp"
#include "nav2_util/node_utils.hpp"
namespace nav2_regulated_pure_pursuit_controller
{
struct Parameters
{
double desired_linear_vel, base_desired_linear_vel;
double lookahead_dist;
double rotate_to_heading_angular_vel;
double max_lookahead_dist;
double min_lookahead_dist;
double lookahead_time;
bool use_velocity_scaled_lookahead_dist;
double min_approach_linear_velocity;
double approach_velocity_scaling_dist;
double max_allowed_time_to_collision_up_to_carrot;
bool use_regulated_linear_velocity_scaling;
bool use_cost_regulated_linear_velocity_scaling;
double cost_scaling_dist;
double cost_scaling_gain;
double inflation_cost_scaling_factor;
double regulated_linear_scaling_min_radius;
double regulated_linear_scaling_min_speed;
bool use_fixed_curvature_lookahead;
double curvature_lookahead_dist;
bool use_rotate_to_heading;
double max_angular_accel;
double rotate_to_heading_min_angle;
bool allow_reversing;
double max_robot_pose_search_dist;
bool use_interpolation;
bool use_collision_detection;
double transform_tolerance;
};
/**
* @class nav2_regulated_pure_pursuit_controller::ParameterHandler
* @brief Handles parameters and dynamic parameters for RPP
*/
class ParameterHandler
{
public:
/**
* @brief Constructor for nav2_regulated_pure_pursuit_controller::ParameterHandler
*/
ParameterHandler(
rclcpp_lifecycle::LifecycleNode::SharedPtr node,
std::string & plugin_name,
rclcpp::Logger & logger, const double costmap_size_x);
/**
   * @brief Destructor for nav2_regulated_pure_pursuit_controller::ParameterHandler
*/
~ParameterHandler() = default;
std::mutex & getMutex() {return mutex_;}
  Parameters * getParams() {return &params_;}
protected:
/**
* @brief Callback executed when a parameter change is detected
* @param event ParameterEvent message
*/
rcl_interfaces::msg::SetParametersResult
dynamicParametersCallback(std::vector<rclcpp::Parameter> parameters);
// Dynamic parameters handler
std::mutex mutex_;
rclcpp::node_interfaces::OnSetParametersCallbackHandle::SharedPtr dyn_params_handler_;
Parameters params_;
std::string plugin_name_;
rclcpp::Logger logger_ {rclcpp::get_logger("RegulatedPurePursuitController")};
};
} // namespace nav2_regulated_pure_pursuit_controller
#endif // NAV2_REGULATED_PURE_PURSUIT_CONTROLLER__PARAMETER_HANDLER_HPP_
| c++ | 15 | 0.756996 | 88 | 31.028302 | 106 | research_code |
private WebClient.RequestBodySpec getAuthorizedWebClientForHCP(Function<UriBuilder, URI> uriFunction, HttpMethod httpMethod, String hostname) {
// Function<UriBuilder, URI> uriFunction = initUri(HCPEndpoint.REST, queryParams, urlName);
return webClient
.method(httpMethod)
.uri(uriFunction)
.header("Authorization", "HCP " + HCPConfig.getAuth())
.header("Host", hostname);
}
| java | 10 | 0.652079 | 143 | 56.25 | 8 | inline |
#ifndef STAN_MATH_FWD_SCAL_META_AD_PROMOTABLE_HPP
#define STAN_MATH_FWD_SCAL_META_AD_PROMOTABLE_HPP
#include
namespace stan {
namespace math {
template <typename T>
struct fvar;
template <typename V, typename T>
struct ad_promotable<V, fvar<T> > {
enum { value = ad_promotable<V, T>::value };
};
}
}
#endif
| c++ | 17 | 0.70101 | 107 | 23.75 | 20 | starcoderdata |
package cn.yinjiahui.voa.portal.controller;
import cn.yinjiahui.voa.common.api.CommonResult;
import cn.yinjiahui.voa.db.model.FileInfo;
import cn.yinjiahui.voa.portal.service.FileService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.util.List;
@Slf4j
@RestController
@RequestMapping("/file")
public class FileController {
@Autowired
FileService fileService;
@PostMapping("")
public CommonResult upload(@RequestParam("file") MultipartFile file,@RequestParam Integer teamId) {
if (file.isEmpty()) {
return CommonResult.failed();
}
try {
fileService.upload(file,teamId);
} catch (IOException e) {
log.error(e.getMessage());
return CommonResult.failed();
}
return getFile(teamId);
}
@GetMapping("")
public CommonResult getFile(Integer teamId){
        List<FileInfo> file = fileService.getFile(teamId);
return CommonResult.success(file);
}
@DeleteMapping("")
public CommonResult deleteFile(Integer fileId){
fileService.deleteFile(fileId);
return CommonResult.success(null);
}
}
| java | 13 | 0.69145 | 104 | 27.020833 | 48 | starcoderdata |
module["exports"] = {
"adjective": [
"Petit",
"Ergonomique",
"Rustique",
"Intelligente",
"Magnifique",
"Incroyable",
"Fantastique",
"Pratique",
"Lisse",
"Génial",
"Générique",
"Fabriqué à la main",
"Fait main",
"Autorisé",
"Raffiné",
"Non ramifié",
"Savoureux"
],
"material": [
"Acier",
"En bois",
"Béton",
"Plastique",
"Coton",
"Granit",
"Caoutchouc",
"Métal",
"Mou, tendre",
"Frais",
"Gelé"
],
"product": [
"Chaise",
"Auto",
"Ordinateur",
"Clavier",
"Souris",
"Bicyclette",
"Balle",
"Gants",
"Pantalon",
"La chemise",
"Tableau",
"Des chaussures",
"Chapeau",
"Les serviettes",
"Savon",
"Thon",
"Poulet",
"Poisson",
"Fromage",
"Lard",
"Pizza",
"Salade",
"Saucisses",
"Puces"
]
};
| javascript | 23 | 0.491579 | 49 | 14.57377 | 61 | starcoderdata |
def test_create_project_no_django(tmpfolder, nodjango_admin_mock):
# Given options with the django extension,
# but without django-admin being installed,
opts = dict(project=PROJ_NAME, extensions=[django.Django('django')])
# when the project is created,
# then an exception should be raised.
with pytest.raises(django.DjangoAdminNotInstalled):
        create_project(opts)
| python | 12 | 0.725441 | 72 | 43.222222 | 9 | inline |
# sort
sequence = [1, 3, 5, 4, 2, 6]
# Original Sequence [1, 3, 5, 4, 2, 6]
print(f'Original Sequence', sequence)
sequence.sort()
# After sorting [1, 2, 3, 4, 5, 6]
print(f'After sorting', sequence)
# sorted
sequence = [1, 3, 5, 4, 2, 6]
output = sorted(sequence)
print(f'Original Sequence', sequence)
print(f'Output sequence after sorting', output)
print(f'Original sequence after sorting', sequence)
test_list = [{'name':'a','age':20},{'name':'b','age':30},{'name':'c','age':25}]
print(sorted(test_list, key=lambda x:x['age'], reverse=True))
| python | 10 | 0.647378 | 79 | 21.16 | 25 | starcoderdata |
import torch.optim
from numpy import ndarray
def get_optim(optim, params, init_lr, steps=1, wd=0, gamma=1,
momentum=0.9, max_epochs=120):
if optim == 'sgd':
optimizer = torch.optim.SGD(
params, lr=init_lr, momentum=momentum, weight_decay=wd)
elif optim == 'sgd_nomem':
optimizer = torch.optim.SGD(
params, lr=init_lr, momentum=0, weight_decay=wd)
elif optim == 'adam':
optimizer = torch.optim.Adam(
params, lr=init_lr, weight_decay=wd, # amsgrad=True,
betas=(0.9, .999))
else:
raise ValueError('Unknown optimizer')
# Set the learning rate decay
if isinstance(steps, (tuple, list, ndarray)) and len(steps) == 1:
steps = steps[0]
if isinstance(steps, int):
scheduler = torch.optim.lr_scheduler.StepLR(
optimizer, int(max_epochs/steps), gamma=gamma)
elif isinstance(steps, (tuple, list, ndarray)):
scheduler = torch.optim.lr_scheduler.MultiStepLR(
optimizer, steps, gamma=gamma)
else:
raise ValueError('Unknown lr schedule')
    return optimizer, scheduler
| python | 12 | 0.613438 | 69 | 33.727273 | 33 | starcoderdata |
private void addDevice(BluetoothDevice device, int rssi) {
boolean deviceFound = false;
//for (BluetoothDevice listDev : deviceList) {
//If the same device already found, we will not add it again
if (searchDevice.equals(device.getAddress())) {
Log.i(TAG, "Device Match Occured "+device+" "+rssi);
deviceFound = true;
mBluetoothAdapter.stopLeScan(mLeScanCallback);
Log.i(TAG, "Scan stopped from addDevice due to device match");
}
//}
//populate the deviceList here. this list will later evaluated
devRssiValues.put(device.getAddress(), rssi);
if (!deviceFound) {
//If the device address is listed under settings, then we add the device to the list
//if(searchList.get(0) == null)
Log.i(TAG, "Not a Matching device "+device+" "+rssi);
if(searchDevice == null)
{
Log.i(TAG,"No Door has configured yet. Need at least one address");
Toast.makeText(getApplicationContext(),
"Configure a Door from Settings", Toast.LENGTH_SHORT).show();
}
//if(device.getAddress().equals(searchList.get(0)))
//Changed a lot here moved to else caluse
// if(device.getAddress().equals(searchDevice))
// {
// deviceList.add(device);
// deviceAddress = device.getAddress();
// //Update the status image on UI
// setConnectionStatus(1);
//
// mDevice = BluetoothAdapter.getDefaultAdapter().getRemoteDevice(deviceAddress);
//
// Log.d(TAG, "Selected Door.address==" + mDevice + "mserviceValue" + mService);
// Toast.makeText(getApplicationContext(),mDevice.getName()+ " - connecting", Toast.LENGTH_SHORT).show();
// mService.connect(deviceAddress);
// isServiceConnected = true;
// }
}
else
{
if(device.getAddress().equals(searchDevice))
{
deviceList.add(device);
deviceAddress = device.getAddress();
//Update the status image on UI
setConnectionStatus(1);
Log.i(TAG,"Set Status 1");
mDevice = BluetoothAdapter.getDefaultAdapter().getRemoteDevice(deviceAddress);
//service_init();
if(mServiceConnection != null)
LocalBroadcastManager.getInstance(this).registerReceiver(UARTStatusChangeReceiver, makeGattUpdateIntentFilter());
Log.i(TAG, "Selected Door.address==" + mDevice + "mserviceValue" + mService);
Toast.makeText(getApplicationContext(),mDevice.getName()+ " - connecting", Toast.LENGTH_SHORT).show();
mService.connect(deviceAddress);
isServiceConnected = true;
Log.i(TAG,"Service connected");
}
}
    }
| java | 14 | 0.557995 | 133 | 42 | 71 | inline |
@SuppressWarnings("deprecation")
static int weekNumber(int year, int month) {
Date d = new Date(year - 1900, month - 1, 1);
while (d.getDay() != CalendarUtil.getStartingDayOfWeek()) d.setDate(d.getDate() - 1);
// ISO 8601: move to the next Thursday
while (d.getDay() != 4) d.setDate(d.getDate() + 1);
int y = d.getYear();
int week = 0;
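    // Step back one week at a time; the number of steps that stay within year y is the ISO week number.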
while (d.getYear() == y) { d.setDate(d.getDate() - 7); week += 1; }
return week;
  }
| java | 11 | 0.626147 | 87 | 38.727273 | 11 | inline |
import tushare as ts
from datetime import datetime, date, time
# Singleton
class Singleton(object):
_instance = None
def __new__(cls, *args, **kw):
if not cls._instance:
cls._instance = super(Singleton, cls).__new__(cls)
return cls._instance
class TushareConn(Singleton):
conn = ts.get_apis()
class TushareHelper(object):
def __init__(self, code, start, end, freq, ma = [5, 10], asset = ''):
self.self = self
self.code = code
self.start = start
self.end = end
self.freq = freq
self.ma = ma
if asset is None or asset == '':
self.asset = 'INDEX'
else:
self.asset = asset
        # raw API data as a pandas DataFrame
self.data_frame = {}
#
self.data_original = []
self.data_original_ex = []
self.date_tickers = []
        # moving average (MA) API data
self.data_frame_ma = {}
        # simplified (mini) data
self.data_frame_ma_mini = {"index":[], "index_date":[], "short":[], "long":[]}
def __bar(self):
self.data_frame = ts.bar(code=self.code, conn=TushareConn.conn, start_date=self.start, end_date=self.end,
freq=self.freq, asset=self.asset)
def data_transfer(self):
self.__bar()
list_index = 0
        # iterate over data_frame sorted by index
for index, row in self.data_frame.sort_index().iterrows():
date_time = datetime.strptime(str(index), '%Y-%m-%d %H:%M:%S')
row["index"] = str(date_time)
row["is_up"] = row["open"] <= row["close"]
self.data_original_ex.append((list_index, row['open'], row['high'], row['low'], row['close']))
self.data_original.append(row)
self.date_tickers.append(str(row['index']))
list_index += 1
# MA
def __ma(self):
self.data_frame_ma = ts.bar(code=self.code, conn=TushareConn.conn, start_date=self.start, end_date=self.end,
freq=self.freq, ma=self.ma )
# ma transfer
def data_transfer_ma(self):
self.__ma()
ma_short = "ma" + str(self.ma[0])
ma_long = "ma" + str(self.ma[1])
list_index = 0
        # iterate over the data_frame sorted by index
for index, row in self.data_frame_ma.sort_index().iterrows():
date_time = datetime.strptime(str(index), '%Y-%m-%d %H:%M:%S')
self.data_frame_ma_mini["index"].append(list_index)
self.data_frame_ma_mini["index_date"].append((str(date_time)))
self.data_frame_ma_mini["short"].append(row[ma_short])
self.data_frame_ma_mini["long"].append(row[ma_long])
list_index += 1 | python | 14 | 0.538774 | 116 | 34.631579 | 76 | starcoderdata |
package com.tracelink.prodsec.plugin.veracode.sca.service;
import static org.mockito.Mockito.times;
import com.tracelink.prodsec.plugin.veracode.sca.exception.VeracodeScaProductException;
import com.tracelink.prodsec.plugin.veracode.sca.model.VeracodeScaWorkspace;
import com.tracelink.prodsec.plugin.veracode.sca.repository.VeracodeScaWorkspaceRepository;
import com.tracelink.prodsec.plugin.veracode.sca.util.model.Workspace;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.BDDMockito;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
public class VeracodeScaWorkspaceServiceTest {
@MockBean
private VeracodeScaWorkspaceRepository workspaceRepository;
@MockBean
private VeracodeScaProjectService projectService;
private VeracodeScaWorkspaceService workspaceService;
private VeracodeScaWorkspace workspace;
private UUID uuid;
@Before
public void setup() {
workspaceService = new VeracodeScaWorkspaceService(workspaceRepository, projectService);
workspace = new VeracodeScaWorkspace();
uuid = UUID.randomUUID();
workspace.setId(uuid);
}
@Test
public void testGetWorkspaces() {
BDDMockito.when(workspaceRepository.findAll()).thenReturn(Collections.emptyList());
List returnedWorkspaces = workspaceService.getWorkspaces();
Assert.assertTrue(returnedWorkspaces.isEmpty());
VeracodeScaWorkspace workspace = new VeracodeScaWorkspace();
BDDMockito.when(workspaceRepository.findAll())
.thenReturn(Collections.singletonList(workspace));
returnedWorkspaces = workspaceService.getWorkspaces();
Assert.assertEquals(1, returnedWorkspaces.size());
Assert.assertEquals(workspace, returnedWorkspaces.get(0));
}
@Test
public void testUpdateWorkspaces() {
Workspace workspace = new Workspace();
workspace.setId(UUID.randomUUID());
workspace.setName("Workspace1");
workspace.setSiteId("ABCdef");
List workspaces = workspaceService
.updateWorkspaces(Collections.singletonList(workspace));
@SuppressWarnings("unchecked")
ArgumentCaptor workspacesCaptor = ArgumentCaptor
.forClass(List.class);
BDDMockito.verify(workspaceRepository, times(1)).saveAll(workspacesCaptor.capture());
Assert.assertFalse(workspacesCaptor.getValue().isEmpty());
Assert.assertEquals(1, workspaces.size());
Assert.assertEquals(workspaces.get(0), workspacesCaptor.getValue().get(0));
Assert.assertEquals(workspace.getId(), workspaces.get(0).getId());
Assert.assertEquals(workspace.getName(), workspaces.get(0).getName());
Assert.assertEquals(workspace.getSiteId(), workspaces.get(0).getSiteId());
Assert.assertTrue(workspaces.get(0).isIncluded());
}
@Test
public void testSetIncludedWorkspacesNull() {
try {
workspaceService.setIncluded(null);
Assert.fail("Exception should have been thrown");
} catch (IllegalArgumentException e) {
Assert.assertEquals("Please provide non-null workspace IDs to include", e.getMessage());
}
try {
workspaceService.setIncluded(Collections.singletonList(null));
Assert.fail("Exception should have been thrown");
} catch (IllegalArgumentException e) {
Assert.assertEquals("Please provide non-null workspace IDs to include", e.getMessage());
}
}
@Test
public void testSetIncluded() {
VeracodeScaWorkspace workspace1 = new VeracodeScaWorkspace();
workspace1.setId(uuid);
workspace1.setIncluded(false);
VeracodeScaWorkspace workspace2 = new VeracodeScaWorkspace();
workspace2.setId(UUID.randomUUID());
Page page = new PageImpl<>(Arrays.asList(workspace1, workspace2));
BDDMockito.when(workspaceRepository.findAll(BDDMockito.any(Pageable.class)))
.thenReturn(page);
workspaceService.setIncluded(Collections.singletonList(uuid));
Assert.assertTrue(workspace1.isIncluded());
Assert.assertFalse(workspace2.isIncluded());
BDDMockito.verify(workspaceRepository).saveAll(BDDMockito.anyIterable());
BDDMockito.verify(workspaceRepository).flush();
}
@Test
public void testSetIncludedMultiplePages() {
VeracodeScaWorkspace workspace1 = new VeracodeScaWorkspace();
workspace1.setId(uuid);
workspace1.setIncluded(false);
VeracodeScaWorkspace workspace2 = new VeracodeScaWorkspace();
workspace2.setId(UUID.randomUUID());
Pageable pageable1 = PageRequest.of(0, 1);
Page page1 = new PageImpl<>(Collections.singletonList(workspace1),
pageable1, 2);
Pageable pageable2 = PageRequest.of(1, 1);
Page page2 = new PageImpl<>(Collections.singletonList(workspace2),
pageable2, 2);
BDDMockito.when(workspaceRepository.findAll(PageRequest.of(0, 50)))
.thenReturn(page1);
BDDMockito.when(workspaceRepository.findAll(page1.nextPageable()))
.thenReturn(page2);
workspaceService.setIncluded(Collections.singletonList(uuid));
Assert.assertTrue(workspace1.isIncluded());
Assert.assertFalse(workspace2.isIncluded());
BDDMockito.verify(workspaceRepository, times(2)).saveAll(BDDMockito.anyIterable());
BDDMockito.verify(workspaceRepository).flush();
}
@Test
public void testDeleteWorkspace() {
BDDMockito.when(workspaceRepository.findById(uuid)).thenReturn(Optional.of(workspace));
workspaceService.deleteWorkspace(uuid);
BDDMockito.verify(projectService).deleteProjectsByWorkspace(workspace);
BDDMockito.verify(workspaceRepository).delete(workspace);
BDDMockito.verify(workspaceRepository).flush();
}
@Test
public void testDeleteWorkspaceDoesNotExist() {
try {
workspaceService.deleteWorkspace(uuid);
Assert.fail("Exception should have been thrown");
} catch (VeracodeScaProductException e) {
Assert.assertEquals("No workspace with the given ID exists", e.getMessage());
}
}
@Test
public void testDeleteWorkspaceNullId() {
try {
workspaceService.deleteWorkspace(null);
Assert.fail("Exception should have been thrown");
} catch (IllegalArgumentException e) {
Assert.assertEquals("Please provide a non-null workspace ID to delete", e.getMessage());
}
}
} | java | 14 | 0.788601 | 91 | 37.114943 | 174 | starcoderdata |
package jstat.maths.errorfunctions;
import jstat.maths.functions.IRegularizerFunction;
import jstat.maths.functions.IVectorRealFunction;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
/**
* The Mean Square Error or MSE is defined as
* MSE = 1/N Sum_{i = 1}^N (y_i - \hat{y}_i)^2
*
* The \hat{y} value is modeled after the IVectorRealFunction passed
* to the object when instantiated
*/
public class MSEFunction implements ILossFunction {
/**
* Constructor
*/
public MSEFunction(IVectorRealFunction hypothesis ){
this.hypothesis = hypothesis;
this.regularizerFunction = null;
}
/**
* Constructor
*/
public MSEFunction(IVectorRealFunction hypothesis, IRegularizerFunction regularizerFunction ){
this.hypothesis = hypothesis;
this.regularizerFunction = regularizerFunction;
}
    /**
     * Evaluate the MSE loss over the given data points and labels
     * @param data the data matrix, one sample per row
     * @param labels the target values
     * @return the mean squared error, plus the regularizer term if one is set
     */
@Override
public double evaluate(INDArray data, INDArray labels){
if(data.size(0) != labels.size(0)){
throw new IllegalArgumentException("Invalid number of data points and labels vector size");
}
double result = 0.0;
for(int idx=0; idx<data.size(0); ++idx){
//INDArray row = data.getRow(rowIdx);
double diff = labels.getDouble(idx) - data.getDouble(idx);//this.hypothesis.evaluate(row);
diff *= diff;
result += diff;
}
result /= data.size(0);
if(regularizerFunction != null){
result += regularizerFunction.evaluate(null);
}
return result;
}
/**
* Returns the gradients on the given data
*/
@Override
public INDArray paramGradients(INDArray data, INDArray labels){
INDArray gradients = Nd4j.zeros(this.hypothesis.numCoeffs());
for(int rowIdx=0; rowIdx<data.size(0); ++rowIdx){
INDArray row = data.getRow(rowIdx);
// compute y_i - \hat{y_i}
double diff = (labels.getDouble(rowIdx) - this.hypothesis.evaluate(row));
            // the gradients of the hypothesis at that point
INDArray hypothesisGrads = this.hypothesis.coeffGradients(row);
for(int coeff=0; coeff<this.hypothesis.numCoeffs(); ++coeff){
// update the gradient
double grad = gradients.getDouble(coeff) - (2.0/data.size(0))*diff*hypothesisGrads.getDouble(coeff);
gradients.putScalar(coeff, grad);
}
}
return gradients;
}
private IVectorRealFunction hypothesis;
    private IRegularizerFunction regularizerFunction;
} | java | 18 | 0.621408 | 116 | 26.84 | 100 | starcoderdata |
/*
* JasperReports - Free Java Reporting Library.
* Copyright (C) 2001 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is part of JasperReports.
*
* JasperReports is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JasperReports is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with JasperReports. If not, see
*/
package net.sf.jasperreports.engine.data;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.text.DateFormat;
import java.text.NumberFormat;
import java.text.ParseException;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
import net.sf.jasperreports.engine.JRDataSource;
import net.sf.jasperreports.engine.JRException;
import net.sf.jasperreports.engine.util.FormatUtils;
import net.sf.jasperreports.engine.util.JRDataUtils;
import net.sf.jasperreports.engine.util.JRDateLocaleConverter;
import net.sf.jasperreports.engine.util.JRFloatLocaleConverter;
import org.apache.commons.beanutils.locale.LocaleConvertUtilsBean;
/**
* Abstract text data source, containing methods used to parse text
* data into numerical or date values.
*
* @author (
* @version $Id: JRAbstractTextDataSource.java 7199 2014-08-27 13:58:10Z teodord $
*/
public abstract class JRAbstractTextDataSource implements JRDataSource
{
private LocaleConvertUtilsBean convertBean;
private Locale locale;
private String datePattern;
private String numberPattern;
private TimeZone timeZone;
protected Object convertStringValue(String text, Class valueClass)
{
Object value = null;
if (String.class.equals(valueClass))
{
value = text;
}
else if (Number.class.isAssignableFrom(valueClass))
{
value = getConvertBean().convert(text.trim(), valueClass, locale, numberPattern);
}
else if (Date.class.isAssignableFrom(valueClass))
{
value = getConvertBean().convert(text.trim(), valueClass, locale, datePattern);
}
else if (Boolean.class.equals(valueClass))
{
value = Boolean.valueOf(text);
}
return value;
}
protected Object convertNumber(Number number, Class valueClass) throws JRException
{
Number value = null;
if (valueClass.equals(Byte.class))
{
value = new Byte(number.byteValue());
}
else if (valueClass.equals(Short.class))
{
value = new Short(number.shortValue());
}
else if (valueClass.equals(Integer.class))
{
value = Integer.valueOf(number.intValue());
}
else if (valueClass.equals(Long.class))
{
value = new Long(number.longValue());
}
else if (valueClass.equals(Float.class))
{
value = new Float(number.floatValue());
}
else if (valueClass.equals(Double.class))
{
value = new Double(number.doubleValue());
}
else if (valueClass.equals(BigInteger.class))
{
value = BigInteger.valueOf(number.longValue());
}
else if (valueClass.equals(BigDecimal.class))
{
value = new BigDecimal(Double.toString(number.doubleValue()));
}
else
{
throw new JRException("Unknown number class " + valueClass.getName());
}
return value;
}
/**
* @deprecated Replaced by {@link FormatUtils#getFormattedNumber(NumberFormat, String, Class)}
*/
protected Number getFormattedNumber(NumberFormat numberFormat, String fieldValue, Class valueClass) throws ParseException
{
return FormatUtils.getFormattedNumber(numberFormat, fieldValue, valueClass);
}
/**
* @deprecated Replaced by {@link FormatUtils#getFormattedDate(DateFormat, String, Class)}
*/
protected Date getFormattedDate(DateFormat dateFormat, String fieldValue, Class valueClass) throws ParseException
{
return FormatUtils.getFormattedDate(dateFormat, fieldValue, valueClass);
}
protected LocaleConvertUtilsBean getConvertBean()
{
if (convertBean == null)
{
convertBean = new LocaleConvertUtilsBean();
if (locale != null)
{
convertBean.setDefaultLocale(locale);
convertBean.deregister();
//convertBean.lookup();
}
convertBean.register(
new JRDateLocaleConverter(timeZone),
java.util.Date.class,
locale
);
// fix for https://issues.apache.org/jira/browse/BEANUTILS-351
// remove on upgrade to BeanUtils 1.8.1
JRFloatLocaleConverter floatConverter = new JRFloatLocaleConverter(
locale == null ? Locale.getDefault() : locale);
convertBean.register(floatConverter, Float.class, locale);
convertBean.register(floatConverter, Float.TYPE, locale);
}
return convertBean;
}
/**
* Copy the text parsing attributes for another object.
*
* @param textDataSource the object to copy the attributes from
*/
public void setTextAttributes(JRAbstractTextDataSource textDataSource)
{
setLocale(textDataSource.getLocale());
setDatePattern(textDataSource.getDatePattern());
setNumberPattern(textDataSource.getNumberPattern());
setTimeZone(textDataSource.getTimeZone());
}
public Locale getLocale() {
return locale;
}
public void setLocale(Locale locale) {
this.locale = locale;
convertBean = null;
}
public void setLocale(String locale) {
setLocale(JRDataUtils.getLocale(locale));
}
public String getDatePattern() {
return datePattern;
}
public void setDatePattern(String datePattern) {
this.datePattern = datePattern;
convertBean = null;
}
public String getNumberPattern() {
return numberPattern;
}
public void setNumberPattern(String numberPattern) {
this.numberPattern = numberPattern;
convertBean = null;
}
public TimeZone getTimeZone() {
return timeZone;
}
public void setTimeZone(TimeZone timeZone) {
this.timeZone = timeZone;
convertBean = null;
}
public void setTimeZone(String timeZoneId){
setTimeZone(JRDataUtils.getTimeZone(timeZoneId));
}
} | java | 22 | 0.741659 | 125 | 27.380531 | 226 | starcoderdata |
def get_task_url(self):
"""Get task url.
Returns:
str: Task url.
"""
return self.task_url | python | 5 | 0.448529 | 28 | 16.125 | 8 | inline |
def build_default_constraints(self):
# salary constraint
self.salary_constraint = lpSum([
self.players[player_name] * self.df.loc[player_name].salary
for player_name in self.df.index]) <= self.salary_cap
self.model += self.salary_constraint
# QB constraint
self.QB_constraint = lpSum([
self.players[player_name]
for player_name in self.df[self.df.position == 'QB'].index]) == 1
self.model += self.QB_constraint
# RB constraint
self.RB_constraint_ub = lpSum([
self.players[player_name]
for player_name in self.df[self.df.position == 'RB'].index]) <= 3
self.model += self.RB_constraint_ub
self.RB_constraint_lb = lpSum([
self.players[player_name]
for player_name in self.df[self.df.position == 'RB'].index]) >= 2
self.model += self.RB_constraint_lb
# WR constraint and --- C6 ---
self.WR_constraint_b = lpSum([
self.players[player_name]
for player_name in self.df[self.df.position == 'WR'].index]) == 3
self.model += self.WR_constraint_b
# TE constraint
self.TE_constraint_b = lpSum([
self.players[player_name]
for player_name in self.df[self.df.position == 'TE'].index]) == 1
self.model += self.TE_constraint_b
# DST constraint
self.DST_constraint = lpSum([
self.players[player_name] for player_name in self.df[self.df.position == 'DST'].index]) == 1
self.model += self.DST_constraint
# full roster constraint (ensures that only 1 flex player selected)
self.full_roster_constraint = lpSum([
self.players[player_name] for player_name in self.df.index]) == 9
self.model += self.full_roster_constraint
# --- C2 --- DST doesn't face any offensive players Constraint
teams = self.df.team.unique()
for x in teams:
            self.dst_opponent_constraint = 4 * lpSum([
                self.players[player_name]
                for player_name in self.df[(self.df.position == 'DST') & (self.df.team == x)].index
            ]) + lpSum([
                self.players[player_name]
                for player_name in self.df[self.df.opponent == x].index
            ]) <= 4
self.model += self.dst_opponent_constraint | python | 21 | 0.589447 | 262 | 43.846154 | 52 | inline |
public class EditDistance2 {
private int[][] cache;
private String word1;
private String word2;
private int get(int i, int j) {
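        // memoized edit distance between the first i chars of word1 and the first j chars of word2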
if (cache[i][j] != -1)
return cache[i][j];
char ch1 = word1.charAt(i-1);
char ch2 = word2.charAt(j-1);
if (ch1 == ch2) {
cache[i][j] = get(i-1, j-1);
} else {
int insert = get(i-1, j) + 1;
int replace = get(i-1, j-1) + 1;
int delete = get(i, j-1) + 1;
cache[i][j] = (insert > delete) ? (delete < replace ? delete : replace) : (insert < replace ? insert : replace);
}
return cache[i][j];
}
public int minDistance(String word1, String word2) {
this.word1 = word1;
this.word2 = word2;
this.cache = new int[word1.length()+1][word2.length()+1];
for (int i = 0; i <= word1.length(); i++) {
for (int j = 0; j <= word2.length(); j++) {
if (i == 0) {
cache[i][j] = j;
} else if (j == 0) {
cache[i][j] = i;
} else {
cache[i][j] = -1;
}
}
}
return get(word1.length(), word2.length());
}
} | java | 16 | 0.426993 | 124 | 26.543478 | 46 | starcoderdata |
from fastapi import APIRouter, status
from fastapi.responses import JSONResponse
router = APIRouter()
@router.get("/isAlive", include_in_schema=False)
async def is_alive_status():
"""
Alive check
"""
return JSONResponse(status_code=status.HTTP_200_OK, content={})
@router.get("/isReady", include_in_schema=False)
async def is_ready_status():
"""
Ready check
"""
return JSONResponse(status_code=status.HTTP_200_OK, content={}) | python | 9 | 0.699605 | 67 | 23.095238 | 21 | starcoderdata |
let printPreamble = true;
const preamble = `For more documentation and support please visit https://konveyor.io/move2kube/
# Changelog
`;
function printPreambleAndGroupName({ heading }) {
const line = `\n## ${heading}\n`;
if (printPreamble) {
printPreamble = false;
return preamble + line;
}
return line;
}
module.exports = {
"dataSource": "prs",
"prefix": "Move2Kube UI ",
"groupBy":
{
"🚀 Features": ["enhancement"],
"🐛 Bug Fixes": ["bug"]
},
"template": {
"group": printPreambleAndGroupName,
"issue": ({ name, text, url }) => `- ${name} [${text}](${url})`,
}
} | javascript | 9 | 0.566517 | 96 | 22.892857 | 28 | starcoderdata |
import { LightningElement, api, track } from 'lwc';
import { connect } from 'c/connect'
import { setVisibilityFilter } from 'c/actions'
const mapStateToProps = (state, ownProps) => ({
variant: ownProps.filter === state.visibilityFilter ? 'brand' : 'neutral'
})
const mapDispatchToProps = (dispatch, ownProps) => ({
handleClick: () => dispatch(setVisibilityFilter(ownProps.filter))
})
export default class TodoFooter extends LightningElement {
@track variant
@api label;
@api filter;
connectedCallback() {
connect(mapStateToProps, mapDispatchToProps)(this);
}
} | javascript | 9 | 0.711429 | 93 | 30.818182 | 22 | starcoderdata |
"""
Resonance Driving Terms
***********************
Functions for the calculations of Resonance Driving Terms, as well as
getting lists of valid driving term indices for certain orders.
"""
import itertools
import logging
from math import factorial
from typing import Tuple, Sequence, List, Union
import numpy as np
import pandas as pd
from tfs import TfsDataFrame
from optics_functions.constants import PI2I, X, Y, BETA, TUNE
from optics_functions.utils import (seq2str, timeit, get_all_phase_advances,
dphi_at_element, dphi, i_pow,
split_complex_columns)
LOG = logging.getLogger(__name__)
def calculate_rdts(df: TfsDataFrame, rdts: Sequence[str],
qx: float = None, qy: float = None, feeddown: int = 0,
complex_columns: bool = True, loop_phases: bool = False,
hamiltionian_terms: bool = False) -> TfsDataFrame:
""" Calculates the Resonance Driving Terms.
Eq. (A8) in [FranchiAnalyticFormulas2017]_ .
Args:
df (TfsDataFrame): Twiss Dataframe.
rdts (Sequence): List of rdt-names to calculate.
qx (float): Tune in X-Plane (if not given, header df.Q1 is assumed present).
qy (float): Tune in Y-Plane (if not given, header df.Q2 is assumed present).
feeddown (int): Levels of feed-down to include.
complex_columns (bool): Output complex values in single column of type complex.
If ``False``, split complex columns into two real-valued columns.
loop_phases (bool): Loop over elements when calculating phase-advances.
Might be slower for small number of elements, but
allows for large (e.g. sliced) optics.
hamiltionian_terms (bool): Add the hamiltonian terms to the result dataframe.
Returns:
New TfsDataFrame with RDT columns.
"""
LOG.info(f"Calculating RDTs: {seq2str(rdts):s}.")
with timeit("RDT calculation", print_fun=LOG.debug):
df_res = TfsDataFrame(index=df.index)
if qx is None:
qx = df.headers[f"{TUNE}1"]
if qy is None:
qy = df.headers[f"{TUNE}2"]
if not loop_phases:
phase_advances = get_all_phase_advances(df) # might be huge!
for rdt in rdts:
rdt = rdt.upper()
if len(rdt) != 5 or rdt[0] != 'F':
raise ValueError(f"'{rdt:s}' does not seem to be a valid RDT name.")
j, k, l, m = [int(i) for i in rdt[1:]]
conj_rdt = jklm2str(k, j, m, l)
if conj_rdt in df_res:
df_res[rdt] = np.conjugate(df_res[conj_rdt])
else:
with timeit(f"calculating {rdt}", print_fun=LOG.debug):
n = j + k + l + m
jk, lm = j + k, l + m
if n <= 1:
raise ValueError(f"The RDT-order has to be >1 but was {n:d} for {rdt:s}")
denom_h = 1./(factorial(j) * factorial(k) * factorial(l) * factorial(m) * (2**n))
denom_f = 1./(1. - np.exp(PI2I * ((j-k) * qx + (l-m) * qy)))
betax = df[f"{BETA}{X}"]**(jk/2.)
betay = df[f"{BETA}{Y}"]**(lm/2.)
# Magnetic Field Strengths with Feed-Down
dx_idy = df[X] + 1j*df[Y]
                    k_complex = pd.Series(0j, index=df.index)  # Complex sum of strengths (from K_n + iJ_n) and feeddown to them
for q in range(feeddown+1):
n_mad = n + q - 1
kl_iksl = df[f"K{n_mad:d}L"] + 1j * df[f"K{n_mad:d}SL"]
k_complex += (kl_iksl * (dx_idy**q)) / factorial(q)
# real(i**lm * k+ij) is equivalent to Omega-function in paper, see Eq.(A11)
# pd.Series is needed here, as np.real() returns numpy-array
k_real = pd.Series(np.real(i_pow(lm) * k_complex), index=df.index)
sources = df.index[k_real != 0] # other elements do not contribute to integral, speedup summations
if not len(sources):
LOG.warning(f"No sources found for {rdt}. RDT will be zero.")
df_res[rdt] = 0j
if hamiltionian_terms:
df_res[f2h(rdt)] = 0j
continue
                    # calculate the hamiltonian-numerator part
h_terms = -k_real.loc[sources] * betax.loc[sources] * betay.loc[sources]
if loop_phases:
# do loop over elements to not have elements x elements Matrix in memory
h_jklm = pd.Series(index=df.index, dtype=complex)
for element in df.index:
# calculate dphi from all sources to the element
# index-intersection keeps `element` at correct place in index
sources_plus = df.index.intersection(sources.union([element]))
dphis = dphi_at_element(df.loc[sources_plus, :], element, qx, qy)
phase_term = np.exp(PI2I * ((j-k) * dphis[X].loc[sources] + (l-m) * dphis[Y].loc[sources]))
h_jklm[element] = (h_terms * phase_term).sum() * denom_h
else:
phx = dphi(phase_advances['X'].loc[sources, :], qx)
phy = dphi(phase_advances['Y'].loc[sources, :], qy)
phase_term = np.exp(PI2I * ((j-k) * phx + (l-m) * phy))
h_jklm = phase_term.multiply(h_terms, axis="index").sum(axis="index").transpose() * denom_h
df_res[rdt] = h_jklm * denom_f
LOG.info(f"Average RDT amplitude |{rdt:s}|: {df_res[rdt].abs().mean():g}")
if hamiltionian_terms:
df_res[f2h(rdt)] = h_jklm
if not complex_columns:
terms = list(rdts)
if hamiltionian_terms:
terms += [f2h(rdt) for rdt in rdts] # F#### -> H####
df_res = split_complex_columns(df_res, terms)
return df_res
def get_ac_dipole_rdts(order_or_terms: Union[int, str, Sequence[str]], spectral_line: Tuple[int],
plane: str, ac_tunes: Tuple[float, float], acd_name: str):
""" Calculates the Hamiltonian Terms under Forced Motion.
Args:
order_or_terms (Union[int, str, Sequence[str]]): If an int is given all Resonance Driving
Terms up to this order will be calculated. The strings are assumed to be the desired
driving term names, e.g. "F1001"
spectral_line (Tuple[int]): Needed to determine what phase advance is needed before and
after AC dipole location, depends on detal+ and delta-. Sample input: (2,-1)
plane (str): Either 'H' or 'V' to determine phase term of AC dipole before and after
ACD location.
ac_tunes (Tuple[float, float]) Contains horizontal and vertical AC dipole tunes,
i.e. (0.302, 0.33)
acd_name (str): The AC Dipole element name (?).
"""
raise NotImplementedError("Todo. Leave it here so it's not forgotten. See (and improve) python2 code!")
# RDT Definition Generation Functions ------------------------------------------
def get_all_to_order(n: int) -> List[Tuple[int, int, int, int]]:
""" Returns list of all valid RDT jklm-tuple of order 2 to n """
if n <= 1:
raise ValueError("'n' must be greater 1 for resonance driving terms.")
permut = [x for x in itertools.product(range(n + 1), repeat=4)
if 1 < sum(x) <= n and not (x[0] == x[1] and x[2] == x[3])]
return list(sorted(permut, key=sum))
def generator(orders: Sequence[int], normal: bool = True,
skew: bool = True, complex_conj: bool = True) -> dict:
""" Generates lists of RDT-4-tuples sorted into a dictionary by order.
Args:
orders (list): list of orders to be generated. Orders < 2 raise errors.
normal (bool): generate normal RDTs (default: True).
skew (bool): generate skew RDTs (default: True).
complex_conj (bool): Have both, RDT and it's complex conjugate RDT in the list
(default: True).
Returns:
Dictionary with keys of orders containing lists of 4-Tuples for the RDTs of that order.
"""
if any([n <= 1 for n in orders]):
raise ValueError("All order must be greater 1 for resonance driving terms.")
if not (normal or skew):
raise ValueError("At least one of 'normal' or 'skew' parameters must be True.")
permut = {o: [] for o in orders}
for x in itertools.product(range(max(orders) + 1), repeat=4):
order = sum(x)
if ((order in orders) # check for order
and not (x[0] == x[1] and x[2] == x[3]) # rdt index rule
and ((skew and sum(x[2:4]) % 2) or (normal and not sum(x[2:4]) % 2)) # skew or normal
and (complex_conj or not((x[1], x[0], x[3], x[2]) in permut[order])) # filter conj
):
permut[order].append(x)
return permut
# Other ------------------------------------------------------------------------
def jklm2str(j: int, k: int, l: int, m: int) -> str:
return f"F{j:d}{k:d}{l:d}{m:d}"
def str2jklm(rdt: str) -> Tuple[int, ...]:
return tuple(int(i) for i in rdt[1:])
def f2h(rdt: str) -> str:
return f"H{rdt[1:]}" | python | 27 | 0.539328 | 128 | 44.004695 | 213 | starcoderdata |
def _import_validations(self):
"""
Method to import validations to Mongo
"""
# import validations
self.update_jobGroup(self.import_manifest.get(
'import_id', uuid.uuid4().hex), 'StateIO: Importing Validations')
# loop through jobs
for orig_job_id, clone_job_id in self.import_manifest['pk_hash']['jobs'].items():
            # assemble location of export
validations_json_filepath = '%s/validation_exports/j%s_mongo_validations.json' % (
self.import_path, orig_job_id)
# load as dataframe
validations_df = self.spark.read.json(validations_json_filepath)
# check for dataframe rows to proceed
if len(validations_df.take(1)) > 0:
# read first row to get old validation_scenario_id, and run through pk_hash for new one
row = validations_df.take(1)[0]
vs_id = int(row.validation_scenario_id['$numberLong'])
new_vs_id = self.import_manifest['pk_hash']['validations'][vs_id]
# flatten record_id
validations_df = validations_df.withColumn(
'record_id', validations_df['record_id']['$oid'])
# retrieve newly written records for this Job
pipeline = json.dumps(
{'$match': {'job_id': clone_job_id, 'success': True}})
records_df = self.spark.read.format("com.mongodb.spark.sql.DefaultSource")\
.option("uri", "mongodb://%s" % settings.MONGO_HOST)\
.option("database", "combine")\
.option("collection", "record")\
.option("partitioner", "MongoSamplePartitioner")\
.option("spark.mongodb.input.partitionerOptions.partitionSizeMB",
settings.MONGO_READ_PARTITION_SIZE_MB) \
.option("pipeline", pipeline).load()
# join on validations_df.record_id : records_df.orig_id
updated_validations_df = validations_df.drop('_id').alias('validations_df').join(
records_df.select('_id', 'orig_id').alias('records_df'),
validations_df['record_id'] == records_df['orig_id'])
# update record_id
updated_validations_df = updated_validations_df.withColumn(
'record_id', updated_validations_df['_id'])
# limit to validation columns
updated_validations_df = updated_validations_df.select(
validations_df.columns).drop('_id')
# flatten
updated_validations_df = updated_validations_df.withColumn('fail_count',
updated_validations_df.fail_count[
'$numberLong'].cast(LongType()))
updated_validations_df = updated_validations_df.withColumn('job_id', pyspark_sql_functions.lit(
int(clone_job_id)).cast(LongType()))
# update validation scenario id
updated_validations_df = updated_validations_df.withColumn('validation_scenario_id',
pyspark_sql_functions.lit(
int(new_vs_id)).cast(LongType()))
# write records to MongoDB
updated_validations_df.write.format("com.mongodb.spark.sql.DefaultSource")\
.mode("append")\
.option("uri", "mongodb://%s" % settings.MONGO_HOST)\
.option("database", "combine")\
.option("collection", "record_validation").save() | python | 27 | 0.515159 | 112 | 52.328767 | 73 | inline |
package passwordhash
import "fmt"
var (
DefaultPasswordSaltSize int = 32
)
type HasherInterface interface {
Salt() string
Hash(...string) string
}
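// New returns the password hasher implementation registered for the given algorithm name.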
func New(algorithm string) (HasherInterface, error) {
switch algorithm {
case "sha256":
return NewSha256(), nil
default:
return nil, fmt.Errorf("Unknown hash algorithm: %s", algorithm)
}
} | go | 10 | 0.729381 | 65 | 16.636364 | 22 | starcoderdata |
import { useFormik, FormikProvider } from "formik";
import { useMemo } from "react";
import { Box, Flex, Text } from "rebass";
import {
FormGrayBox,
FormGroupInput,
FormInputBox,
} from "~/components/AppForm";
import { Geocode, geocodeInitialValue } from "~/components/Geocode";
import { Link } from "~/components/Link";
import { serviceSchema } from "~/validation-schemas/serviceSchema";
import {
Button,
Field,
Heading,
InlineError,
Select,
Textarea,
} from "~/components";
import { findOptions } from "~/utils/form";
function buildTiOptions(tis) {
const tiOptions = tis.map((ti) => ({
label: ti.etablissement,
value: ti.id,
}));
return { tiOptions };
}
function ServiceEditInformationsForm(props) {
const { handleSubmit, cancelLink, service } = props;
const { departements, service_tis } = service;
const tis = departements.reduce((acc, { tis }) => {
acc.push(...tis);
return acc;
}, []);
const { tiOptions } = useMemo(() => {
return buildTiOptions(tis);
}, [tis]);
const {
count: antennes_count,
sum: { mesures_max: antennes_mesures_max },
} = service.service_antennes_aggregate.aggregate;
const formik = useFormik({
initialValues: {
competences: service.competences || "",
dispo_max: service.dispo_max || "",
email: service.email || "",
geocode: geocodeInitialValue(service),
information: service.information || "",
nom: service.nom || "",
prenom: service.prenom || "",
telephone: service.telephone || "",
tis: service_tis.map(({ ti }) => ti.id),
antennes_count,
antennes_mesures_max,
},
onSubmit: handleSubmit,
validationSchema: serviceSchema,
});
  return (
    <FormikProvider value={formik}>
      <form noValidate onSubmit={formik.handleSubmit}>
        <Heading size={4} mb={1}>
          {"Responsable"}
        </Heading>
        <FormGroupInput
          placeholder="Nom"
          id="nom"
          formik={formik}
          validationSchema={serviceSchema}
        />
        <Box flex={1 / 2}>
          <FormGroupInput
            placeholder="Prénom"
            id="prenom"
            formik={formik}
            validationSchema={serviceSchema}
          />
        </Box>
        <Heading size={4} mb={1}>
          {"Coordonnées"}
        </Heading>
        <FormGroupInput
          placeholder="Email"
          id="email"
          formik={formik}
          validationSchema={serviceSchema}
        />
        <Box flex={1 / 2}>
          <FormGroupInput
            placeholder="Téléphone"
            id="telephone"
            formik={formik}
            validationSchema={serviceSchema}
          />
        </Box>
        <Heading size={4}>{"Adresse"}</Heading>
        <Text lineHeight="1.5" color="textSecondary">
          {
            "Cette adresse permettra de localiser le service sur la carte des mesures"
          }
        </Text>
        <Geocode
          id="geocode"
          resource={service}
          onChange={(geocode) => formik.setFieldValue("geocode", geocode)}
          aria-describedby="msg-geocode"
        />
        <div id="msg-geocode">
          <InlineError
            message={formik.errors.geocode}
            fieldId="geocode"
          />
        </div>
        <Heading size={4}>{"Tribunaux"}</Heading>
        <Text lineHeight="1.5" color="textSecondary">
          {"Liste des tribunaux préférentiels"}
        </Text>
        <Select
          instanceId={"tis-filter"}
          id="tis"
          name="tis"
          placeholder="Tribunaux dans lesquels vous exercez"
          value={findOptions(tiOptions, formik.values.tis)}
          hasError={formik.errors.tis && formik.touched.tis}
          onChange={(options) => {
            formik.setFieldValue(
              "tis",
              (options || []).map((o) => o.value)
            );
          }}
          options={tiOptions}
          isMulti
          aria-describedby="msg-tis"
        />
        <div id="msg-tis">
          {formik.touched.tis && (
            <InlineError message={formik.errors.tis} fieldId="tis" />
          )}
        </div>
        <Heading size={4} mb={1}>
          {"Activité"}
        </Heading>
        <Text lineHeight="1.5" color="textSecondary">
          {"Ces informations seront visibles par les magistrats."}
        </Text>
        <FormGroupInput
          placeholder="Nombre de mesures souhaité"
          id="dispo_max"
          formik={formik}
          validationSchema={serviceSchema}
        />
        <Textarea
          value={formik.values.competences}
          id="competences"
          name="competences"
          error={formik.errors.competences}
          onChange={formik.handleChange}
          label="Informations à destination du magistrat"
          placeholder="Préférences géographiques, compétences, langues parlées, ..."
          aria-describedby="msg-competences"
        />
        <div id="msg-competences">
          <InlineError
            message={formik.errors.competences}
            fieldId="competences"
          />
        </div>
        <Flex p={2} alignItems="center" justifyContent="flex-end">
          <Box mr="2">
            <Link to={cancelLink}>
              <Button variant="outline">Annuler</Button>
            </Link>
          </Box>
          <Button
            type="submit"
            disabled={formik.isSubmitting}
            isLoading={formik.isSubmitting}
          >
            Enregistrer
          </Button>
        </Flex>
      </form>
    </FormikProvider>
  );
}
export { ServiceEditInformationsForm }; | javascript | 20 | 0.493019 | 118 | 29.174468 | 235 | starcoderdata |
@Test public void testPlayerProfileResponseTooMany() {
// before
MojangAPI classUnderTest = new MojangAPI();
// execute
PlayerProfile pp1 = null;
PlayerProfile pp2 = null;
try {
            pp1 = classUnderTest.getPlayerProfile("4566e69fc90748ee8d71d7ba5aa00d20").orElse(null);
pp2 = classUnderTest.getPlayerProfile("4566e69fc90748ee8d71d7ba5aa00d20").orElse(null);
} catch (ApiResponseException e) {
assertTrue(e instanceof TooManyRequestsException);
}
// Seems like they raised the limit
// assertNull(pp2);
} | java | 11 | 0.641234 | 99 | 33.277778 | 18 | inline |
protected override void Draw(GameTime gameTime)
{
//
// Replace this with your own drawing code.
//
GraphicsDevice.Clear(Color.CornflowerBlue);
spriteBatch.Begin();
spriteBatch.Draw(font, new Vector2(20, 20), Color.White);
spriteBatch.End();
spriteBatch.Begin();
Primitives.DrawBox(spriteBatch,
new Rectangle(20, 20 + font.Height, 30, 30),
Color.Blue);
spriteBatch.End();
if (this.ActiveScene != null)
{
spriteBatch.Begin();
this.ActiveScene.Draw(gameTime, spriteBatch);
spriteBatch.End();
}
base.Draw(gameTime);
} | c# | 13 | 0.476015 | 75 | 29.148148 | 27 | inline |
<?php
if(isset($_REQUEST['FechaNacimiento'])){
ob_start();
$urlApi="https://pokeapi.co/api/v2/pokemon/";
$PokemonListado=[];
$Fecha=strtotime($_REQUEST['FechaNacimiento']);
putenv('GDFONTPATH=' . realpath('.'));
$Fuente='./Pokemon Solid.ttf';
$FuenteHoll='./Pokemon Hollow.ttf';
ini_set('memory_limit', '256M');
array_push($PokemonListado, ltrim(date("m", $Fecha),'0'));
array_push($PokemonListado, ltrim(date("d", $Fecha),'0'));
array_push($PokemonListado, ltrim(date("y", $Fecha),'0'));
$W=96*3;
$H=110;
$Px=0;
$margen=10;
$imgBase=imagecreatetruecolor($W+$margen+2,$H);
imagesavealpha($imgBase,true);
$rojo = imagecolorallocate($imgBase, 0xFF, 0x00, 0x00);
$negro = imagecolorallocate($imgBase, 0x00, 0x00, 0x00);
$blanco = imagecolorallocate($imgBase, 0xFF, 0xFF, 0xFF);
$azul = imagecolorallocate($imgBase, 0x00, 0x00, 0xFF);
$amarillo = imagecolorallocate($imgBase, 0xFF, 0xE9, 0x00);
$transp = imagecolorallocatealpha($imgBase, 255, 0, 0, 127);
imagefill($imgBase,0,0,$transp);
foreach ($PokemonListado as $indice=>$PokemonID) {
$PokemonListado[$indice]=json_decode(file_get_contents($urlApi.$PokemonID));
list($ancho, $alto, $tipo, $atributos) =getimagesize($PokemonListado[$indice]->sprites->front_default);
if($ancho>($W-$Px)){
$imgX=imagecreatetruecolor($W+$ancho+$margen,$H);
imagesavealpha($imgX,true);
imagefill($imgX,0,0,$transp);
imagecopy($imgX, $imgBase, 0, 0, 0, 0, $W, $H);
$W+=$ancho+$margen;
ImageDestroy($imgBase);
$imgBase=$imgX;
}
$img=imagecreatefrompng($PokemonListado[$indice]->sprites->front_default);
imagecopymerge($imgBase, $img, $Px+$margen, 0, 0, 0, $ancho, $alto, 100);
ImageDestroy($img);
$Posicion = imagefttext($imgBase, 14, 5, $Px+$margen, $alto, $amarillo, $Fuente, ucfirst($PokemonListado[$indice]->name));
$Posicion = imagefttext($imgBase, 14, 5, $Px+$margen, $alto, $azul, $FuenteHoll, ucfirst($PokemonListado[$indice]->name));
$Px+=$ancho;
}
ob_clean();
imagepng($imgBase);
$ArchivoTemporal=ob_get_clean();
ImageDestroy($imgBase);
ob_end_clean();
if(!isset($_REQUEST['Imagen'])){
echo '<img src="';
echo 'data:image/png;base64,'.base64_encode($ArchivoTemporal);
echo '" >';
}else{
header('Content-Type: image/png');
echo $ArchivoTemporal;
}
}else{
?>
<!DOCTYPE html>
<html lang="">
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
Pokémon
<!-- Bootstrap CSS -->
<link rel="stylesheet" href="https://getbootstrap.com/docs/4.0/dist/css/bootstrap.min.css" crossorigin="anonymous">
<form method="get">
<div class="container">
<div class="alert alert-primary" role="alert">
Escribe tu fecha de nacimiento para generar tu equipo Pokémon
<div class="form-group">
<div class="row">
<div class="col-12">
<div class="col-sm">
<input type="date" name="FechaNacimiento" value="" placeholder="Fecha nacimiento">
<div class="col-sm">
<button type="submit" class="btn btn-primary">Ver Equipo
<button type="submit" class="btn btn-primary" name="imagen" value="1">Ver Equipo Imagen
<?php } | php | 16 | 0.527123 | 134 | 40.858586 | 99 | starcoderdata |
<?php
use Illuminate\Http\Request;
/*
|--------------------------------------------------------------------------
| API Routes
|--------------------------------------------------------------------------
|
| Here is where you can register API routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| is assigned the "api" middleware group. Enjoy building your API!
|
*/
Route::middleware('auth:api')->get('/user', function (Request $request) {
return $request->user();
});
Route::group(['namespace'=>'Authntication'],function()
{
Route::post('register', 'rigisterController@register');
Route::post('login','loginController@login');
Route::middleware('auth:api')->post('logout','loginController@logout');
});
Route::group(['namespace'=>'dashboard'],function()
{
// blog category dashboard
Route::get('dashboard/index/blogcategory', 'blogcategoryController@index');
Route::post('dashboard/create/blogcategory','blogcategoryController@store');
Route::post('dashboard/update/blogcategory/{id}','blogcategoryController@update');
Route::get('dashboard/delete/blogcategory/{id}','blogcategoryController@destroy');
//blog dashboard
Route::resource('dashboard/blog', 'blogController');
// food category dashboard
Route::get('dashboard/index/foodcategory', 'foodcategoryController@index');
Route::post('dashboard/create/foodcategory','foodcategoryController@store');
Route::post('dashboard/update/foodcategory/{id}','foodcategoryController@update');
Route::get('dashboard/delete/foodcategory/{id}','foodcategoryController@destroy');
//food dashboard
Route::resource('dashboard/food', 'foodController');
// user dashboard
Route::get('dashboard/index/user', 'userController@index');
Route::post('dashboard/create/user', 'userController@store');
Route::post('dashboard/update/user/{id}', 'userController@update');
Route::get('dashboard/delete/user/{id}','userController@destroy');
// doctor dashboard
Route::get('dashboard/index/doctor', 'doctorController@index');
Route::post('dashboard/create/doctor', 'doctorController@store');
Route::post('dashboard/update/doctor/{id}', 'doctorController@update');
Route::get('dashboard/delete/doctor/{id}','doctorController@destroy');
});
Route::group(['namespace'=>'blog'],function()
{
Route::get('category/blog', 'categortController@index');
Route::get('blog', 'blogController@index');
});
Route::group(['namespace'=>'food'],function()
{
Route::get('category/food', 'categoryController@index');
Route::get('food', 'foodController@index');
});
Route::group(['namespace'=>'community'],function()
{
Route::get('post', 'postController@index');
Route::post('post', 'postController@create');
Route::get('post/{id}', 'postController@destroy');
Route::get('comment', 'commentController@index');
Route::post('comment/{post_id}', 'commentController@create');
Route::get('comment/{id}', 'commentController@destroy');
});
Route::get('profile', 'profileController@index');
Route::post('profile', 'profileController@create');
Route::group(['namespace'=>'doctors'],function()
{
Route::get('doctor/profile/{id}', 'doctorController@show');
Route::get('doctors', 'doctorController@index');
});
Route::get('chat', 'chatController@index');
Route::post('chat/{doctor_id}', 'chatController@create'); | php | 13 | 0.692887 | 82 | 31.223301 | 103 | starcoderdata |
@inherits Nancy.ViewEngines.Razor.NancyRazorViewBase
@{ Layout = "layout/main.cshtml"; }
<div class="row">
<div class="col-md-6">
<div class="small-box bg-blue-gradient">
<div class="inner">
Hardware
order to get started mining, you need mining hardware. You can mine coins with your CPU or GPU.
<div class="icon">
<i class="fa fa-microchip">
<a href="https://www.merit.me/" target="_blank" class="small-box-footer">
Get Started with Merit <i class="fa fa-arrow-circle-right">
<div class="col-md-6">
<div class="small-box bg-purple-gradient">
<div class="inner">
Wallet Software
order to a mine a specific crypto-currency, you have first download the wallet software and create an address.
<div class="icon">
<i class="ion ion-md-desktop">
<a href="https://www.merit.me/get-started/#get-merit-desktop" target="_blank" class="small-box-footer">
Get Wallet <i class="fa fa-arrow-circle-right">
<div class="row">
<div class="col-md-6">
<div class="small-box bg-aqua-gradient">
<div class="inner">
Mining Software
you need a mining software that can connect our pools.
<div class="icon">
<i class="ion ion-md-hammer">
<a href="/help/miningsoftware/" class="small-box-footer">
Check our list to find a suitable one for you. <i class="fa fa-arrow-circle-right">
<div class="col-md-6">
<div class="small-box bg-orange-gradient">
<div class="inner">
an Invite
is an invite-only network, and you must be invited in order to join.
<div class="icon">
<i class="ion ion-md-wallet">
<a href="https://www.merit.me/get-started/#get-invite" target="_blank" class="small-box-footer">
How to get an invite <i class="fa fa-arrow-circle-right">
<div class="row pool-config">
<div class="col-md-12">
<div class="box box-solid box-free">
<div class="box-header">
<h3 class="box-title">Configure Your Miner
<div class="box-body no-padding">
<table class="table table-striped">
<th style="width: 40px" class="hidden-xs">
<th style="width: 120px">Pool
<th style="width: 100px" class="hidden-xs">Location
<th class="text-right">Stratum Address
<th class="hidden-xs">Username
<th class="hidden-xs">Password
@foreach (var node in Model.Stack.Nodes)
{
<td class="pool-icon hidden-xs"><img src="/Content/img/coins/icon/@(Model.Pool.Config.Coin.Symbol).png" />
<td class="hidden-xs"><img src="/Content/img/flags/@(node.Location).png" alt="@node.Location" />
<td class="text-right">stratum+tcp://@node.Address
<td class="hidden-xs">Your wallet address
<td class="hidden-xs">Anything
} | c# | 22 | 0.46653 | 143 | 40.685714 | 105 | starcoderdata |
#ifndef _FONT
#define _FONT
#include "vector.h"
#include "logger.h"
#include "texture.h"
#ifdef __cplusplus // if c++
extern "C" { // run as c code
#endif
#include
#include FT_FREETYPE_H
#ifdef __cplusplus
}
#endif
#ifdef __cplusplus // if c++
#include
#include
#include
#include
// To avoid naming conflictions with X11
#ifndef __gnu_linux__
class Font;
#define FONT Font
#endif
#ifdef __gnu_linux__
class XFont;
#define FONT XFont
#endif
#ifdef __windows__
#define DEFAULT_FONT_PATH "C:/Windows/Fonts/ariblk.ttf"
#endif
#ifdef __gnu_linux__
#define DEFAULT_FONT_PATH "/usr/share/fonts/X11/Type1/c0583bt_.pfb"
#endif
#ifdef __macosx__
#define DEFAULT_FONT_PATH "System/Library/Fonts/HelveticaNeue.dfont"
#endif
class FONT
{
public:
FONT(); static int font_new(lua_State *L);
FONT(const FONT& font);
FONT(const std::string& file_name);
~FONT();
// methods
bool load(const std::string& file_name); static int load(lua_State *L);
bool load(const void * data, long size);
void generate(); static int generate(lua_State *L); /* stores all characters and their sizes in array*/
void destroy(); static int destroy(lua_State *L);
void copy(const FONT& font); static int copy(lua_State *L);
// setters
void set_width(unsigned int width); static int set_width(lua_State *L);
void set_height(unsigned int height); static int set_height(lua_State *L);
void set_size(unsigned int width, unsigned int height); static int set_size(lua_State *L);
// getters
int get_width()const; static int get_width(lua_State *L);
int get_width(char glyph)const; static int get_width_of_glyph(lua_State *L);
int get_width(const std::string& text) const;
int get_height()const; static int get_height(lua_State *L);
int get_height(char glyph)const; static int get_height_of_glyph(lua_State *L);
int get_height(const std::string& text)const;
Vector2 get_size()const; static int get_size(lua_State *L);
Vector2 get_size(char glyph)const; //static int get_size(lua_State *L);
Vector4 get_rect()const; static int get_rect(lua_State *L);
FT_Byte * get_data()const; static int get_data(lua_State *L);
FT_Face get_face()const; static int get_face(lua_State *L);
std::string get_file()const; static int get_file(lua_State *L);
FT_Library get_library()const;
static FONT * get_system_font();
// font info
unsigned int get_buffer(char glyph) const; static int get_buffer(lua_State *L);
unsigned int get_buffer(const std::string& glyph) const;
//////////////////////////////////////////////////////////////////////////////////////////////////
Texture * get_bitmap(int index) const; static int get_bitmap(lua_State *L);
Texture * get_bitmap(const std::string& name) const;
unsigned int get_character_count() const; static int get_character_count(lua_State *L);
std::string get_family_name()const; static int get_family_name(lua_State *L);
std::string get_style_name ()const; static int get_style_name (lua_State *L);
//void get_()const; //static int get_(lua_State *L);
Vector2 get_bearing(char glyph)const; static int get_bearing(lua_State *L);
Vector2 get_advance(char glyph)const; static int get_advance(lua_State *L);
//void get_()const; //static int get_(lua_State *L);
//void get_()const; //static int get_(lua_State *L);
//////////////////////////////////////////////////////////////////////////////////////////////////
// boolean
bool is_generated() const; static int is_generated(lua_State * L);
// friends
friend class Engine;
friend class Logger;
friend class Label;
static FT_Library library;
private:
static bool open ();
static void close();
////////////////////
unsigned int width;
unsigned int height;
FT_Face face; // face = collection of glyphs (single characters)
FT_Byte * data; // font data loaded from memory
std::string file;
static FONT * system_font;
////////////////////
std::vector<Texture *> bitmaps; // to save textures
FT_Glyph_Metrics metrics[256];
//Get cell dimensions
unsigned int cellW = 0;
unsigned int cellH = 0;
int max_bearing = 0;
int min_hang = 0;
public:
typedef struct Character {
unsigned int id; // opengl texture / ID handle of the glyph texture
unsigned int width; // width of glyph
unsigned int height; // height of glyph
int bearing_x; // offset from baseline to left of glyph
int bearing_y; // offset from baseline to top of glyph
long int advance_x; // offset to advance to next glyph (to the right)
long int advance_y; // offset to advance to next glyph (to the bottom)
unsigned char * buffer; // bitmap data
} Character;
std::map<char, Character> character_array; // map is like a std::vector that stores two types
};
#endif
#endif | c | 7 | 0.556766 | 144 | 43.80315 | 127 | starcoderdata |
package edu.wisc.my.ltiproxy;
import com.google.common.base.MoreObjects;
import java.util.Collections;
import java.util.Map;
import java.util.Objects;
public class LTIParameters {
private final String actionURL;
private final Map<String, String> signedParameters;
public LTIParameters(String actionURL, Map<String, String> signedParameters) {
this.actionURL = actionURL;
this.signedParameters = Collections.unmodifiableMap(signedParameters);
}
public String getActionURL() {
return actionURL;
}
public Map<String, String> getSignedParameters() {
return signedParameters;
}
@Override
public boolean equals(Object obj) {
boolean result = (null != obj) && (obj instanceof LTIParameters);
if (result) {
LTIParameters rhs = (LTIParameters) obj;
result = result
&& Objects.equals(rhs.getActionURL(), this.getActionURL())
&& Objects.equals(rhs.getSignedParameters(), this.getSignedParameters());
}
return result;
}
@Override
public int hashCode() {
return Objects.hash(this.getActionURL(), this.getSignedParameters());
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("actionUrl", actionURL)
.add("signedParameters", signedParameters)
.toString();
}
} | java | 15 | 0.637431 | 93 | 28.55102 | 49 | starcoderdata |
private List<Tuple<int, int>> GetRowsRanges()
{
// For each row we return at which column it starts and ends
List<Tuple<int, int>> RowsRanges = new List<Tuple<int, int>>(rowsCount);
int rowNum, start, end;
for (rowNum = 0; rowNum < rowsCount; rowNum++)
{
start = 0;
end = 0;
// First we skip the first null squares
while (boardSquares[rowNum, start] == null && start < columnsCount - 1)
start++;
// And now we count how many columns the row has
end = start;
while (boardSquares[rowNum, end] != null && end < columnsCount - 1)
end++;
RowsRanges.Add(new Tuple<int, int>(start, end));
}
return RowsRanges;
} | c# | 13 | 0.523039 | 83 | 29.923077 | 26 | inline |
def test_GetCalledSamples():
dummy_record = get_dummy_record()
ref_allele = dummy_record.REF
alt_alleles = dummy_record.ALT
rec = trh.TRRecord(dummy_record, ref_allele, alt_alleles, "CAG", "", None)
assert np.all(rec.GetCalledSamples() == [True] * 5 + [False])
assert np.all(rec.GetCalledSamples(strict=False))
# Test differences in ploidy
rec = trh.TRRecord(get_triploid_record(), ref_allele, [], "CAG", "", None)
assert np.all(rec.GetCalledSamples(strict=True))
# Test a true no call
rec = trh.TRRecord(get_nocall_record(), ref_allele, [], "CAG", "", None)
assert np.all(~rec.GetCalledSamples()) | python | 10 | 0.659969 | 78 | 42.2 | 15 | inline |
#include "config.h"
#include "file_io_type_pel.hpp"
#include "utils.hpp"
#include "xyz/openbmc_project/Common/error.hpp"
#include
#include
#include
#include
#include
#include
#include
#include
#include
#include "libpldm/base.h"
#include "oem/ibm/libpldm/file_io.h"
namespace pldm
{
namespace responder
{
int PelHandler::readIntoMemory(uint32_t offset, uint32_t& length,
uint64_t address)
{
static constexpr auto logObjPath = "/xyz/openbmc_project/logging";
static constexpr auto logInterface = "org.open_power.Logging.PEL";
auto& bus = pldm::utils::DBusHandler::getBus();
try
{
auto service =
pldm::utils::DBusHandler().getService(logObjPath, logInterface);
auto method = bus.new_method_call(service.c_str(), logObjPath,
logInterface, "GetPEL");
method.append(fileHandle);
auto reply = bus.call(method);
sdbusplus::message::unix_fd fd{};
reply.read(fd);
auto rc = transferFileData(fd, true, offset, length, address);
return rc;
}
catch (const std::exception& e)
{
std::cerr << "GetPEL D-Bus call failed, PEL id = " << fileHandle
<< ", error = " << e.what() << "\n";
return PLDM_ERROR;
}
return PLDM_SUCCESS;
}
int PelHandler::read(uint32_t offset, uint32_t& length, Response& response)
{
static constexpr auto logObjPath = "/xyz/openbmc_project/logging";
static constexpr auto logInterface = "org.open_power.Logging.PEL";
auto& bus = pldm::utils::DBusHandler::getBus();
try
{
auto service =
pldm::utils::DBusHandler().getService(logObjPath, logInterface);
auto method = bus.new_method_call(service.c_str(), logObjPath,
logInterface, "GetPEL");
method.append(fileHandle);
auto reply = bus.call(method);
sdbusplus::message::unix_fd fd{};
reply.read(fd);
std::cerr << "GetPEL D-Bus call done\n";
off_t fileSize = lseek(fd, 0, SEEK_END);
if (fileSize == -1)
{
std::cerr << "file seek failed";
return PLDM_ERROR;
}
if (offset >= fileSize)
{
std::cerr << "Offset exceeds file size, OFFSET=" << offset
<< " FILE_SIZE=" << fileSize << std::endl;
return PLDM_DATA_OUT_OF_RANGE;
}
if (offset + length > fileSize)
{
length = fileSize - offset;
}
auto rc = lseek(fd, offset, SEEK_SET);
if (rc == -1)
{
std::cerr << "file seek failed";
return PLDM_ERROR;
}
size_t currSize = response.size();
response.resize(currSize + length);
auto filePos = reinterpret_cast
filePos += currSize;
rc = ::read(fd, filePos, length);
if (rc == -1)
{
std::cerr << "file read failed";
return PLDM_ERROR;
}
if (rc != length)
{
std::cerr << "mismatch between number of characters to read and "
<< "the length read, LENGTH=" << length << " COUNT=" << rc
<< std::endl;
return PLDM_ERROR;
}
}
catch (const std::exception& e)
{
std::cerr << "GetPEL D-Bus call failed";
return PLDM_ERROR;
}
return PLDM_SUCCESS;
}
int PelHandler::writeFromMemory(uint32_t offset, uint32_t length,
uint64_t address)
{
char tmpFile[] = "/tmp/pel.XXXXXX";
int fd = mkstemp(tmpFile);
if (fd == -1)
{
std::cerr << "failed to create a temporary pel, ERROR=" << errno
<< "\n";
return PLDM_ERROR;
}
close(fd);
fs::path path(tmpFile);
auto rc = transferFileData(path, false, offset, length, address);
if (rc == PLDM_SUCCESS)
{
rc = storePel(path.string());
}
fs::remove(path);
return rc;
}
int PelHandler::fileAck(uint8_t /*fileStatus*/)
{
static constexpr auto logObjPath = "/xyz/openbmc_project/logging";
static constexpr auto logInterface = "org.open_power.Logging.PEL";
auto& bus = pldm::utils::DBusHandler::getBus();
try
{
auto service =
pldm::utils::DBusHandler().getService(logObjPath, logInterface);
auto method = bus.new_method_call(service.c_str(), logObjPath,
logInterface, "HostAck");
method.append(fileHandle);
bus.call_noreply(method);
}
catch (const std::exception& e)
{
std::cerr << "HostAck D-Bus call failed";
return PLDM_ERROR;
}
return PLDM_SUCCESS;
}
int PelHandler::storePel(std::string&& pelFileName)
{
static constexpr auto logObjPath = "/xyz/openbmc_project/logging";
static constexpr auto logInterface = "xyz.openbmc_project.Logging.Create";
auto& bus = pldm::utils::DBusHandler::getBus();
try
{
auto service =
pldm::utils::DBusHandler().getService(logObjPath, logInterface);
using namespace sdbusplus::xyz::openbmc_project::Logging::server;
std::map<std::string, std::string> addlData{};
addlData.emplace("RAWPEL", std::move(pelFileName));
auto severity =
sdbusplus::xyz::openbmc_project::Logging::server::convertForMessage(
sdbusplus::xyz::openbmc_project::Logging::server::Entry::Level::
Error);
auto method = bus.new_method_call(service.c_str(), logObjPath,
logInterface, "Create");
method.append("xyz.openbmc_project.Host.Error.Event", severity,
addlData);
bus.call_noreply(method);
}
catch (const std::exception& e)
{
std::cerr << "failed to make a d-bus call to PEL daemon, ERROR="
<< e.what() << "\n";
return PLDM_ERROR;
}
return PLDM_SUCCESS;
}
} // namespace responder
} // namespace pldm | c++ | 20 | 0.561873 | 80 | 29.480583 | 206 | starcoderdata |
# nflnames/tests/test_nflnames_teams.py
# -*- coding: utf-8 -*-
# Copyright (C) 2020
# Licensed under the MIT License
import random
import pytest
import nflnames
from nflnames import *
def test_TEAM_CODES():
"""tests TEAM_CODES"""
assert isinstance(TEAM_CODES, dict)
rkey = random.choice(list(TEAM_CODES.keys()))
ritem = TEAM_CODES[rkey]
assert isinstance(ritem, list)
assert isinstance(random.choice(ritem), str)
def test_TEAM_NAMES():
"""tests TEAM_NAMES"""
assert isinstance(TEAM_NAMES, dict)
rkey = random.choice(list(TEAM_NAMES.keys()))
ritem = TEAM_NAMES[rkey]
assert isinstance(ritem, list)
assert isinstance(random.choice(ritem), str)
def test_standardize(tprint):
val = nflnames.teams._standardize(random.choice(list(TEAM_CODES.keys())), TEAM_CODES)
assert isinstance(val, list)
assert isinstance(val[0], tuple)
assert isinstance(val[0][0], str)
def test_is_standardized():
tc = 'gbp'
assert not is_standardized({tc})
assert is_standardized({standardize_team_code(tc)})
def test_standard_team_codes():
assert isinstance(standard_team_codes(), set)
def test_standard_team_names():
assert isinstance(standard_team_names(), set)
def test_standardize_team_code():
tc = 'gbp'
assert not is_standardized({tc})
tcs = standardize_team_code(tc)
assert is_standardized({tcs})
def test_standardize_team_name(tprint):
tn = 'Texans'
assert not is_standardized({tn})
stn = standardize_team_name(tn)
tprint(stn)
assert is_standardized({stn}, team_type='name') | python | 13 | 0.684307 | 89 | 23.537313 | 67 | starcoderdata |
func (g *Game) betBlinds() {
//Bet small blind
player := g.table[0]
if player.wealth >= g.smallBlind {
g.pot.commitBet(player, g.smallBlind)
} else {
g.pot.commitBet(player, player.wealth)
}
//Bet big blind
player = g.table[1]
if player.wealth >= 2*g.smallBlind {
g.pot.commitBet(player, 2*g.smallBlind)
} else {
g.pot.commitBet(player, player.wealth)
}
} | go | 10 | 0.668449 | 41 | 21.058824 | 17 | inline |
import PropType from 'prop-types';
import React, { useState } from 'react';
import { connect } from 'react-redux';
import Instruction from './Instruction';
import { getUserStateOrTerritory, getUserStateOrTerritoryStatus } from '../reducers/user.selector';
import { AFFILIATION_STATUSES } from '../constants';
import UpgradeBrowser from './UpgradeBrowser';
import axios from '../util/api';
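// Maps an affiliation status (requested, denied or revoked) to the icon and message shown to the user, with a mailto link to the responsible administrator.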
const ApprovalStatus = ({ status, mailTo, administratorType }) => {
const options = {
[AFFILIATION_STATUSES.REQUESTED]: {
status: `Approval Pending From ${administratorType} Administrator`,
src: '../static/icons/puzzle.svg',
alt: 'Puzzle Piece Icon',
width: 57
},
[AFFILIATION_STATUSES.DENIED]: {
status: 'Approval Has Been Denied',
src: '../static/icons/alert.svg',
alt: 'Alert Icon',
width: 18
},
[AFFILIATION_STATUSES.REVOKED]: {
status: 'Approval Permissions Revoked',
src: '../static/icons/alert.svg',
alt: 'Alert Icon',
width: 18
}
};
return (
<div className="ds-u-display--flex ds-u-flex-direction--column ds-u-justify-content--center ds-u-align-items--center ds-u-margin-y--4">
<img alt={options[status].alt} src={options[status].src} width={options[status].width} />
      <h3 className="ds-u-margin-bottom--1">{options[status].status}</h3>
      <p className="ds-u-margin--0">
        Contact the{' '}
        {mailTo && <a href={`mailto:${mailTo}`}>{administratorType} Administrator</a>} for more
        information.
      </p>
    </div>
)
};
ApprovalStatus.propTypes = {
status: PropType.string.isRequired,
mailTo: PropType.string.isRequired,
administratorType: PropType.string.isRequired
};
// TODO: We'll have to figure out a way to only show this the first time they go into an approved state?
// const Approved = () => (
// <div className="ds-u-display--flex ds-u-flex-direction--column ds-u-justify-content--center ds-u-align-items--center ds-u-margin-y--4">
// <img
// alt="Puzzle Piece Icon"
// src="../static/icons/thumbs-up.svg"
// width="57"
// />
//     <h3 className="ds-u-margin-bottom--1">Approved</h3>
//     <p className="ds-u-margin--0">
//       Congratulations! You may now create an APD.
//     </p>
//   </div>
// );
const AffiliationStatus = ({ state, approvalStatus }) => {
const [mailTo, setMailTo] = useState('');
React.useEffect(() => {
axios
.get(`/states/${state.id}`)
.then(res => res.data)
.then(usState => usState.stateAdmins.map(user => user.email).join(','))
.then(email => setMailTo(email));
}, [state]);
return (
<div className="site-body ds-l-container">
<div className="ds-u-margin--0">
<main id="start-main-content">
<div className="ds-u-padding-top--2">
<UpgradeBrowser />
<div className="ds-l-row ds-u-margin-top--7">
<div className="ds-l-col--8 ds-u-margin-x--auto">
<div
className="ds-u-display--flex ds-u-justify-content--center"
data-testid="eAPDlogo"
>
<img
src="/static/img/eAPDLogoSVG_ICO/SVG/eAPDColVarSVG.svg"
alt="eAPD Logo"
/>
                </div>
                <Instruction source="stateDashboard.introduction" />
                <div className="ds-u-margin-top--5 ds-u-padding-bottom--1 ds-u-border-bottom--2">
                  <h2 className="ds-h2 ds-u-display--inline-block">
                    {state ? state.name : ''} APDs
                  </h2>
                </div>
                <ApprovalStatus
                  status={approvalStatus}
                  mailTo={mailTo || ''}
                  administratorType='State'
                />
              </div>
            </div>
          </div>
        </main>
      </div>
    </div>
  );
};
AffiliationStatus.propTypes = {
state: PropType.object.isRequired,
approvalStatus: PropType.string.isRequired
};
const mapStateToProps = state => ({
state: getUserStateOrTerritory(state) || {},
approvalStatus:
getUserStateOrTerritoryStatus(state) || AFFILIATION_STATUSES.REQUESTED
});
export default connect(mapStateToProps)(AffiliationStatus);
export { AffiliationStatus as plain, ApprovalStatus, mapStateToProps }; | javascript | 22 | 0.591338 | 140 | 32.914063 | 128 | starcoderdata |
using System;
using System.Collections.Generic;
namespace Votyra.Core
{
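    // Minimal observable subject: caches the latest value, replays it to each new subscriber, and pushes subsequent changes to all of them.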
    public class SimpleSubject<T> : IObservable<T>, IObserver<T>
{
        private readonly List<IObserver<T>> _observers = new List<IObserver<T>>();
private T _value;
public T Value
{
get => _value;
set
{
                if (EqualityComparer<T>.Default.Equals(_value, value))
{
return;
}
_value = value;
OnNext(value);
}
}
        public IDisposable Subscribe(IObserver<T> observer)
{
_observers.Add(observer);
observer.OnNext(Value);
return new CallbackDisposable(this, observer);
}
public void OnCompleted()
{
_observers.ForEach(o=>o.OnCompleted());
_observers.Clear();
}
public void OnError(Exception error)
{
_observers.ForEach(o => o.OnError(error));
}
public void OnNext(T value)
{
_observers.ForEach(o => o.OnNext(value));
}
private struct CallbackDisposable : IDisposable
{
            private readonly SimpleSubject<T> _parent;
            private readonly IObserver<T> _observer;
            public CallbackDisposable(SimpleSubject<T> parent, IObserver<T> observer)
{
_parent = parent;
_observer = observer;
}
public void Dispose()
{
_parent._observers.Remove(_observer);
}
}
public void Dispose()
{
OnCompleted();
}
}
} | c# | 15 | 0.485976 | 84 | 23.619718 | 71 | starcoderdata |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.brooklyn.policy.action;
import static org.testng.Assert.assertTrue;
import java.util.List;
import org.apache.brooklyn.api.entity.EntitySpec;
import org.apache.brooklyn.api.objs.Configurable;
import org.apache.brooklyn.api.policy.Policy;
import org.apache.brooklyn.api.policy.PolicySpec;
import org.apache.brooklyn.api.sensor.AttributeSensor;
import org.apache.brooklyn.config.ConfigKey;
import org.apache.brooklyn.core.mgmt.rebind.RebindOptions;
import org.apache.brooklyn.core.mgmt.rebind.RebindTestFixtureWithApp;
import org.apache.brooklyn.core.sensor.Sensors;
import org.apache.brooklyn.core.test.entity.TestEntity;
import org.apache.brooklyn.test.Asserts;
import org.apache.brooklyn.util.time.Duration;
import org.testng.annotations.Test;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
public class ScheduledPolicyRebindTest extends RebindTestFixtureWithApp {
    private static final AttributeSensor<Boolean> START = Sensors.newBooleanSensor("start");
/*
* This test simulates what happens when the rebind occurs after more than the
* scheduled period of time has elapsed.
*/
@Test
public void testShortPeriodicEffectorFiresAfterRebind() throws Exception {
TestEntity origEntity = origApp.createAndManageChild(EntitySpec.create(TestEntity.class)
.policy(PolicySpec.create(PeriodicEffectorPolicy.class)
.configure(PeriodicEffectorPolicy.EFFECTOR, "myEffector")
.configure(PeriodicEffectorPolicy.EFFECTOR_ARGUMENTS, ImmutableMap.of())
.configure(PeriodicEffectorPolicy.PERIOD, Duration.millis(1))
.configure(PeriodicEffectorPolicy.TIME, "immediately")
.configure(PeriodicEffectorPolicy.START_SENSOR, START)));
origEntity.sensors().set(START, Boolean.TRUE);
assertCallHistoryContainsEventually(origEntity, "myEffector");
newApp = rebind(RebindOptions.create().terminateOrigManagementContext(true));
TestEntity newEntity = (TestEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(TestEntity.class));
Policy newPolicy = Iterables.find(newEntity.policies(), Predicates.instanceOf(PeriodicEffectorPolicy.class));
assertConfigEqualsEventually(newPolicy, PeriodicEffectorPolicy.RUNNING, true);
int calls = newEntity.getCallHistory().size();
assertCallHistoryEventually(newEntity, "myEffector", calls + 2);
}
@Test
public void testLongPeriodicEffectorFiresAfterRebind() throws Exception {
TestEntity origEntity = origApp.createAndManageChild(EntitySpec.create(TestEntity.class)
.policy(PolicySpec.create(PeriodicEffectorPolicy.class)
.configure(PeriodicEffectorPolicy.EFFECTOR, "myEffector")
.configure(PeriodicEffectorPolicy.EFFECTOR_ARGUMENTS, ImmutableMap.of())
.configure(PeriodicEffectorPolicy.PERIOD, Duration.millis(100))
.configure(PeriodicEffectorPolicy.TIME, "immediately")
.configure(PeriodicEffectorPolicy.START_SENSOR, START)));
origEntity.sensors().set(START, Boolean.TRUE);
assertCallHistoryContainsEventually(origEntity, "myEffector");
newApp = rebind(RebindOptions.create().terminateOrigManagementContext(true));
TestEntity newEntity = (TestEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(TestEntity.class));
Policy newPolicy = Iterables.find(newEntity.policies(), Predicates.instanceOf(PeriodicEffectorPolicy.class));
assertConfigEqualsEventually(newPolicy, PeriodicEffectorPolicy.RUNNING, true);
assertCallHistoryContainsEventually(newEntity, "myEffector");
}
@Test
public void testPeriodicEffectorStartsAfterRebind() throws Exception {
TestEntity origEntity = origApp.createAndManageChild(EntitySpec.create(TestEntity.class)
.policy(PolicySpec.create(PeriodicEffectorPolicy.class)
.configure(PeriodicEffectorPolicy.EFFECTOR, "myEffector")
.configure(PeriodicEffectorPolicy.EFFECTOR_ARGUMENTS, ImmutableMap.of())
.configure(PeriodicEffectorPolicy.PERIOD, Duration.millis(1))
.configure(PeriodicEffectorPolicy.TIME, "immediately")
.configure(PeriodicEffectorPolicy.START_SENSOR, START)));
newApp = rebind(RebindOptions.create().terminateOrigManagementContext(true));
TestEntity newEntity = (TestEntity) Iterables.find(newApp.getChildren(), Predicates.instanceOf(TestEntity.class));
Policy newPolicy = Iterables.find(newEntity.policies(), Predicates.instanceOf(PeriodicEffectorPolicy.class));
Asserts.assertFalse(newPolicy.config().get(PeriodicEffectorPolicy.RUNNING));
Asserts.assertFalse(newEntity.getCallHistory().contains("myEffector"));
newEntity.sensors().set(START, Boolean.TRUE);
assertConfigEqualsEventually(newPolicy, PeriodicEffectorPolicy.RUNNING, true);
assertCallHistoryEventually(newEntity, "myEffector", 2);
}
    private <T> void assertConfigEqualsEventually(Configurable obj, ConfigKey<T> running, T val) {
Asserts.eventually(() -> obj.config().get(running), Predicates.equalTo(val));
}
private void assertCallHistoryContainsEventually(TestEntity entity, String effector) {
assertCallHistoryEventually(entity, effector, 1);
}
private void assertCallHistoryEventually(TestEntity entity, String effector, int minSize) {
Asserts.succeedsEventually(new Runnable() {
public void run() {
                List<String> callHistory = entity.getCallHistory();
synchronized (callHistory) {
int size = Iterables.size(Iterables.filter(callHistory, Predicates.equalTo("myEffector")));
assertTrue(size >= minSize, "size="+size);
}
}});
}
} | java | 23 | 0.71706 | 122 | 49.744526 | 137 | starcoderdata |
using Microsoft.EntityFrameworkCore;
using nyom.domain.Configuration;
using nyom.domain.Notifications;
using nyom.infra.Data.EntityFramwork.Extensions;
using nyom.infra.Data.EntityFramwork.Mapping;
namespace nyom.infra.Data.EntityFramwork.Context
{
public class NyomContext : DbContext
{
        public NyomContext(DbContextOptions<NyomContext> options) : base(options)
{
}
        public DbSet<Configuration> Configurations { get; set; }
        public DbSet<Notification> Notifications { get; set; }
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.AddConfiguration(new ConfigurationMap());
modelBuilder.AddConfiguration(new NotificationMap());
base.OnModelCreating(modelBuilder);
}
}
} | c# | 13 | 0.785812 | 75 | 25.178571 | 28 | starcoderdata |
import{html,Polymer}from"../../@polymer/polymer/polymer-legacy.js";let LrndesignContentblock=Polymer({_template:html`
<style>
:host {
display: inline-block;
position: relative;
box-sizing: border-box;
}
</style>
`,is:"lrndesign-contentblock",properties:{title:{type:String}}});export{LrndesignContentblock};
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics.CodeAnalysis;
using System.Management.Automation;
using System.Xml.Serialization;
namespace vsteam_lib.Test
{
[TestClass]
[ExcludeFromCodeCoverage]
public class CommonTests
{
private class HasDateTime
{
[XmlAttribute("createdDate")]
public DateTime CreatedOn { get; set; }
}
private class HasString
{
public string createdDate { get; set; } = "2020-08-27T10:37:32.367Z";
}
[TestMethod]
public void MoveProperties_StringToDateTime()
{
// Arrange
var target = new HasDateTime();
var source = PSObject.AsPSObject(new HasString());
// Act
Common.MoveProperties(target, source);
// Assert
Assert.AreEqual("8/27/2020 10:37:32 am", target.CreatedOn.ToUniversalTime().ToString("M/d/yyyy h:mm:ss tt").ToLower());
}
}
} | c# | 18 | 0.635721 | 128 | 24.410256 | 39 | starcoderdata |
<?php
class Trad {
# Mots
const W_SECONDE = 'seconde';
const W_MINUTE = 'minute';
const W_HOUR = 'heure';
const W_DAY = 'jour';
const W_WEEK = 'semaine';
const W_MONTH = 'mois';
const W_YEAR = 'année';
const W_DECADE = 'décennie';
const W_SECONDE_P = 'secondes';
const W_MINUTE_P = 'minutes';
const W_HOUR_P = 'heures';
const W_DAY_P = 'jours';
const W_WEEK_P = 'semaines';
const W_MONTH_P = 'mois';
const W_YEAR_P = 'années';
const W_DECADE_P = 'décennies';
const W_BACK = 'Retour';
const W_DATE = 'Date : ';
const W_DESC = 'Synopsis : ';
const W_MORE = 'lire…';
const W_ACTIVATED = 'Activé';
const W_DESACTIVATED = 'Désactivé';
const W_SEASON_NB = 'Saison %nb%';
# Phrases
const S_AGO = 'il y a %duration% %pediod%';
const S_NOTFOUND = 'La page que vous recherchez n\'existe pas…';
const S_NO_EPISODE = 'Aucun épisode en attente…';
# Verbes
const V_LOGIN = 'Se connecter';
const V_CONTINUE = 'Continuer';
const V_SAVE = 'Enregistrer';
const V_ADD = 'Ajouter';
const V_SEARCH = 'Rechercher';
# Forms
const F_USERNAME = 'Nom d\'utilisateur :';
    const F_PASSWORD = 'Mot de passe :';
const F_COOKIE = 'Type de connexion :';
const F_COOKIE_FALSE = 'Ordinateur public';
const F_COOKIE_TRUE = 'Ordinateur privé (rester connecté)';
const F_URL = 'URL :';
const F_URL_REWRITING = 'URL rewriting :';
const F_LANGUAGE = 'Langue :';
const F_ADDIC7ED = 'Addic7ed :';
const F_NAME = 'Nom :';
const F_DOWNLOAD = 'Téléchargement :';
const F_TORRENT_DIR = 'Dossier des torrents :';
const F_APIKEY = 'Clé API TheTVDB :';
const F_SHOWNAME = 'Nom de la série :';
    const F_TIP_PASSWORD = 'Laissez vide pour ne pas le changer.';
const F_TIP_URL_REWRITING = 'Laissez vide pour désactiver l\'URL rewriting. Sinon, indiquez le chemin du dossier de Goofy Goose (en commençant et terminant par un "/") par rapport au nom de domaine.';
# Titres
const T_404 = 'Erreur 404 – Page non trouvée';
const T_LOGIN = 'Connexion';
const T_LOGOUT = 'Déconnexion';
const T_INSTALLATION = 'Installation';
const T_SETTINGS = 'Préférences';
const T_GLOBAL_SETTINGS = 'Réglages généraux';
const T_USER_SETTINGS = 'Utilisateur';
const T_HOME = 'À voir';
const T_ADD = 'Ajouter une série';
const T_SEARCH_RESULT = 'Résultats pour « %showname% »';
const T_SHOWS = 'Séries';
const T_DATA = 'Données';
const T_SOON = 'Bientôt…';
const T_TORRENT = 'Torrent';
const T_SUBTITLES = 'Sous-titres';
const T_INFOS = 'Infos';
const T_OPTIONS = 'Options';
# Alertes
const A_ERROR_LOGIN = 'Mauvais nom d\'utilisateur ou mot de passe.';
const A_ERROR_LOGIN_WAIT = 'Merci de patienter %duration% %period% avant de réessayer. Ceci est une protection contre les attaques malveillantes.';
const A_ERROR_FORM = 'Merci de remplir tous les champs.';
const A_ERROR_AJAX = 'Une erreur est survenue. Merci de réessayer.';
const A_ERROR_AJAX_LOGIN = 'Vous êtes déconnecté. Raffraichissez la page, connectez-vous, puis vous pourrez réessayer.';
const A_ERROR_NOSHOW = 'Aucune série n\'a été trouvée.';
const A_ERROR_ADD = 'Impossible d\'ajouter la série. Vérifiez votre clé API de TheTVDB.';
const A_ERROR_NETWORK = 'Impossible de récupérer le fichier distant.';
const A_SUCCESS_INSTALL = 'Goofy Goose est maintenant correctement installé. Connectez-vous pour commencer à l\'utiliser.';
const A_SUCCESS_SETTINGS = 'Les préférences ont bien été enregistrées.';
const A_SUCCESS_ADD = 'La série a bien été ajoutée.';
const A_SUCCESS_UPDATE = 'La série a bien été mise à jour.';
public static $settings = array(
'validate_url' => 'L\'url n\'est pas valide.'
);
public static $days = array(
0 => 'Dimanche',
1 => 'Lundi',
2 => 'Mardi',
3 => 'Mercredi',
4 => 'Jeudi',
5 => 'Vendredi',
6 => 'Samedi'
);
}
?> | php | 9 | 0.650218 | 201 | 29.658537 | 123 | starcoderdata |
func appendTag(b []byte, code byte, length uint64) []byte {
if length < 0x0E {
// Short form, with length embedded in the code byte.
return append(b, code|byte(length))
}
// Long form, with separate length.
b = append(b, code|0x0E)
return appendVarUint(b, length)
} | go | 12 | 0.687273 | 59 | 26.6 | 10 | inline |
package com.datafan.dataintegration.core.util.encryption.sm;
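/** Holds the SM4 cipher state: the operation mode, the 32 expanded round keys and the padding flag. */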
public class Sm4Context {
private int mode;
private long[] sk;
private boolean isPadding;
public Sm4Context() {
this.mode = 1;
this.isPadding = true;
this.sk = new long[32];
}
public int getMode() {
return mode;
}
public void setMode(int mode) {
this.mode = mode;
}
public long[] getSk() {
return sk;
}
public void setSk(long[] sk) {
this.sk = sk;
}
public boolean isPadding() {
return isPadding;
}
public void setPadding(boolean padding) {
isPadding = padding;
}
} | java | 9 | 0.565029 | 60 | 16.3 | 40 | starcoderdata |
from .component import PDFComponent
class PDFPage():
def __init__(self, page):
self.page = page
def getComponentsByType(self, componentType):
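        """Return this page's components of the given type, each wrapped in a PDFComponent."""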
return map(lambda c: PDFComponent(c), filter(lambda c: isinstance(c, componentType), self.page)) | python | 13 | 0.713333 | 104 | 36.625 | 8 | starcoderdata |
int cg_discrete_write(int file_number, int B, int Z, const char * discrete_name,
int *D)
{
cgns_zone *zone;
cgns_discrete *discrete = NULL;
int index;
/* verify input */
if (cgi_check_strlen(discrete_name)) return CG_ERROR;
cg = cgi_get_file(file_number);
if (cg == 0) return CG_ERROR;
if (cgi_check_mode(cg->filename, cg->mode, CG_MODE_WRITE)) return CG_ERROR;
zone = cgi_get_zone(cg, B, Z);
if (zone==0) return CG_ERROR;
/* Overwrite a DiscreteData_t node: */
for (index=0; index<zone->ndiscrete; index++) {
if (strcmp(discrete_name, zone->discrete[index].name)==0) {
/* in CG_MODE_WRITE, children names must be unique */
if (cg->mode==CG_MODE_WRITE) {
cgi_error("Duplicate child name found: %s",discrete_name);
return CG_ERROR;
}
/* overwrite an existing solution */
/* delete the existing solution from file */
if (cgi_delete_node(zone->id, zone->discrete[index].id))
return CG_ERROR;
/* save the old in-memory address to overwrite */
discrete = &(zone->discrete[index]);
/* free memory */
cgi_free_discrete(discrete);
break;
}
}
/* ... or add a FlowSolution_t Node: */
if (index==zone->ndiscrete) {
if (zone->ndiscrete == 0) {
zone->discrete = CGNS_NEW(cgns_discrete, zone->ndiscrete+1);
} else {
zone->discrete = CGNS_RENEW(cgns_discrete, zone->ndiscrete+1, zone->discrete);
}
discrete = &zone->discrete[zone->ndiscrete];
zone->ndiscrete++;
}
(*D) = index+1;
/* save data in memory */
memset(discrete, 0, sizeof(cgns_discrete));
strcpy(discrete->name, discrete_name);
discrete->location=CGNS_ENUMV(Vertex);
/* save data in file */
if (cgi_new_node(zone->id, discrete->name, "DiscreteData_t", &discrete->id,
"MT", 0, 0, 0)) return CG_ERROR;
return CG_OK;
} | c | 14 | 0.553459 | 90 | 32.901639 | 61 | inline |
using UnityEngine;
using UnityEngine.Networking;
using System.Diagnostics;
using System.Collections;
using System;
using System.ComponentModel;
using System.IO;
using System.Threading;
[HelpURL("http://www.middlevr.com/doc/current/#voice-communication")]
public class VRVoiceChatManager : NetworkBehaviour
{
#region Attributes
private NetworkManager m_NetworkManager = null;
private Process m_WebRTCServerProcess = null;
private vrWebView m_VRRTCWebView = null;
private string m_RTCServerURL = "https://localhost:7778";
private bool m_ServerReady = false;
private vrCommand m_RTCConnectionReadyCommand = null;
public bool m_EnableVoiceChat = true;
#endregion
#region MonoBehaviour Integration
protected void OnDisable()
{
KillServerProcess();
}
protected void OnApplicationQuit()
{
if (isServer)
{
KillServerProcess();
}
}
private void OnDestroy()
{
MiddleVR.DisposeObject(ref m_RTCConnectionReadyCommand);
}
#endregion
#region NetworkBehaviour Integration
public override void OnStartServer()
{
if (vrClusterManager.GetInstance().IsClient() || !m_EnableVoiceChat)
return;
StartCoroutine(LaunchWebRTCServer());
}
public override void OnStartClient()
{
if (vrClusterManager.GetInstance().IsClient() || !m_EnableVoiceChat)
return;
if (!isServer)
{
            m_NetworkManager = FindObjectOfType<NetworkManager>();
m_RTCServerURL = "https://" + m_NetworkManager.networkAddress + ":" + (m_NetworkManager.networkPort + 1).ToString();
}
m_RTCConnectionReadyCommand = new vrCommand("RTCConnectionReady", RTCConnectionReadyCommandHandler, null, (uint)VRCommandFlags.VRCommandFlag_DontSynchronizeCluster);
StartCoroutine(CreateWebView());
}
#endregion
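    // Recursively copies a directory tree; used to mirror the bundled WebRTC server into the writable app-data folder.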
private void CopyDirectory(string iSourceDir, string iDestDir)
{
DirectoryInfo dir = new DirectoryInfo(iSourceDir);
DirectoryInfo[] dirs = dir.GetDirectories();
if (!Directory.Exists(iDestDir))
{
Directory.CreateDirectory(iDestDir);
}
// Get the files in the directory and copy them to the new location.
FileInfo[] files = dir.GetFiles();
foreach (FileInfo file in files)
{
string tempPath = Path.Combine(iDestDir, file.Name);
file.CopyTo(tempPath, false);
}
foreach (DirectoryInfo subdir in dirs)
{
string temppath = Path.Combine(iDestDir, subdir.Name);
CopyDirectory(subdir.FullName, temppath);
}
}
private void KillServerProcess()
{
if (m_WebRTCServerProcess != null)
{
Process.Start("cmd", "/C taskkill /f /t /pid " + m_WebRTCServerProcess.Id);
m_WebRTCServerProcess = null;
}
}
private void CopyWebRTCServer()
{
string pathToAppDataWebRTCServer = Path.GetFullPath(MiddleVR.VRKernel.GetAppDataFolder() + "\\MVRRTCServer");
if (!Directory.Exists(pathToAppDataWebRTCServer))
{
string pathToInstalledWebRTCServer = Path.GetFullPath(MiddleVR.VRKernel.GetModuleFolder() + "\\MVRRTCServer");
CopyDirectory(pathToInstalledWebRTCServer, pathToAppDataWebRTCServer);
}
m_ServerReady = true;
}
private IEnumerator LaunchWebRTCServer()
{
MiddleVR.VRLog(1, "[>] Starting copy of the WebRTC server.");
// We currently have to copy the WebRTC server in %appdata%/MiddleVR
// due to Windows not allowing a process to start an other process
// from "Program Files" without admin right.
var copyingServerThread = new Thread(CopyWebRTCServer);
copyingServerThread.Start();
while (!m_ServerReady)
{
yield return null;
}
copyingServerThread.Join();
MiddleVR.VRLog(1, "[<] Ending copy of the WebRTC server.");
        m_NetworkManager = FindObjectOfType<NetworkManager>();
var startInfo = new ProcessStartInfo();
startInfo.CreateNoWindow = true;
startInfo.UseShellExecute = false;
startInfo.WindowStyle = ProcessWindowStyle.Hidden;
string pathToAppDataWebRTCServer = Path.GetFullPath(MiddleVR.VRKernel.GetAppDataFolder() + "\\MVRRTCServer");
startInfo.WorkingDirectory = pathToAppDataWebRTCServer;
startInfo.FileName = pathToAppDataWebRTCServer + "\\node.exe";
startInfo.Arguments = pathToAppDataWebRTCServer + "\\server.js " + (m_NetworkManager.networkPort + 1).ToString();
m_WebRTCServerProcess = new Process();
m_WebRTCServerProcess.StartInfo = startInfo;
try
{
if (!m_WebRTCServerProcess.Start())
{
MiddleVR.VRLog(1, "[X] An error occured when launching the voice chat server, process already running.");
}
}
catch (Exception e)
{
MiddleVR.VRLog(1, "[X] An error occured when launching the voice chat server.\n" + e.Message);
}
m_RTCServerURL = "https://" + m_NetworkManager.networkAddress + ":" + (m_NetworkManager.networkPort + 1).ToString();
}
private IEnumerator CreateWebView()
{
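        // Poll the RTC signalling server until it answers (for up to ~60 seconds), then open a web view pointed at it.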
bool serverReady = false;
float time = Time.time;
do
{
WWW www = new WWW(m_RTCServerURL);
yield return www;
if (string.IsNullOrEmpty(www.error) ||
(www.error.IndexOf("Could not resolve host", 0) < 0 &&
www.error.IndexOf("Connection refused", 0) < 0))
{
serverReady = true;
}
else
{
yield return new WaitForSeconds(10.0f);
}
} while (!serverReady && Time.time - time < 60.0f);
if (serverReady)
{
m_VRRTCWebView = new vrWebView("", m_RTCServerURL);
}
}
private vrValue RTCConnectionReadyCommandHandler(vrValue iValue)
{
if (isServer)
{
m_VRRTCWebView.ExecuteJavascript("OpenRoom('MVRAudioChat', '" + m_RTCServerURL + "');");
}
else
{
m_VRRTCWebView.ExecuteJavascript("JoinRoom('MVRAudioChat', '" + m_RTCServerURL + "');");
}
return null;
}
} | c# | 18 | 0.616475 | 173 | 30.024155 | 207 | starcoderdata |
# -*- coding: utf-8 -*-
import copy
from unittest import TestCase
from unittest.mock import Mock, call, patch
import torch
from steganogan import decoders
from tests.utils import assert_called_with_tensors
class TestBasicDecoder(TestCase):
class TestDecoder(decoders.BasicDecoder):
def __init__(self):
pass
def setUp(self):
self.test_decoder = self.TestDecoder()
@patch('steganogan.decoders.nn.Conv2d', autospec=True)
def test__covn2d(self, conv2d_mock):
"""Conv2d must be called with given args and kernel_size=3 and padding=1"""
# run
result = self.test_decoder._conv2d(2, 4)
# asserts
assert result == conv2d_mock.return_value
conv2d_mock.assert_called_once_with(
in_channels=2,
out_channels=4,
kernel_size=3,
padding=1
)
@patch('steganogan.decoders.nn.Sequential')
@patch('steganogan.decoders.nn.Conv2d')
@patch('steganogan.decoders.nn.BatchNorm2d')
def test___init__(self, batchnorm_mock, conv2d_mock, sequential_mock):
"""Test the init params and that the layers are created correctly"""
# run
decoders.BasicDecoder(2, 5)
# assert
expected_batch_calls = [call(5), call(5), call(5)]
assert batchnorm_mock.call_args_list == expected_batch_calls
expected_conv_calls = [
call(in_channels=3, out_channels=5, kernel_size=3, padding=1),
call(in_channels=5, out_channels=5, kernel_size=3, padding=1),
call(in_channels=5, out_channels=5, kernel_size=3, padding=1),
call(in_channels=5, out_channels=2, kernel_size=3, padding=1),
]
assert conv2d_mock.call_args_list == expected_conv_calls
def test_upgrade_legacy_without_version(self):
"""Upgrade legacy must create self._models from conv1, conv2, conv3, conv4"""
# setup
self.test_decoder.layers = Mock(return_value=torch.Tensor([[5, 6], [7, 8]]))
# run
self.test_decoder.upgrade_legacy()
# assert
assert self.test_decoder._models == [self.test_decoder.layers]
assert self.test_decoder.version == '1'
@patch('steganogan.decoders.nn.Sequential', autospec=True)
def test_upgrade_legacy_with_version_1(self, sequential_mock):
"""The object must be the same and not changed by the method"""
# setup
decoder = decoders.BasicDecoder(1, 1)
expected = copy.deepcopy(decoder)
# run
decoder.upgrade_legacy()
# assert
assert decoder.__dict__ == expected.__dict__
def test_forward_1_layer(self):
"""If there is only one layer it must be called with image as the only argument."""
# setup
layer1 = Mock(return_value=torch.Tensor([[5, 6], [7, 8]]))
self.test_decoder._models = [layer1]
# run
image = torch.Tensor([[1, 2], [3, 4]])
result = self.test_decoder.forward(image)
# assert
assert (result == torch.Tensor([[5, 6], [7, 8]])).all()
call_1 = call(torch.Tensor([[1, 2], [3, 4]]))
assert_called_with_tensors(layer1, [call_1])
def test_forward_more_than_2_layers(self):
"""If there are more than 2 layers, they must be called adding data to each result"""
# setup
layer1 = Mock(return_value=torch.Tensor([[5, 6], [7, 8]]))
layer2 = Mock(return_value=torch.Tensor([[9, 10], [11, 12]]))
layer3 = Mock(return_value=torch.Tensor([[13, 14], [15, 16]]))
self.test_decoder._models = [layer1, layer2, layer3]
# run
image = torch.Tensor([[1, 2], [3, 4]])
result = self.test_decoder.forward(image)
# asserts
call_layer_1 = call(torch.Tensor([[1, 2], [3, 4]]))
call_layer_2 = call(torch.Tensor([[5, 6], [7, 8]]))
call_layer_3 = call(torch.Tensor([[5, 6, 9, 10], [7, 8, 11, 12]]))
assert_called_with_tensors(layer1, [call_layer_1])
assert_called_with_tensors(layer2, [call_layer_2])
assert_called_with_tensors(layer3, [call_layer_3])
assert (result == torch.Tensor([[13, 14], [15, 16]])).all()
class TestDenseDecoder(TestCase):
class TestDecoder(decoders.DenseDecoder):
def __init__(self):
pass
def test_upgrade_legacy_without_version(self):
"""Upgrade legacy must create self._models from conv1, conv2, conv3, conv4"""
# setup
test_decoder = self.TestDecoder() # instance an empty decoder
test_decoder.conv1 = Mock(return_value=torch.Tensor([[5, 6], [7, 8]]))
test_decoder.conv2 = Mock(return_value=torch.Tensor([[9, 10], [11, 12]]))
test_decoder.conv3 = Mock(return_value=torch.Tensor([[13, 14], [15, 16]]))
test_decoder.conv4 = Mock(return_value=torch.Tensor([[17, 18], [19, 20]]))
# run
test_decoder.upgrade_legacy()
# assert
expected_models = [
test_decoder.conv1,
test_decoder.conv2,
test_decoder.conv3,
test_decoder.conv4,
]
assert test_decoder._models == expected_models
assert test_decoder.version == '1'
@patch('steganogan.decoders.nn.Sequential', autospec=True)
def test_upgrade_legacy_with_version_1(self, sequential_mock):
"""The object must be the same and not changed by the method"""
# setup
decoder = decoders.DenseDecoder(1, 1)
expected = copy.deepcopy(decoder)
# run
decoder.upgrade_legacy()
# assert
assert decoder.__dict__ == expected.__dict__
@patch('steganogan.decoders.nn.Sequential')
@patch('steganogan.decoders.nn.Conv2d')
@patch('steganogan.decoders.nn.BatchNorm2d')
def test___init__(self, batchnorm_mock, conv2d_mock, sequential_mock):
"""Test the init params and that the layers are created correctly"""
# run
decoders.DenseDecoder(2, 5)
# assert
expected_batch_calls = [call(5), call(5), call(5)]
assert batchnorm_mock.call_args_list == expected_batch_calls
expected_conv_calls = [
call(in_channels=3, out_channels=5, kernel_size=3, padding=1),
call(in_channels=5, out_channels=5, kernel_size=3, padding=1),
call(in_channels=10, out_channels=5, kernel_size=3, padding=1),
call(in_channels=15, out_channels=2, kernel_size=3, padding=1),
]
assert conv2d_mock.call_args_list == expected_conv_calls | python | 14 | 0.6024 | 93 | 33.460733 | 191 | starcoderdata |
/**
* Copyright 2014 Flipkart Internet Pvt. Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.flipkart.foxtrot.core.querystore.actions;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.flipkart.foxtrot.common.Document;
import com.flipkart.foxtrot.common.group.GroupRequest;
import com.flipkart.foxtrot.common.group.GroupResponse;
import com.flipkart.foxtrot.common.query.Filter;
import com.flipkart.foxtrot.common.query.general.EqualsFilter;
import com.flipkart.foxtrot.common.query.numeric.GreaterThanFilter;
import com.flipkart.foxtrot.core.TestUtils;
import com.flipkart.foxtrot.core.exception.ErrorCode;
import com.flipkart.foxtrot.core.exception.FoxtrotException;
import com.flipkart.foxtrot.core.querystore.impl.ElasticsearchQueryStore;
import com.google.common.collect.Maps;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import java.util.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.doReturn;
/**
* Created by rishabh.goyal on 28/04/14.
*/
public class GroupActionTest extends ActionTest {
@BeforeClass
public static void setUp() throws Exception {
        List<Document> documents = TestUtils.getGroupDocuments(getMapper());
getQueryStore().save(TestUtils.TEST_TABLE_NAME, documents);
getElasticsearchConnection().getClient()
.admin()
.indices()
.prepareRefresh("*")
.execute()
.actionGet();
getTableMetadataManager().getFieldMappings(TestUtils.TEST_TABLE_NAME, true, true);
((ElasticsearchQueryStore)getQueryStore()).getCardinalityConfig()
.setMaxCardinality(15000);
getTableMetadataManager().updateEstimationData(TestUtils.TEST_TABLE_NAME, 1397658117000L);
}
@Ignore
@Test
public void testGroupActionSingleQueryException() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
groupRequest.setNesting(Collections.singletonList("os"));
doReturn(null).when(getElasticsearchConnection())
.getClient();
try {
getQueryExecutor().execute(groupRequest);
fail();
} catch (FoxtrotException ex) {
ex.printStackTrace();
assertEquals(ErrorCode.ACTION_EXECUTION_ERROR, ex.getCode());
}
}
@Test
public void testGroupActionSingleFieldNoFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
groupRequest.setNesting(Collections.singletonList("os"));
Map<String, Object> response = Maps.newHashMap();
response.put("android", 7L);
response.put("ios", 4L);
GroupResponse actualResult = GroupResponse.class.cast(getQueryExecutor().execute(groupRequest));
assertEquals(response, actualResult.getResult());
}
@Test
public void testGroupActionSingleFieldEmptyFieldNoFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
groupRequest.setNesting(Collections.singletonList(""));
try {
getQueryExecutor().execute(groupRequest);
fail();
} catch (FoxtrotException ex) {
ex.printStackTrace();
assertEquals(ErrorCode.MALFORMED_QUERY, ex.getCode());
}
}
@Test
public void testGroupActionSingleFieldSpecialCharactersNoFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
groupRequest.setNesting(Arrays.asList(""));
try {
getQueryExecutor().execute(groupRequest);
fail();
} catch (FoxtrotException ex) {
ex.printStackTrace();
assertEquals(ErrorCode.MALFORMED_QUERY, ex.getCode());
}
}
@Test
public void testGroupActionSingleFieldHavingSpecialCharactersWithFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
EqualsFilter equalsFilter = new EqualsFilter();
equalsFilter.setField("device");
equalsFilter.setValue("nexus");
        groupRequest.setFilters(Collections.<Filter>singletonList(equalsFilter));
groupRequest.setNesting(Arrays.asList("!@#$%^&*()"));
Map<String, Object> response = Maps.newHashMap();
GroupResponse actualResult = GroupResponse.class.cast(getQueryExecutor().execute(groupRequest));
assertEquals(response, actualResult.getResult());
}
@Test
public void testGroupActionSingleFieldWithFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
EqualsFilter equalsFilter = new EqualsFilter();
equalsFilter.setField("device");
equalsFilter.setValue("nexus");
        groupRequest.setFilters(Collections.<Filter>singletonList(equalsFilter));
groupRequest.setNesting(Arrays.asList("os"));
Map<String, Object> response = Maps.newHashMap();
response.put("android", 5L);
response.put("ios", 1L);
GroupResponse actualResult = GroupResponse.class.cast(getQueryExecutor().execute(groupRequest));
assertEquals(response, actualResult.getResult());
}
@Test
public void testGroupActionTwoFieldsNoFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
groupRequest.setNesting(Arrays.asList("os", "device"));
Map<String, Object> response = Maps.newHashMap();
response.put("android", new HashMap<String, Object>() {{
put("nexus", 5L);
put("galaxy", 2L);
}});
response.put("ios", new HashMap<String, Object>() {{
put("nexus", 1L);
put("ipad", 2L);
put("iphone", 1L);
}});
GroupResponse actualResult = GroupResponse.class.cast(getQueryExecutor().execute(groupRequest));
assertEquals(response, actualResult.getResult());
}
@Test
public void testGroupActionTwoFieldsWithFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
groupRequest.setNesting(Arrays.asList("os", "device"));
GreaterThanFilter greaterThanFilter = new GreaterThanFilter();
greaterThanFilter.setField("battery");
greaterThanFilter.setValue(48);
        groupRequest.setFilters(Collections.<Filter>singletonList(greaterThanFilter));
Map<String, Object> response = Maps.newHashMap();
response.put("android", new HashMap<String, Object>() {{
put("nexus", 3L);
put("galaxy", 2L);
}});
response.put("ios", new HashMap<String, Object>() {{
put("ipad", 1L);
}});
GroupResponse actualResult = GroupResponse.class.cast(getQueryExecutor().execute(groupRequest));
assertEquals(response, actualResult.getResult());
}
@Test
public void testGroupActionMultipleFieldsNoFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
groupRequest.setNesting(Arrays.asList("os", "device", "version"));
Map<String, Object> response = Maps.newHashMap();
final Map<String, Object> nexusResponse = new HashMap<String, Object>() {{
put("1", 2L);
put("2", 2L);
put("3", 1L);
}};
final Map<String, Object> galaxyResponse = new HashMap<String, Object>() {{
put("2", 1L);
put("3", 1L);
}};
response.put("android", new HashMap<String, Object>() {{
put("nexus", nexusResponse);
put("galaxy", galaxyResponse);
}});
final Map<String, Object> nexusResponse2 = new HashMap<String, Object>() {{
put("2", 1L);
}};
final Map<String, Object> iPadResponse = new HashMap<String, Object>() {{
put("2", 2L);
}};
final Map<String, Object> iPhoneResponse = new HashMap<String, Object>() {{
put("1", 1L);
}};
response.put("ios", new HashMap<String, Object>() {{
put("nexus", nexusResponse2);
put("ipad", iPadResponse);
put("iphone", iPhoneResponse);
}});
GroupResponse actualResult = GroupResponse.class.cast(getQueryExecutor().execute(groupRequest));
assertEquals(response, actualResult.getResult());
}
@Test
public void testGroupActionMultipleFieldsWithFilter() throws FoxtrotException, JsonProcessingException {
GroupRequest groupRequest = new GroupRequest();
groupRequest.setTable(TestUtils.TEST_TABLE_NAME);
groupRequest.setNesting(Arrays.asList("os", "device", "version"));
GreaterThanFilter greaterThanFilter = new GreaterThanFilter();
greaterThanFilter.setField("battery");
greaterThanFilter.setValue(48);
        groupRequest.setFilters(Collections.<Filter>singletonList(greaterThanFilter));
Map<String, Object> response = Maps.newHashMap();
final Map<String, Object> nexusResponse = new HashMap<String, Object>() {{
put("2", 2L);
put("3", 1L);
}};
final Map<String, Object> galaxyResponse = new HashMap<String, Object>() {{
put("2", 1L);
put("3", 1L);
}};
response.put("android", new HashMap<String, Object>() {{
put("nexus", nexusResponse);
put("galaxy", galaxyResponse);
}});
final Map<String, Object> iPadResponse = new HashMap<String, Object>() {{
put("2", 1L);
}};
response.put("ios", new HashMap<String, Object>() {{
put("ipad", iPadResponse);
}});
GroupResponse actualResult = GroupResponse.class.cast(getQueryExecutor().execute(groupRequest));
assertEquals(response, actualResult.getResult());
}
} | java | 15 | 0.663169 | 152 | 39.216312 | 282 | starcoderdata |
@Override
public void run() {
// initial sleep to wait for the systems to spin up before we start logging
if (delayTimeMillis >= 0) {
try {
Thread.sleep(delayTimeMillis);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
// we do a sleep here so no need to test for Thread.isInterrupted()
while (true) {
long lastStartMillis = System.currentTimeMillis();
try {
metricsManager.persist();
} catch (IOException ioe) {
// ignore I guess
}
// sleep the number of millis so that we start the persisting at the same period each time
long sleepMillis = lastStartMillis + periodTimeMillis - System.currentTimeMillis();
if (sleepMillis <= 0) {
if (Thread.currentThread().isInterrupted()) {
return;
}
} else {
try {
Thread.sleep(sleepMillis);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;
}
}
}
} | java | 14 | 0.650052 | 93 | 25.054054 | 37 | inline |
<div class="page-header">
    <h1>Admin Panel</h1>
</div>
<p>Manage the user interface from here. It is kept simple for basic learning.</p>
<div class="col-sm-offset-1">
    <a href="<?php echo $this->to_url('theme-settings'); ?>">Manage Theme</a><br>
    <a href="<?php echo $this->to_url('admin-user-list'); ?>">Manage Users</a><br>
    <a href="#">Manage Menus - (will be added in next version)</a><br>
    <a href="#">Manage Widgets - (will be added in next version)</a>
</div>
'use strict';
describe('Dish', function(){
var dish;
beforeEach(function(){
dish = new Dish('Pizza', 10);
});
describe('.name', function(){
it('returns the name of the dish', function(){
expect(dish.name).toEqual('Pizza');
});
});
describe('.price', function(){
it('returns the price of the dish', function(){
expect(dish.price).toEqual(10);
});
});
describe('._setName', function(){
it('allows the dishes name to be changed', function(){
dish._setName('Pineapple Pizza');
expect(dish.name).toEqual('Pineapple Pizza');
});
})
describe('._setPrice', function(){
it('allows the dishes price to be changed', function(){
dish._setPrice(15);
expect(dish.price).toEqual(15);
});
});
}); | javascript | 19 | 0.594859 | 59 | 21.081081 | 37 | starcoderdata |
package me.bristermitten.privatemines.view;
import com.cryptomorin.xseries.XMaterial;
import me.bristermitten.privatemines.PrivateMines;
import me.bristermitten.privatemines.config.PMConfig;
import me.bristermitten.privatemines.config.menu.MenuConfig;
import me.bristermitten.privatemines.config.menu.MenuSpec;
import me.bristermitten.privatemines.data.PrivateMine;
import me.bristermitten.privatemines.service.MineStorage;
import org.apache.commons.lang.Validate;
import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.event.inventory.InventoryClickEvent;
public class PrivateMineMenu {
private static MenuSpec original;
/*
Manages the /pmine Menu
*/
public PrivateMineMenu(Player p, PrivateMines plugin, MenuConfig config, MineStorage storage, PMConfig pmConfig,
MenuFactory factory) {
Validate.notNull(p, "Player");
Validate.notNull(plugin, "PrivateMines");
Validate.notNull(config, "MenuConfig");
Validate.notNull(storage, "MineStorage");
Validate.notNull(pmConfig, "PMConfig");
if (original == null) {
original = new MenuSpec();
original.addAction("go-to-mine",
e -> goToMine(storage, e));
original.addAction("view-mines",
e -> openMinesMenu(plugin, config, storage, e));
original.addAction("change-block",
e -> openChangeBlockMenu(plugin, config, storage, pmConfig, e));
original.addAction("change-schematic",
e -> factory.createAndOpenThemeMenu((Player) e.getWhoClicked()));
original.addAction("change-reset-style",
e -> openResetStyleMenu(plugin, config, storage, pmConfig, e));
original.addAction("change-reset-percent",
e -> openChangeResetPercentMenu(plugin, config, storage, pmConfig, e));
}
PrivateMine mine = storage.get(p);
Material type = mine != null ? XMaterial.valueOf(String.valueOf(mine.getMineBlocks()
.get(0)
.getType())).parseMaterial() :
XMaterial.STONE.parseMaterial();
original.loadFrom(config.configurationForName("Main"), "%BLOCK%", type);
MenuSpec menuSpec = new MenuSpec();
menuSpec.copyFrom(original);
menuSpec.register(plugin);
p.openInventory(menuSpec.genMenu());
}
/*
Create the click event for going to the mine.
*/
private void goToMine(MineStorage storage, InventoryClickEvent e) {
Player player = (Player) e.getWhoClicked();
if (storage.hasMine(player)) {
storage.get((Player) e.getWhoClicked()).teleport();
}
}
private void openMinesMenu(PrivateMines plugin, MenuConfig config, MineStorage storage, InventoryClickEvent e) {
new MinesMenu((Player) e.getWhoClicked(), config, plugin, storage);
}
private void openChangeBlockMenu(PrivateMines plugin, MenuConfig config, MineStorage storage, PMConfig pmConfig,
InventoryClickEvent e) {
new ChangeBlockMenu((Player) e.getWhoClicked(), plugin, pmConfig, config, storage);
}
private void openResetStyleMenu(PrivateMines plugin, MenuConfig config, MineStorage storage, PMConfig pmConfig,
InventoryClickEvent e) {
new ChangeResetStyleMenu((Player) e.getWhoClicked(), plugin, pmConfig, config, storage);
}
private void openChangeResetPercentMenu(PrivateMines plugin, MenuConfig config, MineStorage storage, PMConfig pmConfig,
InventoryClickEvent e) {
new ChangeMineResetPercentMenu((Player) e.getWhoClicked(), plugin, pmConfig, config, storage);
}
} | java | 17 | 0.739811 | 120 | 35.408602 | 93 | starcoderdata |
from django.urls import path
from bookstore import views
urlpatterns = [
path('all_book',views.all_book),
path('add_book',views.add_book),
    path('update_book/<int:id>',views.update_book),
path('delete_book',views.delete_book),
] | python | 7 | 0.69863 | 52 | 25.545455 | 11 | starcoderdata |
public override Table CreateGroupTable(long groupIndex)
{
if (IsGroupDegenerate(groupIndex))
{
// if sub table is an expand table, act as pass-through
if (m_expandTable != null)
{
var i = m_Groups[groupIndex].m_GroupIndice[0];
return m_expandTable.CreateGroupTable(i);
}
}
if (m_createGroupTable != null)
{
return m_createGroupTable(this, m_Groups[groupIndex], groupIndex);
}
int subGroupColumn = m_GroupedColumnFirst + 1;
if (subGroupColumn < m_GroupedColumnLast)
{
GroupedTable subTable = new GroupedTable(m_table, m_Groups[groupIndex].m_GroupIndice, subGroupColumn, m_GroupedColumnLast, m_ColumnOrder, m_SortOrder);
return subTable;
}
else if (subGroupColumn < m_ColumnOrder.Length)
{
//create a sorted table only
SortedTable subTable = new SortedTable(m_table, m_ColumnOrder, m_SortOrder, subGroupColumn, m_Groups[groupIndex].m_GroupIndice);
return subTable;
}
else
{
//create indexed table
IndexedTable subTable = new IndexedTable(m_table, m_Groups[groupIndex].m_GroupIndice);
return subTable;
}
} | c# | 18 | 0.532236 | 167 | 41.911765 | 34 | inline |
private void deleteCordysUser(String username, String thisUser, String postfix)
throws Exception
{
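        // Build a DeleteRecursive LDAP SOAP request for the user's entry under "cn=authenticated users" and send it, checking the reply for SOAP faults.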
int deleteResponseNode = 0;
int deleteRequestNode = 0;
try
{
Connector tmpConnector = getConfiguration().getConnector();
int deleteMethodNode = tmpConnector.createSOAPMethodEx("http://schemas.cordys.com/1.0/ldap",
"DeleteRecursive",
/*organization, */ "o=SYSTEM," + postfix, // NOTE : fool system to think we are in the SYSTEM organisation
/*organizationalUser,*/ thisUser +
",cn=organizational users,o=SYSTEM," +
postfix, // NOTE : current user
// must be a user of
// SYSTEM
null, null);
deleteRequestNode = SOAPMessage.getRootEnvelopeNode(deleteMethodNode);
// alwasy create tupple/old
com.eibus.xml.nom.Document document = Node.getDocument(deleteRequestNode);
int tuple = document.createElement("tuple", deleteMethodNode);
int newnode = document.createElement("old", tuple);
int entryNode = Node.createElement("entry", newnode);
Node.setAttribute(entryNode, "dn",
"cn=" + username + ",cn=authenticated users," + postfix);
// send it
deleteResponseNode = tmpConnector.sendAndWait(deleteRequestNode);
int deleteResponseBody = SOAPMessage.getRootBodyNode(deleteResponseNode);
RequestUtil.checkSoapResponseforError(deleteResponseBody);
}
finally
{
Node.delete(deleteRequestNode);
Node.delete(Node.getRoot(deleteResponseNode));
}
} | java | 12 | 0.474715 | 173 | 55.307692 | 39 | inline |
using System;
using System.Collections.Generic;
using Microsoft.Xna.Framework;
namespace Wisp.Components
{
public class Collidable : Component
{
public CollisionBox collisionBox;
public int Mask { get; set; } = -1;
public Point Size { get; set; } = Point.Zero;
public Vector2 Pos { get; set; }
        private HashSet<Node> previousCollisions = new HashSet<Node>();
        private HashSet<Node> currentCollisions = new HashSet<Node>();
public Collidable()
{
collisionBox = new CollisionBox();
}
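        // Rebuilds this collidable's box from its parent's scene position, then tests it against every other collidable and raises a CollisionEvent per overlap.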
public override void Update(Scene scene)
{
var current = this;
var nodeManager = scene.NodeManager;
var entity = Parent;
Point size;
if (current.Size == Point.Zero) current.Size = entity.Size;
size = current.Size;
current.collisionBox.UpdateBox(entity.ScenePos + current.Pos, new Vector2(size.X, size.Y));
if (IsStatic(entity)) return;
            var collidableComponents = nodeManager.GetComponents<Collidable>();
foreach (Collidable other in collidableComponents)
{
if (current != other)
{
if (current.Mask >= 0 && other.Mask >= 0 && current.Mask == other.Mask)
continue;
var box = current.collisionBox;
var collision = box.CheckCollision(other.collisionBox);
if (collision != null)
{
var collisionEvent = new CollisionEvent()
{
source = entity,
target = other.Parent,
collision = collision,
IsNew = !previousCollisions.Contains(other.Parent)
};
currentCollisions.Add(other.Parent);
scene.AddEvent(collisionEvent);
}
}
}
previousCollisions.Clear();
foreach (var collision in currentCollisions) previousCollisions.Add(collision);
currentCollisions.Clear();
base.Update(scene);
}
private bool IsStatic(Node node)
{
if (node.parent != null) return IsStatic(node.parent);
return !node.HasComponent
}
}
} | c# | 25 | 0.518254 | 103 | 30.898734 | 79 | starcoderdata |
def __init__(self, config, benchmark, model_name, verbosity=1):
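        """Initialize bookkeeping state and compile the model sized to the benchmark's PC and page vocabularies."""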
self.config = config
self.benchmark = benchmark
self.verbosity = verbosity
self._callbacks = []
self.callbacks = None
self.step = 0
self.epoch = 0
self.phase = 0
self.backups = {}
self.batch_logger = None
self.lr_decay = None
self.tensorboard_path = None
self.num_offsets = (1 << config.offset_bits)
self.model_path = None
self.monitor = None
self.recreate_chkpt = False
self.chkpt = None
# Create a model
print('DEBUG : Creating a model with...')
print(' pc vocab size :', benchmark.num_pcs())
print(' page vocab size :', benchmark.num_pages())
self.model = get_model(model_name).compile_model(config, benchmark.num_pcs(), benchmark.num_pages())
self._compile_metrics()
self.backups['optim'] = self.model.optimizer | python | 9 | 0.57551 | 108 | 36.730769 | 26 | inline |
from .window import Window
from .turtle import Turtle
from .pentool import Pen
from ._context import _Context
import gc,sys
def test_turtle():
w = Window()
t = Turtle(w)
t.speed = 3
t.stroke = 3
t.dash = (10,10)
print(w.turtles)
print(w.pens)
input('Press return')
print('A')
t.forward(100)
t.right(90)
t.forward(100)
input('Press return')
print('B')
t.color = 'blue'
t.backward(200)
input('Press return')
print('C')
t.clear()
t.dash = None
input('Press return')
print('D')
t.forward(200)
input('Press return')
print('E')
w.clear()
input('Press return')
print('F')
w.dispose()
def test_pen():
w = Window()
t = Pen(w)
t.speed = 3
t.stroke = 3
t.dash = (10,10)
print(w.turtles)
print(w.pens)
input('Press return')
print('A')
t.drawTo(100,100)
t.solid = True
input('Press return')
print('B')
t.drawLine(0,100)
t.drawLine(100,0)
t.drawLine(0,-100)
input('Press return')
print('C')
t.fillcolor = 'green'
input('Press return')
print('D')
t.move(-100,-100)
t.drawOval(25,25)
input('Press return')
print('E')
w.clear()
input('Press return')
print('F')
w.dispose()
#test_pen()
test_turtle() | python | 7 | 0.56427 | 59 | 18.138889 | 72 | starcoderdata |
import pytest
#import networkx.algorithms.connectivity.tests.test_edge_kcomponents
#from graphscope.nx.utils.compat import import_as_graphscope_nx
#import_as_graphscope_nx(
# networkx.algorithms.connectivity.tests.test_edge_kcomponents,
# decorators=pytest.mark.usefixtures("graphscope_session")
#)
# Assumed imports: with the graphscope wiring above commented out, take the
# symbols under test directly from networkx.
import networkx as nx
from networkx.algorithms.connectivity.edge_kcomponents import (
    EdgeComponentAuxGraph,
    bridge_components,
)
@pytest.mark.skip(reason="not support multigraph")
def test_not_implemented():
G = nx.MultiGraph()
pytest.raises(nx.NetworkXNotImplemented, EdgeComponentAuxGraph.construct, G)
pytest.raises(nx.NetworkXNotImplemented, nx.k_edge_components, G, k=2)
pytest.raises(nx.NetworkXNotImplemented, nx.k_edge_subgraphs, G, k=2)
pytest.raises(nx.NetworkXNotImplemented, bridge_components, G)
pytest.raises(nx.NetworkXNotImplemented, bridge_components, nx.DiGraph()) | python | 9 | 0.776119 | 80 | 37.285714 | 21 | starcoderdata |
#include<bits/stdc++.h>
using namespace std;
int main()
{
int t[3];
cin>>t[0]>>t[1]>>t[2];
sort(t,t+3);
cout<<t[0]+t[1];
return 0;
} | c++ | 9 | 0.554745 | 23 | 12.8 | 10 | codenet |
const gulp = require("gulp");
const sass = require("gulp-sass");
sass.compiler = require('sass');
const logError = sass.logError;
const concat = require("gulp-concat");
const rev = require("gulp-rev");
const _manifest = rev.manifest;
const revRewrite = require("gulp-rev-rewrite");
const babel = require("gulp-babel");
const del = require("del");
const sync = del.sync;
const autoprefixer = require("gulp-autoprefixer");
const { init, write } = require("gulp-sourcemaps");
const uglify = require("gulp-uglify");
const pump = require("pump");
const cleanCSS = require("gulp-clean-css");
const imagemin = require("gulp-imagemin");
const cache = require("gulp-cache");
const browserSync = require("browser-sync").create();
// sources
const sassSrc = "./src/styles/main.scss";
const sassDarkSrc = "./src/styles/main-dark.scss";
const sassFiles = "./src/styles/**/*.scss";
const assetsSrc = "./src/assets/**/";
const htmlSrc = "./src/**/*.html";
const jsSrc = "./src/**/*.js";
// dist
const dist = "./dist";
const htmlDest = "./dist/**/*.html";
const assets = "./dist/assets";
const build = "./dist/build/";
// temp
const temp = "./dist/build/temp/";
const jsTemp = "./dist/build/temp/js";
const cssTemp = "./dist/build/temp/css";
// vendor js
const jquery = "node_modules/jquery/dist/jquery.min.js";
const popperJS = "node_modules/popper.js/dist/umd/popper.min.js";
const bootstrapJS = "node_modules/bootstrap/dist/js/bootstrap.min.js";
// hashing task
gulp.task("hash", function () {
return gulp
.src([temp + "**/*.js", temp + "**/*.css"])
.pipe(rev())
.pipe(gulp.dest(dist))
.pipe(_manifest())
.pipe(gulp.dest(assets));
});
// cleaning dist folder
gulp.task(
"clean-build",
gulp.series("hash", () => {
return del([build]);
})
);
// inject hashed files to html
gulp.task(
"update",
gulp.series("clean-build", function () {
const manifest = gulp.src(assets + "/rev-manifest.json");
return gulp
.src(htmlDest)
.pipe(revRewrite({ manifest }))
.pipe(gulp.dest(dist));
})
);
// Compile sass into CSS
gulp.task("build-sass", () => {
return gulp
.src(sassSrc)
.pipe(init())
.pipe(sass().on("error", logError))
.pipe(autoprefixer())
.pipe(concat("style.css"))
.pipe(write())
.pipe(cleanCSS({ compatibility: "ie8" }))
.pipe(gulp.dest(cssTemp))
.pipe(browserSync.stream());
});
gulp.task("build-dark-sass", () => {
return gulp
.src(sassDarkSrc)
.pipe(init())
.pipe(sass().on("error", logError))
.pipe(autoprefixer())
.pipe(concat("style-dark.css"))
.pipe(write())
.pipe(cleanCSS({ compatibility: "ie8" }))
.pipe(gulp.dest(cssTemp))
.pipe(browserSync.stream());
});
// bundle dependencies js
gulp.task("vendor-js", () => {
return gulp
.src([jquery, popperJS, bootstrapJS])
.pipe(concat("vendor-bundle.js"))
.pipe(gulp.dest(build));
});
// babel build task
gulp.task("build-js", () => {
return gulp
.src(jsSrc)
.pipe(
babel({
presets: ["@babel/env"]
})
)
.pipe(concat("main.js"))
.pipe(gulp.dest(build));
});
// bundle all js
gulp.task(
"bundle-js",
gulp.series(gulp.parallel("vendor-js", "build-js"), () => {
return gulp
.src([build + "vendor-bundle.js", build + "main.js"])
.pipe(init())
.pipe(concat("bundle.js"))
.pipe(write())
.pipe(gulp.dest(jsTemp));
})
);
// uglifyJS
gulp.task(
"compress-js",
gulp.series("bundle-js", function (cb) {
pump([gulp.src(temp + "**/*.js"), uglify(), gulp.dest(temp)], cb);
})
);
// images optimising
gulp.task("optimise-img", () => {
return gulp
.src(assetsSrc + "*.+(png|jpg|jpeg|gif|svg)")
.pipe(
cache(
imagemin({
interlaced: true
})
)
)
.pipe(gulp.dest(assets));
});
// html files build
gulp.task(
"build-html",
gulp.series(function () {
return gulp.src(htmlSrc).pipe(gulp.dest(dist));
})
);
// build and minify
gulp.task(
"build-compress",
gulp.parallel(
"build-html",
"build-sass",
"build-dark-sass",
"compress-js",
"optimise-img"
)
);
// build files
gulp.task(
"build-all",
gulp.parallel(
"build-html",
"build-sass",
"build-dark-sass",
"bundle-js",
"optimise-img"
)
);
// clean previous build
gulp.task("clean", function () {
return del([dist]);
});
// clean html files for update
gulp.task("clean-html", done => {
sync([dist + "/*.html"]);
done();
});
// delete assets except js and css files
gulp.task("delete-assets", () => {
return del([assets + "/*", "!./dist/assets/rev-manifest.json"]);
});
// watching scss/js/html files
gulp.task("watch", function (done) {
gulp.watch(jsSrc, gulp.series("live-reload"));
gulp.watch(assetsSrc, gulp.series("live-reload"));
gulp.watch(sassFiles, gulp.series("build-sass", "build-dark-sass", "live-reload"));
gulp.watch(htmlSrc).on(
"change",
gulp.series(
"clean-html",
"build-html",
"update",
"delete-assets",
"optimise-img",
done => {
browserSync.reload();
done();
}
)
);
done();
});
// Static Server
gulp.task(
"serve",
gulp.parallel("watch", () => {
browserSync.init({
server: {
baseDir: "./dist/"
}
});
})
);
// live reloading
gulp.task(
"live-reload",
gulp.series("clean", "build-all", "update", function (done) {
browserSync.reload();
done();
})
);
// build and serve
exports.default = gulp.series("clean", "build-all", "update", "serve");
// build for production
exports.build = gulp.series("clean", "build-compress", "update"); | javascript | 22 | 0.597414 | 85 | 21.316206 | 253 | starcoderdata |
describe("mulberry.model", function() {
beforeEach(function() {
dojo.require('mulberry._Model');
dojo.require('mulberry._Store');
});
it("should create a model constructor using the provided data", function() {
var f = false;
mulberry.model('TestModel', {
format : function() {
f = true;
}
});
new client.models.TestModel().format();
expect(f).toBeTruthy();
});
it("should have a format function if one is not defined", function() {
mulberry.model('TestModel');
expect(new client.models.TestModel().format).toBeDefined();
});
}); | javascript | 20 | 0.615894 | 78 | 22.230769 | 26 | starcoderdata |
// AnimationProgramming.cpp : Defines the entry point for the console application.
//
#include "../include/stdafx.h"
#include "../include/SkeletonBone.h"
#include "../include/Skeleton.h"
class CSimulation : public ISimulation
{
Skeleton m_skeleton;
virtual void Init() override
{
m_skeleton = Skeleton("ThirdPerson");
m_skeleton.Init();
}
virtual void Update(float frameTime) override
{
DrawAxis();
m_skeleton.Update(frameTime);
m_skeleton.Draw();
}
void DrawAxis()
{
// X axis
DrawLine(50, 0, 0, 75, 0, 0, 1, 0, 0);
// Y axis
DrawLine(50, 0, 0, 50, -25, 0, 0, 1, 0);
// Z axis
DrawLine(50, 0, 0, 50, 0, 25, 0, 0, 1);
}
};
int main()
{
CSimulation simulation;
Run(&simulation, 1400, 800);
return 0;
} | c++ | 10 | 0.654088 | 82 | 17.068182 | 44 | starcoderdata |
public static void drawRectBorder(int xLeft, int yTop, int xRight, int yBottom, int borderARGB, int borderThickness) {
// Draw top border, including corners.
drawRect(
xLeft,
yTop,
xRight,
yTop + borderThickness,
borderARGB);
// Draw bottom border, including corners.
drawRect(
xLeft,
yBottom - borderThickness,
xRight,
yBottom,
borderARGB);
// Draw left border, excluding corners.
drawRect(
xLeft,
yTop + borderThickness,
xLeft + borderThickness,
yBottom - borderThickness,
borderARGB);
// Draw right border, excluding corners.
drawRect(
xRight - borderThickness,
yTop + borderThickness,
xRight,
yBottom - borderThickness,
borderARGB);
} | java | 7 | 0.484405 | 118 | 33.233333 | 30 | inline |
<?php
/**
* Created by PhpStorm.
* User: storn
* Date: 2016/12/12
* Time: 13:49
*/
namespace worms\core;
class Config
{
static private $__config = [];
    /**
     * @desc set  Set a configuration value
     * @author storn
     *
     * @param string $key   key
     * @param mixed  $value value
     */
static public function set($key, $value)
{
$key = strtoupper($key);
if (is_array($value) && isset(self::$__config[$key]) && is_array(self::$__config[$key])) {
self::$__config[$key] = array_merge(self::$__config[$key], $value);
} else {
self::$__config[$key] = $value;
}
}
    /**
     * @desc batchSet  Set multiple configuration values at once
     * @author storn
     *
     * @param array $config configuration array
     */
static public function batchSet(array $config)
{
foreach ($config as $k => $v) {
self::set($k, $v);
}
}
    /**
     * @desc loadFileConf  Load configuration from a file
     * @author storn
     *
     * @param string $confFile path to the configuration file
     *
     * @throws \InvalidArgumentException
     */
static public function loadFileConf(string $confFile)
{
if (file_exists($confFile)) {
$conf = include $confFile;
if (is_array($conf)) {
self::batchSet($conf);
} else {
                throw new \InvalidArgumentException("Invalid config @" . $confFile);
}
}
}
    /**
     * @desc get  Get a configuration value (supports "parent.child" keys)
     * @author storn
     *
     * @param string     $key     key
     * @param null|mixed $default default value returned when the key is missing
     *
     * @return mixed
     */
static public function get($key = '', $default = null)
{
if ($key == '') {
return self::$__config;
}
        if (strpos($key, '.') === false) {
$key = strtoupper($key);
return isset(self::$__config[$key]) ? self::$__config[$key] : $default;
}
        // support for getting nested (two-dimensional) values via "parent.child" keys
list($pKey, $cKey) = explode('.', $key, 2);
$pKey = strtoupper($pKey);
return isset(self::$__config[$pKey][$cKey]) ? self::$__config[$pKey][$cKey] : $default;
}
} | php | 17 | 0.476168 | 98 | 22.065934 | 91 | starcoderdata |
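// Usage sketch (illustrative; the keys and values below are hypothetical):
//   Config::set('db', ['host' => 'localhost']);
//   Config::get('db.host');             // "localhost"
//   Config::get('missing', 'fallback'); // "fallback"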
func (c *client) reconnectLoop() (err error) {
var try uint
var delay = 3 * time.Second
for {
if try == 0 {
c.log.Debug(c.getLogPrefix(), "trying to connect")
} else {
c.log.Debug(c.getLogPrefix(), "reconnect attempt", try)
}
if _, err = c.connect(); err == nil {
c.log.Debug(c.getLogPrefix(), "establishing connection succeeded")
break
}
c.log.Info(c.getLogPrefix(), "establishing connection failed, trying again in ", delay)
c.log.Info(c.getLogPrefix(), err)
// wait N seconds
select {
case <-time.After(delay):
delay += (4 + time.Duration(try*2)) * time.Second
case <-c.SystemShutdown:
c.log.Debug(c.getLogPrefix(), "stopping reconnect attempt", try)
return
}
if delay > 5*60*time.Second {
delay = 60 * time.Second
}
}
return
} | go | 15 | 0.638432 | 89 | 23 | 33 | inline |
#include <stdlib.h>

/* Deletes the first node whose number equals num and returns the (possibly
 * new) head of the list, so callers can reassign their head pointer. */
struct NODE *del_item(struct NODE *llist, int num) {
    struct NODE *temp;
    if(llist->number == num) {
        /* remove the head node and return the new head */
        temp = llist->next;
        free(llist);
        return temp;
    }
    /* walk to the node just before the one to delete */
    while(llist->next->number != num)
        llist = llist->next;
    temp = llist->next->next;
    free(llist->next);
    llist->next = temp;
    return llist;
}
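/*
 * Usage sketch (added for illustration; build_list() and the NODE layout
 * (an int `number` plus a `next` pointer) are assumptions, not part of the
 * original snippet):
 *
 *     struct NODE *head = build_list();
 *     head = del_item(head, 42);   // reassign: the old head may have been freed
 */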
package ca.cmfly.controller.programs;
import java.io.IOException;
import java.util.List;
import ca.cmfly.controller.LightController;
import ca.cmfly.controller.LightId;
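/**
 * Base class for light-show programs: runLightShow() calls the optional
 * init() hook once, then invokes doit() in a loop until a subclass sets
 * keepGoing to false.
 */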
public abstract class LightShow {
protected LightController lc;
protected boolean keepGoing;
protected List lightIds;
public LightShow() throws IOException{
this.lc = new LightController();
this.keepGoing = true;
this.lightIds = LightController.getLightIds();
}
public void runLightShow() throws IOException {
init();
while(keepGoing){
doit();
}
}
public abstract void doit() throws IOException;
public void init() throws IOException{
// Empty implementation allowed
}
} | java | 10 | 0.714681 | 48 | 19.878788 | 33 | starcoderdata |
#!/usr/bin/env python
import sys
from oppai import *
# prints timing points (just a test for this interface)
ez = ezpp_new()
ezpp(ez, sys.argv[1])
for i in range(ezpp_ntiming_points(ez)):
time = ezpp_timing_time(ez, i)
ms_per_beat = ezpp_timing_ms_per_beat(ez, i)
change = ezpp_timing_change(ez, i)
print("%f | %f beats per ms | change: %d" % (time, ms_per_beat, change))
ezpp_free(ez) | python | 8 | 0.669192 | 74 | 27.285714 | 14 | starcoderdata |
import common from './common';
import fs from './lib/fs';
export default {
...common,
fs,
}; | javascript | 4 | 0.612245 | 30 | 13 | 7 | starcoderdata |
// Code generated by msgraph-generate.go DO NOT EDIT.
package msgraph
// SynchronizationJobRestartCriteria undocumented
type SynchronizationJobRestartCriteria struct {
// Object is the base model of SynchronizationJobRestartCriteria
Object
// ResetScope undocumented
ResetScope *SynchronizationJobRestartScope `json:"resetScope,omitempty"`
} | go | 7 | 0.838942 | 73 | 33.666667 | 12 | starcoderdata |
public static void Union<T>(this List<T> self, List<T> list)
{
// Add list items to self if not already contained
foreach (T item in list)
{
if (!self.Contains(item)) self.Add(item);
}
} | c# | 11 | 0.628019 | 60 | 22.111111 | 9 | inline |
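        // Usage sketch (illustrative): Union mutates the receiver in place.
        //   var a = new List<int> { 1, 2, 3 };
        //   a.Union(new List<int> { 2, 3, 4 });  // a is now { 1, 2, 3, 4 }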