max_stars_count (int64: 301 to 224k) | text (string: lengths 6 to 1.05M) | token_count (int64: 3 to 727k) |
---|---|---|
5,169 | {
"name": "VirtualGameController",
"version": "0.0.7",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"summary": "Feature-rich game controller framework for iOS, tvOS, OS X and watchOS in Swift 2.1.",
"homepage": "https://github.com/robreuss/VirtualGameController",
"authors": {
"robreuss": "<EMAIL>"
},
"social_media_url": "http://twitter.com/robreuss",
"source": {
"git": "https://github.com/robreuss/VirtualGameController.git",
"tag": "0.0.7"
},
"source_files": "Source/**/*.swift",
"platforms": {
"ios": "9.0",
"tvos": "9.0",
"osx": "10.10"
},
"tvos": {
"exclude_files": "Source/**/VgcCentralPublisherWatch.swift"
},
"osx": {
"exclude_files": [
"Source/**/VgcCentralPublisherWatch.swift",
"Source/**/VgcCentralViewController.swift",
"Source/**/VgcSharedViews.swift",
"Source/**/VgcWatchConnectivity.swift"
]
}
}
| 395 |
6,931 | <reponame>EkremBayar/bayar<gh_stars>1000+
__all__ = ["bkfilter", "hpfilter", "cffilter", "miso_lfilter",
"convolution_filter", "recursive_filter"]
from .bk_filter import bkfilter
from .hp_filter import hpfilter
from .cf_filter import cffilter
from .filtertools import miso_lfilter, convolution_filter, recursive_filter
| 124 |
1,255 | /*
* Copyright (c) 2014, <NAME>, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#include "NativeFeatureIncludes.h"
#if _RAKNET_SUPPORT_Rackspace==1 && _RAKNET_SUPPORT_TCPInterface==1
#include "Rackspace.h"
#include "RakString.h"
#include "TCPInterface.h"
using namespace RakNet;
Rackspace::Rackspace()
{
tcpInterface=0;
}
Rackspace::~Rackspace()
{
}
void Rackspace::AddEventCallback(Rackspace2EventCallback *callback)
{
unsigned int idx = eventCallbacks.GetIndexOf(callback);
if (idx == (unsigned int)-1)
eventCallbacks.Push(callback,_FILE_AND_LINE_);
}
void Rackspace::RemoveEventCallback(Rackspace2EventCallback *callback)
{
unsigned int idx = eventCallbacks.GetIndexOf(callback);
if (idx != (unsigned int)-1)
eventCallbacks.RemoveAtIndex(idx);
}
void Rackspace::ClearEventCallbacks(void)
{
eventCallbacks.Clear(true, _FILE_AND_LINE_);
}
SystemAddress Rackspace::Authenticate(TCPInterface *_tcpInterface, const char *_authenticationURL, const char *_rackspaceCloudUsername, const char *_apiAccessKey)
{
unsigned int index = GetOperationOfTypeIndex(RO_CONNECT_AND_AUTHENTICATE);
if (index!=(unsigned int)-1)
{
// In progress
return operations[index].connectionAddress;
}
tcpInterface=_tcpInterface;
rackspaceCloudUsername=_rackspaceCloudUsername;
apiAccessKey=_apiAccessKey;
unsigned int i;
RackspaceOperation ro;
ro.type=RO_CONNECT_AND_AUTHENTICATE;
ro.isPendingAuthentication=false;
RakAssert(tcpInterface->WasStarted());
ro.connectionAddress=tcpInterface->Connect(_authenticationURL,443,true);
if (ro.connectionAddress==UNASSIGNED_SYSTEM_ADDRESS)
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnConnectionAttemptFailure(RO_CONNECT_AND_AUTHENTICATE, _authenticationURL);
return UNASSIGNED_SYSTEM_ADDRESS;
}
#if OPEN_SSL_CLIENT_SUPPORT==1
tcpInterface->StartSSLClient(ro.connectionAddress);
#endif
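	// Issue the legacy Rackspace v1.0 authentication request; the useful output is in
	// the response headers (X-Auth-Token, X-Server-Management-Url, X-Storage-Url, ...),
	// which are parsed later in OnClosedConnection().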
RakNet::RakString command(
"GET /v1.0 HTTP/1.1\n"
"Host: %s\n"
"X-Auth-User: %s\n"
"X-Auth-Key: %s\n\n"
,_authenticationURL, _rackspaceCloudUsername, _apiAccessKey);
tcpInterface->Send(command.C_String(), (unsigned int) command.GetLength(), ro.connectionAddress, false);
operations.Insert(ro,_FILE_AND_LINE_);
return ro.connectionAddress;
}
const char * Rackspace::EventTypeToString(RackspaceEventType eventType)
{
switch (eventType)
{
case RET_Success_200:
return "Success_200";
case RET_Success_201:
return "Success_201";
case RET_Success_202:
return "Success_202";
case RET_Success_203:
return "Success_203";
case RET_Success_204:
return "Success_204";
case RET_Cloud_Servers_Fault_500:
return "Cloud_Servers_Fault_500";
case RET_Service_Unavailable_503:
return "Service_Unavailable_503";
case RET_Unauthorized_401:
return "Unauthorized_401";
case RET_Bad_Request_400:
return "Bad_Request_400";
case RET_Over_Limit_413:
return "Over_Limit_413";
case RET_Bad_Media_Type_415:
return "Bad_Media_Type_415";
case RET_Item_Not_Found_404:
return "Item_Not_Found_404";
case RET_Build_In_Progress_409:
return "Build_In_Progress_409";
case RET_Resize_Not_Allowed_403:
return "Resize_Not_Allowed_403";
case RET_Connection_Closed_Without_Reponse:
return "Connection_Closed_Without_Reponse";
case RET_Unknown_Failure:
return "Unknown_Failure";
}
return "Unknown event type (bug)";
}
void Rackspace::AddOperation(RackspaceOperationType type, RakNet::RakString httpCommand, RakNet::RakString operation, RakNet::RakString xml)
{
RackspaceOperation ro;
ro.type=type;
ro.httpCommand=httpCommand;
ro.operation=operation;
ro.xml=xml;
ro.isPendingAuthentication=HasOperationOfType(RO_CONNECT_AND_AUTHENTICATE);
if (ro.isPendingAuthentication==false)
{
if (ExecuteOperation(ro))
operations.Insert(ro,_FILE_AND_LINE_);
}
else
operations.Insert(ro,_FILE_AND_LINE_);
}
void Rackspace::ListServers(void)
{
AddOperation(RO_LIST_SERVERS, "GET", "servers", "");
}
void Rackspace::ListServersWithDetails(void)
{
AddOperation(RO_LIST_SERVERS_WITH_DETAILS, "GET", "servers/detail", "");
}
void Rackspace::CreateServer(RakNet::RakString name, RakNet::RakString imageId, RakNet::RakString flavorId)
{
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<server xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\" name=\"%s\" imageId=\"%s\" flavorId=\"%s\">"
"</server>"
,name.C_String() ,imageId.C_String(), flavorId.C_String());
AddOperation(RO_CREATE_SERVER, "POST", "servers", xml);
}
void Rackspace::GetServerDetails(RakNet::RakString serverId)
{
AddOperation(RO_GET_SERVER_DETAILS, "GET", RakNet::RakString("servers/%s", serverId.C_String()), "");
}
void Rackspace::UpdateServerNameOrPassword(RakNet::RakString serverId, RakNet::RakString newName, RakNet::RakString newPassword)
{
if (newName.IsEmpty() && newPassword.IsEmpty())
return;
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<server xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\""
);
if (newName.IsEmpty()==false)
xml += RakNet::RakString(" name=\"%s\"", newName.C_String());
if (newPassword.IsEmpty()==false)
xml += RakNet::RakString(" adminPass=\"%s\"", newPassword.C_String());
xml += " />";
AddOperation(RO_UPDATE_SERVER_NAME_OR_PASSWORD, "PUT", RakNet::RakString("servers/%s", serverId.C_String()), xml);
}
void Rackspace::DeleteServer(RakNet::RakString serverId)
{
AddOperation(RO_DELETE_SERVER, "DELETE", RakNet::RakString("servers/%s", serverId.C_String()), "");
}
void Rackspace::ListServerAddresses(RakNet::RakString serverId)
{
AddOperation(RO_LIST_SERVER_ADDRESSES, "GET", RakNet::RakString("servers/%s/ips", serverId.C_String()), "");
}
void Rackspace::ShareServerAddress(RakNet::RakString serverId, RakNet::RakString ipAddress)
{
AddOperation(RO_SHARE_SERVER_ADDRESS, "PUT", RakNet::RakString("servers/%s/ips/public/%s", serverId.C_String(), ipAddress.C_String()), "");
}
void Rackspace::DeleteServerAddress(RakNet::RakString serverId, RakNet::RakString ipAddress)
{
AddOperation(RO_DELETE_SERVER_ADDRESS, "DELETE", RakNet::RakString("servers/%s/ips/public/%s", serverId.C_String(), ipAddress.C_String()), "");
}
void Rackspace::RebootServer(RakNet::RakString serverId, RakNet::RakString rebootType)
{
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<reboot xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\" type=\"%s\""
"/>",
rebootType.C_String());
AddOperation(RO_REBOOT_SERVER, "POST", RakNet::RakString("servers/%s/action", serverId.C_String()), xml);
}
void Rackspace::RebuildServer(RakNet::RakString serverId, RakNet::RakString imageId)
{
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<rebuild xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\" imageId=\"%s\""
"/>",
imageId.C_String());
AddOperation(RO_REBUILD_SERVER, "POST", RakNet::RakString("servers/%s/action", serverId.C_String()), xml);
}
void Rackspace::ResizeServer(RakNet::RakString serverId, RakNet::RakString flavorId)
{
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<resize xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\" flavorId=\"%s\""
"/>",
flavorId.C_String());
AddOperation(RO_RESIZE_SERVER, "POST", RakNet::RakString("servers/%s/action", serverId.C_String()), xml);
}
void Rackspace::ConfirmResizedServer(RakNet::RakString serverId)
{
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<confirmResize xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\" "
"/>");
AddOperation(RO_CONFIRM_RESIZED_SERVER, "POST", RakNet::RakString("servers/%s/action", serverId.C_String()), xml);
}
void Rackspace::RevertResizedServer(RakNet::RakString serverId)
{
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<revertResize xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\" "
"/>");
AddOperation(RO_REVERT_RESIZED_SERVER, "POST", RakNet::RakString("servers/%s/action", serverId.C_String()), xml);
}
void Rackspace::ListFlavors(void)
{
AddOperation(RO_LIST_FLAVORS, "GET", "flavors", "");
}
void Rackspace::GetFlavorDetails(RakNet::RakString flavorId)
{
AddOperation(RO_GET_FLAVOR_DETAILS, "GET", RakNet::RakString("flavors/%s", flavorId.C_String()), "");
}
void Rackspace::ListImages(void)
{
AddOperation(RO_LIST_IMAGES, "GET", "images", "");
}
void Rackspace::CreateImage(RakNet::RakString serverId, RakNet::RakString imageName)
{
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<image xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\" name=\"%s\" serverId=\"%s\""
"/>",
imageName.C_String(),serverId.C_String());
AddOperation(RO_CREATE_IMAGE, "POST", "images", xml);
}
void Rackspace::GetImageDetails(RakNet::RakString imageId)
{
AddOperation(RO_GET_IMAGE_DETAILS, "GET", RakNet::RakString("images/%s", imageId.C_String()), "");
}
void Rackspace::DeleteImage(RakNet::RakString imageId)
{
AddOperation(RO_DELETE_IMAGE, "DELETE", RakNet::RakString("images/%s", imageId.C_String()), "");
}
void Rackspace::ListSharedIPGroups(void)
{
AddOperation(RO_LIST_SHARED_IP_GROUPS, "GET", "shared_ip_groups", "");
}
void Rackspace::ListSharedIPGroupsWithDetails(void)
{
AddOperation(RO_LIST_SHARED_IP_GROUPS_WITH_DETAILS, "GET", "shared_ip_groups/detail", "");
}
void Rackspace::CreateSharedIPGroup(RakNet::RakString name, RakNet::RakString optionalServerId)
{
RakNet::RakString xml(
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<sharedIpGroup xmlns=\"http://docs.rackspacecloud.com/servers/api/v1.0\" name=\"%s\">", name.C_String());
if (optionalServerId.IsEmpty()==false)
xml+=RakNet::RakString("<server id=\"%s\"/>", optionalServerId.C_String());
xml+="</sharedIpGroup>";
AddOperation(RO_CREATE_SHARED_IP_GROUP, "POST", "shared_ip_groups", xml);
}
void Rackspace::GetSharedIPGroupDetails(RakNet::RakString groupId)
{
AddOperation(RO_GET_SHARED_IP_GROUP_DETAILS, "GET", RakNet::RakString("shared_ip_groups/%s", groupId.C_String()), "");
}
void Rackspace::DeleteSharedIPGroup(RakNet::RakString groupId)
{
AddOperation(RO_DELETE_SHARED_IP_GROUP, "DELETE", RakNet::RakString("shared_ip_groups/%s", groupId.C_String()), "");
}
void Rackspace::OnClosedConnection(SystemAddress systemAddress)
{
if (systemAddress==UNASSIGNED_SYSTEM_ADDRESS)
return;
unsigned int i, operationsIndex;
operationsIndex=0;
while (operationsIndex < operations.Size())
{
if (operations[operationsIndex].isPendingAuthentication==false && operations[operationsIndex].connectionAddress==systemAddress)
{
RackspaceOperation ro = operations[operationsIndex];
operations.RemoveAtIndex(operationsIndex);
RakNet::RakString packetDataString = ro.incomingStream;
const char *packetData = packetDataString.C_String();
char resultCodeStr[32];
int resultCodeInt;
RackspaceEventType rackspaceEventType;
char *result;
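			// Pull the numeric status code out of the "HTTP/1.1 NNN ..." status line
			// and map it onto a RackspaceEventType for the callbacks below.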
result=strstr((char*) packetData, "HTTP/1.1 ");
if (result!=0)
{
result+=strlen("HTTP/1.1 ");
for (i=0; i < sizeof(resultCodeStr)-1 && result[i] && result[i]>='0' && result[i]<='9'; i++)
resultCodeStr[i]=result[i];
resultCodeStr[i]=0;
resultCodeInt=atoi(resultCodeStr);
switch (resultCodeInt)
{
case 200: rackspaceEventType=RET_Success_200; break;
case 201: rackspaceEventType=RET_Success_201; break;
case 202: rackspaceEventType=RET_Success_202; break;
case 203: rackspaceEventType=RET_Success_203; break;
case 204: rackspaceEventType=RET_Success_204; break;
case 500: rackspaceEventType=RET_Cloud_Servers_Fault_500; break;
case 503: rackspaceEventType=RET_Service_Unavailable_503; break;
case 401: rackspaceEventType=RET_Unauthorized_401; break;
case 400: rackspaceEventType=RET_Bad_Request_400; break;
case 413: rackspaceEventType=RET_Over_Limit_413; break;
case 415: rackspaceEventType=RET_Bad_Media_Type_415; break;
case 404: rackspaceEventType=RET_Item_Not_Found_404; break;
case 409: rackspaceEventType=RET_Build_In_Progress_409; break;
case 403: rackspaceEventType=RET_Resize_Not_Allowed_403; break;
default: rackspaceEventType=RET_Unknown_Failure; break;
}
}
else
{
rackspaceEventType=RET_Connection_Closed_Without_Reponse;
}
switch (ro.type)
{
case RO_CONNECT_AND_AUTHENTICATE:
{
if (rackspaceEventType==RET_Success_204)
{
RakNet::RakString header;
ReadLine(packetData, "X-Server-Management-Url: ", serverManagementURL);
serverManagementURL.SplitURI(header, serverManagementDomain, serverManagementPath);
ReadLine(packetData, "X-Storage-Url: ", storageURL);
storageURL.SplitURI(header, storageDomain, storagePath);
ReadLine(packetData, "X-CDN-Management-Url: ", cdnManagementURL);
cdnManagementURL.SplitURI(header, cdnManagementDomain, cdnManagementPath);
ReadLine(packetData, "X-Auth-Token: ", authToken);
ReadLine(packetData, "X-Storage-Token: ", storageToken);
operationsIndex=0;
while (operationsIndex < operations.Size())
{
if (operations[operationsIndex].isPendingAuthentication==true)
{
operations[operationsIndex].isPendingAuthentication=false;
if (ExecuteOperation(operations[operationsIndex])==false)
{
operations.RemoveAtIndex(operationsIndex);
}
else
operationsIndex++;
}
else
operationsIndex++;
}
// Restart in list
operationsIndex=0;
}
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnAuthenticationResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_LIST_SERVERS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnListServersResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_LIST_SERVERS_WITH_DETAILS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnListServersWithDetailsResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_CREATE_SERVER:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnCreateServerResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_GET_SERVER_DETAILS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnGetServerDetails(rackspaceEventType, (const char*) packetData);
break;
}
case RO_UPDATE_SERVER_NAME_OR_PASSWORD:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnUpdateServerNameOrPassword(rackspaceEventType, (const char*) packetData);
break;
}
case RO_DELETE_SERVER:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnDeleteServer(rackspaceEventType, (const char*) packetData);
break;
}
case RO_LIST_SERVER_ADDRESSES:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnListServerAddresses(rackspaceEventType, (const char*) packetData);
break;
}
case RO_SHARE_SERVER_ADDRESS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnShareServerAddress(rackspaceEventType, (const char*) packetData);
break;
}
case RO_DELETE_SERVER_ADDRESS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnDeleteServerAddress(rackspaceEventType, (const char*) packetData);
break;
}
case RO_REBOOT_SERVER:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnRebootServer(rackspaceEventType, (const char*) packetData);
break;
}
case RO_REBUILD_SERVER:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnRebuildServer(rackspaceEventType, (const char*) packetData);
break;
}
case RO_RESIZE_SERVER:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnResizeServer(rackspaceEventType, (const char*) packetData);
break;
}
case RO_CONFIRM_RESIZED_SERVER:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnConfirmResizedServer(rackspaceEventType, (const char*) packetData);
break;
}
case RO_REVERT_RESIZED_SERVER:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnRevertResizedServer(rackspaceEventType, (const char*) packetData);
break;
}
case RO_LIST_FLAVORS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnListFlavorsResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_GET_FLAVOR_DETAILS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnGetFlavorDetailsResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_LIST_IMAGES:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnListImagesResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_CREATE_IMAGE:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnCreateImageResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_GET_IMAGE_DETAILS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnGetImageDetailsResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_DELETE_IMAGE:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnDeleteImageResult(rackspaceEventType, (const char*) packetData);
break;
}
case RO_LIST_SHARED_IP_GROUPS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnListSharedIPGroups(rackspaceEventType, (const char*) packetData);
break;
}
case RO_LIST_SHARED_IP_GROUPS_WITH_DETAILS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnListSharedIPGroupsWithDetails(rackspaceEventType, (const char*) packetData);
break;
}
case RO_CREATE_SHARED_IP_GROUP:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnCreateSharedIPGroup(rackspaceEventType, (const char*) packetData);
break;
}
case RO_GET_SHARED_IP_GROUP_DETAILS:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnGetSharedIPGroupDetails(rackspaceEventType, (const char*) packetData);
break;
}
case RO_DELETE_SHARED_IP_GROUP:
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnDeleteSharedIPGroup(rackspaceEventType, (const char*) packetData);
break;
}
default:
break;
}
}
else
{
operationsIndex++;
}
}
}
void Rackspace::OnReceive(Packet *packet)
{
unsigned int operationsIndex;
for (operationsIndex=0; operationsIndex < operations.Size(); operationsIndex++)
{
if (operations[operationsIndex].isPendingAuthentication==false && operations[operationsIndex].connectionAddress==packet->systemAddress)
{
operations[operationsIndex].incomingStream+=packet->data;
}
}
}
bool Rackspace::ExecuteOperation(RackspaceOperation &ro)
{
if (ConnectToServerManagementDomain(ro)==false)
return false;
RakNet::RakString command(
"%s %s/%s HTTP/1.1\n"
"Host: %s\n"
"Content-Type: application/xml\n"
"Content-Length: %i\n"
"Accept: application/xml\n"
"X-Auth-Token: %s\n",
ro.httpCommand.C_String(), serverManagementPath.C_String(), ro.operation.C_String(), serverManagementDomain.C_String(),
ro.xml.GetLength(),
authToken.C_String());
if (ro.xml.IsEmpty()==false)
{
command+="\n";
command+=ro.xml;
command+="\n";
}
command+="\n";
//printf(command.C_String());
tcpInterface->Send(command.C_String(), (unsigned int) command.GetLength(), ro.connectionAddress, false);
return true;
}
void Rackspace::ReadLine(const char *data, const char *stringStart, RakNet::RakString &output)
{
output.Clear();
char *result, *resultEnd;
result=strstr((char*) data, stringStart);
if (result==0)
{
RakAssert(0);
return;
}
result+=strlen(stringStart);
if (result==0)
{
RakAssert(0);
return;
}
output=result;
resultEnd=result;
while (*resultEnd && (*resultEnd!='\r') && (*resultEnd!='\n') )
resultEnd++;
output.Truncate((unsigned int) (resultEnd-result));
}
bool Rackspace::ConnectToServerManagementDomain(RackspaceOperation &ro)
{
unsigned int i;
ro.connectionAddress=tcpInterface->Connect(serverManagementDomain.C_String(),443,true);
if (ro.connectionAddress==UNASSIGNED_SYSTEM_ADDRESS)
{
for (i=0; i < eventCallbacks.Size(); i++)
eventCallbacks[i]->OnConnectionAttemptFailure(ro.type, serverManagementURL);
return false;
}
#if OPEN_SSL_CLIENT_SUPPORT==1
tcpInterface->StartSSLClient(ro.connectionAddress);
#endif
return true;
}
bool Rackspace::HasOperationOfType(RackspaceOperationType t)
{
unsigned int i;
for (i=0; i < operations.Size(); i++)
{
if (operations[i].type==t)
return true;
}
return false;
}
unsigned int Rackspace::GetOperationOfTypeIndex(RackspaceOperationType t)
{
unsigned int i;
for (i=0; i < operations.Size(); i++)
{
if (operations[i].type==t)
return i;
}
return (unsigned int) -1;
}
#endif
| 9,367 |
8,273 | <reponame>AlohaChina/or-tools
#!/usr/bin/env python3
# Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START program]
"""Vehicles Routing Problem (VRP) with Time Window (TW) per vehicle.
All times are in minutes, measured from midnight (0:00),
e.g. 8am = 480, 11am = 660, 1pm = 780 ...
We have 1 depot (0) and 16 locations (1-16).
We have a fleet of 4 vehicles (0-3) whose working time is [480, 1020] (8am-5pm)
We have the distance matrix between these locations and depot.
We have a service time of 25min at each location.
Locations are duplicated so we can simulate a TW per vehicle.
location: [01-16] vehicle: 0 TW: [540, 660] (9am-11am)
location: [17-32] vehicle: 1 TW: [660, 780] (11am-1pm)
location: [33-48] vehicle: 2 TW: [780, 900] (1pm-3pm)
location: [49-64] vehicle: 3 TW: [900, 1020] (3pm-5pm)
"""
# [START import]
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
# [END import]
# [START data_model]
def create_data_model():
"""Stores the data for the problem."""
data = {}
data['time_matrix'] = [
[0, 6, 9, 8, 7, 3, 6, 2, 3, 2, 6, 6, 4, 4, 5, 9, 7],
[6, 0, 8, 3, 2, 6, 8, 4, 8, 8, 13, 7, 5, 8, 12, 10, 14],
[9, 8, 0, 11, 10, 6, 3, 9, 5, 8, 4, 15, 14, 13, 9, 18, 9],
[8, 3, 11, 0, 1, 7, 10, 6, 10, 10, 14, 6, 7, 9, 14, 6, 16],
[7, 2, 10, 1, 0, 6, 9, 4, 8, 9, 13, 4, 6, 8, 12, 8, 14],
[3, 6, 6, 7, 6, 0, 2, 3, 2, 2, 7, 9, 7, 7, 6, 12, 8],
[6, 8, 3, 10, 9, 2, 0, 6, 2, 5, 4, 12, 10, 10, 6, 15, 5],
[2, 4, 9, 6, 4, 3, 6, 0, 4, 4, 8, 5, 4, 3, 7, 8, 10],
[3, 8, 5, 10, 8, 2, 2, 4, 0, 3, 4, 9, 8, 7, 3, 13, 6],
[2, 8, 8, 10, 9, 2, 5, 4, 3, 0, 4, 6, 5, 4, 3, 9, 5],
[6, 13, 4, 14, 13, 7, 4, 8, 4, 4, 0, 10, 9, 8, 4, 13, 4],
[6, 7, 15, 6, 4, 9, 12, 5, 9, 6, 10, 0, 1, 3, 7, 3, 10],
[4, 5, 14, 7, 6, 7, 10, 4, 8, 5, 9, 1, 0, 2, 6, 4, 8],
[4, 8, 13, 9, 8, 7, 10, 3, 7, 4, 8, 3, 2, 0, 4, 5, 6],
[5, 12, 9, 14, 12, 6, 6, 7, 3, 3, 4, 7, 6, 4, 0, 9, 2],
[9, 10, 18, 6, 8, 12, 15, 8, 13, 9, 13, 3, 4, 5, 9, 0, 9],
[7, 14, 9, 16, 14, 8, 5, 10, 6, 5, 4, 10, 8, 6, 2, 9, 0],
]
data['num_vehicles'] = 4
data['depot'] = 0
return data
# [END data_model]
# [START solution_printer]
def print_solution(manager, routing, assignment):
"""Prints solution on console."""
print(f'Objective: {assignment.ObjectiveValue()}')
# Display dropped nodes.
dropped_nodes = 'Dropped nodes:'
for index in range(routing.Size()):
if routing.IsStart(index) or routing.IsEnd(index):
continue
if assignment.Value(routing.NextVar(index)) == index:
node = manager.IndexToNode(index)
if node > 16:
original = node
while original > 16:
original = original - 16
dropped_nodes += f' {node}({original})'
else:
dropped_nodes += f' {node}'
print(dropped_nodes)
# Display routes
time_dimension = routing.GetDimensionOrDie('Time')
total_time = 0
for vehicle_id in range(manager.GetNumberOfVehicles()):
plan_output = f'Route for vehicle {vehicle_id}:\n'
index = routing.Start(vehicle_id)
start_time = 0
while not routing.IsEnd(index):
time_var = time_dimension.CumulVar(index)
node = manager.IndexToNode(index)
if node > 16:
original = node
while original > 16:
original = original - 16
plan_output += f'{node}({original})'
else:
plan_output += f'{node}'
plan_output += f' Time:{assignment.Value(time_var)} -> '
if start_time == 0:
start_time = assignment.Value(time_var)
index = assignment.Value(routing.NextVar(index))
time_var = time_dimension.CumulVar(index)
node = manager.IndexToNode(index)
plan_output += f'{node} Time:{assignment.Value(time_var)}\n'
end_time = assignment.Value(time_var)
duration = end_time - start_time
plan_output += f'Duration of the route:{duration}min\n'
print(plan_output)
total_time += duration
print(f'Total duration of all routes: {total_time}min')
# [END solution_printer]
def main():
"""Solve the VRP with time windows."""
# Instantiate the data problem.
# [START data]
data = create_data_model()
# [END data]
# Create the routing index manager.
# [START index_manager]
manager = pywrapcp.RoutingIndexManager(
1 + 16*4, # number of locations
data['num_vehicles'],
data['depot'])
# [END index_manager]
# Create Routing Model.
# [START routing_model]
routing = pywrapcp.RoutingModel(manager)
# [END routing_model]
# Create and register a transit callback.
# [START transit_callback]
def time_callback(from_index, to_index):
"""Returns the travel time between the two nodes."""
# Convert from routing variable Index to time matrix NodeIndex.
from_node = manager.IndexToNode(from_index)
to_node = manager.IndexToNode(to_index)
# since our matrix is 17x17 map duplicated node to original one to
# retrieve the travel time
while from_node > 16:
from_node = from_node - 16
while to_node > 16:
to_node = to_node - 16
# add service of 25min for each location (except depot)
service_time = 0
if from_node != data['depot']:
service_time = 25
return data['time_matrix'][from_node][to_node] + service_time
transit_callback_index = routing.RegisterTransitCallback(time_callback)
# [END transit_callback]
# Define cost of each arc.
# [START arc_cost]
routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
# [END arc_cost]
# Add Time Windows constraint.
# [START time_windows_constraint]
time = 'Time'
routing.AddDimension(
transit_callback_index,
0, # allow waiting time (0 min)
1020, # maximum cumul value per vehicle: latest allowed time is 5pm (1020 min after midnight)
False, # Don't force start cumul to zero.
time)
time_dimension = routing.GetDimensionOrDie(time)
# Add time window constraints for each location except depot.
for location_idx in range(17):
if location_idx == data['depot']:
continue
# Vehicle 0 location TW: [9am, 11am]
index_0 = manager.NodeToIndex(location_idx)
time_dimension.CumulVar(index_0).SetRange(540, 660)
routing.VehicleVar(index_0).SetValues([-1, 0])
# Vehicle 1 location TW: [11am, 1pm]
index_1 = manager.NodeToIndex(location_idx+16*1)
time_dimension.CumulVar(index_1).SetRange(660, 780)
routing.VehicleVar(index_1).SetValues([-1, 1])
# Vehicle 2 location TW: [1pm, 3pm]
index_2 = manager.NodeToIndex(location_idx+16*2)
time_dimension.CumulVar(index_2).SetRange(780, 900)
routing.VehicleVar(index_2).SetValues([-1, 2])
# Vehicle 3 location TW: [3pm, 5pm]
index_3 = manager.NodeToIndex(location_idx+16*3)
time_dimension.CumulVar(index_3).SetRange(900, 1020)
routing.VehicleVar(index_3).SetValues([-1, 3])
# Add Disjunction so only one node among duplicate is visited
penalty = 100_000 # Give solver strong incentive to visit one node
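# The trailing argument (max_cardinality = 1) allows at most one of the four copies
# to be visited; the penalty is only paid if none of them is served.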
routing.AddDisjunction([index_0, index_1, index_2, index_3], penalty, 1)
# Add time window constraints for each vehicle start node.
depot_idx = data['depot']
for vehicle_id in range(data['num_vehicles']):
index = routing.Start(vehicle_id)
time_dimension.CumulVar(index).SetRange(480, 1020) # (8am, 5pm)
# Add time window constraints for each vehicle end node.
depot_idx = data['depot']
for vehicle_id in range(data['num_vehicles']):
index = routing.End(vehicle_id)
time_dimension.CumulVar(index).SetRange(480, 1020) # (8am, 5pm)
# [END time_windows_constraint]
# Instantiate route start and end times to produce feasible times.
# [START depot_start_end_times]
for i in range(data['num_vehicles']):
routing.AddVariableMinimizedByFinalizer(
time_dimension.CumulVar(routing.Start(i)))
routing.AddVariableMinimizedByFinalizer(
time_dimension.CumulVar(routing.End(i)))
# [END depot_start_end_times]
# Setting first solution heuristic.
# [START parameters]
search_parameters = pywrapcp.DefaultRoutingSearchParameters()
search_parameters.first_solution_strategy = (
routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
search_parameters.local_search_metaheuristic = (
routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
search_parameters.time_limit.FromSeconds(1)
# [END parameters]
# Solve the problem.
# [START solve]
assignment = routing.SolveWithParameters(search_parameters)
# [END solve]
# Print solution on console.
# [START print_solution]
if assignment:
print_solution(manager, routing, assignment)
else:
print("no solution found !")
# [END print_solution]
if __name__ == '__main__':
main()
# [END program]
| 4,279 |
328 | <reponame>grvvy/greatfet
#
# This file is part of GreatFET
#
from ..interface import GreatFETInterface
class SPIDevice(GreatFETInterface):
""" Abstract base class representing an SPI-attached device. """
def __init__(self, spi_bus, chip_select, spi_mode=0):
""" Sets up a new SPI-attached device.
Parameters:
spi_bus -- The SPI bus to which the given device is attached.
chip_select -- The GPIOPin object that acts as the chip select for the given device.
spi_mode -- The SPI mode to use for the given transaction.
"""
# Store our interface...
self._bus = spi_bus
self._chip_select = chip_select
self._spi_mode = spi_mode
# ... and register ourselves with the parent SPI bus.
self._bus.attach_device(self)
def _transmit(self, data, receive_length=None, deassert_chip_select=True):
"""
Sends (and typically receives) data over the SPI bus.
Args:
data -- the data to be sent to the given device.
receive_length -- the total amount of data to be read. If longer
than the data length, the transmit will automatically be extended
with zeroes.
deassert_chip_select -- if set, the chip-select line is released after
communicating; if cleared, it is left asserted so the transaction can be
continued later
"""
return self._bus.transmit(data, receive_length, spi_mode=self._spi_mode,
chip_select=self._chip_select, deassert_chip_select=deassert_chip_select)
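
# Illustrative sketch (not part of the original GreatFET sources): a hypothetical
# subclass showing how a concrete device would typically use _transmit(). The class
# name, opcode value and response layout are assumptions for demonstration only.
class HypotheticalIDReader(SPIDevice):
    """ Example device that reads a few identification bytes over SPI. """

    READ_ID_OPCODE = 0x9F  # assumed opcode for this hypothetical part

    def read_id(self):
        # Clock out the opcode, then read back three ID bytes; the transmit is
        # automatically padded with zeroes up to receive_length.
        response = self._transmit([self.READ_ID_OPCODE], receive_length=4)
        return response[1:]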
| 676 |
852 | //
//
#include "TopQuarkAnalysis/TopJetCombination/interface/TtSemiSimpleBestJetComb.h"
TtSemiSimpleBestJetComb::TtSemiSimpleBestJetComb() {}
TtSemiSimpleBestJetComb::~TtSemiSimpleBestJetComb() {}
int TtSemiSimpleBestJetComb::operator()(std::vector<TtSemiEvtSolution>& sols) {
// search for the highest probChi^2 value among the different jet combination solutions
double maxProbChi2 = 0;
for (unsigned int s = 0; s < sols.size(); s++)
maxProbChi2 = std::max(maxProbChi2, sols[s].getProbChi2());
//search indices of original solutions with highest probChi2 value
std::vector<unsigned int> indices;
indices.clear();
for (unsigned int s = 0; s < sols.size(); s++) {
if (fabs(sols[s].getProbChi2() - maxProbChi2) < 0.0001)
indices.push_back(s);
}
int bestSol = -999;
if (maxProbChi2 > 0.) {
if (indices.size() == 1)
bestSol = indices[0];
if (indices.size() == 2) {
//typically only light jets constraints applied, so still b-jet ambiguity to resolve
// -> look at DPhi(Whadr,bhadr) and choose smallest value
double DPhi_Wb0 = fabs(sols[indices[0]].getFitHadW().phi() - sols[indices[0]].getFitHadb().phi());
double DPhi_Wb1 = fabs(sols[indices[1]].getFitHadW().phi() - sols[indices[1]].getFitHadb().phi());
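      // wrap both azimuthal differences into [0, pi]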
if (DPhi_Wb0 > 3.1415)
DPhi_Wb0 = 2. * 3.1415 - DPhi_Wb0;
if (DPhi_Wb1 > 3.1415)
DPhi_Wb1 = 2. * 3.1415 - DPhi_Wb1;
if (DPhi_Wb0 < DPhi_Wb1) {
bestSol = indices[0];
} else {
bestSol = indices[1];
}
}
}
return bestSol;
}
| 702 |
465 | #ifndef _CSR_MMAP_H_
#define _CSR_MMAP_H_
#include "csr_typedefs.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef struct _csr_mmap_t csr_mmap_t;
/* mmap.c */
csr_mmap_t *csr_mmap_file(const char *, unsigned char bLoadMem);
csr_mmap_t *csr_mmap_file_w(const char *);
void csr_munmap_file(csr_mmap_t *);
void *csr_mmap_map(csr_mmap_t *);
csr_offset_t csr_mmap_size(csr_mmap_t *);
#ifdef __cplusplus
}
#endif
#endif
| 221 |
1,799 | /*------------------------------------------------------------------------------
* rtklib unit test driver : norad two line element function
*-----------------------------------------------------------------------------*/
#include <stdio.h>
#include <assert.h>
#include "../../src/rtklib.h"
#define OUT stdout
/* dump tle ------------------------------------------------------------------*/
static void dumptle(FILE *fp, const tle_t *tle)
{
int i;
for (i=0;i<tle->n;i++) {
fprintf(fp,"(%2d) name = %s\n", i+1,tle->data[i].name );
fprintf(fp,"(%2d) satno= %s\n", i+1,tle->data[i].satno);
fprintf(fp,"(%2d) class= %c\n", i+1,tle->data[i].satclass);
fprintf(fp,"(%2d) desig= %s\n", i+1,tle->data[i].desig);
fprintf(fp,"(%2d) epoch= %s\n", i+1,time_str(tle->data[i].epoch,0));
fprintf(fp,"(%2d) etype= %d\n", i+1,tle->data[i].etype);
fprintf(fp,"(%2d) eleno= %d\n", i+1,tle->data[i].eleno);
fprintf(fp,"(%2d) ndot = %19.12e\n",i+1,tle->data[i].ndot );
fprintf(fp,"(%2d) nddot= %19.12e\n",i+1,tle->data[i].nddot);
fprintf(fp,"(%2d) bstar= %19.12e\n",i+1,tle->data[i].bstar);
fprintf(fp,"(%2d) inc = %19.12e\n",i+1,tle->data[i].inc );
fprintf(fp,"(%2d) OMG = %19.12e\n",i+1,tle->data[i].OMG );
fprintf(fp,"(%2d) ecc = %19.12e\n",i+1,tle->data[i].ecc );
fprintf(fp,"(%2d) omg = %19.12e\n",i+1,tle->data[i].omg );
fprintf(fp,"(%2d) M = %19.12e\n",i+1,tle->data[i].M );
fprintf(fp,"(%2d) n = %19.12e\n",i+1,tle->data[i].n );
fprintf(fp,"(%2d) rev = %d\n", i+1,tle->data[i].rev );
}
}
/* tle_read() ----------------------------------------------------------------*/
static void utest1(void)
{
const char *file1="../data/tle/tle_sgp4.err";
const char *file2="../data/tle/tle_sgp4.txt";
const char *file3="../data/tle/tle_nav.txt";
tle_t tle={0};
int stat;
stat=tle_read(file1,&tle);
assert(!stat);
stat=tle_read(file2,&tle);
assert(stat);
assert(tle.n==1);
stat=tle_read(file3,&tle);
assert(stat);
assert(tle.n==114);
#if 0
dumptle(OUT,&tle);
#endif
fprintf(OUT,"%s utest1 : OK\n",__FILE__);
}
/* tle_pos() -----------------------------------------------------------------*/
static void utest2(void)
{
const char *file2="../data/tle/tle_sgp4.txt";
const double ep0[6]={1980,1,1};
tle_t tle={0};
gtime_t epoch;
double min,rs[6];
int i,stat;
epoch=utc2gpst(timeadd(epoch2time(ep0),274.98708465*86400.0));
stat=tle_read(file2,&tle);
assert(stat);
stat=tle_pos(epoch,"TEST_ERR","","",&tle,NULL,rs);
assert(!stat);
for (i=0;i<5;i++) {
min=360.0*i;
stat=tle_pos(timeadd(epoch,min*60.0),"TEST_SAT","","",&tle,NULL,rs);
assert(stat);
fprintf(OUT,"%4.0f: %14.8f %14.8f %14.8f %11.8f %11.8f %11.8f\n",min,
rs[0]/1e3,rs[1]/1e3,rs[2]/1e3,rs[3]/1e3,rs[4]/1e3,rs[5]/1e3);
}
fprintf(OUT,"%s utest2 : OK\n",__FILE__);
}
/* tle_pos() accuracy --------------------------------------------------------*/
static void utest3(void)
{
const char *file1="../data/tle/brdc3050.12*";
const char *file2="../data/tle/TLE_GNSS_20121101.txt";
const char *file3="../data/tle/igs17127.erp";
const double ep[6]={2012,10,31,0,0,0};
nav_t nav={0};
erp_t erp={0};
tle_t tle={0};
gtime_t time;
char sat[32];
double rs1[6],rs2[6],ds[6],dts[2],var;
int i,j,k,stat,svh;
readrnx(file1,0,"",NULL,&nav,NULL);
assert(nav.n>0);
stat=readerp(file3,&erp);
assert(stat);
stat=tle_read(file2,&tle);
assert(stat);
for (i=0;i<MAXSAT;i++) {
satno2id(i+1,sat);
fprintf(OUT,"SAT=%s\n",sat);
for (j=0;j<96;j++) {
time=timeadd(epoch2time(ep),900.0*j);
if (!satpos(time,time,i+1,EPHOPT_BRDC,&nav,rs1,dts,&var,&svh)) continue;
if (satsys(i+1,NULL)==SYS_QZS) svh&=0xFE;
if (svh) continue;
stat=tle_pos(time,sat,"","",&tle,&erp,rs2);
assert(stat);
for (k=0;k<3;k++) ds[k]=rs2[k]-rs1[k];
fprintf(OUT,"%6.0f %11.3f %11.3f %11.3f %11.3f\n",900.0*j,
ds[0]/1e3,ds[1]/1e3,ds[2]/1e3,norm(ds,3)/1e3);
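            /* SGP4/TLE positions should agree with broadcast ephemeris to within 300 km */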
assert(norm(ds,3)/1e3<300.0);
}
fprintf(OUT,"\n");
}
fprintf(OUT,"%s utest3 : OK\n",__FILE__);
}
/* main ----------------------------------------------------------------------*/
int main(int argc, char **argv)
{
utest1();
utest2();
utest3();
return 0;
}
| 2,656 |
3,799 | <reponame>semoro/androidx
/*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.textclassifier;
import static com.google.common.truth.Truth.assertThat;
import android.content.Intent;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SmallTest;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
* Tests for {@link ExtrasUtils}.
*/
@SmallTest
@RunWith(AndroidJUnit4.class)
public class ExtrasUtilsTest {
@Test
public void testGetTopLanguage() {
final Intent intent = ExtrasUtils.buildFakeTextClassifierIntent("ja", "en");
assertThat(ExtrasUtils.getTopLanguage(intent).getLanguage()).isEqualTo("ja");
}
@Test
public void testGetTopLanguage_differentLanguage() {
final Intent intent = ExtrasUtils.buildFakeTextClassifierIntent("de");
assertThat(ExtrasUtils.getTopLanguage(intent).getLanguage()).isEqualTo("de");
}
@Test
public void testGetTopLanguage_nullLanguageBundle() {
assertThat(ExtrasUtils.getTopLanguage(new Intent())).isNull();
}
@Test
public void testGetTopLanguage_null() {
assertThat(ExtrasUtils.getTopLanguage(null)).isNull();
}
}
| 577 |
7,482 | /*
* Copyright (c) 2006-2021, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2021-04-27 flybreak the first version.
*/
#pragma once
#include <cstdlib>
#include <system_error>
#include <chrono>
#include <ratio>
#include <rtthread.h>
#define RT_USING_CPP_EXCEPTION
inline void throw_system_error(int err, const char *what_msg)
{
#ifdef RT_USING_CPP_EXCEPTION
throw std::system_error(std::error_code(err, std::system_category()), what_msg);
#else
(void)err;
(void)what_msg;
::abort();
#endif
}
class tick_clock
{
public:
typedef clock_t rep;
typedef std::ratio<1, RT_TICK_PER_SECOND> period;
typedef std::chrono::duration<tick_clock::rep, tick_clock::period> duration;
typedef std::chrono::time_point<tick_clock> time_point;
constexpr static bool is_ready = true;
static time_point now();
};
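// Illustrative use (assumes tick_clock::now() is implemented elsewhere in this port):
//   auto t0 = tick_clock::now();
//   /* ... work ... */
//   auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(tick_clock::now() - t0);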
class real_time_clock
{
public:
typedef std::chrono::nanoseconds duration;
typedef duration::rep rep;
typedef duration::period period;
typedef std::chrono::time_point<real_time_clock, duration> time_point;
static constexpr bool is_steady = true;
static time_point
now() noexcept;
};
| 489 |
580 | <gh_stars>100-1000
// Author: <NAME>
// Date: 04/01/2010
// 12289 - One-Two-Three
// Print 1, 2 or 3 for each garbled word (at most one character per word is mistyped)
#include <iostream>
#include <string>
using namespace std;
int main ()
{
int n; // Number of words in input
cin >> n;
if (n <=0 )
return 0;
else
{
string word;
for(int i= 0 ; i< n; i++)
{
cin >> word;
if (word.length() == 5)
cout << "3" << endl;
else if (word.length() == 3)
{
// The word is a mistyped "two" if it agrees with "two" in at least
// two positions (at most one character was mistyped); otherwise it is "one".
int matches = 0;
if (word[0] == 't') matches++;
if (word[1] == 'w') matches++;
if (word[2] == 'o') matches++;
if (matches >= 2)
cout << "2" << endl;
else
cout << "1" << endl;
}
}
}
return 0;
}
| 319 |
575 | <gh_stars>100-1000
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chromecast/media/cma/base/balanced_media_task_runner_factory.h"
#include <map>
#include <utility>
#include "base/bind.h"
#include "base/check.h"
#include "base/macros.h"
#include "base/single_thread_task_runner.h"
#include "chromecast/media/cma/base/media_task_runner.h"
#include "media/base/timestamp_constants.h"
namespace chromecast {
namespace media {
// MediaTaskRunnerWithNotification -
// Media task runner which also behaves as a media task runner observer.
class MediaTaskRunnerWithNotification : public MediaTaskRunner {
public:
// Wraps a MediaTaskRunner so that a third party can:
// - be notified when a PostMediaTask is performed on this media task runner.
// |new_task_cb| is invoked in that case.
// - monitor the lifetime of the media task runner, i.e. check when the media
// task runner is not needed anymore.
// |shutdown_cb| is invoked in that case.
MediaTaskRunnerWithNotification(
const scoped_refptr<MediaTaskRunner>& media_task_runner,
base::RepeatingClosure new_task_cb,
base::OnceClosure shutdown_cb);
// MediaTaskRunner implementation.
bool PostMediaTask(const base::Location& from_here,
base::OnceClosure task,
base::TimeDelta timestamp) override;
private:
~MediaTaskRunnerWithNotification() override;
scoped_refptr<MediaTaskRunner> const media_task_runner_;
const base::RepeatingClosure new_task_cb_;
base::OnceClosure shutdown_cb_;
DISALLOW_COPY_AND_ASSIGN(MediaTaskRunnerWithNotification);
};
MediaTaskRunnerWithNotification::MediaTaskRunnerWithNotification(
const scoped_refptr<MediaTaskRunner>& media_task_runner,
base::RepeatingClosure new_task_cb,
base::OnceClosure shutdown_cb)
: media_task_runner_(media_task_runner),
new_task_cb_(std::move(new_task_cb)),
shutdown_cb_(std::move(shutdown_cb)) {}
MediaTaskRunnerWithNotification::~MediaTaskRunnerWithNotification() {
std::move(shutdown_cb_).Run();
}
bool MediaTaskRunnerWithNotification::PostMediaTask(
const base::Location& from_here,
base::OnceClosure task,
base::TimeDelta timestamp) {
bool may_run_in_future =
media_task_runner_->PostMediaTask(from_here, std::move(task), timestamp);
if (may_run_in_future)
new_task_cb_.Run();
return may_run_in_future;
}
// BalancedMediaTaskRunner -
// Run media tasks whose timestamp is less or equal to a max timestamp.
//
// Restrictions of BalancedMediaTaskRunner:
// - Can have at most one task in the queue.
// - Tasks should be given by increasing timestamps.
class BalancedMediaTaskRunner
: public MediaTaskRunner {
public:
explicit BalancedMediaTaskRunner(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner);
// Schedule tasks whose timestamp is less than or equal to |max_timestamp|.
void ScheduleWork(base::TimeDelta max_timestamp);
// Return the timestamp of the last media task.
// Return ::media::kNoTimestamp if no media task has been posted.
base::TimeDelta GetMediaTimestamp() const;
// MediaTaskRunner implementation.
bool PostMediaTask(const base::Location& from_here,
base::OnceClosure task,
base::TimeDelta timestamp) override;
private:
~BalancedMediaTaskRunner() override;
scoped_refptr<base::SingleThreadTaskRunner> const task_runner_;
// Protects the following variables.
mutable base::Lock lock_;
// Possible pending media task.
base::Location from_here_;
base::OnceClosure pending_task_;
// Timestamp of the last posted task.
// Is initialized to ::media::kNoTimestamp.
base::TimeDelta last_timestamp_;
DISALLOW_COPY_AND_ASSIGN(BalancedMediaTaskRunner);
};
BalancedMediaTaskRunner::BalancedMediaTaskRunner(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner)
: task_runner_(task_runner), last_timestamp_(::media::kNoTimestamp) {}
BalancedMediaTaskRunner::~BalancedMediaTaskRunner() {
}
void BalancedMediaTaskRunner::ScheduleWork(base::TimeDelta max_media_time) {
base::OnceClosure task;
{
base::AutoLock auto_lock(lock_);
if (pending_task_.is_null())
return;
if (last_timestamp_ != ::media::kNoTimestamp &&
last_timestamp_ >= max_media_time) {
return;
}
task = std::move(pending_task_);
}
task_runner_->PostTask(from_here_, std::move(task));
}
base::TimeDelta BalancedMediaTaskRunner::GetMediaTimestamp() const {
base::AutoLock auto_lock(lock_);
return last_timestamp_;
}
bool BalancedMediaTaskRunner::PostMediaTask(const base::Location& from_here,
base::OnceClosure task,
base::TimeDelta timestamp) {
DCHECK(!task.is_null());
// Pass through for a task with no timestamp.
if (timestamp == ::media::kNoTimestamp) {
return task_runner_->PostTask(from_here, std::move(task));
}
base::AutoLock auto_lock(lock_);
// Timestamps must be in order.
// Any task that does not meet that condition is simply discarded.
if (last_timestamp_ != ::media::kNoTimestamp && timestamp < last_timestamp_) {
return false;
}
// Only support one pending task at a time.
DCHECK(pending_task_.is_null());
from_here_ = from_here;
pending_task_ = std::move(task);
last_timestamp_ = timestamp;
return true;
}
BalancedMediaTaskRunnerFactory::BalancedMediaTaskRunnerFactory(
base::TimeDelta max_delta)
: max_delta_(max_delta) {
}
BalancedMediaTaskRunnerFactory::~BalancedMediaTaskRunnerFactory() {
}
scoped_refptr<MediaTaskRunner>
BalancedMediaTaskRunnerFactory::CreateMediaTaskRunner(
const scoped_refptr<base::SingleThreadTaskRunner>& task_runner) {
scoped_refptr<BalancedMediaTaskRunner> media_task_runner(
new BalancedMediaTaskRunner(task_runner));
scoped_refptr<MediaTaskRunnerWithNotification> media_task_runner_wrapper(
new MediaTaskRunnerWithNotification(
media_task_runner,
base::BindRepeating(&BalancedMediaTaskRunnerFactory::OnNewTask, this),
base::BindOnce(
&BalancedMediaTaskRunnerFactory::UnregisterMediaTaskRunner, this,
media_task_runner)));
base::AutoLock auto_lock(lock_);
// Note that |media_task_runner| is inserted here and
// not |media_task_runner_wrapper|. Otherwise, we would always have one
// ref on |media_task_runner_wrapper| and would never get the release
// notification.
// When |media_task_runner_wrapper| is going away,
// BalancedMediaTaskRunnerFactory will receive a notification and will in
// turn remove |media_task_runner|.
task_runners_.insert(media_task_runner);
return media_task_runner_wrapper;
}
void BalancedMediaTaskRunnerFactory::OnNewTask() {
typedef
std::multimap<base::TimeDelta, scoped_refptr<BalancedMediaTaskRunner> >
TaskRunnerMap;
TaskRunnerMap runnable_task_runner;
base::AutoLock auto_lock(lock_);
// Get the minimum timestamp among all streams.
for (MediaTaskRunnerSet::const_iterator it = task_runners_.begin();
it != task_runners_.end(); ++it) {
base::TimeDelta timestamp((*it)->GetMediaTimestamp());
if (timestamp == ::media::kNoTimestamp)
continue;
runnable_task_runner.insert(
std::pair<base::TimeDelta, scoped_refptr<BalancedMediaTaskRunner> >(
timestamp, *it));
}
// If there is no media task, just returns.
if (runnable_task_runner.empty())
return;
// Run tasks which meet the balancing criteria.
base::TimeDelta min_timestamp(runnable_task_runner.begin()->first);
base::TimeDelta max_timestamp = min_timestamp + max_delta_;
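  // Each stream may only be scheduled ahead of the slowest stream by at most |max_delta_|.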
for (TaskRunnerMap::iterator it = runnable_task_runner.begin();
it != runnable_task_runner.end(); ++it) {
(*it).second->ScheduleWork(max_timestamp);
}
}
void BalancedMediaTaskRunnerFactory::UnregisterMediaTaskRunner(
const scoped_refptr<BalancedMediaTaskRunner>& media_task_runner) {
{
base::AutoLock auto_lock(lock_);
task_runners_.erase(media_task_runner);
}
  // After removing one of the task runners, some of the other task runners might
  // need to be woken up if they are no longer blocked by the balancing
  // restrictions of the old stream.
OnNewTask();
}
} // namespace media
} // namespace chromecast
| 2,879 |
305 | #!/usr/bin/env python
from distutils.core import setup, Command
import os
import os.path
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='toproxy',
version='3.2',
description='Simple Tornado Async HTTP Proxy',
long_description=open('README.md').read(),
keywords = ["tornado proxy","fengyun"],
url='http://xiaorui.cc',
author='ruifengyun',
author_email='<EMAIL>',
install_requires=['tornado'],
packages=['toproxy'],
license = "MIT",
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.0',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| 337 |
317 | #ifndef EDYN_UTIL_ARRAY_HPP
#define EDYN_UTIL_ARRAY_HPP
#include <array>
namespace edyn {
// Create array filled with given value. From https://stackoverflow.com/a/41259045
namespace detail {
template <typename T, std::size_t...Is>
constexpr std::array<T, sizeof...(Is)> make_array(const T& value, std::index_sequence<Is...>) {
return {{(static_cast<void>(Is), value)...}};
}
}
template <std::size_t N, typename T>
constexpr std::array<T, N> make_array(const T& value) {
return detail::make_array(value, std::make_index_sequence<N>());
}
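// Example (illustrative): edyn::make_array<3>(1.5) yields std::array<double, 3>{{1.5, 1.5, 1.5}}.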
}
#endif // EDYN_UTIL_ARRAY_HPP | 239 |
369 | <gh_stars>100-1000
// Copyright (c) 2017-2021, Mudit<NAME>. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
#pragma once
#include "Label.hpp"
#include "Image.hpp"
#include "ListItem.hpp"
namespace gui
{
class BaseThreadItem : public ListItem
{
protected:
gui::Label *contact = nullptr;
gui::Label *numberImportance = nullptr;
gui::Label *timestamp = nullptr;
gui::Label *snippetPrefix = nullptr;
gui::Label *snippet = nullptr;
static gui::Label *createEmptyLabel(Item *parent);
virtual void onDimensionChangedTop(const BoundingBox &oldDim, const BoundingBox &newDim);
virtual void onDimensionChangedBottom(const BoundingBox &oldDim, const BoundingBox &newDim);
void resizeSnippet(const BoundingBox &dimensions, unsigned int leftOffset = 0U);
void displayNumberImportance(long int importance);
public:
BaseThreadItem();
bool onDimensionChanged(const BoundingBox &oldDim, const BoundingBox &newDim) override;
};
} // namespace gui
| 439 |
2,380 | /**
*
* Copyright 2018-2021 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smack.packet;
import javax.xml.namespace.QName;
/**
* Interface to represent XML elements. Every XML element in XMPP has a qualified XML name ({@link QName}). This name
* can be obtained via {@link #getQName()}.
* <p>
* XMPP uses "extension elements", i.e. XML elements, to provide extended functionality beyond what is in the base XMPP
* specification. Examples of extensions elements include message events, message properties, and extra presence data.
* IQ stanzas have limited support for extension elements. See {@link ExtensionElement} for more information about XMPP
* extension elements.
* </p>
* <p>
* It is recommend to use {@link ExtensionElement} over this class when creating new extension elements.
* </p>
*
* @see org.jivesoftware.smack.provider.ExtensionElementProvider
* @since 4.5
*/
public interface XmlElement extends NamedElement, XmlLangElement {
/**
* Returns the root element XML namespace.
*
* @return the namespace.
*/
String getNamespace();
default QName getQName() {
String namespaceURI = getNamespace();
String localPart = getElementName();
return new QName(namespaceURI, localPart);
}
@Override
default String getLanguage() {
return null;
}
}
| 561 |
583 | <gh_stars>100-1000
#include "AbstractLocalyticsSupport.h"
#include "core/RunnerMacros.h"
AbstractLocalyticsSupport::AbstractLocalyticsSupport(ByteCodeRunner *owner)
: NativeMethodHost(owner) {
}
NativeFunction * AbstractLocalyticsSupport::MakeNativeFunction(const char *name, int num_args) {
#undef NATIVE_NAME_PREFIX
#define NATIVE_NAME_PREFIX "LocalyticsSupport."
TRY_USE_NATIVE_METHOD(AbstractLocalyticsSupport, tagEventWithAttributes, 2);
return NULL;
}
StackSlot AbstractLocalyticsSupport::tagEventWithAttributes(RUNNER_ARGS) {
RUNNER_PopArgs2(event_name, event_attributes);
RUNNER_CheckTag(TString, event_name);
RUNNER_CheckTag(TArray, event_attributes);
std::map<unicode_string, unicode_string> attributes_map;
for (int i = 0; i < RUNNER->GetArraySize(event_attributes); i++) {
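        // Each attributes entry is itself a two-element array holding a [key, value] pair of strings.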
const StackSlot & attributes_item = RUNNER->GetArraySlot(event_attributes,i);
RUNNER_CheckTag(TArray, attributes_item);
unicode_string key = RUNNER->GetString(RUNNER->GetArraySlot(attributes_item,0));
unicode_string value = RUNNER->GetString(RUNNER->GetArraySlot(attributes_item,1));
attributes_map[key] = value;
}
doTagEventWithAttributes(RUNNER->GetString(event_name), attributes_map);
RETVOID;
}
| 495 |
353 | package org.nutz.plugins.event.impl;
import org.nutz.integration.jedis.RedisService;
import org.nutz.ioc.Ioc;
import org.nutz.json.Json;
import org.nutz.json.JsonFormat;
import org.nutz.lang.Lang;
import org.nutz.lang.Streams;
import org.nutz.log.Log;
import org.nutz.log.Logs;
import org.nutz.plugins.event.Event;
import org.nutz.plugins.event.EventBus;
import org.nutz.plugins.event.EventListener;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
/**
 * Redis-backed event bus (event center).
 *
 * @author <EMAIL>
 * @version 2017-5-16
*/
@SuppressWarnings("unchecked")
public class RedisEventBus implements EventBus {
private Log log = Logs.get();
protected String prefix = "nutzevent-";
protected long errorSleep = 10 * 1000;
private Ioc ioc;
private RedisService redisService;
private ExecutorService executorService;
/**
 * Initialization: register every EventListener subclass found in the IoC container.
*/
@Override
public void init() {
if (executorService == null)
executorService = Executors.newCachedThreadPool();
String[] listeners = ioc.getNamesByType(EventListener.class);
for (final String bean : listeners) {
EventListener listener = ioc.get(EventListener.class, bean);
final String channelName = prefix + listener.subscribeTopic();
executorService.submit(new Runnable() {
public void run() {
while (true) {
byte[] message;
try {
message = redisService.rpop(channelName.getBytes());
if (Lang.isEmpty(message))
continue;
} catch (Exception e) {
log.warnf("on %s error : %s", channelName, e.getMessage());
                            // If the redis connection is interrupted (throws), sleep for errorSleep and then resume listening
Lang.sleep(errorSleep);
continue;
}
try {
Event event = (Event) to(message);
EventListener listener = ioc.get(EventListener.class, bean);
listener.onEvent(event);
} catch (Exception e) {
log.error("event listener error!", e);
                            //redisService.lpush(channelName, message); // event handling failed; should the message be pushed back onto the queue?
}
}
}
});
}
}
@Override
public void depose() {
if (executorService != null) {
executorService.shutdown();
try {
executorService.awaitTermination(5, TimeUnit.SECONDS);
} catch (InterruptedException e) {
}
}
}
/**
 * Publish the event as a broadcast message on a redis list.
*/
@Override
public <T extends Event> void fireEvent(T event) {
if (event == null) {
return;
}
        String channelName = prefix + event.getTopic(); // channel (topic) name
        String message = Json.toJson(event, JsonFormat.compact()); // event body serialized as JSON
redisService.lpush(channelName.getBytes(), to(event));
}
private byte[] to(Object obj) {
ObjectOutputStream os = null;
byte[] bytes = null;
try {
ByteArrayOutputStream bs = new ByteArrayOutputStream();
os = new ObjectOutputStream(bs);
os.writeUnshared(obj);
bytes = bs.toByteArray();
} catch (Exception e) {
log.info("object to bytes fail", e);
} finally {
Streams.safeClose(os);
}
return bytes;
}
private Object to(byte[] bytes) {
ObjectInputStream os = null;
Object obj = null;
try {
os = new ObjectInputStream(new ByteArrayInputStream(bytes));
obj = os.readObject();
} catch (Exception e) {
log.info(" bytes to Object fail", e);
} finally {
Streams.safeClose(os);
}
return obj;
}
}
| 2,225 |
313 | //------------------------------------------------------------------------------
// GB_AxB_saxpy5_bitmap.c: C+=A*B when C is full, A is bitmap, B is sparse/hyper
//------------------------------------------------------------------------------
// SuiteSparse:GraphBLAS, <NAME>, (c) 2017-2022, All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
//------------------------------------------------------------------------------
// C is as-if-full.
// A is bitmap, and not iso-valued and not pattern-only
// B is sparse or hypersparse.
{
//--------------------------------------------------------------------------
// get C, A, and B
//--------------------------------------------------------------------------
const int64_t m = C->vlen ; // # of rows of C and A
const int8_t *restrict Ab = A->b ;
const int64_t *restrict Bp = B->p ;
const int64_t *restrict Bh = B->h ;
const int64_t *restrict Bi = B->i ;
const bool B_iso = B->iso ;
const GB_ATYPE *restrict Ax = (GB_ATYPE *) A->x ;
#if !GB_B_IS_PATTERN
const GB_BTYPE *restrict Bx = (GB_BTYPE *) B->x ;
#endif
GB_CTYPE *restrict Cx = (GB_CTYPE *) C->x ;
//--------------------------------------------------------------------------
// C += A*B where A is bitmap (and not iso or pattern-only)
//--------------------------------------------------------------------------
int tid ;
#pragma omp parallel for num_threads(nthreads) schedule(dynamic,1)
for (tid = 0 ; tid < ntasks ; tid++)
{
// get the task descriptor
const int64_t jB_start = B_slice [tid] ;
const int64_t jB_end = B_slice [tid+1] ;
// C(:,jB_start:jB_end-1) += A * B(:,jB_start:jB_end-1)
for (int64_t jB = jB_start ; jB < jB_end ; jB++)
{
// get B(:,j) and C(:,j)
const int64_t j = GBH (Bh, jB) ;
const int64_t pC = j * m ;
const int64_t pB_start = Bp [jB] ;
const int64_t pB_end = Bp [jB+1] ;
// C(:,j) += A*B(:,j)
for (int64_t pB = pB_start ; pB < pB_end ; pB++)
{
// get B(k,j)
const int64_t k = Bi [pB] ;
GB_GETB (bkj, Bx, pB, B_iso) ;
// get A(:,k)
const int64_t pA = k * m ;
// C(:,j) += A(:,k)*B(k,j)
for (int64_t i = 0 ; i < m ; i++)
{
if (!Ab [pA+i]) continue ;
// C(i,j) += A(i,k)*B(k,j) ;
GB_MULTADD (Cx [pC + i], Ax [pA + i], bkj, i, k, j) ;
}
}
}
}
}
| 1,237 |
428 | <filename>Java/Loon-Neo-Robovm/src/main/java/loon/jni/CTFramesetter.java
/**
* Copyright 2008 - 2015 The Loon Game Engine Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loon
* @author cping
* @email:<EMAIL>
* @version 0.5
*/
package loon.jni;
import org.robovm.rt.bro.*;
import org.robovm.rt.bro.annotation.*;
import org.robovm.rt.bro.ptr.*;
import org.robovm.apple.foundation.*;
import org.robovm.apple.corefoundation.*;
import org.robovm.apple.coregraphics.*;
import org.robovm.apple.coretext.CTFrameAttributes;
import org.robovm.apple.coretext.CTTypesetter;
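/**
 * Minimal RoboVM bridge to the CoreText CTFramesetter API: each {@code @Bridge}
 * method binds the CoreText C function named by its {@code symbol} attribute
 * (frame creation, typesetter access and frame-size suggestion).
 */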
@Library("CoreText")
public class CTFramesetter extends CFType {
public static class CTFramesetterPtr extends
Ptr<CTFramesetter, CTFramesetterPtr> {
}
static {
Bro.bind(CTFramesetter.class);
}
protected CTFramesetter() {
}
@Bridge(symbol = "CTFramesetterGetTypeID", optional = true)
public static native @MachineSizedUInt long getClassTypeID();
@Bridge(symbol = "CTFramesetterCreateWithAttributedString", optional = true)
public static native CTFramesetter create(NSAttributedString string);
@Bridge(symbol = "CTFramesetterCreateFrame", optional = true)
public native CTFrame createFrame(@ByVal CFRange stringRange, CGPath path,
CTFrameAttributes frameAttributes);
@Bridge(symbol = "CTFramesetterGetTypesetter", optional = true)
public native CTTypesetter getTypesetter();
@Bridge(symbol = "CTFramesetterSuggestFrameSizeWithConstraints", optional = true)
public native @ByVal CGSize suggestFrameSize(@ByVal CFRange stringRange,
CTFrameAttributes frameAttributes, @ByVal CGSize constraints,
CFRange fitRange);
}
| 672 |
790 | import copy
import pickle
from django.utils.timezone import UTC, LocalTimezone
from django.utils import unittest
class TimezoneTests(unittest.TestCase):
def test_copy(self):
self.assertIsInstance(copy.copy(UTC()), UTC)
self.assertIsInstance(copy.copy(LocalTimezone()), LocalTimezone)
def test_deepcopy(self):
self.assertIsInstance(copy.deepcopy(UTC()), UTC)
self.assertIsInstance(copy.deepcopy(LocalTimezone()), LocalTimezone)
def test_pickling_unpickling(self):
self.assertIsInstance(pickle.loads(pickle.dumps(UTC())), UTC)
self.assertIsInstance(pickle.loads(pickle.dumps(LocalTimezone())), LocalTimezone)
| 252 |
322 | <filename>vehicle/OVMS.V3/components/vehicle_jaguaripace/src/vehicle_jaguaripace.h
/*
; Project: Open Vehicle Monitor System
; Date: 1st April 2021
;
; Changes:
; 0.0.1 Initial release
;
; (C) 2021 <NAME>
;
; Permission is hereby granted, free of charge, to any person obtaining a copy
; of this software and associated documentation files (the "Software"), to deal
; in the Software without restriction, including without limitation the rights
; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
; copies of the Software, and to permit persons to whom the Software is
; furnished to do so, subject to the following conditions:
;
; The above copyright notice and this permission notice shall be included in
; all copies or substantial portions of the Software.
;
; THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
; LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
; THE SOFTWARE.
*/
#ifndef __VEHICLE_JAGUARIPACE_H__
#define __VEHICLE_JAGUARIPACE_H__
#include "vehicle.h"
using namespace std;
class OvmsVehicleJaguarIpace : public OvmsVehicle {
public:
OvmsVehicleJaguarIpace();
~OvmsVehicleJaguarIpace();
protected:
void IncomingPollReply(canbus* bus, uint16_t type, uint16_t pid, uint8_t* data, uint8_t length, uint16_t mlremain);
void IncomingFrameCan1(CAN_frame_t* p_frame) override;
//void IncomingFrameCan2(CAN_frame_t* p_frame) override;
//void IncomingFrameCan3(CAN_frame_t* p_frame) override;
//void IncomingFrameCan4(CAN_frame_t* p_frame) override;
char m_vin[18];
uint8_t m_localization[10];
OvmsCommand *cmd_xrt;
OvmsMetricFloat *xji_v_bat_capacity_soh_min ;
OvmsMetricFloat *xji_v_bat_capacity_soh_max ;
OvmsMetricFloat *xji_v_bat_power_soh;
OvmsMetricFloat *xji_v_bat_power_soh_min;
OvmsMetricFloat *xji_v_bat_power_soh_max;
OvmsMetricFloat *xji_v_bat_max_regen;
private:
void IncomingPollFrame(CAN_frame_t* frame);
void IncomingBecmPoll(uint16_t pid, uint8_t* data, uint8_t length, uint16_t remain);
void IncomingHvacPoll(uint16_t pid, uint8_t* data, uint8_t length, uint16_t remain);
void IncomingBcmPoll(uint16_t pid, uint8_t* data, uint8_t length, uint16_t remain);
void IncomingTpmsPoll(uint16_t pid, uint8_t* data, uint8_t length, uint16_t remain);
void IncomingTcuPoll(uint16_t pid, uint8_t* data, uint8_t length, uint16_t remain);
bool SendPollMessage(canbus* bus, uint16_t id, uint8_t type, uint16_t pid);
};
#endif //#ifndef __VEHICLE_JAGUARIPACE_H__
| 1,092 |
585 | <filename>venv/Lib/site-packages/nipype/interfaces/semtools/utilities/tests/test_auto_FindCenterOfBrain.py
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..brains import FindCenterOfBrain
def test_FindCenterOfBrain_inputs():
input_map = dict(
args=dict(
argstr="%s",
),
axis=dict(
argstr="--axis %d",
),
backgroundValue=dict(
argstr="--backgroundValue %d",
),
clippedImageMask=dict(
argstr="--clippedImageMask %s",
hash_files=False,
),
closingSize=dict(
argstr="--closingSize %d",
),
debugAfterGridComputationsForegroundImage=dict(
argstr="--debugAfterGridComputationsForegroundImage %s",
hash_files=False,
),
debugClippedImageMask=dict(
argstr="--debugClippedImageMask %s",
hash_files=False,
),
debugDistanceImage=dict(
argstr="--debugDistanceImage %s",
hash_files=False,
),
debugGridImage=dict(
argstr="--debugGridImage %s",
hash_files=False,
),
debugTrimmedImage=dict(
argstr="--debugTrimmedImage %s",
hash_files=False,
),
environ=dict(
nohash=True,
usedefault=True,
),
generateDebugImages=dict(
argstr="--generateDebugImages ",
),
headSizeEstimate=dict(
argstr="--headSizeEstimate %f",
),
headSizeLimit=dict(
argstr="--headSizeLimit %f",
),
imageMask=dict(
argstr="--imageMask %s",
extensions=None,
),
inputVolume=dict(
argstr="--inputVolume %s",
extensions=None,
),
maximize=dict(
argstr="--maximize ",
),
otsuPercentileThreshold=dict(
argstr="--otsuPercentileThreshold %f",
),
)
inputs = FindCenterOfBrain.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_FindCenterOfBrain_outputs():
output_map = dict(
clippedImageMask=dict(
extensions=None,
),
debugAfterGridComputationsForegroundImage=dict(
extensions=None,
),
debugClippedImageMask=dict(
extensions=None,
),
debugDistanceImage=dict(
extensions=None,
),
debugGridImage=dict(
extensions=None,
),
debugTrimmedImage=dict(
extensions=None,
),
)
outputs = FindCenterOfBrain.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| 1,510 |
892 | <gh_stars>100-1000
{
"schema_version": "1.2.0",
"id": "GHSA-c463-rcwh-797m",
"modified": "2022-05-02T03:56:59Z",
"published": "2022-05-02T03:56:59Z",
"aliases": [
"CVE-2009-4669"
],
"details": "Multiple SQL injection vulnerabilities in RoomPHPlanning 1.6 allow remote attackers to execute arbitrary SQL commands via (1) the loginus parameter to Login.php or (2) the Old Password field to changepwd.php, and allow (3) remote authenticated administrators to execute arbitrary SQL commands via the id parameter to admin/userform.php.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2009-4669"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/35237"
},
{
"type": "WEB",
"url": "http://www.exploit-db.com/exploits/8797"
}
],
"database_specific": {
"cwe_ids": [
"CWE-89"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 430 |
761 | import json
import time
import argparse
import logging
import os
from word_generator import WordGenerator
from google.cloud import storage
logger = logging.getLogger(__name__)
def upload_wotd(blob, word_generator):
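    """Generate a single word of the day and upload it to the given GCS blob as JSON."""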
word = word_generator.generate_word()
if not word:
raise RuntimeError("Error during generation")
blob.upload_from_string(json.dumps({
"word": word.word,
"part_of_speech": word.pos,
"definition": word.definition,
"example_usage": word.example,
"topic": word.topic,
"generated_at_ms": int(1000 * time.time()),
}),
        content_type="application/json"
)
def main(args):
gcloud_credentials = os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")
if not gcloud_credentials:
raise RuntimeError("Expected to set GOOGLE_APPLICATION_CREDENTIALS env var")
# Remove all handlers associated with the root logger object.
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
lvl = logging.DEBUG if args.verbose else logging.INFO
if args.log_file:
print(f"Logging to {args.log_file}")
logging.basicConfig(
level=lvl, filename=args.log_file, filemode="a", format="%(asctime)s - %(levelname)s - %(message)s",
)
else:
logging.basicConfig(level=lvl)
word_generator = WordGenerator(
device=args.device,
forward_model_path=args.forward_model_path,
inverse_model_path=args.inverse_model_path,
blacklist_path=args.blacklist_path,
quantize=args.quantize,
)
logger.info("Uploading WOTD")
client = storage.Client(project=args.gcloud_project)
bucket = client.get_bucket(args.gcloud_bucket)
blob = bucket.blob(args.gcloud_path)
upload_wotd(blob, word_generator)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run a wotd bot that uploads a wotd to a specified google bucket")
parser.add_argument(
"--device", help="Force a certain device (cuda / cpu)", type=str,
)
parser.add_argument("--forward-model-path", help="Model path for (Word -> Definition)", type=str, required=True)
parser.add_argument("--inverse-model-path", help="Model path for (Definition -> Word)", type=str, required=True)
parser.add_argument(
"--blacklist-path", help="Blacklist path for word generation", type=str, required=True,
)
parser.add_argument("--quantize", help="Perform quantization for models", action="store_true")
parser.add_argument("--log-file", type=str, help="Log to this file")
parser.add_argument("--verbose", action="store_true", help="Verbose logging")
parser.add_argument("--gcloud-project", type=str, required=True)
parser.add_argument("--gcloud-bucket", type=str, required=True)
parser.add_argument("--gcloud-path", type=str, default="wotd.json")
args = parser.parse_args()
try:
main(args)
except Exception:
logger.exception("Uncaught error")
raise
| 1,198 |
5,279 | <reponame>hengfengli/beam
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.io.gcp.firestore;
import static java.util.Objects.requireNonNull;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import com.google.api.gax.rpc.ServerStream;
import com.google.api.gax.rpc.ServerStreamingCallable;
import com.google.cloud.firestore.v1.stub.FirestoreStub;
import com.google.firestore.v1.Cursor;
import com.google.firestore.v1.Document;
import com.google.firestore.v1.RunQueryRequest;
import com.google.firestore.v1.RunQueryResponse;
import com.google.firestore.v1.StructuredQuery;
import com.google.firestore.v1.StructuredQuery.CollectionSelector;
import com.google.firestore.v1.StructuredQuery.Direction;
import com.google.firestore.v1.StructuredQuery.FieldFilter;
import com.google.firestore.v1.StructuredQuery.FieldFilter.Operator;
import com.google.firestore.v1.StructuredQuery.FieldReference;
import com.google.firestore.v1.StructuredQuery.Filter;
import com.google.firestore.v1.StructuredQuery.Order;
import com.google.firestore.v1.Value;
import java.util.Collections;
import java.util.List;
import java.util.function.Function;
import org.apache.beam.sdk.io.gcp.firestore.FirestoreV1ReadFn.RunQueryFn;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.AbstractIterator;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
@SuppressWarnings(
"initialization.fields.uninitialized") // mockito fields are initialized via the Mockito Runner
public final class FirestoreV1FnRunQueryTest
extends BaseFirestoreV1ReadFnTest<RunQueryRequest, RunQueryResponse> {
@Mock private ServerStreamingCallable<RunQueryRequest, RunQueryResponse> callable;
@Mock private ServerStream<RunQueryResponse> responseStream1;
@Mock private ServerStream<RunQueryResponse> responseStream2;
@Test
public void endToEnd() throws Exception {
TestData testData = TestData.fieldEqualsBar().setProjectId(projectId).build();
List<RunQueryResponse> responses =
ImmutableList.of(testData.response1, testData.response2, testData.response3);
when(responseStream1.iterator()).thenReturn(responses.iterator());
when(callable.call(testData.request)).thenReturn(responseStream1);
when(stub.runQueryCallable()).thenReturn(callable);
when(ff.getFirestoreStub(any())).thenReturn(stub);
RpcQosOptions options = RpcQosOptions.defaultOptions();
when(ff.getRpcQos(any()))
.thenReturn(FirestoreStatefulComponentFactory.INSTANCE.getRpcQos(options));
ArgumentCaptor<RunQueryResponse> responsesCaptor =
ArgumentCaptor.forClass(RunQueryResponse.class);
doNothing().when(processContext).output(responsesCaptor.capture());
when(processContext.element()).thenReturn(testData.request);
RunQueryFn fn = new RunQueryFn(clock, ff, options);
runFunction(fn);
List<RunQueryResponse> allValues = responsesCaptor.getAllValues();
assertEquals(responses, allValues);
}
@Override
public void resumeFromLastReadValue() throws Exception {
TestData testData =
TestData.fieldEqualsBar()
.setProjectId(projectId)
.setOrderFunction(
f ->
Collections.singletonList(
Order.newBuilder().setDirection(Direction.ASCENDING).setField(f).build()))
.build();
RunQueryRequest request2 =
RunQueryRequest.newBuilder()
.setParent(String.format("projects/%s/databases/(default)/document", projectId))
.setStructuredQuery(
testData
.request
.getStructuredQuery()
.toBuilder()
.setStartAt(
Cursor.newBuilder()
.setBefore(false)
.addValues(Value.newBuilder().setStringValue("bar"))))
.build();
List<RunQueryResponse> responses =
ImmutableList.of(testData.response1, testData.response2, testData.response3);
when(responseStream1.iterator())
.thenReturn(
new AbstractIterator<RunQueryResponse>() {
private int invocationCount = 1;
@Override
protected RunQueryResponse computeNext() {
int count = invocationCount++;
if (count == 1) {
return responses.get(0);
} else if (count == 2) {
return responses.get(1);
} else {
throw RETRYABLE_ERROR;
}
}
});
when(callable.call(testData.request)).thenReturn(responseStream1);
doNothing().when(attempt).checkCanRetry(any(), eq(RETRYABLE_ERROR));
when(responseStream2.iterator()).thenReturn(ImmutableList.of(responses.get(2)).iterator());
when(callable.call(request2)).thenReturn(responseStream2);
when(stub.runQueryCallable()).thenReturn(callable);
when(ff.getFirestoreStub(any())).thenReturn(stub);
when(ff.getRpcQos(any())).thenReturn(rpcQos);
when(rpcQos.newReadAttempt(any())).thenReturn(attempt);
when(attempt.awaitSafeToProceed(any())).thenReturn(true);
ArgumentCaptor<RunQueryResponse> responsesCaptor =
ArgumentCaptor.forClass(RunQueryResponse.class);
doNothing().when(processContext).output(responsesCaptor.capture());
when(processContext.element()).thenReturn(testData.request);
RunQueryFn fn = new RunQueryFn(clock, ff, rpcQosOptions);
runFunction(fn);
List<RunQueryResponse> allValues = responsesCaptor.getAllValues();
assertEquals(responses, allValues);
verify(callable, times(1)).call(testData.request);
verify(callable, times(1)).call(request2);
verify(attempt, times(3)).recordStreamValue(any());
}
@Test
public void resumeFromLastReadValue_withNoOrderBy() throws Exception {
TestData testData = TestData.fieldEqualsBar().setProjectId(projectId).build();
RunQueryRequest request2 =
RunQueryRequest.newBuilder()
.setParent(String.format("projects/%s/databases/(default)/document", projectId))
.setStructuredQuery(
testData
.request
.getStructuredQuery()
.toBuilder()
.setStartAt(
Cursor.newBuilder()
.setBefore(false)
.addValues(
Value.newBuilder()
.setReferenceValue(testData.response2.getDocument().getName())))
.addOrderBy(
Order.newBuilder()
.setField(FieldReference.newBuilder().setFieldPath("__name__"))
.setDirection(Direction.ASCENDING)))
.build();
List<RunQueryResponse> responses =
ImmutableList.of(testData.response1, testData.response2, testData.response3);
when(responseStream1.iterator())
.thenReturn(
new AbstractIterator<RunQueryResponse>() {
private int invocationCount = 1;
@Override
protected RunQueryResponse computeNext() {
int count = invocationCount++;
if (count == 1) {
return responses.get(0);
} else if (count == 2) {
return responses.get(1);
} else {
throw RETRYABLE_ERROR;
}
}
});
when(callable.call(testData.request)).thenReturn(responseStream1);
doNothing().when(attempt).checkCanRetry(any(), eq(RETRYABLE_ERROR));
when(responseStream2.iterator()).thenReturn(ImmutableList.of(testData.response3).iterator());
when(callable.call(request2)).thenReturn(responseStream2);
when(stub.runQueryCallable()).thenReturn(callable);
when(ff.getFirestoreStub(any())).thenReturn(stub);
when(ff.getRpcQos(any())).thenReturn(rpcQos);
when(rpcQos.newReadAttempt(any())).thenReturn(attempt);
when(attempt.awaitSafeToProceed(any())).thenReturn(true);
ArgumentCaptor<RunQueryResponse> responsesCaptor =
ArgumentCaptor.forClass(RunQueryResponse.class);
doNothing().when(processContext).output(responsesCaptor.capture());
when(processContext.element()).thenReturn(testData.request);
RunQueryFn fn = new RunQueryFn(clock, ff, rpcQosOptions);
runFunction(fn);
List<RunQueryResponse> allValues = responsesCaptor.getAllValues();
assertEquals(responses, allValues);
verify(callable, times(1)).call(testData.request);
verify(callable, times(1)).call(request2);
verify(attempt, times(3)).recordStreamValue(any());
}
@Override
protected V1RpcFnTestCtx newCtx() {
return new V1RpcFnTestCtx() {
@Override
public RunQueryRequest getRequest() {
return RunQueryRequest.newBuilder()
.setParent(String.format("projects/%s/databases/(default)/document", projectId))
.build();
}
@Override
public void mockRpcToCallable(FirestoreStub stub) {
when(stub.runQueryCallable()).thenReturn(callable);
}
@Override
public void whenCallableCall(RunQueryRequest in, Throwable... throwables) {
when(callable.call(in)).thenThrow(throwables);
}
@Override
public void verifyNoInteractionsWithCallable() {
verifyNoMoreInteractions(callable);
}
};
}
@Override
protected RunQueryFn getFn(
JodaClock clock,
FirestoreStatefulComponentFactory firestoreStatefulComponentFactory,
RpcQosOptions rpcQosOptions) {
return new RunQueryFn(clock, firestoreStatefulComponentFactory, rpcQosOptions);
}
private static final class TestData {
private final RunQueryRequest request;
private final RunQueryResponse response1;
private final RunQueryResponse response2;
private final RunQueryResponse response3;
public TestData(String projectId, Function<FieldReference, List<Order>> orderFunction) {
String fieldPath = "foo";
FieldReference foo = FieldReference.newBuilder().setFieldPath(fieldPath).build();
StructuredQuery.Builder builder =
StructuredQuery.newBuilder()
.addFrom(
CollectionSelector.newBuilder()
.setAllDescendants(false)
.setCollectionId("collection"))
.setWhere(
Filter.newBuilder()
.setFieldFilter(
FieldFilter.newBuilder()
.setField(foo)
.setOp(Operator.EQUAL)
.setValue(Value.newBuilder().setStringValue("bar"))
.build()));
orderFunction.apply(foo).forEach(builder::addOrderBy);
request =
RunQueryRequest.newBuilder()
.setParent(String.format("projects/%s/databases/(default)/document", projectId))
.setStructuredQuery(builder)
.build();
response1 = newResponse(fieldPath, 1);
response2 = newResponse(fieldPath, 2);
response3 = newResponse(fieldPath, 3);
}
private static RunQueryResponse newResponse(String field, int docNumber) {
String docId = String.format("doc-%d", docNumber);
return RunQueryResponse.newBuilder()
.setDocument(
Document.newBuilder()
.setName(docId)
.putAllFields(
ImmutableMap.of(field, Value.newBuilder().setStringValue("bar").build()))
.build())
.build();
}
private static Builder fieldEqualsBar() {
return new Builder();
}
@SuppressWarnings("initialization.fields.uninitialized") // fields set via builder methods
private static final class Builder {
private String projectId;
private Function<FieldReference, List<Order>> orderFunction;
public Builder() {
orderFunction = f -> Collections.emptyList();
}
public Builder setProjectId(String projectId) {
this.projectId = projectId;
return this;
}
public Builder setOrderFunction(Function<FieldReference, List<Order>> orderFunction) {
this.orderFunction = orderFunction;
return this;
}
private TestData build() {
return new TestData(
requireNonNull(projectId, "projectId must be non null"),
requireNonNull(orderFunction, "orderFunction must be non null"));
}
}
}
}
| 5,669 |
3,175 | # coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
r"""Creates graphs and summary information from event logs.
This was created to build the graphs and supporting data for the README pages
for each agent. There are a number of FLAGS but the script is not designed to be
fully configurable. Making changes directly in the script is expected for one
off needs.
Usage examples:
python3 graph_builder.py --eventlog=<path to event log 1> \
--eventlog=<path to event log 2>
"""
import csv
import enum
import os
from typing import Dict, List, Optional, Sequence, Tuple, Union
from absl import app
from absl import flags
from absl import logging
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from tf_agents.benchmark import utils
FLAGS = flags.FLAGS
flags.DEFINE_multi_string('eventlog', None,
                          'Directory where eventlog is stored.')
flags.DEFINE_string('output_path', '.',
'Path to store the graph and any other associated data.')
flags.DEFINE_string('output_prefix', 'results', 'Prefix used for artifacts')
flags.DEFINE_string('graph_title', '', 'Title for the graph.')
flags.DEFINE_string('graph_xaxis_title', 'steps', 'Title for the x-axis.')
flags.DEFINE_string('graph_yaxis_title', 'AverageReturn',
'Title for the y-axis or event_name is used.')
flags.DEFINE_string('event_name', 'AverageReturn', 'Name of event to track.')
flags.DEFINE_integer('end_step', None,
'If set, processing of the event log ends on this step.')
flags.DEFINE_boolean('show_graph', False, 'If true, show graph in a window.')
class GraphAggTypes(enum.Enum):
"""Enum of options to aggregate data when generating a graph."""
MEAN = 'mean'
MEDIAN = 'median'
flags.DEFINE_enum_class('graph_agg', GraphAggTypes.MEAN, GraphAggTypes,
'Method to aggregate data for the graph.')
Number = Union[int, float]
class StatsBuilder(object):
"""Builds graphs and other summary information from eventlogs."""
def __init__(self,
eventlog_dirs: List[str],
event_tag: str,
output_path: str = '.',
title: str = '',
xaxis_title: str = 'steps',
yaxis_title: Optional[str] = None,
graph_agg: GraphAggTypes = GraphAggTypes.MEAN,
output_prefix: str = 'results',
end_step: Optional[int] = None,
show_graph: bool = False):
"""Initializes StatsBuilder class.
Args:
eventlog_dirs: List of paths to event log directories to process.
event_tag: Event to extract from the logs.
      output_path: Output path for artifacts, e.g. graphs and csv files.
title: Title of the graph.
xaxis_title: Title for x-axis of the graph. Defaults to "steps".
yaxis_title: Title for the y-axis. Defaults to the `event_tag`.
graph_agg: Aggregation for the graph.
output_prefix: Prefix for the artifact files. Defaults to "results".
end_step: If set, processing of the event log ends on this step.
      show_graph: If true, blocks and shows graph. Only tested on linux.
Raises:
ValueError: Raised if the graph_agg passed is not understood.
"""
self.eventlog_dirs = eventlog_dirs
self.event_tag = event_tag
self.output_path = output_path
self.title = title
self.xaxis_title = xaxis_title
self.show_graph = show_graph
self.end_step = end_step
if graph_agg == GraphAggTypes.MEAN:
self.graph_agg = np.mean
elif graph_agg == GraphAggTypes.MEDIAN:
self.graph_agg = np.median
else:
raise ValueError('Unknown graph_agg:{}'.format(graph_agg))
# Makes the output path absolute for clarity.
self.output_dir = os.path.abspath(output_path)
os.makedirs(self.output_dir, exist_ok=True)
self.output_prefix = output_prefix
if yaxis_title:
self.yaxis_title = yaxis_title
else:
self.yaxis_title = event_tag
def _gather_data(self) -> Tuple[List[Dict[int, np.generic]], List[float]]:
"""Gather data from all of the logs and add to the data_collector list.
Returns:
Tuple of arrays indexed by log file, e.g. data_collector[0] is all of the
values found in the event log for the given event and walltimes[0] is the
total time in minutes it took to get to the end_step in that event log.
"""
data_collector, walltimes = [], []
for eventlog_dir in self.eventlog_dirs:
event_file = utils.find_event_log(eventlog_dir)
logging.info('Processing event file: %s', event_file)
data, total_time = utils.extract_event_log_values(event_file,
self.event_tag,
self.end_step)
walltimes.append(total_time)
data_collector.append(data)
return data_collector, walltimes
  def _align_and_aggregate(
      self, data_collector: List[Dict[int,
                                      np.generic]]) -> List[Sequence[Number]]:
    """Combines data from multiple runs into a pivot-table-like structure.
Uses the first run as the base and aligns the data for each run by rows
with each row representing a step. If a step is not found in a run,
the value -1 is used. No error or warning is thrown or logged.
Args:
data_collector: list of dicts with each dict representing a run most
likely extracted from an event log.
Returns:
2d array with each row representing a step and each run represented as
a column, e.g. step, run 1, run 2, median, and mean.
"""
# Use the first event log's steps as the base and create aggregated data
    # at the step intervals of the first event log.
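    # Worked example (illustrative values): with two runs
    #   run 0 -> {0: 1.0, 100: 2.0} and run 1 -> {0: 1.5}
    # the rows produced are [0, 1.0, 1.5, 1.25, 1.25] and [100, 2.0, -1, 0.5, 0.5],
    # i.e. [step, run values..., median, mean], with -1 filling steps missing from a run.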
base_data = data_collector[0]
agg_data = []
for key, value in sorted(base_data.items()):
entry = [key]
values = [value]
for data in data_collector[1:]:
values.append(data.get(key, -1))
mean_val = np.mean(values)
median_val = np.median(values)
# Combines into step, values 1..n, median, and mean.
values.append(median_val)
values.append(mean_val)
entry += values
agg_data.append(entry)
return agg_data
def _output_csv(self, agg_data: List[Sequence[Number]]):
"""Exports the `agg_data` as a csv.
Args:
agg_data: 2d array of data to export to csv.
"""
# Outputs csv with aggregated data for each step.
csv_path = os.path.join(self.output_path,
self.output_prefix + '_summary.csv')
with open(csv_path, 'w', newline='') as f:
writer = csv.writer(f)
writer.writerows(agg_data)
def _output_graph(self, agg_data: List[Sequence[Number]], num_runs: int):
"""Builds a graph of the results and outputs to a .png.
Args:
agg_data: 2d array of data to be graphed.
num_runs: Number of columns of runs in the data.
"""
# Build data frames
columns = ['step']
columns.extend([str(i) for i in range(num_runs)])
# csv contains aggregate info that will get excluded in the pd.melt.
columns.extend(['median', 'mean'])
print(columns)
df = pd.DataFrame(agg_data, columns=columns)
logging.debug('Dataframes datatypes: %s', df.dtypes)
new_pd = pd.melt(
df,
id_vars='step',
value_vars=list(df.columns[1:num_runs + 1]),
var_name='run',
value_name=self.yaxis_title)
logging.info('DataFrame to graph:\n%s', new_pd)
# Build graph
plt.figure(figsize=(10, 5))
ax = sns.lineplot(
data=new_pd, x='step', y=self.yaxis_title, estimator=self.graph_agg)
ax.set_title(self.title)
ax.set(xlabel=self.xaxis_title)
plt.ticklabel_format(style='plain', axis='x')
graph_path = os.path.join(self.output_path,
self.output_prefix + '_graph.png')
plt.savefig(graph_path)
def build_artifacts(self):
"""Processes the event logs and coordinates creating the artifacts."""
data_collector, _ = self._gather_data()
agg_data = self._align_and_aggregate(data_collector)
self._output_csv(agg_data)
self._output_graph(agg_data, len(data_collector))
if self.show_graph:
plt.show()
def main(_):
logging.set_verbosity(logging.INFO)
stat_builder = StatsBuilder(
FLAGS.eventlog,
FLAGS.event_name,
output_path=FLAGS.output_path,
output_prefix=FLAGS.output_prefix,
title=FLAGS.graph_title,
xaxis_title=FLAGS.graph_xaxis_title,
yaxis_title=FLAGS.graph_yaxis_title,
graph_agg=FLAGS.graph_agg,
end_step=FLAGS.end_step,
show_graph=FLAGS.show_graph)
stat_builder.build_artifacts()
if __name__ == '__main__':
flags.mark_flag_as_required('eventlog')
app.run(main)
| 3,747 |
672 | <filename>velox/tpch/gen/tests/TpchGenTest.cpp
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <folly/init/Init.h>
#include "gtest/gtest.h"
#include "velox/tpch/gen/TpchGen.h"
#include "velox/type/StringView.h"
#include "velox/vector/FlatVector.h"
namespace {
using namespace facebook::velox;
using namespace facebook::velox::tpch;
// Nation tests.
TEST(TpchGenTestNation, default) {
auto rowVector = genTpchNation();
ASSERT_NE(rowVector, nullptr);
EXPECT_EQ(4, rowVector->childrenSize());
EXPECT_EQ(25, rowVector->size());
auto nationKey = rowVector->childAt(0)->asFlatVector<int64_t>();
auto nationName = rowVector->childAt(1)->asFlatVector<StringView>();
EXPECT_EQ(0, nationKey->valueAt(0));
EXPECT_EQ("ALGERIA"_sv, nationName->valueAt(0));
// Ensure we won't crash while accessing any of the columns.
LOG(INFO) << rowVector->toString(0);
EXPECT_EQ(24, nationKey->valueAt(24));
EXPECT_EQ("UNITED STATES"_sv, nationName->valueAt(24));
LOG(INFO) << rowVector->toString(24);
}
// Ensure scale factor doesn't affect Nation table.
TEST(TpchGenTestNation, scaleFactor) {
auto rowVector = genTpchNation(10'000, 0, 1'000);
ASSERT_NE(rowVector, nullptr);
EXPECT_EQ(4, rowVector->childrenSize());
EXPECT_EQ(25, rowVector->size());
}
TEST(TpchGenTestNation, smallBatch) {
auto rowVector = genTpchNation(10);
ASSERT_NE(rowVector, nullptr);
EXPECT_EQ(4, rowVector->childrenSize());
EXPECT_EQ(10, rowVector->size());
auto nationKey = rowVector->childAt(0)->asFlatVector<int64_t>();
EXPECT_EQ(0, nationKey->valueAt(0));
EXPECT_EQ(9, nationKey->valueAt(9));
}
TEST(TpchGenTestNation, smallBatchWithOffset) {
auto rowVector = genTpchNation(10, 5);
ASSERT_NE(rowVector, nullptr);
EXPECT_EQ(4, rowVector->childrenSize());
EXPECT_EQ(10, rowVector->size());
auto nationKey = rowVector->childAt(0)->asFlatVector<int64_t>();
EXPECT_EQ(5, nationKey->valueAt(0));
EXPECT_EQ(14, nationKey->valueAt(9));
}
TEST(TpchGenTestNation, smallBatchPastEnd) {
auto rowVector = genTpchNation(10, 20);
ASSERT_NE(rowVector, nullptr);
EXPECT_EQ(4, rowVector->childrenSize());
EXPECT_EQ(5, rowVector->size());
auto nationKey = rowVector->childAt(0)->asFlatVector<int64_t>();
EXPECT_EQ(20, nationKey->valueAt(0));
EXPECT_EQ(24, nationKey->valueAt(4));
}
TEST(TpchGenTestNation, reproducible) {
auto rowVector1 = genTpchNation();
auto rowVector2 = genTpchNation();
auto rowVector3 = genTpchNation();
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
// Ensure it's also reproducible if we add an offset.
auto rowVector4 = genTpchNation(100, 10);
auto rowVector5 = genTpchNation(100, 10);
for (size_t i = 0; i < rowVector4->size(); ++i) {
ASSERT_TRUE(rowVector4->equalValueAt(rowVector5.get(), i, i));
}
}
// Region.
TEST(TpchGenTestRegion, batches) {
auto rowVector1 = genTpchRegion();
EXPECT_EQ(3, rowVector1->childrenSize());
EXPECT_EQ(5, rowVector1->size());
auto regionKey = rowVector1->childAt(0)->asFlatVector<int64_t>();
auto regionName = rowVector1->childAt(1)->asFlatVector<StringView>();
EXPECT_EQ(0, regionKey->valueAt(0));
EXPECT_EQ("AFRICA"_sv, regionName->valueAt(0));
LOG(INFO) << rowVector1->toString(0);
EXPECT_EQ(4, regionKey->valueAt(4));
EXPECT_EQ("MIDDLE EAST"_sv, regionName->valueAt(4));
LOG(INFO) << rowVector1->toString(4);
}
TEST(TpchGenTestRegion, lastBatch) {
// Ask for 100 regions but there are only 5.
auto rowVector = genTpchRegion(100);
EXPECT_EQ(5, rowVector->size());
  // Scale factor doesn't affect it.
rowVector = genTpchRegion(100, 0, 2);
EXPECT_EQ(5, rowVector->size());
// Zero records if we go beyond the end.
rowVector = genTpchRegion(1'000, 200'000);
EXPECT_EQ(0, rowVector->size());
}
TEST(TpchGenTestRegion, reproducible) {
auto rowVector1 = genTpchRegion(100);
auto rowVector2 = genTpchRegion(100);
auto rowVector3 = genTpchRegion(100);
ASSERT_EQ(5, rowVector1->size());
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
}
// Orders tests.
TEST(TpchGenTestOrders, batches) {
auto rowVector1 = genTpchOrders(10'000);
EXPECT_EQ(9, rowVector1->childrenSize());
EXPECT_EQ(10'000, rowVector1->size());
auto orderKey = rowVector1->childAt(0)->asFlatVector<int64_t>();
auto orderDate = rowVector1->childAt(4)->asFlatVector<StringView>();
EXPECT_EQ(1, orderKey->valueAt(0));
EXPECT_EQ("1996-01-02"_sv, orderDate->valueAt(0));
LOG(INFO) << rowVector1->toString(0);
EXPECT_EQ(40'000, orderKey->valueAt(9999));
EXPECT_EQ("1995-01-30"_sv, orderDate->valueAt(9999));
LOG(INFO) << rowVector1->toString(9999);
// Get second batch.
auto rowVector2 = genTpchOrders(10'000, 10'000);
EXPECT_EQ(9, rowVector2->childrenSize());
EXPECT_EQ(10'000, rowVector2->size());
orderKey = rowVector2->childAt(0)->asFlatVector<int64_t>();
orderDate = rowVector2->childAt(4)->asFlatVector<StringView>();
EXPECT_EQ(40001, orderKey->valueAt(0));
EXPECT_EQ("1996-01-02"_sv, orderDate->valueAt(0));
LOG(INFO) << rowVector2->toString(0);
EXPECT_EQ(80000, orderKey->valueAt(9999));
EXPECT_EQ("1995-01-30"_sv, orderDate->valueAt(9999));
LOG(INFO) << rowVector2->toString(9999);
}
TEST(TpchGenTestOrders, lastBatch) {
// Ask for 200 but there are only 100 left.
auto rowVector = genTpchOrders(200, 1'499'900);
EXPECT_EQ(100, rowVector->size());
// Ensure we get 200 on a larger scale factor.
rowVector = genTpchOrders(200, 1'499'900, 2);
EXPECT_EQ(200, rowVector->size());
}
TEST(TpchGenTestOrders, reproducible) {
{
auto rowVector1 = genTpchOrders(1000);
auto rowVector2 = genTpchOrders(1000);
auto rowVector3 = genTpchOrders(1000);
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
}
// Ensure it's also reproducible if we add an offset.
{
auto rowVector1 = genTpchOrders(1000, 2000);
auto rowVector2 = genTpchOrders(1000, 2000);
auto rowVector3 = genTpchOrders(1000, 2000);
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
}
// Ensure that if the offsets are different, records will be different.
{
auto rowVector1 = genTpchOrders(1000, 2000);
auto rowVector2 = genTpchOrders(1000, 2001);
for (size_t i = 0; i < rowVector2->size(); ++i) {
ASSERT_FALSE(rowVector1->equalValueAt(rowVector2.get(), i, i));
}
}
}
// Lineitem.
TEST(TpchGenTestLineItem, batches) {
size_t ordersMaxSize = 100;
auto rowVector1 = genTpchLineItem(ordersMaxSize);
// Always returns 16 columns, and number of lineItem rows varies from 1 to 7
// per order.
EXPECT_EQ(16, rowVector1->childrenSize());
EXPECT_GE(rowVector1->size(), ordersMaxSize);
EXPECT_LE(rowVector1->size(), ordersMaxSize * 7);
auto orderKey = rowVector1->childAt(0)->asFlatVector<int64_t>();
auto shipDate = rowVector1->childAt(10)->asFlatVector<StringView>();
EXPECT_EQ(1, orderKey->valueAt(0));
EXPECT_EQ("1996-03-13"_sv, shipDate->valueAt(0));
LOG(INFO) << rowVector1->toString(0);
size_t lastRow = rowVector1->size() - 1;
EXPECT_EQ(388, orderKey->valueAt(lastRow));
EXPECT_EQ("1992-12-24"_sv, shipDate->valueAt(lastRow));
LOG(INFO) << rowVector1->toString(lastRow);
// Get next batch.
auto rowVector2 = genTpchLineItem(ordersMaxSize, ordersMaxSize);
EXPECT_EQ(16, rowVector2->childrenSize());
EXPECT_GE(rowVector2->size(), ordersMaxSize);
EXPECT_LE(rowVector2->size(), ordersMaxSize * 7);
orderKey = rowVector2->childAt(0)->asFlatVector<int64_t>();
shipDate = rowVector2->childAt(10)->asFlatVector<StringView>();
EXPECT_EQ(389, orderKey->valueAt(0));
EXPECT_EQ("1996-03-13"_sv, shipDate->valueAt(0));
LOG(INFO) << rowVector2->toString(0);
lastRow = rowVector2->size() - 1;
EXPECT_EQ(800, orderKey->valueAt(lastRow));
EXPECT_EQ("1992-12-24"_sv, shipDate->valueAt(lastRow));
LOG(INFO) << rowVector2->toString(lastRow);
}
TEST(TpchGenTestLineItem, lastBatch) {
// Ask for 1000 lineItems but there are only 10 orders left.
auto rowVector = genTpchLineItem(1000, 1'499'990);
EXPECT_GE(rowVector->size(), 10);
EXPECT_LE(rowVector->size(), 10 * 7);
// Ensure we get 1000 orders on a larger scale factor.
rowVector = genTpchLineItem(1000, 1'499'990, 2);
EXPECT_GE(rowVector->size(), 1000);
EXPECT_LE(rowVector->size(), 1000 * 7);
}
TEST(TpchGenTestLineItem, reproducible) {
{
auto rowVector1 = genTpchLineItem(1000);
auto rowVector2 = genTpchLineItem(1000);
auto rowVector3 = genTpchLineItem(1000);
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
}
// Ensure it's also reproducible if we add an offset.
{
auto rowVector1 = genTpchLineItem(1000, 2000);
auto rowVector2 = genTpchLineItem(1000, 2000);
auto rowVector3 = genTpchLineItem(1000, 2000);
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
}
// Ensure that if the offsets are different, records will be different.
{
auto rowVector1 = genTpchLineItem(1000, 2000);
auto rowVector2 = genTpchLineItem(1000, 2001);
for (size_t i = 0; i < rowVector2->size(); ++i) {
ASSERT_FALSE(rowVector1->equalValueAt(rowVector2.get(), i, i));
}
}
}
// Supplier.
TEST(TpchGenTestSupplier, batches) {
auto rowVector1 = genTpchSupplier(1'000);
EXPECT_EQ(7, rowVector1->childrenSize());
EXPECT_EQ(1'000, rowVector1->size());
auto suppKey = rowVector1->childAt(0)->asFlatVector<int64_t>();
auto nationKey = rowVector1->childAt(3)->asFlatVector<int64_t>();
auto phone = rowVector1->childAt(4)->asFlatVector<StringView>();
EXPECT_EQ(1, suppKey->valueAt(0));
EXPECT_EQ(17, nationKey->valueAt(0));
EXPECT_EQ("27-918-335-1736"_sv, phone->valueAt(0));
LOG(INFO) << rowVector1->toString(0);
EXPECT_EQ(1'000, suppKey->valueAt(999));
EXPECT_EQ(17, nationKey->valueAt(999));
EXPECT_EQ("27-971-649-2792"_sv, phone->valueAt(999));
LOG(INFO) << rowVector1->toString(999);
// Get second batch.
auto rowVector2 = genTpchSupplier(1'000, 1'000);
EXPECT_EQ(7, rowVector2->childrenSize());
EXPECT_EQ(1'000, rowVector2->size());
suppKey = rowVector2->childAt(0)->asFlatVector<int64_t>();
nationKey = rowVector2->childAt(3)->asFlatVector<int64_t>();
phone = rowVector2->childAt(4)->asFlatVector<StringView>();
EXPECT_EQ(1'001, suppKey->valueAt(0));
EXPECT_EQ(17, nationKey->valueAt(0));
EXPECT_EQ("27-918-335-1736"_sv, phone->valueAt(0));
LOG(INFO) << rowVector2->toString(0);
EXPECT_EQ(2'000, suppKey->valueAt(999));
EXPECT_EQ(17, nationKey->valueAt(999));
EXPECT_EQ("27-971-649-2792"_sv, phone->valueAt(999));
LOG(INFO) << rowVector2->toString(999);
}
TEST(TpchGenTestSupplier, lastBatch) {
// Ask for 10'000 suppliers but there are only 10 left.
auto rowVector = genTpchSupplier(10'000, 9'990);
EXPECT_EQ(10, rowVector->size());
// Ensure we get 1000 suppliers on a larger scale factor.
rowVector = genTpchSupplier(1'000, 9'990, 2);
EXPECT_EQ(1'000, rowVector->size());
// Zero records if we go beyond the end.
rowVector = genTpchSupplier(1'000, 10'000);
EXPECT_EQ(0, rowVector->size());
}
TEST(TpchGenTestSupplier, reproducible) {
auto rowVector1 = genTpchSupplier(100);
auto rowVector2 = genTpchSupplier(100);
auto rowVector3 = genTpchSupplier(100);
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
// Ensure it's also reproducible if we add an offset.
auto rowVector4 = genTpchSupplier(100, 10);
auto rowVector5 = genTpchSupplier(100, 10);
for (size_t i = 0; i < rowVector4->size(); ++i) {
ASSERT_TRUE(rowVector4->equalValueAt(rowVector5.get(), i, i));
}
}
// Part.
TEST(TpchGenTestPart, batches) {
auto rowVector1 = genTpchPart(1'000);
EXPECT_EQ(9, rowVector1->childrenSize());
EXPECT_EQ(1'000, rowVector1->size());
auto partKey = rowVector1->childAt(0)->asFlatVector<int64_t>();
auto mfgr = rowVector1->childAt(2)->asFlatVector<StringView>();
EXPECT_EQ(1, partKey->valueAt(0));
EXPECT_EQ("Manufacturer#1"_sv, mfgr->valueAt(0));
EXPECT_EQ(1'000, partKey->valueAt(999));
EXPECT_EQ("Manufacturer#2"_sv, mfgr->valueAt(999));
LOG(INFO) << rowVector1->toString(999);
// Get second batch.
auto rowVector2 = genTpchPart(1'000, 1'000);
EXPECT_EQ(9, rowVector2->childrenSize());
EXPECT_EQ(1'000, rowVector2->size());
partKey = rowVector2->childAt(0)->asFlatVector<int64_t>();
mfgr = rowVector2->childAt(2)->asFlatVector<StringView>();
EXPECT_EQ(1'001, partKey->valueAt(0));
EXPECT_EQ("Manufacturer#1"_sv, mfgr->valueAt(0));
LOG(INFO) << rowVector2->toString(0);
EXPECT_EQ(2'000, partKey->valueAt(999));
EXPECT_EQ("Manufacturer#2"_sv, mfgr->valueAt(999));
LOG(INFO) << rowVector2->toString(999);
}
TEST(TpchGenTestPart, lastBatch) {
// Ask for 10'000 parts but there are only 10 left.
auto rowVector = genTpchPart(10'000, 199'990);
EXPECT_EQ(10, rowVector->size());
// Ensure we get 1000 parts on a larger scale factor.
rowVector = genTpchPart(1'000, 199'990, 2);
EXPECT_EQ(1'000, rowVector->size());
// Zero records if we go beyond the end.
rowVector = genTpchPart(1'000, 200'000);
EXPECT_EQ(0, rowVector->size());
}
TEST(TpchGenTestPart, reproducible) {
auto rowVector1 = genTpchPart(100);
auto rowVector2 = genTpchPart(100);
auto rowVector3 = genTpchPart(100);
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
// Ensure it's also reproducible if we add an offset.
auto rowVector4 = genTpchPart(100, 10);
auto rowVector5 = genTpchPart(100, 10);
for (size_t i = 0; i < rowVector4->size(); ++i) {
ASSERT_TRUE(rowVector4->equalValueAt(rowVector5.get(), i, i));
}
}
// PartSupp.
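// Checks that row idx of the generated vector holds the expected
// (ps_partkey, ps_suppkey) pair in its first two columns.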
bool partSuppCheck(
const RowVectorPtr& vector,
size_t idx,
std::pair<size_t, size_t> expected) {
return (expected.first ==
vector->childAt(0)->asFlatVector<int64_t>()->valueAt(idx)) &&
(expected.second ==
vector->childAt(1)->asFlatVector<int64_t>()->valueAt(idx));
}
TEST(TpchGenTestPartSupp, batches) {
auto rowVector1 = genTpchPartSupp(1'000);
EXPECT_EQ(5, rowVector1->childrenSize());
EXPECT_EQ(1'000, rowVector1->size());
EXPECT_TRUE(partSuppCheck(rowVector1, 0, {1, 2}));
EXPECT_TRUE(partSuppCheck(rowVector1, 1, {1, 2502}));
EXPECT_TRUE(partSuppCheck(rowVector1, 2, {1, 5002}));
EXPECT_TRUE(partSuppCheck(rowVector1, 3, {1, 7502}));
EXPECT_TRUE(partSuppCheck(rowVector1, 4, {2, 3}));
EXPECT_TRUE(partSuppCheck(rowVector1, 5, {2, 2503}));
// Get second batch.
auto rowVector2 = genTpchPartSupp(1'000, 1'000);
EXPECT_EQ(5, rowVector2->childrenSize());
EXPECT_EQ(1'000, rowVector2->size());
EXPECT_TRUE(partSuppCheck(rowVector2, 0, {251, 252}));
EXPECT_TRUE(partSuppCheck(rowVector2, 1, {251, 2752}));
EXPECT_TRUE(partSuppCheck(rowVector2, 2, {251, 5252}));
EXPECT_TRUE(partSuppCheck(rowVector2, 3, {251, 7752}));
EXPECT_TRUE(partSuppCheck(rowVector2, 4, {252, 253}));
EXPECT_TRUE(partSuppCheck(rowVector2, 5, {252, 2753}));
}
// PartSupp records are generated based on mk_part, which generates 4 partsupp
// records at a time. This tests that the 4 record boundary is transparent and
// works as expected.
TEST(TpchGenTestPartSupp, misalignedBatches) {
auto rowVector = genTpchPartSupp(5, 0);
EXPECT_EQ(5, rowVector->size());
EXPECT_TRUE(partSuppCheck(rowVector, 0, {1, 2}));
EXPECT_TRUE(partSuppCheck(rowVector, 1, {1, 2502}));
EXPECT_TRUE(partSuppCheck(rowVector, 2, {1, 5002}));
EXPECT_TRUE(partSuppCheck(rowVector, 3, {1, 7502}));
EXPECT_TRUE(partSuppCheck(rowVector, 4, {2, 3}));
// Rotate.
rowVector = genTpchPartSupp(5, 1);
EXPECT_EQ(5, rowVector->size());
EXPECT_TRUE(partSuppCheck(rowVector, 0, {1, 2502}));
EXPECT_TRUE(partSuppCheck(rowVector, 1, {1, 5002}));
EXPECT_TRUE(partSuppCheck(rowVector, 2, {1, 7502}));
EXPECT_TRUE(partSuppCheck(rowVector, 3, {2, 3}));
EXPECT_TRUE(partSuppCheck(rowVector, 4, {2, 2503}));
// Rotate.
rowVector = genTpchPartSupp(5, 2);
EXPECT_EQ(5, rowVector->size());
EXPECT_TRUE(partSuppCheck(rowVector, 0, {1, 5002}));
EXPECT_TRUE(partSuppCheck(rowVector, 1, {1, 7502}));
EXPECT_TRUE(partSuppCheck(rowVector, 2, {2, 3}));
EXPECT_TRUE(partSuppCheck(rowVector, 3, {2, 2503}));
EXPECT_TRUE(partSuppCheck(rowVector, 4, {2, 5003}));
// Rotate.
rowVector = genTpchPartSupp(5, 3);
EXPECT_EQ(5, rowVector->size());
EXPECT_TRUE(partSuppCheck(rowVector, 0, {1, 7502}));
EXPECT_TRUE(partSuppCheck(rowVector, 1, {2, 3}));
EXPECT_TRUE(partSuppCheck(rowVector, 2, {2, 2503}));
EXPECT_TRUE(partSuppCheck(rowVector, 3, {2, 5003}));
EXPECT_TRUE(partSuppCheck(rowVector, 4, {2, 7503}));
// Rotate. We're aligned to the 4-record window again.
rowVector = genTpchPartSupp(5, 4);
EXPECT_EQ(5, rowVector->size());
EXPECT_TRUE(partSuppCheck(rowVector, 0, {2, 3}));
EXPECT_TRUE(partSuppCheck(rowVector, 1, {2, 2503}));
EXPECT_TRUE(partSuppCheck(rowVector, 2, {2, 5003}));
EXPECT_TRUE(partSuppCheck(rowVector, 3, {2, 7503}));
EXPECT_TRUE(partSuppCheck(rowVector, 4, {3, 4}));
}
TEST(TpchGenTestPartSupp, lastBatch) {
// Ask for 1'000 records but there are only 10 left.
auto rowVector = genTpchPartSupp(1'000, 799'990);
EXPECT_EQ(10, rowVector->size());
// Ensure we get 1'000 records on a larger scale factor.
rowVector = genTpchPartSupp(1'000, 799'990, 2);
EXPECT_EQ(1'000, rowVector->size());
// Zero records if we go beyond the end.
rowVector = genTpchPartSupp(1'000, 800'000);
EXPECT_EQ(0, rowVector->size());
}
TEST(TpchGenTestPartSupp, reproducible) {
auto rowVector1 = genTpchPartSupp(100);
auto rowVector2 = genTpchPartSupp(100);
auto rowVector3 = genTpchPartSupp(100);
EXPECT_EQ(100, rowVector1->size());
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
// Ensure it's also reproducible if we add an offset.
auto rowVector4 = genTpchPartSupp(100, 10);
auto rowVector5 = genTpchPartSupp(100, 10);
EXPECT_EQ(100, rowVector4->size());
for (size_t i = 0; i < rowVector4->size(); ++i) {
ASSERT_TRUE(rowVector4->equalValueAt(rowVector5.get(), i, i));
}
}
// Customer.
TEST(TpchGenTestCustomer, batches) {
auto rowVector1 = genTpchCustomer(1'000);
EXPECT_EQ(8, rowVector1->childrenSize());
EXPECT_EQ(1'000, rowVector1->size());
auto custKey = rowVector1->childAt(0)->asFlatVector<int64_t>();
auto mktSegment = rowVector1->childAt(6)->asFlatVector<StringView>();
EXPECT_EQ(1, custKey->valueAt(0));
EXPECT_EQ("BUILDING"_sv, mktSegment->valueAt(0));
EXPECT_EQ(1'000, custKey->valueAt(999));
EXPECT_EQ("BUILDING"_sv, mktSegment->valueAt(999));
LOG(INFO) << rowVector1->toString(999);
// Get second batch.
auto rowVector2 = genTpchCustomer(1'000, 1'000);
EXPECT_EQ(8, rowVector2->childrenSize());
EXPECT_EQ(1'000, rowVector2->size());
custKey = rowVector2->childAt(0)->asFlatVector<int64_t>();
mktSegment = rowVector2->childAt(6)->asFlatVector<StringView>();
EXPECT_EQ(1'001, custKey->valueAt(0));
EXPECT_EQ("BUILDING"_sv, mktSegment->valueAt(0));
LOG(INFO) << rowVector2->toString(0);
EXPECT_EQ(2'000, custKey->valueAt(999));
EXPECT_EQ("BUILDING"_sv, mktSegment->valueAt(999));
LOG(INFO) << rowVector2->toString(999);
}
TEST(TpchGenTestCustomer, lastBatch) {
// Ask for 10'000 customers but there are only 10 left.
auto rowVector = genTpchCustomer(10'000, 149'990);
EXPECT_EQ(10, rowVector->size());
// Ensure we get 1000 customers on a larger scale factor.
rowVector = genTpchCustomer(1'000, 149'990, 2);
EXPECT_EQ(1'000, rowVector->size());
// Zero records if we go beyond the end.
rowVector = genTpchCustomer(1'000, 200'000);
EXPECT_EQ(0, rowVector->size());
}
TEST(TpchGenTestCustomer, reproducible) {
auto rowVector1 = genTpchCustomer(100);
auto rowVector2 = genTpchCustomer(100);
auto rowVector3 = genTpchCustomer(100);
for (size_t i = 0; i < rowVector1->size(); ++i) {
ASSERT_TRUE(rowVector1->equalValueAt(rowVector2.get(), i, i));
ASSERT_TRUE(rowVector1->equalValueAt(rowVector3.get(), i, i));
}
// Ensure it's also reproducible if we add an offset.
auto rowVector4 = genTpchCustomer(100, 10);
auto rowVector5 = genTpchCustomer(100, 10);
for (size_t i = 0; i < rowVector4->size(); ++i) {
ASSERT_TRUE(rowVector4->equalValueAt(rowVector5.get(), i, i));
}
}
} // namespace
int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv);
folly::init(&argc, &argv, false);
return RUN_ALL_TESTS();
}
| 8,873 |
589 | package rocks.inspectit.agent.java.sdk.opentracing.internal.impl;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import io.opentracing.SpanContext;
import rocks.inspectit.agent.java.sdk.opentracing.internal.util.RandomUtils;
/**
* Implementation of the {@link io.opentracing.SpanContext}. Keeps information about span id, trace
 * id and span parent id. The sampling flag is not included at the moment as it's not explicitly defined
* by the opentracing API.
*
* @author <NAME>
*
*/
public class SpanContextImpl implements SpanContext {
/**
* Span id.
*/
private final long id;
/**
* Trace id.
*/
private final long traceId;
/**
* Parent span id.
*/
private final long parentId;
/**
* Reference type to the parent context.
*/
private final String referenceType;
/**
* Baggage.
*/
private final Map<String, String> baggage;
/**
* Constructor. Use build methods.
*
* @param id
* Unique ID of the span.
* @param traceId
* ID of the trace that span belongs to.
* @param parentId
* ID of the span's parent.
* @param referenceType
* Reference to the parent.
* @param baggage
* Additional baggage
*/
public SpanContextImpl(long id, long traceId, long parentId, String referenceType, Map<String, String> baggage) {
// ids
this.id = id;
this.traceId = traceId;
this.parentId = parentId;
this.referenceType = referenceType;
// baggage handling
if ((null != baggage) && !baggage.isEmpty()) {
this.baggage = new HashMap<String, String>(baggage);
} else {
this.baggage = new HashMap<String, String>(0, 1f);
}
}
/**
* Builds new {@link SpanContextImpl} as a child of given parent context. If parent context is
* <code>null</code> then {@link #build(Map)} will be used and new trace context will be
* created.
* <p>
* Passed baggage will be the baggage of this span context.
*
* @param parent
	 *            Context used to determine which trace/parent the new context belongs to.
* Can be <code>null</code> to denote that the new trace context should be created.
* @param referenceType
* Reference type to the parent context.
* @param baggage
* Context baggage.
* @return {@link SpanContextImpl}. Never <code>null</code>.
*/
public static SpanContextImpl build(SpanContextImpl parent, String referenceType, Map<String, String> baggage) {
if (null == parent) {
return build(baggage);
} else {
long id = RandomUtils.randomLong();
SpanContextImpl spanContextImpl = new SpanContextImpl(id, parent.getTraceId(), parent.getId(), referenceType, baggage);
return spanContextImpl;
}
}
/**
* Builds new {@link SpanContextImpl} with new trace context and given baggage.
*
* @param baggage
* Context baggage.
* @return {@link SpanContextImpl}. Never <code>null</code>.
*/
public static SpanContextImpl build(Map<String, String> baggage) {
long id = RandomUtils.randomLong();
return new SpanContextImpl(id, id, id, null, baggage);
}
/**
* Builds new {@link SpanContextImpl} with new trace context and no baggage.
*
* @return {@link SpanContextImpl}. Never <code>null</code>.
*/
public static SpanContextImpl build() {
long id = RandomUtils.randomLong();
return new SpanContextImpl(id, id, id, null, null);
}
/**
* This method is used when building context that is extracted from the propagation (like HTTP
	 * headers). In this case only the calling span id and trace id are passed over the network with the
	 * baggage. The returned context represents the context of the calling span.
*
* @param passedId
* calling span id
* @param passedTraceId
* calling span trace id
* @param passedBaggage
 * baggage traveling along
* @return Context representing the context of the calling span.
*/
public static SpanContextImpl buildExtractedContext(long passedId, long passedTraceId, Map<String, String> passedBaggage) {
return new SpanContextImpl(passedId, passedTraceId, passedId, null, passedBaggage);
}
/**
* {@inheritDoc}
*/
@Override
public Iterable<Entry<String, String>> baggageItems() {
return Collections.unmodifiableMap(baggage).entrySet();
}
/**
* Sets baggage item.
*
* @param key
* key
* @param value
* value
*/
public void setBaggageItem(String key, String value) {
baggage.put(key, value);
}
/**
* Gets baggage item.
*
* @param key
* key
* @return Baggage item or <code>null</code> if the one does not exist.
*/
public String getBaggageItem(String key) {
return baggage.get(key);
}
/**
* Returns reference type to the parent if the one is set.
*
* @return Returns reference type to the parent if the one is set.
*/
public String getReferenceType() {
return this.referenceType;
}
/**
* Gets {@link #id}.
*
* @return {@link #id}
*/
public long getId() {
return this.id;
}
/**
* Gets {@link #traceId}.
*
* @return {@link #traceId}
*/
public long getTraceId() {
return this.traceId;
}
/**
* Gets {@link #parentId}.
*
* @return {@link #parentId}
*/
public long getParentId() {
return this.parentId;
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = (prime * result) + ((this.baggage == null) ? 0 : this.baggage.hashCode());
result = (prime * result) + (int) (this.id ^ (this.id >>> 32));
result = (prime * result) + (int) (this.parentId ^ (this.parentId >>> 32));
result = (prime * result) + ((this.referenceType == null) ? 0 : this.referenceType.hashCode());
result = (prime * result) + (int) (this.traceId ^ (this.traceId >>> 32));
return result;
}
/**
* {@inheritDoc}
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
SpanContextImpl other = (SpanContextImpl) obj;
if (this.baggage == null) {
if (other.baggage != null) {
return false;
}
} else if (!this.baggage.equals(other.baggage)) {
return false;
}
if (this.id != other.id) {
return false;
}
if (this.parentId != other.parentId) {
return false;
}
if (this.referenceType == null) {
if (other.referenceType != null) {
return false;
}
} else if (!this.referenceType.equals(other.referenceType)) {
return false;
}
if (this.traceId != other.traceId) {
return false;
}
return true;
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return "SpanContextImpl [id=" + this.id + ", traceId=" + this.traceId + ", parentId=" + this.parentId + ", referenceType=" + this.referenceType + ", baggage=" + this.baggage + "]";
}
} | 2,496 |
583 | //
// PlayerViewModel.h
// KSYPlayerDemo
//
// Created by devcdl on 2017/8/23.
// Copyright © 2017 kingsoft. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "VideoListViewModel.h"
@class VodPlayOperationView, VideoContainerView, LivePlayController, VodPlayController;
@interface PlayerViewModel : NSObject
@property (nonatomic, strong) VideoModel *playingVideoModel;
@property (nonatomic, strong) VideoListViewModel *videoListViewModel;
@property (nonatomic, weak) UIViewController *owner;
@property (nonatomic, assign) NSInteger currPlayingIndex;
- (instancetype)initWithPlayingVideoModel:(VideoModel *)playingVideoModel
videoListViewModel:(VideoListViewModel *)videoListViewModel
selectedIndex:(NSInteger)selectedIndex;
- (void)fullScreenHandlerForPlayController:(UIViewController *)playController
isFullScreen:(BOOL) isFullScreen;
- (void)fullScreenHandlerForLivePlayController:(LivePlayController *)playController
isFullScreen:(BOOL) isFullScreen;
- (void)fullScreenButtonClickedHandlerForVodPlayController:(VodPlayController *)vodPlayController isFullScreen:(BOOL)isFullScreen;
- (VideoModel *)nextVideoModel;
@end
| 460 |
912 | <reponame>meson800/onedrive-sdk-python<gh_stars>100-1000
# -*- coding: utf-8 -*-
'''
# Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
#
# This file was generated and any changes will be overwritten.
'''
from __future__ import unicode_literals
from ..collection_base import CollectionPageBase
from ..model.item import Item
class SharedCollectionPage(CollectionPageBase):
def __getitem__(self, index):
"""Get the Item at the index specified
Args:
index (int): The index of the item to get from the SharedCollectionPage
Returns:
:class:`Item<onedrivesdk.model.item.Item>`:
The Item at the index
"""
return Item(self._prop_list[index])
def shared(self):
"""Get a generator of Item within the SharedCollectionPage
Yields:
:class:`Item<onedrivesdk.model.item.Item>`:
The next Item in the collection
"""
for item in self._prop_list:
yield Item(item)
| 450 |
681 | <reponame>nahimr/REDRIVER2
#ifndef SEARCH_H
#define SEARCH_H
extern void InitTextureNames(); // 0x0001BA88
#endif
| 49 |
1,338 | /*
* Copyright 2004-2007 Haiku, Inc. All rights reserved.
* Distributed under the terms of the MIT License.
*
* Authors:
* <NAME>
*/
#ifndef _DIRECTORY_FILE_PANEL_H
#define _DIRECTORY_FILE_PANEL_H
#include <FilePanel.h>
#include <Button.h>
static const uint32 MSG_DIRECTORY = 'mDIR';
class DirectoryRefFilter : public BRefFilter {
public:
DirectoryRefFilter();
bool Filter(const entry_ref* ref, BNode* node,
struct stat_beos* stat,
const char* mimeType);
};
class DirectoryFilePanel : public BFilePanel {
public:
DirectoryFilePanel(
file_panel_mode mode = B_OPEN_PANEL,
BMessenger* target = NULL,
const entry_ref* startDirectory = NULL,
uint32 nodeFlavors = 0,
bool allowMultipleSelection = true,
BMessage* message = NULL,
BRefFilter* filter = NULL,
bool modal = false,
bool hideWhenDone = true);
virtual ~DirectoryFilePanel() {};
virtual void SelectionChanged();
virtual void Show();
// overrides non-virtual BFilePanel::Show()
protected:
BButton* fCurrentButton;
};
#endif // _DIRECTORY_FILE_PANEL_H
| 489 |
1,279 | <gh_stars>1000+
from unittest import TestCase
from samtranslator.model.eventsources.pull import SelfManagedKafka
from samtranslator.model.exceptions import InvalidEventException
class SelfManagedKafkaEventSource(TestCase):
def setUp(self):
self.logical_id = "SelfManagedKafkaEvent"
self.kafka_event_source = SelfManagedKafka(self.logical_id)
def test_get_policy_arn(self):
arn = self.kafka_event_source.get_policy_arn()
expected_arn = None
self.assertEqual(arn, expected_arn)
def test_get_policy_statements(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.BatchSize = 1
policy_statements = self.kafka_event_source.get_policy_statements()
expected_policy_document = [
{
"PolicyDocument": {
"Statement": [
{"Action": ["secretsmanager:GetSecretValue"], "Effect": "Allow", "Resource": "SECRET_URI"},
{
"Action": [
"ec2:CreateNetworkInterface",
"ec2:DescribeNetworkInterfaces",
"ec2:DeleteNetworkInterface",
"ec2:DescribeVpcs",
"ec2:DescribeSubnets",
"ec2:DescribeSecurityGroups",
],
"Effect": "Allow",
"Resource": "*",
},
],
"Version": "2012-10-17",
},
"PolicyName": "SelfManagedKafkaExecutionRolePolicy",
}
]
self.assertEqual(policy_statements, expected_policy_document)
def test_get_policy_statements_with_only_auth_mechanism(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "BASIC_AUTH", "URI": "SECRET_URI"},
]
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.BatchSize = 1
policy_statements = self.kafka_event_source.get_policy_statements()
expected_policy_document = [
{
"PolicyDocument": {
"Statement": [
{"Action": ["secretsmanager:GetSecretValue"], "Effect": "Allow", "Resource": "SECRET_URI"},
],
"Version": "2012-10-17",
},
"PolicyName": "SelfManagedKafkaExecutionRolePolicy",
}
]
self.assertEqual(policy_statements, expected_policy_document)
def test_get_policy_statements_with_only_vpc_config(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.BatchSize = 1
policy_statements = self.kafka_event_source.get_policy_statements()
expected_policy_document = [
{
"PolicyDocument": {
"Statement": [
{
"Action": [
"ec2:CreateNetworkInterface",
"ec2:DescribeNetworkInterfaces",
"ec2:DeleteNetworkInterface",
"ec2:DescribeVpcs",
"ec2:DescribeSubnets",
"ec2:DescribeSecurityGroups",
],
"Effect": "Allow",
"Resource": "*",
},
],
"Version": "2012-10-17",
},
"PolicyName": "SelfManagedKafkaExecutionRolePolicy",
}
]
self.assertEqual(policy_statements, expected_policy_document)
def test_get_policy_statements_with_secrets_manager_kms_key_id(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.BatchSize = 1
self.kafka_event_source.SecretsManagerKmsKeyId = "SECRET_KEY"
policy_statements = self.kafka_event_source.get_policy_statements()
expected_policy_document = [
{
"PolicyDocument": {
"Statement": [
{"Action": ["secretsmanager:GetSecretValue"], "Effect": "Allow", "Resource": "SECRET_URI"},
{
"Action": [
"ec2:CreateNetworkInterface",
"ec2:DescribeNetworkInterfaces",
"ec2:DeleteNetworkInterface",
"ec2:DescribeVpcs",
"ec2:DescribeSubnets",
"ec2:DescribeSecurityGroups",
],
"Effect": "Allow",
"Resource": "*",
},
{
"Action": ["kms:Decrypt"],
"Effect": "Allow",
"Resource": {
"Fn::Sub": "arn:${AWS::Partition}:kms:${AWS::Region}:${AWS::AccountId}:key/"
+ self.kafka_event_source.SecretsManagerKmsKeyId
},
},
],
"Version": "2012-10-17",
},
"PolicyName": "SelfManagedKafkaExecutionRolePolicy",
}
]
self.assertEqual(policy_statements, expected_policy_document)
def test_must_raise_for_missing_topics(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.BatchSize = 1
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_empty_topics(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.BatchSize = 1
self.kafka_event_source.Topics = []
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_multiple_topics(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Topics = ["Topics1", "Topics2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.BatchSize = 1
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_missing_endpoints(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.Enabled = True
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.BatchSize = 1
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_empty_bootstrap_server(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.KafkaBootstrapServers = []
self.kafka_event_source.Enabled = True
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.BatchSize = 1
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_missing_vpc_subnet(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.BatchSize = 1
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_missing_vpc_security_group(self):
self.kafka_event_source.SourceAccessConfigurations = [
{"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
{"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.BatchSize = 1
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_missing_source_access_configurations(self):
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.BatchSize = 1
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_unknown_source_access_configurations_type(self):
test_credentials = [
[{"Type": "BASIC_AUT", "URI": "SECRET_URI"}],
[{"Type": "SASL_SCRAM_256_AUT", "URI": "SECRET_URI"}],
[{"Type": None, "URI": "SECRET_URI"}],
[{"Type": "VPC_SUB", "URI": "SECRET_URI"}, {"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"}],
[{"Type": "VPC_SUBNET", "URI": "SECRET_URI"}, {"Type": None, "URI": None}],
]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.BatchSize = 1
for config in test_credentials:
self.kafka_event_source.SourceAccessConfigurations = config
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
def test_must_raise_for_wrong_source_access_configurations_uri(self):
test_credentials = [
[{"Type": "BASIC_AUTH", "URI": 1}],
[{"Type": "SASL_SCRAM_256_AUTH", "URI": 1}],
[{"Type": "SASL_SCRAM_512_AUTH", "URI": 1}],
[{"Type": "VPC_SUBNET", "URI": None}, {"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"}],
[{"Type": "VPC_SUBNET", "URI": "SECRET_URI"}, {"Type": "VPC_SECURITY_GROUP", "URI": None}],
]
self.kafka_event_source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
self.kafka_event_source.Enabled = True
self.kafka_event_source.Topics = ["Topics"]
self.kafka_event_source.BatchSize = 1
for config in test_credentials:
self.kafka_event_source.SourceAccessConfigurations = config
with self.assertRaises(InvalidEventException):
self.kafka_event_source.get_policy_statements()
| 7,024 |
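
A minimal usage sketch of the event source exercised by the tests above, assuming the aws-sam-translator package is installed. The logical id, "SECRET_URI", the single topic and the bootstrap endpoints are placeholder values copied from the tests, not real resources.

from samtranslator.model.eventsources.pull import SelfManagedKafka

def build_kafka_policy_statements():
    source = SelfManagedKafka("SelfManagedKafkaEvent")
    # One SASL auth entry plus the VPC subnet/security-group pair, mirroring
    # test_get_policy_statements above.
    source.SourceAccessConfigurations = [
        {"Type": "SASL_SCRAM_256_AUTH", "URI": "SECRET_URI"},
        {"Type": "VPC_SUBNET", "URI": "SECRET_URI"},
        {"Type": "VPC_SECURITY_GROUP", "URI": "SECRET_URI"},
    ]
    source.Topics = ["Topics"]  # exactly one topic, as the tests require
    source.KafkaBootstrapServers = ["endpoint1", "endpoint2"]
    source.Enabled = True
    source.BatchSize = 1
    # Returns the IAM policy document list asserted above: the
    # secretsmanager:GetSecretValue statement plus the ec2 networking actions.
    return source.get_policy_statements()

if __name__ == "__main__":
    print(build_kafka_policy_statements())
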
892 | <reponame>westonsteimel/advisory-database-github
{
"schema_version": "1.2.0",
"id": "GHSA-74q4-q7vc-p889",
"modified": "2022-05-01T07:00:06Z",
"published": "2022-05-01T07:00:06Z",
"aliases": [
"CVE-2006-2537"
],
"details": "Multiple format string vulnerabilities in (a) OpenBOR 2.0046 and earlier, (b) Beats of Rage (BOR) 1.0029 and earlier, and (c) Horizontal Shooter BOR (HOR) 2.0000 and earlier allow remote attackers to execute code via format string specifiers in configurations used in various mod files, as demonstrated by the (1) music identifier in data/scenes/intro.txt, which is not properly handled in the update function, and (2) background identifier in data/easy/1aeasy.txt, which is not properly handled in the shutdown function.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2006-2537"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/26582"
},
{
"type": "WEB",
"url": "http://aluigi.altervista.org/adv/borfs-adv.txt"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/20173"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/20174"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/20181"
},
{
"type": "WEB",
"url": "http://www.osvdb.org/25687"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/18088"
},
{
"type": "WEB",
"url": "http://www.vupen.com/english/advisories/2006/1901"
},
{
"type": "WEB",
"url": "http://www.vupen.com/english/advisories/2006/1902"
},
{
"type": "WEB",
"url": "http://www.vupen.com/english/advisories/2006/1903"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 891 |
7,482 | /*
* Copyright (c) 2006-2021, RT-Thread Development Team
*
* SPDX-License-Identifier: Apache-2.0
*
* Change Logs:
* Date Author Notes
* 2020-04-29 supperthomas first version
*
*/
#include <rtthread.h>
#include <rthw.h>
#include <nrfx_systick.h>
#include "board.h"
#include "drv_uart.h"
#include <nrfx_clock.h>
/**
* This is the timer interrupt service routine.
*
*/
void SysTick_Handler(void)
{
/* enter interrupt */
rt_interrupt_enter();
rt_tick_increase();
/* leave interrupt */
rt_interrupt_leave();
}
static void clk_event_handler(nrfx_clock_evt_type_t event){}
void SysTick_Configuration(void)
{
nrfx_clock_init(clk_event_handler);
nrfx_clock_enable();
nrfx_clock_lfclk_start();
/* Set interrupt priority */
NVIC_SetPriority(SysTick_IRQn, 0xf);
/* Configure SysTick to interrupt at the requested rate. */
nrf_systick_load_set(SystemCoreClock / RT_TICK_PER_SECOND);
nrf_systick_val_clear();
nrf_systick_csr_set(NRF_SYSTICK_CSR_CLKSOURCE_CPU | NRF_SYSTICK_CSR_TICKINT_ENABLE
| NRF_SYSTICK_CSR_ENABLE);
}
void rt_hw_board_init(void)
{
rt_hw_interrupt_enable(0);
SysTick_Configuration();
#if defined(RT_USING_HEAP)
rt_system_heap_init((void *)HEAP_BEGIN, (void *)HEAP_END);
#endif
#ifdef RT_USING_SERIAL
rt_hw_uart_init();
#endif
#ifdef RT_USING_CONSOLE
rt_console_set_device(RT_CONSOLE_DEVICE_NAME);
#endif
#ifdef RT_USING_COMPONENTS_INIT
rt_components_board_init();
#endif
#ifdef BSP_USING_SOFTDEVICE
extern uint32_t Image$$RW_IRAM1$$Base;
uint32_t const *const m_ram_start = &Image$$RW_IRAM1$$Base;
if ((uint32_t)m_ram_start == 0x20000000)
{
rt_kprintf("\r\n using softdevice the RAM couldn't be %p,please use the templete from package\r\n", m_ram_start);
while (1);
}
else
{
rt_kprintf("\r\n using softdevice the RAM at %p\r\n", m_ram_start);
}
#endif
}
| 902 |
456 | <filename>lib/djvUIComponents/SearchBox.cpp
// SPDX-License-Identifier: BSD-3-Clause
// Copyright (c) 2004-2020 <NAME>
// All rights reserved.
#include <djvUIComponents/SearchBox.h>
#include <djvUI/DrawUtil.h>
#include <djvUI/IconWidget.h>
#include <djvUI/LineEditBase.h>
#include <djvUI/RowLayout.h>
#include <djvUI/SoloLayout.h>
#include <djvUI/ToolButton.h>
#include <djvRender2D/Render.h>
using namespace djv::Core;
namespace djv
{
namespace UIComponents
{
struct SearchBox::Private
{
std::shared_ptr<UI::Text::LineEditBase> lineEditBase;
std::shared_ptr<UI::IconWidget> searchIconWidget;
std::shared_ptr<UI::ToolButton> clearButton;
std::shared_ptr<UI::SoloLayout> soloLayout;
std::shared_ptr<UI::HorizontalLayout> layout;
std::function<void(const std::string &)> filterCallback;
};
void SearchBox::_init(const std::shared_ptr<System::Context>& context)
{
Widget::_init(context);
DJV_PRIVATE_PTR();
setClassName("djv::UIComponents::SearchBox");
setVAlign(UI::VAlign::Center);
p.lineEditBase = UI::Text::LineEditBase::create(context);
p.lineEditBase->setTextSizeRole(UI::MetricsRole::SearchBox);
p.searchIconWidget = UI::IconWidget::create(context);
p.searchIconWidget->setIcon("djvIconSearch");
p.searchIconWidget->setIconColorRole(UI::ColorRole::Foreground);
p.clearButton = UI::ToolButton::create(context);
p.clearButton->setIcon("djvIconClear");
p.clearButton->setInsideMargin(UI::MetricsRole::None);
p.clearButton->setTextFocusEnabled(false);
p.clearButton->setBackgroundColorRole(UI::ColorRole::None);
p.layout = UI::HorizontalLayout::create(context);
p.layout->setSpacing(UI::MetricsRole::None);
p.layout->setBackgroundColorRole(UI::ColorRole::Trough);
p.layout->addChild(p.lineEditBase);
p.layout->setStretch(p.lineEditBase);
p.soloLayout = UI::SoloLayout::create(context);
p.soloLayout->addChild(p.searchIconWidget);
p.soloLayout->addChild(p.clearButton);
p.layout->addChild(p.soloLayout);
addChild(p.layout);
_widgetUpdate();
auto weak = std::weak_ptr<SearchBox>(std::dynamic_pointer_cast<SearchBox>(shared_from_this()));
p.lineEditBase->setFocusCallback(
[weak](bool value)
{
if (auto widget = weak.lock())
{
widget->_redraw();
}
});
p.lineEditBase->setTextChangedCallback(
[weak](const std::string & value)
{
if (auto widget = weak.lock())
{
widget->_doFilterCallback();
widget->_widgetUpdate();
}
});
p.clearButton->setClickedCallback(
[weak]
{
if (auto widget = weak.lock())
{
widget->clearFilter();
widget->_doFilterCallback();
widget->_widgetUpdate();
}
});
}
SearchBox::SearchBox() :
_p(new Private)
{}
SearchBox::~SearchBox()
{}
std::shared_ptr<SearchBox> SearchBox::create(const std::shared_ptr<System::Context>& context)
{
auto out = std::shared_ptr<SearchBox>(new SearchBox);
out->_init(context);
return out;
}
const std::string & SearchBox::getFilter() const
{
return _p->lineEditBase->getText();
}
void SearchBox::setFilter(const std::string & value)
{
_p->lineEditBase->setText(value);
}
void SearchBox::clearFilter()
{
_p->lineEditBase->setText(std::string());
}
void SearchBox::setFilterCallback(const std::function<void(const std::string &)> & value)
{
_p->filterCallback = value;
}
float SearchBox::getHeightForWidth(float value) const
{
DJV_PRIVATE_PTR();
const auto& style = _getStyle();
const glm::vec2 m = getMargin().getSize(style);
const float b = style->getMetric(UI::MetricsRole::Border);
const float btf = style->getMetric(UI::MetricsRole::BorderTextFocus);
float size = value - m.x - btf * 2.F;
float out = p.layout->getHeightForWidth(size);
return out + b * 2.F + btf * 2.F + m.y;
}
void SearchBox::_preLayoutEvent(System::Event::PreLayout& event)
{
DJV_PRIVATE_PTR();
const auto& style = _getStyle();
const float b = style->getMetric(UI::MetricsRole::Border);
const float btf = style->getMetric(UI::MetricsRole::BorderTextFocus);
glm::vec2 size = p.layout->getMinimumSize();
_setMinimumSize(size + b * 2.F + btf * 2.F + getMargin().getSize(style));
}
void SearchBox::_layoutEvent(System::Event::Layout& event)
{
DJV_PRIVATE_PTR();
const auto& style = _getStyle();
const float b = style->getMetric(UI::MetricsRole::Border);
const float btf = style->getMetric(UI::MetricsRole::BorderTextFocus);
const Math::BBox2f g = getMargin().bbox(getGeometry(), style);
const Math::BBox2f g2 = g.margin(-b - btf);
_p->layout->setGeometry(g2);
}
void SearchBox::_paintEvent(System::Event::Paint& event)
{
Widget::_paintEvent(event);
DJV_PRIVATE_PTR();
const auto& style = _getStyle();
const Math::BBox2f g = getMargin().bbox(getGeometry(), style);
const float b = style->getMetric(UI::MetricsRole::Border);
const float btf = style->getMetric(UI::MetricsRole::BorderTextFocus);
const auto& render = _getRender();
if (p.lineEditBase->hasTextFocus())
{
render->setFillColor(style->getColor(UI::ColorRole::TextFocus));
UI::drawBorder(render, g, btf);
}
Math::BBox2f g2 = g.margin(-btf);
render->setFillColor(style->getColor(UI::ColorRole::Border));
UI::drawBorder(render, g2, b);
}
void SearchBox::_doFilterCallback()
{
DJV_PRIVATE_PTR();
if (p.filterCallback)
{
p.filterCallback(p.lineEditBase->getText());
}
}
void SearchBox::_widgetUpdate()
{
DJV_PRIVATE_PTR();
std::shared_ptr<Widget> currentWidget = p.searchIconWidget;
if (!p.lineEditBase->getText().empty())
{
currentWidget = p.clearButton;
}
p.soloLayout->setCurrentWidget(currentWidget);
}
} // namespace UIComponents
} // namespace djv
| 3,623 |
322 | <gh_stars>100-1000
#pragma once
#include <immintrin.h>
#include <ATen/ATen.h>
#include <ATen/ExpandUtils.h>
#include <ATen/Parallel.h>
#include <c10/util/SmallVector.h>
#include <limits>
namespace torch_ipex {
namespace cpu {
namespace kernel {
namespace vec {
namespace vec512 {
#define VEC_512_FP32_CAP (16)
#define VEC_512_FP32_BYTES_WIDTH (64)
#define FP32_BYTES_WIDTH (4)
inline std::vector<int64_t> _adjust_strides(
const at::Tensor& src,
std::vector<int64_t>& infered_size) {
  // We do NOT support broadcasting the last dim, which means last_dim = 1
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(src.stride(src.ndimension() - 1) == 1);
auto original_shape = src.sizes();
auto original_stride = src.strides();
auto offset = infered_size.size() - original_shape.size();
std::vector<int64_t> adjusted_stride;
if (offset > 0)
adjusted_stride.resize(infered_size.size(), 0);
else
adjusted_stride.resize(infered_size.size());
for (size_t i = 0; i < original_shape.size(); i++) {
// see NOTE: [Computing output strides]
if (original_shape[i] == 1 && infered_size[offset + i] != 1) {
adjusted_stride[offset + i] = 0;
} else {
adjusted_stride[offset + i] = original_stride[i];
}
}
return adjusted_stride;
}
static inline uint32_t _cal_head_padding(const float* addr) {
uint64_t time_64 = ((uint64_t)addr) / VEC_512_FP32_BYTES_WIDTH;
return (((uint64_t)addr) - time_64 * VEC_512_FP32_BYTES_WIDTH) /
FP32_BYTES_WIDTH;
}
static inline uint32_t _cal_valid_data_num(
const uint32_t& head_padding,
const uint32_t& dim_size) {
return std::min(VEC_512_FP32_CAP - head_padding, (uint32_t)dim_size);
}
static inline uint32_t _cal_tail_padding(
const uint32_t& dim_size,
const uint32_t& head_padding,
const uint32_t& valid_data_num) {
return VEC_512_FP32_CAP - head_padding - valid_data_num;
}
/**
 * Check whether the start address is 64-byte aligned. If it is not, we pad the
 * head to reach 64-byte alignment and then pad the tail to fill the 64-byte
 * vector.
*/
static inline bool _padding_alignment(
const float* a,
const uint32_t& size,
uint32_t& valid_data_num,
__mmask16& loading_mask) {
uint32_t head_padding = _cal_head_padding(a);
if (head_padding == 0)
return false;
valid_data_num = _cal_valid_data_num(head_padding, (uint32_t)size);
uint32_t tail_padding = _cal_tail_padding(size, head_padding, valid_data_num);
loading_mask = ((1 << valid_data_num) - 1) << tail_padding;
return true;
}
inline int64_t _calc_element_offset(
const int64_t& outer_loop_idx,
const std::vector<int64_t>& outer_loop_size,
const std::vector<int64_t>& outer_loop_strides) {
int64_t __outer_loop_idx = outer_loop_idx;
int64_t b_offset = 0;
for (int j = 0; j < outer_loop_size.size(); j++) {
auto idx = __outer_loop_idx / outer_loop_size[j];
__outer_loop_idx -= idx * outer_loop_size[j];
    // The stride could be any number if the dim equals 1
b_offset += idx * outer_loop_strides[j];
}
return b_offset;
}
inline __m512 _dil_exp_kernel(__m512 vec_src) {
static __m512 vec_factorial_1 =
_mm512_set1_ps(0.999999701f); // 1/factorial(1)
static __m512 vec_factorial_2 =
_mm512_set1_ps(0.499991506f); // 1/factorial(2)
static __m512 vec_factorial_3 =
_mm512_set1_ps(0.166676521f); // 1/factorial(3)
static __m512 vec_factorial_4 =
_mm512_set1_ps(0.0418978221f); // 1/factorial(4)
static __m512 vec_factorial_5 =
_mm512_set1_ps(0.00828929059f); // 1/factorial(5)
static __m512 vec_exp_log2ef =
(__m512)_mm512_set1_epi32(0x3fb8aa3b); // log2(e)
static __m512 vec_half = _mm512_set1_ps(0.5f);
static __m512 vec_one = _mm512_set1_ps(1.f);
static __m512 vec_zero = _mm512_set1_ps(0.f);
static __m512 vec_two = _mm512_set1_ps(2.f);
static __m512 vec_ln2f = (__m512)_mm512_set1_epi32(0x3f317218); // ln(2)
static __m512 vec_ln_flt_min = (__m512)_mm512_set1_epi32(0xc2aeac50);
static __m512 vec_ln_flt_max = (__m512)_mm512_set1_epi32(0x42b17218);
static __m512i vec_127 = _mm512_set1_epi32(0x0000007f);
static int n_mantissa_bits = 23;
// exp(x) =
// = exp(n * ln(2) + r) // divide x by ln(2) and get quot and rem
// = 2^n * exp(r) // simplify the exp(n*ln(2)) expression
auto less_ln_flt_min_mask =
_mm512_cmp_ps_mask(vec_src, vec_ln_flt_min, 1 /*_CMP_LT_OS*/);
vec_src = _mm512_min_ps(vec_src, vec_ln_flt_max);
vec_src = _mm512_max_ps(vec_src, vec_ln_flt_min);
// fx = floorf(x * log2ef + 0.5)
auto vec_fx = _mm512_fmadd_ps(vec_src, vec_exp_log2ef, vec_half);
auto vec_fx_i = _mm512_cvt_roundps_epi32(
vec_fx, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
vec_fx = _mm512_cvtepi32_ps(vec_fx_i);
// x = x - fx * ln2
auto vec_exp_poly = _mm512_fnmadd_ps(vec_fx, vec_ln2f, vec_src);
// compute polynomial
auto vec_res =
_mm512_fmadd_ps(vec_exp_poly, vec_factorial_5, vec_factorial_4);
vec_res = _mm512_fmadd_ps(vec_exp_poly, vec_res, vec_factorial_3);
vec_res = _mm512_fmadd_ps(vec_exp_poly, vec_res, vec_factorial_2);
vec_res = _mm512_fmadd_ps(vec_exp_poly, vec_res, vec_factorial_1);
vec_res = _mm512_fmadd_ps(vec_exp_poly, vec_res, vec_one);
// compute 2^(n-1)
auto vec_exp_number = _mm512_sub_ps(vec_fx, vec_one);
auto vec_exp_number_i = _mm512_cvtps_epi32(vec_exp_number);
auto vec_two_pow_n_i = _mm512_add_epi32(vec_exp_number_i, vec_127);
vec_two_pow_n_i = _mm512_slli_epi32(vec_two_pow_n_i, n_mantissa_bits);
auto vec_two_pow_n = (__m512)vec_two_pow_n_i;
vec_two_pow_n =
_mm512_mask_blend_ps(less_ln_flt_min_mask, vec_two_pow_n, vec_zero);
// y = y * 2^n
vec_res = _mm512_mul_ps(vec_res, vec_two_pow_n);
vec_res = _mm512_mul_ps(vec_res, vec_two);
return vec_res;
}
inline void _dil_add_reduce_max_fusion_kernel(
const float* a,
const float* b,
const int& size,
float* out,
float& max) {
auto vec_ps_min = _mm512_set1_ps(std::numeric_limits<float>::min());
auto vec_ps_min_tail = _mm512_set1_ps(std::numeric_limits<float>::min());
auto vec_a = vec_ps_min;
auto vec_b = vec_ps_min;
auto vec_out = vec_ps_min;
  // Check whether the start address is 64-byte aligned. If it is not, pad the
  // head to reach 64-byte alignment and then pad the tail to fill the 64-byte
  // vector.
uint32_t valid_data_num = 0;
__mmask16 loading_mask = {};
if (_padding_alignment(a, size, valid_data_num, loading_mask)) {
vec_a = _mm512_maskz_expandloadu_ps(loading_mask, a);
vec_b = _mm512_mask_expandloadu_ps(vec_ps_min, loading_mask, b);
vec_out = _mm512_add_ps(vec_a, vec_b);
vec_ps_min = _mm512_max_ps(vec_ps_min, vec_out);
_mm512_mask_compressstoreu_ps(out, loading_mask, vec_out);
}
int i = valid_data_num;
for (; i <= size - 16; i += 16) {
vec_a = _mm512_load_ps(a + i);
vec_b = _mm512_load_ps(b + i);
vec_out = _mm512_add_ps(vec_a, vec_b);
vec_ps_min = _mm512_max_ps(vec_ps_min, vec_out);
_mm512_store_ps(out + i, vec_out);
}
if (i < size) {
__mmask16 mask = (1 << (size - i)) - 1;
vec_a = _mm512_mask_load_ps(vec_ps_min_tail, mask, a + i);
vec_b = _mm512_maskz_load_ps(mask, b + i);
vec_out = _mm512_add_ps(vec_a, vec_b);
vec_ps_min = _mm512_max_ps(vec_out, vec_ps_min);
_mm512_mask_store_ps(out + i, mask, vec_out);
}
// NOTE: _mm512_reduce_max_ps is sequence instruction
max = _mm512_reduce_max_ps(vec_ps_min);
}
inline void _dil_exp_reduce_sum_fusion_kernel(
const float* a,
const int& size,
float* out,
float& val) {
static auto vec_zero = _mm512_set1_ps(0.f);
auto vec_max = _mm512_set1_ps(val);
auto vec_sum = _mm512_set1_ps(0.f);
__m512 vec_a = {};
__m512 vec_out = {};
// The start address is not aligned
uint32_t valid_data_num = 0;
__mmask16 loading_mask = {};
if (_padding_alignment(a, size, valid_data_num, loading_mask)) {
vec_a = _mm512_maskz_expandloadu_ps(loading_mask, a);
vec_out = _mm512_sub_ps(vec_a, vec_max);
vec_out = _dil_exp_kernel(vec_out);
vec_sum = _mm512_mask_add_ps(vec_sum, loading_mask, vec_sum, vec_out);
_mm512_mask_compressstoreu_ps(out, loading_mask, vec_out);
}
int i = valid_data_num;
for (; i <= size - 16; i += 16) {
vec_a = _mm512_load_ps(a + i);
vec_out = _mm512_sub_ps(vec_a, vec_max);
vec_out = _dil_exp_kernel(vec_out);
vec_sum = _mm512_add_ps(vec_sum, vec_out);
_mm512_store_ps(out + i, vec_out);
}
if (i < size) {
__mmask16 mask = (1 << (size - i)) - 1;
auto vec_a = _mm512_mask_load_ps(vec_max, mask, a + i);
auto vec_out = _mm512_sub_ps(vec_a, vec_max);
vec_out = _dil_exp_kernel(vec_out);
vec_sum = _mm512_mask_add_ps(vec_sum, mask, vec_sum, vec_out);
_mm512_mask_store_ps(out + i, mask, vec_out);
}
// NOTE: _mm512_reduce_add_ps is sequence instruction
val = _mm512_reduce_add_ps(vec_sum);
}
inline void _dil_normalization_kernel(
const float* a,
const float& sum,
const int& size,
float* out) {
auto vec_sum = _mm512_set1_ps(sum);
__m512 vec_a = {};
__m512 vec_out = {};
// The start address is not aligned
uint32_t valid_data_num = 0;
__mmask16 loading_mask = {};
if (_padding_alignment(a, size, valid_data_num, loading_mask)) {
vec_a = _mm512_maskz_expandloadu_ps(loading_mask, a);
vec_out = _mm512_div_ps(vec_a, vec_sum);
_mm512_mask_compressstoreu_ps(out, loading_mask, vec_out);
}
int i = valid_data_num;
for (; i <= size - 16; i += 16) {
auto vec_a = _mm512_load_ps(a + i);
auto vec_out = _mm512_div_ps(vec_a, vec_sum);
_mm512_store_ps(out + i, vec_out);
}
if (i < size) {
__mmask16 mask = (1 << (size - i)) - 1;
auto vec_a = _mm512_maskz_load_ps(mask, a + i);
auto vec_out = _mm512_div_ps(vec_a, vec_sum);
_mm512_mask_store_ps(out + i, mask, vec_out);
}
}
/**
* @brief Fuse the add operator and softmax operator.
*
* @attention
 * There are some assumptions for this operator:
 * - The reduce dimension for softmax is the last dimension
 * - The reduce dimension for softmax is the leading (contiguous) dimension
 * - The number of elements in the reduce dimension is a multiple of 16
 * - The input tensors are contiguous
 * - The number of input tensor dimensions should be >= 2
 * - Only the second input tensor is broadcastable
*
* @param[in] a a contiguous tensor to be added
* @param[in] b a tensor to be added while it should be broadcastable
 * @return The tensor that stores the result of @code softmax(a + b) @endcode
*/
at::Tensor dil_add_softmax(const at::Tensor& a, const at::Tensor& b) {
float* a_data_base = a.data_ptr<float>();
float* b_data_base = b.data_ptr<float>();
// Check if the tensor needs to be broadcasted
auto infered_size = a.sizes().vec();
auto need_broadcast = (infered_size != b.sizes());
if (need_broadcast) {
infered_size = at::infer_size(a.sizes(), b.sizes());
}
// Create an new tensor to store the output
auto output = at::empty_like(a);
float* output_data_base = output.data_ptr<float>();
// Calculate the strides for the input tensor
std::vector<int64_t> b_adjusted_strides = _adjust_strides(b, infered_size);
std::vector<int64_t> outer_size_per_dim;
int64_t dim_size = infered_size[infered_size.size() - 1];
TORCH_INTERNAL_ASSERT_DEBUG_ONLY(dim_size != 1);
int64_t outer_size = 1;
  // The last dim is the loop unit. We need to subtract 2 to exclude the last dim.
  // infered_size.size() - 2 is the second-to-last dimension.
for (int64_t i = infered_size.size() - 2; i >= 0; i--) {
// Record outer dimensions
outer_size_per_dim.insert(outer_size_per_dim.begin(), outer_size);
// Calculate outer loop number;
outer_size *= infered_size[i];
}
int64_t grain_size = at::internal::GRAIN_SIZE / (16 * dim_size);
if (grain_size < 1)
grain_size = 1;
int64_t outer_dims_num = outer_size_per_dim.size();
at::parallel_for(0, outer_size, grain_size, [&](int64_t begin, int64_t end) {
float val = 0.0;
int64_t b_offset = 0;
for (int64_t i = begin; i < end; i++) {
if (need_broadcast) {
b_offset =
_calc_element_offset(i, outer_size_per_dim, b_adjusted_strides);
} else {
b_offset = i * dim_size;
}
// Add a and b and get the maximum value:
// output_data = a + b
// val = max(output_data)
_dil_add_reduce_max_fusion_kernel(
a_data_base + i * dim_size,
b_data_base + b_offset,
dim_size,
output_data_base + i * dim_size,
val);
// Calculate the e^x and get the sum value:
// output_data = output_data - max(output_data)
// output_data = e^(output_data)
// val = sum(output_data)
_dil_exp_reduce_sum_fusion_kernel(
output_data_base + i * dim_size,
dim_size,
output_data_base + i * dim_size,
val);
      // Calculate the normalization [e^x / sum(e^x)]:
// output_data = output_data / sum(output_data)
_dil_normalization_kernel(
output_data_base + i * dim_size,
val,
dim_size,
output_data_base + i * dim_size);
}
});
return output;
}
} // namespace vec512
} // namespace vec
} // namespace kernel
} // namespace cpu
} // namespace torch_ipex
| 5,859 |
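
As a readability aid, the NumPy sketch below restates what dil_add_softmax above computes — softmax(a + b) over the last, contiguous dimension with b broadcastable against a — under the same shape assumptions as the docstring. It is a semantic reference only, not a performance-equivalent implementation.

import numpy as np

def add_softmax_reference(a: np.ndarray, b: np.ndarray) -> np.ndarray:
    x = a + b                                 # broadcasting add
    x_max = x.max(axis=-1, keepdims=True)     # reduce max over the last dim
    e = np.exp(x - x_max)                     # shifted exponentials
    return e / e.sum(axis=-1, keepdims=True)  # normalization

# Shapes matching the kernel's assumptions: the last dim is the reduce dim
# (a multiple of 16) and only b is broadcast over the leading dims.
a = np.random.rand(2, 4, 8, 32).astype(np.float32)
b = np.random.rand(2, 1, 1, 32).astype(np.float32)
out = add_softmax_reference(a, b)
assert np.allclose(out.sum(axis=-1), 1.0, atol=1e-5)
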
535 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#include "cbmem_test/cbmem_test.h"
TEST_CASE_SELF(cbmem_test_case_1)
{
int i;
int rc;
/* i starts at 2, for the 2 overwritten entries. */
i = 2;
rc = cbmem_walk(&cbmem1, cbmem_test_case_1_walk, &i);
TEST_ASSERT_FATAL(rc == 0, "Could not walk cbmem tree! rc = %d", rc);
TEST_ASSERT_FATAL(i == CBMEM1_ENTRY_COUNT + 1,
"Did not go through every element of walk, %d processed", i - 2);
}
| 399 |
598 | <reponame>yangboz/maro
from maro.simulator import Env
from maro.simulator.scenarios.vm_scheduling import AllocateAction, DecisionPayload
from rule_based_algorithm import RuleBasedAlgorithm
class FirstFit(RuleBasedAlgorithm):
def __init__(self, **kwargs):
super().__init__()
def allocate_vm(self, decision_event: DecisionPayload, env: Env) -> AllocateAction:
# Use a valid PM based on its order.
chosen_idx: int = decision_event.valid_pms[0]
        # Take action to allocate on the chosen PM.
action: AllocateAction = AllocateAction(
vm_id=decision_event.vm_id,
pm_id=chosen_idx
)
return action
| 275 |
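
A small, hedged check of the first-fit rule above. FakeDecision is a stand-in for maro's DecisionPayload that exposes only the two attributes allocate_vm reads, env is passed as None because the method body never touches it, and the module name first_fit is an assumption made for illustration (as is maro being installed).

from collections import namedtuple

from first_fit import FirstFit  # assumed module name for the file above

FakeDecision = namedtuple("FakeDecision", ["valid_pms", "vm_id"])

action = FirstFit().allocate_vm(FakeDecision(valid_pms=[3, 7, 9], vm_id=42), env=None)
# The chosen PM is always valid_pms[0] (3 here); AllocateAction is assumed to
# expose the vm_id/pm_id it was constructed with.
print(action.pm_id)
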
852 | import FWCore.ParameterSet.Config as cms
# producer for alcahbhemuon (HCAL with muons)
import Calibration.HcalAlCaRecoProducers.alcaHBHEMuonProducer_cfi
HBHEMuonProd = Calibration.HcalAlCaRecoProducers.alcaHBHEMuonProducer_cfi.alcaHBHEMuonProducer.clone()
| 106 |
1,056 | package org.netbeans.test.java.hints;
/**
* @author <NAME>
*/
public class CastOrMethodInvocation58494hUsee {
public CastOrMethodInvocation58494hUsee() {
}
protected void doStuff(Integer i) {
}
void doStuff(Float i) {
}
}
| 116 |
573 |
.name = "Reimu (Extra Stage Intro)",
.bgm = "intro",
.phases = (CutscenePhase[]) {
{ "cutscenes/locations/moriya", {
T_NARRATOR("— The Moriya Shrine"),
T_NARRATOR("A workaholic shrine at the top of Yōkai Mountain."),
{ 0 },
}},
{ "cutscenes/locations/moriya", {
T_NARRATOR("Reimu was reading a book she'd borrowed from the Human Village’s book rental store, Suzunaan."),
T_NARRATOR("It was her first ‘science fiction’ series, apparently about a flat, circular world that floated through space on the backs of four turtles…"),
{ 0 },
}},
{ "cutscenes/reimu_extra/01", {
T_NARRATOR("Suddenly, Yukari slid in through one of her gaps."),
T_NARRATOR("As usual, Reimu could feel her presence right away.\n"),
T_NARRATOR("Unexpectedly, Ran and Chen were also in tow, taking positions behind her with solemn, silent expressions."),
{ 0 },
}},
{ "cutscenes/reimu_extra/01", {
T_YUKARI("My oh my, passing the time at rival shrines, are we?"),
T_REIMU("Guard duty."),
T_YUKARI("Hm? Not trusting enough of Ms. Kochiya to—"),
T_REIMU("No."),
{ 0 },
}},
{ "cutscenes/reimu_extra/01", {
T_YUKARI("*giggle* I see."),
T_NARRATOR("\nAs if on cue, Elly came running up the steps of the Moriya Shrine, breathlessly, clad in a Moriya shrine maiden uniform.\n"),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_ELLY("Ms. Hakurei! There’s been- Oh, my apologies, we have a guest. How nice to meet—"),
T_ELLY("Wait, no, there’s no time for that!"),
T_REIMU("Slow down. What’s wrong?"),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_NARRATOR("Elly brought out some kind of small communicator device. It projected an image into the air, like an illusion.\n"),
T_ELLY("It’s the Tower! It’s spinning back up!"),
T_REIMU("Spinning? It looks completely stationary from here."),
T_ELLY("No! I mean it’s powering up, charging up! Whatever! It’s turning back on!!"),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_REIMU("Huh? It thought you were in control of it."),
T_ELLY("I am! I was! I should be…! But this device isn’t letting me do anything!"),
T_ELLY("And I thought about going back to the Tower to turn it off again, but I-… no, i-it’s not safe to."),
T_REIMU("Eh? Why not?"),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_KURUMI("Hm? What’s all this, then?"),
T_ELLY("Kurumi, the Tower's turning back on!"),
T_KURUMI("Huh?! But all I did was seal off the mansion's main doors, so the fairies would leave it alone, just like you asked!"),
T_ELLY("I… what?!"),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_KURUMI("Y-you left me a note that said, ‘Kurumi, go close off the mansion module so the fairies stop playing inside of it'!"),
T_KURUMI("So I did!!"),
T_ELLY("I didn’t leave you any such note! This isn’t even my handwriting! You *know* I have a hard time with stroke order—"),
T_REIMU("Okay, excuse me, but what does this have to do with anything?"),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_ELLY("I… I may have stolen it from someone."),
T_REIMU("That doesn't answer my question."),
T_ELLY("She’s… still trapped in there. I thought I'd sealed her away, though!"),
T_ELLY("There’s no way she should've been able to escape that!"),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_NARRATOR("Elly and Kurumi got chilled expressions on their faces, falling completely silent.\n"),
T_NARRATOR("Reimu closed her book and sighed."),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_REIMU("Alright, okay, she’s a big scary monster. I had a feeling something like this might happen…"),
T_YUKARI("Before you go, Reimu…"),
T_REIMU("Huh? What now? Want to come with?"),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_YUKARI("You do care very deeply about Gensōkyō, and everyone within it, don’t you?"),
T_YUKARI("Even those who fundamenally threaten us. You even give the nastiest yōkai a chance at redemption."),
T_YUKARI("I admire that in you."),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_REIMU("Ehhhh? Why are you being so sincere? It’s freaking me out."),
T_REIMU("Even you think I’m a huge softie, huh? How disappointing…"),
T_NARRATOR("\nYukari remained silent, responding only with a gentle smile."),
{ 0 },
}},
{ "cutscenes/reimu_extra/02", {
T_NARRATOR("In the distance, the Tower of Babel began to emit a low, eerie rumble, as Reimu flew off to meet her next challenge…"),
{ 0 },
}},
{ NULL }
}
| 1,712 |
714 | <reponame>imadcat/science_rcn
"""
Learn a two-layer RCN model. See train_image for the main entry.
"""
from collections import namedtuple
import logging
import numpy as np
import networkx as nx
from scipy.spatial import distance, cKDTree
from science_rcn.preproc import Preproc
LOG = logging.getLogger(__name__)
ModelFactors = namedtuple('ModelFactors', 'frcs edge_factors graph')
def train_image(img, perturb_factor=2.):
"""Main function for training on one image.
Parameters
----------
img : 2D numpy.ndarray
The training image.
perturb_factor : float
How much two points are allowed to vary on average given the distance
between them. See Sec S2.3.2 for details.
Returns
-------
frcs : numpy.ndarray of numpy.int
Nx3 array of (feature idx, row, column), where each row represents a
single pool center
edge_factors : numpy.ndarray of numpy.int
Nx3 array of (source pool index, target pool index, perturb_radius), where
each row is a pairwise constraints on a pair of pool choices.
graph : networkx.Graph
An undirected graph whose edges describe the pairwise constraints between
the pool centers.
The tightness of the constraint is in the 'perturb_radius' edge attribute.
"""
# Pre-processing layer (cf. Sec 4.2.1)
preproc_layer = Preproc()
bu_msg = preproc_layer.fwd_infer(img)
# Sparsification (cf. Sec 5.1.1)
frcs = sparsify(bu_msg)
# Lateral learning (cf. 5.2)
graph, edge_factors = learn_laterals(frcs, bu_msg, perturb_factor=perturb_factor)
return ModelFactors(frcs, edge_factors, graph)
def sparsify(bu_msg, suppress_radius=3):
"""Make a sparse representation of the edges by greedily selecting features from the
    output of the preprocessing layer and suppressing overlapping activations.
Parameters
----------
bu_msg : 3D numpy.ndarray of float
The bottom-up messages from the preprocessing layer.
Shape is (num_feats, rows, cols)
suppress_radius : int
How many pixels in each direction we assume this filter
explains when included in the sparsification.
Returns
-------
frcs : see train_image.
"""
frcs = []
img = bu_msg.max(0) > 0
while True:
r, c = np.unravel_index(img.argmax(), img.shape)
if not img[r, c]:
break
frcs.append((bu_msg[:, r, c].argmax(), r, c))
img[r - suppress_radius:r + suppress_radius + 1,
c - suppress_radius:c + suppress_radius + 1] = False
return np.array(frcs)
def learn_laterals(frcs, bu_msg, perturb_factor, use_adjaceny_graph=False):
"""Given the sparse representation of each training example,
learn perturbation laterals. See train_image for parameters and returns.
"""
if use_adjaceny_graph:
graph = make_adjacency_graph(frcs, bu_msg)
graph = adjust_edge_perturb_radii(frcs, graph, perturb_factor=perturb_factor)
else:
graph = nx.Graph()
graph.add_nodes_from(range(frcs.shape[0]))
graph = add_underconstraint_edges(frcs, graph, perturb_factor=perturb_factor)
graph = adjust_edge_perturb_radii(frcs, graph, perturb_factor=perturb_factor)
edge_factors = np.array(
[(edge_source, edge_target, edge_attrs['perturb_radius'])
for edge_source, edge_target, edge_attrs in graph.edges_iter(data=True)])
return graph, edge_factors
def make_adjacency_graph(frcs, bu_msg, max_dist=3):
"""Make a graph based on contour adjacency."""
preproc_pos = np.transpose(np.nonzero(bu_msg > 0))[:, 1:]
preproc_tree = cKDTree(preproc_pos)
# Assign each preproc to the closest F1
f1_bus_tree = cKDTree(frcs[:, 1:])
_, preproc_to_f1 = f1_bus_tree.query(preproc_pos, k=1)
# Add edges
preproc_pairs = np.array(list(preproc_tree.query_pairs(r=max_dist, p=1)))
f1_edges = np.array(list({(x, y) for x, y in preproc_to_f1[preproc_pairs] if x != y}))
graph = nx.Graph()
graph.add_nodes_from(range(frcs.shape[0]))
graph.add_edges_from(f1_edges)
return graph
def add_underconstraint_edges(frcs,
graph,
perturb_factor=2.,
max_cxn_length=100,
tolerance=4):
"""Examines all pairs of variables and greedily adds pairwise constraints
until the pool flexibility matches the desired amount of flexibility specified by
perturb_factor and tolerance.
Parameters
----------
frcs : numpy.ndarray of numpy.int
Nx3 array of (feature idx, row, column), where each row represents a
single pool center.
perturb_factor : float
How much two points are allowed to vary on average given the distance
between them.
max_cxn_length : int
The maximum radius to consider adding laterals.
tolerance : float
How much relative error to tolerate in how much two points vary relative to each
other.
Returns
-------
graph : see train_image.
"""
graph = graph.copy()
f1_bus_tree = cKDTree(frcs[:, 1:])
close_pairs = np.array(list(f1_bus_tree.query_pairs(r=max_cxn_length)))
dists = [distance.euclidean(frcs[x, 1:], frcs[y, 1:]) for x, y in close_pairs]
for close_pairs_idx in np.argsort(dists):
source, target = close_pairs[close_pairs_idx]
dist = dists[close_pairs_idx]
try:
perturb_dist = nx.shortest_path_length(graph, source, target, 'perturb_radius')
except nx.NetworkXNoPath:
perturb_dist = np.inf
target_perturb_dist = dist / float(perturb_factor)
actual_perturb_dist = max(0, np.ceil(target_perturb_dist))
if perturb_dist >= target_perturb_dist * tolerance:
graph.add_edge(source,
target,
perturb_radius=int(actual_perturb_dist))
return graph
def adjust_edge_perturb_radii(frcs,
graph,
perturb_factor=2):
"""Returns a new graph where the 'perturb_radius' has been adjusted to account for
rounding errors. See train_image for parameters and returns.
"""
graph = graph.copy()
total_rounding_error = 0
for n1, n2 in nx.edge_dfs(graph):
desired_radius = distance.euclidean(frcs[n1, 1:], frcs[n2, 1:]) / perturb_factor
upper = int(np.ceil(desired_radius))
lower = int(np.floor(desired_radius))
round_up_error = total_rounding_error + upper - desired_radius
round_down_error = total_rounding_error + lower - desired_radius
if abs(round_up_error) < abs(round_down_error):
graph.edge[n1][n2]['perturb_radius'] = upper
total_rounding_error = round_up_error
else:
graph.edge[n1][n2]['perturb_radius'] = lower
total_rounding_error = round_down_error
return graph
| 2,949 |
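
A hedged demo of the sparsification step defined above on a synthetic bottom-up message. The module path science_rcn.learning is assumed from the package imports; only NumPy is otherwise required.

import numpy as np

from science_rcn.learning import sparsify  # assumed module path for the file above

# (num_feats, rows, cols) bottom-up messages: everything inactive (-1) except
# three well-separated edge activations.
bu_msg = np.full((4, 30, 30), -1.0)
bu_msg[0, 5, 5] = 1.0
bu_msg[2, 5, 20] = 1.0
bu_msg[3, 20, 10] = 1.0

# One (feature idx, row, col) pool center per activation; a 3-pixel radius
# around each greedy pick is suppressed.
frcs = sparsify(bu_msg, suppress_radius=3)
print(frcs)  # e.g. [[ 0  5  5] [ 2  5 20] [ 3 20 10]]
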
615 | /* ************************************************************************
* Copyright 2013 Advanced Micro Devices, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ************************************************************************/
#include <cltypes.h>
unsigned int
dtypeSize(DataType type)
{
size_t ret;
switch (type) {
case TYPE_FLOAT:
ret = sizeof(cl_float);
break;
case TYPE_DOUBLE:
ret = sizeof(cl_double);
break;
case TYPE_COMPLEX_FLOAT:
ret = sizeof(cl_float2);
break;
case TYPE_COMPLEX_DOUBLE:
ret = sizeof(cl_double2);
break;
case TYPE_UNSIGNED_INT:// For iAMAX
ret = sizeof(cl_uint);
break;
default:
ret = (size_t)-1;
break;
}
return (unsigned int)ret;
}
size_t
fl4RowWidth(size_t width, size_t typeSize)
{
size_t s;
s = width / (sizeof(cl_float4) / typeSize);
if (s * (sizeof(cl_float4) / typeSize) != width) {
s++;
}
return s;
}
| 555 |
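
A quick Python restatement of the rounding arithmetic in fl4RowWidth above: a row of width elements of typeSize bytes is padded up to whole cl_float4 (16-byte) groups, i.e. ceil(width / (16 / typeSize)). The sketch mirrors the C logic for checking only; it is not part of clBLAS.

import math

SIZEOF_CL_FLOAT4 = 16  # bytes

def fl4_row_width(width: int, type_size: int) -> int:
    per_float4 = SIZEOF_CL_FLOAT4 // type_size  # elements per cl_float4
    return math.ceil(width / per_float4)

# float rows (4-byte elements): 4 elements per float4 -> 10 floats need 3 slots
assert fl4_row_width(10, 4) == 3
# complex double rows (16-byte elements): 1 element per float4
assert fl4_row_width(7, 16) == 7
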
376 | <gh_stars>100-1000
package net.zhuoweizhang.boardwalk.yggdrasil;
import java.util.*;
import java.net.*;
import java.io.*;
import net.zhuoweizhang.boardwalk.downloader.*;
import net.zhuoweizhang.boardwalk.util.*;
import com.google.gson.Gson;
public class YggdrasilAuthenticator {
private static final String API_URL = "https://authserver.mojang.com";
//private static final String API_URL = "http://localhost:8000";
private String clientName = "Minecraft";
private int clientVersion = 1;
private Gson gson = new Gson();
private <T> T makeRequest(String endpoint, Object inputObject, Class<T> responseClass) throws IOException {
InputStream is = null;
HttpURLConnection conn;
byte[] buf = new byte[0x4000];
int statusCode = -1;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
String requestJson = gson.toJson(inputObject);
URL url = null;
try {
url = new URL(API_URL + "/" + endpoint);
conn = (HttpURLConnection) url.openConnection();
conn.setRequestProperty("User-Agent", DownloadUtils.USER_AGENT);
conn.setDoInput(true);
conn.setRequestMethod("POST");
conn.setRequestProperty("Content-Type", "application/json");
conn.connect();
OutputStream os = null;
try {
os = conn.getOutputStream();
os.write(requestJson.getBytes(DownloadUtils.utf8));
} finally {
if (os != null) os.close();
}
statusCode = conn.getResponseCode();
if (statusCode != 200) {
is = conn.getErrorStream();
} else {
is = conn.getInputStream();
}
IoUtil.pipe(is, bos, buf);
} finally {
if (is != null) {
try {
is.close();
} catch (Exception e) {
}
}
}
String outString = new String(bos.toByteArray(), DownloadUtils.utf8);
if (statusCode != 200) {
throw new RuntimeException("Status: " + statusCode + ":" + outString);
} else {
T outResult = gson.fromJson(outString, responseClass);
return outResult;
}
}
public AuthenticateResponse authenticate(String username, String password, UUID clientId) throws IOException {
AuthenticateRequest request = new AuthenticateRequest(username, password, clientId, clientName, clientVersion);
return makeRequest("authenticate", request, AuthenticateResponse.class);
}
public RefreshResponse refresh(String authToken, UUID clientId/*, Profile activeProfile*/) throws IOException {
RefreshRequest request = new RefreshRequest(authToken, clientId/*, activeProfile*/);
return makeRequest("refresh", request, RefreshResponse.class);
}
}
| 862 |
347 | <reponame>hbraha/ovirt-engine<filename>backend/manager/modules/restapi/jaxrs/src/test/java/org/ovirt/engine/api/restapi/resource/BackendDataCenterClusterResourceTest.java
package org.ovirt.engine.api.restapi.resource;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.ovirt.engine.api.restapi.resource.BackendClustersResourceTest.getModel;
import static org.ovirt.engine.api.restapi.resource.BackendClustersResourceTest.setUpEntityExpectations;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.UriInfo;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.ovirt.engine.core.common.action.ActionType;
import org.ovirt.engine.core.common.action.ClusterOperationParameters;
import org.ovirt.engine.core.common.action.ClusterParametersBase;
import org.ovirt.engine.core.common.businessentities.Cluster;
import org.ovirt.engine.core.common.businessentities.network.Network;
import org.ovirt.engine.core.common.queries.IdQueryParameters;
import org.ovirt.engine.core.common.queries.QueryType;
import org.ovirt.engine.core.compat.Guid;
@MockitoSettings(strictness = Strictness.LENIENT)
public class BackendDataCenterClusterResourceTest
extends AbstractBackendSubResourceTest<org.ovirt.engine.api.model.Cluster, Cluster, BackendClusterResource<BackendDataCenterClustersResource>> {
private static final Guid MANAGEMENT_NETWORK_ID = Guid.newGuid();
private static final Guid clusterId = GUIDS[0];
private static final Guid dataCenterId = GUIDS[1];
private boolean isPopulateSet = false;
public BackendDataCenterClusterResourceTest() {
super(new BackendDataCenterClusterResource(
new BackendDataCenterClustersResource(dataCenterId.toString()),
clusterId.toString()));
}
@BeforeEach
public void initParent() {
initResource(resource.parent);
}
@Override
protected void setUriInfo(UriInfo uriInfo) {
resource.setUriInfo(uriInfo);
((BackendDataCenterClusterResource)resource).getParent().setUriInfo(uriInfo);
}
@Override
protected void init() {
initResource(resource);
initResource(((BackendDataCenterClusterResource)resource).getParent());
}
@Test
public void testBadGuid() {
verifyNotFoundException(assertThrows(
WebApplicationException.class,
() -> new BackendDataCenterClusterResource(
new BackendDataCenterClustersResource(dataCenterId.toString()),
"foo"))
);
}
@Test
public void testGetNotFound() {
setUriInfo(setUpBasicUriExpectations());
setUpEntityQueryExpectations(QueryType.GetClustersByStoragePoolId,
IdQueryParameters.class,
new String[] { "Id" },
new Object[] { dataCenterId },
new ArrayList<Cluster>(),
null);
verifyNotFoundException(assertThrows(WebApplicationException.class, () -> resource.get()));
}
@Test
public void testGet() {
setUriInfo(setUpBasicUriExpectations());
setUpEntityQueryExpectations(QueryType.GetClustersByStoragePoolId,
IdQueryParameters.class,
new String[] { "Id" },
new Object[] { dataCenterId },
setUpClusters(),
null);
verifyModel(resource.get(), 0);
}
@Test
public void testUpdateNotFound() {
setUriInfo(setUpBasicUriExpectations());
setUpGetEntityExpectations(1, true);
verifyNotFoundException(assertThrows(WebApplicationException.class, () -> resource.update(getModel(0))));
}
@Test
public void testUpdate() {
setUpGetEntityExpectations(2);
setUpManagementNetworkExpectation();
setUriInfo(setUpActionExpectations(ActionType.UpdateCluster,
ClusterOperationParameters.class,
new String[] {},
new Object[] {},
true,
true));
final org.ovirt.engine.api.model.Cluster updatedCluster = resource.update(getModel(0));
verifyModel(updatedCluster, 0);
verifyManagementNetwork(updatedCluster);
}
private void verifyManagementNetwork(org.ovirt.engine.api.model.Cluster updatedCluster) {
assertEquals(String.format("%s/%s/%s/%s/%s",
BASE_PATH,
"clusters",
GUIDS[0],
"networks",
MANAGEMENT_NETWORK_ID),
updatedCluster.getManagementNetwork().getHref());
}
private void setUpManagementNetworkExpectation() {
setUpPopulateExpectation();
final Network mockNetwork = mock(Network.class);
when(mockNetwork.getId()).thenReturn(MANAGEMENT_NETWORK_ID);
setUpGetEntityExpectations(QueryType.GetManagementNetwork,
IdQueryParameters.class,
new String[] { "Id" },
new Object[] { GUIDS[0] },
mockNetwork);
}
private void setUpPopulateExpectation() {
if (!isPopulateSet) {
when(httpHeaders.getRequestHeader(BackendResource.ALL_CONTENT_HEADER)).thenReturn(Collections.singletonList("true"));
isPopulateSet = true;
}
}
@Test
public void testUpdateCantDo() {
doTestBadUpdate(false, true, CANT_DO);
}
@Test
public void testUpdateFailed() {
doTestBadUpdate(true, false, FAILURE);
}
private void doTestBadUpdate(boolean valid, boolean success, String detail) {
setUpGetEntityExpectations(1);
setUriInfo(setUpActionExpectations(ActionType.UpdateCluster,
ClusterOperationParameters.class,
new String[] {},
new Object[] {},
valid,
success));
verifyFault(assertThrows(WebApplicationException.class, () -> resource.update(getModel(0))), detail);
}
@Test
public void testConflictedUpdate() {
setUpGetEntityExpectations(1);
org.ovirt.engine.api.model.Cluster model = getModel(1);
model.setId(GUIDS[1].toString());
verifyImmutabilityConstraint(assertThrows(WebApplicationException.class, () -> resource.update(model)));
}
@Test
public void testRemove() {
setUpEntityQueryExpectations(
QueryType.GetClustersByStoragePoolId,
IdQueryParameters.class,
new String[] { "Id" },
new Object[] { dataCenterId },
setUpClusters(),
null
);
setUriInfo(
setUpActionExpectations(
ActionType.RemoveCluster,
ClusterParametersBase.class,
new String[] { "ClusterId" },
new Object[] { GUIDS[0] },
true,
true
)
);
verifyRemove(resource.remove());
}
@Test
public void testRemoveNonExistant() {
setUpEntityQueryExpectations(
QueryType.GetClustersByStoragePoolId,
IdQueryParameters.class,
new String[] { "Id" },
new Object[] { dataCenterId },
new ArrayList<Cluster>(),
null
);
verifyNotFoundException(assertThrows(WebApplicationException.class, () -> resource.remove()));
}
@Test
public void testRemoveCantDo() {
setUpEntityQueryExpectations(
QueryType.GetClustersByStoragePoolId,
IdQueryParameters.class,
new String[] { "Id" },
new Object[] { dataCenterId },
setUpClusters(),
null
);
doTestBadRemove(false, true, CANT_DO);
}
@Test
public void testRemoveFailed() {
setUpEntityQueryExpectations(
QueryType.GetClustersByStoragePoolId,
IdQueryParameters.class,
new String[] { "Id" },
new Object[] { dataCenterId },
setUpClusters(),
null
);
doTestBadRemove(true, false, FAILURE);
}
protected void doTestBadRemove(boolean valid, boolean success, String detail) {
setUriInfo(
setUpActionExpectations(
ActionType.RemoveCluster,
ClusterParametersBase.class,
new String[] { "ClusterId" },
new Object[] { GUIDS[0] },
valid,
success
)
);
verifyFault(assertThrows(WebApplicationException.class, resource::remove), detail);
}
protected void setUpGetEntityExpectations(int times) {
setUpGetEntityExpectations(times, false);
}
protected void setUpGetEntityExpectations(int times, boolean notFound) {
while (times-- > 0) {
setUpGetEntityExpectations(QueryType.GetClusterById,
IdQueryParameters.class,
new String[] { "Id" },
new Object[] { GUIDS[0] },
notFound ? null : getEntity(0));
}
}
@Override
protected Cluster getEntity(int index) {
return setUpEntityExpectations(mock(Cluster.class), index);
}
protected List<Cluster> setUpClusters() {
List<Cluster> entities = new ArrayList<>();
for (int i = 0; i < NAMES.length; i++) {
entities.add(getEntity(i));
}
return entities;
}
}
| 4,904 |
348 | <filename>docs/data/leg-t2/065/06502462.json
{"nom":"Vidouze","circ":"2ème circonscription","dpt":"Hautes-Pyrénées","inscrits":203,"abs":112,"votants":91,"blancs":7,"nuls":5,"exp":79,"res":[{"nuance":"REM","nom":"<NAME>","voix":46},{"nuance":"RDG","nom":"<NAME>","voix":33}]} | 118 |
319 | /**
* Copyright (c) 2011, The University of Southampton and the individual contributors.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of the University of Southampton nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.openimaj.image.feature.local.aggregate;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import org.junit.Before;
import org.junit.Test;
import org.openimaj.OpenIMAJ;
import org.openimaj.data.DataSource;
import org.openimaj.feature.SparseDoubleFV;
import org.openimaj.feature.SparseIntFV;
import org.openimaj.feature.local.data.LocalFeatureListDataSource;
import org.openimaj.feature.local.list.LocalFeatureList;
import org.openimaj.image.FImage;
import org.openimaj.image.ImageUtilities;
import org.openimaj.image.feature.local.engine.DoGSIFTEngine;
import org.openimaj.image.feature.local.keypoints.Keypoint;
import org.openimaj.ml.clustering.ByteCentroidsResult;
import org.openimaj.ml.clustering.assignment.soft.ByteKNNAssigner;
import org.openimaj.ml.clustering.kmeans.ByteKMeans;
/**
* Basic tests for BoVW and Soft BoVW
*
* @author <NAME> (<EMAIL>)
*
*/
public class BoVWTests {
private LocalFeatureList<Keypoint> features;
private ByteCentroidsResult centroids;
/**
* Setup for the tests - load an image, extract features, learn codebook.
*
* @throws IOException
*/
@Before
public void setup() throws IOException {
final FImage image = ImageUtilities.readF(OpenIMAJ.getLogoAsStream());
features = new DoGSIFTEngine().findFeatures(image);
final DataSource<byte[]> datasource = new LocalFeatureListDataSource<Keypoint, byte[]>(features);
final ByteKMeans km = ByteKMeans.createExact(10, 1);
centroids = km.cluster(datasource);
}
/**
* Test BoVW
*/
@Test
public void testBoVW() {
final BagOfVisualWords<byte[]> bovw = new BagOfVisualWords<byte[]>(centroids.defaultHardAssigner());
final SparseIntFV vector = bovw.aggregate(features);
assertEquals(10, vector.length());
}
/**
* Test SoftBoVW
*/
@Test
public void testSoftBoVW() {
final SoftBagOfVisualWords<byte[], float[]> bovw = new SoftBagOfVisualWords<byte[], float[]>(new ByteKNNAssigner(
centroids, false, 1));
final SparseDoubleFV vector = bovw.aggregate(features);
assertEquals(10, vector.length());
}
/**
* Test SoftBoVW
*/
@Test
public void testSoftBoVW2() {
final SoftBagOfVisualWords<byte[], float[]> bovw = new SoftBagOfVisualWords<byte[], float[]>(new ByteKNNAssigner(
centroids, false, 5));
final SparseDoubleFV vector = bovw.aggregate(features);
assertEquals(10, vector.length());
}
}
| 1,304 |
797 | /*
The MIT License (MIT)
Copyright (c) 2015 <NAME> and Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.intellij.lang.jsgraphql.types.util;
import com.google.common.collect.ImmutableList;
import com.intellij.lang.jsgraphql.types.Internal;
import com.intellij.lang.jsgraphql.types.collect.ImmutableKit;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static com.intellij.lang.jsgraphql.types.Assert.*;
@Internal
public class DefaultTraverserContext<T> implements TraverserContext<T> {
private final T curNode;
private T newNode;
private boolean nodeDeleted;
private final TraverserContext<T> parent;
private final Set<T> visited;
private final Map<Class<?>, Object> vars;
private final Object sharedContextData;
private Object newAccValue;
private boolean hasNewAccValue;
private Object curAccValue;
private final NodeLocation location;
private final boolean isRootContext;
private boolean parallel;
private Map<String, List<TraverserContext<T>>> children;
private Phase phase;
private final List<Breadcrumb<T>> breadcrumbs;
public DefaultTraverserContext(T curNode,
TraverserContext<T> parent,
Set<T> visited,
Map<Class<?>, Object> vars,
Object sharedContextData,
NodeLocation location,
boolean isRootContext,
boolean parallel) {
this.curNode = curNode;
this.parent = parent;
this.visited = visited;
this.vars = vars;
this.sharedContextData = sharedContextData;
this.location = location;
this.isRootContext = isRootContext;
this.parallel = parallel;
if (parent == null || parent.isRootContext()) {
this.breadcrumbs = ImmutableKit.emptyList();
} else {
List<Breadcrumb<T>> breadcrumbs = new ArrayList<>(parent.getBreadcrumbs().size() + 1);
breadcrumbs.add(new Breadcrumb<>(this.parent.thisNode(), this.location));
breadcrumbs.addAll(parent.getBreadcrumbs());
this.breadcrumbs = ImmutableList.copyOf(breadcrumbs);
}
}
public static <T> DefaultTraverserContext<T> dummy() {
return new DefaultTraverserContext<>(null, null, null, null, null, null, true, false);
}
public static <T> DefaultTraverserContext<T> simple(T node) {
return new DefaultTraverserContext<>(node, null, null, null, null, null, true, false);
}
@Override
public T thisNode() {
assertFalse(this.nodeDeleted, () -> "node is deleted");
if (newNode != null) {
return newNode;
}
return curNode;
}
@Override
public T originalThisNode() {
return curNode;
}
@Override
public void changeNode(T newNode) {
assertNotNull(newNode);
assertFalse(this.nodeDeleted, () -> "node is deleted");
this.newNode = newNode;
}
@Override
public void deleteNode() {
assertNull(this.newNode, () -> "node is already changed");
assertFalse(this.nodeDeleted, () -> "node is already deleted");
this.nodeDeleted = true;
}
@Override
public boolean isDeleted() {
return this.nodeDeleted;
}
@Override
public boolean isChanged() {
return this.newNode != null;
}
@Override
public TraverserContext<T> getParentContext() {
return parent;
}
@Override
public List<T> getParentNodes() {
List<T> result = new ArrayList<>();
TraverserContext<T> curContext = parent;
while (!curContext.isRootContext()) {
result.add(curContext.thisNode());
curContext = curContext.getParentContext();
}
return result;
}
@Override
public List<Breadcrumb<T>> getBreadcrumbs() {
return breadcrumbs;
}
@Override
public T getParentNode() {
if (parent == null) {
return null;
}
return parent.thisNode();
}
@Override
public Set<T> visitedNodes() {
return visited;
}
@Override
public boolean isVisited() {
return visited.contains(curNode);
}
@Override
public <S> S getVar(Class<? super S> key) {
return (S) key.cast(vars.get(key));
}
@Override
public <S> TraverserContext<T> setVar(Class<? super S> key, S value) {
vars.put(key, value);
return this;
}
@Override
public void setAccumulate(Object accumulate) {
hasNewAccValue = true;
newAccValue = accumulate;
}
@Override
public <U> U getNewAccumulate() {
if (hasNewAccValue) {
return (U) newAccValue;
} else {
return (U) curAccValue;
}
}
@Override
public <U> U getCurrentAccumulate() {
return (U) curAccValue;
}
@Override
public Object getSharedContextData() {
return sharedContextData;
}
/*
* PRIVATE: Used by {@link Traverser}
*/
void setCurAccValue(Object curAccValue) {
hasNewAccValue = false;
this.curAccValue = curAccValue;
}
@Override
public NodeLocation getLocation() {
return location;
}
@Override
public boolean isRootContext() {
return isRootContext;
}
@Override
public <S> S getVarFromParents(Class<? super S> key) {
TraverserContext<T> curContext = parent;
while (curContext != null) {
S var = curContext.getVar(key);
if (var != null) {
return var;
}
curContext = curContext.getParentContext();
}
return null;
}
/*
* PRIVATE: Used by {@link Traverser}
*/
void setChildrenContexts(Map<String, List<TraverserContext<T>>> children) {
assertTrue(this.children == null, () -> "children already set");
this.children = children;
}
@Override
public Map<String, List<TraverserContext<T>>> getChildrenContexts() {
assertNotNull(children, () -> "children not available");
return children;
}
/*
* PRIVATE: Used by {@link Traverser}
*/
void setPhase(Phase phase) {
this.phase = phase;
}
@Override
public Phase getPhase() {
return phase;
}
@Override
public boolean isParallel() {
return parallel;
}
}
| 3,146 |
607 | <reponame>AIHZP/andysworkshop-stm32plus
/*
* This file is a part of the open source stm32plus library.
* Copyright (c) 2011,2012,2013 <NAME> <www.andybrown.me.uk>
* Please see website for licensing terms.
*/
#pragma once
// ensure the MCU series is correct
#ifndef STM32PLUS_F4
#error This class can only be used with the STM32F4 series
#endif
namespace stm32plus {
/**
* Template class that will be used to initialise the multi-adc mode. You're expected
* to use one of the template typedefs down below instead of instantiating this
* directly.
*/
template<uint32_t TDmaModeType,uint32_t TAdcMultiModeType,uint8_t TTwoSamplingDelay>
struct AdcMultiFeature : AdcFeatureBase {
AdcMultiFeature(Adc& adc)
: AdcFeatureBase(adc) {
static_assert(TTwoSamplingDelay>=5 && TTwoSamplingDelay<=20,"TTwoSamplingDelay must be between 5 and 20");
// modes for ADC and DMA
((ADC_CommonInitTypeDef *)adc)->ADC_Mode=TAdcMultiModeType;
((ADC_CommonInitTypeDef *)adc)->ADC_DMAAccessMode=TDmaModeType;
      // initialise this such that the optimiser will eliminate all the conditional tests
((ADC_CommonInitTypeDef *)adc)->ADC_TwoSamplingDelay=
TTwoSamplingDelay==5 ? ADC_TwoSamplingDelay_5Cycles :
TTwoSamplingDelay==6 ? ADC_TwoSamplingDelay_6Cycles :
TTwoSamplingDelay==7 ? ADC_TwoSamplingDelay_7Cycles :
TTwoSamplingDelay==8 ? ADC_TwoSamplingDelay_8Cycles :
TTwoSamplingDelay==9 ? ADC_TwoSamplingDelay_9Cycles :
TTwoSamplingDelay==10 ? ADC_TwoSamplingDelay_10Cycles :
TTwoSamplingDelay==11 ? ADC_TwoSamplingDelay_11Cycles :
TTwoSamplingDelay==12 ? ADC_TwoSamplingDelay_12Cycles :
TTwoSamplingDelay==13 ? ADC_TwoSamplingDelay_13Cycles :
TTwoSamplingDelay==14 ? ADC_TwoSamplingDelay_14Cycles :
TTwoSamplingDelay==15 ? ADC_TwoSamplingDelay_15Cycles :
TTwoSamplingDelay==16 ? ADC_TwoSamplingDelay_16Cycles :
TTwoSamplingDelay==17 ? ADC_TwoSamplingDelay_17Cycles :
TTwoSamplingDelay==18 ? ADC_TwoSamplingDelay_18Cycles :
TTwoSamplingDelay==19 ? ADC_TwoSamplingDelay_19Cycles :
ADC_TwoSamplingDelay_20Cycles;
}
/**
* Get the results of a multi-conversion
* @return The results of the conversion
*/
uint32_t getMultiConversionValue() const {
return ADC_GetMultiModeConversionValue();
}
};
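    /*
     * Illustrative sketch only (not part of the original header): the "template
     * typedefs down below" that the class comment refers to combine a DMA access
     * mode, a multi-ADC mode and a sampling delay into a named feature. The ADC_*
     * constants come from the ST standard peripheral library; the typedef name
     * itself is hypothetical.
     *
     *   typedef AdcMultiFeature<ADC_DMAAccessMode_1,
     *                           ADC_DualMode_RegSimult,
     *                           5> AdcDualRegSimultDmaMode1Feature;
     *
     * Such a typedef is then passed as a feature parameter when declaring the Adc
     * peripheral template.
     */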
}
| 1,089 |
395 | <filename>app/src/main/java/com/asksven/betterbatterystats/localeplugin/PluginApplication.java<gh_stars>100-1000
/*
* Copyright (C) 2012 asksven
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* This file was contributed by two forty four a.m. LLC <http://www.twofortyfouram.com>
 * under the terms of the Apache License, Version 2.0
*/
package com.asksven.betterbatterystats.localeplugin;
import android.app.Application;
import android.content.pm.ApplicationInfo;
import android.os.Build;
import android.util.Log;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
* Implements an application object for the plug-in.
* <p>
* This application is non-essential for the plug-in's operation; it simply enables debugging options globally for the app.
*/
public final class PluginApplication extends Application
{
@Override
public void onCreate()
{
super.onCreate();
if ((getApplicationInfo().flags & ApplicationInfo.FLAG_DEBUGGABLE) != 0)
{
if (Constants.IS_LOGGABLE)
{
Log.v(Constants.LOG_TAG, "Application is debuggable. Enabling additional debug logging"); //$NON-NLS-1$
}
/*
* If using the Fragment compatibility library, enable debug logging here
*/
// FragmentManager.enableDebugLogging(true);
// LoaderManager.enableDebugLogging(true);
if (Build.VERSION.SDK_INT >= 9)
{
try
{
final Class<?> strictModeClass = Class.forName("android.os.StrictMode"); //$NON-NLS-1$
final Method enableDefaultsMethod = strictModeClass.getMethod("enableDefaults"); //$NON-NLS-1$
enableDefaultsMethod.invoke(strictModeClass);
}
catch (final ClassNotFoundException e)
{
throw new RuntimeException(e);
}
catch (final SecurityException e)
{
throw new RuntimeException(e);
}
catch (final NoSuchMethodException e)
{
throw new RuntimeException(e);
}
catch (final IllegalArgumentException e)
{
throw new RuntimeException(e);
}
catch (final IllegalAccessException e)
{
throw new RuntimeException(e);
}
catch (final InvocationTargetException e)
{
throw new RuntimeException(e);
}
}
}
}
} | 1,410 |
325 | <gh_stars>100-1000
#pragma GCC diagnostic warning "-Wgnu-statement-expression"
int g = ({
int x = 5;
x + 10;
});
#pragma GCC diagnostic ignored "-Wgnu-statement-expression"
void foo(void) {
int x = ({});
({ 1; });
({ 2; 3; });
int y = ({
int z = 5;
z += 10;
z;
});
z++;
}
void self_referential_initializer(void) {
int x = ({
x = 5;
});
}
#define TESTS_SKIPPED 1
#define EXPECTED_ERRORS "statement expressions.c:3:10: warning: use of GNU statement expression extension [-Wgnu-statement-expression]" \
"statement expressions.c:3:10: error: statement expression not allowed at file scope" \
"statement expressions.c:10:13: error: initializing 'int' from incompatible type 'void'" \
"statement expressions.c:11:5: warning: expression result unused [-Wunused-value]" \
"statement expressions.c:12:8: warning: expression result unused [-Wunused-value]" \
"statement expressions.c:12:5: warning: expression result unused [-Wunused-value]" \
"statement expressions.c:18:5: error: use of undeclared identifier 'z'" \
| 407 |
2,109 | <reponame>Kipjr/core
# Help functions for compatibility between python version 2 and 3
# From https://legacy.python.org/dev/peps/pep-0469
try:
dict.iteritems
except AttributeError:
# python 3
def iteritems(d):
return iter(d.items())
else:
# python 2
def iteritems(d):
return d.iteritems()
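# Usage sketch (added for illustration; the import path is hypothetical):
#
#   from compat import iteritems
#
#   counts = {"a": 1, "b": 2}
#   for key, value in iteritems(counts):   # same code runs on Python 2 and 3
#       print(key, value)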
| 130 |
659 | <filename>notebooks/solutions/06B_basic_grid_search.py<gh_stars>100-1000
for Model in [Lasso, Ridge]:
scores = [cross_val_score(Model(alpha), X, y, cv=3).mean()
for alpha in alphas]
plt.plot(alphas, scores, label=Model.__name__)
plt.legend(loc='lower left')
| 120 |
1,285 | package demo.commands;
import net.kyori.adventure.text.Component;
import net.minestom.server.command.CommandSender;
import net.minestom.server.command.builder.Command;
import net.minestom.server.command.builder.CommandContext;
import net.minestom.server.command.builder.arguments.ArgumentType;
import net.minestom.server.command.builder.arguments.number.ArgumentNumber;
import net.minestom.server.command.builder.condition.Conditions;
import net.minestom.server.command.builder.exception.ArgumentSyntaxException;
import net.minestom.server.entity.Player;
public class HealthCommand extends Command {
public HealthCommand() {
super("health");
setCondition(Conditions::playerOnly);
setDefaultExecutor(this::defaultExecutor);
var modeArg = ArgumentType.Word("mode").from("set", "add");
var valueArg = ArgumentType.Integer("value").between(0, 100);
setArgumentCallback(this::onModeError, modeArg);
setArgumentCallback(this::onValueError, valueArg);
addSyntax(this::sendSuggestionMessage, modeArg);
addSyntax(this::onHealthCommand, modeArg, valueArg);
}
private void defaultExecutor(CommandSender sender, CommandContext context) {
sender.sendMessage(Component.text("Correct usage: health set|add <number>"));
}
private void onModeError(CommandSender sender, ArgumentSyntaxException exception) {
sender.sendMessage(Component.text("SYNTAX ERROR: '" + exception.getInput() + "' should be replaced by 'set' or 'add'"));
}
private void onValueError(CommandSender sender, ArgumentSyntaxException exception) {
final int error = exception.getErrorCode();
final String input = exception.getInput();
switch (error) {
case ArgumentNumber.NOT_NUMBER_ERROR:
sender.sendMessage(Component.text("SYNTAX ERROR: '" + input + "' isn't a number!"));
break;
case ArgumentNumber.TOO_LOW_ERROR:
case ArgumentNumber.TOO_HIGH_ERROR:
sender.sendMessage(Component.text("SYNTAX ERROR: " + input + " is not between 0 and 100"));
break;
}
}
private void sendSuggestionMessage(CommandSender sender, CommandContext context) {
sender.sendMessage(Component.text("/health " + context.get("mode") + " [Integer]"));
}
private void onHealthCommand(CommandSender sender, CommandContext context) {
final Player player = (Player) sender;
final String mode = context.get("mode");
final int value = context.get("value");
switch (mode.toLowerCase()) {
case "set":
player.setHealth(value);
break;
case "add":
player.setHealth(player.getHealth() + value);
break;
}
player.sendMessage(Component.text("You have now " + player.getHealth() + " health"));
}
} | 1,106 |
6,992 | <reponame>kdombeck/spring-security
/*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.web.server.csrf;
import java.security.MessageDigest;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import reactor.core.publisher.Mono;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.codec.multipart.FormFieldPart;
import org.springframework.http.server.reactive.ServerHttpRequest;
import org.springframework.security.crypto.codec.Utf8;
import org.springframework.security.web.server.authorization.HttpStatusServerAccessDeniedHandler;
import org.springframework.security.web.server.authorization.ServerAccessDeniedHandler;
import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatcher;
import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatcher.MatchResult;
import org.springframework.util.Assert;
import org.springframework.web.server.ServerWebExchange;
import org.springframework.web.server.WebFilter;
import org.springframework.web.server.WebFilterChain;
/**
* <p>
* Applies
* <a href="https://www.owasp.org/index.php/Cross-Site_Request_Forgery_(CSRF)" >CSRF</a>
* protection using a synchronizer token pattern. Developers are required to ensure that
* {@link CsrfWebFilter} is invoked for any request that allows state to change. Typically
* this just means that they should ensure their web application follows proper REST
* semantics (i.e. do not change state with the HTTP methods GET, HEAD, TRACE, OPTIONS).
* </p>
*
* <p>
* Typically the {@link ServerCsrfTokenRepository} implementation chooses to store the
* {@link CsrfToken} in {@link org.springframework.web.server.WebSession} with
* {@link WebSessionServerCsrfTokenRepository}. This is preferred to storing the token in
* a cookie which can be modified by a client application.
* </p>
* <p>
 * The {@code Mono<CsrfToken>} is exposed as a request attribute with the name of
* {@code CsrfToken.class.getName()}. If the token is new it will automatically be saved
* at the time it is subscribed.
* </p>
*
* @author <NAME>
* @author <NAME>
* @since 5.0
*/
public class CsrfWebFilter implements WebFilter {
public static final ServerWebExchangeMatcher DEFAULT_CSRF_MATCHER = new DefaultRequireCsrfProtectionMatcher();
/**
* The attribute name to use when marking a given request as one that should not be
* filtered.
*
* To use, set the attribute on your {@link ServerWebExchange}: <pre>
* CsrfWebFilter.skipExchange(exchange);
* </pre>
*/
private static final String SHOULD_NOT_FILTER = "SHOULD_NOT_FILTER" + CsrfWebFilter.class.getName();
private ServerWebExchangeMatcher requireCsrfProtectionMatcher = DEFAULT_CSRF_MATCHER;
private ServerCsrfTokenRepository csrfTokenRepository = new WebSessionServerCsrfTokenRepository();
private ServerAccessDeniedHandler accessDeniedHandler = new HttpStatusServerAccessDeniedHandler(
HttpStatus.FORBIDDEN);
private boolean isTokenFromMultipartDataEnabled;
public void setAccessDeniedHandler(ServerAccessDeniedHandler accessDeniedHandler) {
Assert.notNull(accessDeniedHandler, "accessDeniedHandler");
this.accessDeniedHandler = accessDeniedHandler;
}
public void setCsrfTokenRepository(ServerCsrfTokenRepository csrfTokenRepository) {
Assert.notNull(csrfTokenRepository, "csrfTokenRepository cannot be null");
this.csrfTokenRepository = csrfTokenRepository;
}
public void setRequireCsrfProtectionMatcher(ServerWebExchangeMatcher requireCsrfProtectionMatcher) {
Assert.notNull(requireCsrfProtectionMatcher, "requireCsrfProtectionMatcher cannot be null");
this.requireCsrfProtectionMatcher = requireCsrfProtectionMatcher;
}
/**
* Specifies if the {@code CsrfWebFilter} should try to resolve the actual CSRF token
* from the body of multipart data requests.
* @param tokenFromMultipartDataEnabled true if should read from multipart form body,
* else false. Default is false
*/
public void setTokenFromMultipartDataEnabled(boolean tokenFromMultipartDataEnabled) {
this.isTokenFromMultipartDataEnabled = tokenFromMultipartDataEnabled;
}
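	// Illustrative wiring only (not part of this class): the class-level Javadoc
	// describes choosing a ServerCsrfTokenRepository and reading the Mono<CsrfToken>
	// request attribute. A minimal manual setup could look like this (bean wiring
	// and the surrounding handler are hypothetical):
	//
	//   CsrfWebFilter csrf = new CsrfWebFilter();
	//   csrf.setCsrfTokenRepository(new WebSessionServerCsrfTokenRepository());
	//   csrf.setTokenFromMultipartDataEnabled(true);
	//
	//   // inside a WebFlux handler; subscribing saves a newly generated token
	//   Mono<CsrfToken> token = exchange.getAttribute(CsrfToken.class.getName());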
@Override
public Mono<Void> filter(ServerWebExchange exchange, WebFilterChain chain) {
if (Boolean.TRUE.equals(exchange.getAttribute(SHOULD_NOT_FILTER))) {
return chain.filter(exchange).then(Mono.empty());
}
return this.requireCsrfProtectionMatcher.matches(exchange).filter(MatchResult::isMatch)
.filter((matchResult) -> !exchange.getAttributes().containsKey(CsrfToken.class.getName()))
.flatMap((m) -> validateToken(exchange)).flatMap((m) -> continueFilterChain(exchange, chain))
.switchIfEmpty(continueFilterChain(exchange, chain).then(Mono.empty()))
.onErrorResume(CsrfException.class, (ex) -> this.accessDeniedHandler.handle(exchange, ex));
}
public static void skipExchange(ServerWebExchange exchange) {
exchange.getAttributes().put(SHOULD_NOT_FILTER, Boolean.TRUE);
}
private Mono<Void> validateToken(ServerWebExchange exchange) {
return this.csrfTokenRepository.loadToken(exchange)
.switchIfEmpty(
Mono.defer(() -> Mono.error(new CsrfException("An expected CSRF token cannot be found"))))
.filterWhen((expected) -> containsValidCsrfToken(exchange, expected))
.switchIfEmpty(Mono.defer(() -> Mono.error(new CsrfException("Invalid CSRF Token")))).then();
}
private Mono<Boolean> containsValidCsrfToken(ServerWebExchange exchange, CsrfToken expected) {
return exchange.getFormData().flatMap((data) -> Mono.justOrEmpty(data.getFirst(expected.getParameterName())))
.switchIfEmpty(Mono.justOrEmpty(exchange.getRequest().getHeaders().getFirst(expected.getHeaderName())))
.switchIfEmpty(tokenFromMultipartData(exchange, expected))
.map((actual) -> equalsConstantTime(actual, expected.getToken()));
}
private Mono<String> tokenFromMultipartData(ServerWebExchange exchange, CsrfToken expected) {
if (!this.isTokenFromMultipartDataEnabled) {
return Mono.empty();
}
ServerHttpRequest request = exchange.getRequest();
HttpHeaders headers = request.getHeaders();
MediaType contentType = headers.getContentType();
if (!contentType.includes(MediaType.MULTIPART_FORM_DATA)) {
return Mono.empty();
}
return exchange.getMultipartData().map((d) -> d.getFirst(expected.getParameterName())).cast(FormFieldPart.class)
.map(FormFieldPart::value);
}
private Mono<Void> continueFilterChain(ServerWebExchange exchange, WebFilterChain chain) {
return Mono.defer(() -> {
Mono<CsrfToken> csrfToken = csrfToken(exchange);
exchange.getAttributes().put(CsrfToken.class.getName(), csrfToken);
return chain.filter(exchange);
});
}
private Mono<CsrfToken> csrfToken(ServerWebExchange exchange) {
return this.csrfTokenRepository.loadToken(exchange).switchIfEmpty(generateToken(exchange));
}
/**
	 * Constant-time comparison to protect against timing attacks.
	 * @param expected the expected token value
	 * @param actual the value supplied by the request
	 * @return true if the two values are equal
*/
private static boolean equalsConstantTime(String expected, String actual) {
if (expected == actual) {
return true;
}
if (expected == null || actual == null) {
return false;
}
// Encode after ensure that the string is not null
byte[] expectedBytes = Utf8.encode(expected);
byte[] actualBytes = Utf8.encode(actual);
return MessageDigest.isEqual(expectedBytes, actualBytes);
}
private Mono<CsrfToken> generateToken(ServerWebExchange exchange) {
return this.csrfTokenRepository.generateToken(exchange)
.delayUntil((token) -> this.csrfTokenRepository.saveToken(exchange, token)).cache();
}
private static class DefaultRequireCsrfProtectionMatcher implements ServerWebExchangeMatcher {
private static final Set<HttpMethod> ALLOWED_METHODS = new HashSet<>(
Arrays.asList(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.TRACE, HttpMethod.OPTIONS));
@Override
public Mono<MatchResult> matches(ServerWebExchange exchange) {
return Mono.just(exchange.getRequest()).flatMap((r) -> Mono.justOrEmpty(r.getMethod()))
.filter(ALLOWED_METHODS::contains).flatMap((m) -> MatchResult.notMatch())
.switchIfEmpty(MatchResult.match());
}
}
}
| 2,795 |
7,857 | <gh_stars>1000+
#pragma once
#include <napi.h>
#include <QPointer>
#include "Extras/Utils/nutils.h"
#include "QtWidgets/QTableView/ntableview.hpp"
#include "QtWidgets/QTableView/qtableview_macro.h"
class DLL_EXPORT QTableViewWrap : public Napi::ObjectWrap<QTableViewWrap> {
QTABLEVIEW_WRAPPED_METHODS_DECLARATION
private:
QPointer<NTableView> instance;
bool disableDeletion;
public:
static Napi::Object init(Napi::Env env, Napi::Object exports);
QTableViewWrap(const Napi::CallbackInfo& info);
~QTableViewWrap();
NTableView* getInternalInstance();
// class constructor
static Napi::FunctionReference constructor;
// wrapped methods
};
| 245 |
365 | <gh_stars>100-1000
# coding=utf-8
# Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple script to convert CSV output from rf_benchmark to Markdown format.
The input CSV should have the following fields:
- CNN
- input resolution
- end_point
- FLOPs (Billion)
- RF size hor
- RF size ver
- effective stride hor
- effective stride ver
- effective padding hor
- effective padding ver
Since usually in all cases the parameters in the horizontal and vertical
directions are the same, this is assumed by this script, which only prints one
of them to the Markdown file.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import csv
import sys
from absl import app
cmd_args = None
def main(unused_argv):
with open(cmd_args.markdown_path, 'w') as f:
# Write table header and field size.
f.write('CNN | resolution | end-point | FLOPs (Billion) | RF | '
'effective stride | effective padding\n')
f.write(':--------------------: | :----------: | :---------------: | '
':---------------: | :-----: | :----: | :----:\n')
with open(cmd_args.csv_path) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
# Make sure horizontal and parameters are the same.
assert row['RF size hor'] == row['RF size ver']
assert row['effective stride hor'] == row['effective stride ver']
assert row['effective padding hor'] == row['effective padding ver']
f.write('%s|%s|%s|%s|%s|%s|%s\n' %
(row['CNN'], row['input resolution'], row['end_point'],
row['FLOPs (Billion)'], row['RF size hor'],
row['effective stride hor'], row['effective padding hor']))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.register('type', 'bool', lambda v: v.lower() == 'true')
parser.add_argument(
'--csv_path',
type=str,
default='/tmp/rf.csv',
help='Path where CSV output of rf_benchmark was saved.')
parser.add_argument(
'--markdown_path',
type=str,
default='/tmp/rf.md',
help='Path where Markdown output will be saved.')
cmd_args, unparsed = parser.parse_known_args()
app.run(main=main, argv=[sys.argv[0]] + unparsed)
| 971 |
530 | #ifndef _VGUGV_COMMON_MULTIFRAMES_
#define _VGUGV_COMMON_MULTIFRAMES_
#include <memory>
#include <vector>
#include "../frame/frame.h"
#include "../transformation/transformation.h"
namespace VGUGV
{
namespace Common
{
template<class T_FeatureType, class T_FeatureDescriptorType>
class MultiFrames
{
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
public:
typedef std::shared_ptr<MultiFrames<T_FeatureType, T_FeatureDescriptorType> > Ptr;
typedef typename Frame<T_FeatureType, T_FeatureDescriptorType>::Ptr BaseFramePtr;
public:
MultiFrames(int seqID, int nFrames);
void insertNewFrame(const BaseFramePtr& frame);
BaseFramePtr getFrame(int index);
void setMultiFramePose(Transformation multiFramePose);
void setMultiFrameGTPose(Transformation multiFrameGTPose) { mMultiFrameGTPose = multiFrameGTPose; }
Transformation getMultiFramePose();
Transformation getMultiFrameGTPose() { return mMultiFrameGTPose; }
int getMultiFrameID();
private:
int mnFrames;
int mMultiFrameID;
Transformation mMultiFramePose;
Transformation mMultiFrameGTPose;
std::vector<BaseFramePtr> mvFrames;
};
}
}
#endif | 503 |
14,668 | # script to generate the generateKey tests
import os
here = os.path.dirname(__file__)
successes_html = """<!DOCTYPE html>
<meta charset=utf-8>
<meta name="timeout" content="long">
<title>WebCryptoAPI: generateKey() Successful Calls</title>
<link rel="author" title="<NAME>" href="mailto:<EMAIL>">
<link rel="help" href="https://www.w3.org/TR/WebCryptoAPI/#dfn-SubtleCrypto-method-generateKey">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/WebCryptoAPI/util/helpers.js"></script>
<script src="successes.js"></script>
<h1>generateKey Tests for Good Parameters</h1>
<p>
<strong>Warning!</strong> RSA key generation is intrinsically
very slow, so the related tests can take up to
several minutes to complete, depending on browser!
</p>
<div id="log"></div>
<script>
run_test([%s]);
</script>"""
failures_html = """<!DOCTYPE html>
<meta charset=utf-8>
<meta name="timeout" content="long">
<title>WebCryptoAPI: generateKey() for Failures</title>
<link rel="author" title="<NAME>" href="mailto:<EMAIL>">
<link rel="help" href="https://www.w3.org/TR/WebCryptoAPI/#dfn-SubtleCrypto-method-generateKey">
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/WebCryptoAPI/util/helpers.js"></script>
<script src="failures.js"></script>
<h1>generateKey Tests for Bad Parameters</h1>
<div id="log"></div>
<script>
run_test([%s]);
</script>
"""
successes_worker = """// META: timeout=long
importScripts("/resources/testharness.js");
importScripts("../util/helpers.js");
importScripts("successes.js");
run_test([%s]);
done();"""
failures_worker = """// META: timeout=long
importScripts("/resources/testharness.js");
importScripts("../util/helpers.js");
importScripts("failures.js");
run_test([%s]);
done();"""
names = ["AES-CTR", "AES-CBC", "AES-GCM", "AES-KW", "HMAC", "RSASSA-PKCS1-v1_5",
"RSA-PSS", "RSA-OAEP", "ECDSA", "ECDH"]
for filename_pattern, template in [("test_successes_%s.https.html", successes_html),
("test_failures_%s.https.html", failures_html),
("successes_%s.https.worker.js", successes_worker),
("failures_%s.https.worker.js", failures_worker)]:
for name in names:
path = os.path.join(here, os.pardir, "generateKey", filename_pattern % name)
with open(path, "w") as f:
f.write(template % '"%s"' % name)
| 1,009 |
754 | /*-
* <<
* UAVStack
* ==
* Copyright (C) 2016 - 2017 UAVStack
* ==
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* >>
*/
package com.creditease.monitor.captureframework.repository;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import com.creditease.monitor.captureframework.spi.MonitorElement;
import com.creditease.monitor.captureframework.spi.MonitorElementInstance;
public class StandardMonitorElementInstance implements MonitorElementInstance {
private static class SumBySeconds {
private static SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
private long timespan;
private long timestamp;
private int lastRecordNumber = 1;
private Map<String, Long> records = new LinkedHashMap<String, Long>();
public void setLastRecordNumber(int lastRecordNumber) {
this.lastRecordNumber = lastRecordNumber;
}
public long getTimespan() {
return timespan;
}
public void setTimespan(long timespan) {
this.timespan = timespan;
}
public long getTimestamp() {
return timestamp;
}
public void setTimestamp(long timestamp) {
this.timestamp = timestamp;
}
public void putHistoryRecord(long timestamp, long value) {
if (records.size() >= lastRecordNumber) {
String key = records.keySet().iterator().next();
records.remove(key);
}
records.put(sdf.format(new Date(timestamp)), value);
}
public Map<String, Long> getHistoryRecords() {
return this.records;
}
}
private final String id;
private final Map<String, Object> values = new ConcurrentHashMap<String, Object>();
private final Map<String, SumBySeconds> elemSumBySeconds = new ConcurrentHashMap<String, SumBySeconds>();
private final MonitorElement parent;
public StandardMonitorElementInstance(String id, MonitorElement parent) {
this.id = id;
this.parent = parent;
}
@SuppressWarnings({ "rawtypes", "unchecked" })
@Override
public String toJSONString() {
StringBuilder sb = new StringBuilder("{");
sb.append("id:\"" + id + "\",values:{");
Set<Entry<String, Object>> entry = values.entrySet();
int count = 0;
for (Entry<String, Object> e : entry) {
sb.append("\"" + e.getKey() + "\":");
Object val = e.getValue();
Class<?> valClass = val.getClass();
if (AtomicLong.class.isAssignableFrom(valClass) || Long.class.isAssignableFrom(valClass)
|| Integer.class.isAssignableFrom(valClass) || Double.class.isAssignableFrom(valClass)) {
sb.append(val);
}
else if (Map.class.isAssignableFrom(valClass)) {
Map map = (Map) val;
Iterator<Entry> newentry = map.entrySet().iterator();
StringBuilder msb = new StringBuilder("{");
int countMSB = 0;
while (newentry.hasNext()) {
Entry entryElem = newentry.next();
msb.append("\"" + entryElem.getKey() + "\":" + entryElem.getValue() + ",");
countMSB++;
}
if (countMSB > 0) {
msb = msb.deleteCharAt(msb.length() - 1);
}
msb.append("}");
sb.append(msb.toString());
}
else {
sb.append("\"" + val + "\"");
}
sb.append(",");
count++;
}
if (count > 0) {
sb = sb.deleteCharAt(sb.length() - 1);
}
sb.append("}");
return sb.append("}").toString();
}
@Override
public String getInstanceId() {
return id;
}
@Override
public Map<String, Object> getValues() {
return values;
}
@Override
public void setValue(String key, Object value) {
if (key == null || value == null)
return;
values.put(key, value);
}
@Override
public void destroy() {
values.clear();
}
@Override
public Object getValue(String key) {
if (key == null)
return null;
return values.get(key);
}
/**
     * sumValue keeps the increment-by-delta operation on one instance value atomic
*
* @param key
* @param addValue
*/
@Override
public long sumValue(String key, long addValue) {
Object obj = values.get(key);
if (obj == null) {
synchronized (values) {
obj = values.get(key);
if (obj == null) {
AtomicLong incre = new AtomicLong(addValue);
values.put(key, incre);
return addValue;
}
}
}
AtomicLong incre = AtomicLong.class.cast(obj);
long tmp = 0;
SumBySeconds sbs = this.elemSumBySeconds.get(key);
// sum by period
if (sbs != null) {
if (System.currentTimeMillis() - sbs.getTimestamp() >= sbs.getTimespan()) {
synchronized (incre) {
if (System.currentTimeMillis() - sbs.getTimestamp() >= sbs.getTimespan()) {
// update timestamp
sbs.setTimestamp(System.currentTimeMillis());
                        // record the last window's value
sbs.putHistoryRecord(sbs.getTimestamp(), incre.get());
// set records value
setValue(key + ".rc", sbs.getHistoryRecords());
                        // restart the counter from the current increment
incre.set(addValue);
tmp = addValue;
}
else {
tmp = incre.addAndGet(addValue);
}
}
}
else {
tmp = incre.addAndGet(addValue);
}
}
// normal incre
else {
tmp = incre.addAndGet(addValue);
}
        // when the counter overflows past Long.MAX_VALUE and becomes negative
if (tmp < 0) {
values.remove(key);
synchronized (values) {
obj = values.get(key);
if (obj == null) {
AtomicLong newincre = new AtomicLong(addValue);
values.put(key, newincre);
tmp = addValue;
}
else {
AtomicLong newincre = AtomicLong.class.cast(obj);
tmp = newincre.addAndGet(addValue);
}
}
}
return tmp;
}
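    /*
     * Usage sketch (illustrative only; the key names are made up):
     *
     *   StandardMonitorElementInstance inst = ...; // obtained from its MonitorElement
     *   inst.increValue("http.requests");          // atomic +1
     *   inst.sumValue("http.bytes", 2048);         // atomic +2048
     *   inst.compareSet("http.maxRespTime", 180, CompareSetOperation.MAX);
     *
     * All operations on a given key share one AtomicLong, so concurrent capture
     * threads can update the same counter without external locking.
     */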
/**
     * increValue keeps the increment-by-one operation on one instance value atomic
*/
@Override
public long increValue(String key) {
return sumValue(key, 1);
}
/**
     * compareSet atomically compares newValue with the stored value and updates it according to the comparison result
*/
@Override
public boolean compareSet(String key, long newValue, CompareSetOperation operation) {
Object obj = values.get(key);
if (obj == null) {
synchronized (values) {
obj = values.get(key);
if (obj == null) {
AtomicLong incre = new AtomicLong(newValue);
values.put(key, incre);
return true;
}
}
}
AtomicLong incre = AtomicLong.class.cast(obj);
synchronized (incre) {
long curValue = incre.get();
switch (operation) {
case MAX:
if (curValue < newValue) {
incre.set(newValue);
return true;
}
break;
case MIN:
if (curValue > newValue) {
incre.set(newValue);
return true;
}
break;
default:
break;
}
}
return false;
}
/**
* get the value as long type
*/
@Override
public long getValueLong(String key) {
Object o = this.getValue(key);
if (o == null)
return 0;
if (AtomicLong.class.isAssignableFrom(o.getClass())) {
return AtomicLong.class.cast(o).get();
}
else {
return 0;
}
}
@Override
public MonitorElement getMonitorElement() {
return this.parent;
}
/**
     * Sets the aggregation period for a counter.
     *
     * @param key
     * @param tu
     *            number of seconds to aggregate values over; after the period the counter resets to 0
*/
public void setValueSumBySeconds(String key, Long tu) {
setValueSumBySeconds(key, tu, 1);
}
/**
     * Sets the aggregation period for a counter.
     *
     * @param key
     * @param tu
     *            number of seconds to aggregate values over; after the period the counter resets to 0
     * @param lastRecordNumber
     *            number of past period records to keep
*/
public void setValueSumBySeconds(String key, Long tu, int lastRecordNumber) {
if (null == key || tu <= 0 || lastRecordNumber <= 0) {
return;
}
SumBySeconds sbs = new SumBySeconds();
sbs.setTimespan(tu * 1000);
sbs.setTimestamp(System.currentTimeMillis());
sbs.setLastRecordNumber(lastRecordNumber);
elemSumBySeconds.put(key, sbs);
}
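    /*
     * Illustrative example (the key name is made up): aggregate a counter over
     * 60-second windows and keep the last 5 completed windows:
     *
     *   inst.setValueSumBySeconds("qps", 60L, 5);
     *   inst.increValue("qps"); // counts within the current 60-second window
     *
     * When a window elapses, sumValue() stores the window total in the history
     * map published under the "qps.rc" value and restarts the counter.
     */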
/**
     * Clears the aggregation period for a counter.
*
* @param key
*/
public void unsetValueSumBySeconds(String key) {
if (null == key) {
return;
}
elemSumBySeconds.remove(key);
}
}
| 5,289 |
545 | int main() {
const int i = 42; // const int
auto j = 1; // int
const auto &k = i; // const int &
auto *p = &i; // const int *
const auto j2 = i, &k2 = i; // const int, const int &
return 0;
}
| 85 |
1,199 | <filename>aqua/qt4-mac/files/library_path/library_path_test.c
#include <library_path_lib.h>
int main (void) {
test_cpath = 1;
return (0);
}
| 62 |
339 | <filename>AfxHookSource/csgo_CCSGameMovement.h
#pragma once
extern bool g_Enable_csgo_CCSGameMovement_DuckFix;
bool Hook_csgo_CCSGameMovement_DuckFix(void);
| 73 |
1,056 | <filename>ide/api.lsp/src/org/netbeans/spi/lsp/HyperlinkLocationProvider.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.spi.lsp;
import org.netbeans.api.lsp.HyperlinkLocation;
import java.util.concurrent.CompletableFuture;
import javax.swing.text.Document;
import org.netbeans.api.annotations.common.NonNull;
import org.netbeans.modules.lsp.HyperlinkLocationAccessor;
import org.netbeans.spi.editor.mimelookup.MimeLocation;
import org.openide.filesystems.FileObject;
/**
* Interface for resolving hyperlink locations. Implementations of the interface
* should be registered in MimeLookup.
* <pre>
*
* {@code @MimeRegistration(mimeType = "text/foo", service = HyperlinkLocationProvider.class)
* public class FooHyperlinkLocationProvider implements HyperlinkLocationProvider {
* ...
* }
* }
* </pre>
*
* @author <NAME>
* @since 1.0
*/
@MimeLocation(subfolderName = "HyperlinkLocationProviders")
public interface HyperlinkLocationProvider {
/**
* Resolves a hyperlink at the given document offset and returns its
* target location.
*
* @param doc document on which to operate.
* @param offset offset within document
* @return target location
*
* @since 1.0
*/
CompletableFuture<HyperlinkLocation> getHyperlinkLocation(@NonNull Document doc, int offset);
/**
* Creates {@link HyperlinkLocation} instances.
*
* @param fileObject target file object of the hyperlink
* @param startOffset start offset of the hyperlink's target range
* @param endOffset end offset of the hyperlink's target range
* @return new created instance
*
* @since 1.0
*/
public static HyperlinkLocation createHyperlinkLocation(@NonNull FileObject fileObject, int startOffset, int endOffset) {
return HyperlinkLocationAccessor.getDefault().createHyperlinkLocation(fileObject, startOffset, endOffset);
}
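    // Implementation sketch (illustrative only; FooHyperlinkLocationProvider and
    // resolveTargetSpan() are hypothetical helpers): a provider typically resolves
    // the element at the caret asynchronously and maps it to a HyperlinkLocation
    // through the factory method above.
    //
    //   public class FooHyperlinkLocationProvider implements HyperlinkLocationProvider {
    //       @Override
    //       public CompletableFuture<HyperlinkLocation> getHyperlinkLocation(Document doc, int offset) {
    //           return CompletableFuture.supplyAsync(() -> {
    //               Span span = resolveTargetSpan(doc, offset);
    //               return HyperlinkLocationProvider.createHyperlinkLocation(
    //                       span.file, span.start, span.end);
    //           });
    //       }
    //   }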
}
| 811 |
2,644 | /*
* Copyright (c) 2013-2015 <NAME>
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT
*/
#include <app/cdcserialtest/cdcserialtest.h>
#include <lk/err.h>
#include <lk/debug.h>
#include <stdio.h>
#include <lk/trace.h>
#include <target.h>
#include <lk/compiler.h>
#include <dev/usb.h>
#include <dev/usbc.h>
#include <dev/usb/class/cdcserial.h>
#include <hw/usb.h>
#include <lk/init.h>
#define LOCAL_TRACE 0
#define W(w) (w & 0xff), (w >> 8)
#define W3(w) (w & 0xff), ((w >> 8) & 0xff), ((w >> 16) & 0xff)
/* top level device descriptor */
static const uint8_t dev_descr[] = {
0x12, /* descriptor length */
DEVICE, /* Device Descriptor type */
W(0x0200), /* USB Version */
239, /* class */
2, /* subclass */
1, /* protocol */
64, /* max packet size, ept0 */
W(0x9999), /* vendor */
W(0x9999), /* product */
W(0x9999), /* release */
0x2, /* manufacturer string */
0x1, /* product string */
0x0, /* serialno string */
0x1, /* num configs */
};
/* high/low speed device qualifier */
static const uint8_t devqual_descr[] = {
0x0a, /* len */
DEVICE_QUALIFIER, /* Device Qualifier type */
W(0x0200), /* USB version */
0x00, /* class */
0x00, /* subclass */
0x00, /* protocol */
64, /* max packet size, ept0 */
0x01, /* num configs */
0x00 /* reserved */
};
static const uint8_t cfg_descr[] = {
0x09, /* Length of Cfg Descr */
CONFIGURATION, /* Type of Cfg Descr */
W(0x09), /* Total Length (incl ifc, ept) */
0x00, /* # Interfaces */
0x01, /* Cfg Value */
0x00, /* Cfg String */
0xc0, /* Attributes -- self powered */
250, /* Power Consumption - 500mA */
};
static const uchar langid[] = { 0x04, 0x03, 0x09, 0x04 };
static const uint8_t if_descriptor_lowspeed[] = {
0x09, /* length */
INTERFACE, /* type */
0x01, /* interface num */
0x00, /* alternates */
0x02, /* endpoint count */
0xff, /* interface class */
0xff, /* interface subclass */
0x00, /* interface protocol */
0x00, /* string index */
/* endpoint 1 IN */
0x07, /* length */
ENDPOINT, /* type */
0x83, /* address: 1 IN */
0x02, /* type: bulk */
W(64), /* max packet size: 64 */
00, /* interval */
/* endpoint 1 OUT */
0x07, /* length */
ENDPOINT, /* type */
0x03, /* address: 1 OUT */
0x02, /* type: bulk */
W(64), /* max packet size: 64 */
00, /* interval */
};
usb_config config = {
.lowspeed = {
.device = USB_DESC_STATIC(dev_descr),
.device_qual = USB_DESC_STATIC(devqual_descr),
.config = USB_DESC_STATIC(cfg_descr),
},
.highspeed = {
.device = USB_DESC_STATIC(dev_descr),
.device_qual = USB_DESC_STATIC(devqual_descr),
.config = USB_DESC_STATIC(cfg_descr),
},
.langid = USB_DESC_STATIC(langid),
};
static status_t ep_cb_rx(ep_t endpoint, usbc_transfer_t *t);
static status_t ep_cb_tx(ep_t endpoint, usbc_transfer_t *t);
static cdcserial_channel_t cdc_channel;
static void queue_rx(void) {
static usbc_transfer_t transfer;
static uint8_t buf[512];
transfer.callback = &ep_cb_rx;
transfer.result = 0;
transfer.buf = &buf;
transfer.buflen = sizeof(buf);
transfer.bufpos = 0;
transfer.extra = 0;
usbc_queue_rx(3, &transfer);
}
static void queue_tx(void) {
static usbc_transfer_t transfer;
static uint8_t buf[512];
for (uint i = 0; i < sizeof(buf); i++) {
buf[i] = ~i;
}
transfer.callback = &ep_cb_tx;
transfer.result = 0;
transfer.buf = &buf;
transfer.buflen = sizeof(buf);
transfer.bufpos = 0;
transfer.extra = 0;
usbc_queue_tx(3, &transfer);
}
static status_t ep_cb_rx(ep_t endpoint, usbc_transfer_t *t) {
#if LOCAL_TRACE
LTRACEF("ep %u transfer %p\n", endpoint, t);
usbc_dump_transfer(t);
if (t->result >= 0) {
hexdump8(t->buf, t->bufpos);
}
#endif
if (t->result >= 0)
queue_rx();
return NO_ERROR;
}
static status_t ep_cb_tx(ep_t endpoint, usbc_transfer_t *t) {
#if LOCAL_TRACE
LTRACEF("ep %u transfer %p\n", endpoint, t);
usbc_dump_transfer(t);
#endif
if (t->result >= 0)
queue_tx();
return NO_ERROR;
}
static status_t usb_cb(void *cookie, usb_callback_op_t op, const union usb_callback_args *args) {
LTRACEF("cookie %p, op %u, args %p\n", cookie, op, args);
if (op == USB_CB_ONLINE) {
usbc_setup_endpoint(3, USB_IN, 0x40, USB_BULK);
usbc_setup_endpoint(3, USB_OUT, 0x40, USB_BULK);
queue_rx();
queue_tx();
}
return NO_ERROR;
}
void target_usb_setup(void) {
usb_setup(&config);
printf("appending interfaces\n");
cdcserial_create_channel(&cdc_channel, 0x1, 0x2);
cdctest_setup(&cdc_channel);
usb_append_interface_lowspeed(if_descriptor_lowspeed, sizeof(if_descriptor_lowspeed));
usb_append_interface_highspeed(if_descriptor_lowspeed, sizeof(if_descriptor_lowspeed));
usb_register_callback(&usb_cb, NULL);
usb_add_string("LK", 1);
usb_add_string("LK Industries", 2);
usb_start();
}
| 2,729 |
561 | <filename>minerva/op/impl/ps.h<gh_stars>100-1000
#pragma once
#include <string>
namespace minerva {
void PushGradAndPullWeight(const float * grad, float * weights, size_t size, const std::string & layer_name);
} | 75 |
462 | <reponame>matvaibhav/pensieve
{
"appDesc": {
"description": "App description.",
"message": "Google Drive: buat, bagikan, dan simpan semua dokumen Anda dalam satu tempat."
},
"appName": {
"description": "App name.",
"message": "Google Drive"
}
}
| 118 |
759 | <gh_stars>100-1000
/*
* Copyright 2018 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.rometools.modules.psc.io;
import com.rometools.modules.psc.types.SimpleChapter;
import com.rometools.modules.psc.modules.PodloveSimpleChapterModule;
import com.rometools.modules.psc.modules.PodloveSimpleChapterModuleImpl;
import com.rometools.rome.feed.module.Module;
import com.rometools.rome.io.ModuleParser;
import org.jdom2.Attribute;
import org.jdom2.Element;
import org.jdom2.Namespace;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
/**
* The ModuleParser implementation for the Podlove Simple Chapter plug in.
*/
public class PodloveSimpleChapterParser implements ModuleParser {
private static final Namespace NS = Namespace.getNamespace(PodloveSimpleChapterModule.URI);
@Override
public String getNamespaceUri() {
return PodloveSimpleChapterModule.URI;
}
@Override
public Module parse(final Element element, final Locale locale) {
final Element chaptersElement = element.getChild(PodloveSimpleChapterAttribute.CHAPTERS, NS);
if (chaptersElement != null) {
final PodloveSimpleChapterModuleImpl m = new PodloveSimpleChapterModuleImpl();
final List<Element> es = chaptersElement.getChildren(PodloveSimpleChapterAttribute.CHAPTER, NS);
if (!es.isEmpty()) {
final List<SimpleChapter> result = new LinkedList<SimpleChapter>();
for (Element e : es) {
final SimpleChapter c = parseChapter(e);
result.add(c);
}
m.setChapters(result);
return m;
}
}
return null;
}
private SimpleChapter parseChapter(final Element eChapter) {
final SimpleChapter chapter = new SimpleChapter();
final String start = getAttributeValue(eChapter, PodloveSimpleChapterAttribute.START);
if (start != null) {
chapter.setStart(start);
}
final String title = getAttributeValue(eChapter, PodloveSimpleChapterAttribute.TITLE);
if (title != null) {
chapter.setTitle(title);
}
final String href = getAttributeValue(eChapter, PodloveSimpleChapterAttribute.HREF);
if (href != null) {
chapter.setHref(href);
}
final String image = getAttributeValue(eChapter, PodloveSimpleChapterAttribute.IMAGE);
if (image != null) {
chapter.setImage(image);
}
return chapter;
}
protected String getAttributeValue(final Element e, final String attributeName) {
Attribute attr = e.getAttribute(attributeName);
if (attr == null) {
attr = e.getAttribute(attributeName, NS);
}
if (attr != null) {
return attr.getValue();
} else {
return null;
}
}
}
| 1,294 |
3,010 | #include "shared-bindings/board/__init__.h"
STATIC const mp_rom_map_elem_t board_module_globals_table[] = {
CIRCUITPYTHON_BOARD_DICT_STANDARD_ITEMS
{ MP_ROM_QSTR(MP_QSTR_SCK), MP_ROM_PTR(&pin_PA17) },
{ MP_ROM_QSTR(MP_QSTR_MOSI), MP_ROM_PTR(&pin_PB23) },
{ MP_ROM_QSTR(MP_QSTR_MISO), MP_ROM_PTR(&pin_PB22) },
{ MP_ROM_QSTR(MP_QSTR_DAC_CS), MP_ROM_PTR(&pin_PA18) },
{ MP_ROM_QSTR(MP_QSTR_G1), MP_ROM_PTR(&pin_PA20) },
{ MP_ROM_QSTR(MP_QSTR_G2), MP_ROM_PTR(&pin_PA21) },
{ MP_ROM_QSTR(MP_QSTR_G3), MP_ROM_PTR(&pin_PA22) },
{ MP_ROM_QSTR(MP_QSTR_G4), MP_ROM_PTR(&pin_PA23) },
{ MP_ROM_QSTR(MP_QSTR_NEOPIXEL), MP_ROM_PTR(&pin_PB03) },
{ MP_ROM_QSTR(MP_QSTR_SPI), MP_ROM_PTR(&board_spi_obj) },
};
MP_DEFINE_CONST_DICT(board_module_globals, board_module_globals_table);
| 438 |
483 | <filename>49.丑数/49.丑数.py
# -*- coding:utf-8 -*-
class Solution:
def GetUglyNumber_Solution(self, index):
# write code here
if index <= 0:
return 0
dp = [1]
t2 = t3 = t5 = 0
for i in range(index):
ugly = min(min(dp[t2]*2,dp[t3]*3),dp[t5]*5)
if ugly == dp[t2]*2:
t2+=1
if ugly == dp[t3]*3:
t3+=1
if ugly == dp[t5]*5:
t5+=1
dp.append(ugly)
return dp[index-1] | 347 |
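A minimal usage sketch for the dynamic-programming ugly-number snippet above, assuming the Solution class exactly as defined there; the expected values follow from the standard ugly-number sequence 1, 2, 3, 4, 5, 6, 8, 9, 10, 12, ...

# Hypothetical driver for the GetUglyNumber_Solution method above.
if __name__ == "__main__":
    s = Solution()
    assert s.GetUglyNumber_Solution(1) == 1     # first ugly number
    assert s.GetUglyNumber_Solution(7) == 8     # 1, 2, 3, 4, 5, 6, 8, ...
    assert s.GetUglyNumber_Solution(10) == 12
    assert s.GetUglyNumber_Solution(0) == 0     # guard clause for non-positive index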
9,491 | /*
+----------------------------------------------------------------------+
| HipHop for PHP |
+----------------------------------------------------------------------+
| Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
+----------------------------------------------------------------------+
| This source file is subject to version 3.01 of the PHP license, |
| that is bundled with this package in the file LICENSE, and is |
| available through the world-wide-web at the following url: |
| http://www.php.net/license/3_01.txt |
| If you did not receive a copy of the PHP license and are unable to |
| obtain it through the world-wide-web, please send a note to |
| <EMAIL> so we can mail you a copy immediately. |
+----------------------------------------------------------------------+
*/
#include "hphp/runtime/vm/jit/abi.h"
#include "hphp/util/arch.h"
#include "hphp/runtime/vm/jit/abi-arm.h"
#include "hphp/runtime/vm/jit/abi-x64.h"
namespace HPHP { namespace jit {
///////////////////////////////////////////////////////////////////////////////
const Abi& abi(CodeKind kind) { return ARCH_SWITCH_CALL(abi, kind); }
PhysReg rvmfp() { return ARCH_SWITCH_CALL(rvmfp); }
PhysReg rvmsp() { return ARCH_SWITCH_CALL(rvmsp); }
PhysReg rvmtl() { return ARCH_SWITCH_CALL(rvmtl); }
PhysReg rsp() { return ARCH_SWITCH_CALL(rsp); }
RegSet vm_regs_with_sp() { return ARCH_SWITCH_CALL(vm_regs_with_sp); }
RegSet vm_regs_no_sp() { return ARCH_SWITCH_CALL(vm_regs_no_sp); }
RegSet cross_jit_save() { return ARCH_SWITCH_CALL(cross_jit_save); }
PhysReg rret_data() { return ARCH_SWITCH_CALL(rret_data); }
PhysReg rret_type() { return ARCH_SWITCH_CALL(rret_type); }
PhysReg rret(size_t i) { return ARCH_SWITCH_CALL(rret, i); }
PhysReg rret_simd(size_t i) { return ARCH_SWITCH_CALL(rret_simd, i); }
PhysReg rarg(size_t i) { return ARCH_SWITCH_CALL(rarg, i); }
PhysReg rarg_simd(size_t i) { return ARCH_SWITCH_CALL(rarg_simd, i); }
PhysReg rarg_ind_ret(size_t i) { return ARCH_SWITCH_CALL(rarg_ind_ret, i); }
size_t num_arg_regs() { return ARCH_SWITCH_CALL(num_arg_regs); }
size_t num_arg_regs_simd() { return ARCH_SWITCH_CALL(num_arg_regs_simd); }
size_t num_arg_regs_ind_ret() { return ARCH_SWITCH_CALL(num_arg_regs_ind_ret); }
PhysReg r_svcreq_req() { return ARCH_SWITCH_CALL(r_svcreq_req); }
PhysReg r_svcreq_spoff() { return ARCH_SWITCH_CALL(r_svcreq_spoff); }
PhysReg r_svcreq_stub() { return ARCH_SWITCH_CALL(r_svcreq_stub); }
PhysReg r_svcreq_sf() { return ARCH_SWITCH_CALL(r_svcreq_sf); }
PhysReg r_svcreq_arg(size_t i) { return ARCH_SWITCH_CALL(r_svcreq_arg, i); }
///////////////////////////////////////////////////////////////////////////////
RegSet arg_regs(size_t n) {
RegSet regs;
for (auto i = 0; i < n; i++) regs |= rarg(i);
return regs;
}
RegSet arg_regs_simd(size_t n) {
RegSet regs;
for (auto i = 0; i < n; i++) regs |= rarg_simd(i);
return regs;
}
RegSet arg_regs_ind_ret(size_t n) {
RegSet regs;
for (auto i = 0; i < n; i++) regs |= rarg_ind_ret(i);
return regs;
}
///////////////////////////////////////////////////////////////////////////////
Vflags required_flags(jit::ConditionCode cc) {
return ARCH_SWITCH_CALL(required_flags, cc);
}
///////////////////////////////////////////////////////////////////////////////
}}
| 1,362 |
475 | // Copyright (c) 2015-2016 <NAME>
// License: Academic Free License ("AFL") v. 3.0
// AFL License page: http://opensource.org/licenses/AFL-3.0
// http://vittorioromeo.info | <EMAIL>
#pragma once
#include <vrm/core/config.hpp>
#include <ecst/config.hpp>
#include <ecst/aliases.hpp>
ECST_HARDWARE_NAMESPACE
{
namespace status
{
/// @brief Returns the CPU's core count.
/// @details Returns `0` if the count is unknown.
ECST_ALWAYS_INLINE auto core_count() noexcept
{
return ecst::thread::hardware_concurrency();
}
/// @brief Returns true if the CPU's core count is known.
ECST_ALWAYS_INLINE auto core_count_known() noexcept
{
return core_count() > 0;
}
}
}
ECST_HARDWARE_NAMESPACE_END
| 352 |
311 | <reponame>sqreen/dd-trace-java
package datadog.trace.bootstrap;
import java.util.EnumSet;
import org.slf4j.Logger;
/** Tracks third-party libraries that may need special handling during agent startup. */
public enum Library {
WILDFLY;
/**
* Best-effort detection of libraries potentially used by the application. This is called at boot
* so we need to be very careful how many checks happen here. Some library use may not be visible
* to the agent at this point.
*/
public static EnumSet<Library> detectLibraries(final Logger log) {
final EnumSet<Library> libraries = EnumSet.noneOf(Library.class);
final String jbossHome = System.getenv("JBOSS_HOME");
if (jbossHome != null) {
log.debug("Env - jboss: {}", jbossHome);
libraries.add(WILDFLY);
}
if (!libraries.isEmpty()) {
log.debug("Detected {}", libraries);
}
return libraries;
}
}
| 307 |
872 | <gh_stars>100-1000
#!/usr/bin/python3
"""
Given a non-empty array of integers, return the third maximum number in this
array. If it does not exist, return the maximum number. The time complexity
must be in O(n).
"""
__author__ = 'Danyang'
import heapq
class Solution:
def thirdMax(self, nums):
"""
It is an easy question but error prone:
1. Choice of min heap or max heap: use min heap (not max heap) because
we want to know the smallest maximum number
2. Duplicate number
:type nums: List[int]
:rtype: int
"""
if not nums:
return None
h = []
for e in set(nums):
if len(h) < 3:
heapq.heappush(h, e)
elif len(h) == 3 and e > h[0]:
heapq.heappushpop(h, e)
assert len(h) <= 3
if len(h) == 3:
ret = min(h)
else:
ret = max(h)
return ret
if __name__ == "__main__":
assert Solution().thirdMax([1, 2, 3, 4]) == 2
assert Solution().thirdMax([4, 3, 2, 1]) == 2
assert Solution().thirdMax([2, 2, 3, 1]) == 1
assert Solution().thirdMax([4, 3]) == 4
| 554 |
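A small standalone illustration of the heap choice described in the docstring above: keeping only the three largest distinct values in a min-heap leaves the third maximum at the heap root, which is why the snippet inspects the smallest kept element rather than the largest. Plain standard-library Python, independent of the Solution class:

import heapq

heap = []
for value in {5, 2, 9, 7, 3}:           # distinct values, any order
    if len(heap) < 3:
        heapq.heappush(heap, value)
    elif value > heap[0]:
        heapq.heappushpop(heap, value)  # evict the smallest of the kept three
assert sorted(heap) == [5, 7, 9]
assert heap[0] == 5                     # the third maximum sits at the root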
3,631 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.mvel.integrationtests;
import java.util.Arrays;
import java.util.List;
public class TestObject {
private List list;
public TestObject(List list) {
this.list = list;
}
public boolean checkHighestPriority(String promoType,
long priority) {
this.list.add( "TestObject.checkHighestPriority: " + promoType + '|' + priority );
return true;
}
public boolean stayHasDaysOfWeek(String daysOfWeek,
boolean allDaysRequired,
String[][] days) {
this.list.add( "TestObject.stayHasDaysOfWeek: " + daysOfWeek + '|' + allDaysRequired + '|' + Arrays.toString( days[0] ) );
return true;
}
public void applyValueAddPromo(long aRuleId,
int aRuleVersion,
long aValueAddDctCode,
int aPromoType,
String aPromoCode) {
this.list.add( "TestObject.applyValueAddPromo: " + aRuleId + '|' + aRuleVersion + '|' + aValueAddDctCode + '|' + aPromoType + '|' + aPromoCode );
}
public static String[][] array(String arg1,
String arg2) {
return new String[][]{{arg1, arg2}};
}
}
| 855 |
582 | /**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.diagram;
import org.eclipse.gef.palette.MarqueeToolEntry;
import org.eclipse.gef.palette.PaletteRoot;
import org.eclipse.gef.palette.PaletteStack;
import org.eclipse.gef.tools.MarqueeSelectionTool;
/**
* PaletteRoot for All Diagrams
*
* @author <NAME>
*/
public class AbstractPaletteRoot extends PaletteRoot {
/**
* Create a PaletteStack containing the Marquee selection tools
*/
protected PaletteStack createMarqueeSelectionStack() {
PaletteStack stack = new PaletteStack(Messages.AbstractPaletteRoot_0, Messages.AbstractPaletteRoot_1, null);
MarqueeToolEntry marquee = new MarqueeToolEntry(Messages.AbstractPaletteRoot_2);
marquee.setToolProperty(MarqueeSelectionTool.PROPERTY_MARQUEE_BEHAVIOR,
Integer.valueOf(MarqueeSelectionTool.BEHAVIOR_NODES_CONTAINED_AND_RELATED_CONNECTIONS));
stack.add(marquee);
marquee = new MarqueeToolEntry(Messages.AbstractPaletteRoot_3);
marquee.setToolProperty(MarqueeSelectionTool.PROPERTY_MARQUEE_BEHAVIOR,
Integer.valueOf(MarqueeSelectionTool.BEHAVIOR_NODES_TOUCHED_AND_RELATED_CONNECTIONS));
stack.add(marquee);
marquee = new MarqueeToolEntry(Messages.AbstractPaletteRoot_4);
marquee.setToolProperty(MarqueeSelectionTool.PROPERTY_MARQUEE_BEHAVIOR,
Integer.valueOf(MarqueeSelectionTool.BEHAVIOR_CONNECTIONS_CONTAINED));
stack.add(marquee);
marquee = new MarqueeToolEntry(Messages.AbstractPaletteRoot_5);
marquee.setToolProperty(MarqueeSelectionTool.PROPERTY_MARQUEE_BEHAVIOR,
Integer.valueOf(MarqueeSelectionTool.BEHAVIOR_CONNECTIONS_TOUCHED));
stack.add(marquee);
marquee = new MarqueeToolEntry(Messages.AbstractPaletteRoot_6);
marquee.setToolProperty(MarqueeSelectionTool.PROPERTY_MARQUEE_BEHAVIOR,
Integer.valueOf(MarqueeSelectionTool.BEHAVIOR_NODES_CONTAINED));
stack.add(marquee);
marquee = new MarqueeToolEntry(Messages.AbstractPaletteRoot_7);
marquee.setToolProperty(MarqueeSelectionTool.PROPERTY_MARQUEE_BEHAVIOR,
Integer.valueOf(MarqueeSelectionTool.BEHAVIOR_NODES_TOUCHED));
stack.add(marquee);
return stack;
}
}
| 1,122 |
2,540 | public class Foo extends Exception implements Cloneable {
}
| 12 |
344 | <filename>lib_dasp/lib_dasp/impl/Sampling.cpp
/*
* Sampling.cpp
*
* Created on: Mar 25, 2012
* Author: david
*/
//------------------------------------------------------------------------------
#include <color.hpp>
#include "Sampling.hpp"
#include "../Superpixels.hpp"
#include <pds/Tools.hpp>
#include <density/ScalePyramid.hpp>
#include <Slimage/Paint.hpp>
#include <functional>
#include <boost/random.hpp>
#include <boost/math/constants/constants.hpp>
#include <cmath>
#define CREATE_DEBUG_IMAGES
#ifdef CREATE_DEBUG_IMAGES
#include <fstream>
#include <boost/format.hpp>
#endif
//------------------------------------------------------------------------------
namespace dasp {
//------------------------------------------------------------------------------
#ifdef CREATE_DEBUG_IMAGES
template<unsigned int Q>
Eigen::MatrixXf CombineMipmaps(const std::vector<Eigen::MatrixXf>& mm)
{
Eigen::MatrixXf r = Eigen::MatrixXf::Zero(Q*3*mm[0].rows()/2, Q*mm[0].cols());
r.block(0, 0, Q*mm[0].rows(), Q*mm[0].cols()) = density::ScaleUp(mm[0], Q);
unsigned int y = 0;
for(unsigned int i=1; i<mm.size(); ++i) {
r.block(Q*mm[0].rows(), y, Q*mm[i].rows(), Q*mm[i].cols()) = density::ScaleUp(mm[i], Q);
y += Q*mm[i].cols();
}
return r;
}
void DebugShowMatrix(const Eigen::MatrixXf& mat, const std::string& tag)
{
const float range = 5000.0f / static_cast<float>((640*480)/25);
sDebugImages[tag] = slimage::Ptr(
common::MatrixToImage(mat,
std::bind(&common::IntensityColor, std::placeholders::_1, 0.0f, range)));
}
void DebugWriteMatrix(const Eigen::MatrixXf& mat, const std::string& tag)
{
std::ofstream ofs(tag);
for(int i=0; i<mat.rows(); i++) {
for(int j=0; j<mat.cols(); j++) {
ofs << mat(i,j);
if(j+1 != mat.cols()) {
ofs << "\t";
}
}
if(i+1 != mat.rows()) {
ofs << "\n";
}
}
}
template<unsigned int Q>
void DebugMipmap(const std::vector<Eigen::MatrixXf>& mipmaps, const std::string& tag)
{
// boost::format fmt(tag + "_%2d");
// for(std::size_t i=0; i<mipmaps.size(); ++i) {
// const float range = 3000.0f / static_cast<float>(mipmaps[i].rows() * mipmaps[i].cols());
// Eigen::MatrixXf scl = density::ScaleUp(mipmaps[i], ((Q==2) ? 1 : Q)*(1<<i));
// sDebugImages[(fmt % i).str()] = slimage::Ptr(
// common::MatrixToImage(scl,
// std::bind(&common::IntensityColor, std::placeholders::_1, 0.0f, range)));
// }
DebugShowMatrix(CombineMipmaps<Q>(mipmaps), tag);
}
template<unsigned int Q>
void DebugMipmapDelta(const std::vector<Eigen::MatrixXf>& mipmaps, const std::string& tag)
{
// boost::format fmt(tag + "_%2d");
// for(std::size_t i=0; i<mipmaps.size(); ++i) {
// const float range = 3000.0f / static_cast<float>(mipmaps[i].rows() * mipmaps[i].cols());
// Eigen::MatrixXf scl = density::ScaleUp(mipmaps[i], ((Q==2) ? 1 : Q)*(1<<i));
// sDebugImages[(fmt % i).str()] = slimage::Ptr(
// common::MatrixToImage(scl,
// std::bind(&common::PlusMinusColor, std::placeholders::_1, range)));
// }
const float range = 2500.0f / static_cast<float>((640*480)/25);
sDebugImages[tag] = slimage::Ptr(
common::MatrixToImage(CombineMipmaps<Q>(mipmaps),
std::bind(&common::PlusMinusColor, std::placeholders::_1, range)));
}
#endif
Eigen::MatrixXf ComputeDepthDensity(const ImagePoints& points, const Parameters& opt)
{
constexpr float NZ_MIN = 0.174f; // = std::sin(80 deg)
Eigen::MatrixXf density(points.width(), points.height());
float* p_density = density.data();
for(unsigned int i=0; i<points.size(); i++) {
const Point& p = points[i];
/** Estimated number of super pixels at this point
* We assume circular superpixels. So the area A of a superpixel at
* point location is R*R*pi and the superpixel density is 1/A.
* If the depth information is invalid, the density is 0.
*/
float cnt = 0.0f;
if(p.is_valid) {
cnt = 1.0f / (M_PI * p.cluster_radius_px * p.cluster_radius_px);
// Additionally the local gradient has to be considered.
if(opt.gradient_adaptive_density) {
cnt /= std::max(NZ_MIN, p.computeCircularity());
}
}
p_density[i] = cnt;
}
return density;
}
Eigen::MatrixXf ComputeSaliency(const ImagePoints& points, const Parameters& opt)
{
const int rows = points.rows();
const int cols = points.cols();
const float BR2_INV = 1.0f / (opt.base_radius * opt.base_radius);
Eigen::MatrixXf saliency_col(rows, cols);
// Eigen::MatrixXf saliency_norm(rows, cols);
float* p_saliency_col = saliency_col.data();
// float* p_saliency_norm = saliency_norm.data();
for(int y=0; y<cols; ++y) {
for(int x=0; x<rows; ++x, ++p_saliency_col/*,++p_saliency_norm*/) {
const Point& p = points(x,y);
if(!p.is_valid) {
*p_saliency_col = 0.0f;
// *p_saliency_norm = 0.0f;
continue;
}
const int r = static_cast<int>(p.cluster_radius_px + 0.5f);
const int x0 = std::max(x - r, 0);
const int x1 = std::min(x + r, rows - 1);
const int y0 = std::max(y - r, 0);
const int y1 = std::min(y + r, cols - 1);
// compute mean
const Eigen::Vector3f mean_col = p.color;
const Eigen::Vector3f mean_pos = p.position;
// const Eigen::Vector3f mean_normal = p.normal;
// compute compression error
float err_col = 0.0f;
float err_norm = 0.0f;
float w_total = 0.0f;
for(int i=y0; i<=y1; i++) {
for(int j=x0; j<=x1; j++) {
const Point& q = points(j,i);
if(!q.is_valid)
continue;
float w = 1.0f / (1.0f + (q.position - mean_pos).squaredNorm() * BR2_INV);
w_total += w;
err_col += w * (q.color - mean_col).squaredNorm();
// err_norm += w * (1.0f - q.normal.dot(mean_normal));
}
}
// write
*p_saliency_col = std::sqrt(err_col / w_total);
// *p_saliency_norm = err_norm / w_total;
}
}
// normalize
{
const float mean = saliency_col.mean();
const float min = saliency_col.minCoeff();
const float max = saliency_col.maxCoeff();
// std::cout << "color: mean=" << mean << ", min=" << min << ", max=" << max << std::endl;
saliency_col = (saliency_col.array() - mean)/std::max(mean-min, max-mean);
}
// {
// const float mean = saliency_norm.mean();
// const float min = saliency_norm.minCoeff();
// const float max = saliency_norm.maxCoeff();
// // std::cout << "normal: mean=" << mean << ", min=" << min << ", max=" << max << std::endl;
// saliency_norm = (saliency_norm.array() - mean)/std::max(mean-min, max-mean);
// }
return saliency_col;// + 0.25f * saliency_norm;
}
void AdaptClusterRadiusBySaliency(ImagePoints& points, const Eigen::MatrixXf& saliency, const Parameters& opt)
{
// FIXME what is base?
const float base = 0.5f;
auto it_p = points.begin();
auto it_p_end = points.end();
auto it_s = saliency.data();
for(; it_p!=it_p_end; ++it_p, ++it_s) {
it_p->cluster_radius_px *= std::pow(base, -*it_s);
}
}
//------------------------------------------------------------------------------
void SetRandomNumberSeed(unsigned int x)
{
pds::impl::RndSeed(x);
}
//------------------------------------------------------------------------------
}
//------------------------------------------------------------------------------
| 2,878 |
2,448 | <reponame>pologood/EasyTransaction
package com.yiqiniu.easytrans.util;
import com.yiqiniu.easytrans.protocol.TransactionId;
public interface ByteFormIdCodec {
byte[] getTransIdByte(TransactionId transId);
byte[] getAppIdCeil(String appId);
byte[] getAppIdFloor(String appId);
TransactionId getTransIdFromByte(byte[] transId);
} | 115 |
2,322 | <reponame>sckevmit/cartography<gh_stars>1000+
def test_basic():
assert True
| 32 |
602 | package org.corfudb.infrastructure.logreplication.runtime.fsm;
import lombok.extern.slf4j.Slf4j;
import org.corfudb.infrastructure.logreplication.replication.LogReplicationSourceManager;
/**
* Log Replication Runtime Stopped State.
*
* In this state log replication has been stopped due to loss of leadership.
*
* @author amartinezman
*/
@Slf4j
public class StoppedState implements LogReplicationRuntimeState {
private LogReplicationSourceManager replicationSourceManager;
public StoppedState(LogReplicationSourceManager replicationSourceManager) {
this.replicationSourceManager = replicationSourceManager;
}
@Override
public LogReplicationRuntimeStateType getType() {
return LogReplicationRuntimeStateType.STOPPED;
}
@Override
public LogReplicationRuntimeState processEvent(LogReplicationRuntimeEvent event) {
// Regardless of the event
return this;
}
@Override
public void onEntry(LogReplicationRuntimeState from) {
replicationSourceManager.shutdown();
}
}
| 333 |
353 |
package org.nutz.integration.grpc;
import java.io.File;
import java.io.IOException;
import org.nutz.log.Log;
import org.nutz.log.Logs;
import io.grpc.Server;
import io.grpc.ServerBuilder;
/**
 * A simple wrapper
*/
public class SimpleGrpcServer {
private static final Log log = Logs.get();
private Server server;
protected int port = 50051;
protected DynamicHandlerRegistry registry;
protected File certChain;
protected File privateKey;
public void start() {
try {
if (registry == null)
registry = new DynamicHandlerRegistry();
ServerBuilder<?> sb = ServerBuilder.forPort(port);
if (certChain != null)
sb.useTransportSecurity(certChain, privateKey);
server = sb.fallbackHandlerRegistry(registry).build().start();
log.info("gRPC Server started, listening on " + port);
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
public void stop() {
if (server != null) {
server.shutdown();
}
}
public void setPort(int port) {
this.port = port;
}
public void setRegistry(DynamicHandlerRegistry registry) {
this.registry = registry;
}
public DynamicHandlerRegistry getRegistry() {
return registry;
}
public Server getServer() {
return server;
}
public int getPort() {
return port;
}
} | 665 |
373 | /*
* Power by www.xiaoi.com
*/
package com.eastrobot.doc.web;
import com.eastrobot.doc.config.BaseController;
import com.eastrobot.doc.config.SystemConstants;
import com.eastrobot.doc.config.WebappContext;
import com.eastrobot.doc.model.entity.Attachment;
import com.eastrobot.doc.service.FileService;
import com.eastrobot.doc.util.HtmlUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Calendar;
import java.util.List;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletRequest;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.artofsolving.jodconverter.OfficeDocumentConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.FileCopyUtils;
import org.springframework.util.ResourceUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.multipart.MultipartFile;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
/**
* @author <a href="mailto:<EMAIL>">eko.zhan</a>
* @date 2017年7月29日 上午9:44:36
* @version 1.0
*/
@Api
@RequestMapping("index")
@RestController
@Slf4j
public class IndexController extends BaseController {
private final String outputExtension = "html";
@Resource
FileService fileService;
/**
     * File list
     * @author eko.zhan at 2017-08-09 20:32:19
* @return
* @throws FileNotFoundException
*/
@ApiOperation(value="获取文件数据列表", notes="获取固定路径下的文件,并返回文件名,文件所在路径和文件大小")
@RequestMapping(value="getDataList", method=RequestMethod.POST)
public List<Attachment> getDataList() throws FileNotFoundException{
return fileService.list();
}
/**
     * Load a file
     * @author eko.zhan at 2017-08-09 20:32:30
* @param name
* @return
* @throws FileNotFoundException
* @throws IOException
*/
@ApiOperation(value="根据指定的文件名称获取文件内容", notes="返回文件内容会自动过滤图片信息")
@ApiImplicitParams({
@ApiImplicitParam(name="name", value="文件相对路径", required=true, dataType="String")
})
@RequestMapping(value="loadFileData", produces="text/plain;charset=utf-8", method=RequestMethod.POST)
public String loadFileData(String name) throws IOException{
return fileService.loadData(name);
}
/**
     * Load the image resources inside the html
     * @author eko.zhan at 2017-08-09 20:32:06
* @param name
* @param imgname
* @return
* @throws IOException
*/
@ApiOperation(value="加载图片内容", notes="获取word文档中内嵌的图片资源,返回图片内容")
@ApiImplicitParams({
@ApiImplicitParam(name="name", value="文件相对路径", required=true, dataType="String"),
@ApiImplicitParam(name="imgname", value="图片名称", required=true, dataType="String")
})
@RequestMapping(value="loadFileImg", method=RequestMethod.GET)
public ResponseEntity<byte[]> loadFileImg(String name, String imgname){
try {
String basename = FilenameUtils.getBaseName(name);
File file = ResourceUtils.getFile("classpath:static/DATAS/" + basename + "/" + imgname);
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.IMAGE_PNG);
return new ResponseEntity<byte[]>(IOUtils.toByteArray(new FileInputStream(file)), headers, HttpStatus.OK);
} catch (FileNotFoundException e) {
log.error("文件[" + name + "]不存在, " + e.getMessage());
} catch (IOException e) {
log.error(e.getMessage());
e.printStackTrace();
}
return null;
}
/**
     * Upload a file
     * @author eko.zhan at 2017-08-09 20:32:39
* @param uploadFile
* @return
*/
@ApiOperation(value="上传文件", notes="")
@ApiImplicitParams({
@ApiImplicitParam(name="uploadFile", value="待上传的文件", required=true, dataType="__file")
})
@RequestMapping(value="uploadData", method=RequestMethod.POST)
public JSONObject uploadData(@RequestParam("uploadFile") MultipartFile uploadFile){
JSONObject json = new JSONObject();
json.put("result", fileService.upload(uploadFile)==true?1:0);
return json;
}
/**
     * Save the html content
     * @author eko.zhan at 2017-08-09 21:04:20
* @param name
* @param data
* @throws IOException
*/
@ApiOperation(value="保存文件", notes="")
@ApiImplicitParams({
@ApiImplicitParam(name="name", value="文件相对路径", required=true, dataType="String"),
@ApiImplicitParam(name="data", value="文件内容", required=true, dataType="String")
})
@RequestMapping(value="saveFileData", method=RequestMethod.POST)
public JSONObject saveFileData(String name, String data, HttpServletRequest request){
        //TODO This is only a pseudo-save: it modifies the HTML content but not the underlying file, so the problem remains if the user clicks download
        //TODO If the user edits an image, how should that be handled?
JSONObject json = new JSONObject();
json.put("result", 1);
try {
            //DONE After optimizing the underlying DefaultDocumentFormatRegistry.java, html can be converted to docx
            //boolean is07Xml = false; // whether this is a post-2007 document (docx/xlsx/pptx); if so, the html must first be converted to a 97-format office file and then from 97 to 2007, because converting html directly to 2007 is problematic
File file = ResourceUtils.getFile("classpath:static/DATAS/" + name);
File newFile = new File(file.getParent() + "/" + Calendar.getInstance().getTimeInMillis() + "_" + name);
if (!name.toLowerCase().endsWith("x")){
newFile = new File(file.getParent() + "/" + Calendar.getInstance().getTimeInMillis() + "_" + name + "x");
}
String basename = FilenameUtils.getBaseName(file.getName());
File targetFile = new File(file.getParent() + "/" + basename + "/" + basename + "." + outputExtension);
if (targetFile.exists()){
                //Replace the body content of the html with the current data payload
//String htmlData = IOUtils.toString(new FileInputStream(targetFile), HtmlUtils.getFileEncoding(targetFile));
String htmlData = (String)request.getSession().getAttribute(SystemConstants.HTML_HEADER) + data + HtmlUtils.FOOT_TEMPLATE;
                //DONE How should the file encoding be handled? After saving, the Chinese content is accessible via HTTP requests, but double-clicking the html file on disk shows garbled text
                //add by eko.zhan at 2017-08-11 14:55 solution: rewrite the Html header and change the encoding to utf-8
//IOUtils.write(htmlData.getBytes(HtmlUtils.UTF8), new FileOutputStream(targetFile));
IOUtils.write(htmlData.getBytes(), new FileOutputStream(targetFile));
                //DONE Because the html file encoding was incorrect, the encoding of the converted word file was also incorrect
                //add by eko.zhan at 2017-08-11 15:05 with the html encoding handled above, the conversion encoding problem is resolved as well
                //TODO Converting from html to doc produces some incorrect doc styling
convert(targetFile, newFile);
}
} catch (FileNotFoundException e) {
json.put("result", 0);
e.printStackTrace();
} catch (IOException e) {
json.put("result", 0);
e.printStackTrace();
}
return json;
}
/**
     * Delete a file
     * @author eko.zhan at 2017-08-09 21:32:18
* @param name
* @return
*/
@ApiOperation(value="删除文件", notes="")
@ApiImplicitParams({
@ApiImplicitParam(name="name", value="文件相对路径", required=true, dataType="String")
})
@RequestMapping(value="delete", method=RequestMethod.POST)
public JSONObject delete(String name){
        //TODO On Windows the html file cannot be deleted while it is in use; could File.createTempFile be used to work around this?
JSONObject json = new JSONObject();
json.put("result", 1);
try {
File file = ResourceUtils.getFile("classpath:static/DATAS/" + name);
String basename = FilenameUtils.getBaseName(file.getName());
File targetDir = new File(file.getParent() + "/" + basename);
if (targetDir.exists()){
FileUtils.deleteDirectory(targetDir);
}
FileUtils.forceDelete(file);
} catch (FileNotFoundException e) {
json.put("result", 0);
json.put("msg", e.getMessage());
e.printStackTrace();
} catch (IOException e) {
json.put("result", 0);
json.put("msg", e.getMessage());
e.printStackTrace();
}
return json;
}
}
| 3,794 |
1,750 | //
// CROPrototypeB.h
// Tests
//
// Created by <NAME> on 11/09/13.
//
//
#import <Foundation/Foundation.h>
@class CROSingletonA;
@class CROPrototypeA;
@interface CROPrototypeB : NSObject
@property(nonatomic, strong, readonly) CROSingletonA *singletonA;
@property(nonatomic, strong, readonly) CROPrototypeA *prototypeA;
- (id)initWithCROSingletonA:(CROSingletonA *)singletonA;
- (id)initWithCROPrototypeA:(CROPrototypeA *)prototypeA;
@end
| 176 |
585 | #include "gtest/gtest.h"
#include "gbtsync/gbtsync.h"
#include <fstream>
#include <cstdio>
TEST(DataManager, Pool)
{
// cleanup before start testing
std::remove("testdata/filemanagertestdata/temp.empty.txt");
auto fileOperationManager = new FileDataOperationManager("testdata/filemanagertestdata/", {'G', 'B', 'T'}, {'G', 'B', 'T'}, "");
DataManager manager(fileOperationManager);
{
auto diffResult = manager.DiffDataHandles();
EXPECT_EQ(diffResult.first.size(), 2u);
EXPECT_EQ(diffResult.second.size(), 0u);
}
{
auto iter = manager.GetDataHandlers().find("correct.empty.txt");
EXPECT_TRUE(iter != manager.GetDataHandlers().end());
}
{
auto iter = manager.GetDataHandlers().find("correct.1.txt");
EXPECT_TRUE(iter != manager.GetDataHandlers().end());
}
{
std::ofstream ofile("testdata/filemanagertestdata/temp.empty.txt");
ofile << "GBTGBT";
ofile.close();
{
auto diffResult = manager.DiffDataHandles();
EXPECT_EQ(diffResult.second.size(), 0u);
auto& newFiles = diffResult.first;
ASSERT_EQ(newFiles.size(), 1u);
EXPECT_EQ(newFiles[0], "temp.empty.txt");
auto iter = manager.GetDataHandlers().find("temp.empty.txt");
EXPECT_TRUE(iter != manager.GetDataHandlers().end());
}
{
std::remove("testdata/filemanagertestdata/temp.empty.txt");
auto diffResult = manager.DiffDataHandles();
EXPECT_EQ(diffResult.second.size(), 1u);
EXPECT_EQ(diffResult.first.size(), 0u);
auto iter = manager.GetDataHandlers().find("temp.empty.txt");
EXPECT_TRUE(iter == manager.GetDataHandlers().end());
}
}
}
TEST(DataManager, Data)
{
auto fileOperationManager = new FileDataOperationManager("testdata/filemanagertestdata/", {'G', 'B', 'T'}, {'G', 'B', 'T'}, "");
DataManager manager(fileOperationManager);
auto newFiles = manager.DiffDataHandles();
{
const auto& loader = manager.GetDataHandlers().at("correct.1.txt");
std::string compareStr = "[somestring]";
EXPECT_TRUE(loader->GetData() == std::vector<char>(compareStr.begin(), compareStr.end()));
}
} | 1,010 |
642 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jeesuite.common.util;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang3.StringUtils;
import com.jeesuite.common.util.BeanUtils;
/**
* @description <br>
* @author <a href="mailto:<EMAIL>">vakin</a>
 * @date 2018-04-25
*/
@SuppressWarnings({ "unchecked"})
public class ParameterUtils {
public final static String CONTACT_STR = "&";
public final static String EQUALS_STR = "=";
public final static String SPLIT_STR = ",";
public static final String JSON_SUFFIX = "}";
public static final String JSON_PREFIX = "{";
public static final String BRACKET_PREFIX = "[";
public static final String BRACKET_SUFFIX = "]";
public final static String PARAM_SIGN = "sign";
public final static String PARAM_SIGN_TYPE = "signType";
public final static String PARAM_DATA = "data";
public static Map<String, Object> queryParamsToMap(HttpServletRequest request) {
Map<String, Object> params = new HashMap<String, Object>();
Enumeration<String> e = request.getParameterNames();
StringBuilder tmpbuff = new StringBuilder();
if (e.hasMoreElements()) {
while (e.hasMoreElements()) {
String name = e.nextElement();
String[] values = request.getParameterValues(name);
if (values.length == 1) {
if (StringUtils.isNotBlank(values[0]))
params.put(name, values[0]);
} else {
tmpbuff.setLength(0);
for (int i = 0; i < values.length; i++) {
if (StringUtils.isNotBlank(values[i])) {
tmpbuff.append(values[i].trim()).append(",");
}
}
if (tmpbuff.length() > 0) {
tmpbuff.deleteCharAt(tmpbuff.length() - 1);
params.put(name, tmpbuff.toString());
}
}
}
}
return params;
}
public static Map<String, Object> queryParamsToMap(String queryParams){
Map<String, Object> map = new HashMap<String, Object>();
String[] paramSegs = StringUtils.split(queryParams, CONTACT_STR);
String[] kv;
for (String param : paramSegs) {
kv = StringUtils.split(param,EQUALS_STR);
if(kv.length == 1 || StringUtils.isBlank(kv[1]))continue;
map.put(kv[0].trim(), kv[1].trim());
}
return map;
}
public static String objectToQueryParams(Object param){
Map<String, Object> map = BeanUtils.beanToMap(param);
return mapToQueryParams(map);
}
public static String mapToQueryParams(Map<String, Object> param){
if(param == null || param.isEmpty())return null;
StringBuilder sb = new StringBuilder();
List<String> keys = new ArrayList<>(param.keySet());
Collections.sort(keys);
Object value;
for (String key : keys) {
if(PARAM_SIGN_TYPE.equals(key) || PARAM_SIGN.equals(key))continue;
value = param.get(key);
if(value == null || StringUtils.isBlank(value.toString()))continue;
if(value instanceof Map){
value = mapToQueryParams((Map<String, Object>) value);
if(value != null){
value = JSON_PREFIX + value + JSON_SUFFIX;
}
}else if(value instanceof Iterable) {
StringBuilder sb1 = new StringBuilder();
sb1.append(BRACKET_PREFIX);
Iterator<?> it = ((Iterable<?>) value).iterator();
while (it.hasNext()) {
Object object = it.next();
if(BeanUtils.isSimpleDataType(object)){
sb1.append(object).append(SPLIT_STR);
}else{
sb1.append(JSON_PREFIX).append(objectToQueryParams(object)).append(JSON_SUFFIX).append(SPLIT_STR);
}
}
if(sb1.length() == 1){
value = null;
} else if(sb1.length() > 0){
sb1.deleteCharAt(sb1.length() - 1);
sb1.append(BRACKET_SUFFIX);
value = sb1.toString();
}
}
if(value != null){
sb.append(key).append(EQUALS_STR).append(value).append(CONTACT_STR);
}
}
sb.deleteCharAt(sb.length() - 1);
return sb.toString();
}
}
| 2,008 |
348 | {"nom":"Brémontier-Merval","dpt":"Seine-Maritime","inscrits":409,"abs":93,"votants":316,"blancs":26,"nuls":12,"exp":278,"res":[{"panneau":"2","voix":174},{"panneau":"1","voix":104}]} | 75 |
15,179 | from jina import Executor
class TestExecutor(Executor):
pass
| 22 |
4,537 | // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef __STOUT_OS_ACCESS_HPP__
#define __STOUT_OS_ACCESS_HPP__
#include <string>
#include <stout/error.hpp>
#include <stout/try.hpp>
#ifdef __WINDOWS__
#include <stout/windows.hpp>
#include <stout/internal/windows/longpath.hpp>
#endif // __WINDOWS__
namespace os {
inline Try<bool> access(const std::string& path, int how)
{
int result;
#ifdef __WINDOWS__
std::wstring longpath = ::internal::windows::longpath(path);
result = ::_waccess(longpath.data(), how);
#else
result = ::access(path.data(), how);
#endif
if (result < 0) {
if (errno == EACCES) {
return false;
} else {
return ErrnoError();
}
}
return true;
}
} // namespace os {
#endif // __STOUT_OS_ACCESS_HPP__
| 429 |
442 | <filename>autots/models/dnn.py<gh_stars>100-1000
"""Neural Nets."""
import pandas as pd
try:
import tensorflow as tf
except Exception: # except ImportError
_has_tf = False
else:
_has_tf = True
"""
X, Y = window_maker(df, forecast_length = 6, shuffle = False,
input_dim = 'univariate', window_size = 10,
output_dim = '1step')
"""
if _has_tf:
class ResidualWrapper(tf.keras.Model):
"""From https://www.tensorflow.org/tutorials/structured_data/time_series"""
def __init__(self, model):
super().__init__()
self.model = model
def call(self, inputs, *args, **kwargs):
delta = self.model(inputs, *args, **kwargs)
# The prediction for each time step is the input
# from the previous time step plus the delta
# calculated by the model.
return inputs + delta
class KerasRNN(object):
"""Wrapper for Tensorflow Keras based RNN.
Args:
rnn_type (str): Keras cell type 'GRU' or default 'LSTM'
kernel_initializer (str): passed to first keras LSTM or GRU layer
hidden_layer_sizes (tuple): of len 1 or 3 passed to first keras LSTM or GRU layers
optimizer (str): Passed to keras model.compile
loss (str): Passed to keras model.compile
epochs (int): Passed to keras model.fit
batch_size (int): Passed to keras model.fit
verbose (int): 0, 1 or 2. Passed to keras model.fit
random_seed (int): passed to tf.random.set_seed()
"""
def __init__(
self,
rnn_type: str = 'LSTM',
kernel_initializer: str = 'lecun_uniform',
hidden_layer_sizes: tuple = (32, 32, 32),
optimizer: str = 'adam',
loss: str = 'huber',
epochs: int = 50,
batch_size: int = 32,
shape=1,
verbose: int = 1,
random_seed: int = 2020,
):
self.name = 'KerasRNN'
verbose = 0 if verbose < 0 else verbose
verbose = 2 if verbose > 2 else verbose
self.verbose = verbose
self.random_seed = random_seed
self.kernel_initializer = kernel_initializer
self.epochs = epochs
self.batch_size = batch_size
self.optimizer = optimizer
self.loss = loss
self.hidden_layer_sizes = hidden_layer_sizes
self.rnn_type = rnn_type
self.shape = shape
def fit(self, X, Y):
"""Train the model on dataframes of X and Y."""
if not _has_tf:
raise ImportError(
"Tensorflow not available, install with pip install tensorflow."
)
tf.keras.backend.clear_session()
tf.random.set_seed(self.random_seed)
train_X = pd.DataFrame(X).to_numpy()
if self.shape == 1:
train_X = train_X.reshape((train_X.shape[0], 1, train_X.shape[1]))
else:
train_X = train_X.reshape((train_X.shape[0], train_X.shape[1], 1))
INPUT_SHAPE = (train_X.shape[1], train_X.shape[2])
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
OUTPUT_SHAPE = Y.shape[1]
if self.rnn_type == "E2D2":
# crudely borrowed from: https://www.analyticsvidhya.com/blog/2020/10/multivariate-multi-step-time-series-forecasting-using-stacked-lstm-sequence-to-sequence-autoencoder-in-tensorflow-2-0-keras/
encoder_inputs = tf.keras.layers.Input(shape=INPUT_SHAPE)
encoder_l1 = tf.keras.layers.LSTM(
self.hidden_layer_sizes[0], return_sequences=True, return_state=True
)
encoder_outputs1 = encoder_l1(encoder_inputs)
encoder_states1 = encoder_outputs1[1:]
encoder_l2 = tf.keras.layers.LSTM(
self.hidden_layer_sizes[0], return_state=True
)
encoder_outputs2 = encoder_l2(encoder_outputs1[0])
encoder_states2 = encoder_outputs2[1:]
#
decoder_inputs = tf.keras.layers.RepeatVector(OUTPUT_SHAPE)(
encoder_outputs2[0]
)
layer_2_shape = self.hidden_layer_sizes
layer_2_size = (
layer_2_shape[2] if len(layer_2_shape) >= 3 else layer_2_shape[0]
)
#
decoder_l1 = tf.keras.layers.LSTM(layer_2_size, return_sequences=True)(
decoder_inputs, initial_state=encoder_states1
)
decoder_l2 = tf.keras.layers.LSTM(layer_2_size, return_sequences=True)(
decoder_l1, initial_state=encoder_states2
)
decoder_outputs2 = tf.keras.layers.TimeDistributed(
tf.keras.layers.Dense(OUTPUT_SHAPE)
)(decoder_l2)
#
simple_lstm_model = tf.keras.models.Model(encoder_inputs, decoder_outputs2)
if self.rnn_type == "CNN":
if len(self.hidden_layer_sizes) == 1:
kernel_size = 10 if INPUT_SHAPE[0] > 10 else INPUT_SHAPE[0]
simple_lstm_model = tf.keras.Sequential(
[
tf.keras.layers.Conv1D(
filters=self.hidden_layer_sizes[0],
kernel_size=kernel_size,
activation='relu',
input_shape=INPUT_SHAPE,
kernel_initializer=self.kernel_initializer,
),
tf.keras.layers.Dense(
units=self.hidden_layer_sizes[0], activation='relu'
),
tf.keras.layers.Dense(units=OUTPUT_SHAPE),
]
)
else:
# borrowed from https://keras.io/examples/timeseries/timeseries_classification_from_scratch
input_layer = tf.keras.layers.Input(INPUT_SHAPE)
layer_shape = self.hidden_layer_sizes
layer_2_size = (
layer_shape[1] if len(layer_shape) >= 2 else layer_shape[0]
)
layer_3_size = (
layer_shape[2] if len(layer_shape) >= 3 else layer_shape[0]
)
conv1 = tf.keras.layers.Conv1D(
filters=self.hidden_layer_sizes[0],
kernel_size=3,
padding="same",
kernel_initializer=self.kernel_initializer,
)(input_layer)
conv1 = tf.keras.layers.BatchNormalization()(conv1)
conv1 = tf.keras.layers.ReLU()(conv1)
conv2 = tf.keras.layers.Conv1D(
filters=layer_2_size, kernel_size=3, padding="same"
)(conv1)
conv2 = tf.keras.layers.BatchNormalization()(conv2)
conv2 = tf.keras.layers.ReLU()(conv2)
conv3 = tf.keras.layers.Conv1D(
filters=layer_3_size, kernel_size=3, padding="same"
)(conv2)
conv3 = tf.keras.layers.BatchNormalization()(conv3)
conv3 = tf.keras.layers.ReLU()(conv3)
gap = tf.keras.layers.GlobalAveragePooling1D()(conv3)
output_layer = tf.keras.layers.Dense(OUTPUT_SHAPE)(gap)
simple_lstm_model = tf.keras.models.Model(
inputs=input_layer, outputs=output_layer
)
elif len(self.hidden_layer_sizes) == 3:
if self.rnn_type == 'GRU':
simple_lstm_model = tf.keras.models.Sequential(
[
tf.keras.layers.GRU(
self.hidden_layer_sizes[0],
kernel_initializer=self.kernel_initializer,
input_shape=INPUT_SHAPE,
return_sequences=True,
),
tf.keras.layers.GRU(
self.hidden_layer_sizes[1], return_sequences=True
),
tf.keras.layers.Dropout(0.2),
tf.keras.layers.GRU(self.hidden_layer_sizes[2]),
tf.keras.layers.Dense(OUTPUT_SHAPE),
]
)
else:
simple_lstm_model = tf.keras.models.Sequential(
[
tf.keras.layers.LSTM(
self.hidden_layer_sizes[0],
kernel_initializer=self.kernel_initializer,
input_shape=INPUT_SHAPE,
return_sequences=True,
),
tf.keras.layers.Dropout(0.2),
tf.keras.layers.LSTM(
self.hidden_layer_sizes[1], return_sequences=True
),
tf.keras.layers.Dropout(0.2),
tf.keras.layers.LSTM(self.hidden_layer_sizes[2]),
tf.keras.layers.Dense(OUTPUT_SHAPE),
]
)
if len(self.hidden_layer_sizes) == 1:
if self.rnn_type == 'GRU':
simple_lstm_model = ResidualWrapper(
tf.keras.models.Sequential(
[
tf.keras.layers.GRU(
self.hidden_layer_sizes[0],
kernel_initializer=self.kernel_initializer,
input_shape=INPUT_SHAPE,
),
tf.keras.layers.Dense(10, activation='relu'),
tf.keras.layers.Dense(OUTPUT_SHAPE),
]
)
)
else:
simple_lstm_model = tf.keras.models.Sequential(
[
tf.keras.layers.Bidirectional(
tf.keras.layers.LSTM(
self.hidden_layer_sizes[0],
kernel_initializer=self.kernel_initializer,
input_shape=INPUT_SHAPE,
)
),
tf.keras.layers.Dense(32, activation='relu'),
tf.keras.layers.Dense(OUTPUT_SHAPE),
tf.keras.layers.Lambda(lambda x: x * 100.0),
]
)
if self.loss == 'Huber':
loss = tf.keras.losses.Huber()
else:
loss = self.loss
simple_lstm_model.compile(optimizer=self.optimizer, loss=loss)
simple_lstm_model.fit(
x=train_X,
y=Y,
epochs=self.epochs,
batch_size=self.batch_size,
verbose=self.verbose,
)
self.model = simple_lstm_model
return self
def predict(self, X):
"""Predict on dataframe of X."""
if self.shape == 1:
test = pd.DataFrame(X).to_numpy().reshape((X.shape[0], 1, X.shape[1]))
else:
test = pd.DataFrame(X).to_numpy().reshape((X.shape[0], X.shape[1], 1))
return pd.DataFrame(self.model.predict(test))
def transformer_encoder(inputs, head_size, num_heads, ff_dim, dropout=0):
# Attention and Normalization
x = tf.keras.layers.MultiHeadAttention(
key_dim=head_size, num_heads=num_heads, dropout=dropout
)(inputs, inputs)
x = tf.keras.layers.Dropout(dropout)(x)
x = tf.keras.layers.LayerNormalization(epsilon=1e-6)(x)
res = x + inputs
# Feed Forward Part
x = tf.keras.layers.Conv1D(filters=ff_dim, kernel_size=1, activation="relu")(res)
x = tf.keras.layers.Dropout(dropout)(x)
x = tf.keras.layers.Conv1D(filters=inputs.shape[-1], kernel_size=1)(x)
x = tf.keras.layers.LayerNormalization(epsilon=1e-6)(x)
return x + res
def transformer_build_model(
input_shape,
output_shape,
head_size,
num_heads,
ff_dim,
num_transformer_blocks,
mlp_units,
dropout=0,
mlp_dropout=0,
):
inputs = tf.keras.Input(shape=input_shape)
x = inputs
for _ in range(num_transformer_blocks):
x = transformer_encoder(x, head_size, num_heads, ff_dim, dropout)
x = tf.keras.layers.GlobalAveragePooling1D(data_format="channels_first")(x)
for dim in mlp_units:
x = tf.keras.layers.Dense(dim, activation="relu")(x)
x = tf.keras.layers.Dropout(mlp_dropout)(x)
outputs = tf.keras.layers.Dense(output_shape)(x)
return tf.keras.Model(inputs, outputs)
class Transformer(object):
"""Wrapper for Tensorflow Keras based Transformer.
based on: https://keras.io/examples/timeseries/timeseries_transformer_classification/
Args:
optimizer (str): Passed to keras model.compile
loss (str): Passed to keras model.compile
epochs (int): Passed to keras model.fit
batch_size (int): Passed to keras model.fit
verbose (int): 0, 1 or 2. Passed to keras model.fit
random_seed (int): passed to tf.random.set_seed()
"""
def __init__(
self,
head_size=256,
num_heads=4,
ff_dim=4,
num_transformer_blocks=4,
mlp_units=[128],
mlp_dropout=0.4,
dropout=0.25,
optimizer: str = 'adam',
loss: str = 'huber',
epochs: int = 50,
batch_size: int = 32,
verbose: int = 1,
random_seed: int = 2020,
):
self.name = 'Transformer'
verbose = 0 if verbose < 0 else verbose
verbose = 2 if verbose > 2 else verbose
self.verbose = verbose
self.random_seed = random_seed
self.epochs = epochs
self.batch_size = batch_size
self.optimizer = optimizer
self.loss = loss
self.head_size = head_size
self.num_heads = num_heads
self.ff_dim = ff_dim
self.num_transformer_blocks = num_transformer_blocks
self.mlp_units = mlp_units
self.mlp_dropout = mlp_dropout
self.dropout = dropout
def fit(self, X, Y):
"""Train the model on dataframes of X and Y."""
if not _has_tf:
raise ImportError(
"Tensorflow not available, install with pip install tensorflow."
)
tf.keras.backend.clear_session()
tf.random.set_seed(self.random_seed)
train_X = pd.DataFrame(X).to_numpy()
train_X = train_X.reshape((train_X.shape[0], train_X.shape[1], 1))
input_shape = train_X.shape[1:]
if Y.ndim == 1:
Y = Y.reshape(-1, 1)
OUTPUT_SHAPE = Y.shape[1]
self.model = transformer_build_model(
input_shape,
OUTPUT_SHAPE,
head_size=self.head_size,
num_heads=self.num_heads,
ff_dim=self.ff_dim,
num_transformer_blocks=self.num_transformer_blocks,
mlp_units=self.mlp_units,
mlp_dropout=self.mlp_dropout,
dropout=self.dropout,
)
if self.loss == 'Huber':
loss = tf.keras.losses.Huber()
else:
loss = self.loss
optimizer = self.optimizer
if optimizer == "adam":
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-4)
self.model.compile(
loss=loss,
optimizer=optimizer,
)
callbacks = [
tf.keras.callbacks.EarlyStopping(patience=10, restore_best_weights=True)
]
self.model.fit(
train_X,
Y,
validation_split=0.2,
epochs=self.epochs,
batch_size=self.batch_size,
callbacks=callbacks,
verbose=self.verbose,
)
return self
def predict(self, X):
"""Predict on dataframe of X."""
test = pd.DataFrame(X).to_numpy().reshape((X.shape[0], X.shape[1], 1))
return pd.DataFrame(self.model.predict(test))
| 8,896 |
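A minimal, hedged usage sketch for the KerasRNN wrapper above, assuming TensorFlow is installed; random NumPy arrays stand in for the window_maker output referenced in the module comment, and the shapes and hyperparameters here are illustrative only.

import numpy as np

# Toy stand-in for windowed data: 100 windows of length 10, forecasting 6 steps.
X = np.random.rand(100, 10)
Y = np.random.rand(100, 6)

model = KerasRNN(hidden_layer_sizes=(32,), epochs=2, verbose=0)
model.fit(X, Y)                    # single-layer path: bidirectional LSTM + dense head
forecast = model.predict(X[:5])    # pandas DataFrame, shape (5, 6) under these assumptions
print(forecast.shape)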
723 | <gh_stars>100-1000
from bin_chunk.binary_chunk import BinaryChunk
def main():
with open('./lua/hello.luac', 'rb') as f:
chunk = f.read()
if BinaryChunk.is_binary_chunk(chunk):
bc = BinaryChunk(chunk)
bc.undump()
bc.print_header()
bc.check_header()
bc.print_main_func()
if __name__ == '__main__':
main()
| 200 |
371 | <reponame>pambros/StereoKit<gh_stars>100-1000
#pragma once
const unsigned char sks_shader_builtin_font_hlsl[11744] = {
83,
75,
83,
72,
65,
68,
69,
82,
2,
0,
8,
0,
0,
0,
115,
107,
47,
102,
111,
110,
116,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
1,
0,
0,
0,
83,
116,
101,
114,
101,
111,
75,
105,
116,
66,
117,
102,
102,
101,
114,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
1,
3,
16,
3,
0,
0,
11,
0,
0,
0,
0,
0,
0,
0,
115,
107,
95,
118,
105,
101,
119,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
20,
212,
125,
214,
0,
60,
0,
128,
240,
121,
250,
78,
174,
1,
0,
0,
80,
120,
250,
78,
174,
1,
0,
0,
185,
0,
0,
0,
69,
0,
0,
0,
22,
212,
127,
214,
0,
61,
0,
128,
16,
115,
250,
78,
174,
1,
0,
0,
208,
121,
250,
78,
174,
1,
0,
0,
186,
0,
0,
0,
70,
0,
0,
0,
0,
0,
0,
0,
128,
0,
0,
0,
4,
0,
32,
0,
115,
107,
95,
112,
114,
111,
106,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
36,
0,
0,
0,
174,
1,
0,
0,
44,
212,
133,
214,
0,
64,
0,
128,
80,
123,
250,
78,
174,
1,
0,
0,
176,
129,
250,
78,
174,
1,
0,
0,
155,
0,
0,
0,
57,
0,
0,
0,
46,
212,
135,
214,
0,
65,
0,
128,
144,
122,
250,
78,
174,
1,
0,
0,
240,
115,
250,
78,
174,
1,
0,
0,
128,
0,
0,
0,
128,
0,
0,
0,
4,
0,
32,
0,
115,
107,
95,
112,
114,
111,
106,
95,
105,
110,
118,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
208,
102,
251,
78,
174,
1,
0,
0,
208,
110,
251,
78,
174,
1,
0,
0,
36,
212,
141,
214,
0,
68,
0,
128,
144,
120,
250,
78,
174,
1,
0,
0,
80,
116,
250,
78,
174,
1,
0,
0,
96,
0,
0,
0,
66,
0,
0,
0,
38,
212,
143,
214,
0,
69,
0,
128,
208,
125,
250,
78,
174,
1,
0,
0,
0,
1,
0,
0,
128,
0,
0,
0,
4,
0,
32,
0,
115,
107,
95,
118,
105,
101,
119,
112,
114,
111,
106,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
80,
114,
250,
78,
174,
1,
0,
0,
48,
116,
250,
78,
174,
1,
0,
0,
91,
0,
0,
0,
60,
0,
0,
0,
60,
212,
149,
214,
0,
72,
0,
128,
112,
124,
250,
78,
174,
1,
0,
0,
80,
122,
250,
78,
174,
1,
0,
0,
156,
0,
0,
0,
76,
0,
0,
0,
62,
212,
151,
214,
0,
73,
0,
128,
128,
1,
0,
0,
128,
0,
0,
0,
4,
0,
32,
0,
115,
107,
95,
108,
105,
103,
104,
116,
105,
110,
103,
95,
115,
104,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
50,
212,
155,
214,
0,
75,
0,
128,
176,
125,
250,
78,
174,
1,
0,
0,
144,
127,
250,
78,
174,
1,
0,
0,
131,
0,
0,
0,
70,
0,
0,
0,
52,
212,
157,
214,
0,
76,
0,
128,
48,
126,
250,
78,
174,
1,
0,
0,
176,
127,
250,
78,
174,
1,
0,
0,
138,
0,
0,
0,
174,
1,
0,
0,
0,
2,
0,
0,
144,
0,
0,
0,
4,
0,
36,
0,
115,
107,
95,
99,
97,
109,
101,
114,
97,
95,
112,
111,
115,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
102,
0,
0,
0,
88,
0,
0,
0,
74,
212,
163,
214,
0,
79,
0,
128,
208,
127,
250,
78,
174,
1,
0,
0,
48,
126,
250,
78,
174,
1,
0,
0,
140,
0,
0,
0,
89,
0,
0,
0,
76,
212,
165,
214,
0,
80,
0,
128,
112,
129,
250,
78,
174,
1,
0,
0,
112,
126,
250,
78,
174,
1,
0,
0,
144,
2,
0,
0,
32,
0,
0,
0,
4,
0,
8,
0,
115,
107,
95,
99,
97,
109,
101,
114,
97,
95,
100,
105,
114,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
80,
129,
250,
78,
174,
1,
0,
0,
110,
0,
0,
0,
174,
1,
0,
0,
66,
212,
171,
214,
0,
83,
0,
128,
112,
125,
250,
78,
174,
1,
0,
0,
112,
124,
250,
78,
174,
1,
0,
0,
164,
0,
0,
0,
68,
0,
0,
0,
68,
212,
173,
214,
0,
84,
0,
128,
112,
123,
250,
78,
174,
1,
0,
0,
176,
2,
0,
0,
32,
0,
0,
0,
4,
0,
8,
0,
115,
107,
95,
102,
105,
110,
103,
101,
114,
116,
105,
112,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
48,
122,
250,
78,
174,
1,
0,
0,
48,
118,
250,
78,
174,
1,
0,
0,
100,
0,
0,
0,
174,
1,
0,
0,
90,
212,
179,
214,
0,
87,
0,
128,
112,
119,
250,
78,
174,
1,
0,
0,
48,
122,
250,
78,
174,
1,
0,
0,
101,
0,
0,
0,
174,
1,
0,
0,
92,
212,
181,
214,
0,
88,
0,
128,
208,
2,
0,
0,
32,
0,
0,
0,
4,
0,
8,
0,
115,
107,
95,
99,
117,
98,
101,
109,
97,
112,
95,
105,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
80,
212,
185,
214,
0,
90,
0,
128,
32,
19,
252,
78,
174,
1,
0,
0,
80,
125,
250,
78,
174,
1,
0,
0,
167,
0,
0,
0,
62,
0,
0,
0,
82,
212,
187,
214,
0,
91,
0,
128,
240,
128,
250,
78,
174,
1,
0,
0,
176,
123,
250,
78,
174,
1,
0,
0,
132,
0,
0,
0,
255,
255,
255,
255,
240,
2,
0,
0,
16,
0,
0,
0,
4,
0,
4,
0,
115,
107,
95,
116,
105,
109,
101,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
126,
0,
0,
0,
63,
0,
0,
0,
104,
212,
193,
214,
0,
94,
0,
128,
240,
114,
250,
78,
174,
1,
0,
0,
80,
118,
250,
78,
174,
1,
0,
0,
103,
0,
0,
0,
174,
1,
0,
0,
106,
212,
195,
214,
0,
95,
0,
128,
48,
124,
250,
78,
174,
1,
0,
0,
208,
123,
250,
78,
174,
1,
0,
0,
0,
3,
0,
0,
4,
0,
0,
0,
4,
0,
1,
0,
115,
107,
95,
118,
105,
101,
119,
95,
99,
111,
117,
110,
116,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
16,
128,
250,
78,
174,
1,
0,
0,
143,
0,
0,
0,
174,
1,
0,
0,
96,
212,
201,
214,
0,
98,
0,
128,
12,
0,
0,
0,
122,
0,
0,
0,
160,
0,
0,
0,
174,
1,
0,
0,
64,
69,
251,
78,
174,
1,
0,
0,
98,
212,
203,
214,
0,
99,
0,
128,
16,
122,
250,
78,
174,
1,
0,
0,
4,
3,
0,
0,
4,
0,
0,
0,
2,
0,
1,
0,
84,
114,
97,
110,
115,
102,
111,
114,
109,
66,
117,
102,
102,
101,
114,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
1,
3,
240,
255,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
115,
107,
95,
105,
110,
115,
116,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
6,
0,
0,
0,
0,
0,
0,
0,
18,
0,
0,
0,
174,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
240,
255,
0,
0,
0,
0,
51,
3,
36,
71,
108,
111,
98,
97,
108,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
3,
16,
0,
0,
0,
1,
0,
0,
0,
16,
0,
0,
0,
0,
0,
128,
63,
0,
0,
128,
63,
0,
0,
128,
63,
0,
0,
128,
63,
99,
111,
108,
111,
114,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
99,
111,
108,
111,
114,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
16,
0,
0,
0,
4,
0,
4,
0,
100,
105,
102,
102,
117,
115,
101,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
119,
104,
105,
116,
101,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
4,
0,
0,
0,
0,
1,
0,
0,
0,
28,
11,
0,
0,
68,
88,
66,
67,
209,
39,
63,
19,
55,
209,
17,
199,
50,
27,
40,
225,
63,
118,
7,
199,
1,
0,
0,
0,
28,
11,
0,
0,
6,
0,
0,
0,
56,
0,
0,
0,
88,
6,
0,
0,
16,
7,
0,
0,
184,
7,
0,
0,
112,
10,
0,
0,
128,
10,
0,
0,
82,
68,
69,
70,
24,
6,
0,
0,
3,
0,
0,
0,
200,
0,
0,
0,
3,
0,
0,
0,
60,
0,
0,
0,
0,
5,
254,
255,
16,
137,
4,
0,
240,
5,
0,
0,
82,
68,
49,
49,
60,
0,
0,
0,
24,
0,
0,
0,
32,
0,
0,
0,
40,
0,
0,
0,
36,
0,
0,
0,
12,
0,
0,
0,
0,
0,
0,
0,
156,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
165,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
1,
0,
0,
0,
1,
0,
0,
0,
181,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
1,
0,
0,
0,
1,
0,
0,
0,
36,
71,
108,
111,
98,
97,
108,
115,
0,
83,
116,
101,
114,
101,
111,
75,
105,
116,
66,
117,
102,
102,
101,
114,
0,
84,
114,
97,
110,
115,
102,
111,
114,
109,
66,
117,
102,
102,
101,
114,
0,
171,
171,
171,
156,
0,
0,
0,
1,
0,
0,
0,
16,
1,
0,
0,
16,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
165,
0,
0,
0,
11,
0,
0,
0,
108,
1,
0,
0,
16,
3,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
181,
0,
0,
0,
1,
0,
0,
0,
48,
5,
0,
0,
240,
255,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
56,
1,
0,
0,
0,
0,
0,
0,
16,
0,
0,
0,
2,
0,
0,
0,
72,
1,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
99,
111,
108,
111,
114,
0,
102,
108,
111,
97,
116,
52,
0,
171,
171,
171,
1,
0,
3,
0,
1,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
1,
0,
0,
36,
3,
0,
0,
0,
0,
0,
0,
128,
0,
0,
0,
0,
0,
0,
0,
56,
3,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
92,
3,
0,
0,
128,
0,
0,
0,
128,
0,
0,
0,
0,
0,
0,
0,
100,
3,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
136,
3,
0,
0,
0,
1,
0,
0,
128,
0,
0,
0,
0,
0,
0,
0,
148,
3,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
184,
3,
0,
0,
128,
1,
0,
0,
128,
0,
0,
0,
2,
0,
0,
0,
196,
3,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
232,
3,
0,
0,
0,
2,
0,
0,
144,
0,
0,
0,
0,
0,
0,
0,
248,
3,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
28,
4,
0,
0,
144,
2,
0,
0,
32,
0,
0,
0,
0,
0,
0,
0,
44,
4,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
80,
4,
0,
0,
176,
2,
0,
0,
32,
0,
0,
0,
0,
0,
0,
0,
96,
4,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
132,
4,
0,
0,
208,
2,
0,
0,
32,
0,
0,
0,
0,
0,
0,
0,
148,
4,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
184,
4,
0,
0,
240,
2,
0,
0,
16,
0,
0,
0,
0,
0,
0,
0,
72,
1,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
197,
4,
0,
0,
0,
3,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
212,
4,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
248,
4,
0,
0,
4,
3,
0,
0,
4,
0,
0,
0,
2,
0,
0,
0,
12,
5,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
115,
107,
95,
118,
105,
101,
119,
0,
102,
108,
111,
97,
116,
52,
120,
52,
0,
171,
171,
171,
3,
0,
3,
0,
4,
0,
4,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
44,
3,
0,
0,
115,
107,
95,
112,
114,
111,
106,
0,
3,
0,
3,
0,
4,
0,
4,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
44,
3,
0,
0,
115,
107,
95,
112,
114,
111,
106,
95,
105,
110,
118,
0,
3,
0,
3,
0,
4,
0,
4,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
44,
3,
0,
0,
115,
107,
95,
118,
105,
101,
119,
112,
114,
111,
106,
0,
3,
0,
3,
0,
4,
0,
4,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
44,
3,
0,
0,
115,
107,
95,
108,
105,
103,
104,
116,
105,
110,
103,
95,
115,
104,
0,
171,
1,
0,
3,
0,
1,
0,
4,
0,
9,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
1,
0,
0,
115,
107,
95,
99,
97,
109,
101,
114,
97,
95,
112,
111,
115,
0,
171,
171,
1,
0,
3,
0,
1,
0,
4,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
1,
0,
0,
115,
107,
95,
99,
97,
109,
101,
114,
97,
95,
100,
105,
114,
0,
171,
171,
1,
0,
3,
0,
1,
0,
4,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
1,
0,
0,
115,
107,
95,
102,
105,
110,
103,
101,
114,
116,
105,
112,
0,
171,
171,
171,
1,
0,
3,
0,
1,
0,
4,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
1,
0,
0,
115,
107,
95,
99,
117,
98,
101,
109,
97,
112,
95,
105,
0,
115,
107,
95,
116,
105,
109,
101,
0,
102,
108,
111,
97,
116,
0,
171,
0,
0,
3,
0,
1,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
205,
4,
0,
0,
115,
107,
95,
118,
105,
101,
119,
95,
99,
111,
117,
110,
116,
0,
100,
119,
111,
114,
100,
0,
0,
0,
19,
0,
1,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
6,
5,
0,
0,
88,
5,
0,
0,
0,
0,
0,
0,
240,
255,
0,
0,
2,
0,
0,
0,
204,
5,
0,
0,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
115,
107,
95,
105,
110,
115,
116,
0,
73,
110,
115,
116,
0,
119,
111,
114,
108,
100,
0,
171,
3,
0,
3,
0,
4,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
44,
3,
0,
0,
1,
0,
3,
0,
1,
0,
4,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
1,
0,
0,
101,
5,
0,
0,
108,
5,
0,
0,
0,
0,
0,
0,
56,
1,
0,
0,
144,
5,
0,
0,
64,
0,
0,
0,
5,
0,
0,
0,
1,
0,
20,
0,
51,
3,
2,
0,
180,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
96,
5,
0,
0,
77,
105,
99,
114,
111,
115,
111,
102,
116,
32,
40,
82,
41,
32,
72,
76,
83,
76,
32,
83,
104,
97,
100,
101,
114,
32,
67,
111,
109,
112,
105,
108,
101,
114,
32,
49,
48,
46,
49,
0,
73,
83,
71,
78,
176,
0,
0,
0,
5,
0,
0,
0,
8,
0,
0,
0,
128,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
15,
7,
0,
0,
140,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
1,
0,
0,
0,
7,
0,
0,
0,
147,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
2,
0,
0,
0,
3,
3,
0,
0,
156,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
3,
0,
0,
0,
15,
15,
0,
0,
162,
0,
0,
0,
0,
0,
0,
0,
8,
0,
0,
0,
1,
0,
0,
0,
4,
0,
0,
0,
1,
1,
0,
0,
83,
86,
95,
80,
111,
115,
105,
116,
105,
111,
110,
0,
78,
79,
82,
77,
65,
76,
0,
84,
69,
88,
67,
79,
79,
82,
68,
0,
67,
79,
76,
79,
82,
0,
83,
86,
95,
73,
110,
115,
116,
97,
110,
99,
101,
73,
68,
0,
79,
83,
71,
78,
160,
0,
0,
0,
4,
0,
0,
0,
8,
0,
0,
0,
104,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
15,
0,
0,
0,
116,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
1,
0,
0,
0,
3,
12,
0,
0,
125,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
2,
0,
0,
0,
15,
0,
0,
0,
131,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
1,
0,
0,
0,
3,
0,
0,
0,
1,
14,
0,
0,
83,
86,
95,
80,
111,
115,
105,
116,
105,
111,
110,
0,
84,
69,
88,
67,
79,
79,
82,
68,
0,
67,
79,
76,
79,
82,
0,
83,
86,
95,
82,
101,
110,
100,
101,
114,
84,
97,
114,
103,
101,
116,
65,
114,
114,
97,
121,
73,
110,
100,
101,
120,
0,
171,
171,
171,
83,
72,
69,
88,
176,
2,
0,
0,
80,
0,
1,
0,
172,
0,
0,
0,
106,
8,
0,
1,
89,
0,
0,
4,
70,
142,
32,
0,
0,
0,
0,
0,
1,
0,
0,
0,
89,
8,
0,
4,
70,
142,
32,
0,
1,
0,
0,
0,
49,
0,
0,
0,
89,
8,
0,
4,
70,
142,
32,
0,
2,
0,
0,
0,
255,
15,
0,
0,
95,
0,
0,
3,
114,
16,
16,
0,
0,
0,
0,
0,
95,
0,
0,
3,
50,
16,
16,
0,
2,
0,
0,
0,
95,
0,
0,
3,
242,
16,
16,
0,
3,
0,
0,
0,
96,
0,
0,
4,
18,
16,
16,
0,
4,
0,
0,
0,
8,
0,
0,
0,
103,
0,
0,
4,
242,
32,
16,
0,
0,
0,
0,
0,
1,
0,
0,
0,
101,
0,
0,
3,
50,
32,
16,
0,
1,
0,
0,
0,
101,
0,
0,
3,
242,
32,
16,
0,
2,
0,
0,
0,
103,
0,
0,
4,
18,
32,
16,
0,
3,
0,
0,
0,
4,
0,
0,
0,
104,
0,
0,
2,
4,
0,
0,
0,
54,
0,
0,
5,
114,
0,
16,
0,
0,
0,
0,
0,
70,
18,
16,
0,
0,
0,
0,
0,
54,
0,
0,
5,
130,
0,
16,
0,
0,
0,
0,
0,
1,
64,
0,
0,
0,
0,
128,
63,
78,
0,
0,
10,
18,
0,
16,
0,
1,
0,
0,
0,
18,
0,
16,
0,
2,
0,
0,
0,
10,
16,
16,
0,
4,
0,
0,
0,
26,
128,
32,
0,
1,
0,
0,
0,
48,
0,
0,
0,
38,
0,
0,
8,
0,
208,
0,
0,
18,
0,
16,
0,
1,
0,
0,
0,
10,
0,
16,
0,
1,
0,
0,
0,
1,
64,
0,
0,
5,
0,
0,
0,
17,
0,
0,
9,
18,
0,
16,
0,
3,
0,
0,
0,
70,
14,
16,
0,
0,
0,
0,
0,
70,
142,
32,
4,
2,
0,
0,
0,
10,
0,
16,
0,
1,
0,
0,
0,
17,
0,
0,
10,
34,
0,
16,
0,
3,
0,
0,
0,
70,
14,
16,
0,
0,
0,
0,
0,
70,
142,
32,
6,
2,
0,
0,
0,
1,
0,
0,
0,
10,
0,
16,
0,
1,
0,
0,
0,
17,
0,
0,
10,
66,
0,
16,
0,
3,
0,
0,
0,
70,
14,
16,
0,
0,
0,
0,
0,
70,
142,
32,
6,
2,
0,
0,
0,
2,
0,
0,
0,
10,
0,
16,
0,
1,
0,
0,
0,
41,
0,
0,
7,
18,
0,
16,
0,
0,
0,
0,
0,
10,
0,
16,
0,
2,
0,
0,
0,
1,
64,
0,
0,
2,
0,
0,
0,
54,
0,
0,
5,
18,
32,
16,
0,
3,
0,
0,
0,
10,
0,
16,
0,
2,
0,
0,
0,
54,
0,
0,
5,
130,
0,
16,
0,
3,
0,
0,
0,
1,
64,
0,
0,
0,
0,
128,
63,
17,
0,
0,
10,
18,
32,
16,
0,
0,
0,
0,
0,
70,
14,
16,
0,
3,
0,
0,
0,
70,
142,
32,
6,
1,
0,
0,
0,
24,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
17,
0,
0,
10,
34,
32,
16,
0,
0,
0,
0,
0,
70,
14,
16,
0,
3,
0,
0,
0,
70,
142,
32,
6,
1,
0,
0,
0,
25,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
17,
0,
0,
10,
66,
32,
16,
0,
0,
0,
0,
0,
70,
14,
16,
0,
3,
0,
0,
0,
70,
142,
32,
6,
1,
0,
0,
0,
26,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
17,
0,
0,
10,
130,
32,
16,
0,
0,
0,
0,
0,
70,
14,
16,
0,
3,
0,
0,
0,
70,
142,
32,
6,
1,
0,
0,
0,
27,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
54,
0,
0,
5,
50,
32,
16,
0,
1,
0,
0,
0,
70,
16,
16,
0,
2,
0,
0,
0,
56,
0,
0,
8,
242,
32,
16,
0,
2,
0,
0,
0,
70,
30,
16,
0,
3,
0,
0,
0,
70,
142,
32,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
0,
0,
1,
83,
70,
73,
48,
8,
0,
0,
0,
0,
32,
0,
0,
0,
0,
0,
0,
83,
84,
65,
84,
148,
0,
0,
0,
17,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
8,
0,
0,
0,
8,
0,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
5,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
1,
0,
0,
0,
58,
7,
0,
0,
35,
118,
101,
114,
115,
105,
111,
110,
32,
52,
51,
48,
10,
35,
105,
102,
100,
101,
102,
32,
71,
76,
95,
65,
82,
66,
95,
115,
104,
97,
100,
101,
114,
95,
100,
114,
97,
119,
95,
112,
97,
114,
97,
109,
101,
116,
101,
114,
115,
10,
35,
101,
120,
116,
101,
110,
115,
105,
111,
110,
32,
71,
76,
95,
65,
82,
66,
95,
115,
104,
97,
100,
101,
114,
95,
100,
114,
97,
119,
95,
112,
97,
114,
97,
109,
101,
116,
101,
114,
115,
32,
58,
32,
101,
110,
97,
98,
108,
101,
10,
35,
101,
110,
100,
105,
102,
10,
35,
105,
102,
100,
101,
102,
32,
71,
76,
95,
65,
77,
68,
95,
118,
101,
114,
116,
101,
120,
95,
115,
104,
97,
100,
101,
114,
95,
108,
97,
121,
101,
114,
10,
35,
101,
120,
116,
101,
110,
115,
105,
111,
110,
32,
71,
76,
95,
65,
77,
68,
95,
118,
101,
114,
116,
101,
120,
95,
115,
104,
97,
100,
101,
114,
95,
108,
97,
121,
101,
114,
32,
58,
32,
101,
110,
97,
98,
108,
101,
10,
35,
101,
108,
105,
102,
32,
100,
101,
102,
105,
110,
101,
100,
40,
71,
76,
95,
78,
86,
95,
118,
105,
101,
119,
112,
111,
114,
116,
95,
97,
114,
114,
97,
121,
50,
41,
10,
35,
101,
120,
116,
101,
110,
115,
105,
111,
110,
32,
71,
76,
95,
78,
86,
95,
118,
105,
101,
119,
112,
111,
114,
116,
95,
97,
114,
114,
97,
121,
50,
32,
58,
32,
101,
110,
97,
98,
108,
101,
10,
35,
101,
108,
115,
101,
10,
35,
100,
101,
102,
105,
110,
101,
32,
103,
108,
95,
76,
97,
121,
101,
114,
32,
105,
110,
116,
32,
95,
100,
117,
109,
109,
121,
95,
103,
108,
95,
108,
97,
121,
101,
114,
95,
118,
97,
114,
10,
35,
101,
110,
100,
105,
102,
10,
10,
115,
116,
114,
117,
99,
116,
32,
73,
110,
115,
116,
10,
123,
10,
32,
32,
32,
32,
109,
97,
116,
52,
32,
119,
111,
114,
108,
100,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
99,
111,
108,
111,
114,
59,
10,
125,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
98,
105,
110,
100,
105,
110,
103,
32,
61,
32,
49,
44,
32,
115,
116,
100,
49,
52,
48,
41,
32,
117,
110,
105,
102,
111,
114,
109,
32,
83,
116,
101,
114,
101,
111,
75,
105,
116,
66,
117,
102,
102,
101,
114,
10,
123,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
109,
97,
116,
52,
32,
115,
107,
95,
118,
105,
101,
119,
91,
50,
93,
59,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
109,
97,
116,
52,
32,
115,
107,
95,
112,
114,
111,
106,
91,
50,
93,
59,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
109,
97,
116,
52,
32,
115,
107,
95,
112,
114,
111,
106,
95,
105,
110,
118,
91,
50,
93,
59,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
109,
97,
116,
52,
32,
115,
107,
95,
118,
105,
101,
119,
112,
114,
111,
106,
91,
50,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
108,
105,
103,
104,
116,
105,
110,
103,
95,
115,
104,
91,
57,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
99,
97,
109,
101,
114,
97,
95,
112,
111,
115,
91,
50,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
99,
97,
109,
101,
114,
97,
95,
100,
105,
114,
91,
50,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
102,
105,
110,
103,
101,
114,
116,
105,
112,
91,
50,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
99,
117,
98,
101,
109,
97,
112,
95,
105,
59,
10,
32,
32,
32,
32,
102,
108,
111,
97,
116,
32,
115,
107,
95,
116,
105,
109,
101,
59,
10,
32,
32,
32,
32,
117,
105,
110,
116,
32,
115,
107,
95,
118,
105,
101,
119,
95,
99,
111,
117,
110,
116,
59,
10,
125,
32,
95,
51,
56,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
98,
105,
110,
100,
105,
110,
103,
32,
61,
32,
50,
44,
32,
115,
116,
100,
49,
52,
48,
41,
32,
117,
110,
105,
102,
111,
114,
109,
32,
84,
114,
97,
110,
115,
102,
111,
114,
109,
66,
117,
102,
102,
101,
114,
10,
123,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
73,
110,
115,
116,
32,
115,
107,
95,
105,
110,
115,
116,
91,
56,
49,
57,
93,
59,
10,
125,
32,
95,
53,
54,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
98,
105,
110,
100,
105,
110,
103,
32,
61,
32,
48,
44,
32,
115,
116,
100,
49,
52,
48,
41,
32,
117,
110,
105,
102,
111,
114,
109,
32,
95,
71,
108,
111,
98,
97,
108,
10,
123,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
99,
111,
108,
111,
114,
59,
10,
125,
32,
95,
57,
52,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
48,
41,
32,
105,
110,
32,
118,
101,
99,
52,
32,
105,
110,
112,
117,
116,
95,
112,
111,
115,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
49,
41,
32,
105,
110,
32,
118,
101,
99,
51,
32,
105,
110,
112,
117,
116,
95,
110,
111,
114,
109,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
50,
41,
32,
105,
110,
32,
118,
101,
99,
50,
32,
105,
110,
112,
117,
116,
95,
117,
118,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
51,
41,
32,
105,
110,
32,
118,
101,
99,
52,
32,
105,
110,
112,
117,
116,
95,
99,
111,
108,
111,
114,
59,
10,
35,
105,
102,
100,
101,
102,
32,
71,
76,
95,
65,
82,
66,
95,
115,
104,
97,
100,
101,
114,
95,
100,
114,
97,
119,
95,
112,
97,
114,
97,
109,
101,
116,
101,
114,
115,
10,
35,
100,
101,
102,
105,
110,
101,
32,
83,
80,
73,
82,
86,
95,
67,
114,
111,
115,
115,
95,
66,
97,
115,
101,
73,
110,
115,
116,
97,
110,
99,
101,
32,
103,
108,
95,
66,
97,
115,
101,
73,
110,
115,
116,
97,
110,
99,
101,
65,
82,
66,
10,
35,
101,
108,
115,
101,
10,
117,
110,
105,
102,
111,
114,
109,
32,
105,
110,
116,
32,
83,
80,
73,
82,
86,
95,
67,
114,
111,
115,
115,
95,
66,
97,
115,
101,
73,
110,
115,
116,
97,
110,
99,
101,
59,
10,
35,
101,
110,
100,
105,
102,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
48,
41,
32,
111,
117,
116,
32,
118,
101,
99,
50,
32,
102,
115,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
95,
117,
118,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
49,
41,
32,
111,
117,
116,
32,
118,
101,
99,
52,
32,
102,
115,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
95,
99,
111,
108,
111,
114,
59,
10,
10,
109,
97,
116,
52,
32,
115,
112,
118,
87,
111,
114,
107,
97,
114,
111,
117,
110,
100,
82,
111,
119,
77,
97,
106,
111,
114,
40,
109,
97,
116,
52,
32,
119,
114,
97,
112,
41,
32,
123,
32,
114,
101,
116,
117,
114,
110,
32,
119,
114,
97,
112,
59,
32,
125,
10,
10,
118,
111,
105,
100,
32,
109,
97,
105,
110,
40,
41,
10,
123,
10,
32,
32,
32,
32,
117,
105,
110,
116,
32,
95,
49,
54,
49,
32,
61,
32,
117,
105,
110,
116,
40,
40,
103,
108,
95,
73,
110,
115,
116,
97,
110,
99,
101,
73,
68,
32,
43,
32,
83,
80,
73,
82,
86,
95,
67,
114,
111,
115,
115,
95,
66,
97,
115,
101,
73,
110,
115,
116,
97,
110,
99,
101,
41,
41,
32,
37,
32,
95,
51,
56,
46,
115,
107,
95,
118,
105,
101,
119,
95,
99,
111,
117,
110,
116,
59,
10,
32,
32,
32,
32,
103,
108,
95,
80,
111,
115,
105,
116,
105,
111,
110,
32,
61,
32,
115,
112,
118,
87,
111,
114,
107,
97,
114,
111,
117,
110,
100,
82,
111,
119,
77,
97,
106,
111,
114,
40,
95,
51,
56,
46,
115,
107,
95,
118,
105,
101,
119,
112,
114,
111,
106,
91,
95,
49,
54,
49,
93,
41,
32,
42,
32,
118,
101,
99,
52,
40,
40,
115,
112,
118,
87,
111,
114,
107,
97,
114,
111,
117,
110,
100,
82,
111,
119,
77,
97,
106,
111,
114,
40,
95,
53,
54,
46,
115,
107,
95,
105,
110,
115,
116,
91,
117,
105,
110,
116,
40,
40,
103,
108,
95,
73,
110,
115,
116,
97,
110,
99,
101,
73,
68,
32,
43,
32,
83,
80,
73,
82,
86,
95,
67,
114,
111,
115,
115,
95,
66,
97,
115,
101,
73,
110,
115,
116,
97,
110,
99,
101,
41,
41,
32,
47,
32,
95,
51,
56,
46,
115,
107,
95,
118,
105,
101,
119,
95,
99,
111,
117,
110,
116,
93,
46,
119,
111,
114,
108,
100,
41,
32,
42,
32,
118,
101,
99,
52,
40,
105,
110,
112,
117,
116,
95,
112,
111,
115,
46,
120,
121,
122,
44,
32,
49,
46,
48,
41,
41,
46,
120,
121,
122,
44,
32,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
102,
115,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
95,
117,
118,
32,
61,
32,
105,
110,
112,
117,
116,
95,
117,
118,
59,
10,
32,
32,
32,
32,
102,
115,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
95,
99,
111,
108,
111,
114,
32,
61,
32,
105,
110,
112,
117,
116,
95,
99,
111,
108,
111,
114,
32,
42,
32,
95,
57,
52,
46,
99,
111,
108,
111,
114,
59,
10,
32,
32,
32,
32,
103,
108,
95,
76,
97,
121,
101,
114,
32,
61,
32,
105,
110,
116,
40,
95,
49,
54,
49,
41,
59,
10,
125,
10,
10,
0,
3,
0,
0,
0,
1,
0,
0,
0,
25,
6,
0,
0,
35,
118,
101,
114,
115,
105,
111,
110,
32,
51,
50,
48,
32,
101,
115,
10,
35,
105,
102,
100,
101,
102,
32,
71,
76,
95,
65,
77,
68,
95,
118,
101,
114,
116,
101,
120,
95,
115,
104,
97,
100,
101,
114,
95,
108,
97,
121,
101,
114,
10,
35,
101,
120,
116,
101,
110,
115,
105,
111,
110,
32,
71,
76,
95,
65,
77,
68,
95,
118,
101,
114,
116,
101,
120,
95,
115,
104,
97,
100,
101,
114,
95,
108,
97,
121,
101,
114,
32,
58,
32,
101,
110,
97,
98,
108,
101,
10,
35,
101,
108,
105,
102,
32,
100,
101,
102,
105,
110,
101,
100,
40,
71,
76,
95,
78,
86,
95,
118,
105,
101,
119,
112,
111,
114,
116,
95,
97,
114,
114,
97,
121,
50,
41,
10,
35,
101,
120,
116,
101,
110,
115,
105,
111,
110,
32,
71,
76,
95,
78,
86,
95,
118,
105,
101,
119,
112,
111,
114,
116,
95,
97,
114,
114,
97,
121,
50,
32,
58,
32,
101,
110,
97,
98,
108,
101,
10,
35,
101,
108,
115,
101,
10,
35,
100,
101,
102,
105,
110,
101,
32,
103,
108,
95,
76,
97,
121,
101,
114,
32,
105,
110,
116,
32,
95,
100,
117,
109,
109,
121,
95,
103,
108,
95,
108,
97,
121,
101,
114,
95,
118,
97,
114,
10,
35,
101,
110,
100,
105,
102,
10,
10,
115,
116,
114,
117,
99,
116,
32,
73,
110,
115,
116,
10,
123,
10,
32,
32,
32,
32,
109,
97,
116,
52,
32,
119,
111,
114,
108,
100,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
99,
111,
108,
111,
114,
59,
10,
125,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
98,
105,
110,
100,
105,
110,
103,
32,
61,
32,
49,
44,
32,
115,
116,
100,
49,
52,
48,
41,
32,
117,
110,
105,
102,
111,
114,
109,
32,
83,
116,
101,
114,
101,
111,
75,
105,
116,
66,
117,
102,
102,
101,
114,
10,
123,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
109,
97,
116,
52,
32,
115,
107,
95,
118,
105,
101,
119,
91,
50,
93,
59,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
109,
97,
116,
52,
32,
115,
107,
95,
112,
114,
111,
106,
91,
50,
93,
59,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
109,
97,
116,
52,
32,
115,
107,
95,
112,
114,
111,
106,
95,
105,
110,
118,
91,
50,
93,
59,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
109,
97,
116,
52,
32,
115,
107,
95,
118,
105,
101,
119,
112,
114,
111,
106,
91,
50,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
108,
105,
103,
104,
116,
105,
110,
103,
95,
115,
104,
91,
57,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
99,
97,
109,
101,
114,
97,
95,
112,
111,
115,
91,
50,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
99,
97,
109,
101,
114,
97,
95,
100,
105,
114,
91,
50,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
102,
105,
110,
103,
101,
114,
116,
105,
112,
91,
50,
93,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
115,
107,
95,
99,
117,
98,
101,
109,
97,
112,
95,
105,
59,
10,
32,
32,
32,
32,
102,
108,
111,
97,
116,
32,
115,
107,
95,
116,
105,
109,
101,
59,
10,
32,
32,
32,
32,
117,
105,
110,
116,
32,
115,
107,
95,
118,
105,
101,
119,
95,
99,
111,
117,
110,
116,
59,
10,
125,
32,
95,
51,
56,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
98,
105,
110,
100,
105,
110,
103,
32,
61,
32,
50,
44,
32,
115,
116,
100,
49,
52,
48,
41,
32,
117,
110,
105,
102,
111,
114,
109,
32,
84,
114,
97,
110,
115,
102,
111,
114,
109,
66,
117,
102,
102,
101,
114,
10,
123,
10,
32,
32,
32,
32,
108,
97,
121,
111,
117,
116,
40,
114,
111,
119,
95,
109,
97,
106,
111,
114,
41,
32,
73,
110,
115,
116,
32,
115,
107,
95,
105,
110,
115,
116,
91,
56,
49,
57,
93,
59,
10,
125,
32,
95,
53,
54,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
98,
105,
110,
100,
105,
110,
103,
32,
61,
32,
48,
44,
32,
115,
116,
100,
49,
52,
48,
41,
32,
117,
110,
105,
102,
111,
114,
109,
32,
95,
71,
108,
111,
98,
97,
108,
10,
123,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
99,
111,
108,
111,
114,
59,
10,
125,
32,
95,
57,
52,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
48,
41,
32,
105,
110,
32,
118,
101,
99,
52,
32,
105,
110,
112,
117,
116,
95,
112,
111,
115,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
49,
41,
32,
105,
110,
32,
118,
101,
99,
51,
32,
105,
110,
112,
117,
116,
95,
110,
111,
114,
109,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
50,
41,
32,
105,
110,
32,
118,
101,
99,
50,
32,
105,
110,
112,
117,
116,
95,
117,
118,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
51,
41,
32,
105,
110,
32,
118,
101,
99,
52,
32,
105,
110,
112,
117,
116,
95,
99,
111,
108,
111,
114,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
48,
41,
32,
111,
117,
116,
32,
118,
101,
99,
50,
32,
102,
115,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
95,
117,
118,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
49,
41,
32,
111,
117,
116,
32,
118,
101,
99,
52,
32,
102,
115,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
95,
99,
111,
108,
111,
114,
59,
10,
10,
109,
97,
116,
52,
32,
115,
112,
118,
87,
111,
114,
107,
97,
114,
111,
117,
110,
100,
82,
111,
119,
77,
97,
106,
111,
114,
40,
109,
97,
116,
52,
32,
119,
114,
97,
112,
41,
32,
123,
32,
114,
101,
116,
117,
114,
110,
32,
119,
114,
97,
112,
59,
32,
125,
10,
10,
118,
111,
105,
100,
32,
109,
97,
105,
110,
40,
41,
10,
123,
10,
32,
32,
32,
32,
117,
105,
110,
116,
32,
95,
49,
54,
49,
32,
61,
32,
117,
105,
110,
116,
40,
103,
108,
95,
73,
110,
115,
116,
97,
110,
99,
101,
73,
68,
41,
32,
37,
32,
95,
51,
56,
46,
115,
107,
95,
118,
105,
101,
119,
95,
99,
111,
117,
110,
116,
59,
10,
32,
32,
32,
32,
103,
108,
95,
80,
111,
115,
105,
116,
105,
111,
110,
32,
61,
32,
115,
112,
118,
87,
111,
114,
107,
97,
114,
111,
117,
110,
100,
82,
111,
119,
77,
97,
106,
111,
114,
40,
95,
51,
56,
46,
115,
107,
95,
118,
105,
101,
119,
112,
114,
111,
106,
91,
95,
49,
54,
49,
93,
41,
32,
42,
32,
118,
101,
99,
52,
40,
40,
115,
112,
118,
87,
111,
114,
107,
97,
114,
111,
117,
110,
100,
82,
111,
119,
77,
97,
106,
111,
114,
40,
95,
53,
54,
46,
115,
107,
95,
105,
110,
115,
116,
91,
117,
105,
110,
116,
40,
103,
108,
95,
73,
110,
115,
116,
97,
110,
99,
101,
73,
68,
41,
32,
47,
32,
95,
51,
56,
46,
115,
107,
95,
118,
105,
101,
119,
95,
99,
111,
117,
110,
116,
93,
46,
119,
111,
114,
108,
100,
41,
32,
42,
32,
118,
101,
99,
52,
40,
105,
110,
112,
117,
116,
95,
112,
111,
115,
46,
120,
121,
122,
44,
32,
49,
46,
48,
41,
41,
46,
120,
121,
122,
44,
32,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
102,
115,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
95,
117,
118,
32,
61,
32,
105,
110,
112,
117,
116,
95,
117,
118,
59,
10,
32,
32,
32,
32,
102,
115,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
95,
99,
111,
108,
111,
114,
32,
61,
32,
105,
110,
112,
117,
116,
95,
99,
111,
108,
111,
114,
32,
42,
32,
95,
57,
52,
46,
99,
111,
108,
111,
114,
59,
10,
32,
32,
32,
32,
103,
108,
95,
76,
97,
121,
101,
114,
32,
61,
32,
105,
110,
116,
40,
95,
49,
54,
49,
41,
59,
10,
125,
10,
10,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
240,
5,
0,
0,
68,
88,
66,
67,
97,
125,
245,
253,
231,
133,
149,
112,
129,
80,
227,
168,
89,
165,
247,
225,
1,
0,
0,
0,
240,
5,
0,
0,
5,
0,
0,
0,
52,
0,
0,
0,
244,
0,
0,
0,
156,
1,
0,
0,
208,
1,
0,
0,
84,
5,
0,
0,
82,
68,
69,
70,
184,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
60,
0,
0,
0,
0,
5,
255,
255,
16,
137,
4,
0,
142,
0,
0,
0,
82,
68,
49,
49,
60,
0,
0,
0,
24,
0,
0,
0,
32,
0,
0,
0,
40,
0,
0,
0,
36,
0,
0,
0,
12,
0,
0,
0,
0,
0,
0,
0,
124,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
1,
0,
0,
0,
134,
0,
0,
0,
2,
0,
0,
0,
5,
0,
0,
0,
4,
0,
0,
0,
255,
255,
255,
255,
0,
0,
0,
0,
1,
0,
0,
0,
13,
0,
0,
0,
100,
105,
102,
102,
117,
115,
101,
95,
115,
0,
100,
105,
102,
102,
117,
115,
101,
0,
77,
105,
99,
114,
111,
115,
111,
102,
116,
32,
40,
82,
41,
32,
72,
76,
83,
76,
32,
83,
104,
97,
100,
101,
114,
32,
67,
111,
109,
112,
105,
108,
101,
114,
32,
49,
48,
46,
49,
0,
171,
171,
73,
83,
71,
78,
160,
0,
0,
0,
4,
0,
0,
0,
8,
0,
0,
0,
104,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
15,
0,
0,
0,
116,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
1,
0,
0,
0,
3,
3,
0,
0,
125,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
2,
0,
0,
0,
15,
15,
0,
0,
131,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
1,
0,
0,
0,
3,
0,
0,
0,
1,
0,
0,
0,
83,
86,
95,
80,
111,
115,
105,
116,
105,
111,
110,
0,
84,
69,
88,
67,
79,
79,
82,
68,
0,
67,
79,
76,
79,
82,
0,
83,
86,
95,
82,
101,
110,
100,
101,
114,
84,
97,
114,
103,
101,
116,
65,
114,
114,
97,
121,
73,
110,
100,
101,
120,
0,
171,
171,
171,
79,
83,
71,
78,
44,
0,
0,
0,
1,
0,
0,
0,
8,
0,
0,
0,
32,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
15,
0,
0,
0,
83,
86,
95,
84,
65,
82,
71,
69,
84,
0,
171,
171,
83,
72,
69,
88,
124,
3,
0,
0,
80,
0,
0,
0,
223,
0,
0,
0,
106,
8,
0,
1,
90,
0,
0,
3,
0,
96,
16,
0,
0,
0,
0,
0,
88,
24,
0,
4,
0,
112,
16,
0,
0,
0,
0,
0,
85,
85,
0,
0,
98,
16,
0,
3,
50,
16,
16,
0,
1,
0,
0,
0,
98,
16,
0,
3,
242,
16,
16,
0,
2,
0,
0,
0,
101,
0,
0,
3,
242,
32,
16,
0,
0,
0,
0,
0,
104,
0,
0,
2,
3,
0,
0,
0,
122,
0,
0,
5,
242,
0,
16,
0,
0,
0,
0,
0,
70,
20,
16,
0,
1,
0,
0,
0,
50,
0,
0,
12,
242,
0,
16,
0,
1,
0,
0,
0,
70,
14,
16,
0,
0,
0,
0,
0,
2,
64,
0,
0,
0,
0,
0,
62,
0,
0,
0,
62,
0,
0,
192,
62,
0,
0,
192,
62,
70,
20,
16,
0,
1,
0,
0,
0,
50,
0,
0,
13,
242,
0,
16,
0,
0,
0,
0,
0,
70,
14,
16,
128,
65,
0,
0,
0,
0,
0,
0,
0,
2,
64,
0,
0,
0,
0,
0,
62,
0,
0,
0,
62,
0,
0,
192,
62,
0,
0,
192,
62,
70,
20,
16,
0,
1,
0,
0,
0,
124,
0,
0,
5,
242,
0,
16,
0,
2,
0,
0,
0,
70,
20,
16,
0,
1,
0,
0,
0,
50,
0,
0,
12,
50,
0,
16,
0,
1,
0,
0,
0,
70,
0,
16,
0,
2,
0,
0,
0,
2,
64,
0,
0,
0,
0,
192,
62,
0,
0,
192,
62,
0,
0,
0,
0,
0,
0,
0,
0,
70,
0,
16,
0,
1,
0,
0,
0,
50,
0,
0,
13,
194,
0,
16,
0,
1,
0,
0,
0,
166,
14,
16,
128,
65,
0,
0,
0,
2,
0,
0,
0,
2,
64,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
0,
0,
0,
62,
166,
14,
16,
0,
1,
0,
0,
0,
74,
0,
0,
141,
194,
0,
0,
128,
67,
85,
21,
0,
66,
0,
16,
0,
1,
0,
0,
0,
230,
10,
16,
0,
1,
0,
0,
0,
150,
124,
16,
0,
0,
0,
0,
0,
0,
96,
16,
0,
0,
0,
0,
0,
1,
64,
0,
0,
0,
0,
128,
191,
74,
0,
0,
141,
194,
0,
0,
128,
67,
85,
21,
0,
18,
0,
16,
0,
1,
0,
0,
0,
70,
0,
16,
0,
1,
0,
0,
0,
70,
126,
16,
0,
0,
0,
0,
0,
0,
96,
16,
0,
0,
0,
0,
0,
1,
64,
0,
0,
0,
0,
128,
191,
50,
0,
0,
13,
50,
0,
16,
0,
0,
0,
0,
0,
70,
0,
16,
128,
65,
0,
0,
0,
2,
0,
0,
0,
2,
64,
0,
0,
0,
0,
192,
62,
0,
0,
192,
62,
0,
0,
0,
0,
0,
0,
0,
0,
70,
0,
16,
0,
0,
0,
0,
0,
50,
0,
0,
12,
194,
0,
16,
0,
0,
0,
0,
0,
166,
14,
16,
0,
2,
0,
0,
0,
2,
64,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
62,
0,
0,
0,
62,
166,
14,
16,
0,
0,
0,
0,
0,
74,
0,
0,
141,
194,
0,
0,
128,
67,
85,
21,
0,
66,
0,
16,
0,
0,
0,
0,
0,
230,
10,
16,
0,
0,
0,
0,
0,
150,
124,
16,
0,
0,
0,
0,
0,
0,
96,
16,
0,
0,
0,
0,
0,
1,
64,
0,
0,
0,
0,
128,
191,
74,
0,
0,
141,
194,
0,
0,
128,
67,
85,
21,
0,
18,
0,
16,
0,
0,
0,
0,
0,
70,
0,
16,
0,
0,
0,
0,
0,
70,
126,
16,
0,
0,
0,
0,
0,
0,
96,
16,
0,
0,
0,
0,
0,
1,
64,
0,
0,
0,
0,
128,
191,
0,
0,
0,
7,
18,
0,
16,
0,
0,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
10,
0,
16,
0,
1,
0,
0,
0,
0,
0,
0,
7,
18,
0,
16,
0,
0,
0,
0,
0,
42,
0,
16,
0,
1,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
0,
0,
0,
7,
18,
0,
16,
0,
0,
0,
0,
0,
42,
0,
16,
0,
0,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
56,
0,
0,
7,
18,
0,
16,
0,
0,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
1,
64,
0,
0,
0,
0,
128,
62,
50,
0,
0,
9,
34,
0,
16,
0,
0,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
58,
16,
16,
0,
2,
0,
0,
0,
1,
64,
0,
0,
111,
18,
131,
187,
56,
0,
0,
7,
18,
0,
16,
0,
0,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
58,
16,
16,
0,
2,
0,
0,
0,
54,
0,
0,
5,
130,
32,
16,
0,
0,
0,
0,
0,
10,
0,
16,
0,
0,
0,
0,
0,
49,
0,
0,
7,
18,
0,
16,
0,
0,
0,
0,
0,
26,
0,
16,
0,
0,
0,
0,
0,
1,
64,
0,
0,
0,
0,
0,
0,
13,
0,
4,
3,
10,
0,
16,
0,
0,
0,
0,
0,
54,
0,
0,
5,
114,
32,
16,
0,
0,
0,
0,
0,
70,
18,
16,
0,
2,
0,
0,
0,
62,
0,
0,
1,
83,
84,
65,
84,
148,
0,
0,
0,
23,
0,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
15,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
4,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
0,
0,
0,
2,
0,
0,
0,
128,
3,
0,
0,
35,
118,
101,
114,
115,
105,
111,
110,
32,
52,
51,
48,
10,
10,
108,
97,
121,
111,
117,
116,
40,
98,
105,
110,
100,
105,
110,
103,
32,
61,
32,
48,
41,
32,
117,
110,
105,
102,
111,
114,
109,
32,
115,
97,
109,
112,
108,
101,
114,
50,
68,
32,
100,
105,
102,
102,
117,
115,
101,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
48,
41,
32,
105,
110,
32,
118,
101,
99,
50,
32,
102,
115,
112,
117,
116,
95,
117,
118,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
49,
41,
32,
105,
110,
32,
118,
101,
99,
52,
32,
102,
115,
112,
117,
116,
95,
99,
111,
108,
111,
114,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
48,
41,
32,
111,
117,
116,
32,
118,
101,
99,
52,
32,
95,
101,
110,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
59,
10,
10,
118,
111,
105,
100,
32,
109,
97,
105,
110,
40,
41,
10,
123,
10,
32,
32,
32,
32,
118,
101,
99,
50,
32,
95,
50,
51,
53,
32,
61,
32,
100,
70,
100,
120,
40,
102,
115,
112,
117,
116,
95,
117,
118,
41,
59,
10,
32,
32,
32,
32,
118,
101,
99,
50,
32,
95,
50,
51,
56,
32,
61,
32,
100,
70,
100,
121,
40,
102,
115,
112,
117,
116,
95,
117,
118,
41,
59,
10,
32,
32,
32,
32,
118,
101,
99,
50,
32,
95,
50,
52,
52,
32,
61,
32,
95,
50,
51,
53,
32,
42,
32,
48,
46,
49,
50,
53,
59,
10,
32,
32,
32,
32,
118,
101,
99,
50,
32,
95,
50,
52,
57,
32,
61,
32,
95,
50,
51,
56,
32,
42,
32,
48,
46,
51,
55,
53,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
95,
50,
54,
48,
32,
61,
32,
116,
101,
120,
116,
117,
114,
101,
40,
100,
105,
102,
102,
117,
115,
101,
44,
32,
40,
40,
102,
115,
112,
117,
116,
95,
117,
118,
32,
43,
32,
95,
50,
52,
52,
41,
32,
43,
32,
95,
50,
52,
57,
41,
46,
120,
121,
44,
32,
45,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
95,
50,
56,
52,
32,
61,
32,
116,
101,
120,
116,
117,
114,
101,
40,
100,
105,
102,
102,
117,
115,
101,
44,
32,
40,
40,
102,
115,
112,
117,
116,
95,
117,
118,
32,
45,
32,
95,
50,
52,
52,
41,
32,
45,
32,
95,
50,
52,
57,
41,
46,
120,
121,
44,
32,
45,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
118,
101,
99,
50,
32,
95,
50,
57,
50,
32,
61,
32,
95,
50,
51,
53,
32,
42,
32,
48,
46,
51,
55,
53,
59,
10,
32,
32,
32,
32,
118,
101,
99,
50,
32,
95,
50,
57,
55,
32,
61,
32,
95,
50,
51,
56,
32,
42,
32,
48,
46,
49,
50,
53,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
95,
51,
48,
56,
32,
61,
32,
116,
101,
120,
116,
117,
114,
101,
40,
100,
105,
102,
102,
117,
115,
101,
44,
32,
40,
40,
102,
115,
112,
117,
116,
95,
117,
118,
32,
43,
32,
95,
50,
57,
50,
41,
32,
45,
32,
95,
50,
57,
55,
41,
46,
120,
121,
44,
32,
45,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
118,
101,
99,
52,
32,
95,
51,
51,
50,
32,
61,
32,
116,
101,
120,
116,
117,
114,
101,
40,
100,
105,
102,
102,
117,
115,
101,
44,
32,
40,
40,
102,
115,
112,
117,
116,
95,
117,
118,
32,
45,
32,
95,
50,
57,
50,
41,
32,
43,
32,
95,
50,
57,
55,
41,
46,
120,
121,
44,
32,
45,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
102,
108,
111,
97,
116,
32,
95,
51,
52,
49,
32,
61,
32,
40,
40,
40,
40,
95,
50,
54,
48,
32,
43,
32,
95,
50,
56,
52,
41,
32,
43,
32,
95,
51,
48,
56,
41,
32,
43,
32,
95,
51,
51,
50,
41,
32,
42,
32,
48,
46,
50,
53,
41,
46,
120,
32,
42,
32,
102,
115,
112,
117,
116,
95,
99,
111,
108,
111,
114,
46,
119,
59,
10,
32,
32,
32,
32,
105,
102,
32,
40,
40,
95,
51,
52,
49,
32,
45,
32,
48,
46,
48,
48,
52,
48,
48,
48,
48,
48,
48,
49,
56,
57,
57,
56,
57,
56,
48,
53,
50,
50,
49,
53,
53,
55,
54,
49,
55,
49,
56,
55,
53,
41,
32,
60,
32,
48,
46,
48,
41,
10,
32,
32,
32,
32,
123,
10,
32,
32,
32,
32,
32,
32,
32,
32,
100,
105,
115,
99,
97,
114,
100,
59,
10,
32,
32,
32,
32,
125,
10,
32,
32,
32,
32,
95,
101,
110,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
32,
61,
32,
118,
101,
99,
52,
40,
102,
115,
112,
117,
116,
95,
99,
111,
108,
111,
114,
46,
120,
121,
122,
44,
32,
95,
51,
52,
49,
41,
59,
10,
125,
10,
10,
0,
3,
0,
0,
0,
2,
0,
0,
0,
11,
4,
0,
0,
35,
118,
101,
114,
115,
105,
111,
110,
32,
51,
50,
48,
32,
101,
115,
10,
112,
114,
101,
99,
105,
115,
105,
111,
110,
32,
109,
101,
100,
105,
117,
109,
112,
32,
102,
108,
111,
97,
116,
59,
10,
112,
114,
101,
99,
105,
115,
105,
111,
110,
32,
104,
105,
103,
104,
112,
32,
105,
110,
116,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
98,
105,
110,
100,
105,
110,
103,
32,
61,
32,
48,
41,
32,
117,
110,
105,
102,
111,
114,
109,
32,
104,
105,
103,
104,
112,
32,
115,
97,
109,
112,
108,
101,
114,
50,
68,
32,
100,
105,
102,
102,
117,
115,
101,
59,
10,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
48,
41,
32,
105,
110,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
50,
32,
102,
115,
112,
117,
116,
95,
117,
118,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
49,
41,
32,
105,
110,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
52,
32,
102,
115,
112,
117,
116,
95,
99,
111,
108,
111,
114,
59,
10,
108,
97,
121,
111,
117,
116,
40,
108,
111,
99,
97,
116,
105,
111,
110,
32,
61,
32,
48,
41,
32,
111,
117,
116,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
52,
32,
95,
101,
110,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
59,
10,
10,
118,
111,
105,
100,
32,
109,
97,
105,
110,
40,
41,
10,
123,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
50,
32,
95,
50,
51,
53,
32,
61,
32,
100,
70,
100,
120,
40,
102,
115,
112,
117,
116,
95,
117,
118,
41,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
50,
32,
95,
50,
51,
56,
32,
61,
32,
100,
70,
100,
121,
40,
102,
115,
112,
117,
116,
95,
117,
118,
41,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
50,
32,
95,
50,
52,
52,
32,
61,
32,
95,
50,
51,
53,
32,
42,
32,
48,
46,
49,
50,
53,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
50,
32,
95,
50,
52,
57,
32,
61,
32,
95,
50,
51,
56,
32,
42,
32,
48,
46,
51,
55,
53,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
52,
32,
95,
50,
54,
48,
32,
61,
32,
116,
101,
120,
116,
117,
114,
101,
40,
100,
105,
102,
102,
117,
115,
101,
44,
32,
40,
40,
102,
115,
112,
117,
116,
95,
117,
118,
32,
43,
32,
95,
50,
52,
52,
41,
32,
43,
32,
95,
50,
52,
57,
41,
46,
120,
121,
44,
32,
45,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
52,
32,
95,
50,
56,
52,
32,
61,
32,
116,
101,
120,
116,
117,
114,
101,
40,
100,
105,
102,
102,
117,
115,
101,
44,
32,
40,
40,
102,
115,
112,
117,
116,
95,
117,
118,
32,
45,
32,
95,
50,
52,
52,
41,
32,
45,
32,
95,
50,
52,
57,
41,
46,
120,
121,
44,
32,
45,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
50,
32,
95,
50,
57,
50,
32,
61,
32,
95,
50,
51,
53,
32,
42,
32,
48,
46,
51,
55,
53,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
50,
32,
95,
50,
57,
55,
32,
61,
32,
95,
50,
51,
56,
32,
42,
32,
48,
46,
49,
50,
53,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
52,
32,
95,
51,
48,
56,
32,
61,
32,
116,
101,
120,
116,
117,
114,
101,
40,
100,
105,
102,
102,
117,
115,
101,
44,
32,
40,
40,
102,
115,
112,
117,
116,
95,
117,
118,
32,
43,
32,
95,
50,
57,
50,
41,
32,
45,
32,
95,
50,
57,
55,
41,
46,
120,
121,
44,
32,
45,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
118,
101,
99,
52,
32,
95,
51,
51,
50,
32,
61,
32,
116,
101,
120,
116,
117,
114,
101,
40,
100,
105,
102,
102,
117,
115,
101,
44,
32,
40,
40,
102,
115,
112,
117,
116,
95,
117,
118,
32,
45,
32,
95,
50,
57,
50,
41,
32,
43,
32,
95,
50,
57,
55,
41,
46,
120,
121,
44,
32,
45,
49,
46,
48,
41,
59,
10,
32,
32,
32,
32,
104,
105,
103,
104,
112,
32,
102,
108,
111,
97,
116,
32,
95,
51,
52,
49,
32,
61,
32,
40,
40,
40,
40,
95,
50,
54,
48,
32,
43,
32,
95,
50,
56,
52,
41,
32,
43,
32,
95,
51,
48,
56,
41,
32,
43,
32,
95,
51,
51,
50,
41,
32,
42,
32,
48,
46,
50,
53,
41,
46,
120,
32,
42,
32,
102,
115,
112,
117,
116,
95,
99,
111,
108,
111,
114,
46,
119,
59,
10,
32,
32,
32,
32,
105,
102,
32,
40,
40,
95,
51,
52,
49,
32,
45,
32,
48,
46,
48,
48,
52,
48,
48,
48,
48,
48,
48,
49,
56,
57,
57,
56,
57,
56,
48,
53,
50,
50,
49,
53,
53,
55,
54,
49,
55,
49,
56,
55,
53,
41,
32,
60,
32,
48,
46,
48,
41,
10,
32,
32,
32,
32,
123,
10,
32,
32,
32,
32,
32,
32,
32,
32,
100,
105,
115,
99,
97,
114,
100,
59,
10,
32,
32,
32,
32,
125,
10,
32,
32,
32,
32,
95,
101,
110,
116,
114,
121,
80,
111,
105,
110,
116,
79,
117,
116,
112,
117,
116,
32,
61,
32,
118,
101,
99,
52,
40,
102,
115,
112,
117,
116,
95,
99,
111,
108,
111,
114,
46,
120,
121,
122,
44,
32,
95,
51,
52,
49,
41,
59,
10,
125,
10,
10,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
};
| 35,285 |
2,151 | package com.android.ex.chips;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.StateListDrawable;
import android.net.Uri;
import android.support.annotation.DrawableRes;
import android.support.annotation.IdRes;
import android.support.annotation.LayoutRes;
import android.support.annotation.Nullable;
import android.support.v4.view.MarginLayoutParamsCompat;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.TextUtils;
import android.text.style.ForegroundColorSpan;
import android.text.util.Rfc822Tokenizer;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.view.ViewGroup.MarginLayoutParams;
import android.widget.ImageView;
import android.widget.TextView;
import com.android.ex.chips.Queries.Query;
/**
* A class that inflates and binds the views in the dropdown list from
* RecipientEditTextView.
*/
public class DropdownChipLayouter {
/**
* The type of adapter that is requesting a chip layout.
*/
public enum AdapterType {
BASE_RECIPIENT,
RECIPIENT_ALTERNATES,
SINGLE_RECIPIENT
}
public interface ChipDeleteListener {
void onChipDelete();
}
/**
* Listener that handles dismissal of the entries of the
* {@link RecipientEntry#ENTRY_TYPE_PERMISSION_REQUEST} type.
*/
public interface PermissionRequestDismissedListener {
/**
* Callback that occurs when the user dismisses the item that asks the user to grant permissions to
* the app.
*/
void onPermissionRequestDismissed();
}
private final LayoutInflater mInflater;
private final Context mContext;
private ChipDeleteListener mDeleteListener;
private PermissionRequestDismissedListener mPermissionRequestDismissedListener;
private Query mQuery;
private int mAutocompleteDividerMarginStart;
public DropdownChipLayouter(LayoutInflater inflater, Context context) {
mInflater = inflater;
mContext = context;
mAutocompleteDividerMarginStart =
context.getResources().getDimensionPixelOffset(R.dimen.chip_wrapper_start_padding);
}
public void setQuery(Query query) {
mQuery = query;
}
public void setDeleteListener(ChipDeleteListener listener) {
mDeleteListener = listener;
}
public void setPermissionRequestDismissedListener(PermissionRequestDismissedListener listener) {
mPermissionRequestDismissedListener = listener;
}
public void setAutocompleteDividerMarginStart(int autocompleteDividerMarginStart) {
mAutocompleteDividerMarginStart = autocompleteDividerMarginStart;
}
/**
* Lays out and binds recipient information to the view. If convertView is null, inflates a new
* view with {@link #getItemLayoutResId(AdapterType)}.
*
* @param convertView The view to bind information to.
* @param parent The parent to bind the view to if we inflate a new view.
* @param entry The recipient entry to get information from.
* @param position The position in the list.
* @param type The adapter type that is requesting the bind.
* @param constraint The constraint typed in the auto complete view.
*
* @return A view ready to be shown in the drop down list.
*/
public View bindView(View convertView, ViewGroup parent, RecipientEntry entry, int position,
AdapterType type, String constraint) {
return bindView(convertView, parent, entry, position, type, constraint, null);
}
/**
* See {@link #bindView(View, ViewGroup, RecipientEntry, int, AdapterType, String)}
* @param deleteDrawable a {@link android.graphics.drawable.StateListDrawable} representing
* the delete icon. android.R.attr.state_activated should map to the delete icon, and the
* default state can map to a drawable of your choice (or null for no drawable).
*/
public View bindView(View convertView, ViewGroup parent, RecipientEntry entry, int position,
AdapterType type, String constraint, StateListDrawable deleteDrawable) {
// Default to show all the information
CharSequence[] styledResults = getStyledResults(constraint, entry);
CharSequence displayName = styledResults[0];
CharSequence destination = styledResults[1];
boolean showImage = true;
CharSequence destinationType = getDestinationType(entry);
final View itemView = reuseOrInflateView(convertView, parent, type);
final ViewHolder viewHolder = new ViewHolder(itemView);
// Hide some information depending on the adapter type.
switch (type) {
case BASE_RECIPIENT:
if (TextUtils.isEmpty(displayName) || TextUtils.equals(displayName, destination)) {
displayName = destination;
// We only show the destination for secondary entries, so clear it only for the
// first level.
if (entry.isFirstLevel()) {
destination = null;
}
}
if (!entry.isFirstLevel()) {
displayName = null;
showImage = false;
}
// For BASE_RECIPIENT set all top dividers except for the first one to be GONE.
if (viewHolder.topDivider != null) {
viewHolder.topDivider.setVisibility(position == 0 ? View.VISIBLE : View.GONE);
MarginLayoutParamsCompat.setMarginStart(
(MarginLayoutParams) viewHolder.topDivider.getLayoutParams(),
mAutocompleteDividerMarginStart);
}
if (viewHolder.bottomDivider != null) {
MarginLayoutParamsCompat.setMarginStart(
(MarginLayoutParams) viewHolder.bottomDivider.getLayoutParams(),
mAutocompleteDividerMarginStart);
}
break;
case RECIPIENT_ALTERNATES:
if (position != 0) {
displayName = null;
showImage = false;
}
break;
case SINGLE_RECIPIENT:
if (!PhoneUtil.isPhoneNumber(entry.getDestination())) {
destination = Rfc822Tokenizer.tokenize(entry.getDestination())[0].getAddress();
}
destinationType = null;
}
// Bind the information to the view
bindTextToView(displayName, viewHolder.displayNameView);
bindTextToView(destination, viewHolder.destinationView);
bindTextToView(destinationType, viewHolder.destinationTypeView);
bindIconToView(showImage, entry, viewHolder.imageView, type);
bindDrawableToDeleteView(deleteDrawable, entry.getDisplayName(), viewHolder.deleteView);
bindIndicatorToView(
entry.getIndicatorIconId(), entry.getIndicatorText(), viewHolder.indicatorView);
bindPermissionRequestDismissView(viewHolder.permissionRequestDismissView);
// Hide some view groups depending on the entry type
final int entryType = entry.getEntryType();
if (entryType == RecipientEntry.ENTRY_TYPE_PERSON) {
setViewVisibility(viewHolder.personViewGroup, View.VISIBLE);
setViewVisibility(viewHolder.permissionViewGroup, View.GONE);
setViewVisibility(viewHolder.permissionBottomDivider, View.GONE);
} else if (entryType == RecipientEntry.ENTRY_TYPE_PERMISSION_REQUEST) {
setViewVisibility(viewHolder.personViewGroup, View.GONE);
setViewVisibility(viewHolder.permissionViewGroup, View.VISIBLE);
setViewVisibility(viewHolder.permissionBottomDivider, View.VISIBLE);
}
return itemView;
}
/**
* Returns a new view with {@link #getItemLayoutResId(AdapterType)}.
*/
public View newView(AdapterType type) {
return mInflater.inflate(getItemLayoutResId(type), null);
}
/**
* Returns the same view, or inflates a new one if the given view was null.
*/
protected View reuseOrInflateView(View convertView, ViewGroup parent, AdapterType type) {
int itemLayout = getItemLayoutResId(type);
switch (type) {
case BASE_RECIPIENT:
case RECIPIENT_ALTERNATES:
break;
case SINGLE_RECIPIENT:
itemLayout = getAlternateItemLayoutResId(type);
break;
}
return convertView != null ? convertView : mInflater.inflate(itemLayout, parent, false);
}
/**
* Binds the text to the given text view. If the text was null, hides the text view.
*/
protected void bindTextToView(CharSequence text, TextView view) {
if (view == null) {
return;
}
if (text != null) {
view.setText(text);
view.setVisibility(View.VISIBLE);
} else {
view.setVisibility(View.GONE);
}
}
/**
* Binds the avatar icon to the image view. If we don't want to show the image, hides the
* image view.
*/
protected void bindIconToView(boolean showImage, RecipientEntry entry, ImageView view,
AdapterType type) {
if (view == null) {
return;
}
if (showImage) {
switch (type) {
case BASE_RECIPIENT:
byte[] photoBytes = entry.getPhotoBytes();
if (photoBytes != null && photoBytes.length > 0) {
final Bitmap photo = BitmapFactory.decodeByteArray(photoBytes, 0,
photoBytes.length);
view.setImageBitmap(photo);
} else {
view.setImageResource(getDefaultPhotoResId());
}
break;
case RECIPIENT_ALTERNATES:
Uri thumbnailUri = entry.getPhotoThumbnailUri();
if (thumbnailUri != null) {
// TODO: see if this needs to be done outside the main thread
// as it may be too slow to get immediately.
view.setImageURI(thumbnailUri);
} else {
view.setImageResource(getDefaultPhotoResId());
}
break;
case SINGLE_RECIPIENT:
default:
break;
}
view.setVisibility(View.VISIBLE);
} else {
view.setVisibility(View.GONE);
}
}
protected void bindDrawableToDeleteView(final StateListDrawable drawable, String recipient,
ImageView view) {
if (view == null) {
return;
}
if (drawable == null) {
view.setVisibility(View.GONE);
} else {
final Resources res = mContext.getResources();
view.setImageDrawable(drawable);
view.setContentDescription(
res.getString(R.string.dropdown_delete_button_desc, recipient));
if (mDeleteListener != null) {
view.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (drawable.getCurrent() != null) {
mDeleteListener.onChipDelete();
}
}
});
}
}
}
protected void bindIndicatorToView(
@DrawableRes int indicatorIconId, String indicatorText, TextView view) {
if (view != null) {
if (indicatorText != null || indicatorIconId != 0) {
view.setText(indicatorText);
view.setVisibility(View.VISIBLE);
final Drawable indicatorIcon;
if (indicatorIconId != 0) {
indicatorIcon = mContext.getDrawable(indicatorIconId).mutate();
indicatorIcon.setColorFilter(Color.WHITE, PorterDuff.Mode.SRC_IN);
} else {
indicatorIcon = null;
}
view.setCompoundDrawablesRelativeWithIntrinsicBounds(
indicatorIcon, null, null, null);
} else {
view.setVisibility(View.GONE);
}
}
}
protected void bindPermissionRequestDismissView(ImageView view) {
if (view == null) {
return;
}
view.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mPermissionRequestDismissedListener != null) {
mPermissionRequestDismissedListener.onPermissionRequestDismissed();
}
}
});
}
protected void setViewVisibility(View view, int visibility) {
if (view != null) {
view.setVisibility(visibility);
}
}
protected CharSequence getDestinationType(RecipientEntry entry) {
return mQuery.getTypeLabel(mContext.getResources(), entry.getDestinationType(),
entry.getDestinationLabel()).toString().toUpperCase();
}
/**
* Returns a layout id for each item inside the auto-complete list.
*
* Each View must contain two TextViews (for display name and destination) and one ImageView
* (for photo). Ids for those should be available via {@link #getDisplayNameResId()},
* {@link #getDestinationResId()}, and {@link #getPhotoResId()}.
*/
protected @LayoutRes int getItemLayoutResId(AdapterType type) {
switch (type) {
case BASE_RECIPIENT:
return R.layout.chips_autocomplete_recipient_dropdown_item;
case RECIPIENT_ALTERNATES:
return R.layout.chips_recipient_dropdown_item;
default:
return R.layout.chips_recipient_dropdown_item;
}
}
/**
* Returns a layout id for each item inside the alternate auto-complete list.
*
* Each View must contain two TextViews (for display name and destination) and one ImageView
* (for photo). Ids for those should be available via {@link #getDisplayNameResId()},
* {@link #getDestinationResId()}, and {@link #getPhotoResId()}.
*/
protected @LayoutRes int getAlternateItemLayoutResId(AdapterType type) {
switch (type) {
case BASE_RECIPIENT:
return R.layout.chips_autocomplete_recipient_dropdown_item;
case RECIPIENT_ALTERNATES:
return R.layout.chips_recipient_dropdown_item;
default:
return R.layout.chips_recipient_dropdown_item;
}
}
/**
* Returns a resource ID representing an image which should be shown when there is no relevant
* photo available.
*/
protected @DrawableRes int getDefaultPhotoResId() {
return R.drawable.ic_contact_picture;
}
/**
* Returns an id for the ViewGroup in an item View that contains the person ui elements.
*/
protected @IdRes int getPersonGroupResId() {
return R.id.chip_person_wrapper;
}
/**
* Returns an id for TextView in an item View for showing a display name. By default
* {@link android.R.id#title} is returned.
*/
protected @IdRes int getDisplayNameResId() {
return android.R.id.title;
}
/**
* Returns an id for TextView in an item View for showing a destination
* (an email address or a phone number).
* By default {@link android.R.id#text1} is returned.
*/
protected @IdRes int getDestinationResId() {
return android.R.id.text1;
}
/**
* Returns an id for TextView in an item View for showing the type of the destination.
* By default {@link android.R.id#text2} is returned.
*/
protected @IdRes int getDestinationTypeResId() {
return android.R.id.text2;
}
/**
* Returns an id for ImageView in an item View for showing the photo image for a person. By default
* {@link android.R.id#icon} is returned.
*/
protected @IdRes int getPhotoResId() {
return android.R.id.icon;
}
/**
* Returns an id for ImageView in an item View for showing the delete button. By default
* {@link android.R.id#icon1} is returned.
*/
protected @IdRes int getDeleteResId() { return android.R.id.icon1; }
/**
* Returns an id for the ViewGroup in an item View that contains the request permission ui
* elements.
*/
protected @IdRes int getPermissionGroupResId() {
return R.id.chip_permission_wrapper;
}
/**
* Returns an id for ImageView in an item View for dismissing the permission request. By default
* {@link android.R.id#icon2} is returned.
*/
protected @IdRes int getPermissionRequestDismissResId() {
return android.R.id.icon2;
}
/**
* Given a constraint and a recipient entry, tries to find the constraint in the name and
* destination in the recipient entry. A foreground font color style will be applied to the
* section that matches the constraint. As soon as a match has been found, no further matches
* are attempted.
*
* @param constraint A string that we will attempt to find within the results.
* @param entry The recipient entry to style results for.
*
* @return An array of CharSequences, the length determined by the length of results. Each
* CharSequence will either be a styled SpannableString or just the input String.
*/
protected CharSequence[] getStyledResults(@Nullable String constraint, RecipientEntry entry) {
return getStyledResults(constraint, entry.getDisplayName(), entry.getDestination());
}
/**
* Given a constraint and results, tries to find the constraint in those results, one at a time.
* A foreground font color style will be applied to the section that matches the constraint. As
* soon as a match has been found, no further matches are attempted.
*
* @param constraint A string that we will attempt to find within the results.
* @param results Strings that may contain the constraint. The order given is the order used to
* search for the constraint.
*
* @return An array of CharSequences, the length determined by the length of results. Each
* CharSequence will either be a styled SpannableString or just the input String.
*/
protected CharSequence[] getStyledResults(@Nullable String constraint, String... results) {
if (isAllWhitespace(constraint)) {
return results;
}
CharSequence[] styledResults = new CharSequence[results.length];
boolean foundMatch = false;
for (int i = 0; i < results.length; i++) {
String result = results[i];
if (result == null) {
continue;
}
if (!foundMatch) {
int index = result.toLowerCase().indexOf(constraint.toLowerCase());
if (index != -1) {
SpannableStringBuilder styled = SpannableStringBuilder.valueOf(result);
ForegroundColorSpan highlightSpan =
new ForegroundColorSpan(mContext.getResources().getColor(
R.color.chips_dropdown_text_highlighted));
styled.setSpan(highlightSpan,
index, index + constraint.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
styledResults[i] = styled;
foundMatch = true;
continue;
}
}
styledResults[i] = result;
}
return styledResults;
}
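// Illustrative example (hypothetical, not from the original file): with the constraint "ann"
// and the results {"Anna Banana", "anna@example.com"}, only the first match is styled. The
// returned array holds a SpannableStringBuilder of "Anna Banana" whose first three characters
// carry a ForegroundColorSpan, followed by the plain, unchanged String "anna@example.com".
// The name and address here are made up purely for illustration.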
private static boolean isAllWhitespace(@Nullable String string) {
if (TextUtils.isEmpty(string)) {
return true;
}
for (int i = 0; i < string.length(); ++i) {
if (!Character.isWhitespace(string.charAt(i))) {
return false;
}
}
return true;
}
/**
* A holder class for the view. Uses the getters in DropdownChipLayouter to find the ids of the
* corresponding views.
*/
protected class ViewHolder {
public final ViewGroup personViewGroup;
public final TextView displayNameView;
public final TextView destinationView;
public final TextView destinationTypeView;
public final TextView indicatorView;
public final ImageView imageView;
public final ImageView deleteView;
public final View topDivider;
public final View bottomDivider;
public final View permissionBottomDivider;
public final ViewGroup permissionViewGroup;
public final ImageView permissionRequestDismissView;
public ViewHolder(View view) {
personViewGroup = (ViewGroup) view.findViewById(getPersonGroupResId());
displayNameView = (TextView) view.findViewById(getDisplayNameResId());
destinationView = (TextView) view.findViewById(getDestinationResId());
destinationTypeView = (TextView) view.findViewById(getDestinationTypeResId());
imageView = (ImageView) view.findViewById(getPhotoResId());
deleteView = (ImageView) view.findViewById(getDeleteResId());
topDivider = view.findViewById(R.id.chip_autocomplete_top_divider);
bottomDivider = view.findViewById(R.id.chip_autocomplete_bottom_divider);
permissionBottomDivider = view.findViewById(R.id.chip_permission_bottom_divider);
indicatorView = (TextView) view.findViewById(R.id.chip_indicator_text);
permissionViewGroup = (ViewGroup) view.findViewById(getPermissionGroupResId());
permissionRequestDismissView =
(ImageView) view.findViewById(getPermissionRequestDismissResId());
}
}
}
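// Hypothetical usage sketch (not from the original AOSP file): a minimal adapter showing how
// client code might delegate row creation to DropdownChipLayouter. Only bindView(...),
// AdapterType and RecipientEntry come from the library; the adapter class itself, its field
// names and the BASE_RECIPIENT choice are illustrative assumptions.
class ExampleRecipientAdapter extends android.widget.BaseAdapter {
    private final DropdownChipLayouter mLayouter;
    private final java.util.List<RecipientEntry> mEntries;
    private String mConstraint = "";

    ExampleRecipientAdapter(DropdownChipLayouter layouter, java.util.List<RecipientEntry> entries) {
        mLayouter = layouter;
        mEntries = entries;
    }

    void setConstraint(String constraint) {
        mConstraint = constraint;
    }

    @Override
    public int getCount() {
        return mEntries.size();
    }

    @Override
    public Object getItem(int position) {
        return mEntries.get(position);
    }

    @Override
    public long getItemId(int position) {
        return position;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        // Reuses convertView when possible and binds the entry for the main suggestion list.
        return mLayouter.bindView(convertView, parent, mEntries.get(position), position,
                DropdownChipLayouter.AdapterType.BASE_RECIPIENT, mConstraint);
    }
}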
| 9,493 |
582 | <filename>com.archimatetool.editor/src/com/archimatetool/editor/ui/factory/IGraphicalObjectUIProvider.java
/**
* This program and the accompanying materials
* are made available under the terms of the License
* which accompanies this distribution in the file LICENSE.txt
*/
package com.archimatetool.editor.ui.factory;
import org.eclipse.draw2d.geometry.Dimension;
import org.eclipse.swt.graphics.Color;
/**
* Interface for Graphical Object UI Provider
*
* @author <NAME>
*/
public interface IGraphicalObjectUIProvider extends IObjectUIProvider {
/**
* @return the base default size for a figure
*/
static Dimension defaultSize() {
return new Dimension(120, 55);
}
/**
* @return The default colour to use for this object (usually a fill color)
*/
Color getDefaultColor();
/**
* @return The default line colour to use for this object
*/
Color getDefaultLineColor();
/**
* @return The default size (width and height) for this object, as set by the user or inbuilt
*/
Dimension getDefaultSize();
/**
* @deprecated Use {@link #getDefaultSize()}
* @return The default size as set by the user for this object
*/
Dimension getUserDefaultSize();
/**
* @return The default text alignment
*/
int getDefaultTextAlignment();
/**
* @return The default text position
*/
int getDefaultTextPosition();
/**
* @return true if the figure for this object should have an icon in the top-right position
*/
boolean hasIcon();
}
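// Hypothetical sketch (not from the original file): how rendering code might consult a provider
// for figure defaults. Obtaining the provider instance (typically from a UI factory keyed by the
// model object) and the IObjectUIProvider base interface are outside this snippet; the helper
// class and method names below are illustrative only.
final class GraphicalDefaultsExample {
    static void describeDefaults(IGraphicalObjectUIProvider provider) {
        Dimension size = provider.getDefaultSize();
        Color fill = provider.getDefaultColor();
        Color line = provider.getDefaultLineColor();
        // A newly created figure for this object would be sized and coloured with these values.
        System.out.println("Default size: " + size.width + "x" + size.height
                + ", fill colour: " + fill + ", line colour: " + line);
    }
}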
| 548 |
386 | /*
* Copyright (c) 2017 by <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.hansolo.fx.charts.heatmap;
public class HeatMapSpot {
private double x;
private double y;
private double radius;
private OpacityDistribution opacityDistribution;
// ******************** Constructors **************************************
public HeatMapSpot(final double X, final double Y) {
this(X, Y, 15.5, OpacityDistribution.CUSTOM);
}
public HeatMapSpot(final double X, final double Y, final double RADIUS) {
this(X, Y, RADIUS, OpacityDistribution.CUSTOM);
}
public HeatMapSpot(final double X, final double Y, final double RADIUS, final OpacityDistribution OPACITY_GRADIENT) {
x = X;
y = Y;
radius = RADIUS;
opacityDistribution = OPACITY_GRADIENT;
}
// ******************** Methods *******************************************
public double getX() { return x; }
public double getY() { return y; }
public double getRadius() { return radius; }
public void setRadius(final double RADIUS) { radius = RADIUS; }
public OpacityDistribution getOpacityDistribution() { return opacityDistribution; }
public void setOpacityDistribution(final OpacityDistribution OPACITY_GRADIENT) { opacityDistribution = OPACITY_GRADIENT; }
}
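// Hypothetical usage sketch (HeatMapSpotExample is not part of this library;
// only the constructors and accessors above, plus OpacityDistribution.CUSTOM,
// are taken from the real code): creating and adjusting a few spots.
class HeatMapSpotExample {
    public static void main(String[] args) {
        HeatMapSpot plain  = new HeatMapSpot(120, 80);                                // radius 15.5, CUSTOM distribution
        HeatMapSpot sized  = new HeatMapSpot(200, 40, 25);                            // explicit radius
        HeatMapSpot custom = new HeatMapSpot(60, 60, 30, OpacityDistribution.CUSTOM); // explicit distribution
        custom.setRadius(35);
        System.out.println(plain.getX() + "/" + plain.getY() + " r=" + sized.getRadius());
    }
}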
| 671 |
361 | <gh_stars>100-1000
/*
SlimeVR Code is placed under the MIT license
Copyright (c) 2021 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#ifndef SLIMEVR_CREDENTIALS_H_
#define SLIMEVR_CREDENTIALS_H_
// The OTA password is public; the server should know it for OTA updates,
// and the devices don't have any authentication anyway.
// We keep a password here to prevent random attacks on IoT devices by
// malware that tries to hack every ESP-based device and upload malicious
// firmware. We don't have any hardware button for the user to confirm an
// OTA update, so this is the best protection we have.
// OTA is allowed only for the first 60 seconds after device startup.
const char *otaPassword = "<PASSWORD>"; // YOUR OTA PASSWORD HERE, LEAVE EMPTY TO DISABLE OTA UPDATES
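// Hypothetical consumption sketch (not part of this header; ArduinoOTA is the
// usual ESP OTA library, and how the firmware enforces the 60-second window is
// an assumption, not shown here):
//
//     #include <ArduinoOTA.h>
//
//     if (strlen(otaPassword) > 0) {
//         ArduinoOTA.setPassword(otaPassword); // empty password leaves OTA disabled
//     }
//     ArduinoOTA.begin();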
#endif // SLIMEVR_CREDENTIALS_H_
| 525 |
833 | import logging
from urllib.parse import urlencode
import tornado.concurrent
import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.websocket
from utils.tornado_utils import redirect_relative
from web.web_utils import identify_user
LOGGER = logging.getLogger('web_server')
webpack_prefixed_extensions = ['.css', '.js.map', '.js', '.jpg', '.woff', '.woff2']
# For REST requests we don't redirect explicitly, but reply with an Unauthorized code.
# The client application should handle the redirection in whatever way it prefers.
def check_authorization(func):
def wrapper(self, *args, **kwargs):
auth = self.application.auth
authorizer = self.application.authorizer
login_url = self.get_login_url()
request_path = self.request.path
login_resource = is_allowed_during_login(request_path, login_url, self)
if login_resource:
return func(self, *args, **kwargs)
authenticated = auth.is_authenticated(self)
access_allowed = authenticated and authorizer.is_allowed_in_app(identify_user(self))
if authenticated and (not access_allowed):
user = identify_user(self)
LOGGER.warning('User ' + user + ' is not allowed')
code = 403
message = 'Access denied. Please contact system administrator'
if isinstance(self, tornado.websocket.WebSocketHandler):
self.close(code=code, reason=message)
else:
raise tornado.web.HTTPError(code, message)
if authenticated and access_allowed:
return func(self, *args, **kwargs)
if not isinstance(self, tornado.web.StaticFileHandler):
message = 'Not authenticated'
code = 401
LOGGER.warning('%s %s %s: user is not authenticated' % (code, self.request.method, request_path))
if isinstance(self, tornado.websocket.WebSocketHandler):
self.close(code=code, reason=message)
return
else:
raise tornado.web.HTTPError(code, message)
login_url += "?" + urlencode(dict(next=request_path))
redirect_relative(login_url, self)
return
return wrapper
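# Hypothetical usage sketch (ExampleStatusHandler and its URL mapping are
# assumptions, not part of this module): the decorator wraps handler methods so
# browser requests are redirected to the login page while REST and WebSocket
# requests receive 401/403 directly.
class ExampleStatusHandler(tornado.web.RequestHandler):
    @check_authorization
    def get(self):
        # Reached only when the user is authenticated and allowed by the authorizer.
        self.write({'status': 'ok', 'user': identify_user(self)})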
def is_allowed_during_login(request_path, login_url, request_handler):
if request_handler.request.method != 'GET':
return False
if request_path == '/favicon.ico':
return True
if request_path == login_url:
return True
request_path = remove_webpack_suffixes(request_path)
login_resources = ['/js/login.js',
'/js/login.js.map',
'/js/chunk-login-vendors.js',
'/js/chunk-login-vendors.js.map',
'/favicon.ico',
'/css/login.css',
'/css/chunk-login-vendors.css',
'/fonts/roboto-latin-500.woff2',
'/fonts/roboto-latin-500.woff',
'/fonts/roboto-latin-400.woff2',
'/fonts/roboto-latin-400.woff',
'/img/titleBackground_login.jpg']
return (request_path in login_resources) or (request_path.startswith('/theme/'))
def remove_webpack_suffixes(request_path):
if request_path.endswith('.js.map'):
extension_start = len(request_path) - 7
else:
extension_start = request_path.rfind('.')
extension = request_path[extension_start:]
if extension not in webpack_prefixed_extensions:
return request_path
if extension_start < 0:
return request_path
prefix_start = request_path.rfind('.', 0, extension_start)
if prefix_start < 0:
return request_path
return request_path[:prefix_start] + extension
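# Illustrative behaviour of remove_webpack_suffixes (the hashed file names below
# are made up): webpack content hashes are stripped, while paths without a known
# webpack extension pass through unchanged.
def _remove_webpack_suffixes_examples():
    assert remove_webpack_suffixes('/js/login.6f1c2a.js') == '/js/login.js'
    assert remove_webpack_suffixes('/js/login.6f1c2a.js.map') == '/js/login.js.map'
    assert remove_webpack_suffixes('/api/status') == '/api/status'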
| 1,645 |
1,111 | <filename>demos/kitchen_sink/libs/baseclass/chips.py
from kivy.uix.screenmanager import Screen
from kivymd.toast import toast
class KitchenSinkChips(Screen):
def callback_for_menu_items(self, instance):
toast(instance.text)
| 90 |