max_stars_count (int64, 301 to 224k) | text (stringlengths 6 to 1.05M) | token_count (int64, 3 to 727k) |
---|---|---|
572 | <reponame>BrunoPontesLira/live-de-python
from asyncio import gather, run
from httpx import AsyncClient
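# PokeAPI REST endpoint; {number} is replaced with the Pokémon's id.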
base_url = 'https://pokeapi.co/api/v2/pokemon/{number}'
async def download(number):
async with AsyncClient() as client:
response = await client.get(
base_url.format(number=number),
timeout=None
)
print(number)
return number, response.json()['name']
async def coro(start, stop):
return await gather(
        *[download(number) for number in range(start, stop)]
)
from pprint import pprint
result = run(coro(1, 5))
pprint(result)
| 252 |
387 | /**
* Copyright 2019-2021 LinkedIn Corporation. All rights reserved.
* Licensed under the BSD-2 Clause license.
* See LICENSE in the project root for license information.
*/
package com.linkedin.coral.spark.utils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rel.type.RelDataTypeFieldImpl;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelRecordType;
import org.apache.calcite.sql.type.ArraySqlType;
import org.apache.calcite.sql.type.BasicSqlType;
import org.apache.calcite.sql.type.MapSqlType;
import org.apache.calcite.sql.type.SqlTypeName;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
public class RelDataTypeToSparkDataTypeStringConverterTest {
@Test
public void testPrimitiveRelDataType() {
Multimap<String, SqlTypeName> sparkDataTypeStringToSqlTypeNameMap =
new ImmutableMultimap.Builder<String, SqlTypeName>().put("\"string\"", SqlTypeName.VARCHAR)
.put("\"integer\"", SqlTypeName.INTEGER).put("\"short\"", SqlTypeName.SMALLINT)
.put("\"long\"", SqlTypeName.BIGINT).put("\"double\"", SqlTypeName.DOUBLE)
.put("\"float\"", SqlTypeName.FLOAT).put("\"boolean\"", SqlTypeName.BOOLEAN)
.put("\"date\"", SqlTypeName.DATE).put("\"timestamp\"", SqlTypeName.TIMESTAMP)
.put("\"calendarinterval\"", SqlTypeName.INTERVAL_DAY).put("\"binary\"", SqlTypeName.BINARY)
.put("\"null\"", SqlTypeName.NULL).build();
for (Map.Entry<String, SqlTypeName> entry : sparkDataTypeStringToSqlTypeNameMap.entries()) {
String expectedSparkDataTypeSchemaString = entry.getKey();
SqlTypeName sqlTypeName = entry.getValue();
RelDataType relDataType = new BasicSqlType(RelDataTypeSystem.DEFAULT, sqlTypeName);
String sparkDataTypeSchemaString = RelDataTypeToSparkDataTypeStringConverter.convertRelDataType(relDataType);
assertEquals(sparkDataTypeSchemaString, expectedSparkDataTypeSchemaString);
}
}
@Test
public void testStructRelDataType() {
String expectedSparkDataTypeSchemaString = "{" + "\"type\":\"struct\"," + "\"fields\":["
+ "{\"type\":\"string\",\"name\":\"str\",\"nullable\":true,\"metadata\":{}},"
+ "{\"type\":\"integer\",\"name\":\"int\",\"nullable\":true,\"metadata\":{}}" + "]}";
List<RelDataTypeField> fields = new ArrayList<>();
fields.add(new RelDataTypeFieldImpl("str", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.VARCHAR)));
fields.add(new RelDataTypeFieldImpl("int", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER)));
RelRecordType relRecordType = new RelRecordType(fields);
String sparkDataTypeSchemaString = RelDataTypeToSparkDataTypeStringConverter.convertRelDataType(relRecordType);
assertEquals(sparkDataTypeSchemaString, expectedSparkDataTypeSchemaString);
}
@Test
public void testArrayRelDataType() {
String expectedSparkDataTypeSchemaString = "{\"type\":\"array\",\"containsNull\":true,\"elementType\":\"integer\"}";
ArraySqlType arraySqlType =
new ArraySqlType(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER), true);
String sparkDataTypeSchemaString = RelDataTypeToSparkDataTypeStringConverter.convertRelDataType(arraySqlType);
assertEquals(sparkDataTypeSchemaString, expectedSparkDataTypeSchemaString);
}
@Test
public void testMapRelDataType() {
String expectedSparkDataTypeSchemaString =
"{\"type\":\"map\",\"valueContainsNull\":true,\"keyType\":\"integer\",\"valueType\":\"integer\"}";
MapSqlType mapSqlType = new MapSqlType(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER),
new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER), true);
String sparkDataTypeSchemaString = RelDataTypeToSparkDataTypeStringConverter.convertRelDataType(mapSqlType);
assertEquals(sparkDataTypeSchemaString, expectedSparkDataTypeSchemaString);
}
@Test
public void testNestedStructRelDataType() {
String expectedSparkDataTypeSchemaString = "{" + "\"type\":\"struct\"," + "\"fields\":["
+ "{\"type\":\"string\",\"name\":\"str\",\"nullable\":true,\"metadata\":{}},"
+ "{\"type\":{\"type\":\"struct\",\"fields\":["
+ "{\"type\":\"string\",\"name\":\"str\",\"nullable\":true,\"metadata\":{}},"
+ "{\"type\":\"integer\",\"name\":\"int\",\"nullable\":true,\"metadata\":{}}]},"
+ "\"name\":\"struct\",\"nullable\":true,\"metadata\":{}}]}";
List<RelDataTypeField> nestedFields = new ArrayList<>();
nestedFields
.add(new RelDataTypeFieldImpl("str", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.VARCHAR)));
nestedFields
.add(new RelDataTypeFieldImpl("int", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER)));
RelRecordType nestedRelRecordType = new RelRecordType(nestedFields);
List<RelDataTypeField> fields = new ArrayList<>();
fields.add(new RelDataTypeFieldImpl("str", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.VARCHAR)));
fields.add(new RelDataTypeFieldImpl("struct", 0, nestedRelRecordType));
RelRecordType relRecordType = new RelRecordType(fields);
String sparkDataTypeSchemaString = RelDataTypeToSparkDataTypeStringConverter.convertRelDataType(relRecordType);
assertEquals(sparkDataTypeSchemaString, expectedSparkDataTypeSchemaString);
}
@Test
public void testMapWithStructValueRelDataType() {
String expectedSparkDataTypeSchemaString = "{" + "\"type\":\"map\"," + "\"valueContainsNull\":true,"
+ "\"keyType\":\"integer\"," + "\"valueType\":" + "{\"type\":\"struct\",\"fields\":["
+ "{\"type\":\"string\",\"name\":\"str\",\"nullable\":true,\"metadata\":{}},"
+ "{\"type\":\"integer\",\"name\":\"int\",\"nullable\":true,\"metadata\":{}}]}}";
List<RelDataTypeField> fields = new ArrayList<>();
fields.add(new RelDataTypeFieldImpl("str", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.VARCHAR)));
fields.add(new RelDataTypeFieldImpl("int", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER)));
RelRecordType relRecordType = new RelRecordType(fields);
MapSqlType mapSqlType =
new MapSqlType(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER), relRecordType, true);
String sparkDataTypeSchemaString = RelDataTypeToSparkDataTypeStringConverter.convertRelDataType(mapSqlType);
assertEquals(sparkDataTypeSchemaString, expectedSparkDataTypeSchemaString);
}
@Test
public void testArrayWithStructEleRelDataType() {
String expectedSparkDataTypeSchemaString = "{" + "\"type\":\"array\"," + "\"containsNull\":true,"
+ "\"elementType\":" + "{\"type\":\"struct\",\"fields\":["
+ "{\"type\":\"string\",\"name\":\"str\",\"nullable\":true,\"metadata\":{}},"
+ "{\"type\":\"integer\",\"name\":\"int\",\"nullable\":true,\"metadata\":{}}]}}";
List<RelDataTypeField> fields = new ArrayList<>();
fields.add(new RelDataTypeFieldImpl("str", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.VARCHAR)));
fields.add(new RelDataTypeFieldImpl("int", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER)));
RelRecordType relRecordType = new RelRecordType(fields);
ArraySqlType arraySqlType = new ArraySqlType(relRecordType, true);
String sparkDataTypeSchemaString = RelDataTypeToSparkDataTypeStringConverter.convertRelDataType(arraySqlType);
assertEquals(sparkDataTypeSchemaString, expectedSparkDataTypeSchemaString);
}
@Test
public void testComplexRelDataType() {
String expectedSparkDataTypeSchemaString =
"{" + "\"type\":\"map\"," + "\"valueContainsNull\":true," + "\"keyType\":\"integer\"," + "\"valueType\":"
+ "{\"type\":\"array\",\"containsNull\":true,\"elementType\":" + "{\"type\":\"struct\",\"fields\":["
+ "{\"type\":\"string\",\"name\":\"str\",\"nullable\":true,\"metadata\":{}},"
+ "{\"type\":{\"type\":\"struct\",\"fields\":["
+ "{\"type\":\"string\",\"name\":\"str\",\"nullable\":true,\"metadata\":{}},"
+ "{\"type\":\"integer\",\"name\":\"int\",\"nullable\":true,\"metadata\":{}}]},"
+ "\"name\":\"struct\",\"nullable\":true,\"metadata\":{}}]}}}";
List<RelDataTypeField> nestedFields = new ArrayList<>();
nestedFields
.add(new RelDataTypeFieldImpl("str", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.VARCHAR)));
nestedFields
.add(new RelDataTypeFieldImpl("int", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER)));
RelRecordType nestedRelRecordType = new RelRecordType(nestedFields);
List<RelDataTypeField> fields = new ArrayList<>();
fields.add(new RelDataTypeFieldImpl("str", 0, new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.VARCHAR)));
fields.add(new RelDataTypeFieldImpl("struct", 0, nestedRelRecordType));
RelRecordType relRecordType = new RelRecordType(fields);
ArraySqlType arraySqlType = new ArraySqlType(relRecordType, true);
MapSqlType mapSqlType =
new MapSqlType(new BasicSqlType(RelDataTypeSystem.DEFAULT, SqlTypeName.INTEGER), arraySqlType, true);
String sparkDataTypeSchemaString = RelDataTypeToSparkDataTypeStringConverter.convertRelDataType(mapSqlType);
assertEquals(sparkDataTypeSchemaString, expectedSparkDataTypeSchemaString);
}
}
| 3,500 |
5,169 | {
"name": "PowerUpSwift",
"version": "0.1.0",
"summary": "Make iOS development faster and be more productive.",
"description": "Apple doesn't provide useful utilities to make our lives easier. This might be for a good reason. Use this library to save time and be more productive!",
"homepage": "https://github.com/PowerUpX/PowerUpSwift",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/PowerUpX/PowerUpSwift.git",
"tag": "0.1.0"
},
"platforms": {
"ios": "10.0"
},
"source_files": "PowerUpSwift/Classes/**/*",
"swift_version": "4.1",
"frameworks": [
"UIKit",
"Foundation"
]
}
| 280 |
667 | /*
* Copyright © 2013-2017 <NAME>
*
* This file is part of FreshPlayerPlugin.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#include "pp_interface.h"
#include "pp_resource.h"
#include "ppb_core.h"
#include "ppb_cursor_control.h"
#include "ppb_image_data.h"
#include "ppb_instance.h"
#include "tables.h"
#include "trace_core.h"
#include "trace_helpers.h"
#include <X11/Xcursor/Xcursor.h>
#include <X11/Xlib.h>
#include <X11/cursorfont.h>
#include <glib.h>
#include <pthread.h>
#include <string.h>
struct comt_param_s {
PP_Instance instance_id;
int xtype;
int hide_cursor;
PP_Resource custom_image;
int hotspot_x;
int hotspot_y;
};
static
Cursor
create_cursor_from_image_data_resource(Display *dpy, Window wnd, PP_Resource image_data,
int hotspot_x, int hotspot_y)
{
struct pp_image_data_s *id = pp_resource_acquire(image_data, PP_RESOURCE_IMAGE_DATA);
if (!id) {
trace_warning("%s, bad resource\n", __func__);
return None;
}
XcursorImage *cursor_image = XcursorImageCreate(id->width, id->height);
cursor_image->xhot = hotspot_x;
cursor_image->yhot = hotspot_y;
memcpy(cursor_image->pixels, id->data, id->stride * id->height);
Cursor cursor = XcursorImageLoadCursor(dpy, cursor_image);
XcursorImageDestroy(cursor_image);
pp_resource_release(image_data);
return cursor;
}
void
set_cursor_ptac(void *user_data)
{
Window wnd = None;
Cursor cursor;
struct comt_param_s *params = user_data;
struct pp_instance_s *pp_i = tables_get_pp_instance(params->instance_id);
if (!pp_i)
goto quit;
if (pp_i->is_fullscreen) {
wnd = pp_i->fs_wnd;
} else if (pp_i->windowed_mode) {
wnd = pp_i->wnd;
} else {
if (npn.getvalue(pp_i->npp, NPNVnetscapeWindow, &wnd) != NPERR_NO_ERROR) {
trace_error("%s, failed to get NPNnetscapeWindow\n", __func__);
wnd = None;
}
}
pthread_mutex_lock(&display.lock);
if (params->hide_cursor) {
cursor = display.transparent_cursor;
} else {
if (params->custom_image != 0) {
cursor = create_cursor_from_image_data_resource(display.x, wnd, params->custom_image,
params->hotspot_x, params->hotspot_y);
} else {
cursor = XCreateFontCursor(display.x, params->xtype);
}
}
if (wnd != None && cursor != None) {
XDefineCursor(display.x, wnd, cursor);
XFlush(display.x);
// remember to free cursor unless we hid it
pp_i->have_prev_cursor = !params->hide_cursor;
pp_i->prev_cursor = cursor;
}
pthread_mutex_unlock(&display.lock);
quit:
g_slice_free(struct comt_param_s, params);
}
PP_Bool
ppb_cursor_control_set_cursor(PP_Instance instance, enum PP_CursorType_Dev type,
PP_Resource custom_image, const struct PP_Point *hot_spot)
{
struct pp_instance_s *pp_i = tables_get_pp_instance(instance);
if (!pp_i) {
trace_error("%s, bad instance\n", __func__);
return PP_FALSE;
}
if (!g_atomic_int_get(&pp_i->cursor_inside_instance)) {
// avoid changing pointer shape
return PP_TRUE;
}
int xtype = XC_arrow;
int hide_cursor = 0;
switch (type) {
case PP_CURSORTYPE_CUSTOM:
xtype = XC_arrow;
break;
case PP_CURSORTYPE_POINTER:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_CROSS:
xtype = XC_crosshair;
break;
case PP_CURSORTYPE_HAND:
xtype = XC_hand2;
break;
case PP_CURSORTYPE_IBEAM:
xtype = XC_xterm;
break;
case PP_CURSORTYPE_WAIT:
xtype = XC_watch;
break;
case PP_CURSORTYPE_HELP:
xtype = XC_question_arrow;
break;
case PP_CURSORTYPE_EASTRESIZE:
xtype = XC_right_side;
break;
case PP_CURSORTYPE_NORTHRESIZE:
xtype = XC_top_side;
break;
case PP_CURSORTYPE_NORTHEASTRESIZE:
xtype = XC_top_right_corner;
break;
case PP_CURSORTYPE_NORTHWESTRESIZE:
xtype = XC_top_left_corner;
break;
case PP_CURSORTYPE_SOUTHRESIZE:
xtype = XC_bottom_side;
break;
case PP_CURSORTYPE_SOUTHEASTRESIZE:
xtype = XC_bottom_right_corner;
break;
case PP_CURSORTYPE_SOUTHWESTRESIZE:
xtype = XC_bottom_left_corner;
break;
case PP_CURSORTYPE_WESTRESIZE:
xtype = XC_left_side;
break;
case PP_CURSORTYPE_NORTHSOUTHRESIZE:
xtype = XC_sb_v_double_arrow;
break;
case PP_CURSORTYPE_EASTWESTRESIZE:
xtype = XC_sb_h_double_arrow;
break;
case PP_CURSORTYPE_NORTHEASTSOUTHWESTRESIZE:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_NORTHWESTSOUTHEASTRESIZE:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_COLUMNRESIZE:
xtype = XC_sb_h_double_arrow;
break;
case PP_CURSORTYPE_ROWRESIZE:
xtype = XC_sb_v_double_arrow;
break;
case PP_CURSORTYPE_MIDDLEPANNING:
xtype = XC_fleur;
break;
case PP_CURSORTYPE_EASTPANNING:
xtype = XC_sb_right_arrow;
break;
case PP_CURSORTYPE_NORTHPANNING:
xtype = XC_sb_up_arrow;
break;
case PP_CURSORTYPE_NORTHEASTPANNING:
xtype = XC_top_right_corner;
break;
case PP_CURSORTYPE_NORTHWESTPANNING:
xtype = XC_top_left_corner;
break;
case PP_CURSORTYPE_SOUTHPANNING:
xtype = XC_sb_down_arrow;
break;
case PP_CURSORTYPE_SOUTHEASTPANNING:
xtype = XC_bottom_right_corner;
break;
case PP_CURSORTYPE_SOUTHWESTPANNING:
xtype = XC_bottom_left_corner;
break;
case PP_CURSORTYPE_WESTPANNING:
xtype = XC_sb_left_arrow;
break;
case PP_CURSORTYPE_MOVE:
xtype = XC_fleur;
break;
case PP_CURSORTYPE_VERTICALTEXT:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_CELL:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_CONTEXTMENU:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_ALIAS:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_PROGRESS:
xtype = XC_watch;
break;
case PP_CURSORTYPE_NODROP:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_COPY:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_NONE:
xtype = XC_left_ptr;
hide_cursor = 1;
break;
case PP_CURSORTYPE_NOTALLOWED:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_ZOOMIN:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_ZOOMOUT:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_GRAB:
xtype = XC_left_ptr;
break;
case PP_CURSORTYPE_GRABBING:
xtype = XC_left_ptr;
break;
}
struct comt_param_s *comt_params = g_slice_alloc0(sizeof(*comt_params));
comt_params->instance_id = instance;
comt_params->xtype = xtype;
comt_params->hide_cursor = hide_cursor;
comt_params->custom_image = (type == PP_CURSORTYPE_CUSTOM) ? custom_image : 0;
if (hot_spot) {
comt_params->hotspot_x = hot_spot->x;
comt_params->hotspot_y = hot_spot->y;
}
ppb_core_call_on_browser_thread(instance, set_cursor_ptac, comt_params);
return PP_TRUE;
}
PP_Bool
ppb_cursor_control_lock_cursor(PP_Instance instance)
{
return PP_TRUE;
}
PP_Bool
ppb_cursor_control_unlock_cursor(PP_Instance instance)
{
return PP_TRUE;
}
PP_Bool
ppb_cursor_control_has_cursor_lock(PP_Instance instance)
{
return PP_TRUE;
}
PP_Bool
ppb_cursor_control_can_lock_cursor(PP_Instance instance)
{
return PP_TRUE;
}
// trace wrappers
TRACE_WRAPPER
PP_Bool
trace_ppb_cursor_control_set_cursor(PP_Instance instance, enum PP_CursorType_Dev type,
PP_Resource custom_image, const struct PP_Point *hot_spot)
{
char *s_hot_spot = trace_point_as_string(hot_spot);
trace_info("[PPB] {full} %s instance=%d, type=%d, custom_image=%d, hot_spot=%s\n", __func__+6,
instance, type, custom_image, s_hot_spot);
g_free(s_hot_spot);
return ppb_cursor_control_set_cursor(instance, type, custom_image, hot_spot);
}
TRACE_WRAPPER
PP_Bool
trace_ppb_cursor_control_lock_cursor(PP_Instance instance)
{
trace_info("[PPB] {zilch} %s instance=%d\n", __func__+6, instance);
return ppb_cursor_control_lock_cursor(instance);
}
TRACE_WRAPPER
PP_Bool
trace_ppb_cursor_control_unlock_cursor(PP_Instance instance)
{
trace_info("[PPB] {zilch} %s instance=%d\n", __func__+6, instance);
return ppb_cursor_control_unlock_cursor(instance);
}
TRACE_WRAPPER
PP_Bool
trace_ppb_cursor_control_has_cursor_lock(PP_Instance instance)
{
trace_info("[PPB] {zilch} %s instance=%d\n", __func__+6, instance);
return ppb_cursor_control_has_cursor_lock(instance);
}
TRACE_WRAPPER
PP_Bool
trace_ppb_cursor_control_can_lock_cursor(PP_Instance instance)
{
trace_info("[PPB] {zilch} %s instance=%d\n", __func__+6, instance);
return ppb_cursor_control_can_lock_cursor(instance);
}
const struct PPB_CursorControl_Dev_0_4 ppb_cursor_control_dev_interface_0_4 = {
.SetCursor = TWRAPF(ppb_cursor_control_set_cursor),
.LockCursor = TWRAPZ(ppb_cursor_control_lock_cursor),
.UnlockCursor = TWRAPZ(ppb_cursor_control_unlock_cursor),
.HasCursorLock = TWRAPZ(ppb_cursor_control_has_cursor_lock),
.CanLockCursor = TWRAPZ(ppb_cursor_control_can_lock_cursor),
};
static
void
__attribute__((constructor))
constructor_ppb_cursor_control(void)
{
register_interface(PPB_CURSOR_CONTROL_DEV_INTERFACE_0_4, &ppb_cursor_control_dev_interface_0_4);
}
| 5,385 |
5,447 | """Prepare PASCAL VOC tiny motorbike datasets"""
import os
import autogluon as ag
if __name__ == '__main__':
root = os.path.expanduser('~/.mxnet/datasets/')
if not os.path.exists(root):
os.makedirs(root)
filename_zip = ag.download('https://autogluon.s3.amazonaws.com/datasets/tiny_motorbike.zip', path=root)
filename = ag.unzip(filename_zip, root=root)
data_root = os.path.join(root, filename)
os.remove(filename_zip)
print("dataset saved to: {}".format(data_root)) | 201 |
337 | <reponame>francescomessina/polycube
/**
* bridge API generated from bridge.yang
*
* NOTE: This file is auto generated by polycube-codegen
* https://github.com/polycube-network/polycube-codegen
*/
/* Do not edit this file manually */
/*
* FdbEntryJsonObject.h
*
*
*/
#pragma once
#include "JsonObjectBase.h"
namespace polycube {
namespace service {
namespace model {
enum class FdbEntryTypeEnum {
STATIC, DYNAMIC
};
/// <summary>
///
/// </summary>
class FdbEntryJsonObject : public JsonObjectBase {
public:
FdbEntryJsonObject();
FdbEntryJsonObject(const nlohmann::json &json);
~FdbEntryJsonObject() final = default;
nlohmann::json toJson() const final;
/// <summary>
/// VLAN identifier
/// </summary>
uint16_t getVlan() const;
void setVlan(uint16_t value);
bool vlanIsSet() const;
/// <summary>
/// MAC address of the filtering database entry
/// </summary>
std::string getMac() const;
void setMac(std::string value);
bool macIsSet() const;
/// <summary>
/// Type of filtering entry
/// </summary>
FdbEntryTypeEnum getType() const;
void setType(FdbEntryTypeEnum value);
bool typeIsSet() const;
void unsetType();
static std::string FdbEntryTypeEnum_to_string(const FdbEntryTypeEnum &value);
static FdbEntryTypeEnum string_to_FdbEntryTypeEnum(const std::string &str);
/// <summary>
/// Output port name
/// </summary>
std::string getPort() const;
void setPort(std::string value);
bool portIsSet() const;
/// <summary>
/// Age of the current filtering database entry
/// </summary>
uint32_t getAge() const;
void setAge(uint32_t value);
bool ageIsSet() const;
void unsetAge();
private:
uint16_t m_vlan;
bool m_vlanIsSet;
std::string m_mac;
bool m_macIsSet;
FdbEntryTypeEnum m_type;
bool m_typeIsSet;
std::string m_port;
bool m_portIsSet;
uint32_t m_age;
bool m_ageIsSet;
};
}
}
}
| 679 |
5,169 | {
"name": "DDKeyPathChannel",
"version": "0.0.1",
"summary": "Synchronize data between different object of different class by keyPath.",
"description": "Synchronize data between different object of different class by keyPath.",
"homepage": "https://github.com/djs66256/DDKeyPathChannel",
"license": "MIT",
"authors": {
"Daniel": "<EMAIL>"
},
"platforms": {
"ios": "8.0"
},
"source": {
"git": "https://github.com/djs66256/DDKeyPathChannel.git",
"tag": "0.0.1"
},
"source_files": [
"Classes",
"Classes/**/*.{h,m}"
],
"exclude_files": "Classes/Exclude",
"frameworks": "Foundation",
"requires_arc": true
}
| 256 |
5,169 | {
"name": "CMTextView",
"version": "1.0.0",
"summary": "UITextView subclass with placeholder text.",
"description": "CMTextView is an UITextView subclass which provides the placeholder text functionality.",
"homepage": "https://github.com/Code-Mafia/CMTextView.git",
"license": "MIT",
"authors": {
"<NAME>": "<EMAIL>"
},
"platforms": {
"ios": "11.0"
},
"source": {
"git": "https://github.com/Code-Mafia/CMTextView.git",
"tag": "1.0.1"
},
"source_files": "CMTextView/*"
}
| 200 |
705 | class WorkflowRegistry(object):
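    """Tracks workflow names per class: ``workflows`` maps id(cls) to a set of names, ``class_index`` maps id(cls) back to the class."""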
def __init__(self):
self.workflows = {}
self.class_index = {}
def add(self, name, cls):
self.workflows[id(cls)] = self.workflows.get(id(cls), set())
self.workflows[id(cls)].add(name)
self.class_index[id(cls)] = cls
def get_class_fields(self, model):
return self.workflows[id(model)]
workflow_registry = WorkflowRegistry()
| 186 |
921 | //-*****************************************************************************
//
// Copyright (c) 2009-2011,
// Sony Pictures Imageworks Inc. and
// Industrial Light & Magic, a division of Lucasfilm Entertainment Company Ltd.
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Industrial Light & Magic nor the names of
// its contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//-*****************************************************************************
#include <Alembic/Util/Dimensions.h>
int main( int, char** )
{
using namespace Alembic::Util;
//
// Test fundamental BaseDimensions class
//
{
typedef BaseDimensions<int> IntScalarDimension;
IntScalarDimension rank0;
IntScalarDimension rank0_val1( (int) 1);
IntScalarDimension rank1;
rank1.setRank( 1 );
}
//
// Test Dimensions class
//
{
Dimensions rank00;
Dimensions rank0;
Dimensions rank1( 1 );
assert(rank1.rank() == 1);
assert(rank1[0] == 1);
}
{
Dimensions rank2;
rank2.setRank(2);
assert(rank2.rank() == 2);
Alembic::Util::uint64_t *ptr = rank2.rootPtr();
ptr[0] = 11;
ptr[1] = 12;
assert( rank2[0] == 11 );
assert( rank2[1] == 12 );
Dimensions rank2_copy(rank2);
assert(rank2_copy.rank() == 2);
assert( rank2_copy[0] == 11 );
assert( rank2_copy[1] == 12 );
assert( rank2_copy == rank2 );
Dimensions rank3;
rank3.setRank(3);
ptr = rank3.rootPtr();
ptr[0] = 20;
ptr[1] = 21;
ptr[2] = 22;
rank2_copy = rank3;
assert( rank2_copy == rank3 );
}
std::cout << "Success!" << std::endl;
return 0;
}
| 1,150 |
1,028 | /**
* Copyright 2019-present, GraphQL Foundation
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
#pragma once
#ifdef __cplusplus
extern "C" {
#endif
struct GraphQLAstNode;
/**
* Serialize the given AST to JSON. The returned C string must be
* freed with free().
*/
const char *graphql_ast_to_json(const struct GraphQLAstNode *node);
#ifdef __cplusplus
}
#endif
| 141 |
532 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Setup file for nlpia.
This file was generated with PyScaffold 3.0.
PyScaffold helps you to put up the scaffold of your new Python project.
Learn more under: http://pyscaffold.readthedocs.org/
"""
import sys
from setuptools import setup
# Add here console scripts and other entry points in ini-style format
# audio = nlpia.scripts.audio:main
entry_points = ""
# [console_scripts]
# renderbook = nlpia.scripts.countpages:main
# cleandialog = nlpia.scripts.cleandialog:main
# downloadgdrive = nlpia.scripts.google_drive:main
# """
pyscaffold_ver = '3.1'
if sys.version_info[0] < 3:
pyscaffold_ver = '2.5.10'
def setup_package():
needs_sphinx = {'build_sphinx', 'upload_docs'}.intersection(sys.argv)
sphinx = ['sphinx'] if needs_sphinx else []
setup(setup_requires=['pyscaffold=={}'.format(pyscaffold_ver)] + sphinx,
entry_points=entry_points,
use_pyscaffold=True)
if __name__ == "__main__":
setup_package()
| 403 |
11,868 | <filename>samples/client/petstore/tiny/cpp/lib/Models/Order.cpp<gh_stars>1000+
#include "Order.h"
using namespace Tiny;
Order::Order()
{
id = long(0);
petId = long(0);
quantity = int(0);
shipDate = std::string();
status = std::string();
complete = bool(false);
}
Order::Order(std::string jsonString)
{
this->fromJson(jsonString);
}
Order::~Order()
{
}
void
Order::fromJson(std::string jsonObj)
{
bourne::json object = bourne::json::parse(jsonObj);
const char *idKey = "id";
if(object.has_key(idKey))
{
bourne::json value = object[idKey];
jsonToValue(&id, value, "long");
}
const char *petIdKey = "petId";
if(object.has_key(petIdKey))
{
bourne::json value = object[petIdKey];
jsonToValue(&petId, value, "long");
}
const char *quantityKey = "quantity";
if(object.has_key(quantityKey))
{
bourne::json value = object[quantityKey];
jsonToValue(&quantity, value, "int");
}
const char *shipDateKey = "shipDate";
if(object.has_key(shipDateKey))
{
bourne::json value = object[shipDateKey];
jsonToValue(&shipDate, value, "std::string");
}
const char *statusKey = "status";
if(object.has_key(statusKey))
{
bourne::json value = object[statusKey];
jsonToValue(&status, value, "std::string");
}
const char *completeKey = "complete";
if(object.has_key(completeKey))
{
bourne::json value = object[completeKey];
jsonToValue(&complete, value, "bool");
}
}
bourne::json
Order::toJson()
{
bourne::json object = bourne::json::object();
object["id"] = getId();
object["petId"] = getPetId();
object["quantity"] = getQuantity();
object["shipDate"] = getShipDate();
object["status"] = getStatus();
object["complete"] = isComplete();
return object;
}
long
Order::getId()
{
return id;
}
void
Order::setId(long id)
{
this->id = id;
}
long
Order::getPetId()
{
return petId;
}
void
Order::setPetId(long petId)
{
this->petId = petId;
}
int
Order::getQuantity()
{
return quantity;
}
void
Order::setQuantity(int quantity)
{
this->quantity = quantity;
}
std::string
Order::getShipDate()
{
return shipDate;
}
void
Order::setShipDate(std::string shipDate)
{
this->shipDate = shipDate;
}
std::string
Order::getStatus()
{
return status;
}
void
Order::setStatus(std::string status)
{
this->status = status;
}
bool
Order::isComplete()
{
return complete;
}
void
Order::setComplete(bool complete)
{
this->complete = complete;
}
| 1,207 |
351 | <gh_stars>100-1000
package org.antlr.intellij.plugin.profiler;
import javax.swing.table.AbstractTableModel;
public abstract class ProfilerTableDataModel extends AbstractTableModel {
public abstract String[] getColumnNames();
public abstract String[] getColumnToolTips();
@Override
public String getColumnName(int column) {
return getColumnNames()[column];
}
@Override
public Class<?> getColumnClass(int columnIndex) {
return Integer.class;
}
@Override
public int getColumnCount() {
return getColumnNames().length;
}
}
| 193 |
401 | /*
* Hedgewars, a free turn based strategy game
* Copyright (c) 2004-2015 <NAME> <<EMAIL>>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 2 of the License
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <QGridLayout>
#include <QVBoxLayout>
#include <QLabel>
#include <QListWidget>
#include <QListWidgetItem>
#include <QPushButton>
#include <QTextStream>
#include <QFile>
#include <QLocale>
#include <QSettings>
#include "mission.h"
#include "hwconsts.h"
#include "DataManager.h"
#include "pagetraining.h"
QLayout * PageTraining::bodyLayoutDefinition()
{
QGridLayout * pageLayout = new QGridLayout();
// declare start button, caption and description
btnPreview = formattedButton(":/res/Trainings.png", true);
// tweak widget spacing
pageLayout->setRowStretch(0, 1);
pageLayout->setRowStretch(1, 1);
pageLayout->setRowStretch(2, 1);
pageLayout->setColumnStretch(0, 5);
pageLayout->setColumnStretch(1, 1);
pageLayout->setColumnStretch(2, 9);
pageLayout->setColumnStretch(3, 5);
QWidget * infoWidget = new QWidget();
QHBoxLayout * infoLayout = new QHBoxLayout();
// add preview, caption and description
infoWidget->setLayout(infoLayout);
infoLayout->addWidget(btnPreview);
// center preview
infoLayout->setAlignment(btnPreview, Qt::AlignRight | Qt::AlignVCenter);
// info area (caption on top, description below)
QWidget * infoTextWidget = new QWidget();
QVBoxLayout * infoTextLayout = new QVBoxLayout();
infoTextWidget->setObjectName("trainingInfo");
infoTextWidget->setLayout(infoTextLayout);
lblCaption = new QLabel();
lblCaption->setMinimumWidth(360);
lblCaption->setAlignment(Qt::AlignHCenter | Qt::AlignBottom);
lblCaption->setWordWrap(true);
lblDescription = new QLabel();
lblDescription->setMinimumWidth(360);
lblDescription->setAlignment(Qt::AlignHCenter | Qt::AlignTop);
lblDescription->setWordWrap(true);
lblHighscores = new QLabel();
lblHighscores->setMinimumWidth(360);
lblHighscores->setAlignment(Qt::AlignHCenter | Qt::AlignTop);
infoTextLayout->addWidget(lblCaption);
infoTextLayout->addWidget(lblDescription);
infoTextLayout->addWidget(lblHighscores);
infoLayout->addWidget(infoTextWidget);
pageLayout->addWidget(infoWidget, 0, 1, 1, 2); // span 2 columns
pageLayout->setAlignment(infoTextWidget, Qt::AlignLeft);
// tab widget containing all lists
tbw = new QTabWidget(this);
pageLayout->addWidget(tbw, 1, 0, 1, 4); // span 4 columns
// let's not make the tab widget use more space than needed
tbw->setFixedWidth(400);
pageLayout->setAlignment(tbw, Qt::AlignHCenter);
// training/challenge/scenario lists
lstTrainings = new QListWidget(this);
lstTrainings ->setWhatsThis(tr("Pick the training to play"));
lstTrainings ->setObjectName("trainingList");
lstChallenges = new QListWidget(this);
lstChallenges ->setWhatsThis(tr("Pick the challenge to play"));
lstChallenges ->setObjectName("trainingList");
lstScenarios= new QListWidget(this);
lstScenarios->setWhatsThis(tr("Pick the scenario to play"));
lstScenarios->setObjectName("trainingList");
tbw->addTab(lstTrainings, tr("Trainings"));
tbw->addTab(lstChallenges, tr("Challenges"));
tbw->addTab(lstScenarios, tr("Scenarios"));
tbw->setCurrentWidget(lstTrainings);
QLabel* lblteam = new QLabel(tr("Team"));
CBTeam = new QComboBox(this);
CBTeam->setMaxVisibleItems(30);
pageLayout->addWidget(lblteam, 2, 1);
pageLayout->addWidget(CBTeam, 2, 2);
return pageLayout;
}
QLayout * PageTraining::footerLayoutDefinition()
{
QBoxLayout * bottomLayout = new QVBoxLayout();
const QIcon& lp = QIcon(":/res/Start.png");
QSize sz = lp.actualSize(QSize(65535, 65535));
btnStart = new QPushButton();
btnStart->setStyleSheet("padding: 5px 10px");
btnStart->setText(QPushButton::tr("Start"));
btnStart->setWhatsThis(tr("Start fighting"));
btnStart->setMinimumWidth(sz.width() + 60);
btnStart->setIcon(lp);
btnStart->setFixedHeight(50);
btnStart->setIconSize(sz);
btnStart->setFlat(true);
btnStart->setSizePolicy(QSizePolicy::Fixed, QSizePolicy::Fixed);
bottomLayout->addWidget(btnStart);
bottomLayout->setAlignment(btnStart, Qt::AlignRight | Qt::AlignVCenter);
return bottomLayout;
}
void PageTraining::connectSignals()
{
connect(lstTrainings, SIGNAL(currentItemChanged(QListWidgetItem*, QListWidgetItem*)), this, SLOT(updateInfo()));
connect(lstTrainings, SIGNAL(itemClicked(QListWidgetItem*)), this, SLOT(updateInfo()));
connect(lstTrainings, SIGNAL(itemDoubleClicked(QListWidgetItem*)), this, SLOT(startSelected()));
connect(lstChallenges, SIGNAL(currentItemChanged(QListWidgetItem*, QListWidgetItem*)), this, SLOT(updateInfo()));
connect(lstChallenges, SIGNAL(itemClicked(QListWidgetItem*)), this, SLOT(updateInfo()));
connect(lstChallenges, SIGNAL(itemDoubleClicked(QListWidgetItem*)), this, SLOT(startSelected()));
connect(lstScenarios, SIGNAL(currentItemChanged(QListWidgetItem*, QListWidgetItem*)), this, SLOT(updateInfo()));
connect(lstScenarios, SIGNAL(itemClicked(QListWidgetItem*)), this, SLOT(updateInfo()));
connect(lstScenarios, SIGNAL(itemDoubleClicked(QListWidgetItem*)), this, SLOT(startSelected()));
connect(tbw, SIGNAL(currentChanged(int)), this, SLOT(updateInfo()));
connect(btnPreview, SIGNAL(clicked()), this, SLOT(startSelected()));
connect(btnStart, SIGNAL(clicked()), this, SLOT(startSelected()));
}
PageTraining::PageTraining(QWidget* parent) : AbstractPage(parent)
{
initPage();
DataManager & dataMgr = DataManager::instance();
// get locale
QSettings settings(dataMgr.settingsFileName(),
QSettings::IniFormat);
QString loc = QLocale().name();
QString infoFile = QString("physfs://Locale/missions_" + loc + ".txt");
    // if file is non-existent, try with language only
if (!QFile::exists(infoFile))
infoFile = QString("physfs://Locale/missions_" + loc.remove(QRegExp("_.*$")) + ".txt");
    // fallback if file for current locale is non-existent
if (!QFile::exists(infoFile))
infoFile = QString("physfs://Locale/missions_en.txt");
// preload mission info for current locale
m_info = new QSettings(infoFile, QSettings::IniFormat, this);
m_info->setIniCodec("UTF-8");
QStringList m_list;
QListWidget * m_widget;
QString subFolder;
for(int i=1; i<=3; i++) {
switch(i) {
case 1:
subFolder = "Training";
m_widget = lstTrainings;
break;
case 2:
subFolder = "Challenge";
m_widget = lstChallenges;
break;
case 3:
subFolder = "Scenario";
m_widget = lstScenarios;
break;
}
// scripts to load
// first, load scripts in order specified in order.cfg (if present)
QFile orderFile(QString("physfs://Missions/%1/order.cfg").arg(subFolder));
QStringList orderedMissions;
if (orderFile.open(QFile::ReadOnly))
{
QString m_id;
QTextStream input(&orderFile);
while(true)
{
m_id = input.readLine();
if(m_id.isNull() || m_id.isEmpty())
{
break;
}
QListWidgetItem * item = new QListWidgetItem(m_id);
QString name = item->text().replace("_", " ");
name = m_info->value(m_id + ".name", name).toString();
item->setText(name);
item->setData(Qt::UserRole, m_id);
m_widget->addItem(item);
orderedMissions << m_id;
}
}
// then, just load anything else in no particular order
m_list = dataMgr.entryList(
"Missions/" + subFolder,
QDir::Files, QStringList("*.lua")).
replaceInStrings(QRegExp("\\.lua$"), "");
foreach (const QString & m_id, m_list)
{
// Disallow duplicates from order.cfg
if (orderedMissions.contains(m_id))
{
continue;
}
QListWidgetItem * item = new QListWidgetItem(m_id);
// fallback name: replace underscores in mission name with spaces
QString name = item->text().replace("_", " ");
// see if we can get a prettier/translated name
name = m_info->value(m_id + ".name", name).toString();
item->setText(name);
// store original name in data
item->setData(Qt::UserRole, m_id);
m_widget->addItem(item);
}
}
updateInfo();
// pre-select first mission
if (lstTrainings->count() > 0)
lstTrainings->setCurrentRow(0);
if (lstChallenges->count() > 0)
lstChallenges->setCurrentRow(0);
if (lstScenarios->count() > 0)
lstScenarios->setCurrentRow(0);
}
QString PageTraining::getSubFolderOfSelected()
{
QString subFolder;
if (tbw->currentWidget() == lstTrainings) {
subFolder = "Training";
} else if (tbw->currentWidget() == lstChallenges) {
subFolder = "Challenge";
} else if (tbw->currentWidget() == lstScenarios) {
subFolder = "Scenario";
} else {
subFolder = "Training";
}
return subFolder;
}
void PageTraining::startSelected()
{
QListWidget *list;
list = (QListWidget*) tbw->currentWidget();
QListWidgetItem * curItem = list->currentItem();
if ((curItem != NULL) && (CBTeam->currentIndex() != -1))
emit startMission(curItem->data(Qt::UserRole).toString(), getSubFolderOfSelected());
}
void PageTraining::updateInfo()
{
if (tbw->currentWidget())
{
QString subFolder;
QListWidget *list;
subFolder = getSubFolderOfSelected();
list = (QListWidget*) tbw->currentWidget();
if (list->currentItem())
{
QString missionName = list->currentItem()->data(Qt::UserRole).toString();
QString thumbFile = "physfs://Graphics/Missions/" +
subFolder + "/" +
missionName +
"@2x.png";
if (QFile::exists(thumbFile))
btnPreview->setIcon(QIcon(thumbFile));
else if (tbw->currentWidget() == lstChallenges)
btnPreview->setIcon(QIcon(":/res/Challenges.png"));
else if (tbw->currentWidget() == lstScenarios)
// TODO: Prettier scenario fallback image
btnPreview->setIcon(QIcon(":/res/Scenarios.png"));
else
btnPreview->setIcon(QIcon(":/res/Trainings.png"));
btnPreview->setWhatsThis(tr("Start fighting"));
QString caption = m_info->value(missionName + ".name",
list->currentItem()->text()).toString();
QString description = m_info->value(missionName + ".desc",
tr("No description available")).toString();
lblCaption->setText("<h2>" + caption +"</h2>");
lblDescription->setText(description);
// Challenge highscores
QString highscoreText = QString("");
QString teamName = CBTeam->currentText();
if (missionValueExists(missionName, teamName, "Highscore"))
highscoreText = highscoreText +
//: Highest score of a team
tr("Team highscore: %1")
.arg(getMissionValue(missionName, teamName, "Highscore").toString()) + "\n";
if (missionValueExists(missionName, teamName, "Lowscore"))
highscoreText = highscoreText +
//: Lowest score of a team
tr("Team lowscore: %1")
.arg(getMissionValue(missionName, teamName, "Lowscore").toString()) + "\n";
if (missionValueExists(missionName, teamName, "AccuracyRecord"))
highscoreText = highscoreText +
//: Best accuracy of a team (in a challenge)
tr("Team's top accuracy: %1%")
.arg(getMissionValue(missionName, teamName, "AccuracyRecord").toString()) + "\n";
if (missionValueExists(missionName, teamName, "TimeRecord"))
{
double time = ((double) getMissionValue(missionName, teamName, "TimeRecord").toInt()) / 1000.0;
highscoreText = highscoreText + tr("Team's best time: %L1 s").arg(time, 0, 'f', 3) + "\n";
}
if (missionValueExists(missionName, teamName, "TimeRecordHigh"))
{
double time = ((double) getMissionValue(missionName, teamName, "TimeRecordHigh").toInt()) / 1000.0;
highscoreText = highscoreText + tr("Team's longest time: %L1 s").arg(time, 0, 'f', 3) + "\n";
}
lblHighscores->setText(highscoreText);
}
else
{
btnPreview->setIcon(QIcon(":/res/Trainings.png"));
lblCaption->setText(tr("Select a mission!"));
lblDescription->setText("");
lblHighscores->setText("");
}
}
}
| 6,042 |
504 | package org.dayatang.rule.examples;
public class Helper {
public static void sayStatic() {
System.out.println("hello sayStatic");
}
public void sayNoStatic() {
System.out.println("hello sayNoStatic");
}
}
| 76 |
700 | <gh_stars>100-1000
/*-
* Copyright (c) 2003-2007 <NAME>
* All rights reserved.
*
* Based on libarchive/test/test_read_format_isorr_bz2.c with
* bugs introduced by <NAME> <<EMAIL>> for
* testing ISO9660 image with Joliet extension.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR(S) ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "test.h"
__FBSDID("$FreeBSD: head/lib/libarchive/test/test_read_format_isojoliet_rr.c 201247 2009-12-30 05:59:21Z kientzle $");
/*
Execute the following to rebuild the data for this program:
tail -n +35 test_read_format_isojoliet_rr.c | /bin/sh
rm -rf /tmp/iso
mkdir /tmp/iso
mkdir /tmp/iso/dir
file="long-joliet-file-name.textfile"
echo "hello" >/tmp/iso/$file
ln /tmp/iso/$file /tmp/iso/hardlink
(cd /tmp/iso; ln -s $file symlink)
if [ "$(uname -s)" = "Linux" ]; then # gnu coreutils touch doesn't have -h
TZ=utc touch -afm -t 197001020000.01 /tmp/iso/hardlink /tmp/iso/$file /tmp/iso/dir
TZ=utc touch -afm -t 197001030000.02 /tmp/iso/symlink
TZ=utc touch -afm -t 197001020000.01 /tmp/iso
else
TZ=utc touch -afhm -t 197001020000.01 /tmp/iso/hardlink /tmp/iso/$file /tmp/iso/dir
TZ=utc touch -afhm -t 197001030000.02 /tmp/iso/symlink
TZ=utc touch -afhm -t 197001020000.01 /tmp/iso
fi
F=test_read_format_iso_joliet_rockridge.iso.Z
mkhybrid -J -uid 1 -gid 2 /tmp/iso | compress > $F
uuencode $F $F > $F.uu
exit 1
*/
DEFINE_TEST(test_read_format_isojoliet_rr)
{
const char *refname = "test_read_format_iso_joliet_rockridge.iso.Z";
struct archive_entry *ae;
struct archive *a;
const void *p;
size_t size;
off_t offset;
extract_reference_file(refname);
assert((a = archive_read_new()) != NULL);
assertEqualInt(0, archive_read_support_compression_all(a));
assertEqualInt(0, archive_read_support_format_all(a));
assertEqualInt(ARCHIVE_OK,
archive_read_open_filename(a, refname, 10240));
/* First entry is '.' root directory. */
assertEqualInt(0, archive_read_next_header(a, &ae));
assertEqualString(".", archive_entry_pathname(ae));
assertEqualInt(AE_IFDIR, archive_entry_filetype(ae));
assertEqualInt(2048, archive_entry_size(ae));
assertEqualInt(86401, archive_entry_mtime(ae));
assertEqualInt(0, archive_entry_mtime_nsec(ae));
assertEqualInt(3, archive_entry_stat(ae)->st_nlink);
assertEqualInt(1, archive_entry_uid(ae));
assertEqualIntA(a, ARCHIVE_EOF,
archive_read_data_block(a, &p, &size, &offset));
assertEqualInt((int)size, 0);
/* A directory. */
assertEqualInt(0, archive_read_next_header(a, &ae));
assertEqualString("dir", archive_entry_pathname(ae));
assertEqualInt(AE_IFDIR, archive_entry_filetype(ae));
assertEqualInt(2048, archive_entry_size(ae));
assertEqualInt(86401, archive_entry_mtime(ae));
assertEqualInt(86401, archive_entry_atime(ae));
assertEqualInt(2, archive_entry_stat(ae)->st_nlink);
assertEqualInt(1, archive_entry_uid(ae));
assertEqualInt(2, archive_entry_gid(ae));
/* A regular file with two names ("hardlink" gets returned
* first, so it's not marked as a hardlink). */
assertEqualInt(0, archive_read_next_header(a, &ae));
assertEqualString("long-joliet-file-name.textfile",
archive_entry_pathname(ae));
assertEqualInt(AE_IFREG, archive_entry_filetype(ae));
assert(archive_entry_hardlink(ae) == NULL);
assertEqualInt(6, archive_entry_size(ae));
assertEqualInt(0, archive_read_data_block(a, &p, &size, &offset));
assertEqualInt(6, (int)size);
assertEqualInt(0, offset);
assertEqualInt(0, memcmp(p, "hello\n", 6));
assertEqualInt(86401, archive_entry_mtime(ae));
/* mkisofs records their access time. */
/*assertEqualInt(86401, archive_entry_atime(ae));*/
/* TODO: Actually, libarchive should be able to
* compute nlinks correctly even without RR
* extensions. See comments in libarchive source. */
assertEqualInt(2, archive_entry_nlink(ae));
assertEqualInt(1, archive_entry_uid(ae));
assertEqualInt(2, archive_entry_gid(ae));
/* Second name for the same regular file (this happens to be
* returned second, so does get marked as a hardlink). */
assertEqualInt(0, archive_read_next_header(a, &ae));
assertEqualString("hardlink", archive_entry_pathname(ae));
assertEqualInt(AE_IFREG, archive_entry_filetype(ae));
assertEqualString("long-joliet-file-name.textfile",
archive_entry_hardlink(ae));
assert(!archive_entry_size_is_set(ae));
assertEqualInt(86401, archive_entry_mtime(ae));
assertEqualInt(86401, archive_entry_atime(ae));
/* TODO: See above. */
assertEqualInt(2, archive_entry_nlink(ae));
assertEqualInt(1, archive_entry_uid(ae));
assertEqualInt(2, archive_entry_gid(ae));
/* A symlink to the regular file. */
assertEqualInt(0, archive_read_next_header(a, &ae));
assertEqualString("symlink", archive_entry_pathname(ae));
assertEqualInt(AE_IFLNK, archive_entry_filetype(ae));
assertEqualString("long-joliet-file-name.textfile",
archive_entry_symlink(ae));
assertEqualInt(0, archive_entry_size(ae));
assertEqualInt(172802, archive_entry_mtime(ae));
assertEqualInt(172802, archive_entry_atime(ae));
assertEqualInt(1, archive_entry_nlink(ae));
assertEqualInt(1, archive_entry_uid(ae));
assertEqualInt(2, archive_entry_gid(ae));
/* End of archive. */
assertEqualInt(ARCHIVE_EOF, archive_read_next_header(a, &ae));
/* Verify archive format. */
assertEqualInt(archive_compression(a), ARCHIVE_COMPRESSION_COMPRESS);
assertEqualInt(archive_format(a), ARCHIVE_FORMAT_ISO9660_ROCKRIDGE);
/* Close the archive. */
assertEqualInt(0, archive_read_close(a));
assertEqualInt(0, archive_read_finish(a));
}
| 2,455 |
598 | <filename>eliot/tests/test_journald.py<gh_stars>100-1000
"""
Tests for L{eliot.journald}.
"""
from os import getpid, strerror
from unittest import skipUnless, TestCase
from subprocess import check_output, CalledProcessError, STDOUT
from errno import EINVAL
from sys import argv
from uuid import uuid4
from time import sleep
from six import text_type as unicode
from .._bytesjson import loads
from .._output import MemoryLogger
from .._message import TASK_UUID_FIELD
from .. import start_action, Message, write_traceback
try:
from ..journald import sd_journal_send, JournaldDestination
except ImportError:
sd_journal_send = None
def _journald_available():
"""
:return: Boolean indicating whether journald is available to use.
"""
if sd_journal_send is None:
return False
try:
check_output(["journalctl", "-b", "-n1"], stderr=STDOUT)
except (OSError, CalledProcessError):
return False
return True
def last_journald_message():
"""
@return: Last journald message from this process as a dictionary in
journald JSON format.
"""
    # It may take a little while for messages to actually reach journald, so
    # we write out a marker message and wait until it arrives. We can then be
    # sure the message right before it is the one we want.
marker = unicode(uuid4())
sd_journal_send(MESSAGE=marker.encode("ascii"))
for i in range(500):
messages = check_output(
[
b"journalctl",
b"-a",
b"-o",
b"json",
b"-n2",
b"_PID=" + str(getpid()).encode("ascii"),
]
)
messages = [loads(m) for m in messages.splitlines()]
if len(messages) == 2 and messages[1]["MESSAGE"] == marker:
return messages[0]
sleep(0.01)
raise RuntimeError("Message never arrived?!")
class SdJournaldSendTests(TestCase):
"""
Functional tests for L{sd_journal_send}.
"""
@skipUnless(
_journald_available(), "journald unavailable or inactive on this machine."
)
def setUp(self):
pass
def assert_roundtrip(self, value):
"""
Write a value as a C{MESSAGE} field, assert it is output.
@param value: Value to write as unicode.
"""
sd_journal_send(MESSAGE=value)
result = last_journald_message()
self.assertEqual(value, result["MESSAGE"].encode("utf-8"))
def test_message(self):
"""
L{sd_journal_send} can write a C{MESSAGE} field.
"""
self.assert_roundtrip(b"hello")
def test_percent(self):
"""
L{sd_journal_send} can write a C{MESSAGE} field with a percent.
Underlying C API calls does printf formatting so this is a
plausible failure mode.
"""
self.assert_roundtrip(b"hello%world")
def test_large(self):
"""
L{sd_journal_send} can write a C{MESSAGE} field with a large message.
"""
self.assert_roundtrip(b"hello world" * 20000)
def test_multiple_fields(self):
"""
L{sd_journal_send} can send multiple fields.
"""
sd_journal_send(MESSAGE=b"hello", BONUS_FIELD=b"world")
result = last_journald_message()
self.assertEqual(
(b"hello", b"world"),
(result["MESSAGE"].encode("ascii"), result["BONUS_FIELD"].encode("ascii")),
)
def test_error(self):
"""
L{sd_journal_send} raises an error when it gets a non-0 result
from the underlying API.
"""
with self.assertRaises(IOError) as context:
sd_journal_send(**{"": b"123"})
exc = context.exception
self.assertEqual((exc.errno, exc.strerror), (EINVAL, strerror(EINVAL)))
class JournaldDestinationTests(TestCase):
"""
Tests for L{JournaldDestination}.
"""
@skipUnless(
_journald_available(), "journald unavailable or inactive on this machine."
)
def setUp(self):
self.destination = JournaldDestination()
self.logger = MemoryLogger()
def test_json(self):
"""
The message is stored as JSON in the MESSAGE field.
"""
Message.new(hello="world", key=123).write(self.logger)
message = self.logger.messages[0]
self.destination(message)
self.assertEqual(loads(last_journald_message()["MESSAGE"]), message)
def assert_field_for(self, message, field_name, field_value):
"""
If the given message is logged by Eliot, the given journald field has
the expected value.
@param message: Dictionary to log.
@param field_name: Journald field name to check.
@param field_value: Expected value for the field.
"""
self.destination(message)
self.assertEqual(last_journald_message()[field_name], field_value)
def test_action_type(self):
"""
The C{action_type} is stored in the ELIOT_TYPE field.
"""
action_type = "test:type"
start_action(self.logger, action_type=action_type)
self.assert_field_for(self.logger.messages[0], "ELIOT_TYPE", action_type)
def test_message_type(self):
"""
The C{message_type} is stored in the ELIOT_TYPE field.
"""
message_type = "test:type:message"
Message.new(message_type=message_type).write(self.logger)
self.assert_field_for(self.logger.messages[0], "ELIOT_TYPE", message_type)
def test_no_type(self):
"""
An empty string is stored in ELIOT_TYPE if no type is known.
"""
Message.new().write(self.logger)
self.assert_field_for(self.logger.messages[0], "ELIOT_TYPE", "")
def test_uuid(self):
"""
The task UUID is stored in the ELIOT_TASK field.
"""
start_action(self.logger, action_type="xxx")
self.assert_field_for(
self.logger.messages[0],
"ELIOT_TASK",
self.logger.messages[0][TASK_UUID_FIELD],
)
def test_info_priorities(self):
"""
Untyped messages, action start, successful action end, random typed
message all get priority 6 ("info").
"""
with start_action(self.logger, action_type="xxx"):
Message.new(message_type="msg").write(self.logger)
Message.new(x=123).write(self.logger)
priorities = []
for message in self.logger.messages:
self.destination(message)
priorities.append(last_journald_message()["PRIORITY"])
self.assertEqual(priorities, ["6", "6", "6", "6"])
def test_error_priority(self):
"""
A failed action gets priority 3 ("error").
"""
try:
with start_action(self.logger, action_type="xxx"):
raise ZeroDivisionError()
except ZeroDivisionError:
pass
self.assert_field_for(self.logger.messages[-1], "PRIORITY", "3")
def test_critical_priority(self):
"""
A traceback gets priority 2 ("critical").
"""
try:
raise ZeroDivisionError()
except ZeroDivisionError:
write_traceback(logger=self.logger)
self.assert_field_for(self.logger.serialize()[-1], "PRIORITY", "2")
def test_identifier(self):
"""
C{SYSLOG_IDENTIFIER} defaults to C{os.path.basename(sys.argv[0])}.
"""
identifier = "/usr/bin/testing123"
try:
original = argv[0]
argv[0] = identifier
# Recreate JournaldDestination with the newly set argv[0].
self.destination = JournaldDestination()
Message.new(message_type="msg").write(self.logger)
self.assert_field_for(
self.logger.messages[0], "SYSLOG_IDENTIFIER", "testing123"
)
finally:
argv[0] = original
| 3,566 |
428 | <filename>Java/Loon-Neo/srpg/loon/srpg/field/SRPGFieldElement.java
package loon.srpg.field;
/**
* Copyright 2008 - 2011
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* @project loonframework
* @author chenpeng
* @email:<EMAIL>
* @version 0.1
*/
public class SRPGFieldElement {
public int atk;
public int def;
public int state;
public String depict;
public int imgId;
public String name;
public int index;
public int mv;
SRPGFieldElement() {
}
public SRPGFieldElement(int id, String name, String depict, int mv,
int atk, int def, int state) {
this.imgId = id;
this.name = name;
this.mv = mv;
this.atk = atk;
this.def = def;
this.depict = depict;
this.state = state;
}
}
| 408 |
5,169 | <gh_stars>1000+
{
"name": "CDZUIKitAutoLayoutDebugging",
"version": "0.0.7",
"summary": "Tools for easier Auto Layout debugging",
"description": " Tools for easier Auto Layout debugging on iOS 7+.\n",
"homepage": "https://github.com/cdzombak/CDZUIKitAutoLayoutDebugging",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>"
},
"social_media_url": "http://twitter.com/cdzombak",
"platforms": {
"ios": "7.0"
},
"source": {
"git": "https://github.com/cdzombak/CDZUIKitAutoLayoutDebugging.git",
"tag": "0.0.7"
},
"source_files": "UIKitAutoLayoutDebugging/*.{h,m}",
"frameworks": [
"Foundation",
"UIKit"
],
"requires_arc": true
}
| 319 |
608 | /*
* Copyright (c) 2021 The ZLToolKit project authors. All Rights Reserved.
*
* This file is part of ZLToolKit(https://github.com/ZLMediaKit/ZLToolKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#include "Server.h"
using namespace std;
namespace toolkit {
Server::Server(EventPoller::Ptr poller) {
_poller = poller ? std::move(poller) : EventPollerPool::Instance().getPoller();
}
////////////////////////////////////////////////////////////////////////////////////
SessionHelper::SessionHelper(const std::weak_ptr<Server> &server, Session::Ptr session) {
_server = server;
_session = std::move(session);
    // Record the session in the global map so it can be managed later
_session_map = SessionMap::Instance().shared_from_this();
_identifier = _session->getIdentifier();
_session_map->add(_identifier, _session);
}
SessionHelper::~SessionHelper() {
if (!_server.lock()) {
        // Make sure to notify the TcpSession that it has been detached from the TcpServer
_session->onError(SockException(Err_other, "Server shutdown!"));
}
    // Remove the corresponding record from the global map
_session_map->del(_identifier);
}
const Session::Ptr &SessionHelper::session() const {
return _session;
}
////////////////////////////////////////////////////////////////////////////////////
bool SessionMap::add(const string &tag, const Session::Ptr &session) {
lock_guard<mutex> lck(_mtx_session);
return _map_session.emplace(tag, session).second;
}
bool SessionMap::del(const string &tag) {
lock_guard<mutex> lck(_mtx_session);
return _map_session.erase(tag);
}
Session::Ptr SessionMap::get(const string &tag) {
lock_guard<mutex> lck(_mtx_session);
auto it = _map_session.find(tag);
if (it == _map_session.end()) {
return nullptr;
}
return it->second.lock();
}
void SessionMap::for_each_session(const function<void(const string &id, const Session::Ptr &session)> &cb) {
lock_guard<mutex> lck(_mtx_session);
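    // Iterate over every registered session; entries whose weak_ptr has expired
    // (i.e. the session object is already gone) are pruned from the map in place.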
for (auto it = _map_session.begin(); it != _map_session.end();) {
auto session = it->second.lock();
if (!session) {
it = _map_session.erase(it);
continue;
}
cb(it->first, session);
++it;
}
}
} // namespace toolkit | 895 |
435 | package datawave.query.postprocessing.tf;
import java.util.Collections;
import java.util.Map;
import datawave.query.attributes.Document;
import datawave.query.util.Tuple2;
import datawave.query.util.Tuple3;
import datawave.query.util.Tuples;
import org.apache.accumulo.core.data.Key;
import com.google.common.base.Function;
public class EmptyTermFrequencyFunction implements Function<Tuple2<Key,Document>,Tuple3<Key,Document,Map<String,Object>>> {
private static final Map<String,Object> emptyContext = Collections.emptyMap();
@Override
public Tuple3<Key,Document,Map<String,Object>> apply(Tuple2<Key,Document> from) {
return Tuples.tuple(from.first(), from.second(), emptyContext);
}
}
| 258 |
1,444 |
package mage.cards.f;
import java.util.UUID;
import mage.abilities.Ability;
import mage.abilities.effects.RequirementEffect;
import mage.abilities.effects.common.counter.AddCountersTargetEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.Duration;
import mage.counters.CounterType;
import mage.filter.common.FilterCreaturePermanent;
import mage.filter.predicate.other.AnotherTargetPredicate;
import mage.game.Game;
import mage.game.permanent.Permanent;
import mage.target.common.TargetControlledCreaturePermanent;
import mage.target.common.TargetCreaturePermanent;
/**
*
* @author jeffwadsworth
*/
public final class FeralContest extends CardImpl {
public FeralContest(UUID ownerId, CardSetInfo setInfo) {
super(ownerId,setInfo,new CardType[]{CardType.SORCERY},"{3}{G}");
// Put a +1/+1 counter on target creature you control.
this.getSpellAbility().addEffect(new AddCountersTargetEffect(CounterType.P1P1.createInstance()));
TargetControlledCreaturePermanent target1 = new TargetControlledCreaturePermanent();
target1.setTargetTag(1);
this.getSpellAbility().addTarget(target1);
// Another target creature blocks it this turn if able.
this.getSpellAbility().addEffect(new FeralContestEffect());
FilterCreaturePermanent filter = new FilterCreaturePermanent("another creature (must block this turn)");
filter.add(new AnotherTargetPredicate(2));
TargetCreaturePermanent target2 = new TargetCreaturePermanent(filter);
target2.setTargetTag(2);
this.getSpellAbility().addTarget(target2);
}
private FeralContest(final FeralContest card) {
super(card);
}
@Override
public FeralContest copy() {
return new FeralContest(this);
}
}
class FeralContestEffect extends RequirementEffect {
public FeralContestEffect() {
this(Duration.EndOfTurn);
}
public FeralContestEffect(Duration duration) {
super(duration);
staticText = "Another target creature blocks it this turn if able";
}
public FeralContestEffect(final FeralContestEffect effect) {
super(effect);
}
@Override
public boolean applies(Permanent permanent, Ability source, Game game) {
if (permanent.getId().equals(source.getTargets().get(1).getFirstTarget())) {
return permanent.canBlock(source.getFirstTarget(), game);
}
return false;
}
@Override
public boolean mustAttack(Game game) {
return false;
}
@Override
public boolean mustBlock(Game game) {
return true;
}
@Override
public UUID mustBlockAttacker(Ability source, Game game) {
return source.getFirstTarget();
}
@Override
public FeralContestEffect copy() {
return new FeralContestEffect(this);
}
}
| 1,057 |
3,294 | // <snippet1>
// wrl-consume-events.cpp
// compile with: runtimeobject.lib
// <snippet2>
#include <Windows.Devices.Enumeration.h>
#include <wrl/event.h>
#include <stdio.h>
using namespace ABI::Windows::Devices::Enumeration;
using namespace ABI::Windows::Foundation;
using namespace Microsoft::WRL;
using namespace Microsoft::WRL::Wrappers;
// </snippet2>
// Prints an error string for the provided source code line and HRESULT
// value and returns the HRESULT value as an int.
int PrintError(unsigned int line, HRESULT hr)
{
wprintf_s(L"ERROR: Line:%d HRESULT: 0x%X\n", line, hr);
return hr;
}
int wmain()
{
// Type define the event handler types to make the code more readable.
typedef __FITypedEventHandler_2_Windows__CDevices__CEnumeration__CDeviceWatcher_Windows__CDevices__CEnumeration__CDeviceInformation AddedHandler;
typedef __FITypedEventHandler_2_Windows__CDevices__CEnumeration__CDeviceWatcher_IInspectable EnumerationCompletedHandler;
typedef __FITypedEventHandler_2_Windows__CDevices__CEnumeration__CDeviceWatcher_IInspectable StoppedHandler;
// <snippet7>
// Counts the number of enumerated devices.
unsigned int deviceCount = 0;
// Event registration tokens that enable us to later unsubscribe from events.
EventRegistrationToken addedToken;
EventRegistrationToken stoppedToken;
EventRegistrationToken enumCompletedToken;
// </snippet7>
// <snippet3>
// Initialize the Windows Runtime.
RoInitializeWrapper initialize(RO_INIT_MULTITHREADED);
if (FAILED(initialize))
{
return PrintError(__LINE__, initialize);
}
// </snippet3>
// <snippet4>
// Create an event that is set after device enumeration completes. We later use this event to wait for the timer to complete.
// This event is for demonstration only in a console app. In most apps, you typically don't wait for async operations to complete.
Event enumerationCompleted(CreateEventEx(nullptr, nullptr, CREATE_EVENT_MANUAL_RESET, WRITE_OWNER | EVENT_ALL_ACCESS));
HRESULT hr = enumerationCompleted.IsValid() ? S_OK : HRESULT_FROM_WIN32(GetLastError());
if (FAILED(hr))
{
return PrintError(__LINE__, hr);
}
// </snippet4>
// <snippet5>
// Get the activation factory for the IDeviceWatcher interface.
ComPtr<IDeviceInformationStatics> watcherFactory;
hr = ABI::Windows::Foundation::GetActivationFactory(HStringReference(RuntimeClass_Windows_Devices_Enumeration_DeviceInformation).Get(), &watcherFactory);
if (FAILED(hr))
{
return PrintError(__LINE__, hr);
}
// </snippet5>
// <snippet6>
// Create a IDeviceWatcher object from the factory.
ComPtr<IDeviceWatcher> watcher;
hr = watcherFactory->CreateWatcher(&watcher);
if (FAILED(hr))
{
return PrintError(__LINE__, hr);
}
// </snippet6>
// <snippet8>
// Subscribe to the Added event.
hr = watcher->add_Added(Callback<AddedHandler>([&deviceCount](IDeviceWatcher* watcher, IDeviceInformation*) -> HRESULT
{
// Print a message and increment the device count.
// When we reach 10 devices, stop enumerating devices.
wprintf_s(L"Added device...\n");
deviceCount++;
if (deviceCount == 10)
{
return watcher->Stop();
}
return S_OK;
}).Get(), &addedToken);
if (FAILED(hr))
{
return PrintError(__LINE__, hr);
}
hr = watcher->add_Stopped(Callback<StoppedHandler>([=, &enumerationCompleted](IDeviceWatcher* watcher, IInspectable*) -> HRESULT
{
wprintf_s(L"Device enumeration stopped.\nRemoving event handlers...");
// Unsubscribe from the events. This is shown for demonstration.
// The need to remove event handlers depends on the requirements of
// your app. For instance, if you only need to handle an event for
// a short period of time, you might remove the event handler when you
// no longer need it. If you handle an event for the duration of the app,
// you might not need to explicitly remove it.
HRESULT hr1 = watcher->remove_Added(addedToken);
HRESULT hr2 = watcher->remove_Stopped(stoppedToken);
HRESULT hr3 = watcher->remove_EnumerationCompleted(enumCompletedToken);
// Set the completion event and return.
SetEvent(enumerationCompleted.Get());
return FAILED(hr1) ? hr1 : FAILED(hr2) ? hr2 : hr3;
}).Get(), &stoppedToken);
if (FAILED(hr))
{
return PrintError(__LINE__, hr);
}
// Subscribe to the EnumerationCompleted event.
hr = watcher->add_EnumerationCompleted(Callback<EnumerationCompletedHandler>([](IDeviceWatcher* watcher, IInspectable*) -> HRESULT
{
wprintf_s(L"Enumeration completed.\n");
return watcher->Stop();
}).Get(), &enumCompletedToken);
if (FAILED(hr))
{
return PrintError(__LINE__, hr);
}
// </snippet8>
// <snippet9>
wprintf_s(L"Starting device enumeration...\n");
hr = watcher->Start();
if (FAILED(hr))
{
return PrintError(__LINE__, hr);
}
// </snippet9>
// <snippet10>
// Wait for the operation to complete.
WaitForSingleObjectEx(enumerationCompleted.Get(), INFINITE, FALSE);
wprintf_s(L"Enumerated %u devices.\n", deviceCount);
// All smart pointers and RAII objects go out of scope here.
// </snippet10>
}
/*
Sample output:
Starting device enumeration...
Added device...
Added device...
Added device...
Added device...
Added device...
Added device...
Added device...
Added device...
Added device...
Added device...
Device enumeration stopped.
Removing event handlers...
Enumerated 10 devices.
*/
// </snippet1> | 2,129 |
4,036 | // Generated automatically from org.apache.commons.collections4.collection.TransformedCollection for testing purposes
package org.apache.commons.collections4.collection;
import java.util.Collection;
import org.apache.commons.collections4.Transformer;
import org.apache.commons.collections4.collection.AbstractCollectionDecorator;
public class TransformedCollection<E> extends AbstractCollectionDecorator<E>
{
protected TransformedCollection() {}
protected Collection<E> transform(Collection<? extends E> p0){ return null; }
protected E transform(E p0){ return null; }
protected TransformedCollection(Collection<E> p0, Transformer<? super E, ? extends E> p1){}
protected final Transformer<? super E, ? extends E> transformer = null;
public boolean add(E p0){ return false; }
public boolean addAll(Collection<? extends E> p0){ return false; }
public static <E> TransformedCollection<E> transformedCollection(Collection<E> p0, Transformer<? super E, ? extends E> p1){ return null; }
public static <E> TransformedCollection<E> transformingCollection(Collection<E> p0, Transformer<? super E, ? extends E> p1){ return null; }
}
| 330 |
1,383 | <reponame>Benatti1991/chrono<filename>src/chrono_thirdparty/yafel/DualNumber.hpp<gh_stars>1000+
#ifndef YAFEL_DUALNUMBER_HPP
#define YAFEL_DUALNUMBER_HPP
#include <cmath>
namespace yafel {
template<typename T>
class DualNumber
{
public:
T first;
T second;
DualNumber() : DualNumber(0, 0)
{}
DualNumber(T v1) : DualNumber(v1, 0)
{}
DualNumber(T v1, T v2) : first(v1), second(v2)
{}
// arithmetic operator overloading (+, -, *, /)
DualNumber<T> &operator+=(const DualNumber<T> &rhs)
{
second += rhs.second;
first += rhs.first;
return *this;
}
DualNumber<T> &operator-=(const DualNumber<T> &rhs)
{
second -= rhs.second;
first -= rhs.first;
return *this;
}
DualNumber<T> &operator*=(const DualNumber<T> &rhs)
{
second = second * rhs.first + first * rhs.second;
first = first * rhs.first;
return *this;
}
DualNumber<T> &operator/=(const DualNumber<T> &rhs)
{
second = (second * rhs.first - first * rhs.second) / (rhs.first * rhs.first);
first = first / rhs.first;
return *this;
}
DualNumber<T> operator+(DualNumber<T> rhs) const
{
return (rhs += *this);
}
DualNumber<T> operator-(const DualNumber<T> &rhs) const
{
DualNumber<T> copy(*this);
return (copy -= rhs);
}
DualNumber<T> operator*(DualNumber<T> rhs) const
{
return (rhs *= *this);
}
DualNumber<T> operator/(const DualNumber<T> &rhs) const
{
DualNumber<T> copy(*this);
return (copy /= rhs);
}
// comparison operators
bool operator>(const DualNumber<T> &rhs) const
{
return (first > rhs.first);
}
bool operator<(const DualNumber<T> &rhs) const
{
return (rhs > *this);
}
// unary operator-()
DualNumber<T> operator-() const
{
return DualNumber<T>(-first, -second);
}
};
template<typename T>
DualNumber<T> make_dual(T lhs)
{
return DualNumber<T>(lhs);
}
template<typename T, typename L>
DualNumber<T> operator+(L lhs, DualNumber<T> rhs)
{
return (make_dual(static_cast<T>(lhs)) + rhs);
}
template<typename T, typename L>
DualNumber<T> operator-(L lhs, DualNumber<T> rhs)
{
return (make_dual(static_cast<T>(lhs)) - rhs);
}
template<typename T, typename L>
DualNumber<T> operator*(L lhs, DualNumber<T> rhs)
{
return (make_dual(static_cast<T>(lhs)) * rhs);
}
template<typename T, typename L>
DualNumber<T> operator/(L lhs, DualNumber<T> rhs)
{
return (make_dual(static_cast<T>(lhs)) / rhs);
}
// More Useful Functions
template<typename T>
DualNumber<T> sin(DualNumber<T> x)
{
using std::sin;
using std::cos;
return DualNumber<T>(sin(x.first), x.second * cos(x.first));
}
template<typename T>
DualNumber<T> cos(DualNumber<T> x)
{
using std::sin;
using std::cos;
return DualNumber<T>(cos(x.first), -x.second * sin(x.first));
}
template<typename T>
DualNumber<T> exp(DualNumber<T> x)
{
using std::exp;
return DualNumber<T>(exp(x.first), x.second * exp(x.first));
}
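// Usage sketch (not part of the original yafel sources): seeding the dual part with 1
// turns ordinary arithmetic on DualNumber into forward-mode automatic differentiation,
// so the second component carries the derivative alongside the value.
//
//   yafel::DualNumber<double> x(2.0, 1.0);   // value 2.0, derivative seed dx/dx = 1
//   auto y = sin(x) * x + exp(x);            // y.first  == f(2.0)
//                                            // y.second == f'(2.0) = sin(2) + 2*cos(2) + exp(2)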
} //end namespace yafel
#endif
| 1,432 |
435 | <gh_stars>100-1000
{
"copyright_text": "Standard YouTube License",
"description": "PyData SF 2016\n\nUsing Jupyter Notebooks and Python code, we will present several data-driven examples of some simple, powerful, yet relatively uncommon, ways of thinking as a good Data Scientist. We will also warn about a few dangerous ways of thinking to avoid. Our Jupyter Notebooks and slides will be made freely available after the talk.",
"duration": 2125,
"language": "eng",
"recorded": "2016-08-24",
"related_urls": [],
"speakers": [
"<NAME>"
],
"tags": [],
"thumbnail_url": "https://i.ytimg.com/vi/oj7kKE3zKig/maxresdefault.jpg",
"title": "Mental Models to Use and Avoid as a Data Scientist",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=oj7kKE3zKig"
}
]
}
| 287 |
3,799 | <gh_stars>1000+
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.mediacompat.client;
import static org.junit.Assert.assertEquals;
import android.content.Context;
import android.media.AudioManager;
import android.os.Build;
import androidx.media.AudioManagerCompat;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SdkSuppress;
import androidx.test.filters.SmallTest;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Tests for {@link AudioManagerCompat}. */
@SmallTest
@RunWith(AndroidJUnit4.class)
public class AudioManagerCompatTest {
private AudioManager mAudioManager;
private int mStreamType;
@Before
public void setUp() {
Context context = ApplicationProvider.getApplicationContext();
mAudioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
mStreamType = AudioManager.STREAM_MUSIC;
}
@Test
public void getStreamMaxVolume_returnsStreamMaxVolume() {
assertEquals(
mAudioManager.getStreamMaxVolume(mStreamType),
AudioManagerCompat.getStreamMaxVolume(mAudioManager, mStreamType));
}
@SdkSuppress(minSdkVersion = Build.VERSION_CODES.P)
@Test
public void getStreamMinVolume_fromP_returnsStreamMinVolume() {
assertEquals(
mAudioManager.getStreamMinVolume(mStreamType),
AudioManagerCompat.getStreamMinVolume(mAudioManager, mStreamType));
}
@SdkSuppress(maxSdkVersion = Build.VERSION_CODES.O_MR1)
@Test
public void getStreamMinVolume_underP_returnsZero() {
assertEquals(0, AudioManagerCompat.getStreamMinVolume(mAudioManager, mStreamType));
}
}
| 798 |
419 | <filename>app/src/main/java/com/maning/mnprogressdialog/OtherActivity.java
package com.maning.mnprogressdialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import com.maning.mndialoglibrary.MProgressDialog;
import com.maning.mndialoglibrary.MStatusDialog;
public class OtherActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_other);
}
@Override
public void onBackPressed() {
// MProgressDialog.showProgress(this, "");
// new MStatusDialog(this).show("保存成功", getResources().getDrawable(R.drawable.mn_icon_dialog_ok));
super.onBackPressed();
}
}
| 282 |
879 | package org.zstack.header.identity;
import org.zstack.header.message.APIReply;
import org.zstack.header.rest.RestResponse;
import java.util.HashMap;
import java.util.Map;
/**
* Created by xing5 on 2016/4/8.
*/
@RestResponse(allTo = "inventories")
public class APIGetResourceAccountReply extends APIReply {
private Map<String, AccountInventory> inventories;
public Map<String, AccountInventory> getInventories() {
return inventories;
}
public void setInventories(Map<String, AccountInventory> inventories) {
this.inventories = inventories;
}
public static APIGetResourceAccountReply __example__() {
APIGetResourceAccountReply reply = new APIGetResourceAccountReply();
AccountInventory accountInventory = new AccountInventory();
accountInventory.setName("test");
accountInventory.setType(AccountType.Normal.toString());
accountInventory.setUuid(uuid());
Map<String, AccountInventory> inventories = new HashMap<>();
inventories.put(uuid(), accountInventory);
inventories.put(uuid(), accountInventory);
reply.setInventories(inventories);
return reply;
}
}
| 430 |
2,366 | /*
* =============================================================================
*
* Copyright (c) 2011-2018, The THYMELEAF team (http://www.thymeleaf.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* =============================================================================
*/
package org.thymeleaf.util;
import org.thymeleaf.exceptions.TemplateProcessingException;
import org.thymeleaf.templatemode.TemplateMode;
import org.unbescape.css.CssEscape;
import org.unbescape.html.HtmlEscape;
import org.unbescape.javascript.JavaScriptEscape;
import org.unbescape.xml.XmlEscape;
/**
*
* @author <NAME>
*
* @since 3.0.0
*
*/
public final class EscapedAttributeUtils {
public static String escapeAttribute(final TemplateMode templateMode, final String input) {
if (input == null) {
return null;
}
Validate.notNull(templateMode, "Template mode cannot be null");
/*
* Depending on the template mode that we are using, we might be receiving element attributes escaped in
* different ways.
*
* HTML and XML have their own escaping/unescaping rules, which we can easily apply by means
* of the corresponding Unbescape utility methods. TEXT, JAVASCRIPT and CSS are left out because there are no
* attributes to be output in those modes as such.
*
* There is no standard way to escape/unescape in TEXT modes, but given TEXT mode is many times used for
* markup (HTML or XML templates or inlined fragments), we will use HTML escaping/unescaping for TEXT mode.
* Besides, this is consistent with the fact that TEXT-mode escaped output will also be HTML-escaped by
* processors and inlining utilities in the Standard Dialects.
*/
switch (templateMode) {
case HTML:
return HtmlEscape.escapeHtml4Xml(input);
case XML:
return XmlEscape.escapeXml10Attribute(input);
default:
throw new TemplateProcessingException(
"Unrecognized template mode " + templateMode + ". Cannot produce escaped attributes for " +
"this template mode.");
}
}
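    // Illustrative example (not from the original Thymeleaf sources): in HTML mode an
    // attribute value such as "x < 1 & y" is escaped by Unbescape's escapeHtml4Xml to
    // "x &lt; 1 &amp; y"; in XML mode escapeXml10Attribute produces the same entities
    // for '<' and '&' in attribute values.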
public static String unescapeAttribute(final TemplateMode templateMode, final String input) {
if (input == null) {
return null;
}
Validate.notNull(templateMode, "Template mode cannot be null");
/*
* Depending on the template mode that we are using, we might be receiving element attributes escaped in
* different ways.
*
* HTML, XML, JAVASCRIPT and CSS have their own escaping/unescaping rules, which we can easily apply by means
* of the corresponding Unbescape utility methods.
*
* There is no standard way to escape/unescape in TEXT modes, but given TEXT mode is many times used for
* markup (HTML or XML templates or inlined fragments), we will use HTML escaping/unescaping for TEXT mode.
* Besides, this is consistent with the fact that TEXT-mode escaped output will also be HTML-escaped by
* processors and inlining utilities in the Standard Dialects.
*/
switch (templateMode) {
case TEXT:
// fall-through
case HTML:
return HtmlEscape.unescapeHtml(input);
case XML:
return XmlEscape.unescapeXml(input);
case JAVASCRIPT:
return JavaScriptEscape.unescapeJavaScript(input);
case CSS:
return CssEscape.unescapeCss(input);
case RAW:
return input;
default:
throw new TemplateProcessingException(
"Unrecognized template mode " + templateMode + ". Cannot unescape attribute value for " +
"this template mode.");
}
}
private EscapedAttributeUtils() {
super();
}
}
| 1,713 |
547 | <gh_stars>100-1000
#import <UIKit/UIKit.h>
FOUNDATION_EXPORT double Pods_MYTableViewIndex_TestsVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_MYTableViewIndex_TestsVersionString[];
| 67 |
1,107 | <filename>NN/Errors.py
class LayerError(Exception):
pass
class BuildLayerError(Exception):
pass
class BuildNetworkError(Exception):
pass
| 50 |
735 | // NOTE: This file was generated by the ServiceGenerator.
// ----------------------------------------------------------------------------
// API:
// Access Context Manager API (accesscontextmanager/v1)
// Description:
// An API for setting attribute based access control to requests to GCP
// services.
// Documentation:
// https://cloud.google.com/access-context-manager/docs/reference/rest/
#if SWIFT_PACKAGE || GTLR_USE_MODULAR_IMPORT
@import GoogleAPIClientForRESTCore;
#elif GTLR_BUILT_AS_FRAMEWORK
#import "GTLR/GTLRQuery.h"
#else
#import "GTLRQuery.h"
#endif
#if GTLR_RUNTIME_VERSION != 3000
#error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source.
#endif
@class GTLRAccessContextManager_AccessLevel;
@class GTLRAccessContextManager_AccessPolicy;
@class GTLRAccessContextManager_CancelOperationRequest;
@class GTLRAccessContextManager_CommitServicePerimetersRequest;
@class GTLRAccessContextManager_GcpUserAccessBinding;
@class GTLRAccessContextManager_GetIamPolicyRequest;
@class GTLRAccessContextManager_ReplaceAccessLevelsRequest;
@class GTLRAccessContextManager_ReplaceServicePerimetersRequest;
@class GTLRAccessContextManager_ServicePerimeter;
@class GTLRAccessContextManager_SetIamPolicyRequest;
@class GTLRAccessContextManager_TestIamPermissionsRequest;
// Generated comments include content from the discovery document; avoid them
// causing warnings since clang's checks are some what arbitrary.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"
NS_ASSUME_NONNULL_BEGIN
// ----------------------------------------------------------------------------
// Constants - For some of the query classes' properties below.
// ----------------------------------------------------------------------------
// accessLevelFormat
/**
* Uses the format the resource was defined in. BasicLevels are returned as
* BasicLevels, CustomLevels are returned as CustomLevels.
*
* Value: "AS_DEFINED"
*/
FOUNDATION_EXTERN NSString * const kGTLRAccessContextManagerAccessLevelFormatAsDefined;
/**
* Use Cloud Common Expression Language when returning the resource. Both
* BasicLevels and CustomLevels are returned as CustomLevels.
*
* Value: "CEL"
*/
FOUNDATION_EXTERN NSString * const kGTLRAccessContextManagerAccessLevelFormatCel;
/**
* The format was not specified.
*
* Value: "LEVEL_FORMAT_UNSPECIFIED"
*/
FOUNDATION_EXTERN NSString * const kGTLRAccessContextManagerAccessLevelFormatLevelFormatUnspecified;
// ----------------------------------------------------------------------------
// Query Classes
//
/**
* Parent class for other Access Context Manager query classes.
*/
@interface GTLRAccessContextManagerQuery : GTLRQuery
/** Selector specifying which fields to include in a partial response. */
@property(nonatomic, copy, nullable) NSString *fields;
@end
/**
* Creates an access level. The long-running operation from this RPC has a
* successful status after the access level propagates to long-lasting storage.
* If access levels contain errors, an error response is returned for the first
* error encountered.
*
* Method: accesscontextmanager.accessPolicies.accessLevels.create
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsCreate : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the access policy which owns this Access Level.
* Format: `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Creates an access level. The long-running operation from this RPC has a
* successful status after the access level propagates to long-lasting storage.
* If access levels contain errors, an error response is returned for the first
* error encountered.
*
* @param object The @c GTLRAccessContextManager_AccessLevel to include in the
* query.
* @param parent Required. Resource name for the access policy which owns this
* Access Level. Format: `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsCreate
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_AccessLevel *)object
parent:(NSString *)parent;
@end
/**
* Deletes an access level based on the resource name. The long-running
* operation from this RPC has a successful status after the access level has
* been removed from long-lasting storage.
*
* Method: accesscontextmanager.accessPolicies.accessLevels.delete
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsDelete : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the Access Level. Format:
* `accessPolicies/{policy_id}/accessLevels/{access_level_id}`
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Deletes an access level based on the resource name. The long-running
* operation from this RPC has a successful status after the access level has
* been removed from long-lasting storage.
*
* @param name Required. Resource name for the Access Level. Format:
* `accessPolicies/{policy_id}/accessLevels/{access_level_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsDelete
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Gets an access level based on the resource name.
*
* Method: accesscontextmanager.accessPolicies.accessLevels.get
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsGet : GTLRAccessContextManagerQuery
/**
* Whether to return `BasicLevels` in the Cloud Common Expression Language
* rather than as `BasicLevels`. Defaults to AS_DEFINED, where Access Levels
* are returned as `BasicLevels` or `CustomLevels` based on how they were
* created. If set to CEL, all Access Levels are returned as `CustomLevels`. In
* the CEL case, `BasicLevels` are translated to equivalent `CustomLevels`.
*
* Likely values:
* @arg @c kGTLRAccessContextManagerAccessLevelFormatLevelFormatUnspecified
* The format was not specified. (Value: "LEVEL_FORMAT_UNSPECIFIED")
* @arg @c kGTLRAccessContextManagerAccessLevelFormatAsDefined Uses the
* format the resource was defined in. BasicLevels are returned as
* BasicLevels, CustomLevels are returned as CustomLevels. (Value:
* "AS_DEFINED")
* @arg @c kGTLRAccessContextManagerAccessLevelFormatCel Use Cloud Common
* Expression Language when returning the resource. Both BasicLevels and
* CustomLevels are returned as CustomLevels. (Value: "CEL")
*/
@property(nonatomic, copy, nullable) NSString *accessLevelFormat;
/**
* Required. Resource name for the Access Level. Format:
* `accessPolicies/{policy_id}/accessLevels/{access_level_id}`
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_AccessLevel.
*
* Gets an access level based on the resource name.
*
* @param name Required. Resource name for the Access Level. Format:
* `accessPolicies/{policy_id}/accessLevels/{access_level_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsGet
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
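// A hedged usage sketch (not part of the generated header): the query classes above are
// built with their class factory methods and then executed through the API's GTLRService
// object; the resource name below is a made-up example.
//
//   GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsGet *query =
//       [GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsGet
//           queryWithName:@"accessPolicies/example_policy/accessLevels/example_level"];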
/**
* Lists all access levels for an access policy.
*
* Method: accesscontextmanager.accessPolicies.accessLevels.list
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsList : GTLRAccessContextManagerQuery
/**
* Whether to return `BasicLevels` in the Cloud Common Expression language, as
* `CustomLevels`, rather than as `BasicLevels`. Defaults to returning
* `AccessLevels` in the format they were defined.
*
* Likely values:
* @arg @c kGTLRAccessContextManagerAccessLevelFormatLevelFormatUnspecified
* The format was not specified. (Value: "LEVEL_FORMAT_UNSPECIFIED")
* @arg @c kGTLRAccessContextManagerAccessLevelFormatAsDefined Uses the
* format the resource was defined in. BasicLevels are returned as
* BasicLevels, CustomLevels are returned as CustomLevels. (Value:
* "AS_DEFINED")
* @arg @c kGTLRAccessContextManagerAccessLevelFormatCel Use Cloud Common
* Expression Language when returning the resource. Both BasicLevels and
* CustomLevels are returned as CustomLevels. (Value: "CEL")
*/
@property(nonatomic, copy, nullable) NSString *accessLevelFormat;
/** Number of Access Levels to include in the list. Default 100. */
@property(nonatomic, assign) NSInteger pageSize;
/**
* Next page token for the next batch of Access Level instances. Defaults to
* the first page of results.
*/
@property(nonatomic, copy, nullable) NSString *pageToken;
/**
* Required. Resource name for the access policy to list Access Levels from.
* Format: `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_ListAccessLevelsResponse.
*
* Lists all access levels for an access policy.
*
* @param parent Required. Resource name for the access policy to list Access
* Levels from. Format: `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsList
*
* @note Automatic pagination will be done when @c shouldFetchNextPages is
* enabled. See @c shouldFetchNextPages on @c GTLRService for more
* information.
*/
+ (instancetype)queryWithParent:(NSString *)parent;
@end
/**
* Updates an access level. The long-running operation from this RPC has a
* successful status after the changes to the access level propagate to
* long-lasting storage. If access levels contain errors, an error response is
* returned for the first error encountered.
*
* Method: accesscontextmanager.accessPolicies.accessLevels.patch
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsPatch : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the Access Level. The `short_name` component
* must begin with a letter and only include alphanumeric and '_'. Format:
* `accessPolicies/{access_policy}/accessLevels/{access_level}`. The maximum
* length of the `access_level` component is 50 characters.
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Required. Mask to control which fields get updated. Must be non-empty.
*
* String format is a comma-separated list of fields.
*/
@property(nonatomic, copy, nullable) NSString *updateMask;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Updates an access level. The long-running operation from this RPC has a
* successful status after the changes to the access level propagate to
* long-lasting storage. If access levels contain errors, an error response is
* returned for the first error encountered.
*
* @param object The @c GTLRAccessContextManager_AccessLevel to include in the
* query.
* @param name Required. Resource name for the Access Level. The `short_name`
* component must begin with a letter and only include alphanumeric and '_'.
* Format: `accessPolicies/{access_policy}/accessLevels/{access_level}`. The
* maximum length of the `access_level` component is 50 characters.
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsPatch
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_AccessLevel *)object
name:(NSString *)name;
@end
/**
* Replaces all existing access levels in an access policy with the access
* levels provided. This is done atomically. The long-running operation from
* this RPC has a successful status after all replacements propagate to
* long-lasting storage. If the replacement contains errors, an error response
* is returned for the first error encountered. Upon error, the replacement is
* cancelled, and existing access levels are not affected. The
* Operation.response field contains ReplaceAccessLevelsResponse. Removing
 * access levels contained in existing service perimeters results in an error.
*
* Method: accesscontextmanager.accessPolicies.accessLevels.replaceAll
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsReplaceAll : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the access policy which owns these Access
* Levels. Format: `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Replaces all existing access levels in an access policy with the access
* levels provided. This is done atomically. The long-running operation from
* this RPC has a successful status after all replacements propagate to
* long-lasting storage. If the replacement contains errors, an error response
* is returned for the first error encountered. Upon error, the replacement is
* cancelled, and existing access levels are not affected. The
* Operation.response field contains ReplaceAccessLevelsResponse. Removing
 * access levels contained in existing service perimeters results in an error.
*
* @param object The @c GTLRAccessContextManager_ReplaceAccessLevelsRequest to
* include in the query.
* @param parent Required. Resource name for the access policy which owns these
* Access Levels. Format: `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsReplaceAll
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_ReplaceAccessLevelsRequest *)object
parent:(NSString *)parent;
@end
/**
* Returns the IAM permissions that the caller has on the specified Access
* Context Manager resource. The resource can be an AccessPolicy, AccessLevel,
* or ServicePerimeter. This method does not support other resources.
*
* Method: accesscontextmanager.accessPolicies.accessLevels.testIamPermissions
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsTestIamPermissions : GTLRAccessContextManagerQuery
/**
* REQUIRED: The resource for which the policy detail is being requested. See
* the operation documentation for the appropriate value for this field.
*/
@property(nonatomic, copy, nullable) NSString *resource;
/**
* Fetches a @c GTLRAccessContextManager_TestIamPermissionsResponse.
*
* Returns the IAM permissions that the caller has on the specified Access
* Context Manager resource. The resource can be an AccessPolicy, AccessLevel,
* or ServicePerimeter. This method does not support other resources.
*
* @param object The @c GTLRAccessContextManager_TestIamPermissionsRequest to
* include in the query.
* @param resource REQUIRED: The resource for which the policy detail is being
* requested. See the operation documentation for the appropriate value for
* this field.
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesAccessLevelsTestIamPermissions
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_TestIamPermissionsRequest *)object
resource:(NSString *)resource;
@end
/**
* Creates an access policy. This method fails if the organization already has
* an access policy. The long-running operation has a successful status after
* the access policy propagates to long-lasting storage. Syntactic and basic
* semantic errors are returned in `metadata` as a BadRequest proto.
*
* Method: accesscontextmanager.accessPolicies.create
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesCreate : GTLRAccessContextManagerQuery
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Creates an access policy. This method fails if the organization already has
* an access policy. The long-running operation has a successful status after
* the access policy propagates to long-lasting storage. Syntactic and basic
* semantic errors are returned in `metadata` as a BadRequest proto.
*
* @param object The @c GTLRAccessContextManager_AccessPolicy to include in the
* query.
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesCreate
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_AccessPolicy *)object;
@end
/**
* Deletes an access policy based on the resource name. The long-running
* operation has a successful status after the access policy is removed from
* long-lasting storage.
*
* Method: accesscontextmanager.accessPolicies.delete
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesDelete : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the access policy to delete. Format
* `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Deletes an access policy based on the resource name. The long-running
* operation has a successful status after the access policy is removed from
* long-lasting storage.
*
* @param name Required. Resource name for the access policy to delete. Format
* `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesDelete
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Returns an access policy based on the name.
*
* Method: accesscontextmanager.accessPolicies.get
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesGet : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the access policy to get. Format
* `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_AccessPolicy.
*
* Returns an access policy based on the name.
*
* @param name Required. Resource name for the access policy to get. Format
* `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesGet
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Gets the IAM policy for the specified Access Context Manager access policy.
*
* Method: accesscontextmanager.accessPolicies.getIamPolicy
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesGetIamPolicy : GTLRAccessContextManagerQuery
/**
* REQUIRED: The resource for which the policy is being requested. See the
* operation documentation for the appropriate value for this field.
*/
@property(nonatomic, copy, nullable) NSString *resource;
/**
* Fetches a @c GTLRAccessContextManager_Policy.
*
* Gets the IAM policy for the specified Access Context Manager access policy.
*
* @param object The @c GTLRAccessContextManager_GetIamPolicyRequest to include
* in the query.
* @param resource REQUIRED: The resource for which the policy is being
* requested. See the operation documentation for the appropriate value for
* this field.
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesGetIamPolicy
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_GetIamPolicyRequest *)object
resource:(NSString *)resource;
@end
/**
* Lists all access policies in an organization.
*
* Method: accesscontextmanager.accessPolicies.list
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesList : GTLRAccessContextManagerQuery
/** Number of AccessPolicy instances to include in the list. Default 100. */
@property(nonatomic, assign) NSInteger pageSize;
/**
* Next page token for the next batch of AccessPolicy instances. Defaults to
* the first page of results.
*/
@property(nonatomic, copy, nullable) NSString *pageToken;
/**
* Required. Resource name for the container to list AccessPolicy instances
* from. Format: `organizations/{org_id}`
*/
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_ListAccessPoliciesResponse.
*
* Lists all access policies in an organization.
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesList
*
* @note Automatic pagination will be done when @c shouldFetchNextPages is
* enabled. See @c shouldFetchNextPages on @c GTLRService for more
* information.
*/
+ (instancetype)query;
@end
/**
* Updates an access policy. The long-running operation from this RPC has a
* successful status after the changes to the access policy propagate to
* long-lasting storage.
*
* Method: accesscontextmanager.accessPolicies.patch
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesPatch : GTLRAccessContextManagerQuery
/**
* Output only. Resource name of the `AccessPolicy`. Format:
* `accessPolicies/{access_policy}`
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Required. Mask to control which fields get updated. Must be non-empty.
*
* String format is a comma-separated list of fields.
*/
@property(nonatomic, copy, nullable) NSString *updateMask;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Updates an access policy. The long-running operation from this RPC has a
* successful status after the changes to the access policy propagate to
* long-lasting storage.
*
* @param object The @c GTLRAccessContextManager_AccessPolicy to include in the
* query.
* @param name Output only. Resource name of the `AccessPolicy`. Format:
* `accessPolicies/{access_policy}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesPatch
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_AccessPolicy *)object
name:(NSString *)name;
@end
/**
* Commits the dry-run specification for all the service perimeters in an
* access policy. A commit operation on a service perimeter involves copying
* its `spec` field to the `status` field of the service perimeter. Only
* service perimeters with `use_explicit_dry_run_spec` field set to true are
* affected by a commit operation. The long-running operation from this RPC has
* a successful status after the dry-run specifications for all the service
* perimeters have been committed. If a commit fails, it causes the
* long-running operation to return an error response and the entire commit
* operation is cancelled. When successful, the Operation.response field
* contains CommitServicePerimetersResponse. The `dry_run` and the `spec`
* fields are cleared after a successful commit operation.
*
* Method: accesscontextmanager.accessPolicies.servicePerimeters.commit
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersCommit : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the parent Access Policy which owns all Service
* Perimeters in scope for the commit operation. Format:
* `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Commits the dry-run specification for all the service perimeters in an
* access policy. A commit operation on a service perimeter involves copying
* its `spec` field to the `status` field of the service perimeter. Only
* service perimeters with `use_explicit_dry_run_spec` field set to true are
* affected by a commit operation. The long-running operation from this RPC has
* a successful status after the dry-run specifications for all the service
* perimeters have been committed. If a commit fails, it causes the
* long-running operation to return an error response and the entire commit
* operation is cancelled. When successful, the Operation.response field
* contains CommitServicePerimetersResponse. The `dry_run` and the `spec`
* fields are cleared after a successful commit operation.
*
* @param object The @c GTLRAccessContextManager_CommitServicePerimetersRequest
* to include in the query.
* @param parent Required. Resource name for the parent Access Policy which
* owns all Service Perimeters in scope for the commit operation. Format:
* `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersCommit
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_CommitServicePerimetersRequest *)object
parent:(NSString *)parent;
@end
/**
* Creates a service perimeter. The long-running operation from this RPC has a
* successful status after the service perimeter propagates to long-lasting
* storage. If a service perimeter contains errors, an error response is
* returned for the first error encountered.
*
* Method: accesscontextmanager.accessPolicies.servicePerimeters.create
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersCreate : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the access policy which owns this Service
* Perimeter. Format: `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Creates a service perimeter. The long-running operation from this RPC has a
* successful status after the service perimeter propagates to long-lasting
* storage. If a service perimeter contains errors, an error response is
* returned for the first error encountered.
*
* @param object The @c GTLRAccessContextManager_ServicePerimeter to include in
* the query.
* @param parent Required. Resource name for the access policy which owns this
* Service Perimeter. Format: `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersCreate
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_ServicePerimeter *)object
parent:(NSString *)parent;
@end
/**
* Deletes a service perimeter based on the resource name. The long-running
* operation from this RPC has a successful status after the service perimeter
* is removed from long-lasting storage.
*
* Method: accesscontextmanager.accessPolicies.servicePerimeters.delete
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersDelete : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the Service Perimeter. Format:
* `accessPolicies/{policy_id}/servicePerimeters/{service_perimeter_id}`
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Deletes a service perimeter based on the resource name. The long-running
* operation from this RPC has a successful status after the service perimeter
* is removed from long-lasting storage.
*
* @param name Required. Resource name for the Service Perimeter. Format:
* `accessPolicies/{policy_id}/servicePerimeters/{service_perimeter_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersDelete
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Gets a service perimeter based on the resource name.
*
* Method: accesscontextmanager.accessPolicies.servicePerimeters.get
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersGet : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the Service Perimeter. Format:
* `accessPolicies/{policy_id}/servicePerimeters/{service_perimeters_id}`
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_ServicePerimeter.
*
* Gets a service perimeter based on the resource name.
*
* @param name Required. Resource name for the Service Perimeter. Format:
* `accessPolicies/{policy_id}/servicePerimeters/{service_perimeters_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersGet
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Lists all service perimeters for an access policy.
*
* Method: accesscontextmanager.accessPolicies.servicePerimeters.list
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersList : GTLRAccessContextManagerQuery
/** Number of Service Perimeters to include in the list. Default 100. */
@property(nonatomic, assign) NSInteger pageSize;
/**
* Next page token for the next batch of Service Perimeter instances. Defaults
* to the first page of results.
*/
@property(nonatomic, copy, nullable) NSString *pageToken;
/**
* Required. Resource name for the access policy to list Service Perimeters
* from. Format: `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_ListServicePerimetersResponse.
*
* Lists all service perimeters for an access policy.
*
* @param parent Required. Resource name for the access policy to list Service
* Perimeters from. Format: `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersList
*
* @note Automatic pagination will be done when @c shouldFetchNextPages is
* enabled. See @c shouldFetchNextPages on @c GTLRService for more
* information.
*/
+ (instancetype)queryWithParent:(NSString *)parent;
@end
/**
* Updates a service perimeter. The long-running operation from this RPC has a
* successful status after the service perimeter propagates to long-lasting
* storage. If a service perimeter contains errors, an error response is
* returned for the first error encountered.
*
* Method: accesscontextmanager.accessPolicies.servicePerimeters.patch
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersPatch : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the ServicePerimeter. The `short_name` component
* must begin with a letter and only include alphanumeric and '_'. Format:
* `accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}`
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Required. Mask to control which fields get updated. Must be non-empty.
*
* String format is a comma-separated list of fields.
*/
@property(nonatomic, copy, nullable) NSString *updateMask;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Updates a service perimeter. The long-running operation from this RPC has a
* successful status after the service perimeter propagates to long-lasting
* storage. If a service perimeter contains errors, an error response is
* returned for the first error encountered.
*
* @param object The @c GTLRAccessContextManager_ServicePerimeter to include in
* the query.
* @param name Required. Resource name for the ServicePerimeter. The
* `short_name` component must begin with a letter and only include
* alphanumeric and '_'. Format:
* `accessPolicies/{access_policy}/servicePerimeters/{service_perimeter}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersPatch
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_ServicePerimeter *)object
name:(NSString *)name;
@end
/**
* Replace all existing service perimeters in an access policy with the service
* perimeters provided. This is done atomically. The long-running operation
* from this RPC has a successful status after all replacements propagate to
* long-lasting storage. Replacements containing errors result in an error
* response for the first error encountered. Upon an error, replacement are
* cancelled and existing service perimeters are not affected. The
* Operation.response field contains ReplaceServicePerimetersResponse.
*
* Method: accesscontextmanager.accessPolicies.servicePerimeters.replaceAll
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersReplaceAll : GTLRAccessContextManagerQuery
/**
* Required. Resource name for the access policy which owns these Service
* Perimeters. Format: `accessPolicies/{policy_id}`
*/
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Replace all existing service perimeters in an access policy with the service
* perimeters provided. This is done atomically. The long-running operation
* from this RPC has a successful status after all replacements propagate to
* long-lasting storage. Replacements containing errors result in an error
* response for the first error encountered. Upon an error, replacement are
* cancelled and existing service perimeters are not affected. The
* Operation.response field contains ReplaceServicePerimetersResponse.
*
* @param object The @c
* GTLRAccessContextManager_ReplaceServicePerimetersRequest to include in the
* query.
* @param parent Required. Resource name for the access policy which owns these
* Service Perimeters. Format: `accessPolicies/{policy_id}`
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersReplaceAll
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_ReplaceServicePerimetersRequest *)object
parent:(NSString *)parent;
@end
/**
* Returns the IAM permissions that the caller has on the specified Access
* Context Manager resource. The resource can be an AccessPolicy, AccessLevel,
* or ServicePerimeter. This method does not support other resources.
*
* Method: accesscontextmanager.accessPolicies.servicePerimeters.testIamPermissions
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersTestIamPermissions : GTLRAccessContextManagerQuery
/**
* REQUIRED: The resource for which the policy detail is being requested. See
* the operation documentation for the appropriate value for this field.
*/
@property(nonatomic, copy, nullable) NSString *resource;
/**
* Fetches a @c GTLRAccessContextManager_TestIamPermissionsResponse.
*
* Returns the IAM permissions that the caller has on the specified Access
* Context Manager resource. The resource can be an AccessPolicy, AccessLevel,
* or ServicePerimeter. This method does not support other resources.
*
* @param object The @c GTLRAccessContextManager_TestIamPermissionsRequest to
* include in the query.
* @param resource REQUIRED: The resource for which the policy detail is being
* requested. See the operation documentation for the appropriate value for
* this field.
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesServicePerimetersTestIamPermissions
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_TestIamPermissionsRequest *)object
resource:(NSString *)resource;
@end
/**
* Sets the IAM policy for the specified Access Context Manager access policy.
* This method replaces the existing IAM policy on the access policy. The IAM
* policy controls the set of users who can perform specific operations on the
* Access Context Manager access policy.
*
* Method: accesscontextmanager.accessPolicies.setIamPolicy
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesSetIamPolicy : GTLRAccessContextManagerQuery
/**
* REQUIRED: The resource for which the policy is being specified. See the
* operation documentation for the appropriate value for this field.
*/
@property(nonatomic, copy, nullable) NSString *resource;
/**
* Fetches a @c GTLRAccessContextManager_Policy.
*
* Sets the IAM policy for the specified Access Context Manager access policy.
* This method replaces the existing IAM policy on the access policy. The IAM
* policy controls the set of users who can perform specific operations on the
* Access Context Manager access policy.
*
* @param object The @c GTLRAccessContextManager_SetIamPolicyRequest to include
* in the query.
* @param resource REQUIRED: The resource for which the policy is being
* specified. See the operation documentation for the appropriate value for
* this field.
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesSetIamPolicy
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_SetIamPolicyRequest *)object
resource:(NSString *)resource;
@end
/**
* Returns the IAM permissions that the caller has on the specified Access
* Context Manager resource. The resource can be an AccessPolicy, AccessLevel,
* or ServicePerimeter. This method does not support other resources.
*
* Method: accesscontextmanager.accessPolicies.testIamPermissions
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_AccessPoliciesTestIamPermissions : GTLRAccessContextManagerQuery
/**
* REQUIRED: The resource for which the policy detail is being requested. See
* the operation documentation for the appropriate value for this field.
*/
@property(nonatomic, copy, nullable) NSString *resource;
/**
* Fetches a @c GTLRAccessContextManager_TestIamPermissionsResponse.
*
* Returns the IAM permissions that the caller has on the specified Access
* Context Manager resource. The resource can be an AccessPolicy, AccessLevel,
* or ServicePerimeter. This method does not support other resources.
*
* @param object The @c GTLRAccessContextManager_TestIamPermissionsRequest to
* include in the query.
* @param resource REQUIRED: The resource for which the policy detail is being
* requested. See the operation documentation for the appropriate value for
* this field.
*
* @return GTLRAccessContextManagerQuery_AccessPoliciesTestIamPermissions
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_TestIamPermissionsRequest *)object
resource:(NSString *)resource;
@end
/**
* Starts asynchronous cancellation on a long-running operation. The server
* makes a best effort to cancel the operation, but success is not guaranteed.
* If the server doesn't support this method, it returns
* `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or
* other methods to check whether the cancellation succeeded or whether the
* operation completed despite cancellation. On successful cancellation, the
* operation is not deleted; instead, it becomes an operation with an
* Operation.error value with a google.rpc.Status.code of 1, corresponding to
* `Code.CANCELLED`.
*
* Method: accesscontextmanager.operations.cancel
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OperationsCancel : GTLRAccessContextManagerQuery
/** The name of the operation resource to be cancelled. */
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_Empty.
*
* Starts asynchronous cancellation on a long-running operation. The server
* makes a best effort to cancel the operation, but success is not guaranteed.
* If the server doesn't support this method, it returns
* `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or
* other methods to check whether the cancellation succeeded or whether the
* operation completed despite cancellation. On successful cancellation, the
* operation is not deleted; instead, it becomes an operation with an
* Operation.error value with a google.rpc.Status.code of 1, corresponding to
* `Code.CANCELLED`.
*
* @param object The @c GTLRAccessContextManager_CancelOperationRequest to
* include in the query.
* @param name The name of the operation resource to be cancelled.
*
* @return GTLRAccessContextManagerQuery_OperationsCancel
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_CancelOperationRequest *)object
name:(NSString *)name;
@end
/**
* Deletes a long-running operation. This method indicates that the client is
* no longer interested in the operation result. It does not cancel the
* operation. If the server doesn't support this method, it returns
* `google.rpc.Code.UNIMPLEMENTED`.
*
* Method: accesscontextmanager.operations.delete
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OperationsDelete : GTLRAccessContextManagerQuery
/** The name of the operation resource to be deleted. */
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_Empty.
*
* Deletes a long-running operation. This method indicates that the client is
* no longer interested in the operation result. It does not cancel the
* operation. If the server doesn't support this method, it returns
* `google.rpc.Code.UNIMPLEMENTED`.
*
* @param name The name of the operation resource to be deleted.
*
* @return GTLRAccessContextManagerQuery_OperationsDelete
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Gets the latest state of a long-running operation. Clients can use this
* method to poll the operation result at intervals as recommended by the API
* service.
*
* Method: accesscontextmanager.operations.get
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OperationsGet : GTLRAccessContextManagerQuery
/** The name of the operation resource. */
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Gets the latest state of a long-running operation. Clients can use this
* method to poll the operation result at intervals as recommended by the API
* service.
*
* @param name The name of the operation resource.
*
* @return GTLRAccessContextManagerQuery_OperationsGet
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Lists operations that match the specified filter in the request. If the
* server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the
* `name` binding allows API services to override the binding to use different
* resource name schemes, such as `users/ * /operations`. To override the
* binding, API services can add a binding such as `"/v1/{name=users/
* *}/operations"` to their service configuration. For backwards compatibility,
* the default name includes the operations collection id, however overriding
* users must ensure the name binding is the parent resource, without the
* operations collection id.
*
* Method: accesscontextmanager.operations.list
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OperationsList : GTLRAccessContextManagerQuery
/** The standard list filter. */
@property(nonatomic, copy, nullable) NSString *filter;
/** The name of the operation's parent resource. */
@property(nonatomic, copy, nullable) NSString *name;
/** The standard list page size. */
@property(nonatomic, assign) NSInteger pageSize;
/** The standard list page token. */
@property(nonatomic, copy, nullable) NSString *pageToken;
/**
* Fetches a @c GTLRAccessContextManager_ListOperationsResponse.
*
* Lists operations that match the specified filter in the request. If the
* server doesn't support this method, it returns `UNIMPLEMENTED`. NOTE: the
* `name` binding allows API services to override the binding to use different
* resource name schemes, such as `users/ * /operations`. To override the
* binding, API services can add a binding such as `"/v1/{name=users/
* *}/operations"` to their service configuration. For backwards compatibility,
* the default name includes the operations collection id, however overriding
* users must ensure the name binding is the parent resource, without the
* operations collection id.
*
* @param name The name of the operation's parent resource.
*
* @return GTLRAccessContextManagerQuery_OperationsList
*
* @note Automatic pagination will be done when @c shouldFetchNextPages is
* enabled. See @c shouldFetchNextPages on @c GTLRService for more
* information.
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
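// Illustrative usage sketch (not part of the generated interface): it assumes
// the companion service class GTLRAccessContextManagerService and a configured
// authorizer exist elsewhere in the app; only the query class and the
// shouldFetchNextPages behaviour noted above come from this header.
//
//   GTLRAccessContextManagerService *service = [[GTLRAccessContextManagerService alloc] init];
//   service.shouldFetchNextPages = YES;  // opt in to the automatic pagination noted above
//   GTLRAccessContextManagerQuery_OperationsList *query =
//       [GTLRAccessContextManagerQuery_OperationsList queryWithName:@"operations"];
//   [service executeQuery:query
//       completionHandler:^(GTLRServiceTicket *ticket, id response, NSError *error) {
//     // response is expected to be a GTLRAccessContextManager_ListOperationsResponse
//   }];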
/**
* Creates a GcpUserAccessBinding. If the client specifies a name, the server
* ignores it. Fails if a resource already exists with the same group_key.
* Completion of this long-running operation does not necessarily signify that
* the new binding is deployed onto all affected users, which may take more
* time.
*
* Method: accesscontextmanager.organizations.gcpUserAccessBindings.create
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsCreate : GTLRAccessContextManagerQuery
/** Required. Example: "organizations/256" */
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Creates a GcpUserAccessBinding. If the client specifies a name, the server
* ignores it. Fails if a resource already exists with the same group_key.
* Completion of this long-running operation does not necessarily signify that
* the new binding is deployed onto all affected users, which may take more
* time.
*
* @param object The @c GTLRAccessContextManager_GcpUserAccessBinding to
* include in the query.
* @param parent Required. Example: "organizations/256"
*
* @return GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsCreate
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_GcpUserAccessBinding *)object
parent:(NSString *)parent;
@end
/**
* Deletes a GcpUserAccessBinding. Completion of this long-running operation
* does not necessarily signify that the binding deletion is deployed onto all
* affected users, which may take more time.
*
* Method: accesscontextmanager.organizations.gcpUserAccessBindings.delete
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsDelete : GTLRAccessContextManagerQuery
/**
* Required. Example: "organizations/256/gcpUserAccessBindings/b3-BhcX_Ud5N"
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Deletes a GcpUserAccessBinding. Completion of this long-running operation
* does not necessarily signify that the binding deletion is deployed onto all
* affected users, which may take more time.
*
* @param name Required. Example:
* "organizations/256/gcpUserAccessBindings/b3-BhcX_Ud5N"
*
* @return GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsDelete
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Gets the GcpUserAccessBinding with the given name.
*
* Method: accesscontextmanager.organizations.gcpUserAccessBindings.get
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsGet : GTLRAccessContextManagerQuery
/**
* Required. Example: "organizations/256/gcpUserAccessBindings/b3-BhcX_Ud5N"
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Fetches a @c GTLRAccessContextManager_GcpUserAccessBinding.
*
* Gets the GcpUserAccessBinding with the given name.
*
* @param name Required. Example:
* "organizations/256/gcpUserAccessBindings/b3-BhcX_Ud5N"
*
* @return GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsGet
*/
+ (instancetype)queryWithName:(NSString *)name;
@end
/**
* Lists all GcpUserAccessBindings for a Google Cloud organization.
*
* Method: accesscontextmanager.organizations.gcpUserAccessBindings.list
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsList : GTLRAccessContextManagerQuery
/**
* Optional. Maximum number of items to return. The server may return fewer
* items. If left blank, the server may return any number of items.
*/
@property(nonatomic, assign) NSInteger pageSize;
/**
* Optional. If left blank, returns the first page. To enumerate all items, use
* the next_page_token from your previous list operation.
*/
@property(nonatomic, copy, nullable) NSString *pageToken;
/** Required. Example: "organizations/256" */
@property(nonatomic, copy, nullable) NSString *parent;
/**
* Fetches a @c GTLRAccessContextManager_ListGcpUserAccessBindingsResponse.
*
* Lists all GcpUserAccessBindings for a Google Cloud organization.
*
* @param parent Required. Example: "organizations/256"
*
* @return GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsList
*
* @note Automatic pagination will be done when @c shouldFetchNextPages is
* enabled. See @c shouldFetchNextPages on @c GTLRService for more
* information.
*/
+ (instancetype)queryWithParent:(NSString *)parent;
@end
/**
* Updates a GcpUserAccessBinding. Completion of this long-running operation
* does not necessarily signify that the changed binding is deployed onto all
* affected users, which may take more time.
*
* Method: accesscontextmanager.organizations.gcpUserAccessBindings.patch
*
* Authorization scope(s):
* @c kGTLRAuthScopeAccessContextManagerCloudPlatform
*/
@interface GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsPatch : GTLRAccessContextManagerQuery
/**
* Immutable. Assigned by the server during creation. The last segment has an
* arbitrary length and has only URI unreserved characters (as defined by [RFC
* 3986 Section 2.3](https://tools.ietf.org/html/rfc3986#section-2.3)). Should
* not be specified by the client during creation. Example:
* "organizations/256/gcpUserAccessBindings/b3-BhcX_Ud5N"
*/
@property(nonatomic, copy, nullable) NSString *name;
/**
* Required. Only the fields specified in this mask are updated. Because name
* and group_key cannot be changed, update_mask is required and must always be:
* update_mask { paths: "access_levels" }
*
* String format is a comma-separated list of fields.
*/
@property(nonatomic, copy, nullable) NSString *updateMask;
/**
* Fetches a @c GTLRAccessContextManager_Operation.
*
* Updates a GcpUserAccessBinding. Completion of this long-running operation
* does not necessarily signify that the changed binding is deployed onto all
* affected users, which may take more time.
*
* @param object The @c GTLRAccessContextManager_GcpUserAccessBinding to
* include in the query.
* @param name Immutable. Assigned by the server during creation. The last
* segment has an arbitrary length and has only URI unreserved characters (as
* defined by [RFC 3986 Section
* 2.3](https://tools.ietf.org/html/rfc3986#section-2.3)). Should not be
* specified by the client during creation. Example:
* "organizations/256/gcpUserAccessBindings/b3-BhcX_Ud5N"
*
* @return GTLRAccessContextManagerQuery_OrganizationsGcpUserAccessBindingsPatch
*/
+ (instancetype)queryWithObject:(GTLRAccessContextManager_GcpUserAccessBinding *)object
name:(NSString *)name;
@end
NS_ASSUME_NONNULL_END
#pragma clang diagnostic pop
| 15,156 |
359 | <gh_stars>100-1000
/**
* Copyright (C) 2016-2020 Xilinx, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may
* not use this file except in compliance with the License. A copy of the
* License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#ifndef KERNEL_DEBUG_MANAGER_DOT_H
#define KERNEL_DEBUG_MANAGER_DOT_H
#include <string>
#include "core/include/xclbin.h"
namespace xdp {
class KernelDebugManager
{
// Consolidated File information and headers
// This structure must be synchronized with the information
// used in xrflink
// Sections
const unsigned int PROJECT_NAME = 0 ;
const unsigned int DWARF_SECTION = 1 ;
const unsigned int BINARY_SECTION = 2 ;
const unsigned int JSON_SECTION = 3 ;
struct SectionHeader
{
unsigned int type ;
unsigned long long int offset ;
unsigned int size ;
} ;
struct FileHeader
{
unsigned int magicNumber ;
unsigned int majorVersion ;
unsigned int minorVersion ;
unsigned int numSections ;
// Followed by N section headers
} ;
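  // Illustrative sketch only (not part of this class): one way a consumer
  // could walk the consolidated file, assuming the layout implied above: a
  // FileHeader followed immediately by numSections SectionHeader records,
  // with each section's payload located by its offset and size. The stream
  // handling itself is an assumption.
  //
  //   std::ifstream in(path, std::ios::binary) ;
  //   FileHeader header ;
  //   in.read(reinterpret_cast<char*>(&header), sizeof(header)) ;
  //   std::vector<SectionHeader> sections(header.numSections) ;
  //   in.read(reinterpret_cast<char*>(sections.data()),
  //           sections.size() * sizeof(SectionHeader)) ;
  //   // sections[i].offset / sections[i].size then locate e.g. the
  //   // DWARF_SECTION or JSON_SECTION payload within the same file.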
private:
int uid ;
int pid ;
// The directory used to communicate information to the xrt_server
std::string sdxDirectory ;
std::string jsonFile ;
std::string dwarfFile ;
bool exists(const char* filename) ;
void createDirectory(const char* filename) ;
public:
KernelDebugManager() ;
~KernelDebugManager() ;
inline const std::string& getDwarfFile() { return dwarfFile ; }
inline const std::string& getJsonFile() { return jsonFile ; }
void reset(const axlf* xclbin) ;
void setEnvironment() ;
} ;
} // end namespace xdp
#endif
| 688 |
1,016 | /**
*
*/
package com.thinkbiganalytics.server.upgrade;
/*-
* #%L
* kylo-operational-metadata-upgrade-service
* %%
* Copyright (C) 2017 <NAME>
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.KyloVersion;
import org.springframework.core.annotation.Order;
/**
* Implemented by beans that should be invoked during the Kylo upgrade process.
*/
@Order(UpgradeAction.DEFAULT_ORDER)
public interface UpgradeAction {
int EARLY_ORDER = -10000;
int DEFAULT_ORDER = 0;
int LATE_ORDER = 10000;
/**
* Indicates whether this action should be invoked during a fresh install before
* version migration begins.
* @param currentVersion the starting version in the upgrade sequence
* @return true if this action will participate in the fresh install before migration
*/
default boolean isTargetPreFreshInstall(KyloVersion currentVersion) {
return false;
}
/**
* Indicates whether this action should be invoked during the migration
* sequence for the specified version.
* @param version the current version in the migration sequence
     * @return true if this action will participate in the upgrade to this version
*/
default boolean isTargetVersion(KyloVersion version) {
return false;
}
/**
* Indicates whether this action should be invoked during a fresh install after
* the final version migration has completed.
     * @param finalVersion the final version in the upgrade sequence
* @return true if this action will participate in the fresh install after migration
*/
default boolean isTargetFreshInstall(KyloVersion finalVersion) {
return false;
}
/**
     * Invoked when one of the isTarget* methods returns true during the upgrade procedure.
* @param version the current target version
*/
void upgradeTo(KyloVersion version);
}
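// Illustrative sketch (not part of this interface): a hypothetical action that
// only runs while migrating to one specific version. The way the version is
// compared below is schematic; the real KyloVersion API may differ.
//
//   @Order(UpgradeAction.DEFAULT_ORDER)
//   public class AddLineageIndexesUpgradeAction implements UpgradeAction {
//
//       @Override
//       public boolean isTargetVersion(KyloVersion version) {
//           return "0.8.4".equals(version.toString());   // assumed comparison
//       }
//
//       @Override
//       public void upgradeTo(KyloVersion version) {
//           // perform the metadata/schema changes required for this version
//       }
//   }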
| 750 |
2,921 | {
"name": "Content and Ad Network",
"symbol": "CAN",
"type": "ERC20",
"decimals": 18,
"description": "CAN is the token designed for advertising services basing on a constantly growing AD system being developed by MobiPromo.",
"website": "http://mobipromo.io/",
"explorer": "https://etherscan.io/token/<KEY>",
"status": "active",
"id": "0x5f3789907b35DCe5605b00C0bE0a7eCDBFa8A841"
} | 171 |
1,444 |
package mage.cards.w;
import java.util.UUID;
import mage.MageInt;
import mage.abilities.common.SimpleStaticAbility;
import mage.abilities.effects.common.combat.CantAttackUnlessDefenderControllsPermanent;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
import mage.constants.Zone;
import mage.filter.common.FilterLandPermanent;
/**
*
* @author fireshoes
*/
public final class WuWarship extends CardImpl {
public WuWarship(UUID ownerId, CardSetInfo setInfo) {
super(ownerId,setInfo,new CardType[]{CardType.CREATURE},"{2}{U}");
this.subtype.add(SubType.HUMAN);
this.subtype.add(SubType.SOLDIER);
this.power = new MageInt(3);
this.toughness = new MageInt(3);
// Wu Warship can't attack unless defending player controls an Island.
this.addAbility(new SimpleStaticAbility(Zone.BATTLEFIELD, new CantAttackUnlessDefenderControllsPermanent(new FilterLandPermanent(SubType.ISLAND,"an Island"))));
}
private WuWarship(final WuWarship card) {
super(card);
}
@Override
public WuWarship copy() {
return new WuWarship(this);
}
}
| 437 |
372 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.games.model;
/**
* This is a JSON template for an snapshot object.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Play Game Services API. For a detailed
* explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Snapshot extends com.google.api.client.json.GenericJson {
/**
* The cover image of this snapshot. May be absent if there is no image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private SnapshotImage coverImage;
/**
* The description of this snapshot.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* The ID of the file underlying this snapshot in the Drive API. Only present if the snapshot is a
* view on a Drive file and the file is owned by the caller.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String driveId;
/**
* The duration associated with this snapshot, in millis.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long durationMillis;
/**
* The ID of the snapshot.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
/**
* Uniquely identifies the type of this resource. Value is always the fixed string games#snapshot.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* The timestamp (in millis since Unix epoch) of the last modification to this snapshot.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long lastModifiedMillis;
/**
* The progress value (64-bit integer set by developer) associated with this snapshot.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long progressValue;
/**
* The title of this snapshot.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String title;
/**
* The type of this snapshot. Possible values are: - "SAVE_GAME" - A snapshot representing a
* save game.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String type;
/**
* The unique name provided when the snapshot was created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String uniqueName;
/**
* The cover image of this snapshot. May be absent if there is no image.
* @return value or {@code null} for none
*/
public SnapshotImage getCoverImage() {
return coverImage;
}
/**
* The cover image of this snapshot. May be absent if there is no image.
* @param coverImage coverImage or {@code null} for none
*/
public Snapshot setCoverImage(SnapshotImage coverImage) {
this.coverImage = coverImage;
return this;
}
/**
* The description of this snapshot.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* The description of this snapshot.
* @param description description or {@code null} for none
*/
public Snapshot setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* The ID of the file underlying this snapshot in the Drive API. Only present if the snapshot is a
* view on a Drive file and the file is owned by the caller.
* @return value or {@code null} for none
*/
public java.lang.String getDriveId() {
return driveId;
}
/**
* The ID of the file underlying this snapshot in the Drive API. Only present if the snapshot is a
* view on a Drive file and the file is owned by the caller.
* @param driveId driveId or {@code null} for none
*/
public Snapshot setDriveId(java.lang.String driveId) {
this.driveId = driveId;
return this;
}
/**
* The duration associated with this snapshot, in millis.
* @return value or {@code null} for none
*/
public java.lang.Long getDurationMillis() {
return durationMillis;
}
/**
* The duration associated with this snapshot, in millis.
* @param durationMillis durationMillis or {@code null} for none
*/
public Snapshot setDurationMillis(java.lang.Long durationMillis) {
this.durationMillis = durationMillis;
return this;
}
/**
* The ID of the snapshot.
* @return value or {@code null} for none
*/
public java.lang.String getId() {
return id;
}
/**
* The ID of the snapshot.
* @param id id or {@code null} for none
*/
public Snapshot setId(java.lang.String id) {
this.id = id;
return this;
}
/**
* Uniquely identifies the type of this resource. Value is always the fixed string games#snapshot.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* Uniquely identifies the type of this resource. Value is always the fixed string games#snapshot.
* @param kind kind or {@code null} for none
*/
public Snapshot setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* The timestamp (in millis since Unix epoch) of the last modification to this snapshot.
* @return value or {@code null} for none
*/
public java.lang.Long getLastModifiedMillis() {
return lastModifiedMillis;
}
/**
* The timestamp (in millis since Unix epoch) of the last modification to this snapshot.
* @param lastModifiedMillis lastModifiedMillis or {@code null} for none
*/
public Snapshot setLastModifiedMillis(java.lang.Long lastModifiedMillis) {
this.lastModifiedMillis = lastModifiedMillis;
return this;
}
/**
* The progress value (64-bit integer set by developer) associated with this snapshot.
* @return value or {@code null} for none
*/
public java.lang.Long getProgressValue() {
return progressValue;
}
/**
* The progress value (64-bit integer set by developer) associated with this snapshot.
* @param progressValue progressValue or {@code null} for none
*/
public Snapshot setProgressValue(java.lang.Long progressValue) {
this.progressValue = progressValue;
return this;
}
/**
* The title of this snapshot.
* @return value or {@code null} for none
*/
public java.lang.String getTitle() {
return title;
}
/**
* The title of this snapshot.
* @param title title or {@code null} for none
*/
public Snapshot setTitle(java.lang.String title) {
this.title = title;
return this;
}
/**
* The type of this snapshot. Possible values are: - "SAVE_GAME" - A snapshot representing a
* save game.
* @return value or {@code null} for none
*/
public java.lang.String getType() {
return type;
}
/**
* The type of this snapshot. Possible values are: - "SAVE_GAME" - A snapshot representing a
* save game.
* @param type type or {@code null} for none
*/
public Snapshot setType(java.lang.String type) {
this.type = type;
return this;
}
/**
* The unique name provided when the snapshot was created.
* @return value or {@code null} for none
*/
public java.lang.String getUniqueName() {
return uniqueName;
}
/**
* The unique name provided when the snapshot was created.
* @param uniqueName uniqueName or {@code null} for none
*/
public Snapshot setUniqueName(java.lang.String uniqueName) {
this.uniqueName = uniqueName;
return this;
}
@Override
public Snapshot set(String fieldName, Object value) {
return (Snapshot) super.set(fieldName, value);
}
@Override
public Snapshot clone() {
return (Snapshot) super.clone();
}
}
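// Illustrative sketch: the setters above return `this`, so a Snapshot can be
// populated fluently. The field values below are placeholders only.
//
//   Snapshot snapshot = new Snapshot()
//       .setTitle("Chapter 3")
//       .setDescription("Autosave before the boss fight")
//       .setDurationMillis(5400000L)
//       .setProgressValue(42L)
//       .setType("SAVE_GAME");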
| 2,852 |
2,151 | <reponame>saden1/mockito<gh_stars>1000+
package org.mockito.release.notes.util;
/**
* Human readable text representation
*/
public interface HumanReadable {
/**
     * @return the text representation
*/
String toText();
}
| 82 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.test.ide;
import java.io.File;
import java.io.PrintStream;
import java.io.PrintWriter;
/**
* Interface for BlacklistedClassHandlerSingleton
* Obtain using BlacklistedClassHandlerSingleton.getInstance or
* BlacklistedClassHandlerSingleton.getBlacklistedClassHandler methods
* This guarantees that only one instance is used across the different
* possible classloaders
*/
public interface BlacklistedClassesHandler {
/**
* Registers BlacklistedClassesHandler as handler for
* org.netbeans.ProxyClassLoader logger
*/
public void register();
/**
* @return true if BlacklistedClassesHandler is in whitelist generation mode
*/
boolean isGeneratingWhitelist();
/**
* Lists violations with captions
* @return list of all violations
*/
String listViolations();
/**
* Lists violations
* @param printCaptions if true prints caption and summary information
* @return list of all violations
*/
String listViolations(boolean printCaptions);
/**
* Prints list of all violations to the specified PrintStream
* @param out PrintStream
* @param printCaptions if true prints caption and summary information
*/
void listViolations(PrintStream out, boolean printCaptions);
/**
* Prints list of all violations using specified PrintWriter
* @param out PrintWriter
* @param printCaptions if true prints caption and summary information
*/
void listViolations(PrintWriter out, boolean printCaptions);
/**
* Prints list of all violations to the specified PrintStream
* @param out PrintStream
* @param listExceptions if true all exceptions are printed
* @param printCaptions if true prints caption and summary information
*/
void listViolations(PrintStream out, boolean listExceptions, boolean printCaptions);
/**
* Prints list of all violations using specified PrintWriter
* @param out PrintWriter
* @param listExceptions if true all exceptions are printed
* @param printCaptions if true prints caption and summary information
*/
void listViolations(PrintWriter out, boolean listExceptions, boolean printCaptions);
/**
* Logs list of all violations using Logger
*/
void logViolations();
/**
   * @return true if there were no violations
*/
boolean noViolations();
  /** @return the number of violations */
int getNumberOfViolations();
/**
*
   * @param listViolations if true, outputs the list of all violations to System.out
   * @return true if there were no violations
*/
boolean noViolations(boolean listViolations);
/**
*
   * @param out the PrintStream to which the list of all violations is printed
   * @return true if there were no violations
*/
boolean noViolations(PrintStream out);
/**
* Resets violations information
*/
void resetViolations();
/**
* Saves whitelist
*/
void saveWhiteList();
/**
* Prints whitelist to the specified PrintStream
*/
void saveWhiteList(PrintStream out);
/**
* Saves whitelist to the specified file
*/
void saveWhiteList(String filename);
/**
* Prints whitelist using specified PrintWriter
*/
void saveWhiteList(PrintWriter out);
/**
*
* @return true if BlacklistedClassesHandler was initialized properly
*/
boolean isInitialized();
/**
* Initializes the BlacklistedClassesHandler.
* @param blacklistFileName If null blacklist checking is disabled
* @param whitelistFileName If null whitelist checking is disabled
* @param generateWhitelist If true whitelist checking is disabled
* and all loaded classes are being added to whitelist
* @return true if Singleton was correctly initialized
*/
boolean initSingleton(String blacklistFileName, String whitelistFileName, boolean generateWhitelist);
/**
* Initializes the BlacklistedClassesHandler.
* @param configFileName configuration file name
* @return true if Singleton was correctly initialized
*/
boolean initSingleton(String configFileName);
/**
* Removes BlacklistedClassesHandler from logger
*/
void unregister();
/**
* @return true if whitelist storage is being used
*/
public boolean hasWhitelistStorage();
/**
* Outputs difference between collected list of classes and the last
* one from the whitelist storage
* @param out PrintStream
*/
public void reportDifference(PrintStream out);
/**
* Outputs difference between collected list of classes and the last
* one from the whitelist storage
* @param out PrintWriter
*/
public void reportDifference(PrintWriter out);
/**
* Returns difference between collected list of classes and the last
* one from the whitelist storage
* @return difference report
*/
public String reportDifference();
/**
* Returns only list of violators but prints all the exceptions to out
* @param out PrintStream
* @return list of violators
*/
public String reportViolations(PrintStream out);
/**
* Returns only list of violators but prints all the exceptions to out
* @param out PrintWriter
* @return list of violators
*/
public String reportViolations(PrintWriter out);
/**
* Allows for reinitialization of the handler
*/
public void resetInitiated();
/**
* writes list of violators in NPSS snapshot file
* @param file File NPSS output file
*/
public void writeViolationsSnapshot(File file);
/**
* filters out all violators not containing any of the strings contained in parameter list
* @param list list of all filters
*/
public void filterViolators(String[] list);
}
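// Illustrative sketch (not part of the interface): the lifecycle described in
// the class comment above. BlacklistedClassHandlerSingleton.getInstance is the
// accessor mentioned there; the file names below are placeholders.
//
//   BlacklistedClassesHandler handler = BlacklistedClassHandlerSingleton.getInstance();
//   if (handler.initSingleton("blacklist.txt", "whitelist.txt", false)) {
//       handler.register();                    // hook into the ProxyClassLoader logger
//       // ... run the IDE scenario under test ...
//       boolean ok = handler.noViolations(System.out);
//       handler.unregister();
//       // the calling test would assert that ok is true
//   }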
| 2,093 |
348 | <gh_stars>100-1000
{"nom":"Beutin","circ":"4ème circonscription","dpt":"Pas-de-Calais","inscrits":326,"abs":156,"votants":170,"blancs":2,"nuls":0,"exp":168,"res":[{"nuance":"REM","nom":"<NAME>","voix":65},{"nuance":"LR","nom":"<NAME>","voix":59},{"nuance":"SOC","nom":"Mme <NAME>","voix":20},{"nuance":"FN","nom":"M. <NAME>","voix":19},{"nuance":"FI","nom":"Mme <NAME>","voix":5},{"nuance":"EXG","nom":"M. <NAME>","voix":0},{"nuance":"EXG","nom":"Mme <NAME>","voix":0},{"nuance":"ECO","nom":"Mme <NAME>","voix":0},{"nuance":"DIV","nom":"Mme <NAME>","voix":0},{"nuance":"DVD","nom":"M. <NAME>","voix":0}]} | 251 |
312 | package com.dfire.platform.alchemy.descriptor;
import com.dfire.platform.alchemy.common.Constants;
import com.dfire.platform.alchemy.util.BindPropertiesUtil;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Basic information for submitting a SQL job
*
* @author congbai
* @date 01/06/2018
*/
public class TableDescriptor implements Descriptor {
public List<SourceDescriptor> sources;
public List<UdfDescriptor> udfs;
public volatile List<SinkDescriptor> sinkDescriptors;
private List<String> codes;
private List<Map<String, Object>> sinks;
public List<UdfDescriptor> getUdfs() {
return udfs;
}
public void setUdfs(List<UdfDescriptor> udfs) {
this.udfs = udfs;
}
public List<SourceDescriptor> getSources() {
return sources;
}
public void setSources(List<SourceDescriptor> sources) {
this.sources = sources;
}
public List<Map<String, Object>> getSinks() {
return sinks;
}
public void setSinks(List<Map<String, Object>> sinks) {
this.sinks = sinks;
}
public List<SinkDescriptor> getSinkDescriptors() {
if (this.sinkDescriptors == null) {
synchronized (this) {
if (CollectionUtils.isEmpty(this.sinks)) {
return this.sinkDescriptors;
}
List<SinkDescriptor> sinkDescriptorList = new ArrayList<>(this.sinks.size());
for (Map<String, Object> sink : sinks) {
Object type = sink.get(Constants.DESCRIPTOR_TYPE_KEY);
if (type == null) {
continue;
}
SinkDescriptor descriptor = DescriptorFactory.me.find(String.valueOf(type), SinkDescriptor.class);
if (descriptor == null) {
continue;
}
try {
SinkDescriptor sinkDescriptor = BindPropertiesUtil.bindProperties(sink, descriptor.getClass());
sinkDescriptorList.add(sinkDescriptor);
} catch (Exception e) {
e.printStackTrace();
}
}
this.sinkDescriptors = sinkDescriptorList;
}
}
return sinkDescriptors;
}
public void setSinkDescriptors(List<SinkDescriptor> sinkDescriptors) {
this.sinkDescriptors = sinkDescriptors;
}
public List<String> getCodes() {
return codes;
}
public void setCodes(List<String> codes) {
this.codes = codes;
}
@Override
public String type() {
return Constants.TYPE_VALUE_TABLE;
}
@Override
public void validate() throws Exception {
        Assert.notEmpty(sources, "sources must not be empty");
        Assert.notEmpty(getSinkDescriptors(), "sinks must not be empty");
for (SourceDescriptor sourceDescriptor : sources) {
sourceDescriptor.validate();
}
for (SinkDescriptor sinkDescriptor : getSinkDescriptors()) {
sinkDescriptor.validate();
}
if (CollectionUtils.isEmpty(udfs)) {
return;
}
for (UdfDescriptor udfDescriptor : udfs) {
udfDescriptor.validate();
}
}
}
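// Illustrative sketch: the shape of a "sinks" entry before getSinkDescriptors()
// binds it to a concrete SinkDescriptor. The type value "kafka" and the extra
// "topic" key are assumptions; only the Constants.DESCRIPTOR_TYPE_KEY lookup
// comes from the code above.
//
//   Map<String, Object> sink = new HashMap<>();
//   sink.put(Constants.DESCRIPTOR_TYPE_KEY, "kafka");    // resolved via DescriptorFactory
//   sink.put("topic", "alchemy-output");                 // descriptor-specific property
//
//   TableDescriptor table = new TableDescriptor();
//   table.setSinks(java.util.Collections.singletonList(sink));
//   List<SinkDescriptor> bound = table.getSinkDescriptors();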
| 1,640 |
471 | <gh_stars>100-1000
import sys
from django.conf import settings
from django.core.management import color_style
def abort():
print("Aborting")
sys.exit(1)
def confirm_destructive_operation():
style = color_style()
print(style.ERROR("\nHEY! This is wicked dangerous, pay attention."))
print(style.WARNING("\nThis operation irreversibly deletes a lot of stuff."))
print(f"\nSERVER_ENVIRONMENT = {settings.SERVER_ENVIRONMENT}")
if settings.IS_SAAS_ENVIRONMENT:
print("This command isn't meant to be run on a SAAS environment")
abort()
confirm("Are you SURE you want to proceed?")
def confirm(msg):
print(msg)
if input("(y/N)") != 'y':
abort()
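# Illustrative sketch (not part of this module): how a management command might
# guard a destructive operation with the helpers above. The command name and
# the wipe_everything() call are hypothetical.
#
#   from django.core.management.base import BaseCommand
#
#   class Command(BaseCommand):
#       help = "Irreversibly wipes local data"
#
#       def handle(self, *args, **options):
#           confirm_destructive_operation()
#           wipe_everything()  # hypothetical destructive step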
| 268 |
5,169 | <reponame>Gantios/Specs<filename>Specs/3/d/6/ShopJoySDK-v2/2.0.3/ShopJoySDK-v2.podspec.json<gh_stars>1000+
{
"name": "ShopJoySDK-v2",
"version": "2.0.3",
"summary": "ShopJoySDK-v2",
"description": "iOS SDK for ShopJoy",
"license": "(c) ShopJoy 2016. All rights reserved.",
"authors": {
"ShopJoy": "<EMAIL>"
},
"homepage": "https://github.com/ShopJoySDK/iOS-SDK-v2/",
"platforms": {
"ios": "7.0"
},
"source_files": "*.h",
"source": {
"git": "https://github.com/ShopJoySDK/iOS-SDK-v2.git",
"tag": "2.0.3"
},
"requires_arc": true,
"xcconfig": {
"FRAMEWORK_SEARCH_PATHS": "$(inherited)",
"HEADER_SEARCH_PATHS": "$(inherited)"
},
"preserve_paths": "libShopJoySDK.a",
"vendored_libraries": "libShopJoySDK.a",
"frameworks": [
"CoreBluetooth",
"CoreLocation"
],
"libraries": "sqlite3"
}
| 402 |
25,151 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.remote.internal;
import com.google.common.net.MediaType;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.epoll.Epoll;
import io.netty.channel.epoll.EpollEventLoopGroup;
import io.netty.channel.epoll.EpollServerDomainSocketChannel;
import io.netty.channel.kqueue.KQueue;
import io.netty.channel.kqueue.KQueueEventLoopGroup;
import io.netty.channel.kqueue.KQueueServerDomainSocketChannel;
import io.netty.channel.unix.DomainSocketAddress;
import io.netty.channel.unix.DomainSocketChannel;
import io.netty.channel.unix.ServerDomainSocketChannel;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpObjectAggregator;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.handler.codec.http.HttpServerKeepAliveHandler;
import io.netty.handler.codec.http.HttpVersion;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.openqa.selenium.remote.http.ClientConfig;
import org.openqa.selenium.remote.http.Contents;
import org.openqa.selenium.remote.http.HttpClient;
import org.openqa.selenium.remote.http.HttpRequest;
import org.openqa.selenium.remote.http.HttpResponse;
import org.openqa.selenium.testing.Safely;
import java.io.IOException;
import java.net.SocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.atomic.AtomicReference;
import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH;
import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assumptions.assumeThat;
import static org.openqa.selenium.remote.http.HttpMethod.GET;
public abstract class DomainSocketsTestBase {
private final AtomicReference<String> responseText = new AtomicReference<>();
private EventLoopGroup group;
private ChannelFuture future;
private URI socket;
protected abstract HttpClient.Factory createFactory();
@Before
public void setupUnixDomainSocketServer() throws IOException, URISyntaxException {
Class<? extends ServerDomainSocketChannel> channelType = null;
if (Epoll.isAvailable()) {
group = new EpollEventLoopGroup(2);
channelType = EpollServerDomainSocketChannel.class;
} else if (KQueue.isAvailable()) {
group = new KQueueEventLoopGroup(2);
channelType = KQueueServerDomainSocketChannel.class;
}
assumeThat(group).isNotNull();
assumeThat(channelType).isNotNull();
ServerBootstrap bootstrap = new ServerBootstrap()
.group(group)
.option(ChannelOption.SO_BACKLOG, 1024)
.childOption(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
.channel(channelType)
.childHandler(new ChannelInitializer<DomainSocketChannel>() {
@Override
protected void initChannel(DomainSocketChannel ch) {
ch.pipeline()
.addLast("http-codec", new HttpServerCodec())
.addLast("http-keep-alive", new HttpServerKeepAliveHandler())
.addLast("http-aggregator", new HttpObjectAggregator(Integer.MAX_VALUE))
.addLast(new SimpleChannelInboundHandler<FullHttpRequest>() {
@Override
protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest req) {
byte[] bytes = responseText.get().getBytes(UTF_8);
ByteBuf text = Unpooled.wrappedBuffer(bytes);
FullHttpResponse res = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, text);
res.headers().set(CONTENT_TYPE, MediaType.PLAIN_TEXT_UTF_8.toString());
res.headers().set(CONTENT_LENGTH, bytes.length);
ctx.writeAndFlush(res);
}
});
}
});
Path temp = Files.createTempFile("domain-socket-test", "socket");
Files.deleteIfExists(temp);
SocketAddress address = new DomainSocketAddress(temp.toFile());
future = bootstrap.bind(address);
socket = new URI("unix", null, null, 0, temp.toString(), null, null);
}
@After
public void shutdown() {
Safely.safelyCall(() -> group.shutdownGracefully());
Safely.safelyCall(() -> future.channel().closeFuture().sync());
}
@Test
public void shouldBeAbleToConnectToAUnixDomainSocketUrl() {
ClientConfig config = ClientConfig.defaultConfig().baseUri(socket);
HttpClient client = createFactory().createClient(config);
String emphaticCheeseEnjoyment = "I like cheese!";
responseText.set(emphaticCheeseEnjoyment);
HttpResponse res = client.execute(new HttpRequest(GET, "/do-you-like-cheese"));
assertThat(Contents.string(res)).isEqualTo(emphaticCheeseEnjoyment);
}
}
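// Illustrative sketch: a concrete test only needs to supply the HttpClient
// factory under test. The NettyClient.Factory name below is an assumption; any
// HttpClient.Factory implementation that understands "unix" URIs would do.
//
//   public class NettyDomainSocketsTest extends DomainSocketsTestBase {
//       @Override
//       protected HttpClient.Factory createFactory() {
//           return new NettyClient.Factory();
//       }
//   }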
| 2,113 |
672 | <filename>objc4/openSources/dyld-551.4/testing/nocr/nocr.c<gh_stars>100-1000
#include "execserverServer.h"
#include <mach/mach.h>
#include <mach/vm_map.h>
#include <stdio.h>
#include <stdlib.h>
#include <err.h>
#include <pthread.h>
#include <unistd.h>
#include <dispatch/dispatch.h>
#include <errno.h>
#include <signal.h>
#include <libproc.h>
#include <System/sys/reason.h>
#include <System/sys/proc_info.h>
static pid_t sChildPid;
static dispatch_semaphore_t sServerRunning;
static bool sChildCrashed = false;
static bool sChildTerminatedByDyld = false;
/*
* setup exception handling port for EXC_CRASH and EXC_CORPSE_NOTIFY.
* runs mach_msg_server once for receiving exception messages from kernel.
*/
static void* serverCode(void* arg)
{
mach_port_t exception_port;
kern_return_t kret = mach_port_allocate(mach_task_self(), MACH_PORT_RIGHT_RECEIVE, &exception_port);
if (kret != KERN_SUCCESS)
errx(1, "mach_port_allocate: %s (%d)", mach_error_string(kret), kret);
kret = mach_port_insert_right(mach_task_self(), exception_port, exception_port, MACH_MSG_TYPE_MAKE_SEND);
if (kret != KERN_SUCCESS)
errx(1, "mach_port_insert_right: %s (%d)", mach_error_string(kret), kret);
kret = task_set_exception_ports(mach_task_self(), EXC_MASK_CRASH | EXC_MASK_CORPSE_NOTIFY, exception_port,
EXCEPTION_DEFAULT | MACH_EXCEPTION_CODES, 0);
if (kret != KERN_SUCCESS)
errx(1, "task_set_exception_ports: %s (%d)", mach_error_string(kret), kret);
dispatch_semaphore_signal(sServerRunning);
kret = mach_msg_server(mach_exc_server, MACH_MSG_SIZE_RELIABLE, exception_port, 0);
if (kret != KERN_SUCCESS)
errx(1, "mach_msg_server: %s (%d)", mach_error_string(kret), kret);
return NULL;
}
static void childDied(int sig)
{
struct proc_exitreasoninfo info;
bzero(&info, sizeof(info));
uint8_t packReasonData[OS_REASON_BUFFER_MAX_SIZE];
bzero(packReasonData, OS_REASON_BUFFER_MAX_SIZE);
info.eri_reason_buf_size = OS_REASON_BUFFER_MAX_SIZE;
info.eri_kcd_buf = (user_addr_t)packReasonData;
//fprintf(stderr, "info=%p\n", &info);
if ( proc_pidinfo(sChildPid, PROC_PIDEXITREASONINFO, 1, &info, PROC_PIDEXITREASONINFO_SIZE) != sizeof(struct proc_exitreasoninfo) ) {
printf("bad return size from proc_pidinfo()\n");
return;
}
sChildTerminatedByDyld = (info.eri_namespace == OS_REASON_DYLD);
}
int main(int argc, const char* argv[])
{
if ( argc < 2 ) {
fprintf(stderr, "usage: nocr [-require_crash] prog args...\n");
return EXIT_FAILURE;
}
unsigned progArgIndex = 1;
bool requireCrash = false;
const char* testName = NULL;
if ( strcmp(argv[1], "-require_crash") == 0 ) {
progArgIndex = 2;
requireCrash = true;
testName = getenv("NOCR_TEST_NAME");
if ( testName )
printf("[BEGIN] %s\n", testName);
}
signal(SIGCHLD, childDied);
sServerRunning = dispatch_semaphore_create(0);
// start up thread for mach server which handles mach exception ports
pthread_t serverThread;
int result = pthread_create(&serverThread, NULL, serverCode, NULL);
if ( result )
err(EXIT_FAILURE, "pthread_create");
// wait until server is up before starting child
dispatch_semaphore_wait(sServerRunning, DISPATCH_TIME_FOREVER);
// fork and exec child
sChildPid = fork();
if ( sChildPid < 0 )
err(EXIT_FAILURE, "fork");
if ( sChildPid == 0 ) {
// child side
result = execvp(argv[progArgIndex], (char**)&argv[progArgIndex]);
err(EXIT_FAILURE, "exec(\"%s\",...)", argv[progArgIndex]);
}
// wait for child to finish (including crash)
int status;
int waitResult;
int childResult = EXIT_FAILURE;
do {
waitResult = waitpid(sChildPid, &status, 0);
} while ( (waitResult == -1) && (errno == EINTR) );
if ( waitResult != -1 ) {
if ( WIFEXITED(status) ) {
childResult = WEXITSTATUS(status);
}
}
if ( requireCrash ) {
if ( testName ) {
if ( sChildCrashed || sChildTerminatedByDyld )
printf("[PASS] %s\n", testName);
else
printf("[FAIL] %s\n", testName);
}
return sChildCrashed ? EXIT_SUCCESS : EXIT_FAILURE;
}
else
return childResult;
}
// Mach exception handler routines needed by execserverServer.c
kern_return_t
catch_mach_exception_raise(mach_port_t exception_port,
mach_port_t thread,
mach_port_t task,
exception_type_t exception,
mach_exception_data_t code,
mach_msg_type_number_t codeCnt)
{
//fprintf(stderr, "child crashed\n");
sChildCrashed = true;
return KERN_SUCCESS;
}
kern_return_t
catch_mach_exception_raise_state(mach_port_t exception_port,
exception_type_t exception,
const mach_exception_data_t code,
mach_msg_type_number_t codeCnt,
int * flavor,
const thread_state_t old_state,
mach_msg_type_number_t old_stateCnt,
thread_state_t new_state,
mach_msg_type_number_t * new_stateCnt)
{
errx(1, "Unsupported catch_mach_exception_raise_state");
return KERN_NOT_SUPPORTED;
}
kern_return_t
catch_mach_exception_raise_state_identity(mach_port_t exception_port,
mach_port_t thread,
mach_port_t task,
exception_type_t exception,
mach_exception_data_t code,
mach_msg_type_number_t codeCnt,
int * flavor,
thread_state_t old_state,
mach_msg_type_number_t old_stateCnt,
thread_state_t new_state,
mach_msg_type_number_t * new_stateCnt)
{
errx(1, "Unsupported catch_mach_exception_raise_state_identity");
return KERN_NOT_SUPPORTED;
}
| 3,174 |
531 | <filename>Functional_Tests/PlayerMovementFunctionalTest.cpp
// Fill out your copyright notice in the Description page of Project Settings.
#include "Tests/PlayerMovementFunctionalTest.h"
#include "GameFramework/Character.h"
#include "Kismet/GameplayStatics.h"
#include "Kismet/KismetSystemLibrary.h"
void APlayerMovementFunctionalTest::BeginPlay()
{
Super::BeginPlay();
Player = UGameplayStatics::GetPlayerCharacter(GetWorld(),0);
if (Player)
{
InitialLocation = Player->GetActorLocation();
}
}
void APlayerMovementFunctionalTest::Tick(float DeltaSeconds)
{
Super::Tick(DeltaSeconds);
if (Player)
{
//Go forward until we reach our destination
Player->AddMovementInput(Player->GetActorForwardVector());
if (TraveledTotalDistance())
{
FinishTest(EFunctionalTestResult::Succeeded,FString("Traveled required units!"));
UKismetSystemLibrary::QuitGame(GetWorld(),UGameplayStatics::GetPlayerController(GetWorld(),0),EQuitPreference::Quit,false);
}
}
else
{
FinishTest(EFunctionalTestResult::Failed, FString("Invalid player character!"));
}
}
bool APlayerMovementFunctionalTest::TraveledTotalDistance() const
{
return (Player) ? (FVector::Distance(Player->GetActorLocation(), InitialLocation + Player->GetActorForwardVector() * MovementDistance) <= DistanceThreshold) : false;
}
| 416 |
416 | #if (defined(USE_UIKIT_PUBLIC_HEADERS) && USE_UIKIT_PUBLIC_HEADERS) || !__has_include(<UIKitCore/UISceneActivationConditions.h>)
//
// UISceneActivationConditions.h
// UIKit
//
// Copyright © 2019 Apple Inc. All rights reserved.
//
#import <Foundation/Foundation.h>
#import <UIKit/UIKitDefines.h>
NS_ASSUME_NONNULL_BEGIN
UIKIT_EXTERN API_AVAILABLE(ios(13.0)) @interface UISceneActivationConditions : NSObject <NSSecureCoding>
- (instancetype)init NS_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder *)aDecoder NS_DESIGNATED_INITIALIZER;
/* A scene "can" activate for target content if it is possible for that content to be displayed in that scene. A scene "prefers" to activate for content if it is a better scene session to activate for that content than any scene that only "can" view it.
These predicates are used to match against targetContentIdentifier NSStrings wherever they occur, for example in UNNotificationContent, as well as against NSString text of URL links the user may tap if the application handles URLs.
Predicates used here must not be block or selector based, must not use the 'MATCHES' operator, and may not access any keypaths other than 'self' off of the NSString they're evaluated against.
*/
@property(nonatomic, copy) NSPredicate *canActivateForTargetContentIdentifierPredicate; // default is [NSPredicate predicateWithValue:YES]
@property(nonatomic, copy) NSPredicate *prefersToActivateForTargetContentIdentifierPredicate; // default is [NSPredicate predicateWithValue:NO];
@end
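// Illustrative sketch (not part of this header): configuring the predicates on
// a scene. The windowScene.activationConditions assignment below assumes the
// corresponding UIWindowScene property; only the two predicate properties and
// their keypath restrictions come from the comment above.
//
//   UISceneActivationConditions *conditions = [[UISceneActivationConditions alloc] init];
//   conditions.canActivateForTargetContentIdentifierPredicate =
//       [NSPredicate predicateWithFormat:@"self BEGINSWITH 'myapp://accounts/'"];
//   conditions.prefersToActivateForTargetContentIdentifierPredicate =
//       [NSPredicate predicateWithFormat:@"self == 'myapp://accounts/primary'"];
//   windowScene.activationConditions = conditions;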
UIKIT_EXTERN @interface NSUserActivity (UISceneActivationConditions)
// used with UISceneActivationConditions to customize what scene should be activated for a user activity
@property (nullable, nonatomic, copy) NSString *targetContentIdentifier API_AVAILABLE(ios(13.0)); // default nil
@end
NS_ASSUME_NONNULL_END
#else
#import <UIKitCore/UISceneActivationConditions.h>
#endif
| 598 |
3,765 | /*
* BSD-style license; for more info see http://pmd.sourceforge.net/license.html
*/
package net.sourceforge.pmd.lang.plsql.ast;
import net.sourceforge.pmd.annotation.InternalApi;
public class ASTQueryBlock extends AbstractSelectStatement {
@Deprecated
@InternalApi
public ASTQueryBlock(int id) {
super(id);
}
@Deprecated
@InternalApi
public ASTQueryBlock(PLSQLParser p, int id) {
super(p, id);
}
@Override
public Object jjtAccept(PLSQLParserVisitor visitor, Object data) {
return visitor.visit(this, data);
}
}
| 227 |
475 | // Copyright (c) 2015-2016 <NAME>
// License: Academic Free License ("AFL") v. 3.0
// AFL License page: http://opensource.org/licenses/AFL-3.0
// http://vittorioromeo.info | <EMAIL>
#pragma once
#include <ecst/config.hpp>
#include <ecst/aliases.hpp>
#include <ecst/thread_pool.hpp>
#include <ecst/mp.hpp>
#include <ecst/signature_list.hpp>
#include <ecst/settings.hpp>
#include <ecst/context/bitset.hpp>
#include <ecst/context/scheduler.hpp>
#include <ecst/context/system.hpp>
#include <ecst/context/storage.hpp>
ECST_CONTEXT_NAMESPACE
{
namespace impl
{
template <typename TSettings>
class data;
namespace defer
{
using handle = ecst::context::entity::impl::handle;
template <typename TSettings>
class refresh_state;
template <typename TSettings>
class proxy
{
private:
using settings_type = TSettings;
using context_type = data<settings_type>;
using refresh_state_type = refresh_state<settings_type>;
context_type& _context;
refresh_state_type& _refresh_state;
template <typename T>
using component_from_tag = tag::component::unwrap<T>;
template <typename T>
using system_from_tag = tag::system::unwrap<T>;
protected:
auto& context() noexcept;
public:
proxy(context_type&, refresh_state_type&) noexcept;
proxy(const proxy&) = delete;
proxy& operator=(const proxy&) = delete;
proxy(proxy&&) = delete;
proxy& operator=(proxy&&) = delete;
entity_id create_entity();
handle create_handle(entity_id) noexcept;
handle create_entity_and_handle();
auto valid_handle(const handle& h) const noexcept;
auto access(const handle&) const noexcept;
auto alive(entity_id) const noexcept;
auto alive(const handle&) const noexcept;
void kill_entity(entity_id) noexcept;
template <typename TComponentTag>
decltype(auto) add_component(TComponentTag, entity_id);
template <typename TComponentTag>
decltype(auto) get_component(TComponentTag, entity_id);
template <typename TComponentTag>
auto has_component(TComponentTag, entity_id) const noexcept;
template <typename TComponentTag>
void remove_component(TComponentTag, entity_id) noexcept;
template <typename TSystemTag>
auto& instance(TSystemTag) noexcept;
template <typename TSystemTag>
auto& system(TSystemTag) noexcept;
template <typename TSystemTag, typename TF>
decltype(auto) for_system_outputs(TSystemTag, TF&& f);
template <typename TSystemTag>
auto is_in_system(TSystemTag, entity_id) const noexcept;
template <typename TSystemTag>
auto count_entities_in(TSystemTag) const noexcept;
template <typename TSystemTag>
auto any_entity_in(TSystemTag) const noexcept;
};
}
}
}
ECST_CONTEXT_NAMESPACE_END
| 1,674 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#ifndef _BASIC_TESTTOOL_HXX_
#define _BASIC_TESTTOOL_HXX_
#include <svl/smplhint.hxx>
#include <tools/string.hxx>
#define TESTTOOL_DEFAULT_PORT 12479
#define UNO_DEFAULT_PORT 12480
#define DEFAULT_HOST "localhost"
#define TT_SIGNATURE_FOR_UNICODE_TEXTFILES "'encoding UTF-8 Do not remove or change this line!"
#define ASSERTION_STACK_PREFIX "Backtrace:"
// #94145# Due to a tab in TT_SIGNATURE_FOR_UNICODE_TEXTFILES which is changed to blanks by some editors
// this routine became necessary
sal_Bool IsTTSignatureForUnicodeTextfile( String aLine );
//#include "testapp.hxx"
#define ADD_ERROR_QUIET(nNr, aStr) \
{ \
ErrorEntry *pErr; \
if ( BasicRuntimeAccess::HasRuntime() ) \
{ \
BasicRuntime aRun = BasicRuntimeAccess::GetRuntime(); \
xub_StrLen aErrLn = StarBASIC::GetErl(); \
if ( 0 == aErrLn ) \
aErrLn = aRun.GetLine(); \
pErr = new ErrorEntry(nNr, aStr, \
aErrLn, aRun.GetCol1(), aRun.GetCol2()); \
} \
else \
{ \
pErr = new ErrorEntry(nNr, aStr); \
} \
P_FEHLERLISTE->C40_INSERT(ErrorEntry, pErr, P_FEHLERLISTE->Count());\
}
// ??? At some point do something with the UID here !!
#define ADD_ERROR(nNr, aStr) { \
if ( !SbxBase::IsError() ) \
SbxBase::SetError( nNr ); \
ADD_ERROR_QUIET(nNr, aStr); \
}
#define POP_ERROR() P_FEHLERLISTE->DeleteAndDestroy(0)
#define GET_ERROR() P_FEHLERLISTE->GetObject(0)
#define IS_ERROR() ( P_FEHLERLISTE->Count() > 0 )
// Transmission of error logs
enum TTLogType { LOG_RUN, LOG_TEST_CASE, LOG_ERROR, LOG_CALL_STACK, LOG_MESSAGE, LOG_WARNING, LOG_ASSERTION, LOG_QA_ERROR, LOG_ASSERTION_STACK };
struct TTDebugData
{
public:
TTLogType aLogType;
String aMsg;
String aFilename;
xub_StrLen nLine;
xub_StrLen nCol1;
xub_StrLen nCol2;
};
struct TTLogMsg
{
public:
String aLogFileName;
TTDebugData aDebugData;
};
// For transmission of window information from the Testapp
struct WinInfoRec
{
public:
String aUId;
String aKurzname;
String aSlotname;
String aLangname;
sal_uInt16 nRType;
String aRName;
sal_Bool bIsReset;
};
// Defines for syntax Highlighting
#define TT_KEYWORD ((SbTextType)100) // Including locally executed commands like 'use' ...
#define TT_REMOTECMD ((SbTextType)101) // Remotely executed commands like 'nodebug'
#define TT_LOCALCMD ((SbTextType)102) // Locally executed commands like 'use'
#define TT_CONTROL ((SbTextType)103) // Possibly available control loaded by 'use'
#define TT_SLOT ((SbTextType)104) // Available Slots loaded by 'use'
#define TT_METHOD ((SbTextType)105) // Possibly allowed Method for controls
#define TT_NOMETHOD ((SbTextType)106) // No Possibly allowed Method for controls
#define FILELIST1 ((SbTextType)111) // Symbols in file 1
#define FILELIST2 ((SbTextType)112) // Symbols in file 2
#define FILELIST3 ((SbTextType)113) // Symbols in file 3
#define FILELIST4 ((SbTextType)114) // Symbols in file 4
/// defines for hints from TestToolObj to the Application
#define SBX_HINT_LANGUAGE_EXTENSION_LOADED SFX_HINT_USER06
#define SBX_HINT_EXECUTION_STATUS_INFORMATION SFX_HINT_USER07
#define TT_EXECUTION_ENTERWAIT 0x01
#define TT_EXECUTION_LEAVEWAIT 0x02
#define TT_EXECUTION_SHOW_ACTION 0x03
#define TT_EXECUTION_HIDE_ACTION 0x04
class TTExecutionStatusHint : public SfxSimpleHint
{
private:
sal_uInt16 mnType;
String maExecutionStatus;
String maAdditionalExecutionStatus;
public:
TYPEINFO();
TTExecutionStatusHint( sal_uInt16 nType, sal_Char *pExecutionStatus, const sal_Char *pAdditionalExecutionStatus = "" )
: SfxSimpleHint(SBX_HINT_EXECUTION_STATUS_INFORMATION)
, mnType( nType )
, maExecutionStatus( pExecutionStatus, RTL_TEXTENCODING_ASCII_US )
, maAdditionalExecutionStatus( pAdditionalExecutionStatus, RTL_TEXTENCODING_ASCII_US )
{;}
TTExecutionStatusHint( sal_uInt16 nType, const String &aExecutionStatus = String(), const String &aAdditionalExecutionStatus = String() )
: SfxSimpleHint(SBX_HINT_EXECUTION_STATUS_INFORMATION)
, mnType( nType )
, maExecutionStatus( aExecutionStatus )
, maAdditionalExecutionStatus( aAdditionalExecutionStatus )
{;}
const String& GetExecutionStatus() const { return maExecutionStatus; }
const String& GetAdditionalExecutionStatus() const { return maAdditionalExecutionStatus; }
sal_uInt16 GetType(){ return mnType; }
};
#endif // _BASIC_TESTTOOL_HXX_
| 2,111 |
1,093 | <gh_stars>1000+
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.integration.mail.config;
import java.util.Properties;
import javax.mail.Authenticator;
import javax.mail.Session;
import javax.mail.URLName;
import javax.mail.internet.MimeMessage;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.config.AbstractFactoryBean;
import org.springframework.expression.Expression;
import org.springframework.integration.mail.AbstractMailReceiver;
import org.springframework.integration.mail.ImapMailReceiver;
import org.springframework.integration.mail.MailReceiver;
import org.springframework.integration.mail.Pop3MailReceiver;
import org.springframework.integration.mail.SearchTermStrategy;
import org.springframework.integration.mapping.HeaderMapper;
import org.springframework.lang.Nullable;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
/**
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
*
* @since 1.0.3
*/
public class MailReceiverFactoryBean extends AbstractFactoryBean<MailReceiver> {
private String storeUri;
private String protocol;
private Session session;
private MailReceiver receiver;
private Properties javaMailProperties;
private Authenticator authenticator;
/**
* Indicates whether retrieved messages should be deleted from the server.
* This value will be <code>null</code> <i>unless</i> explicitly configured.
*/
private Boolean shouldDeleteMessages = null;
private Boolean shouldMarkMessagesAsRead = null;
private int maxFetchSize = 1;
private Expression selectorExpression;
private SearchTermStrategy searchTermStrategy;
private String userFlag;
private HeaderMapper<MimeMessage> headerMapper;
private Boolean embeddedPartsAsBytes;
private Boolean simpleContent;
private Boolean autoCloseFolder;
public void setStoreUri(@Nullable String storeUri) {
this.storeUri = storeUri;
}
public void setProtocol(@Nullable String protocol) {
this.protocol = protocol;
}
public void setSession(@Nullable Session session) {
this.session = session;
}
public void setJavaMailProperties(@Nullable Properties javaMailProperties) {
this.javaMailProperties = javaMailProperties;
}
public void setAuthenticator(@Nullable Authenticator authenticator) {
this.authenticator = authenticator;
}
public void setShouldDeleteMessages(@Nullable Boolean shouldDeleteMessages) {
this.shouldDeleteMessages = shouldDeleteMessages;
}
public void setShouldMarkMessagesAsRead(@Nullable Boolean shouldMarkMessagesAsRead) {
this.shouldMarkMessagesAsRead = shouldMarkMessagesAsRead;
}
public Boolean isShouldMarkMessagesAsRead() {
return this.shouldMarkMessagesAsRead != null && this.shouldMarkMessagesAsRead;
}
public void setMaxFetchSize(int maxFetchSize) {
this.maxFetchSize = maxFetchSize;
}
public void setSelectorExpression(@Nullable Expression selectorExpression) {
this.selectorExpression = selectorExpression;
}
public void setSearchTermStrategy(@Nullable SearchTermStrategy searchTermStrategy) {
this.searchTermStrategy = searchTermStrategy;
}
public void setUserFlag(@Nullable String userFlag) {
this.userFlag = userFlag;
}
public void setHeaderMapper(@Nullable HeaderMapper<MimeMessage> headerMapper) {
this.headerMapper = headerMapper;
}
public void setEmbeddedPartsAsBytes(@Nullable Boolean embeddedPartsAsBytes) {
this.embeddedPartsAsBytes = embeddedPartsAsBytes;
}
public void setSimpleContent(@Nullable Boolean simpleContent) {
this.simpleContent = simpleContent;
}
public void setAutoCloseFolder(@Nullable Boolean autoCloseFolder) {
this.autoCloseFolder = autoCloseFolder;
}
@Override
protected MailReceiver createInstance() {
if (this.receiver == null) {
this.receiver = this.createReceiver();
}
return this.receiver;
}
@Override
public Class<?> getObjectType() {
return (this.receiver != null) ? this.receiver.getClass() : MailReceiver.class;
}
private MailReceiver createReceiver() { // NOSONAR
verifyProtocol();
boolean isPop3 = this.protocol.toLowerCase().startsWith("pop3");
boolean isImap = this.protocol.toLowerCase().startsWith("imap");
Assert.isTrue(isPop3 || isImap, "the store URI must begin with 'pop3' or 'imap'");
AbstractMailReceiver mailReceiver = isPop3
? new Pop3MailReceiver(this.storeUri)
: new ImapMailReceiver(this.storeUri);
if (this.session != null) {
Assert.isNull(this.javaMailProperties,
"JavaMail Properties are not allowed when a Session has been provided.");
Assert.isNull(this.authenticator,
"A JavaMail Authenticator is not allowed when a Session has been provided.");
mailReceiver.setSession(this.session);
}
if (this.searchTermStrategy != null) {
Assert.isTrue(isImap, "searchTermStrategy is only allowed with imap");
((ImapMailReceiver) mailReceiver).setSearchTermStrategy(this.searchTermStrategy);
}
if (this.javaMailProperties != null) {
mailReceiver.setJavaMailProperties(this.javaMailProperties);
}
if (this.authenticator != null) {
mailReceiver.setJavaMailAuthenticator(this.authenticator);
}
if (this.shouldDeleteMessages != null) {
// always set the value if configured explicitly
// otherwise, the default is true for POP3 but false for IMAP
mailReceiver.setShouldDeleteMessages(this.shouldDeleteMessages);
}
mailReceiver.setMaxFetchSize(this.maxFetchSize);
mailReceiver.setSelectorExpression(this.selectorExpression);
if (StringUtils.hasText(this.userFlag)) {
mailReceiver.setUserFlag(this.userFlag);
}
if (isPop3) {
if (isShouldMarkMessagesAsRead()) {
this.logger.warn("Setting 'should-mark-messages-as-read' to 'true' while using POP3 has no effect");
}
}
else {
((ImapMailReceiver) mailReceiver).setShouldMarkMessagesAsRead(this.shouldMarkMessagesAsRead);
}
BeanFactory beanFactory = getBeanFactory();
if (beanFactory != null) {
mailReceiver.setBeanFactory(beanFactory);
}
if (this.headerMapper != null) {
mailReceiver.setHeaderMapper(this.headerMapper);
}
if (this.embeddedPartsAsBytes != null) {
mailReceiver.setEmbeddedPartsAsBytes(this.embeddedPartsAsBytes);
}
if (this.simpleContent != null) {
mailReceiver.setSimpleContent(this.simpleContent);
}
if (this.autoCloseFolder != null) {
mailReceiver.setAutoCloseFolder(this.autoCloseFolder);
}
mailReceiver.afterPropertiesSet();
return mailReceiver;
}
private void verifyProtocol() {
if (StringUtils.hasText(this.storeUri)) {
URLName urlName = new URLName(this.storeUri);
if (this.protocol == null) {
this.protocol = urlName.getProtocol();
}
else {
Assert.isTrue(this.protocol.equals(urlName.getProtocol()),
"The provided 'protocol' does not match that in the 'storeUri'.");
}
}
else {
Assert.hasText(this.protocol, "Either the 'storeUri' or 'protocol' is required.");
}
Assert.hasText(this.protocol, "Unable to resolve protocol.");
}
@Override
public void destroy() {
if (this.receiver != null && this.receiver instanceof DisposableBean) {
try {
((DisposableBean) this.receiver).destroy();
}
catch (Exception e) {
throw new IllegalStateException(e);
}
}
}
}
| 2,590 |
348 | {"nom":"Simacourbe","circ":"3ème circonscription","dpt":"Pyrénées-Atlantiques","inscrits":307,"abs":154,"votants":153,"blancs":6,"nuls":17,"exp":130,"res":[{"nuance":"SOC","nom":"<NAME>","voix":74},{"nuance":"REM","nom":"<NAME>","voix":56}]} | 97 |
890 | from __future__ import absolute_import, unicode_literals
from django.conf import settings
try:
from django.test.runner import DiscoverRunner
except ImportError:
from django.test.simple import DjangoTestSuiteRunner as DiscoverRunner
from celery import current_app
from celery.task import Task
from djcelery.backends.database import DatabaseBackend
USAGE = """\
Custom test runner to allow testing of celery delayed tasks.
"""
def _set_eager():
settings.CELERY_ALWAYS_EAGER = True
current_app.conf.CELERY_ALWAYS_EAGER = True
settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True # Issue #75
current_app.conf.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
class CeleryTestSuiteRunner(DiscoverRunner):
"""Django test runner allowing testing of celery delayed tasks.
All tasks are run locally, not in a worker.
To use this runner set ``settings.TEST_RUNNER``::
TEST_RUNNER = 'djcelery.contrib.test_runner.CeleryTestSuiteRunner'
"""
def setup_test_environment(self, **kwargs):
_set_eager()
super(CeleryTestSuiteRunner, self).setup_test_environment(**kwargs)
class CeleryTestSuiteRunnerStoringResult(DiscoverRunner):
"""Django test runner allowing testing of celery delayed tasks,
and storing the results of those tasks in ``TaskMeta``.
Requires setting CELERY_RESULT_BACKEND = 'database'.
USAGE:
In ``settings.py``::
TEST_RUNNER = '''
djcelery.contrib.test_runner.CeleryTestSuiteRunnerStoringResult
'''.strip()
"""
def setup_test_environment(self, **kwargs):
# Monkey-patch Task.on_success() method
def on_success_patched(self, retval, task_id, args, kwargs):
app = current_app._get_current_object()
DatabaseBackend(app=app).store_result(task_id, retval, 'SUCCESS')
Task.on_success = classmethod(on_success_patched)
super(CeleryTestSuiteRunnerStoringResult, self).setup_test_environment(
**kwargs
)
settings.CELERY_RESULT_BACKEND = 'database'
_set_eager()
| 786 |
348 | {"nom":"Saivres","circ":"2ème circonscription","dpt":"Deux-Sèvres","inscrits":1055,"abs":506,"votants":549,"blancs":28,"nuls":19,"exp":502,"res":[{"nuance":"SOC","nom":"<NAME>","voix":303},{"nuance":"REM","nom":"<NAME>","voix":199}]} | 95 |
392 | /*******************************************************************************
* Copyright (c) 2015
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package jsettlers.common.position;
import java.util.Iterator;
/**
* Iterates over an array of relative positions using a reference point.
*
* @author <NAME>
*
*/
public class RelativeToRealPointIterable implements Iterable<ShortPoint2D> {
private final RelativePoint[] relativePositions;
private final ShortPoint2D relationPosition;
/**
* Create a new {@link RelativeToRealPointIterable}.
*
* @param relativePositions
* The positions to iterate over
* @param relationPosition
* The reference point to use.
*/
public RelativeToRealPointIterable(RelativePoint[] relativePositions, ShortPoint2D relationPosition) {
this.relativePositions = relativePositions;
this.relationPosition = relationPosition;
}
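	// Example (illustrative; `footprint` and `referencePoint` are placeholder names): iterate the
	// absolute map positions covered by a relative footprint anchored at a reference point:
	//   for (ShortPoint2D p : new RelativeToRealPointIterable(footprint, referencePoint)) { ... }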
@Override
public Iterator<ShortPoint2D> iterator() {
return new Iterator<ShortPoint2D>() {
private int index = 0;
@Override
public boolean hasNext() {
return index < relativePositions.length;
}
@Override
public ShortPoint2D next() {
return relativePositions[index++].calculatePoint(relationPosition);
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
}
| 666 |
1,531 | /**
* Copyright 2012-2015 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.cqengine.query.option;
/**
* Allows the app to disable argument validation in CQEngine via a QueryOption, to improve performance.
* This should only be done when application developers are sure that their code has no bugs!
*
* @author niall.gallagher
*/
public enum ArgumentValidationStrategy {
VALIDATE,
SKIP
}
| 251 |
797 | /*
The MIT License (MIT)
Copyright (c) 2015 <NAME> and Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.intellij.lang.jsgraphql.types.schema.idl;
import com.intellij.lang.jsgraphql.types.Internal;
import com.intellij.lang.jsgraphql.types.language.*;
import com.intellij.lang.jsgraphql.types.schema.GraphQLType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Objects;
import java.util.Stack;
import static com.intellij.lang.jsgraphql.types.Assert.assertNotNull;
import static com.intellij.lang.jsgraphql.types.schema.GraphQLList.list;
import static com.intellij.lang.jsgraphql.types.schema.GraphQLNonNull.nonNull;
/**
* This helper gives you access to the type info given a type definition
*/
@SuppressWarnings("rawtypes")
@Internal
public class TypeInfo {
public static TypeInfo typeInfo(@NotNull Type type) {
return new TypeInfo(type);
}
private final @NotNull Type rawType;
private final @NotNull TypeName typeName;
private final @NotNull Stack<Class<?>> decoration = new Stack<>();
private TypeInfo(@NotNull Type type) {
this.rawType = assertNotNull(type, () -> "type must not be null");
while (!(type instanceof TypeName)) {
if (type instanceof NonNullType) {
decoration.push(NonNullType.class);
type = ((NonNullType) type).getType();
}
if (type instanceof ListType) {
decoration.push(ListType.class);
type = ((ListType) type).getType();
}
}
this.typeName = (TypeName) type;
}
public @NotNull Type getRawType() {
return rawType;
}
public @NotNull TypeName getTypeName() {
return typeName;
}
public @NotNull String getName() {
return typeName.getName();
}
public boolean isList() {
return rawType instanceof ListType;
}
public boolean isNonNull() {
return rawType instanceof NonNullType;
}
public boolean isPlain() {
return !isList() && !isNonNull();
}
/**
* This will rename the type with the specified new name but will preserve the wrapping that was present
*
* @param newName the new name of the type
* @return a new type info rebuilt with the new name
*/
public TypeInfo renameAs(String newName) {
Type out = TypeName.newTypeName(newName).build();
Stack<Class<?>> wrappingStack = new Stack<>();
wrappingStack.addAll(this.decoration);
while (!wrappingStack.isEmpty()) {
Class<?> clazz = wrappingStack.pop();
if (clazz.equals(NonNullType.class)) {
out = NonNullType.newNonNullType(out).build();
}
if (clazz.equals(ListType.class)) {
out = ListType.newListType(out).build();
}
}
return typeInfo(out);
}
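    // Example (illustrative): a TypeInfo built from the AST type [String!] and renamed via renameAs("ID")
    // yields the AST type [ID!] -- the list/non-null wrapping is preserved, only the name changes.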
/**
* This will decorate a graphql type with the original hierarchy of non null and list'ness
* it originally contained in its definition type
*
* @param objectType this should be a graphql type that was originally built from this raw type
* @param <T> the type
* @return the decorated type
*/
@SuppressWarnings("TypeParameterUnusedInFormals")
public <T extends GraphQLType> @Nullable T decorate(GraphQLType objectType) {
if (objectType == null) return null;
GraphQLType out = objectType;
Stack<Class<?>> wrappingStack = new Stack<>();
wrappingStack.addAll(this.decoration);
while (!wrappingStack.isEmpty()) {
Class<?> clazz = wrappingStack.pop();
if (clazz.equals(NonNullType.class)) {
out = nonNull(out);
}
if (clazz.equals(ListType.class)) {
out = list(out);
}
}
// we handle both input and output graphql types
//noinspection unchecked
return (T) out;
}
public static String getAstDesc(Type type) {
return AstPrinter.printAst(type);
}
public TypeInfo unwrapOne() {
if (rawType instanceof NonNullType) {
return typeInfo(((NonNullType) rawType).getType());
}
if (rawType instanceof ListType) {
return typeInfo(((ListType) rawType).getType());
}
return this;
}
public Type unwrapOneType() {
return unwrapOne().getRawType();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
TypeInfo typeInfo = (TypeInfo) o;
return isNonNull() == typeInfo.isNonNull() &&
isList() == typeInfo.isList() &&
Objects.equals(typeName.getName(), typeInfo.typeName.getName());
}
@Override
public int hashCode() {
int result = 1;
result = 31 * result + Objects.hashCode(typeName.getName());
result = 31 * result + Boolean.hashCode(isNonNull());
result = 31 * result + Boolean.hashCode(isList());
return result;
}
@Override
public String toString() {
return "TypeInfo{" +
getAstDesc(rawType) +
'}';
}
}
| 2,471 |
7,272 | package com.kunal;
import java.util.*;
public class InBuiltExamples {
public static void main(String[] args) {
Stack<Integer> stack = new Stack<>();
stack.push(34);
stack.push(45);
stack.push(2);
stack.push(9);
stack.push(18);
System.out.println(stack.pop());
System.out.println(stack.pop());
System.out.println(stack.pop());
System.out.println(stack.pop());
System.out.println(stack.pop());
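        // note: only five elements were pushed, so this sixth pop() throws java.util.EmptyStackException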
System.out.println(stack.pop());
Queue<Integer> queue = new LinkedList<>();
queue.add(3);
queue.add(6);
queue.add(5);
queue.add(19);
queue.add(1);
System.out.println(queue.remove());
System.out.println(queue.remove());
Deque<Integer> deque = new ArrayDeque<>();
deque.add(89);
deque.addLast(78);
deque.removeFirst();
// etc etc
}
}
| 448 |
3,861 | /* grub.c - The elastic binding between grub and standalone EFI */
/*
* Copyright © 2014 <NAME> <<EMAIL>>
* Based on GRUB, glibc and additional software:
* Copyright © 2001-2014 Free Software Foundation, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <grub/err.h>
#include <grub/misc.h>
#include "driver.h"
void
grub_exit(void)
{
ST->BootServices->Exit(EfiImageHandle, EFI_SUCCESS, 0, NULL);
for (;;) ;
}
/* Screen I/O */
int grub_term_inputs = 0;
void
grub_refresh(void) { }
int
grub_getkey(void)
{
EFI_INPUT_KEY Key;
while (ST->ConIn->ReadKeyStroke(ST->ConIn, &Key) == EFI_NOT_READY);
return (int) Key.UnicodeChar;
}
static void
grub_xputs_dumb(const char *str)
{
APrint((CHAR8 *)str);
}
void (*grub_xputs)(const char *str) = grub_xputs_dumb;
/* Read an EFI shell variable */
const char *
grub_env_get(const char *var)
{
EFI_STATUS Status;
CHAR16 Var[64], Val[128];
UINTN ValSize = sizeof(Val);
static char val[128] = { 0 };
Status = Utf8ToUtf16NoAlloc((CHAR8 *) var, Var, ARRAYSIZE(Var));
if (EFI_ERROR(Status)) {
PrintStatusError(Status, L"Could not convert variable name to UTF-16");
return NULL;
}
Status = RT->GetVariable(Var, &ShellVariable, NULL, &ValSize, Val);
if (EFI_ERROR(Status))
return NULL;
Status = Utf16ToUtf8NoAlloc(Val, val, sizeof(val));
if (EFI_ERROR(Status)) {
PrintStatusError(Status, L"Could not convert value '%s' to UTF-8", Val);
return NULL;
}
return val;
}
/* Memory management
* NB: We must keep track of the size allocated for grub_realloc
*/
void *
grub_malloc(grub_size_t size)
{
grub_size_t *ptr;
ptr = (grub_size_t *) AllocatePool(size + sizeof(grub_size_t));
if (ptr != NULL)
*ptr++ = size;
return (void *) ptr;
}
void *
grub_zalloc(grub_size_t size)
{
grub_size_t *ptr;
ptr = (grub_size_t *) AllocateZeroPool(size + sizeof(grub_size_t));
if (ptr != NULL)
*ptr++ = size;
return (void *) ptr;
}
void
grub_free(void *p)
{
grub_size_t *ptr = (grub_size_t *) p;
if (ptr != NULL) {
ptr = &ptr[-1];
if (ptr != NULL)
{
FreePool(ptr);
ptr = NULL;
}
}
}
void *
grub_realloc(void *p, grub_size_t new_size)
{
grub_size_t *ptr = (grub_size_t *) p;
if (ptr != NULL) {
ptr = &ptr[-1];
ptr = ReallocatePool(*ptr, new_size + sizeof(grub_size_t), ptr);
if (ptr != NULL)
*ptr++ = new_size;
}
return ptr;
}
/* Convert a grub_err_t to EFI_STATUS */
EFI_STATUS
GrubErrToEFIStatus(grub_err_t err)
{
// The following are defined in EFI but unused here:
// EFI_BAD_BUFFER_SIZE
// EFI_WRITE_PROTECTED
// EFI_VOLUME_FULL
// EFI_MEDIA_CHANGED
// EFI_NO_MEDIA
// EFI_NOT_STARTED
// EFI_ALREADY_STARTED
// EFI_ABORTED
// EFI_END_OF_MEDIA
// EFI_NO_RESPONSE
// EFI_PROTOCOL_ERROR
// EFI_INCOMPATIBLE_VERSION
if ((grub_errno != 0) && (LogLevel > FS_LOGLEVEL_INFO))
/* NB: Calling grub_print_error() will reset grub_errno */
grub_print_error();
switch (err) {
case GRUB_ERR_NONE:
return EFI_SUCCESS;
case GRUB_ERR_BAD_MODULE:
return EFI_LOAD_ERROR;
case GRUB_ERR_OUT_OF_RANGE:
return EFI_BUFFER_TOO_SMALL;
case GRUB_ERR_OUT_OF_MEMORY:
case GRUB_ERR_SYMLINK_LOOP:
return EFI_OUT_OF_RESOURCES;
case GRUB_ERR_BAD_FILE_TYPE:
return EFI_NO_MAPPING;
case GRUB_ERR_FILE_NOT_FOUND:
case GRUB_ERR_UNKNOWN_DEVICE:
case GRUB_ERR_UNKNOWN_FS:
return EFI_NOT_FOUND;
case GRUB_ERR_FILE_READ_ERROR:
case GRUB_ERR_BAD_DEVICE:
case GRUB_ERR_READ_ERROR:
case GRUB_ERR_WRITE_ERROR:
case GRUB_ERR_IO:
return EFI_DEVICE_ERROR;
case GRUB_ERR_BAD_PART_TABLE:
case GRUB_ERR_BAD_FS:
return EFI_VOLUME_CORRUPTED;
case GRUB_ERR_BAD_FILENAME:
case GRUB_ERR_BAD_ARGUMENT:
case GRUB_ERR_BAD_NUMBER:
case GRUB_ERR_UNKNOWN_COMMAND:
case GRUB_ERR_INVALID_COMMAND:
return EFI_INVALID_PARAMETER;
case GRUB_ERR_NOT_IMPLEMENTED_YET:
return EFI_UNSUPPORTED;
case GRUB_ERR_TIMEOUT:
return EFI_TIMEOUT;
case GRUB_ERR_ACCESS_DENIED:
return EFI_ACCESS_DENIED;
case GRUB_ERR_WAIT:
return EFI_NOT_READY;
case GRUB_ERR_EXTRACTOR:
case GRUB_ERR_BAD_COMPRESSED_DATA:
return EFI_CRC_ERROR;
case GRUB_ERR_EOF:
return EFI_END_OF_FILE;
case GRUB_ERR_BAD_SIGNATURE:
return EFI_SECURITY_VIOLATION;
default:
return EFI_NOT_FOUND;
}
}
/* The following is adapted from glibc's (offtime.c, etc.)
*/
/* How many days come before each month (0-12). */
static const unsigned short int __mon_yday[2][13] = {
/* Normal years. */
{ 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365 },
/* Leap years. */
{ 0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366 }
};
/* Nonzero if YEAR is a leap year (every 4 years,
except every 100th isn't, and every 400th is). */
#define __isleap(year) \
((year) % 4 == 0 && ((year) % 100 != 0 || (year) % 400 == 0))
#define SECS_PER_HOUR (60 * 60)
#define SECS_PER_DAY (SECS_PER_HOUR * 24)
#define DIV(a, b) ((a) / (b) - ((a) % (b) < 0))
#define LEAPS_THRU_END_OF(y) (DIV (y, 4) - DIV (y, 100) + DIV (y, 400))
/* Compute an EFI_TIME representation of a GRUB's mtime_t */
VOID
GrubTimeToEfiTime(const INT32 t, EFI_TIME *tp)
{
INT32 days, rem, y;
const unsigned short int *ip;
days = t / SECS_PER_DAY;
rem = t % SECS_PER_DAY;
while (rem < 0) {
rem += SECS_PER_DAY;
--days;
}
while (rem >= SECS_PER_DAY) {
rem -= SECS_PER_DAY;
++days;
}
tp->Hour = rem / SECS_PER_HOUR;
rem %= SECS_PER_HOUR;
tp->Minute = rem / 60;
tp->Second = rem % 60;
y = 1970;
while (days < 0 || days >= (__isleap (y) ? 366 : 365)) {
/* Guess a corrected year, assuming 365 days per year. */
INT32 yg = y + days / 365 - (days % 365 < 0);
/* Adjust DAYS and Y to match the guessed year. */
days -= ((yg - y) * 365
+ LEAPS_THRU_END_OF (yg - 1)
- LEAPS_THRU_END_OF (y - 1));
y = yg;
}
tp->Year = y;
ip = __mon_yday[__isleap(y)];
for (y = 11; days < (long int) ip[y]; --y)
continue;
days -= ip[y];
tp->Month = y + 1;
tp->Day = days + 1;
}
| 2,888 |
785 | /*
* Copyright © 2021 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.concurrent.api;
import javax.annotation.Nullable;
/**
* A functional interface that accepts 4 arguments and generates a return value.
* @param <T1> The type of the first argument.
* @param <T2> The type of the second argument.
* @param <T3> The type of the third argument.
* @param <T4> The type of the fourth argument.
* @param <R> The result of the function.
*/
@FunctionalInterface
public interface Function4<T1, T2, T3, T4, R> {
/**
* Applies the function to the given arguments.
* @param t1 the first value.
* @param t2 the second value.
* @param t3 the third value.
* @param t4 the fourth value.
* @return the result value.
*/
@Nullable
R apply(@Nullable T1 t1, @Nullable T2 t2, @Nullable T3 t3, @Nullable T4 t4);
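    // Example (illustrative): Function4<String, String, String, String, String> join =
    //         (a, b, c, d) -> String.join("-", a, b, c, d);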
}
| 446 |
2,298 | # -*- coding: utf-8 -*-
from pandas_ta.overlap import sma
from pandas_ta.utils import get_offset, verify_series
# - Standard definition of your custom indicator function (including docs)-
def ni(close, length=None, centered=False, offset=None, **kwargs):
"""
Example indicator ni
"""
# Validate Arguments
length = int(length) if length and length > 0 else 20
close = verify_series(close, length)
offset = get_offset(offset)
if close is None: return
# Calculate Result
t = int(0.5 * length) + 1
ma = sma(close, length)
ni = close - ma.shift(t)
if centered:
ni = (close.shift(t) - ma).shift(-t)
# Offset
if offset != 0:
ni = ni.shift(offset)
# Handle fills
if "fillna" in kwargs:
ni.fillna(kwargs["fillna"], inplace=True)
if "fill_method" in kwargs:
ni.fillna(method=kwargs["fill_method"], inplace=True)
# Name and Categorize it
ni.name = f"ni_{length}"
ni.category = "trend"
return ni
ni.__doc__ = \
"""Example indicator (NI)
Is an indicator provided solely as an example
Sources:
https://github.com/twopirllc/pandas-ta/issues/264
Calculation:
Default Inputs:
length=20, centered=False
SMA = Simple Moving Average
    t = int(0.5 * length) + 1
    ni = close - SMA(close, length).shift(t)
    if centered:
        ni = (close.shift(t) - SMA(close, length)).shift(-t)
Args:
close (pd.Series): Series of 'close's
length (int): It's period. Default: 20
centered (bool): Shift the ni back by int(0.5 * length) + 1. Default: False
offset (int): How many periods to offset the result. Default: 0
Kwargs:
fillna (value, optional): pd.DataFrame.fillna(value)
fill_method (value, optional): Type of fill method
Returns:
pd.Series: New feature generated.
"""
# - Define a matching class method --------------------------------------------
def ni_method(self, length=None, offset=None, **kwargs):
close = self._get_column(kwargs.pop("close", "close"))
result = ni(close=close, length=length, offset=offset, **kwargs)
return self._post_process(result, **kwargs) | 794 |
930 | package com.foxinmy.weixin4j.http;
/**
 * Request method
*
* @className HttpMethod
* @author jinyu(<EMAIL>)
 * @date May 29, 2015
* @since JDK 1.6
* @see
*/
public enum HttpMethod {
GET, HEAD, POST, PUT, PATCH, DELETE, OPTIONS, TRACE, CONNECT
}
| 116 |
310 | <gh_stars>100-1000
{
"name": "Beta 52A",
"description": "A kick drum microphone.",
"url": "http://www.shure.com/americas/products/microphones/beta/beta-52a-kick-drum-microphone"
} | 71 |
608 | /*
* Copyright (c) 2016 The ZLToolKit project authors. All Rights Reserved.
*
* This file is part of ZLToolKit(https://github.com/ZLMediaKit/ZLToolKit).
*
* Use of this source code is governed by MIT license that can be found in the
* LICENSE file in the root of the source tree. All contributing project authors
* may be found in the AUTHORS file in the root of the source tree.
*/
#ifndef Timer_h
#define Timer_h
#include <functional>
#include "EventPoller.h"
namespace toolkit {
class Timer {
public:
using Ptr = std::shared_ptr<Timer>;
/**
     * Construct a timer.
     * @param second repeat interval of the timer, in seconds
     * @param cb timer task; return true to repeat on the next interval, false to stop. If the task throws an exception, the next run is scheduled by default.
     * @param poller EventPoller object; may be nullptr
*/
Timer(float second, const std::function<bool()> &cb, const EventPoller::Ptr &poller);
~Timer();
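    // Usage sketch (illustrative): run a task every 2 seconds until the callback returns false;
    // nullptr is accepted for the poller, as noted above.
    //   Timer::Ptr timer = std::make_shared<Timer>(2.0f, []() { /* do work */ return true; }, nullptr);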
private:
std::weak_ptr<EventPoller::DelayTask> _tag;
    // the timer keeps a strong reference to the EventPoller
EventPoller::Ptr _poller;
};
} // namespace toolkit
#endif /* Timer_h */
| 488 |
358 | <reponame>deankeinan/mahi-gui
// MIT License
//
// Copyright (c) 2020 Mechatronics and Haptic Interfaces Lab - Rice University
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// Author(s): <NAME> (<EMAIL>)
#include <Mahi/Gui.hpp>
#include <Mahi/Util.hpp>
#include <deque>
#include <set>
#include <fstream>
#include <utility>
#include <algorithm>
#include <random>
using std::size_t;
using std::vector;
using std::pair;
using namespace mahi::gui;
using namespace mahi::util;
// Example which solves Puzzometry - "The Hardest Puzzle You'll Never Solve":
// https://www.puzzometry.com/
//
// This is a rather complex example that shows several features of the Shapes
// class, NanoVG, and coroutines. The majority of this problem is related to
// the solver, which you may skip over unless it interests you. The mahi-gui
// related portions start around line 500.
//==============================================================================
// SOLVER IMPLEMENTATION
//==============================================================================
// The solver formulates the puzzle as an "exact cover problem". That is, given
// a collection S of subsets of a set X, the exact cover is a subcollection S* of S
// such that each element in X is contained in exactly one subset in S*. For
// example, here the exact cover solution is S* = {B,D,F}, since when combined
// there remains exactly one 1 in every column.
// 1 2 3 4 5 6 7
// -------------------
// [A] 1 0 0 1 0 0 1
// [B] 1 0 0 1 0 0 0 *
// [C] 0 0 0 1 1 0 1
// [D] 0 0 1 0 1 1 0 *
// [E] 0 1 1 0 0 1 1
// [F] 0 1 0 0 0 0 1 *
// Read More: https://en.wikipedia.org/wiki/Exact_cover
// The best method for solving large exact cover problems is Donald Knuth's
// "Dancing Links" (DLX) approach, which works by treating the exact cover matrix as
// a doubly linked list instead of a matrix of ones and zeros, and then using his
// Algorithm X to search the list for solutions. Each list node represents a 1 in
// the exact cover matrix, and is linked to adjacent 1s in all four directions.
// The gist is that we save considerable time using doubly linked lists because we don't
// have to iterate the entire matrix and check if an element is 0 or 1.
// The original paper is definitely worth a read: https://arxiv.org/pdf/cs/0011047.pdf
//
// The most challenging part of this approach is choosing how to define and construct
// the exact cover matrix. I will try to break down my process as simply as possible.
//
// First, understand that the board and pieces are just tilings of squares and octagons.
// Rotating the board 45 degrees, we can see that all squares and octagons are equally spaced.
//
// . . . . . . . . S O . . . . . .
// . . . . . . . S O S O . . . . .
// . . . . . . S O S O S O . . . .
// . . . . . . O S O S O S . . . .
// . . . . . . S O S O S O S O . .
// . . . . O S O S O S O S O S O .
// . . S O S O S O S O S O S O S O
// . S O S O S O S O S O S O S O .
// . O S O S O S O S O S O S O . .
// O S O S O S O S O S O S O . . .
// . O S O S O S O S O S O . . . .
// . . . S O S O S O S O . . . . .
// . . . O S O S O S O . . . . . .
// . . . . O S O S O . . . . . . .
// . . . . . O S . . . . . . . . .
// . . . . . . O . . . . . . . . .
//
// Next, I assign every octagon or square a position number from 1 to N:
//
// . . . . . . . . 1 2 . . . . . .
// . . . . . . . 3 4 5 6 . . . . .
// . . . . . . 7 8 9 O S O . . . . ... and so on ...
// . . . . . . O S O S O S . . . .
// . . . . . . S O S O S O S O . .
// . . . . O S O S O S O S O S O .
// . . S O S O S O S O S O S O S O
// . S O S O S O S O S O S O S O .
// . O S O S O S O S O S O S O . .
// O S O S O S O S O S O S O . . .
// . O S O S O S O S O S O . . . .
// . . . S O S O S O S O . . . . .
// . . . O S O S O S O . . . . . .
// . . . . O S O S O . . . . . . .
// . . . . . O S . . . . . . . . .
// . . . . . . N . . . . . . . . . ... to N positions.
// Position numbers form the header of our exact cover matrix, and each row in the
// exact cover matrix represents a possible piece placement. For example, below, the
// the placement A shows that a piece filled positions 1,2,3,4,5 and the placement
// B shows that a piece has filled positions 4,5,6,7,8,9:
// 1 2 3 4 5 6 7 8 9 ... N
// ---------------------------------
// [A] 1 1 1 1 1 0 0 0 0 ... 0
// [B] 0 0 0 1 1 1 1 1 1 ... 0
// ...
//
// To constrain the problem so that every piece will be in the final solution only
// once, we can add a dummy column for each of the 14 pieces, and cover it.
//
// P1 P2 P3 P4 ... P14 | 1 2 3 4 5 6 7 8 9 ... N
// --------------------------------------------------------
// [A] 1 0 0 0 ... 0 | 1 1 1 1 1 0 0 0 0 ... 0
// [B] 1 0 0 0 ... 0 | 0 0 0 1 1 1 1 1 1 ... 0
// ... |
// [X] 0 1 0 0 ... 0 | 0 0 0 0 0 1 1 1 1 ... 0
// ...
// [Z] 0 0 0 0 ... 1 | 0 0 1 1 1 1 1 0 0 ... 0
//
// Since each piece can have hundreds to thousands of valid placements, our exact
// cover matrix will be extremely large, and so we need to construct it
// programmatically. To do this, I first represent the board and pieces as
// matrices of 0s, 4s and 8s, where 0s are dead space, 4s are squares, and 8s
// are octagons (note 0s are displayed as dots here):
//
// 16x16 Board:
//
// . . . . . . . . 4 8 . . . . . .
// . . . . . . . 4 8 4 8 . . . . .
// . . . . . . 4 8 4 8 4 8 . . . .
// . . . . . . 8 4 8 4 8 4 . . . .
// . . . . . . 4 8 4 8 4 8 4 8 . .
// . . . . 8 4 8 4 8 4 8 4 8 4 8 .
// . . 4 8 4 8 4 8 4 8 4 8 4 8 4 8
// . 4 8 4 8 4 8 4 8 4 8 4 8 4 8 .
// . 8 4 8 4 8 4 8 4 8 4 8 4 8 . .
// 8 4 8 4 8 4 8 4 8 4 8 4 8 . . .
// . 8 4 8 4 8 4 8 4 8 4 8 . . . .
// . . . 4 8 4 8 4 8 4 8 . . . . .
// . . . 8 4 8 4 8 4 8 . . . . . .
// . . . . 8 4 8 4 8 . . . . . . .
// . . . . . 8 4 . . . . . . . . .
// . . . . . . 8 . . . . . . . . .
//
// 3x3 Piece: 2x4 Piece: ... and so on for all 14 pieces ...
//
// 8 4 8 8 4 8 4
// 4 8 . . 8 4 8
// 8 . .
//
// Next, I iteratively overlay each piece in every location on the board and take
// the difference between the board and piece values. If all values in the piece
// matrix go to zero, then the piece can be placed in that location. Note that
// we need to permutate each piece matrix (i.e. combinations of flipping and
// rotating) since we don't know which orientation will be the correct one.
// 3x3 Piece cannot be placed here: // But could be placed here:
//
// . . . . . . . . 4 8 . . . . . . // . . . . . . . . 4 8 . . . . . .
// . . . . . . . 4 8 4 8 . . . . . // . . . . . . . 4 8 4 8 . . . . .
// . 8 4 8 . . 4 8 4 8 4 8 . . . . // . . . . . . 4 8 4 8 4 8 . . . .
// . 4 8 . . . 8 4 8 4 8 4 . . . . // . . . . . . 8 4 8 4 8 4 . . . .
// . 8 . . . . 4 8 4 8 4 8 4 8 . . // . . . . . . 4 8 4 8 4 8 4 8 . .
// . . . . 8 4 8 4 8 4 8 4 8 4 8 . // . . . . 8 4 8 4 8 4 8 4 8 4 8 .
// . . 4 8 4 8 4 8 4 8 4 8 4 8 4 8 // . . 4 8 4 8 4 . . . 4 8 4 8 4 8
// . 4 8 4 8 4 8 4 8 4 8 4 8 4 8 . // . 4 8 4 8 4 8 . . 4 8 4 8 4 8 .
// . 8 4 8 4 8 4 8 4 8 4 8 4 8 . . // . 8 4 8 4 8 4 . 4 8 4 8 4 8 . .
// 8 4 8 4 8 4 8 4 8 4 8 4 8 . . . // 8 4 8 4 8 4 8 4 8 4 8 4 8 . . .
// . 8 4 8 4 8 4 8 4 8 4 8 . . . . // . 8 4 8 4 8 4 8 4 8 4 8 . . . .
// . . . 4 8 4 8 4 8 4 8 . . . . . // . . . 4 8 4 8 4 8 4 8 . . . . .
// . . . 8 4 8 4 8 4 8 . . . . . . // . . . 8 4 8 4 8 4 8 . . . . . .
// . . . . 8 4 8 4 8 . . . . . . . // . . . . 8 4 8 4 8 . . . . . . .
// . . . . . 8 4 . . . . . . . . . // . . . . . 8 4 . . . . . . . . .
// . . . . . . 8 . . . . . . . . . // . . . . . . 8 . . . . . . . . .
//
// If the placement is valid, I create an entry into the exact cover matrix that
// covers the piece's number and the board positions that it filled. Once the sparse
// exact cover matrix is formulated, it is converted to a dense matrix representation,
// and passed to my DLX solver implementation. That's pretty much it! :)
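//
// Putting the pieces below together (an illustrative sketch of the declared API, not the app's
// actual wiring, which lives further down in the file):
//   ExactCover cover(g_board_mat, g_piece_mats); // builds the sparse and dense cover matrices
//   DLX solver(cover.dense, cover.numCols);      // builds the toroidal linked list
//   solver.search();                             // solver.solutions then holds rows of cover.dense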
//==============================================================================
// MATRIX OPERATIONS
//==============================================================================
typedef int Num;
typedef vector<Num> Row; // Matrix row
typedef vector<Row> Matrix; // 2D Matrix
/// Returns the size of a matrix along a dimension
inline size_t size(const Matrix& mat, int dim) {
if (dim == 0)
return mat.size();
else if (dim == 1)
return mat[0].size();
return 0;
}
/// Makes a matrix of all zeros, size r x c
inline Matrix zeros(size_t r, size_t c) {
return Matrix(r, Row(c, 0));
}
/// Flips a matrix vertically
inline void flipud(Matrix &mat) {
std::reverse(mat.begin(), mat.end());
}
/// Flips a matrix horizontally
inline void fliplr(Matrix& mat) {
for (auto &row : mat)
std::reverse(row.begin(), row.end());
}
/// Rotates a matrix 90 deg clockwise
inline void rot90(Matrix& mat) {
auto temp = mat;
mat.resize(size(temp,1));
for (size_t j = 0; j < size(temp,1); ++j) {
mat[j].resize(size(temp,0));
for (size_t i = 0; i < size(temp,0); ++i)
mat[j][i] = temp[i][j];
}
fliplr(mat);
}
/// Permutates of matrix to one of 8 possible permutations
inline void permute(Matrix& mat, size_t permutation) {
switch(permutation) {
case 0 : { break; }
case 1 : { rot90(mat); break; }
case 2 : { rot90(mat); rot90(mat); break;}
case 3 : { rot90(mat); rot90(mat); rot90(mat); break;}
case 4 : { fliplr(mat); break;}
case 5 : { fliplr(mat); rot90(mat); break;}
case 6 : { fliplr(mat); rot90(mat); rot90(mat); break;}
case 7 : { fliplr(mat); rot90(mat); rot90(mat); rot90(mat); break;}
}
}
/// Finds all unique permutations for a matrix (i.e. some permutations are symmetrical...we don't check those twice)
inline size_t uniquePermutations(const Matrix& mat, vector<size_t>& uniquePerms, vector<Matrix>& uniqueMats) {
size_t numUnique = 0;
uniquePerms.clear(); uniquePerms.reserve(8);
uniqueMats.clear(); uniqueMats.reserve(8);
for (size_t perm = 0; perm < 8; ++perm) {
auto temp = mat;
permute(temp, perm);
if (std::find(uniqueMats.begin(), uniqueMats.end(), temp) == uniqueMats.end()) {
uniquePerms.push_back(perm);
uniqueMats.push_back(temp);
numUnique++;
}
}
return numUnique;
}
inline void sparseToDense(const Matrix& sparse, Matrix& dense) {
auto numRows = size(sparse,0);
auto numCols = size(sparse,1);
dense.assign(numRows,{});
for (size_t r = 0; r < numRows; ++r) {
for (size_t c = 0; c < numCols; ++c) {
if (sparse[r][c])
dense[r].push_back((Num)c);
}
}
}
std::string matToStr(const Matrix& mat) {
std::stringstream ss;
for (auto& row : mat) {
for (auto& val : row) {
if (val == 0)
ss << ". ";
else
ss << val << " ";
}
ss << std::endl;
}
return ss.str();
}
inline void printMat(const Matrix& mat) {
for (auto& row : mat) {
for (auto& val : row) {
if (val == 0)
std::cout << ". ";
else
std::cout << val << " ";
}
std::cout << std::endl;
}
std::cout << std::endl;
}
//==============================================================================
// DLX EXACT COVER SOLVER
//==============================================================================
/// Formulates an exact cover problem from a board and number of pieces
struct ExactCover {
struct Info {
size_t piece, perm, r, c;
};
ExactCover(const Matrix& board, const vector<Matrix>& pieces) :
board(board), pieces(pieces), numPieces(pieces.size()), numSlots(0),
boardRows(size(board,0)), boardCols(size(board,1))
{
buildSlots();
buildSparseMatrix();
sparseToDense(sparse,dense);
}
void buildSlots() {
slots = zeros(size(board, 0), size(board, 1));
for (size_t r = 0; r < size(slots,0); ++r) {
for (size_t c = 0; c < size(slots,1); ++c) {
if (board[r][c] != 0)
slots[r][c] = (Num)numSlots++;
}
}
}
void buildSparseMatrix() {
Row row;
sparse.reserve(2500);
dense.reserve(2500);
info.reserve(2500);
for (size_t piece = 0; piece < numPieces; ++piece) {
vector<size_t> uniquePerms;
vector<Matrix> uniqueMats;
auto numUnique = uniquePermutations(pieces[piece], uniquePerms, uniqueMats);
for (size_t perm = 0; perm < numUnique; ++perm) {
for (size_t r = 0; r < boardRows; ++r) {
for (size_t c = 0; c < boardCols; ++c) {
row.assign(numPieces + numSlots, 0);
row[piece] = 1;
if (place(uniqueMats[perm], r, c, row)) {
sparse.push_back(row);
info.push_back({piece, perm, r, c});
}
}
}
}
}
numRows = size(sparse,0);
numCols = size(sparse,1);
}
bool place(const Matrix& permMat, size_t r, size_t c, Row& rowOut) {
auto height = size(permMat,0);
auto width = size(permMat,1);
if ((r + height) > boardRows || (c + width) > boardCols)
return false;
else {
for (size_t rr = 0; rr < height; ++rr) {
for (size_t cc = 0; cc < width; ++cc) {
if (permMat[rr][cc] == 0)
continue;
auto diff = permMat[rr][cc] - board[r + rr][c + cc];
if (diff != 0)
return false;
rowOut[numPieces + slots[r + rr][c + cc]] = 1;
}
}
}
return true;
}
void writeToFiles() {
std::ofstream file1, file2, file3;
file1.open("exact_cover_sparse.txt");
file2.open("exact_cover_dense.txt");
file3.open("exact_cover_info.txt");
for (size_t r = 0; r < numRows; ++r) {
file1 << sparse[r] << std::endl;
file2 << dense[r] << std::endl;
file3 << fmt::format("{},{},{},{}",info[r].piece, info[r].perm, info[r].r, info[r].c) << std::endl;
}
file1.close();
file2.close();
file3.close();
}
Matrix board;
vector<Matrix> pieces;
size_t boardRows;
size_t boardCols;
size_t numPieces;
size_t numSlots;
Matrix slots;
Matrix sparse;
Matrix dense;
size_t numRows;
size_t numCols;
vector<Info> info;
};
/// Implementation of Donald Knuth's DLX algorithm
struct DLX {
/// Vector of row indices into exact cover matrix
typedef vector<size_t> Solution;
/// Node within toroidal linked list
struct Cell {
Cell(int row) : row(row) {}
int row; // row of cell
Cell* R = nullptr; // pointer to right cell
Cell* L = nullptr; // pointer to left cell
Cell* U = nullptr; // pointer to up cell
Cell* D = nullptr; // pointer to down cell
Cell* C = nullptr; // pointer to header cell
size_t* S = nullptr; // pointer to cell's column size
};
/// Constructor, taking exact cover matrix in dense representation
DLX(const Matrix& dense, size_t numCols) {
size_t numCells = 1 + numCols;
for (auto& row : dense)
numCells += row.size();
cells.reserve(numCells);
sizes.assign(numCols,0);
headers.resize(numCols);
makeHeaderRow(numCols);
for (size_t r = 0; r < dense.size(); ++r)
makeRow(r, dense[r]);
assert(numCells == cells.size());
}
/// Makes root and header cells
void makeHeaderRow(size_t numCols) {
// root
root = &cells.emplace_back(-1);
root->L = root;
root->R = root;
// headers
for (size_t c = 0; c < numCols; ++c) {
auto h = &cells.emplace_back(-1);
// up/down linkage
h->U = h;
h->D = h;
// left/right linkage
h->L = root->L;
h->L->R = h;
root->L = h;
h->R = root;
// linkage to header (self)
h->C = h;
            /// linkage to size block
h->S = &sizes[c];
// insert into header list
headers[c] = h;
}
}
/// Takes a dense row representation and adds it to toroidal linked list
void makeRow(size_t r, const Row& row) {
Cell* leftMost = nullptr;
for (auto& c : row) {
auto cell = &cells.emplace_back(r);
// up/down linkage
cell->U = headers[c]->U;
cell->U->D = cell;
headers[c]->U = cell;
cell->D = headers[c];
// left/right linkage
if (!leftMost) {
cell->L = cell;
cell->R = cell;
leftMost = cell;
}
else {
cell->L = leftMost->L;
cell->L->R = cell;
leftMost->L = cell;
cell->R = leftMost;
}
// linkage to header
cell->C = headers[c];
// linkage to size block
cell->S = &sizes[c];
++sizes[c];
}
}
/// Chooses column with fewest cells remaining in it
inline Cell* chooseColumn() {
auto C = root->R->C;
size_t s = -1;
for (Cell* j = root->R; j != root; j = j->R) {
if (*j->S < s) {
s = *j->S;
C = j->C;
}
}
return C;
}
/// Covers a column cell C
inline void cover(Cell* C) {
C->R->L = C->L;
C->L->R = C->R;
for (Cell* i = C->D; i != C; i = i->D) {
for (Cell* j = i->R; j != i; j = j->R) {
j->D->U = j->U;
j->U->D = j->D;
--*j->S;
}
}
}
    /// Uncovers a column cell C
inline void uncover(Cell* C) {
for (Cell* i = C->U; i != C; i = i->U) {
for (Cell* j = i->L; j != i; j = j->L) {
++*j->S;
j->D->U = j;
j->U->D = j;
}
}
C->R->L = C;
C->L->R = C;
}
/// Recursive search algorithm
bool search() {
if (root->R == root) {
solutions.push_back(workingSolution);
return true;
}
auto C = chooseColumn();
cover(C);
for (Cell* r = C->D; r != C; r = r->D) {
workingSolution.push_back(r->row);
operations.push_back({(size_t)r->row,true});
for (Cell* j = r->R; j != r; j = j->R)
cover(j->C);
if (search() && !findAll)
return true;
workingSolution.pop_back();
operations.push_back({(size_t)r->row,false});
C = r->C;
for (Cell* j = r->L; j != r; j = j->L)
uncover(j->C);
}
uncover(C);
return false;
}
bool findAll = false; ///< find all solutions?
vector<Cell> cells; ///< all root, header, and normal cells
Cell* root; ///< pointer to root cell
vector<Cell*> headers; ///< pointers to headers indexed by column
vector<size_t> sizes; ///< column sizes indexed by columns
Solution workingSolution; ///< the working solution of search()
vector<Solution> solutions; ///< all final saved solutions of search()
vector<pair<size_t,bool>> operations; ///< row insertions (true) and deletions (false)
};
//==============================================================================
// PUZZOMETRY CONSTANT PROPERTIES
//==============================================================================
constexpr float g_sideLength = 25.0f;
constexpr float g_gridSize = (2.0f + (float)SQRT2) * 0.5f * g_sideLength * 0.999f;
/// Board matrix
const Matrix g_board_mat {
{0, 0, 0, 0, 0, 0, 0, 0, 4, 8, 0, 0, 0, 0, 0, 0},
{0, 0, 0, 0, 0, 0, 0, 4, 8, 4, 8, 0, 0, 0, 0, 0},
{0, 0, 0, 0, 0, 0, 4, 8, 4, 8, 4, 8, 0, 0, 0, 0},
{0, 0, 0, 0, 0, 0, 8, 4, 8, 4, 8, 4, 0, 0, 0, 0},
{0, 0, 0, 0, 0, 0, 4, 8, 4, 8, 4, 8, 4, 8, 0, 0},
{0, 0, 0, 0, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 0},
{0, 0, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8},
{0, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 0},
{0, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 0, 0},
{8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 0, 0, 0},
{0, 8, 4, 8, 4, 8, 4, 8, 4, 8, 4, 8, 0, 0, 0, 0},
{0, 0, 0, 4, 8, 4, 8, 4, 8, 4, 8, 0, 0, 0, 0, 0},
{0, 0, 0, 8, 4, 8, 4, 8, 4, 8, 0, 0, 0, 0, 0, 0},
{0, 0, 0, 0, 8, 4, 8, 4, 8, 0, 0, 0, 0, 0, 0, 0},
{0, 0, 0, 0, 0, 8, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0},
{0, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0}
};
/// Piece matrices
const vector<Matrix> g_piece_mats
{
{{8, 4, 8, 4}, {0, 8, 4, 8}},
{{0, 4, 8}, {4, 8, 4}, {8, 4, 8}},
{{8, 4, 0}, {4, 8, 4}, {8, 4, 0}},
{{0, 4, 0}, {4, 8, 4}, {8, 4, 8}, {4, 8, 4}, {8, 4, 0}},
{{4, 8, 0, 0}, {8, 4, 8, 0}, {0, 8, 4, 8}, {0, 4, 8, 4}, {0, 8, 4, 0}},
{{4, 8, 0}, {8, 4, 8}, {0, 8, 4}},
{{4, 0, 0, 0}, {8, 4, 0, 0}, {4, 8, 4, 0}, {0, 4, 8, 4}},
{{0, 0, 0, 8, 4}, {0, 0, 8, 4, 8}, {0, 8, 4, 8, 0}, {8, 4, 0, 0, 0}},
{{0, 0, 0, 4, 0}, {0, 8, 4, 8, 4}, {8, 4, 8, 4, 8}, {0, 8, 4, 8, 4}},
{{0, 8, 4, 8}, {8, 4, 8, 0}, {4, 8, 4, 0}, {8, 0, 0, 0}},
{{0, 4, 8}, {0, 8, 4}, {8, 4, 0}, {0, 8, 0}},
{{8, 4, 8}, {4, 8, 0}, {8, 0, 0}},
{{4, 8, 0}, {8, 4, 8}, {4, 8, 0}, {8, 0, 0}},
{{4, 8, 0}, {8, 4, 8}, {4, 8, 0}},
};
/// Represents a coordinate position in the Board
struct Coord {
int r;
int c;
};
/// Solution coordinates for g_piece_mats, all in permutation 0
const vector<Coord> g_solutions {
{9, 0},
{6, 1},
{5, 4},
{1, 6},
{0, 8},
{11, 3},
{8, 4},
{5, 5},
{7, 6},
{4, 10},
{12, 5},
{11, 8},
{7, 11},
{5, 13},
};
/// Piece colors
const vector<Color> g_colors {
Reds::FireBrick,
Oranges::OrangeRed,
Reds::LightCoral,
Yellows::Yellow,
Greens::YellowGreen,
Greens::Chartreuse,
Greens::SpringGreen,
Cyans::LightSeaGreen,
Blues::DeepSkyBlue,
Blues::DodgerBlue,
Blues::SteelBlue,
Purples::BlueViolet,
Purples::Magenta,
Pinks::DeepPink
};
//==============================================================================
// HELPERS
//==============================================================================
inline Vec2 coordPosition(const Coord& coord) {
return Vec2(coord.c * g_gridSize, coord.r * g_gridSize);
}
inline Vec2 coordPosition(int i, int j) {
return coordPosition({i,j});
}
Shape makeShape(const Matrix& mat) {
// create square and octagon primitives
auto sqr = make_rectangle_shape(g_sideLength,g_sideLength);
auto oct = make_polygon_shape(8, g_sideLength, PolyParam::SideLength);
oct.rotate(360.0f / 16.0f);
    // create a queue of shapes to merge
std::deque<Shape> shapes;
for (size_t r = 0; r < size(mat,0); ++r) {
for (size_t c = 0; c < size(mat,1); ++c) {
if (mat[r][c] == 4)
shapes.push_back(sqr);
else if (mat[r][c] == 8)
shapes.push_back(oct);
oct.move(g_gridSize, 0);
sqr.move(g_gridSize, 0);
}
oct.move(-g_gridSize * size(mat,1), g_gridSize);
sqr.move(-g_gridSize * size(mat,1), g_gridSize);
}
    // merge shapes
Shape shape;
while (!shapes.empty()) {
auto toMerge = shapes.front();
shapes.pop_front();
auto merged = clip_shapes(shape, toMerge, ClipType::Union);
if (merged.size() == 1)
shape = merged[0];
else
shapes.push_back(toMerge);
}
return shape;
};
//==============================================================================
// BOARD
//==============================================================================
class Board
{
public:
Board(Application* app) : app(app), matrix(g_board_mat), color(Grays::Gray50) {
shape.set_point_count(4);
shape.set_point(0, coordPosition(9,-2));
shape.set_point(1, coordPosition(-2,9));
shape.set_point(2, coordPosition(6,17));
shape.set_point(3, coordPosition(17,6));
shape.set_radii(g_gridSize);
auto hole = makeShape(matrix);
hole = offset_shape(hole, 2.0f);
shape.push_back_hole(hole);
}
void draw(NVGcontext* vg) {
float trans[6], inv[6];
nvgCurrentTransform(vg, trans);
nvgTransformInverse(inv, trans);
Vec2 mouse = app->get_mouse_pos();
nvgTransformPoint(&mouse.x, &mouse.y, inv, mouse.x, mouse.y);
bool hovered = shape.contains(mouse);
if (hovered) {
ImGui::BeginTooltip();
ImGui::Text("Board");
ImGui::Separator();
auto matStr = matToStr(matrix);
ImGui::Text(matStr.c_str());
ImGui::Separator();
ImGui::EndTooltip();
}
Vec2 tl = shape.bounds().tl();
Vec2 br = shape.bounds().br();
nvgBeginPath(vg);
nvgShapeWithHoles(vg, shape);
auto paint = nvgLinearGradient(vg, tl.x, tl.y, br.x, br.y, {0.5f,0.5f,0.5f,0.25f}, {0.5f,0.5f,0.5f,0.5f});
nvgFillPaint(vg, paint);
nvgFill(vg);
nvgStrokeColor(vg, Grays::Gray80);
nvgStrokeWidth(vg, hovered ? 3 : 1);
nvgStroke(vg);
}
Application* app;
Shape shape;
Matrix matrix;
Color color;
};
//==============================================================================
// PIECE
//==============================================================================
class Piece : public Transformable
{
public:
Piece(const Matrix &mat, const Color &col, Application* app) : app(app), matrix(mat), color(col)
{
shape = makeShape(matrix);
shape = offset_shape(shape, -2.0f);
perm = 0;
set_pos({0,0});
set_origin({0,0});
set_scale({1,1});
set_rotation(0);
}
void updateMatrix(const Matrix &mat) {
matrix = mat;
shape = makeShape(matrix);
shape = offset_shape(shape, -2.0f);
perm = 0;
}
Vec2 computeScale(int perm) {
return perm < 4 ? Vec2(1,1) : Vec2(-1,1);
}
Vec2 computeOrigin(int perm) {
return g_gridSize * Vec2((perm == 2 || perm == 3 || perm == 4 || perm == 5) ? (float)size(matrix,1) - 1 : 0.0f,
(perm == 1 || perm == 2 || perm == 5 || perm == 6) ? (float)size(matrix,0) - 1 : 0.0f);
}
float computeRotation(int perm) {
return 90.0f * (float)(perm % 4);
}
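    // Note (editor's addition): a "perm" in [0,7] encodes the 8 dihedral
    // placements of a piece — perms 0-3 are 0/90/180/270 degree rotations,
    // perms 4-7 are the same rotations combined with a horizontal mirror
    // (x-scale of -1, see computeScale above).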
void place(const Coord& new_coord, int new_perm) {
set_pos(coordPosition(new_coord));
set_origin(computeOrigin(new_perm));
set_scale(computeScale(new_perm));
set_rotation(computeRotation(new_perm));
coord = new_coord;
perm = new_perm;
}
Enumerator transition(Coord to_coord, int perm, float duration) {
transitioning = true;
// start state
auto startPosition = pos();
auto startRotation = rotation();
auto startScale = scale();
auto startOrigin = origin();
// end state
auto endPosition = coordPosition(to_coord);
auto endRotation = computeRotation(perm);
auto endScale = computeScale(perm);
auto endOrigin = computeOrigin(perm);
// animation loop
float elapsedTime = 0.0f;
while (elapsedTime < duration) {
float t = elapsedTime / duration;
set_pos( Tween::Smootherstep(startPosition, endPosition, t) );
set_rotation( Tween::Smootherstep(startRotation, endRotation, t) );
set_scale( Tween::Smootherstep(startScale, endScale, t) );
set_origin( Tween::Smootherstep(startOrigin, endOrigin, t) );
elapsedTime += (float)app->delta_time().as_seconds();
co_yield nullptr;
}
place(to_coord, perm);
transitioning = false;
}
void draw(NVGcontext* vg) {
nvgTransform(vg, transform());
float trans[6], inv[6];
nvgCurrentTransform(vg, trans);
nvgTransformInverse(inv, trans);
Vec2 mouse = app->get_mouse_pos();
nvgTransformPoint(&mouse.x, &mouse.y, inv, mouse.x, mouse.y);
bool hovered = shape.contains(mouse);
if (hovered) {
ImGui::BeginTooltip();
ImGui::TextColored(color, "Piece");
ImGui::Text("R:%i C:%i P:%i", coord.r, coord.c, perm);
ImGui::Separator();
auto permMat = matrix;
permute(permMat, perm);
auto matStr = matToStr(permMat);
ImGui::Text(matStr.c_str());
ImGui::Separator();
ImGui::EndTooltip();
}
Vec2 tl = shape.bounds().tl();
Vec2 br = shape.bounds().br();
nvgBeginPath(vg);
nvgShape(vg, shape);
auto paint = nvgLinearGradient(vg, tl.x, tl.y, br.x, br.y, with_alpha(color,0.25f), with_alpha(color,0.5f));
nvgFillPaint(vg, paint);
nvgFill(vg);
nvgStrokeColor(vg, Tween::Linear(Whites::White, color, 0.5f));
nvgStrokeWidth(vg, hovered ? 3 : 1);
nvgStroke(vg);
}
Application* app;
Shape shape;
bool transitioning = false;
int perm;
Coord coord;
Matrix matrix;
Color color;
};
//==============================================================================
// PUZZOMETRY
//==============================================================================
class Puzzometry : public Application {
public:
Puzzometry(Config conf) : Application(conf), board(this), problem(g_board_mat, g_piece_mats), solver(problem.dense, problem.numCols) {
pieces.reserve(g_piece_mats.size());
for (size_t i = 0; i < g_piece_mats.size(); ++i) {
auto p = Piece(g_piece_mats[i], g_colors[i], this);
p.place(g_solutions[i], 0);
pieces.emplace_back(std::move(p));
}
// solve
set_vsync(false);
nvgCreateFontMem(m_vg, "roboto-bold", Roboto_Bold_ttf, Roboto_Bold_ttf_len, 0);
createCheckerBoard();
}
void createCheckerBoard() {
checker = nvgluCreateFramebuffer(m_vg, 16, 16, NVG_IMAGE_REPEATX | NVG_IMAGE_REPEATY);
nvgluBindFramebuffer(checker);
glViewport(0, 0, 16, 16);
glClearColor(0, 0, 0, 0);
glClear(GL_COLOR_BUFFER_BIT|GL_STENCIL_BUFFER_BIT);
nvgBeginFrame(m_vg, 16, 16, 1);
nvgBeginPath(m_vg);
nvgRect(m_vg, 0, 0, 8, 8);
nvgFillColor(m_vg, Grays::Gray10);
nvgFill(m_vg);
nvgBeginPath(m_vg);
nvgRect(m_vg, 8, 8, 16, 16);
nvgFillColor(m_vg, Grays::Gray10);
nvgFill(m_vg);
nvgEndFrame(m_vg);
nvgluBindFramebuffer(nullptr);
}
~Puzzometry() {
nvgluDeleteFramebuffer(checker);
}
Enumerator shuffle() {
stop_coroutines();
animating = false;
percent = 0;
solver.solutions.clear();
std::random_device rng;
std::mt19937 gen(rng());
std::shuffle(pieces.begin(), pieces.end(), gen);
std::vector<Matrix> matrices;
for (auto& p : pieces) {
int perm = random_range(0,7);
auto mat = p.matrix;
permute(mat, perm);
matrices.push_back(mat);
co_yield start_coroutine(p.transition({0,0},perm,0.1f));
p.updateMatrix(mat);
p.place({0,0},0);
}
problem = ExactCover(g_board_mat, matrices);
}
void solve() {
Clock clk;
solver = DLX(problem.dense, problem.numCols);
solver.search();
ms_solve = clk.get_elapsed_time().as_milliseconds();
}
void update() {
ImGui::Begin("Puzzometry");
if (ImGui::Button("Shuffle"))
start_coroutine(shuffle());
ImGui::SameLine();
ImGui::BeginDisabled(animating || solver.solutions.size() > 0);
if (ImGui::Button("Solve"))
solve();
ImGui::EndDisabled();
if (solver.solutions.size()) {
ImGui::SameLine();
ImGui::Text("Solve Time: %d ms | Operations: %d", ms_solve, solver.operations.size(), solver.solutions.size());
}
ImGui::BeginDisabled(solver.solutions.size() == 0);
if (ImGui::Button("Animate"))
start_coroutine(animateSolution());
ImGui::SameLine();
ImGui::ProgressBar(percent);
static float scale = 1;
if (ImGui::DragFloat("Animation Speed", &scale, 0.1f, 0, 100, "%.1fx"))
set_time_scale(scale);
ImGui::EndDisabled();
ImGui::Separator();
for (int i = 0; i < pieces.size(); ++i) {
ImGui::PushID(i);
ImGui::PushItemWidth(100);
ImGui::ColorButton("##Color",pieces[i].color); ImGui::SameLine();
int r = pieces[i].coord.r;
int c = pieces[i].coord.c;
int p = pieces[i].perm;
if (ImGui::SliderInt("Row",&r,0,15))
{
pieces[i].coord.r = r;
pieces[i].place(pieces[i].coord, pieces[i].perm);
}
ImGui::SameLine();
if (ImGui::SliderInt("Col",&c,0,15))
{
pieces[i].coord.c = c;
pieces[i].place(pieces[i].coord, pieces[i].perm);
}
ImGui::SameLine();
if (ImGui::SliderInt("Permutation",&p,0,7))
{
pieces[i].perm = p;
pieces[i].place(pieces[i].coord, pieces[i].perm);
}
ImGui::PopItemWidth();
ImGui::PopID();
}
ImGui::End();
}
void draw(NVGcontext* vg) override {
NVGpaint img = nvgImagePattern(vg, 0, 0, 16, 16, 0, checker->image, 1.0f);
nvgBeginPath(vg);
nvgRect(vg,0,0,800,800);
nvgFillPaint(vg, img);
nvgFill(vg);
nvgTranslate(vg, 400-g_gridSize*7.5f, 400-g_gridSize*7.5f);
// grid
for (int i = 0; i < 16; ++i) {
static std::vector<const char*> rtxt = {"R0","R1","R2","R3","R4","R5","R6","R7","R8","R9","R10","R11","R12","R13","R14","R15"};
static std::vector<const char*> ctxt = {"C0","C1","C2","C3","C4","C5","C6","C7","C8","C9","C10","C11","C12","C13","C14","C15"};
auto p1 = coordPosition(-1, i);
auto p2 = coordPosition(16, i);
auto p3 = coordPosition(i, -1);
auto p4 = coordPosition(i, 16);
nvgBeginPath(vg);
nvgMoveTo(vg, p1.x, p1.y);
nvgLineTo(vg, p2.x, p2.y);
nvgStrokeColor(vg, Grays::Gray30);
nvgStrokeWidth(vg, 1);
nvgStroke(vg);
nvgBeginPath(vg);
nvgMoveTo(vg, p3.x, p3.y);
nvgLineTo(vg, p4.x, p4.y);
nvgStrokeColor(vg, Grays::Gray30);
nvgStrokeWidth(vg, 1);
nvgStroke(vg);
nvgFontSize(vg,15);
nvgFontFace(vg, "roboto-bold");
nvgFillColor(vg, Whites::White);
nvgTextAlign(vg, NVG_ALIGN_RIGHT | NVG_ALIGN_MIDDLE);
nvgText(vg, p3.x - 10, p3.y, rtxt[i], nullptr);
nvgTextAlign(vg, NVG_ALIGN_CENTER | NVG_ALIGN_TOP);
nvgText(vg, p2.x, p2.y + 10, ctxt[i], nullptr);
}
board.draw(vg);
for (auto& p : pieces) {
nvgSave(vg);
p.draw(vg);
nvgRestore(vg);
}
}
Enumerator animateSolution() {
stop_coroutines();
animating = true;
for (auto& p : pieces)
start_coroutine(p.transition({0, 0}, 0, 0.1f));
co_yield yield_time_scaled(200_ms);
for (size_t i = 0; i < solver.operations.size(); ++i) {
percent = (float)i / (float)solver.operations.size();
auto op = solver.operations[i];
auto info = problem.info[op.first];
auto& piece = pieces[info.piece];
if (op.second) {
        // insertion
co_yield start_coroutine(
piece.transition({(int)info.r, (int)info.c}, info.perm, 0.25f));
co_yield yield_time_scaled(150_ms);
} else {
// removal
auto nextOp = solver.operations[(i + 1) % solver.operations.size()];
auto nextInfo = problem.info[nextOp.first];
if (info.piece != nextInfo.piece)
co_yield start_coroutine(piece.transition({0,0}, piece.perm, 0.1f));
}
}
animating = false;
}
ExactCover problem;
DLX solver;
int ms_solve = -1;
Board board;
vector<Piece> pieces;
bool spinning = false;
bool animating = false;
bool toggled = true;
float percent = 0;
NVGLUframebuffer* checker = NULL;
};
int main(int argc, char const *argv[]) {
Application::Config conf;
conf.width = 800;
conf.height = 800;
conf.resizable = false;
conf.title = "Puzzometry Solver";
conf.msaa = false;
Puzzometry puzz(conf);
puzz.run();
return 0;
}
| 20,522 |
2,226 | /*
gui_event.cpp - implementation of the GuiEvent class.
Revision 0
Notes:
-
2021/10/15, <NAME>
*/
#include "gui_event.h"
#include "gui/core/InputManager.h"
#include "gui/app/views/ViewController.h"
// --- SET ITEM ---
void GuiEvent::setItem(FileData* item, Window* window) {
this->item = item;
this->window = window;
}
// --- PROCESS ---
void GuiEvent::process() {
item->launchItem(window);
}
// --- FINISH ---
void GuiEvent::finish() {
// Call own destructor.
delete this;
}
| 200 |
473 | <reponame>pingjuiliao/cb-multios
/*
* Copyright (C) <NAME> <<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include "cgc_libc.h"
int cgc_get_match_run_length(uint32_t i1, uint32_t i2, uint8_t *num_matching) {
DBG("\n");
int ret = SUCCESS;
DBG("[D] finding initial matching bit run: 0x%08x vs. 0x%08x\n", i1, i2);
for (cgc_size_t i=0; i<NUM_BITS_IN_A_UINT32; i++) {
if ((1 & (i1 >> (NUM_BITS_IN_A_UINT32-1-i))) == (1 & (i2 >> (NUM_BITS_IN_A_UINT32-1-i)))) {
*num_matching = *num_matching+1;
} else {
break;
}
}
DBG("[D] 0x%08x & 0x%08x share %d initial matching bits\n", i1, i2, *num_matching);
bail:
return ret;
}
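/*
 * Worked example (editor's addition, not part of the original source):
 * i1 = 0xF0000000 -> 1111 0000 ..., i2 = 0xF8000000 -> 1111 1000 ...
 * Scanning from the most-significant bit, the two values agree on the first
 * 4 bits and differ on the 5th, so the loop leaves *num_matching == 4.
 */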
| 584 |
678 | <gh_stars>100-1000
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "MMService.h"
#import "IMsgExt.h"
#import "MMService.h"
@class NSString, NewArrivalCountData;
@interface NewArrivalCountMgr : MMService <MMService, IMsgExt>
{
NewArrivalCountData *m_data;
}
- (void).cxx_destruct;
- (void)OnModMsg:(id)arg1 MsgWrap:(id)arg2;
- (void)OnDelMsg:(id)arg1;
- (void)OnAddMsg:(id)arg1 MsgWrap:(id)arg2;
- (void)lastMessageChange:(id)arg1 MsgWrap:(id)arg2;
- (void)lastMessageChange:(id)arg1 msgid:(unsigned int)arg2 msg:(id)arg3;
- (_Bool)clearQQNewArrivalCountByName:(id)arg1;
- (_Bool)zeroQQNewArrivalCount;
- (unsigned int)getQQNewArrivalCount;
- (id)getQQNewArrivalList;
- (_Bool)zeroSXNewArrivalCount;
- (unsigned int)getSXNewArrivalCount;
- (id)getSXNewArrivalList;
- (_Bool)zeroBottleNewArrivalCount;
- (_Bool)clearBottleNewArrivalCount:(id)arg1;
- (unsigned int)getBottleNewArrivalCount;
- (id)getBottleNewArrivalList;
- (void)dealloc;
- (id)init;
- (void)zeroQQNewArrivalMsgItem;
- (void)zeroSXNewArrivalMsgItem;
- (void)callExt:(id)arg1;
- (void)callBottleExt;
- (void)callQQExt;
- (void)callSXExt;
- (_Bool)loadData;
- (id)getPath;
// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
| 603 |
646 | # ----------------------------------------
# Written by <NAME>
# ----------------------------------------
import torch
import torch.nn as nn
import torchvision
import torchvision.transforms as transforms
import os
import sys
import numpy as np
from config import cfg
from datasets.generateData import generate_dataset
from net.generateNet import generate_net
import torch.optim as optim
from PIL import Image
from tensorboardX import SummaryWriter
from torch.utils.data import DataLoader
from net.loss import MaskCrossEntropyLoss, MaskBCELoss, MaskBCEWithLogitsLoss
from net.sync_batchnorm.replicate import patch_replication_callback
def train_net():
dataset = generate_dataset(cfg.DATA_NAME, cfg, 'train', cfg.DATA_AUG)
dataloader = DataLoader(dataset,
batch_size=cfg.TRAIN_BATCHES,
shuffle=cfg.TRAIN_SHUFFLE,
num_workers=cfg.DATA_WORKERS,
drop_last=True)
net = generate_net(cfg)
if cfg.TRAIN_TBLOG:
from tensorboardX import SummaryWriter
# Set the Tensorboard logger
tblogger = SummaryWriter(cfg.LOG_DIR)
print('Use %d GPU'%cfg.TRAIN_GPUS)
device = torch.device(0)
if cfg.TRAIN_GPUS > 1:
net = nn.DataParallel(net)
patch_replication_callback(net)
net.to(device)
if cfg.TRAIN_CKPT:
pretrained_dict = torch.load(cfg.TRAIN_CKPT)
net_dict = net.state_dict()
pretrained_dict = {k: v for k, v in pretrained_dict.items() if (k in net_dict) and (v.shape==net_dict[k].shape)}
net_dict.update(pretrained_dict)
net.load_state_dict(net_dict)
# net.load_state_dict(torch.load(cfg.TRAIN_CKPT),False)
criterion = nn.CrossEntropyLoss(ignore_index=255)
optimizer = optim.SGD(
params = [
{'params': get_params(net.module,key='1x'), 'lr': cfg.TRAIN_LR},
{'params': get_params(net.module,key='10x'), 'lr': 10*cfg.TRAIN_LR}
],
momentum=cfg.TRAIN_MOMENTUM
)
#scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=cfg.TRAIN_LR_MST, gamma=cfg.TRAIN_LR_GAMMA, last_epoch=-1)
itr = cfg.TRAIN_MINEPOCH * len(dataloader)
max_itr = cfg.TRAIN_EPOCHS*len(dataloader)
running_loss = 0.0
tblogger = SummaryWriter(cfg.LOG_DIR)
net.eval()
for epoch in range(cfg.TRAIN_MINEPOCH, cfg.TRAIN_EPOCHS):
#scheduler.step()
#now_lr = scheduler.get_lr()
for i_batch, sample_batched in enumerate(dataloader):
now_lr = adjust_lr(optimizer, itr, max_itr)
inputs_batched, labels_batched = sample_batched['image'], sample_batched['segmentation']
optimizer.zero_grad()
labels_batched = labels_batched.long().to(1)
#0foreground_pix = (torch.sum(labels_batched!=0).float()+1)/(cfg.DATA_RESCALE**2*cfg.TRAIN_BATCHES)
predicts_batched = net(inputs_batched)
predicts_batched = predicts_batched.to(1)
loss = criterion(predicts_batched, labels_batched)
loss.backward()
optimizer.step()
running_loss += loss.item()
print('epoch:%d/%d\tbatch:%d/%d\titr:%d\tlr:%g\tloss:%g ' %
(epoch, cfg.TRAIN_EPOCHS, i_batch, dataset.__len__()//cfg.TRAIN_BATCHES,
itr+1, now_lr, running_loss))
if cfg.TRAIN_TBLOG and itr%100 == 0:
#inputs = np.array((inputs_batched[0]*128+128).numpy().transpose((1,2,0)),dtype=np.uint8)
#inputs = inputs_batched.numpy()[0]
inputs = inputs_batched.numpy()[0]/2.0 + 0.5
labels = labels_batched[0].cpu().numpy()
labels_color = dataset.label2colormap(labels).transpose((2,0,1))
predicts = torch.argmax(predicts_batched[0],dim=0).cpu().numpy()
predicts_color = dataset.label2colormap(predicts).transpose((2,0,1))
pix_acc = np.sum(labels==predicts)/(cfg.DATA_RESCALE**2)
tblogger.add_scalar('loss', running_loss, itr)
tblogger.add_scalar('lr', now_lr, itr)
tblogger.add_scalar('pixel acc', pix_acc, itr)
tblogger.add_image('Input', inputs, itr)
tblogger.add_image('Label', labels_color, itr)
tblogger.add_image('Output', predicts_color, itr)
running_loss = 0.0
if itr % 5000 == 0:
save_path = os.path.join(cfg.MODEL_SAVE_DIR,'%s_%s_%s_itr%d.pth'%(cfg.MODEL_NAME,cfg.MODEL_BACKBONE,cfg.DATA_NAME,itr))
torch.save(net.state_dict(), save_path)
print('%s has been saved'%save_path)
itr += 1
save_path = os.path.join(cfg.MODEL_SAVE_DIR,'%s_%s_%s_epoch%d_all.pth'%(cfg.MODEL_NAME,cfg.MODEL_BACKBONE,cfg.DATA_NAME,cfg.TRAIN_EPOCHS))
torch.save(net.state_dict(),save_path)
if cfg.TRAIN_TBLOG:
tblogger.close()
print('%s has been saved'%save_path)
def adjust_lr(optimizer, itr, max_itr):
now_lr = cfg.TRAIN_LR * (1 - itr/(max_itr+1)) ** cfg.TRAIN_POWER
optimizer.param_groups[0]['lr'] = now_lr
optimizer.param_groups[1]['lr'] = 10*now_lr
return now_lr
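# Illustrative sketch (editor's addition, not part of the original source):
# adjust_lr above implements the standard "poly" policy,
#   lr = base_lr * (1 - itr / (max_itr + 1)) ** power.
# The numbers used here are stand-ins; the real values come from cfg.TRAIN_LR
# and cfg.TRAIN_POWER, and the second parameter group always gets 10x this lr.
def _poly_lr_demo(base_lr=0.007, power=0.9, max_itr=30000):
    # e.g. itr=0 -> 0.007, itr=15000 -> ~0.00375, itr=29999 -> ~1.2e-06
    return [base_lr * (1 - itr / (max_itr + 1)) ** power for itr in (0, 15000, 29999)]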
def get_params(model, key):
for m in model.named_modules():
if key == '1x':
if 'backbone' in m[0] and isinstance(m[1], nn.Conv2d):
for p in m[1].parameters():
yield p
elif key == '10x':
if 'backbone' not in m[0] and isinstance(m[1], nn.Conv2d):
for p in m[1].parameters():
yield p
if __name__ == '__main__':
train_net()
| 2,197 |
526 | <filename>open-metadata-implementation/view-services/tex-view/tex-view-api/src/main/java/org/odpi/openmetadata/viewservices/tex/api/ffdc/TexViewServiceException.java<gh_stars>100-1000
/* SPDX-License-Identifier: Apache-2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
package org.odpi.openmetadata.viewservices.tex.api.ffdc;
import org.odpi.openmetadata.frameworks.auditlog.messagesets.ExceptionMessageDefinition;
import org.odpi.openmetadata.frameworks.connectors.ffdc.OCFCheckedExceptionBase;
import java.util.Arrays;
/*
* This is a simple exception class that can convey the few types of exception
* that it is sensible to propagate back to the user interface. The Tex handler will
* have detected or caught each error condition, audit logged it and will then
* create a TexViewServiceException to capture the essential details that can be
* used in the UI.
*/
public class TexViewServiceException extends OCFCheckedExceptionBase
{
/*
* Constructor
*/
public TexViewServiceException(ExceptionMessageDefinition messageDefinition,
String className,
String actionDescription)
{
super(messageDefinition, className, actionDescription);
}
/**
* JSON-style toString
*
* @return string of property names and values for this enum
*/
@Override
public String toString()
{
return "TexViewServiceException{" +
", reportedHTTPCode=" + getReportedHTTPCode() +
", reportingClassName='" + getReportingClassName() + '\'' +
", reportingActionDescription='" + getReportingActionDescription() + '\'' +
", reportedErrorMessageId='" + getReportedErrorMessageId() + '\'' +
", reportedErrorMessageParameters=" + Arrays.toString(getReportedErrorMessageParameters()) +
", reportedSystemAction='" + getReportedSystemAction() + '\'' +
", reportedUserAction='" + getReportedUserAction() + '\'' +
", reportedCaughtException=" + getReportedCaughtException() +
", relatedProperties=" + getRelatedProperties() +
'}';
}
/**
* Return comparison result based on the content of the properties.
*
* @param objectToCompare test object
* @return result of comparison
*/
@Override
public boolean equals(Object objectToCompare)
{
if (this == objectToCompare)
{
return true;
}
if (objectToCompare == null || getClass() != objectToCompare.getClass())
{
return false;
}
if (!super.equals(objectToCompare))
{
return false;
}
return true;
}
/**
* Return hash code for this object
*
* @return int hash code
*/
@Override
public int hashCode()
{
return super.hashCode();
}
}
| 1,201 |
530 | package org.carlspring.strongbox.service;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.pool.PoolStats;
import javax.ws.rs.client.Client;
/**
* @author korest
*/
public interface ProxyRepositoryConnectionPoolConfigurationService
{
Client getRestClient();
CloseableHttpClient getHttpClient();
void setMaxTotal(int max);
void setDefaultMaxPerRepository(int defaultMax);
int getDefaultMaxPerRepository();
void setMaxPerRepository(String repository, int max);
PoolStats getTotalStats();
PoolStats getPoolStats(String repository);
void shutdown();
}
| 192 |
328 | <filename>springboot-invoke/api-common/src/main/java/com/ctg/test/api/service/TestService.java<gh_stars>100-1000
package com.ctg.test.api.service;
import com.ctg.test.api.model.ResponseDo;
import java.rmi.RemoteException;
/**
* @Description: TODO
* @Author: yanhonghai
* @Date: 2018/11/3 11:53
*/
public interface TestService {
    public ResponseDo getResponse(String parameter) throws RemoteException;
}
| 144 |
375 | /*
* Copyright 2016 Nokia Solutions and Networks
* Licensed under the Apache License, Version 2.0,
* see license.txt file for details.
*/
package org.rf.ide.core.testdata.text.write.tables.variables;
import java.util.ArrayList;
import java.util.List;
import org.rf.ide.core.testdata.model.AModelElement;
import org.rf.ide.core.testdata.model.ModelType;
import org.rf.ide.core.testdata.model.table.RobotElementsComparatorWithPositionChangedPresave;
import org.rf.ide.core.testdata.model.table.VariableTable;
import org.rf.ide.core.testdata.model.table.variables.DictionaryVariable;
import org.rf.ide.core.testdata.model.table.variables.DictionaryVariable.DictionaryKeyValuePair;
import org.rf.ide.core.testdata.text.read.recognizer.RobotToken;
import org.rf.ide.core.testdata.text.read.recognizer.RobotTokenType;
import org.rf.ide.core.testdata.text.write.DumperHelper;
import org.rf.ide.core.testdata.text.write.tables.ANotExecutableTableElementDumper;
public class DictionaryVariableDumper extends ANotExecutableTableElementDumper<VariableTable> {
public DictionaryVariableDumper(final DumperHelper helper) {
super(helper, ModelType.DICTIONARY_VARIABLE_DECLARATION_IN_TABLE);
}
@Override
public RobotElementsComparatorWithPositionChangedPresave getSorter(
final AModelElement<VariableTable> currentElement) {
final DictionaryVariable var = (DictionaryVariable) currentElement;
final List<RobotToken> itemsAsValue = new ArrayList<>(0);
for (final DictionaryKeyValuePair dv : var.getItems()) {
final RobotToken key = dv.getKey();
if (!key.isDirty() && !dv.getValue().isDirty() && !dv.getRaw().getText().isEmpty()) {
itemsAsValue.add(dv.getRaw());
} else {
final RobotToken joinedKeyValue = new RobotToken();
joinedKeyValue.setStartOffset(key.getStartOffset());
joinedKeyValue.setLineNumber(key.getLineNumber());
joinedKeyValue.setStartColumn(key.getStartColumn());
final String text = key.getText() + "=" + dv.getValue().getText();
joinedKeyValue.setText(text.equals("=") ? "" : text);
joinedKeyValue.setType(RobotTokenType.VARIABLES_VARIABLE_VALUE);
itemsAsValue.add(joinedKeyValue);
}
}
final RobotElementsComparatorWithPositionChangedPresave sorter = new RobotElementsComparatorWithPositionChangedPresave();
sorter.addPresaveSequenceForType(RobotTokenType.VARIABLES_VARIABLE_VALUE, 1, itemsAsValue);
sorter.addPresaveSequenceForType(RobotTokenType.COMMENT, 2,
elemUtility.filter(var.getComment(), RobotTokenType.COMMENT));
return sorter;
}
}
| 1,071 |
3,442 | <filename>src/net/java/sip/communicator/impl/gui/main/call/UIVideoHandler2.java
/*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.java.sip.communicator.impl.gui.main.call;
import java.awt.*;
import java.beans.*;
import java.util.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.service.protocol.event.*;
import org.jitsi.util.event.*;
/**
* Facilitates the handling of the various video-related events occurring in a
* <tt>CallConference</tt> in the UI as employed by <tt>OneToOneCallPanel</tt>
* and <tt>VideoConferenceCallPanel</tt>. The intention at the time of this
* writing is to have <tt>UIVideoHandler2</tt> initialized by <tt>CallPanel</tt>
* and shared by it with <tt>OneToOneCallPanel</tt> and
* <tt>VideoConferenceCallPanel</tt> as <tt>CallPanel</tt> switches between them
* as needed.
*
* @author <NAME>
*/
public class UIVideoHandler2
extends Observable
{
/**
* The name of the <tt>UIVideoHandler2</tt> property which indicates whether
* the visual <tt>Component</tt> displaying the video of the local peer/user
* streaming to the remote peer(s) is to be made visible in the user
* interface.
*/
public static final String LOCAL_VIDEO_VISIBLE_PROPERTY_NAME
= "localVideoVisible";
/**
* The <tt>CallConference</tt> in which the handling of the various
* video-related events is to be facilitated by this
* <tt>UIVideoHandler2</tt>.
*/
private final CallConference callConference;
/**
* The listener implementations which get notified by
* {@link #callConference}, the <tt>Call</tt>s participating in it, the
* <tt>CallPeer</tt>s associated with them, and the
* <tt>ConferenceMember</tt>s participating in their telephony conferences
* about events related to the handling of video which this instance
* facilitates.
*/
private final CallConferenceListener callConferenceListener;
/**
* The indicator which determines whether the visual <tt>Component</tt>
* depicting the video of the local peer/user streaming to the remote
* peer(s) is to be made visible in the user interface.
*/
private boolean localVideoVisible = true;
/**
* Initializes a new <tt>UIVideoHandler2</tt> instance which is to
* facilitate the handling of the various video-related events occurring in
* a specific <tt>CallConference</tt>.
*
* @param callConference the <tt>CallConference</tt> in which the handling
* of the various video-related events is to be facilitated by the new
* instance
*/
public UIVideoHandler2(CallConference callConference)
{
this.callConference = callConference;
callConferenceListener = new CallConferenceListener();
}
/**
* Notifies this instance about a change in the value of the <tt>calls</tt>
* property of {@link #callConference} i.e. a <tt>Call</tt> was added to or
* removed from the list of <tt>Call</tt>s participating in
* <tt>callConference</tt>. Adding or removing <tt>Call</tt>s modifies the
* list of <tt>CallPeer</tt>s associated with <tt>callConference</tt> which
* in turn may result in the adding or removing of visual
* <tt>Component</tt>s depicting video.
*
* @param ev a <tt>PropertyChangeEvent</tt> which specifies the
* <tt>Call</tt> which was added to or removed from the list of
* <tt>Call</tt>s participating in <tt>callConference</tt>
*/
protected void callConferenceCallsPropertyChange(PropertyChangeEvent ev)
{
notifyObservers(ev);
}
/**
* Notifies this instance about a change in the value of a video-related
* property of a <tt>ConferenceMember</tt>. Changing such a value means that
* a visual <tt>Component</tt> displaying video may be associated or
* dissociated with the <tt>ConferenceMember</tt>.
*
* @param ev a <tt>PropertyChangeEvent</tt> which specifies the
* <tt>ConferenceMember</tt> whose video-related property value changed, the
* name of the property whose value changed, and the old and new values of
* the property in question
*/
protected void conferenceMemberVideoPropertyChange(PropertyChangeEvent ev)
{
notifyObservers(ev);
}
/**
* Releases the resources (which require explicit disposal such as listeners
* added to notifiers) acquired by this instance throughout its lifetime and
* prepares it for garbage collection.
*/
void dispose()
{
callConferenceListener.dispose();
}
/**
* Determines whether a specific <tt>Container</tt> is an ancestor of a
* specific <tt>Component</tt> (in the UI hierarchy).
*
* @param container the <tt>Container</tt> which is to be tested as an
* ancestor of <tt>component</tt>
* @param component the <tt>Component</tt> which is to be tested as having
* <tt>container</tt> as its ancestor
* @return <tt>true</tt> if <tt>container</tt> is an ancestor of
* <tt>component</tt> (in the UI hierarchy); otherwise, <tt>false</tt>
*/
public static boolean isAncestor(Container container, Component component)
{
do
{
Container parent = component.getParent();
if (parent == null)
return false;
else if (parent.equals(container))
return true;
else
component = parent;
}
while (true);
}
/**
* Gets the indicator which determines whether the visual <tt>Component</tt>
* depicting the video of the local peer/user streaming to the remote
* peer(s) is to be made visible in the user interface. The indicator does
* not determine whether the local peer/user is actually streaming video to
* the remote peer(s).
*
* @return <tt>true</tt> to have the visual <tt>Component</tt> depicting the
* video of the local peer/user streaming to the remote peer(s) visible in
* the user interface; otherwise, <tt>false</tt>
*/
public boolean isLocalVideoVisible()
{
return localVideoVisible;
}
/**
* Notifies this instance that the value of the property which indicates
* whether the local peer is streaming video to the remote peer(s) changed.
* It is not very clear who is the source of the
* <tt>PropertyChangeEvent</tt> because a <tt>PropertyChangeListener</tt> is
* added through <tt>OperationSetVideoTelephony</tt> by specifying a
* <tt>Call</tt>. But it is likely that a change in the value of the
* property in question is related to the video and, consequently, this
* instance.
*
* @param ev a <tt>PropertyChangeEvent</tt> which specifies the source
* notifying about the change and the old and new values of the property.
*/
protected void localVideoStreamingPropertyChange(PropertyChangeEvent ev)
{
notifyObservers(ev);
}
/**
* {@inheritDoc}
*
* Overrides {@link Observable#notifyObservers(Object)} to force the super
* to notify the added <tt>Observer</tt>s regardless of the <tt>changed</tt>
* state of this <tt>Observable</tt> which <tt>UIVideoHandler2</tt> does not
* use at the time of this writing.
*/
@Override
public void notifyObservers(Object arg)
{
setChanged();
super.notifyObservers(arg);
}
/**
* Notifies this instance about a specific <tt>CallPeerConferenceEvent</tt>
* fired by a <tt>CallPeer</tt> associated with a <tt>Call</tt>
* participating in {@link #callConference}. Adding or removing a
* <tt>ConferenceMember</tt> may cause a visual <tt>Component</tt>
* displaying video to be associated or dissociated with the
* <tt>ConferenceMember</tt>.
*
* @param ev the <tt>CallPeerConferenceEvent</tt> this instance is to be
* notified about
*/
protected void onCallPeerConferenceEvent(CallPeerConferenceEvent ev)
{
notifyObservers(ev);
}
/**
* Notifies this instance about a specific <tt>CallPeerEvent</tt> fired by a
* <tt>Call</tt> participating in {@link #callConference}. Adding or
* removing a <tt>CallPeer</tt> may modify the list of visual
* <tt>Component</tt>s displaying video.
*
* @param ev the <tt>CallPeerEvent</tt> this instance is to be notified
* about
*/
protected void onCallPeerEvent(CallPeerEvent ev)
{
notifyObservers(ev);
}
/**
* Notifies this instance about a specific <tt>VideoEvent</tt> fired by a
* <tt>CallPeer</tt> associated with a <tt>Call</tt> participating in
* {@link #callConference}.
*
* @param ev the <tt>VideoEvent</tt> this instance is to be notified about
*/
protected void onVideoEvent(VideoEvent ev)
{
notifyObservers(ev);
}
/**
* Sets the indicator which determines whether the visual <tt>Component</tt>
* depicting the video of the local peer/user streaming to the remote
* peer(s) is to be made visible in the user interface. The indicator does
* not determine whether the local peer/user is actually streaming video to
* the remote peer(s).
*
* @param localVideoVisible <tt>true</tt> to have the visual
* <tt>Component</tt> depicting the video of the local peer/user streaming
* to the remote peer(s) visible in the user interface; otherwise,
* <tt>false</tt>
*/
public void setLocalVideoVisible(boolean localVideoVisible)
{
if (this.localVideoVisible != localVideoVisible)
{
boolean oldValue = this.localVideoVisible;
this.localVideoVisible = localVideoVisible;
notifyObservers(
new PropertyChangeEvent(
this,
LOCAL_VIDEO_VISIBLE_PROPERTY_NAME,
oldValue,
this.localVideoVisible));
}
}
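    // Illustrative sketch (editor's addition, not part of the original source):
    // a panel sharing this handler would typically consume its notifications via
    // the java.util.Observer contract that this class builds on, e.g.
    //
    //     uiVideoHandler.addObserver((observable, arg) -> {
    //         if (arg instanceof PropertyChangeEvent
    //                 && LOCAL_VIDEO_VISIBLE_PROPERTY_NAME.equals(
    //                         ((PropertyChangeEvent) arg).getPropertyName())) {
    //             // show or hide the local video Component accordingly
    //         }
    //     });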
/**
* Implements the listeners which get notified by
* {@link UIVideoHandler2#callConference}, the <tt>Call</tt>s participating
* in it, the <tt>CallPeer</tt>s associated with them, and the
* <tt>ConferenceMember</tt>s participating in their telephony conferences
* about events related to the handling of video which this
* <tt>UIVideoHandler2</tt> facilitates.
*/
private class CallConferenceListener
extends CallPeerConferenceAdapter
implements CallChangeListener,
PropertyChangeListener,
VideoListener
{
/**
* Initializes a new <tt>CallConferenceListener</tt> instance which is
* to get notified by {@link UIVideoHandler2#callConference} about
* events related to the handling of video.
*/
CallConferenceListener()
{
callConference.addCallChangeListener(this);
callConference.addCallPeerConferenceListener(this);
callConference.addPropertyChangeListener(this);
for (Call call : callConference.getCalls())
addListeners(call);
}
/**
* Adds this as a listener to the <tt>CallPeer</tt>s associated with a
* specific <tt>Call</tt> and to the <tt>ConferenceMember</tt>s
* participating in their telephony conferences.
*
* @param call the <tt>Call</tt> to whose associated <tt>CallPeer</tt>s
* and <tt>ConferenceMember</tt>s this is to add itself as a listener
*/
private void addListeners(Call call)
{
OperationSetVideoTelephony videoTelephony
= call.getProtocolProvider().getOperationSet(
OperationSetVideoTelephony.class);
if (videoTelephony != null)
videoTelephony.addPropertyChangeListener(call, this);
Iterator<? extends CallPeer> callPeerIter = call.getCallPeers();
while (callPeerIter.hasNext())
addListeners(callPeerIter.next());
}
/**
* Adds this as a listener to a specific <tt>CallPeer</tt> and to the
* <tt>ConferenceMember</tt>s participating in its telephony conference.
*
* @param callPeer the <tt>CallPeer</tt> to which and to whose
* participating <tt>ConferenceMember</tt>s this is to add itself as a
* listener
*/
private void addListeners(CallPeer callPeer)
{
OperationSetVideoTelephony videoTelephony
= callPeer.getProtocolProvider().getOperationSet(
OperationSetVideoTelephony.class);
if (videoTelephony != null)
videoTelephony.addVideoListener(callPeer, this);
for (ConferenceMember conferenceMember
: callPeer.getConferenceMembers())
{
addListeners(conferenceMember);
}
}
/**
* Adds this as a listener to a specific <tt>ConferenceMember</tt>.
*
* @param conferenceMember the <tt>ConferenceMember</tt> to which this
* is to add itself as a listener
*/
private void addListeners(ConferenceMember conferenceMember)
{
conferenceMember.addPropertyChangeListener(this);
}
/**
* {@inheritDoc}
*
* Delegates to {@link #onCallPeerEvent(CallPeerEvent)} because the
* specifics can be determined from the <tt>CallPeerEvent</tt>.
*/
public void callPeerAdded(CallPeerEvent ev)
{
onCallPeerEvent(ev);
}
/**
* {@inheritDoc}
*
* Delegates to {@link #onCallPeerEvent(CallPeerEvent)} because the
* specifics can be determined from the <tt>CallPeerEvent</tt>.
*/
public void callPeerRemoved(CallPeerEvent ev)
{
onCallPeerEvent(ev);
}
/**
* {@inheritDoc}
*
* <tt>CallConferenceListener</tt> does nothing because changes in the
* state of a <tt>Call</tt> are not directly related to video or are
* expressed with other events which are directly related to video.
*/
public void callStateChanged(CallChangeEvent ev)
{
}
/**
* Releases the resources (which require explicit disposal such as
* listeners added to notifiers) acquired by this instance throughout
* its lifetime and prepares it for garbage collection.
*/
void dispose()
{
callConference.removeCallChangeListener(this);
callConference.removeCallPeerConferenceListener(this);
callConference.removePropertyChangeListener(this);
for (Call call : callConference.getCalls())
removeListeners(call);
}
/**
* {@inheritDoc}
*/
@Override
protected void onCallPeerConferenceEvent(CallPeerConferenceEvent ev)
{
switch (ev.getEventID())
{
case CallPeerConferenceEvent.CONFERENCE_MEMBER_ADDED:
addListeners(ev.getConferenceMember());
break;
case CallPeerConferenceEvent.CONFERENCE_MEMBER_REMOVED:
removeListeners(ev.getConferenceMember());
break;
}
UIVideoHandler2.this.onCallPeerConferenceEvent(ev);
}
/**
* Notifies this instance about a specific <tt>CallPeerEvent</tt> which
* was fired by a <tt>Call</tt> participating in
* {@link UIVideoHandler2#callConference}.
*
* @param ev the <tt>CallPeerEvent</tt> which this instance is to be
* notified about and which was fired by a <tt>Call</tt> participating
* in <tt>UIVideoHandler2.callConference</tt>
*/
private void onCallPeerEvent(CallPeerEvent ev)
{
switch (ev.getEventID())
{
case CallPeerEvent.CALL_PEER_ADDED:
addListeners(ev.getSourceCallPeer());
break;
case CallPeerEvent.CALL_PEER_REMOVED:
removeListeners(ev.getSourceCallPeer());
break;
}
UIVideoHandler2.this.onCallPeerEvent(ev);
}
/**
* Notifies this instance about a specific <tt>VideoEvent</tt> which was
* fired by a <tt>CallPeer</tt> associated with a <tt>Call</tt>
* participating in {@link UIVideoHandler2#callConference}.
*
* @param ev the <tt>VideoEvent</tt> which this instance is to be
* notified about and which was fired by a <tt>CallPeer</tt> associated
* with a <tt>Call</tt> participating in
* <tt>UIVideoHandler2.callConference</tt>
*/
private void onVideoEvent(VideoEvent ev)
{
UIVideoHandler2.this.onVideoEvent(ev);
}
/**
* {@inheritDoc}
*
* For example, notifies this <tt>UIVideoHandler2</tt> that a
* <tt>Call</tt> was added/removed to/from the <tt>callConference</tt>.
*/
public void propertyChange(PropertyChangeEvent ev)
{
String propertyName = ev.getPropertyName();
if (CallConference.CALLS.equals(propertyName))
{
if (ev.getSource() instanceof CallConference)
{
Object oldValue = ev.getOldValue();
if (oldValue instanceof Call)
removeListeners((Call) oldValue);
Object newValue = ev.getNewValue();
if (newValue instanceof Call)
addListeners((Call) newValue);
callConferenceCallsPropertyChange(ev);
}
}
else if (ConferenceMember.VIDEO_SSRC_PROPERTY_NAME.equals(
propertyName)
|| ConferenceMember.VIDEO_STATUS_PROPERTY_NAME.equals(
propertyName))
{
if (ev.getSource() instanceof ConferenceMember)
conferenceMemberVideoPropertyChange(ev);
}
else if (OperationSetVideoTelephony.LOCAL_VIDEO_STREAMING.equals(
propertyName))
{
localVideoStreamingPropertyChange(ev);
}
}
/**
* Removes this as a listener from the <tt>CallPeer</tt>s associated
* with a specific <tt>Call</tt> and from the <tt>ConferenceMember</tt>s
* participating in their telephony conferences.
*
* @param call the <tt>Call</tt> from whose associated
* <tt>CallPeer</tt>s and <tt>ConferenceMember</tt>s this is to remove
* itself as a listener
*/
private void removeListeners(Call call)
{
OperationSetVideoTelephony videoTelephony
= call.getProtocolProvider().getOperationSet(
OperationSetVideoTelephony.class);
if (videoTelephony != null)
                videoTelephony.removePropertyChangeListener(call, this);
Iterator<? extends CallPeer> callPeerIter = call.getCallPeers();
while (callPeerIter.hasNext())
removeListeners(callPeerIter.next());
}
/**
* Removes this as a listener from a specific <tt>CallPeer</tt> and from
* the <tt>ConferenceMember</tt>s participating in its telephony
* conference.
*
         * @param callPeer the <tt>CallPeer</tt> from which and from whose
* participating <tt>ConferenceMember</tt>s this is to remove itself as
* a listener
*/
private void removeListeners(CallPeer callPeer)
{
OperationSetVideoTelephony videoTelephony
= callPeer.getProtocolProvider().getOperationSet(
OperationSetVideoTelephony.class);
if (videoTelephony != null)
videoTelephony.removeVideoListener(callPeer, this);
for (ConferenceMember conferenceMember
: callPeer.getConferenceMembers())
{
removeListeners(conferenceMember);
}
}
/**
* Removes this as a listener from a specific <tt>ConferenceMember</tt>.
*
* @param conferenceMember the <tt>ConferenceMember</tt> from which this
* is to remove itself as a listener
*/
private void removeListeners(ConferenceMember conferenceMember)
{
conferenceMember.removePropertyChangeListener(this);
}
/**
* {@inheritDoc}
*
* Implements {@link VideoListener#videoAdded(VideoEvent)}. Delegates to
         * {@link #onVideoEvent(VideoEvent)} because the specifics can be
* determined from the <tt>VideoEvent</tt>.
*/
public void videoAdded(VideoEvent ev)
{
onVideoEvent(ev);
}
/**
* {@inheritDoc}
*
* Implements {@link VideoListener#videoRemoved(VideoEvent)}. Delegates
         * to {@link #onVideoEvent(VideoEvent)} because the specifics can be
* determined from the <tt>VideoEvent</tt>.
*/
public void videoRemoved(VideoEvent ev)
{
onVideoEvent(ev);
}
/**
* {@inheritDoc}
*
* Implements {@link VideoListener#videoUpdate(VideoEvent)}. Delegates
         * to {@link #onVideoEvent(VideoEvent)} because the specifics can be
* determined from the <tt>VideoEvent</tt>.
*/
public void videoUpdate(VideoEvent ev)
{
onVideoEvent(ev);
}
}
}
| 9,410 |
777 | <reponame>google-ar/chromium<gh_stars>100-1000
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ASH_MUS_NON_CLIENT_FRAME_CONTROLLER_H_
#define ASH_MUS_NON_CLIENT_FRAME_CONTROLLER_H_
#include <stdint.h>
#include <map>
#include <string>
#include <vector>
#include "ash/mus/frame/detached_title_area_renderer_host.h"
#include "base/macros.h"
#include "base/strings/string16.h"
#include "ui/aura/window_observer.h"
#include "ui/gfx/geometry/insets.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/views/widget/widget_delegate.h"
namespace aura {
class Window;
class WindowManagerClient;
}
namespace gfx {
class Insets;
}
namespace ui {
namespace mojom {
enum class WindowType;
}
}
namespace ash {
namespace mus {
class WindowManager;
// Provides the non-client frame for mus Windows.
class NonClientFrameController : public views::WidgetDelegateView,
public aura::WindowObserver,
public DetachedTitleAreaRendererHost {
public:
// Creates a new NonClientFrameController and window to render the non-client
// frame decorations. This deletes itself when |window| is destroyed. |parent|
// is the parent to place the newly created window in, and may be null. If
// |parent| is null |context| is used to determine the parent Window. One of
// |parent| or |context| must be non-null.
NonClientFrameController(
aura::Window* parent,
aura::Window* context,
const gfx::Rect& bounds,
ui::mojom::WindowType window_type,
std::map<std::string, std::vector<uint8_t>>* properties,
WindowManager* window_manager);
// Returns the NonClientFrameController for the specified window, null if
// one was not created.
static NonClientFrameController* Get(aura::Window* window);
// Returns the preferred client area insets.
static gfx::Insets GetPreferredClientAreaInsets();
// Returns the width needed to display the standard set of buttons on the
// title bar.
static int GetMaxTitleBarButtonWidth();
aura::Window* window() { return window_; }
aura::WindowManagerClient* window_manager_client() {
return window_manager_client_;
}
void SetClientArea(const gfx::Insets& insets,
const std::vector<gfx::Rect>& additional_client_areas);
private:
~NonClientFrameController() override;
// DetachedTitleAreaRendererHost:
void OnDetachedTitleAreaRendererDestroyed(
DetachedTitleAreaRenderer* renderer) override;
// views::WidgetDelegateView:
base::string16 GetWindowTitle() const override;
bool CanResize() const override;
bool CanMaximize() const override;
bool CanMinimize() const override;
bool ShouldShowWindowTitle() const override;
views::ClientView* CreateClientView(views::Widget* widget) override;
// aura::WindowObserver:
void OnWindowHierarchyChanged(const HierarchyChangeParams& params) override;
void OnWindowPropertyChanged(aura::Window* window,
const void* key,
intptr_t old) override;
void OnWindowDestroyed(aura::Window* window) override;
aura::WindowManagerClient* window_manager_client_;
views::Widget* widget_;
// WARNING: as widget delays destruction there is a portion of time when this
// is null.
aura::Window* window_;
// Used if a child window is added that has the
// kRenderParentTitleArea_Property set.
DetachedTitleAreaRenderer* detached_title_area_renderer_ = nullptr;
bool did_init_native_widget_ = false;
gfx::Insets client_area_insets_;
std::vector<gfx::Rect> additional_client_areas_;
DISALLOW_COPY_AND_ASSIGN(NonClientFrameController);
};
} // namespace mus
} // namespace ash
#endif // ASH_MUS_NON_CLIENT_FRAME_CONTROLLER_H_
| 1,337 |
343 | from torch import optim
from contextlib import contextmanager
class Trainer:
r"""Abstract base class for training models.
The Trainer class makes it incredibly simple and convinient to train,
monitor, debug and checkpoint entire Deep Learning projects.
Simply define your training loop by
implementing the :py:meth:`optimize` method.
Args:
models (list of :py:class:`nn.Module`): All the models that need
to be trained
optimizers (list of :py:class:`optim.Optimizer`): Any optimizers that
are used
.. note::
        If any model is in eval() mode, the trainer is *set off*.
        This means that, as per protocol, *none* of the models will train.
Attributes:
callbacks (list): A list of callbacks attached to the trainer.
Take a look at :py:class:`SupervisedTrainer` for an idea on how to extend this class.
"""
def __init__(self, models, optimizers):
self.models = models
self.optimizers = optimizers
self.parameters = set()
self.register_parameter('iterations', 0)
def optimize(self):
r""" Defines the core optimization loop.
This method is called on each iteration.
Two quick protocols that one needs to follow are:
1. **Do NOT** actually backpropagate or step() the optimizers if the
trainer is not training. Use the :py:meth:`is_training` method
to find out.
This is essential since this will ensure that the trainer behaves
as expected when :py:meth:`is_training` is ``False``.
Useful, for example, in cases like :py:class:`callbacks.ColdStart`
2. Send a callback the signal ``'gradient'`` with a keyword argument
``'models'`` that is the list of models that accumulate a gradient.
Usually, it's all the modules (``self.modules``).
Any callbacks that listen to this signal are interested in the gradient
information (eg. ``callbacks.Babysitter``).
"""
raise NotImplementedError
def train(self, dataloader, epochs=1, callbacks=None, **kwargs):
r"""Starts the training process.
Args:
dataloader (``DataLoader``): The MagNet dataloader that iterates
over the training set
epochs (float or int): The number of epochs to train for.
Default: ``1``
callbacks (list): Any callbacks to be attached. Default: ``None``
Keyword Args:
iterations (int): The number of iterations to train for.
Overrides :attr:`epochs`.
.. note::
PyTorch ``DataLoader`` s are not supported.
Ideally, encapsulate your dataset in the ``Data`` class.
"""
from magnet.training.callbacks import CallbackQueue
self.dataloader = dataloader
if callbacks is None: callbacks = []
self.callbacks = CallbackQueue(callbacks)
total_iterations = kwargs.get('iterations', int(epochs * len(dataloader)))
self.callbacks('on_training_start', trainer=self, total_iterations=total_iterations)
for self.iterations in range(self.iterations, self.iterations + total_iterations): next(self)
self.callbacks('on_training_end', trainer=self)
def __iter__(self):
return self
def __next__(self):
self.callbacks('on_batch_start', trainer=self)
self.optimize()
self.callbacks('on_batch_end', trainer=self)
@contextmanager
def mock(self, path=None):
r"""A context manager that creates a temporary *'safe'* scope for training.
        All changes to stateful objects (models, optimizers and the
        trainer itself) are forgotten once out of this scope.
This is very useful if you need to try out *what-if experiments*.
Args:
path (pathlib.Path): The path to save temporary states into
Default: ``{System temp directory}/.mock_trainer``
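        Example (illustrative, editor's addition)::
            >>> with trainer.mock():
            ...     trainer.train(dataloader, epochs=0.1)  # experiment freely
            >>> # model, optimizer and trainer state are restored on exit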
"""
from shutil import rmtree
if path is None:
from pathlib import Path
from tempfile import gettempdir
path = Path(gettempdir()) / '.mock_trainer'
rmtree(path, ignore_errors=True) # Remove any existing directory
self.save_state(path)
try:
yield
finally:
self.load_state(path)
rmtree(path)
def epochs(self, mode=None):
r"""The number of epochs completed.
Args:
mode (str or None): If the mode is ``'start'`` or ``'end'``, a
boolean is returned signalling if it's the start or end of an epoch
"""
if mode is None:
return self.iterations / len(self.dataloader)
if mode == 'start':
return (self.iterations / len(self.dataloader)).is_integer()
if mode == 'end':
return ((self.iterations + 1) / len(self.dataloader)).is_integer()
def is_training(self):
return all(model.training for model in self.models)
def load_state(self, path):
from magnet.training.utils import load_state, load_object
for i, model in enumerate(self.models): load_state(model, path / 'models', alternative_name=str(i))
for i, optimizer in enumerate(self.optimizers): load_state(optimizer, path / 'optimizers', alternative_name=str(i))
state_dict = load_object(path / 'state.p', default={})
for attr, val in state_dict.items(): self.register_parameter(attr, val)
try: self.callbacks('load_state', trainer=self, path=path / 'callbacks')
except AttributeError: pass
try: self.dataloader.load_state_dict(path / 'dataloader.p')
except AttributeError: pass
def save_state(self, path):
from magnet.training.utils import save_state, save_object
for i, model in enumerate(self.models): save_state(model, path / 'models', alternative_name=str(i))
for i, optimizer in enumerate(self.optimizers): save_state(optimizer, path / 'optimizers', alternative_name=str(i))
state_dict = {attr: getattr(self, attr) for attr in self.parameters}
save_object(state_dict, path / 'state.p')
try: self.callbacks('save_state', trainer=self, path=path / 'callbacks')
except AttributeError: pass
try: self.dataloader.save_state_dict(path / 'dataloader.p')
except AttributeError: pass
def register_parameter(self, name, value):
r"""Use this to register *'stateful'* parameters that are serialized
"""
setattr(self, name, value)
self.parameters.add(name)
class SupervisedTrainer(Trainer):
r"""A simple trainer that implements a supervised approach where a simple
model :math:`\hat{y} = f(x)` is trained to map :math:`\hat{y}` to
ground-truth :math:`y` according to some specified loss.
This is the training routine that most high-level deep learning frameworks
implement.
Args:
model (``nn.Module``): The model that needs to be trained
optimizer (str or optim.Optimzer): The optimizer used to train
the model. Default: ``'adam'``
loss (str or ``callable``): A loss function that gives the objective
to be minimized. Default: ``'cross_entropy'``
metrics (list): Any other metrics that need to be monitored.
Default: ``None``
* :attr:`optimizer` can be an actual ``optim.Optimizer`` instance or the
name of a popular optimzizer (eg. ``'adam'``).
* :attr:`loss` can be a function or the name of a popular
loss function (eg. ``'cross_entropy'``).
It should accept 2 arguments (:math:`\hat{y}`, :math:`y`).
* :attr:`metrics` should contain a list of functions which accept
2 arguments (:math:`\hat{y}`, :math:`y`), like the loss function.
.. note::
A static :py:meth:`validate` function is provided for the
validation callback
.. note::
The :attr:`metrics` is of no use unless there is some
callback (eg.``callbacks.Monitor``) to receive the metrics
Examples::
>>> import magnet as mag
>>> import magnet.nodes as mn
>>> from magnet.data import Data
>>> from magnet.training import callbacks, SupervisedTrainer
>>> data = Data.get('mnist')
>>> model = mn.Linear(10, act=None)
>>> model.build(x=next(data())[0])
>>> trainer = SupervisedTrainer(model)
>>> callbacks=[callbacks.Monitor(),
callbacks.Validate(data(64, mode='val'), SupervisedTrainer.validate)]
>>> trainer.train(data(64, shuffle=True), 1, callbacks)
"""
def __init__(self, model, optimizer='adam', loss='cross_entropy', metrics=None):
from magnet.nodes.functional import wiki
if isinstance(optimizer, str): optimizer = optimizer_wiki[optimizer.lower()](model.parameters())
if isinstance(loss, str): loss = wiki['losses'][loss.lower()]
if metrics is None: metrics = []
if not isinstance(metrics, (tuple, list)): metrics = [metrics]
for i, metric in enumerate(metrics):
if isinstance(metric, str): metrics[i] = (metric, wiki['metrics'][metric.lower()])
super().__init__([model], [optimizer])
self.loss = loss
self.metrics = metrics
def optimize(self):
optimizer = self.optimizers[0]
loss = self.get_loss(self.dataloader)
# Protocol 1: Backprop and step() only if trainer is training
if self.is_training():
loss.backward()
# Protocol 2: Broadcast the models that accumulate the gradient
# using signal 'gradient' before clearing them.
self.callbacks('gradient', trainer=self, models=self.models)
optimizer.step()
optimizer.zero_grad()
@staticmethod
def validate(trainer, dataloader):
r"""Static helper method to validate models in :attr:`trainer` against
data in :attr:`dataloader`.
Can be passed to ``callbacks.Validate()``.
"""
trainer.get_loss(dataloader, validation=True)
def get_loss(self, dataloader, validation=False):
r"""Utility function that returns the loss and broadcasts metrics.
"""
def write_stats(key, value):
self.callbacks('write_stats', trainer=self, key=key, value=value, validation=validation, buffer_size=len(dataloader))
model = self.models[0]
x, y = next(dataloader)
y_pred = model(x)
loss = self.loss(y_pred, y)
# Broadcast the loss and any other metrics using the 'write_stats' signal.
write_stats('loss', loss.item())
for metric in self.metrics: write_stats(metric[0], metric[1](y_pred, y).item())
return loss
def finish_training(path, names=None):
r""" A helper function for cleaning up the training logs and other
checkpoints and retaining only the state_dicts of the trained models.
Args:
path (pathlib.Path): The path where the trainer was checkpointed
names (list): The names of the models in the order given to the trainer.
Default: ``None``
* :attr:`names` can be used if the models themselves did not have names
prior to training.
The checkpoints default to an ordered naming scheme.
If passed, the files are additionally renamed to these names.
.. note::
Does nothing / fails silently if the path does not exist.
Example::
>>> # Assume that we've defined two models - encoder and decoder,
>>> # and a suitable trainer. The models do not have a 'name' attribute.
>>> trainer.save_state(checkpoint_path / 'my-trainer')
>>> # Suppose the checkpoint directory contains the following files:
>>> # my-trainer/
>>> # models/
>>> # 0.pt
>>> # 1.pt
>>> # callbacks/
>>> # monitor/
>>> # babysitter/
>>> # state.p
>>> finish_training(checkpoint_path / 'my-trainer', names=['encoder', 'decoder'])
>>> # Now checkpoint_path contains these files:
>>> # encoder.pt
>>> # decoder.pt
"""
if not path.exists(): return
import shutil
if isinstance(names, str): names = [names]
filenames = sorted((path / 'models').glob('*.pt'))  # sort so 0.pt, 1.pt, ... pair up with the given names
if names is None: names = [filename.stem for filename in filenames]
for name, filename in zip(names, filenames):
shutil.move(filename, path.parent / (name + '.pt'))
shutil.rmtree(path)
optimizer_wiki = {'adam': optim.Adam} | 4,960 |
4,054 | <gh_stars>1000+
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.clustercontroller.core.status;
import com.yahoo.vdslib.state.ClusterState;
import com.yahoo.vespa.clustercontroller.core.StateVersionTracker;
import com.yahoo.vespa.clustercontroller.core.status.statuspage.StatusPageResponse;
import com.yahoo.vespa.clustercontroller.core.status.statuspage.StatusPageServer;
public class ClusterStateRequestHandler implements StatusPageServer.RequestHandler {
private final StateVersionTracker stateVersionTracker;
public ClusterStateRequestHandler(StateVersionTracker stateVersionTracker) {
this.stateVersionTracker = stateVersionTracker;
}
@Override
public StatusPageResponse handle(StatusPageServer.HttpRequest request) {
ClusterState cs = stateVersionTracker.getVersionedClusterState();
StatusPageResponse response = new StatusPageResponse();
response.setContentType("text/plain");
response.writeContent(cs.toString());
return response;
}
}
| 334 |
3,094 | import torch
import numpy as np
import cv2
from PIL import Image
import torch.nn as nn
import albumentations as A
from albumentations.pytorch import ToTensorV2
from torch.utils.data import Dataset
import os
class ImageFolder(Dataset):
def __init__(self, root_dir, transform=None):
super(ImageFolder, self).__init__()
self.data = []
self.root_dir = root_dir
self.transform = transform
self.class_names = os.listdir(root_dir)
for index, name in enumerate(self.class_names):
files = os.listdir(os.path.join(root_dir, name))
self.data += list(zip(files, [index]*len(files)))
def __len__(self):
return len(self.data)
def __getitem__(self, index):
img_file, label = self.data[index]
root_and_dir = os.path.join(self.root_dir, self.class_names[label])
image = np.array(Image.open(os.path.join(root_and_dir, img_file)))
if self.transform is not None:
augmentations = self.transform(image=image)
image = augmentations["image"]
return image, label
transform = A.Compose(
[
A.Resize(width=1920, height=1080),
A.RandomCrop(width=1280, height=720),
A.Rotate(limit=40, p=0.9, border_mode=cv2.BORDER_CONSTANT),
A.HorizontalFlip(p=0.5),
A.VerticalFlip(p=0.1),
A.RGBShift(r_shift_limit=25, g_shift_limit=25, b_shift_limit=25, p=0.9),
A.OneOf([
A.Blur(blur_limit=3, p=0.5),
A.ColorJitter(p=0.5),
], p=1.0),
A.Normalize(
mean=[0, 0, 0],
std=[1, 1, 1],
max_pixel_value=255,
),
ToTensorV2(),
]
)
dataset = ImageFolder(root_dir="cat_dogs", transform=transform)
for x,y in dataset:
print(x.shape)
| 867 |
378 | <gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openejb.jee;
import org.metatype.sxc.jaxb.JAXBObject;
import org.metatype.sxc.jaxb.LifecycleCallback;
import org.metatype.sxc.jaxb.RuntimeContext;
import org.metatype.sxc.util.Attribute;
import org.metatype.sxc.util.XoXMLStreamReader;
import org.metatype.sxc.util.XoXMLStreamWriter;
import javax.xml.XMLConstants;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.namespace.QName;
import java.util.ArrayList;
import java.util.List;
import static org.apache.openejb.jee.ConfigProperty$JAXB.readConfigProperty;
import static org.apache.openejb.jee.ConfigProperty$JAXB.writeConfigProperty;
@SuppressWarnings({
"StringEquality"
})
public class AdminObject$JAXB
extends JAXBObject<AdminObject> {
public AdminObject$JAXB() {
super(AdminObject.class, null, new QName("http://java.sun.com/xml/ns/javaee".intern(), "adminobjectType".intern()), ConfigProperty$JAXB.class);
}
public static AdminObject readAdminObject(final XoXMLStreamReader reader, final RuntimeContext context)
throws Exception {
return _read(reader, context);
}
public static void writeAdminObject(final XoXMLStreamWriter writer, final AdminObject adminObject, final RuntimeContext context)
throws Exception {
_write(writer, adminObject, context);
}
public void write(final XoXMLStreamWriter writer, final AdminObject adminObject, final RuntimeContext context)
throws Exception {
_write(writer, adminObject, context);
}
public final static AdminObject _read(final XoXMLStreamReader reader, RuntimeContext context)
throws Exception {
// Check for xsi:nil
if (reader.isXsiNil()) {
return null;
}
if (context == null) {
context = new RuntimeContext();
}
final AdminObject adminObject = new AdminObject();
context.beforeUnmarshal(adminObject, LifecycleCallback.NONE);
List<ConfigProperty> configProperty = null;
// Check xsi:type
final QName xsiType = reader.getXsiType();
if (xsiType != null) {
if (("adminobjectType" != xsiType.getLocalPart()) || ("http://java.sun.com/xml/ns/javaee" != xsiType.getNamespaceURI())) {
return context.unexpectedXsiType(reader, AdminObject.class);
}
}
// Read attributes
for (final Attribute attribute : reader.getAttributes()) {
if (("id" == attribute.getLocalName()) && (("" == attribute.getNamespace()) || (attribute.getNamespace() == null))) {
// ATTRIBUTE: id
final String id = Adapters.collapsedStringAdapterAdapter.unmarshal(attribute.getValue());
context.addXmlId(reader, id, adminObject);
adminObject.id = id;
} else if (XMLConstants.W3C_XML_SCHEMA_INSTANCE_NS_URI != attribute.getNamespace()) {
context.unexpectedAttribute(attribute, new QName("", "id"));
}
}
// Read elements
for (final XoXMLStreamReader elementReader : reader.getChildElements()) {
if (("adminobject-interface" == elementReader.getLocalName()) && ("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: adminObjectInterface
final String adminObjectInterfaceRaw = elementReader.getElementAsString();
final String adminObjectInterface;
try {
adminObjectInterface = Adapters.collapsedStringAdapterAdapter.unmarshal(adminObjectInterfaceRaw);
} catch (final Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
adminObject.adminObjectInterface = adminObjectInterface;
} else if (("adminobject-class" == elementReader.getLocalName()) && ("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: adminObjectClass
final String adminObjectClassRaw = elementReader.getElementAsString();
final String adminObjectClass;
try {
adminObjectClass = Adapters.collapsedStringAdapterAdapter.unmarshal(adminObjectClassRaw);
} catch (final Exception e) {
context.xmlAdapterError(elementReader, CollapsedStringAdapter.class, String.class, String.class, e);
continue;
}
adminObject.adminObjectClass = adminObjectClass;
} else if (("config-property" == elementReader.getLocalName()) && ("http://java.sun.com/xml/ns/javaee" == elementReader.getNamespaceURI())) {
// ELEMENT: configProperty
final ConfigProperty configPropertyItem = readConfigProperty(elementReader, context);
if (configProperty == null) {
configProperty = adminObject.configProperty;
if (configProperty != null) {
configProperty.clear();
} else {
configProperty = new ArrayList<ConfigProperty>();
}
}
configProperty.add(configPropertyItem);
} else {
context.unexpectedElement(elementReader, new QName("http://java.sun.com/xml/ns/javaee", "adminobject-interface"), new QName("http://java.sun.com/xml/ns/javaee", "adminobject-class"), new QName("http://java.sun.com/xml/ns/javaee", "config-property"));
}
}
if (configProperty != null) {
adminObject.configProperty = configProperty;
}
context.afterUnmarshal(adminObject, LifecycleCallback.NONE);
return adminObject;
}
public final AdminObject read(final XoXMLStreamReader reader, final RuntimeContext context)
throws Exception {
return _read(reader, context);
}
public final static void _write(final XoXMLStreamWriter writer, final AdminObject adminObject, RuntimeContext context)
throws Exception {
if (adminObject == null) {
writer.writeXsiNil();
return;
}
if (context == null) {
context = new RuntimeContext();
}
final String prefix = writer.getUniquePrefix("http://java.sun.com/xml/ns/javaee");
if (AdminObject.class != adminObject.getClass()) {
context.unexpectedSubclass(writer, adminObject, AdminObject.class);
return;
}
context.beforeMarshal(adminObject, LifecycleCallback.NONE);
// ATTRIBUTE: id
final String idRaw = adminObject.id;
if (idRaw != null) {
String id = null;
try {
id = Adapters.collapsedStringAdapterAdapter.marshal(idRaw);
} catch (final Exception e) {
context.xmlAdapterError(adminObject, "id", CollapsedStringAdapter.class, String.class, String.class, e);
}
writer.writeAttribute("", "", "id", id);
}
// ELEMENT: adminObjectInterface
final String adminObjectInterfaceRaw = adminObject.adminObjectInterface;
String adminObjectInterface = null;
try {
adminObjectInterface = Adapters.collapsedStringAdapterAdapter.marshal(adminObjectInterfaceRaw);
} catch (final Exception e) {
context.xmlAdapterError(adminObject, "adminObjectInterface", CollapsedStringAdapter.class, String.class, String.class, e);
}
if (adminObjectInterface != null) {
writer.writeStartElement(prefix, "adminobject-interface", "http://java.sun.com/xml/ns/javaee");
writer.writeCharacters(adminObjectInterface);
writer.writeEndElement();
} else {
context.unexpectedNullValue(adminObject, "adminObjectInterface");
}
// ELEMENT: adminObjectClass
final String adminObjectClassRaw = adminObject.adminObjectClass;
String adminObjectClass = null;
try {
adminObjectClass = Adapters.collapsedStringAdapterAdapter.marshal(adminObjectClassRaw);
} catch (final Exception e) {
context.xmlAdapterError(adminObject, "adminObjectClass", CollapsedStringAdapter.class, String.class, String.class, e);
}
if (adminObjectClass != null) {
writer.writeStartElement(prefix, "adminobject-class", "http://java.sun.com/xml/ns/javaee");
writer.writeCharacters(adminObjectClass);
writer.writeEndElement();
} else {
context.unexpectedNullValue(adminObject, "adminObjectClass");
}
// ELEMENT: configProperty
final List<ConfigProperty> configProperty = adminObject.configProperty;
if (configProperty != null) {
for (final ConfigProperty configPropertyItem : configProperty) {
if (configPropertyItem != null) {
writer.writeStartElement(prefix, "config-property", "http://java.sun.com/xml/ns/javaee");
writeConfigProperty(writer, configPropertyItem, context);
writer.writeEndElement();
}
}
}
context.afterMarshal(adminObject, LifecycleCallback.NONE);
}
}
| 4,130 |
6,989 | #pragma once
#include "concepts/size_fitter.h"
#include <util/system/yassert.h>
#include <util/generic/bitops.h>
namespace NFlatHash {
class TAndSizeFitter {
public:
size_t EvalIndex(size_t hs, size_t sz) const noexcept {
Y_ASSERT(Mask_ == sz - 1);
return (hs & Mask_);
}
size_t EvalSize(size_t sz) const noexcept {
return FastClp2(sz);
}
void Update(size_t sz) noexcept {
Y_ASSERT((sz & (sz - 1)) == 0);
Mask_ = sz - 1;
}
private:
size_t Mask_ = 0;
};
static_assert(NConcepts::SizeFitterV<TAndSizeFitter>);
class TModSizeFitter {
public:
constexpr size_t EvalIndex(size_t hs, size_t sz) const noexcept {
return hs % sz;
}
constexpr size_t EvalSize(size_t sz) const noexcept {
return sz;
}
constexpr void Update(size_t) noexcept {}
};
static_assert(NConcepts::SizeFitterV<TModSizeFitter>);
} // NFlatHash
| 430 |
348 | {"nom":"Gomont","circ":"1ère circonscription","dpt":"Ardennes","inscrits":257,"abs":153,"votants":104,"blancs":1,"nuls":0,"exp":103,"res":[{"nuance":"LR","nom":"<NAME>","voix":83},{"nuance":"REM","nom":"<NAME>","voix":20}]} | 88 |
460 | <gh_stars>100-1000
/*
* Copyright 2017 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.orientdb.test;
import com.orientechnologies.orient.core.db.OPartitionedDatabasePool;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.junit.After;
import org.junit.Before;
import javax.inject.Inject;
import javax.inject.Named;
import javax.naming.InitialContext;
import javax.naming.NamingException;
/**
* @author <a href="mailto:<EMAIL>"><NAME></a>
*/
public abstract class AbstractTestCase {
private static final String DATABASE_JNDI = "java:jboss/orientdb/test";
@ArquillianResource
private static InitialContext initialContext;
@Before
public void before() throws NamingException {
try {
clearDatabase();
} catch (Throwable t) {
// Database might not exist at this stage
}
initDatabase();
}
@After
public void after() throws NamingException {
clearDatabase();
}
@Inject
@Named("orienttesttprofile")
private OPartitionedDatabasePool databasePool;
private OPartitionedDatabasePool getDatabasePool() throws NamingException {
return databasePool;
}
private void initDatabase() throws NamingException {
try (ODatabaseDocumentTx database = getDatabasePool().acquire()) {
database.getMetadata().getSchema().createClass("Person").createProperty("name", OType.STRING);
}
}
private void clearDatabase() throws NamingException {
try (ODatabaseDocumentTx database = getDatabasePool().acquire()) {
database.getMetadata().getSchema().dropClass("Person");
} catch (Throwable ignore) { }
OrientGraph database = new OrientGraph(getDatabasePool());
try {
database.getEdges().forEach(database::removeEdge);
database.getVertices().forEach(database::removeVertex);
} finally {
database.shutdown();
}
}
}
| 957 |
2,086 | #include "colormanager.h"
#include <string>
#include <unordered_set>
#include <vector>
#include "3rd-party/catch.hpp"
#include "confighandlerexception.h"
#include "configparser.h"
using namespace newsboat;
class StylesCollector {
std::map<std::string, std::string> styles;
public:
StylesCollector() = default;
std::function<void(const std::string&, const std::string&)> setter()
{
return [this](const std::string& element, const std::string& style) {
if (this->styles.find(element) != this->styles.cend()) {
throw std::invalid_argument(std::string("Multiple styles for element ") + element);
}
this->styles[element] = style;
};
}
size_t styles_count() const
{
return styles.size();
}
std::string style(const std::string& element) const
{
const auto style = styles.find(element);
if (style != styles.cend()) {
return style->second;
} else {
return {};
}
}
};
TEST_CASE(
"apply_colors() invokes the callback for each element, supplying the element name and its style",
"[ColorManager]")
{
ColorManager c;
StylesCollector collector;
SECTION("By default, the list is empty") {
c.apply_colors(collector.setter());
REQUIRE(collector.styles_count() == 0);
}
SECTION("Each processed action adds corresponding entry to return value") {
c.handle_action("color", {"listnormal", "default", "default"});
c.handle_action("color", {"listfocus_unread", "cyan", "default", "bold", "underline"});
c.handle_action("color", {"background", "red", "yellow"});
c.handle_action("color", {"info", "green", "white", "reverse"});
c.handle_action("color", {"end-of-text-marker", "color123", "default", "dim", "protect"});
c.handle_action("color", {"hint-key", "default", "color2"});
c.handle_action("color", {"hint-description", "color3", "default"});
c.apply_colors(collector.setter());
REQUIRE(collector.styles_count() == 10);
REQUIRE(collector.style("listnormal") == "");
REQUIRE(collector.style("listfocus_unread") == "fg=cyan,attr=bold,attr=underline");
REQUIRE(collector.style("background") == "fg=red,bg=yellow");
REQUIRE(collector.style("info") == "fg=green,bg=white,attr=reverse");
REQUIRE(collector.style("title") == "fg=green,bg=white,attr=reverse");
REQUIRE(collector.style("end-of-text-marker") == "fg=color123,attr=dim,attr=protect");
REQUIRE(collector.style("hint-key") == "bg=color2");
REQUIRE(collector.style("hint-description") == "fg=color3");
// These two weren't set explicitly and fell back to `info`
REQUIRE(collector.style("hint-keys-delimiter") == "fg=green,bg=white,attr=reverse");
REQUIRE(collector.style("hint-separator") == "fg=green,bg=white,attr=reverse");
}
SECTION("For `article` element, two additional elements are emitted") {
c.handle_action("color", {"article", "white", "blue", "reverse"});
c.apply_colors(collector.setter());
REQUIRE(collector.styles_count() == 3);
REQUIRE(collector.style("article") == "fg=white,bg=blue,attr=reverse");
REQUIRE(collector.style("color_bold") == "fg=white,bg=blue,attr=reverse,attr=bold");
REQUIRE(collector.style("color_underline") ==
"fg=white,bg=blue,attr=reverse,attr=underline");
}
}
TEST_CASE("register_commands() registers ColorManager with ConfigParser",
"[ColorManager]")
{
ConfigParser cfg;
ColorManager clr;
StylesCollector collector;
REQUIRE_NOTHROW(clr.register_commands(cfg));
clr.apply_colors(collector.setter());
REQUIRE(collector.styles_count() == 0);
cfg.parse_file("data/config-with-colors");
clr.apply_colors(collector.setter());
REQUIRE(collector.styles_count() == 2);
}
TEST_CASE(
"handle_action() throws ConfigHandlerException if there aren't "
"enough parameters",
"[ColorManager]")
{
ColorManager c;
CHECK_THROWS_AS(c.handle_action("color", {}), ConfigHandlerException);
CHECK_THROWS_AS(
c.handle_action("color", {"one"}), ConfigHandlerException);
CHECK_THROWS_AS(c.handle_action("color", {"one", "two"}),
ConfigHandlerException);
}
TEST_CASE(
"handle_action() throws ConfigHandlerException if foreground color "
"is invalid",
"[ColorManager]")
{
ColorManager c;
const std::vector<std::string> non_colors{
{"awesome", "but", "nonexistent", "colors"}};
for (const auto& color : non_colors) {
CHECK_THROWS_AS(c.handle_action("color",
{"listfocus", color, "default"}),
ConfigHandlerException);
}
}
TEST_CASE(
"handle_action() throws ConfigHandlerException if background color "
"is invalid",
"[ColorManager]")
{
ColorManager c;
const std::vector<std::string> non_colors{
{"awesome", "but", "nonexistent", "colors"}};
for (const auto& color : non_colors) {
CHECK_THROWS_AS(c.handle_action("color",
{"listfocus", "default", color}),
ConfigHandlerException);
}
}
TEST_CASE(
"handle_action() throws ConfigHandlerException if color attribute "
"is invalid",
"[ColorManager]")
{
ColorManager c;
const std::vector<std::string> non_attributes{
{"awesome", "but", "nonexistent", "attributes"}};
for (const auto& attr : non_attributes) {
CHECK_THROWS_AS(c.handle_action("color",
{"listfocus", "red", "red", attr}),
ConfigHandlerException);
}
}
TEST_CASE(
"handle_action() throws ConfigHandlerException if color is applied "
"to non-existent element",
"[ColorManager]")
{
ColorManager c;
const std::vector<std::string> non_elements{
{"awesome", "but", "nonexistent", "elements"}};
for (const auto& element : non_elements) {
CHECK_THROWS_AS(
c.handle_action("color", {element, "red", "green"}),
ConfigHandlerException);
}
}
TEST_CASE(
"handle_action() throws ConfigHandlerException if it's passed a "
"command other than \"color\"",
"[ColorManager]")
{
ColorManager c;
const std::vector<std::string> other_commands{
{"browser", "include", "auto-reload", "ocnews-flag-star"}};
for (const auto& command : other_commands) {
CHECK_THROWS_AS(
c.handle_action(command, {}), ConfigHandlerException);
}
}
TEST_CASE("dump_config() returns everything we put into ColorManager",
"[ColorManager]")
{
ColorManager c;
std::unordered_set<std::string> expected;
std::vector<std::string> config;
// Checks that `expected` contains the same lines as `config` contains,
// and nothing more.
auto equivalent = [&]() -> bool {
std::size_t found = 0;
for (const auto& line : config)
{
if (expected.find(line) == expected.end()) {
return false;
}
found++;
}
return found == expected.size();
};
{
INFO("Empty ColorManager outputs nothing");
c.dump_config(config);
REQUIRE(config.empty());
REQUIRE(equivalent());
}
expected.emplace("color listfocus default red");
c.handle_action("color", {"listfocus", "default", "red"});
config.clear();
c.dump_config(config);
REQUIRE(config.size() == 1);
REQUIRE(equivalent());
expected.emplace("color article green cyan bold");
c.handle_action("color", {"article", "green", "cyan", "bold"});
config.clear();
c.dump_config(config);
REQUIRE(config.size() == 2);
REQUIRE(equivalent());
expected.emplace("color listnormal_unread black yellow underline standout");
c.handle_action("color",
{"listnormal_unread", "black", "yellow", "underline", "standout"});
config.clear();
c.dump_config(config);
REQUIRE(config.size() == 3);
REQUIRE(equivalent());
expected.emplace("color hint-keys-delimiter color5 default dim");
c.handle_action("color", {"hint-keys-delimiter", "color5", "default", "dim"});
config.clear();
c.dump_config(config);
REQUIRE(config.size() == 4);
REQUIRE(equivalent());
expected.emplace("color hint-separator color7 color8");
c.handle_action("color", {"hint-separator", "color7", "color8"});
config.clear();
c.dump_config(config);
REQUIRE(config.size() == 5);
REQUIRE(equivalent());
}
TEST_CASE("If no colors were specified for the "
"`title`/`hint-key`/`hint-keys-delimiter`/`hint-separator`/`hint-description` elements, "
"then use colors from the `info` element (if any)",
"[ColorManager]")
{
const std::vector<std::string> elements {
"title", "hint-key", "hint-keys-delimiter", "hint-separator", "hint-description"};
for (const auto& element : elements) {
DYNAMIC_SECTION("element: " << element) {
ColorManager c;
StylesCollector collector;
SECTION("Element's style can be changed as usual") {
c.handle_action("color", {element, "green", "default", "underline"});
c.apply_colors(collector.setter());
REQUIRE(collector.styles_count() >= 1);
REQUIRE(collector.style(element) == "fg=green,attr=underline");
}
SECTION("Element and `info` don't interfere with each other") {
SECTION("Element's style is set before `info`") {
c.handle_action("color", {element, "blue", "black"});
c.handle_action("color", {"info", "green", "yellow", "bold"});
}
SECTION("Element's style is set after `info`") {
c.handle_action("color", {"info", "green", "yellow", "bold"});
c.handle_action("color", {element, "blue", "black"});
}
c.apply_colors(collector.setter());
REQUIRE(collector.styles_count() >= 2);
REQUIRE(collector.style(element) == "fg=blue,bg=black");
REQUIRE(collector.style("info") == "fg=green,bg=yellow,attr=bold");
}
SECTION("Element inherits the `info` style when available") {
c.handle_action("color", {"info", "red", "magenta", "reverse"});
c.apply_colors(collector.setter());
REQUIRE(collector.styles_count() >= 2);
REQUIRE(collector.style(element) == "fg=red,bg=magenta,attr=reverse");
REQUIRE(collector.style("info") == "fg=red,bg=magenta,attr=reverse");
}
SECTION("Element has no style if there is no style for `info` either") {
c.handle_action("color", {"listnormal", "black", "white"});
c.apply_colors(collector.setter());
REQUIRE(collector.styles_count() >= 1);
REQUIRE(collector.style("listnormal") == "fg=black,bg=white");
REQUIRE(collector.style(element) == "");
REQUIRE(collector.style("info") == "");
}
}
}
}
| 3,628 |
1,346 | package com.ctrip.platform.dal.dao.task;
import com.ctrip.platform.dal.common.enums.ShardingCategory;
import com.ctrip.platform.dal.dao.*;
import java.sql.SQLException;
import static com.ctrip.platform.dal.dao.helper.DalShardingHelper.*;
/**
* Created by lilj on 2018/9/12.
*/
public class BaseTaskAdapter {
protected DalClient client;
protected String logicDbName;
public ShardingCategory shardingCategory;
public boolean shardingEnabled;
public void initialize(String logicDbName) {
this.logicDbName=logicDbName;
this.client = DalClientFactory.getClient(logicDbName);
shardingEnabled = isShardingEnabled(logicDbName);
initShardingCategory();
}
public DefaultTaskContext createTaskContext() throws SQLException {
DefaultTaskContext taskContext = new DefaultTaskContext();
taskContext.setShardingCategory(shardingCategory);
return taskContext;
}
public void initShardingCategory() {
if (shardingEnabled)
shardingCategory = ShardingCategory.DBShard;
else
shardingCategory = ShardingCategory.NoShard;
}
}
| 417 |
2,863 | <gh_stars>1000+
package org.spockframework.util;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Indicates that the annotated element, a member of Spock's public API, is in a
* trial phase. Until the element is promoted by removing this annotation, it may still undergo
* incompatible changes. This allows us to incorporate valuable feedback from our users
* before freezing the API for a new feature. In the unlikely event that the element isn't deemed fit for
* purpose, it may be removed completely. Typically, elements are promoted within one or two releases.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
public @interface Beta {} | 180 |
599 | <filename>preprocess/crop_video_sequences_batch.py
import os
from tqdm import tqdm
from multiprocessing import Pool
from functools import partial
from itertools import groupby
import numpy as np
from fsgan.preprocess.crop_video_sequences import main as crop_video_sequences
def parse_videos(root):
vid_rel_paths = []
for r, d, f in os.walk(root):
for file in f:
if file.endswith('.mp4'):
vid_rel_paths.append(os.path.join(os.path.relpath(r, root), file).replace('\\', '/'))
return vid_rel_paths
def process_video(input, cache_postfix='_dsfd_seq.pkl', resolution=256, crop_scale=1.2, select='all'):
file_path, out_dir = input[0], input[1]
filename = os.path.basename(file_path)
curr_out_cache_path = os.path.join(out_dir, os.path.splitext(filename)[0] + '_seq00' + cache_postfix)
if os.path.exists(curr_out_cache_path):
return True
# Process video
crop_video_sequences(file_path, out_dir, None, cache_postfix, resolution, crop_scale, select, disable_tqdm=True)
return True
def main(root, output_dir, file_lists=None, cache_postfix='_dsfd_seq.pkl', resolution=256, crop_scale=2.0, workers=4,
select='all'):
# Validation
if not os.path.isdir(root):
raise RuntimeError('root directory does not exist: ' + root)
if not os.path.isdir(output_dir):
raise RuntimeError('Output directory does not exist: ' + output_dir)
# Parse files from directory or file lists (if specified)
if file_lists is None:
vid_rel_paths = parse_videos(root)
else:
vid_rel_paths = []
for file_list in file_lists:
vid_rel_paths.append(np.loadtxt(os.path.join(root, file_list), dtype=str))
vid_rel_paths = np.concatenate(vid_rel_paths)
vid_out_dirs = [os.path.join(output_dir, os.path.split(p)[0]) for p in vid_rel_paths]
vid_paths = [os.path.join(root, p) for p in vid_rel_paths]
# Make directory structure
for out_dir in vid_out_dirs:
if not os.path.exists(out_dir):
os.makedirs(out_dir)
# Process all videos
f = partial(process_video, cache_postfix=cache_postfix, resolution=resolution, crop_scale=crop_scale, select=select)
with Pool(workers) as p:
list(tqdm(p.imap(f, zip(vid_paths, vid_out_dirs)), total=len(vid_paths)))
# Parse generated sequence videos
vid_seq_rel_paths = parse_videos(output_dir)
vid_seq_keys, vid_seq_groups = zip(*[(key, list(group)) for key, group in
groupby(vid_seq_rel_paths, lambda p: (p[:-10] + '.mp4'))])
vid_seq_groups = np.array(vid_seq_groups)
for file_list in file_lists:
# Adjust file list to generated sequence videos
list_rel_paths = np.loadtxt(os.path.join(root, file_list), dtype=str)
_, indices, _ = np.intersect1d(vid_seq_keys, list_rel_paths, return_indices=True)
list_seq_rel_paths = np.concatenate(vid_seq_groups[indices])
# Write output list to file
np.savetxt(os.path.join(output_dir, file_list), list_seq_rel_paths, fmt='%s')
if __name__ == "__main__":
# Parse program arguments
import argparse
parser = argparse.ArgumentParser(os.path.splitext(os.path.basename(__file__))[0])
parser.add_argument('root', metavar='DIR',
help='root directory')
parser.add_argument('-o', '--output', metavar='DIR', required=True,
help='output directory')
parser.add_argument('-fl', '--file_lists', metavar='PATH', nargs='+',
help='file lists')
parser.add_argument('-cp', '--cache_postfix', default='_dsfd_seq.pkl', metavar='POSTFIX',
help='cache file postfix')
parser.add_argument('-r', '--resolution', default=256, type=int, metavar='N',
help='output video resolution (default: 256)')
parser.add_argument('-cs', '--crop_scale', type=float, metavar='F', default=2.0,
help='crop scale relative to detection bounding box')
parser.add_argument('-w', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
parser.add_argument('-s', '--select', default='all', metavar='STR',
help='selection method [all|longest]')
args = parser.parse_args()
main(args.root, args.output, args.file_lists, args.cache_postfix, args.resolution, args.crop_scale, args.workers,
args.select)
| 1,964 |
3,200 | /**
* Copyright 2020 Huawei Technologies Co., Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MINDSPORE_CCSRC_MINDDATA_DATASET_ENGINE_GNN_GRAPH_FEATURE_PARSER_H_
#define MINDSPORE_CCSRC_MINDDATA_DATASET_ENGINE_GNN_GRAPH_FEATURE_PARSER_H_
#include <memory>
#include <queue>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include "minddata/dataset/core/data_type.h"
#include "minddata/dataset/core/tensor.h"
#if !defined(_WIN32) && !defined(_WIN64)
#include "minddata/dataset/engine/gnn/graph_shared_memory.h"
#endif
#include "minddata/dataset/engine/gnn/feature.h"
#include "minddata/dataset/util/status.h"
#include "minddata/mindrecord/include/shard_column.h"
namespace mindspore {
namespace dataset {
namespace gnn {
using mindrecord::ShardColumn;
class GraphFeatureParser {
public:
explicit GraphFeatureParser(const ShardColumn &shard_column);
~GraphFeatureParser() = default;
// @param std::string key - column name
// @param std::vector<uint8_t> &blob - contains data in blob field in mindrecord
// @param std::vector<int32_t> *ind - return value, list of feature index in int32_t
// @return Status - the status code
Status LoadFeatureIndex(const std::string &key, const std::vector<uint8_t> &blob, std::vector<int32_t> *ind);
// @param std::string &key - column name
// @param std::vector<uint8_t> &blob - contains data in blob field in mindrecord
// @param std::shared_ptr<Tensor> *tensor - return value feature tensor
// @return Status - the status code
Status LoadFeatureTensor(const std::string &key, const std::vector<uint8_t> &blob, std::shared_ptr<Tensor> *tensor);
#if !defined(_WIN32) && !defined(_WIN64)
Status LoadFeatureToSharedMemory(const std::string &key, const std::vector<uint8_t> &col_blob,
GraphSharedMemory *shared_memory, std::shared_ptr<Tensor> *out_tensor);
#endif
private:
std::unique_ptr<ShardColumn> shard_column_;
};
} // namespace gnn
} // namespace dataset
} // namespace mindspore
#endif // MINDSPORE_CCSRC_MINDDATA_DATASET_ENGINE_GNN_GRAPH_FEATURE_PARSER_H_
| 899 |
522 | /**
* Copyright (C) 2004-2011 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.spark.preference;
import org.jivesoftware.MainWindowListener;
import org.jivesoftware.spark.SparkManager;
import org.jivesoftware.spark.util.log.Log;
import org.jivesoftware.sparkimpl.plugin.privacy.ui.PrivacyPreferences;
import org.jivesoftware.sparkimpl.preference.PreferenceDialog;
import org.jivesoftware.sparkimpl.preference.PreferencesPanel;
import org.jivesoftware.sparkimpl.preference.chat.ChatPreference;
import org.jivesoftware.sparkimpl.preference.groupchat.GroupChatPreference;
//import org.jivesoftware.sparkimpl.preference.media.MediaPreference;
import org.jivesoftware.sparkimpl.settings.local.LocalPreference;
import javax.swing.*;
import java.util.*;
/**
* Usage of the PreferenceManager to handle loading of preferences within Spark.
*
* @author <NAME>
*/
public class PreferenceManager {
private final Map<String, Preference> map = new LinkedHashMap<>();
private PreferenceDialog preferenceDialog;
public PreferenceManager() {
// Initialize base preferences
ChatPreference chatPreferences = new ChatPreference();
addPreference(chatPreferences);
chatPreferences.load();
GroupChatPreference groupChatPreferences = new GroupChatPreference();
addPreference(groupChatPreferences);
groupChatPreferences.load();
// MediaPreference preferences = new MediaPreference();
// addPreference(preferences);
// preferences.load();
PrivacyPreferences privacy = new PrivacyPreferences();
addPreference(privacy);
privacy.load();
LocalPreference localPreferences = new LocalPreference();
addPreference(localPreferences);
localPreferences.load();
getPreferences();
SparkManager.getMainWindow().addMainWindowListener(new MainWindowListener() {
@Override
public void shutdown() {
fireShutdown();
}
@Override
public void mainWindowActivated() {
}
@Override
public void mainWindowDeactivated() {
}
});
}
/**
* <h1>showPreferences</h1>
* This will open the Preference-Dialog and select the given preference.
*
* @param selectedPref the preference you want to select
*/
public void showPreferences(Preference selectedPref) {
preferenceDialog = new PreferenceDialog();
preferenceDialog.invoke(SparkManager.getMainWindow(), new PreferencesPanel(getPreferences(), selectedPref));
}
public void showPreferences() {
preferenceDialog = new PreferenceDialog();
preferenceDialog.invoke(SparkManager.getMainWindow(), new PreferencesPanel(getPreferences()));
}
public void addPreference(Preference preference) {
map.put(preference.getNamespace(), preference);
}
public void removePreference(Preference preference) {
map.remove(preference.getNamespace());
}
public Preference getPreference(String namespace) {
return map.get(namespace);
}
public Object getPreferenceData(String namespace) {
return getPreference(namespace).getData();
}
public Iterator<Preference> getPreferences() {
final List<Preference> returnList = new ArrayList<>();
for (String namespace : map.keySet()) {
returnList.add(map.get(namespace));
}
return returnList.iterator();
}
private void fireShutdown()
{
for ( final Preference preference : map.values() )
{
try
{
preference.shutdown();
}
catch ( Exception e )
{
Log.error( "An exception occurred while trying to shutdown preference: " + preference, e );
}
}
}
public JDialog getPreferenceDialog() {
return preferenceDialog.getDialog();
}
}
| 1,658 |
1,085 | <reponame>luxms/dremio-oss
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.exec.planner.fragment;
import com.dremio.exec.proto.CoordExecRPC.PlanFragmentMajor;
import com.dremio.exec.proto.CoordExecRPC.PlanFragmentMinor;
import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint;
import com.dremio.exec.proto.ExecProtos.FragmentHandle;
/**
* Holder for the major and minor specific portions of the plan fragment.
*/
public class PlanFragmentFull {
private final PlanFragmentMajor major;
private final PlanFragmentMinor minor;
private FragmentHandle handle;
public PlanFragmentFull(PlanFragmentMajor major, PlanFragmentMinor minor) {
this.major = major;
this.minor = minor;
handle = FragmentHandle.newBuilder(major.getHandle())
.setMinorFragmentId(minor.getMinorFragmentId())
.build();
}
public PlanFragmentMajor getMajor() {
return major;
}
public PlanFragmentMinor getMinor() {
return minor;
}
public FragmentHandle getHandle() { return handle; }
public int getMajorFragmentId() { return handle.getMajorFragmentId(); }
public int getMinorFragmentId() { return handle.getMinorFragmentId(); }
public NodeEndpoint getAssignment() { return minor.getAssignment(); }
public long getMemInitial() { return major.getMemInitial(); }
public long getMemMax() { return minor.getMemMax(); }
}
| 586 |
568 | <reponame>sadpe/docassemble<gh_stars>100-1000
# Copyright 2014 SolidBuilds.com. All rights reserved
#
# Authors: <NAME> <<EMAIL>>
from docassemble.webapp.app_and_db import app
from flask import render_template, current_app
from docassemble_flask_user import login_required, roles_required
# The Member page is accessible to authenticated users (users that have logged in)
@app.route('/member')
@login_required # Limits access to authenticated users
def member_page():
return render_template('pages/member_page.html')
# The Admin page is accessible to users with the 'admin' role
@app.route('/admin')
@roles_required('admin') # Limits access to users with the 'admin' role
def admin_page():
return render_template('pages/admin_page.html')
| 237 |
554 | <reponame>fxxf1111/X-APM
package github.tornaco.xposedmoduletest.xposed.submodules;
import android.util.Log;
import java.util.Set;
import de.robv.android.xposed.IXposedHookZygoteInit;
import de.robv.android.xposed.XC_MethodHook;
import de.robv.android.xposed.XposedBridge;
import de.robv.android.xposed.XposedHelpers;
import github.tornaco.xposedmoduletest.xposed.util.XposedLog;
/**
* Created by guohao4 on 2017/10/31.
* Email: <EMAIL>
*/
class ActivitySubModule extends AndroidSubModule {
@Override
public String needBuildVar() {
return super.needBuildVar();
}
@Override
public int needMinSdk() {
return super.needMinSdk();
}
@Override
public void initZygote(IXposedHookZygoteInit.StartupParam startupParam) {
super.initZygote(startupParam);
hookOnBackPressed();
}
private void hookOnBackPressed() {
XposedLog.verbose("hookOnBackPressed...");
try {
Class clz = XposedHelpers.findClass("android.app.Activity", null);
Set unHooks = XposedBridge.hookAllMethods(clz,
"onKeyUp", new XC_MethodHook() {
@Override
protected void afterHookedMethod(MethodHookParam param) throws Throwable {
super.afterHookedMethod(param);
XposedLog.verbose("onKeyUp: " + param.thisObject);
}
});
XposedLog.verbose("hookOnBackPressed OK:" + unHooks);
setStatus(unhooksToStatus(unHooks));
} catch (Exception e) {
XposedLog.verbose("Fail hookOnBackPressed:" + e);
setStatus(SubModuleStatus.ERROR);
setErrorMessage(Log.getStackTraceString(e));
}
}
}
| 892 |
555 | package com.github.steveice10.mc.protocol.packet.login.client;
import com.github.steveice10.mc.protocol.packet.PacketTest;
import org.junit.Before;
public class LoginStartPacketTest extends PacketTest {
@Before
public void setup() {
this.setPackets(new LoginStartPacket("Username"));
}
}
| 116 |
1,125 | // Copyright 2021 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.
#pragma once
#include <gtest/gtest.h>
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdlib>
#include <functional>
#include <random>
#include <vector>
#include <fp16.h>
#include <xnnpack.h>
#include <xnnpack/params.h>
class VCvtMicrokernelTester {
public:
inline VCvtMicrokernelTester& batch_size(size_t batch_size) {
assert(batch_size != 0);
this->batch_size_ = batch_size;
return *this;
}
inline size_t batch_size() const {
return this->batch_size_;
}
inline VCvtMicrokernelTester& iterations(size_t iterations) {
this->iterations_ = iterations;
return *this;
}
inline size_t iterations() const {
return this->iterations_;
}
void Test(xnn_f16_f32_vcvt_ukernel_function vcvt) const {
std::random_device random_device;
auto rng = std::mt19937(random_device());
auto distribution = std::uniform_real_distribution<float>(-100.0f, 100.0f);
auto f32rng = std::bind(distribution, std::ref(rng));
auto f16rng = std::bind(fp16_ieee_from_fp32_value, f32rng);
std::vector<uint16_t> input(batch_size() + XNN_EXTRA_BYTES / sizeof(uint16_t));
std::vector<float> output(batch_size());
for (size_t iteration = 0; iteration < iterations(); iteration++) {
std::generate(input.begin(), input.end(), std::ref(f16rng));
std::fill(output.begin(), output.end(), nanf(""));
// Call optimized micro-kernel.
vcvt(batch_size() * sizeof(float), input.data(), output.data(), nullptr /* params */);
// Verify results.
for (size_t i = 0; i < batch_size(); i++) {
ASSERT_EQ(fp32_to_bits(output[i]), fp32_to_bits(fp16_ieee_to_fp32_value(input[i])))
<< "at " << i << " / " << batch_size()
<< ", x[" << i << "] = 0x" << std::hex << std::setw(4) << std::setfill('0') << input[i];
}
}
}
private:
size_t batch_size_ = 1;
size_t iterations_ = 15;
};
| 821 |
1,694 | //
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by <NAME>.
//
#import "NSObject-Protocol.h"
@class CBaseContact, CContact, CMessageWrap, LinkWeAppJumpWrap, MMUIViewController, NSArray, NSMutableArray, NSString, PushMailWrap, TranslateInfo;
@protocol MessageNodeViewDelegate <NSObject>
@optional
- (_Bool)isUseDefaultChatBkgImage;
- (void)onClickPlayMusicBtn:(CMessageWrap *)arg1 isPlay:(_Bool)arg2;
- (void)onClickFavMenu:(CMessageWrap *)arg1;
- (void)onClickVoiceTranslate:(CMessageWrap *)arg1;
- (void)onClickTextFloatPreView:(CMessageWrap *)arg1;
- (void)onClickTranslateMsg:(CMessageWrap *)arg1 translateInfo:(TranslateInfo *)arg2;
- (void)hideToolViewAnimated:(_Bool)arg1;
- (void)scheduleOnMsg:(CMessageWrap *)arg1;
- (void)clickNewAASysMsg:(NSString *)arg1 withMsgLocalID:(unsigned int)arg2;
- (void)shareMsgToOpenSDKByNodeView:(CMessageWrap *)arg1;
- (void)clickWeAppReceiveSessionMsg;
- (void)clickReceiveBrandMsg;
- (void)clickShieldBrandMsg;
- (void)clickLinkToDealWithSysXml:(CMessageWrap *)arg1 fromScene:(NSString *)arg2;
- (void)clickLinkToExpose;
- (void)onFullScreenWindowWillHide;
- (void)willShowMenuController:(id)arg1;
- (void)filterShowMenuItem:(NSMutableArray *)arg1 sender:(id)arg2;
- (_Bool)isMsgLastSend:(CMessageWrap *)arg1;
- (_Bool)isMsgCanRevoke:(CMessageWrap *)arg1;
- (void)revokeMsgByNodeView:(CMessageWrap *)arg1;
- (void)SetMsgPlayed:(CMessageWrap *)arg1;
- (void)switchEarMode;
- (void)jumpToUserProfile:(NSString *)arg1 Displayname:(NSString *)arg2 Scence:(unsigned int)arg3;
- (void)onHideKeyboard;
- (void)onMoreOperateWithMsgId:(NSString *)arg1;
- (void)onForwardMessageOK;
- (void)onMassSendSendAgain:(NSArray *)arg1;
- (void)onScrollToBottom;
- (void)tagWeAppLink:(LinkWeAppJumpWrap *)arg1 messageWrap:(CMessageWrap *)arg2;
- (void)tagLink:(NSString *)arg1 messageWrap:(CMessageWrap *)arg2;
- (MMUIViewController *)getViewController;
- (void)hasTapReaderNodeView;
- (void)longPressOnHeadImageForDebug:(CMessageWrap *)arg1;
- (_Bool)isMsgSelected:(CMessageWrap *)arg1;
- (void)longPressOnHeadImage:(CBaseContact *)arg1;
- (_Bool)canLongPressOnHeadImage:(CBaseContact *)arg1;
- (void)deleteNode:(CMessageWrap *)arg1;
- (void)StartDownloadShortVideo:(CMessageWrap *)arg1;
- (void)StartDownloadVideo:(CMessageWrap *)arg1 DownloadMode:(unsigned long long)arg2;
- (void)StartDownloadVideo:(CMessageWrap *)arg1;
- (void)StartUploadVideo:(CMessageWrap *)arg1;
- (void)StopDownloadVideo:(CMessageWrap *)arg1;
- (void)StopUploadVideo:(CMessageWrap *)arg1;
- (void)onExposeTemplateMsg:(CMessageWrap *)arg1;
- (void)readerViewClickedWithMsg:(CMessageWrap *)arg1;
- (void)headerImageClickedWithMsg:(CMessageWrap *)arg1;
- (void)headerImageClicked:(CBaseContact *)arg1;
- (void)OnEndPlaying:(CMessageWrap *)arg1;
- (void)EndPlaying:(CMessageWrap *)arg1;
- (void)BeginPlaying:(CMessageWrap *)arg1 FromTouch:(_Bool)arg2;
- (void)PlayVideo:(CMessageWrap *)arg1 soundable:(_Bool)arg2;
- (void)tapAppNodeView:(id)arg1;
- (void)tapFriendCard_NodeView:(id)arg1 WithContact:(CContact *)arg2 WithMsg:(CMessageWrap *)arg3;
- (void)tapVideoStatus_NodeView:(id)arg1 DownloadMode:(unsigned long long)arg2;
- (void)tapPushMail_NodeView:(id)arg1 withPushMailWrap:(PushMailWrap *)arg2;
- (void)tapVideoStatus_NodeView:(id)arg1;
- (void)tapStatus_NodeView:(id)arg1;
- (void)tapLocation_NodeView:(id)arg1;
- (void)tapImage_NodeView:(id)arg1 needEditImage:(_Bool)arg2;
- (void)tapImage_NodeView:(id)arg1;
@end
| 1,319 |
4,262 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.salesforce.internal.client;
import java.io.InputStream;
import java.util.List;
import java.util.Map;
import org.apache.camel.component.salesforce.api.SalesforceException;
import org.apache.camel.component.salesforce.api.dto.bulkv2.Job;
import org.apache.camel.component.salesforce.api.dto.bulkv2.JobStateEnum;
import org.apache.camel.component.salesforce.api.dto.bulkv2.Jobs;
import org.apache.camel.component.salesforce.api.dto.bulkv2.QueryJob;
import org.apache.camel.component.salesforce.api.dto.bulkv2.QueryJobs;
public interface BulkApiV2Client {
interface JobResponseCallback {
void onResponse(Job job, Map<String, String> headers, SalesforceException ex);
}
interface JobsResponseCallback {
void onResponse(Jobs jobs, Map<String, String> headers, SalesforceException ex);
}
interface ResponseCallback {
void onResponse(Map<String, String> headers, SalesforceException ex);
}
interface StreamResponseCallback {
void onResponse(InputStream inputStream, Map<String, String> headers, SalesforceException ex);
}
interface QueryJobResponseCallback {
void onResponse(QueryJob queryJob, Map<String, String> headers, SalesforceException ex);
}
interface QueryJobsResponseCallback {
void onResponse(QueryJobs queryJobs, Map<String, String> headers, SalesforceException ex);
}
void createJob(Job job, Map<String, List<String>> header, JobResponseCallback callback);
void getAllJobs(String queryLocator, Map<String, List<String>> headers, JobsResponseCallback callback);
void getJob(String jobId, Map<String, List<String>> header, JobResponseCallback callback);
void createBatch(
InputStream batchStream, String jobId, Map<String, List<String>> headers, ResponseCallback callback);
void changeJobState(
String jobId, JobStateEnum state, Map<String, List<String>> headers, JobResponseCallback callback);
void deleteJob(String jobId, Map<String, List<String>> headers, ResponseCallback callback);
void getSuccessfulResults(String jobId, Map<String, List<String>> headers, StreamResponseCallback callback);
void getFailedResults(String jobId, Map<String, List<String>> headers, StreamResponseCallback callback);
void getUnprocessedRecords(String jobId, Map<String, List<String>> headers, StreamResponseCallback callback);
void createQueryJob(QueryJob queryJob, Map<String, List<String>> headers, QueryJobResponseCallback callback);
void getQueryJob(String jobId, Map<String, List<String>> headers, QueryJobResponseCallback callback);
void getQueryJobResults(String jobId, Map<String, List<String>> headers, StreamResponseCallback callback);
void changeQueryJobState(
String jobId, JobStateEnum state, Map<String, List<String>> headers, QueryJobResponseCallback callback);
void deleteQueryJob(String jobId, Map<String, List<String>> headers, ResponseCallback callback);
void getAllQueryJobs(String queryLocator, Map<String, List<String>> headers, QueryJobsResponseCallback callback);
}
| 1,159 |
2,541 | <filename>ios/WeApp/Core/WAAppTaskMgr/WAWebViewPageMgr/View/WeAppCustomTabbar/WeAppCustomTabbar.h
//
// WeAppCustomTabbar.h
// WeAppExample
//
// Created by lionvoom on 2020/12/3.
// Copyright © 2020 wept. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "WATabbarStyle.h"
#import "WeAppCustomTabbarDelegate-Protocol.h"
NS_ASSUME_NONNULL_BEGIN
@interface WeAppCustomTabbar : UIView
@property(readonly) NSInteger selectedIndex;
@property(nonatomic, weak) id<WeAppCustomTabbarDelegate> delegate;
- (void)setTabbar:(WATabbarStyle *)style;
- (void)selectTabbarIndex:(NSUInteger)tabbarIndex;
@end
NS_ASSUME_NONNULL_END
| 243 |
5,279 | <filename>runners/spark/src/main/java/org/apache/beam/runners/spark/translation/EvaluationContext.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.spark.translation;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.beam.runners.core.construction.SerializablePipelineOptions;
import org.apache.beam.runners.core.construction.TransformInputs;
import org.apache.beam.runners.spark.SparkPipelineOptions;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.AppliedPTransform;
import org.apache.beam.sdk.transforms.GroupByKey;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PCollectionView;
import org.apache.beam.sdk.values.PValue;
import org.apache.beam.sdk.values.TupleTag;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.Iterables;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* The EvaluationContext allows us to define pipeline instructions and translate between {@code
* PObject<T>}s or {@code PCollection<T>}s and Ts or DStreams/RDDs of Ts.
*/
@SuppressWarnings({
"rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
"nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
public class EvaluationContext {
private final JavaSparkContext jsc;
private JavaStreamingContext jssc;
private final Pipeline pipeline;
private final Map<PValue, Dataset> datasets = new LinkedHashMap<>();
private final Map<PValue, Dataset> pcollections = new LinkedHashMap<>();
private final Set<Dataset> leaves = new LinkedHashSet<>();
private final Map<PValue, Object> pobjects = new LinkedHashMap<>();
private AppliedPTransform<?, ?, ?> currentTransform;
private final SparkPCollectionView pviews = new SparkPCollectionView();
private final Map<PCollection, Long> cacheCandidates = new HashMap<>();
private final PipelineOptions options;
private final SerializablePipelineOptions serializableOptions;
public EvaluationContext(JavaSparkContext jsc, Pipeline pipeline, PipelineOptions options) {
this.jsc = jsc;
this.pipeline = pipeline;
this.options = options;
this.serializableOptions = new SerializablePipelineOptions(options);
}
public EvaluationContext(
JavaSparkContext jsc, Pipeline pipeline, PipelineOptions options, JavaStreamingContext jssc) {
this(jsc, pipeline, options);
this.jssc = jssc;
}
public JavaSparkContext getSparkContext() {
return jsc;
}
public JavaStreamingContext getStreamingContext() {
return jssc;
}
public Pipeline getPipeline() {
return pipeline;
}
public PipelineOptions getOptions() {
return options;
}
public SerializablePipelineOptions getSerializableOptions() {
return serializableOptions;
}
public void setCurrentTransform(AppliedPTransform<?, ?, ?> transform) {
this.currentTransform = transform;
}
public AppliedPTransform<?, ?, ?> getCurrentTransform() {
return currentTransform;
}
public <T extends PValue> T getInput(PTransform<T, ?> transform) {
@SuppressWarnings("unchecked")
T input =
(T) Iterables.getOnlyElement(TransformInputs.nonAdditionalInputs(getCurrentTransform()));
return input;
}
public <T> Map<TupleTag<?>, PCollection<?>> getInputs(PTransform<?, ?> transform) {
checkArgument(currentTransform != null, "can only be called with non-null currentTransform");
checkArgument(
currentTransform.getTransform() == transform, "can only be called with current transform");
return currentTransform.getInputs();
}
public <T extends PValue> T getOutput(PTransform<?, T> transform) {
@SuppressWarnings("unchecked")
T output = (T) Iterables.getOnlyElement(getOutputs(transform).values());
return output;
}
public Map<TupleTag<?>, PCollection<?>> getOutputs(PTransform<?, ?> transform) {
checkArgument(currentTransform != null, "can only be called with non-null currentTransform");
checkArgument(
currentTransform.getTransform() == transform, "can only be called with current transform");
return currentTransform.getOutputs();
}
public Map<TupleTag<?>, Coder<?>> getOutputCoders() {
return currentTransform.getOutputs().entrySet().stream()
.filter(e -> e.getValue() instanceof PCollection)
.collect(Collectors.toMap(Map.Entry::getKey, e -> ((PCollection) e.getValue()).getCoder()));
}
  /**
   * Cache the PCollection if {@link SparkPipelineOptions#isCacheDisabled()} is false, the
   * transform isn't a GroupByKey transformation, and the PCollection is used more than once in
   * the Pipeline.
   *
   * <p>The PCollection is not cached for a GroupByKey transformation, because Spark automatically
   * persists some intermediate data in shuffle operations, even without users calling persist.
   *
   * @param pvalue output of transform
   * @param transform the transform to check
   * @return whether the PCollection will be cached
   */
public boolean shouldCache(PTransform<?, ? extends PValue> transform, PValue pvalue) {
if (serializableOptions.get().as(SparkPipelineOptions.class).isCacheDisabled()
|| transform instanceof GroupByKey) {
return false;
}
return pvalue instanceof PCollection && cacheCandidates.getOrDefault(pvalue, 0L) > 1;
}
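  // Illustrative sketch (hypothetical pipeline, not part of this class): assuming the pipeline
  // visitor has recorded two consumers for a PCollection in cacheCandidates, the check above
  // caches it unless caching is disabled or its producer is a GroupByKey:
  //
  //   PCollection<String> words = ...;                  // hypothetical upstream output
  //   words.apply("CountWords", Count.perElement());    // first consumer
  //   words.apply("DistinctWords", Distinct.create());  // second consumer
  //   // cacheCandidates.get(words) == 2L, so shouldCache(wordsProducer, words) returns true
  //   // as long as isCacheDisabled() is false and wordsProducer is not a GroupByKey.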
/**
   * Add the single output of a transform to the context map and possibly cache it if it conforms
   * to {@link #shouldCache(PTransform, PValue)}.
*
* @param transform from which Dataset was created
* @param dataset created Dataset from transform
*/
public void putDataset(PTransform<?, ? extends PValue> transform, Dataset dataset) {
putDataset(transform, getOutput(transform), dataset);
}
/**
   * Add an output of a transform to the context map and possibly cache it if it conforms to
   * {@link #shouldCache(PTransform, PValue)}. Used when the PTransform has multiple outputs.
*
* @param pvalue one of multiple outputs of transform
* @param dataset created Dataset from transform
*/
public void putDataset(PValue pvalue, Dataset dataset) {
putDataset(null, pvalue, dataset);
}
/**
   * Add an output of a transform to the context map and possibly cache it if it conforms to
   * {@link #shouldCache(PTransform, PValue)}.
*
* @param transform from which Dataset was created
* @param pvalue output of transform
* @param dataset created Dataset from transform
*/
private void putDataset(
@Nullable PTransform<?, ? extends PValue> transform, PValue pvalue, Dataset dataset) {
try {
dataset.setName(pvalue.getName());
} catch (IllegalStateException e) {
// name not set, ignore
}
if (shouldCache(transform, pvalue)) {
// we cache only PCollection
Coder<?> coder = ((PCollection<?>) pvalue).getCoder();
Coder<? extends BoundedWindow> wCoder =
((PCollection<?>) pvalue).getWindowingStrategy().getWindowFn().windowCoder();
dataset.cache(storageLevel(), WindowedValue.getFullCoder(coder, wCoder));
}
datasets.put(pvalue, dataset);
leaves.add(dataset);
}
public Dataset borrowDataset(PTransform<? extends PValue, ?> transform) {
return borrowDataset(getInput(transform));
}
public Dataset borrowDataset(PValue pvalue) {
Dataset dataset = datasets.get(pvalue);
leaves.remove(dataset);
return dataset;
}
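  // Note on the `leaves` bookkeeping: putDataset() adds every produced Dataset to `leaves`, and
  // borrowDataset() removes a Dataset once a downstream transform consumes it. Whatever remains
  // in `leaves` has no registered consumer, which is why computeOutputs() below forces an action
  // on each remaining Dataset so that Spark actually evaluates it.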
/**
* Computes the outputs for all RDDs that are leaves in the DAG and do not have any actions (like
* saving to a file) registered on them (i.e. they are performed for side effects).
*/
public void computeOutputs() {
for (Dataset dataset : leaves) {
dataset.action(); // force computation.
}
}
/**
* Retrieve an object of Type T associated with the PValue passed in.
*
* @param value PValue to retrieve associated data for.
* @param <T> Type of object to return.
* @return Native object.
*/
@SuppressWarnings("TypeParameterUnusedInFormals")
public <T> T get(PValue value) {
if (pobjects.containsKey(value)) {
return (T) pobjects.get(value);
}
if (pcollections.containsKey(value)) {
JavaRDD<?> rdd = ((BoundedDataset) pcollections.get(value)).getRDD();
T res = (T) Iterables.getOnlyElement(rdd.collect());
pobjects.put(value, res);
return res;
}
    throw new IllegalStateException("Cannot resolve unknown PObject: " + value);
}
/**
   * Return the current views created in the pipeline.
*
* @return SparkPCollectionView
*/
public SparkPCollectionView getPViews() {
return pviews;
}
/**
   * Adds or replaces a view in the current views created in the pipeline.
*
* @param view - Identifier of the view
* @param value - Actual value of the view
* @param coder - Coder of the value
*/
public void putPView(
PCollectionView<?> view,
Iterable<WindowedValue<?>> value,
Coder<Iterable<WindowedValue<?>>> coder) {
pviews.putPView(view, value, coder);
}
/**
   * Get the map of cache candidates held by the evaluation context.
*
* @return The current {@link Map} of cache candidates.
*/
public Map<PCollection, Long> getCacheCandidates() {
return this.cacheCandidates;
}
<T> Iterable<WindowedValue<T>> getWindowedValues(PCollection<T> pcollection) {
@SuppressWarnings("unchecked")
BoundedDataset<T> boundedDataset = (BoundedDataset<T>) datasets.get(pcollection);
leaves.remove(boundedDataset);
return boundedDataset.getValues(pcollection);
}
public String storageLevel() {
return serializableOptions.get().as(SparkPipelineOptions.class).getStorageLevel();
}
}
/**
* @file unit-Subarray.cc
*
* @section LICENSE
*
* The MIT License
*
* @copyright Copyright (c) 2017-2021 TileDB, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*
* @section DESCRIPTION
*
* Tests the `Subarray` class.
*/
#include "test/src/helpers.h"
#include "test/src/vfs_helpers.h"
#include "tiledb/sm/c_api/tiledb_struct_def.h"
#include "tiledb/sm/subarray/subarray_partitioner.h"
#ifdef _WIN32
#include "tiledb/sm/filesystem/win.h"
#else
#include "tiledb/sm/filesystem/posix.h"
#endif
#include <catch.hpp>
#include <iostream>
using namespace tiledb::sm;
using namespace tiledb::test;
/* ********************************* */
/* STRUCT DEFINITION */
/* ********************************* */
struct SubarrayFx {
tiledb_ctx_t* ctx_;
tiledb_vfs_t* vfs_;
const std::vector<std::unique_ptr<SupportedFs>> fs_vec_;
std::string temp_dir_;
std::string array_name_;
const char* ARRAY_NAME = "subarray";
tiledb_array_t* array_ = nullptr;
SubarrayFx();
~SubarrayFx();
};
SubarrayFx::SubarrayFx()
: fs_vec_(vfs_test_get_fs_vec()) {
// Initialize vfs test
REQUIRE(vfs_test_init(fs_vec_, &ctx_, &vfs_).ok());
// Create temporary directory based on the supported filesystem
#ifdef _WIN32
SupportedFsLocal windows_fs;
temp_dir_ = windows_fs.file_prefix() + windows_fs.temp_dir();
#else
SupportedFsLocal posix_fs;
temp_dir_ = posix_fs.file_prefix() + posix_fs.temp_dir();
#endif
create_dir(temp_dir_, ctx_, vfs_);
array_name_ = temp_dir_ + ARRAY_NAME;
int rc = tiledb_array_alloc(ctx_, array_name_.c_str(), &array_);
CHECK(rc == TILEDB_OK);
}
SubarrayFx::~SubarrayFx() {
tiledb_array_free(&array_);
remove_dir(temp_dir_, ctx_, vfs_);
tiledb_ctx_free(&ctx_);
tiledb_vfs_free(&vfs_);
}
/* ********************************* */
/* TESTS */
/* ********************************* */
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test tile coords, 1D",
"[Subarray][1d][tile_coords]") {
uint64_t domain[] = {1, 100};
uint64_t tile_extent = 10;
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"d"},
{TILEDB_UINT64},
{domain},
{&tile_extent},
{"a", "b"},
{TILEDB_INT32, TILEDB_INT32},
{1, TILEDB_VAR_NUM},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1),
tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
TILEDB_ROW_MAJOR,
TILEDB_ROW_MAJOR,
2);
open_array(ctx_, array_, TILEDB_READ);
Subarray subarray;
SubarrayRanges<uint64_t> ranges = {{5, 7, 6, 15, 33, 43}};
Layout subarray_layout = Layout::ROW_MAJOR;
create_subarray(array_->array_, ranges, subarray_layout, &subarray);
subarray.compute_tile_coords<uint64_t>();
// Prepare correct tile coordinates
std::vector<std::vector<uint8_t>> c_tile_coords;
std::vector<uint8_t> tile_coords_el;
auto coords_size = sizeof(uint64_t);
tile_coords_el.resize(coords_size);
uint64_t tile_coords_0 = 0;
uint64_t tile_coords_1 = 1;
uint64_t tile_coords_3 = 3;
uint64_t tile_coords_4 = 4;
std::memcpy(&tile_coords_el[0], &tile_coords_0, sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], &tile_coords_1, sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], &tile_coords_3, sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], &tile_coords_4, sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
// Check tile coordinates
auto tile_coords = subarray.tile_coords();
CHECK(tile_coords == c_tile_coords);
// Check tile coordinates ptr
std::vector<uint8_t> aux_tile_coords;
aux_tile_coords.resize(coords_size);
auto tile_coords_ptr =
subarray.tile_coords_ptr<uint64_t>({1}, &aux_tile_coords);
CHECK(tile_coords_ptr[0] == tile_coords_1);
tile_coords_ptr = subarray.tile_coords_ptr<uint64_t>({4}, &aux_tile_coords);
CHECK(tile_coords_ptr[0] == tile_coords_4);
tile_coords_ptr = subarray.tile_coords_ptr<uint64_t>({10}, &aux_tile_coords);
CHECK(tile_coords_ptr == nullptr);
close_array(ctx_, array_);
}
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test tile coords, 2D",
"[Subarray][2d][tile_coords]") {
tiledb_layout_t tile_order = TILEDB_ROW_MAJOR;
uint64_t domain[] = {1, 10};
uint64_t tile_extent_1 = 2;
uint64_t tile_extent_2 = 5;
std::vector<std::vector<uint8_t>> c_tile_coords;
std::vector<uint8_t> tile_coords_el;
auto coords_size = 2 * sizeof(uint64_t);
uint64_t tile_coords_0_0[] = {0, 0};
uint64_t tile_coords_0_1[] = {0, 1};
uint64_t tile_coords_2_0[] = {2, 0};
uint64_t tile_coords_2_1[] = {2, 1};
uint64_t tile_coords_3_0[] = {3, 0};
uint64_t tile_coords_3_1[] = {3, 1};
uint64_t tile_coords_4_0[] = {4, 0};
uint64_t tile_coords_4_1[] = {4, 1};
SECTION("tile: row") {
tile_order = TILEDB_ROW_MAJOR;
tile_coords_el.resize(coords_size);
std::memcpy(&tile_coords_el[0], tile_coords_0_0, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_0_1, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_2_0, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_2_1, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_3_0, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_3_1, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_4_0, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_4_1, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
}
SECTION("tile: col") {
tile_order = TILEDB_COL_MAJOR;
tile_coords_el.resize(coords_size);
std::memcpy(&tile_coords_el[0], tile_coords_0_0, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_2_0, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_3_0, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_4_0, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_0_1, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_2_1, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_3_1, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
std::memcpy(&tile_coords_el[0], tile_coords_4_1, 2 * sizeof(uint64_t));
c_tile_coords.push_back(tile_coords_el);
}
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"d1", "d2"},
{TILEDB_UINT64, TILEDB_UINT64},
{domain, domain},
{&tile_extent_1, &tile_extent_2},
{"a", "b"},
{TILEDB_INT32, TILEDB_INT32},
{1, TILEDB_VAR_NUM},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1),
tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
tile_order,
TILEDB_ROW_MAJOR,
2);
open_array(ctx_, array_, TILEDB_READ);
Subarray subarray;
SubarrayRanges<uint64_t> ranges = {{2, 2, 6, 10}, {2, 6, 5, 10}};
Layout subarray_layout = Layout::ROW_MAJOR;
create_subarray(array_->array_, ranges, subarray_layout, &subarray);
subarray.compute_tile_coords<uint64_t>();
auto tile_coords = subarray.tile_coords();
CHECK(tile_coords == c_tile_coords);
// Check tile coordinates ptr
std::vector<uint8_t> aux_tile_coords;
aux_tile_coords.resize(coords_size);
auto tile_coords_ptr =
subarray.tile_coords_ptr<uint64_t>({2, 0}, &aux_tile_coords);
CHECK(tile_coords_ptr[0] == tile_coords_2_0[0]);
CHECK(tile_coords_ptr[1] == tile_coords_2_0[1]);
tile_coords_ptr =
subarray.tile_coords_ptr<uint64_t>({3, 1}, &aux_tile_coords);
CHECK(tile_coords_ptr[0] == tile_coords_3_1[0]);
CHECK(tile_coords_ptr[1] == tile_coords_3_1[1]);
tile_coords_ptr =
subarray.tile_coords_ptr<uint64_t>({10, 10}, &aux_tile_coords);
CHECK(tile_coords_ptr == nullptr);
close_array(ctx_, array_);
}
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test crop to tile, 2D",
"[Subarray][2d][crop_to_tile]") {
uint64_t domain[] = {1, 10};
uint64_t tile_extent_1 = 2;
uint64_t tile_extent_2 = 5;
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"d1", "d2"},
{TILEDB_UINT64, TILEDB_UINT64},
{domain, domain},
{&tile_extent_1, &tile_extent_2},
{"a", "b"},
{TILEDB_INT32, TILEDB_INT32},
{1, TILEDB_VAR_NUM},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1),
tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
TILEDB_ROW_MAJOR,
TILEDB_ROW_MAJOR,
2);
open_array(ctx_, array_, TILEDB_READ);
Subarray subarray;
SubarrayRanges<uint64_t> ranges = {{2, 10, 6, 10}, {2, 6, 5, 10}};
Layout subarray_layout = Layout::ROW_MAJOR;
create_subarray(array_->array_, ranges, subarray_layout, &subarray);
std::vector<uint64_t> tile_coords = {1, 0};
std::vector<uint64_t> c_range_0_0 = {3, 4};
std::vector<uint64_t> c_range_1_0 = {2, 5};
std::vector<uint64_t> c_range_1_1 = {5, 5};
auto cropped_subarray =
subarray.crop_to_tile(&tile_coords[0], Layout::ROW_MAJOR);
const Range* range = nullptr;
CHECK(cropped_subarray.range_num() == 2);
CHECK(cropped_subarray.get_range(0, 0, &range).ok());
CHECK(!memcmp(range->data(), &c_range_0_0[0], 2 * sizeof(uint64_t)));
CHECK(cropped_subarray.get_range(1, 0, &range).ok());
CHECK(!memcmp(range->data(), &c_range_1_0[0], 2 * sizeof(uint64_t)));
CHECK(cropped_subarray.get_range(1, 1, &range).ok());
CHECK(!memcmp(range->data(), &c_range_1_1[0], 2 * sizeof(uint64_t)));
close_array(ctx_, array_);
}
void verify_expanded_coordinates_2D(
Subarray* const subarray,
const uint64_t range_idx_start,
const uint64_t range_idx_end,
const uint64_t expected_range_idx_start,
const uint64_t expected_range_idx_end,
const std::vector<uint64_t>& expected_start_coords,
const std::vector<uint64_t>& expected_end_coords) {
std::vector<uint64_t> start_coords;
std::vector<uint64_t> end_coords;
subarray->get_expanded_coordinates(
range_idx_start, range_idx_end, &start_coords, &end_coords);
REQUIRE(start_coords == expected_start_coords);
REQUIRE(end_coords == expected_end_coords);
REQUIRE(subarray->range_idx(start_coords) == expected_range_idx_start);
REQUIRE(subarray->range_idx(end_coords) == expected_range_idx_end);
// Build a map from each inclusive range index between
// `range_idx_start` and `range_idx_end` that maps to a bool.
std::unordered_map<uint64_t, bool> range_idx_found;
for (uint64_t i = range_idx_start; i <= range_idx_end; ++i) {
range_idx_found[i] = false;
}
// Iterate through every coordinate between the start and end
// coordinate. If the flattened index is in `range_idx_found`,
// set the value to `true`.
for (uint64_t x = start_coords[0]; x <= end_coords[0]; ++x) {
for (uint64_t y = start_coords[1]; y <= end_coords[1]; ++y) {
const uint64_t range_idx = subarray->range_idx({x, y});
if (range_idx_found.count(range_idx) == 1) {
range_idx_found[range_idx] = true;
}
}
}
// Verify all flattened ranges are contained within the 2D
// space between `start_coords` and `end_coords`.
for (uint64_t i = range_idx_start; i <= range_idx_end; ++i) {
REQUIRE(range_idx_found[i] == true);
}
}
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test get_expanded_coordinates, row-major, 2D",
"[Subarray][2d][row_major][get_expanded_coordinates]") {
uint64_t domain[] = {1, 4};
uint64_t tile_extent_1 = 1;
uint64_t tile_extent_2 = 1;
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"x", "y"},
{TILEDB_UINT64, TILEDB_UINT64},
{domain, domain},
{&tile_extent_1, &tile_extent_2},
{"a"},
{TILEDB_INT32},
{1},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
TILEDB_ROW_MAJOR,
TILEDB_ROW_MAJOR,
1);
open_array(ctx_, array_, TILEDB_READ);
/**
* Populate the subarray with non-coalesced point ranges
* on each cell. This will populate the 2D subarray ranges as:
* 0 1 2 3
* 4 5 6 7
* 8 9 10 11
* 12 13 14 15
*/
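  // For reference: the flattened range index used by the checks below follows the row-major
  // layout, range_idx({x, y}) == 4 * x + y; e.g. coordinates {1, 2} map to flattened range 6
  // in the grid above.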
Subarray subarray;
std::vector<uint64_t> d1_ranges;
std::vector<uint64_t> d2_ranges;
for (uint64_t i = domain[0]; i <= domain[1]; ++i) {
d1_ranges.emplace_back(i);
d1_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
}
SubarrayRanges<uint64_t> ranges = {d1_ranges, d2_ranges};
Layout subarray_layout = Layout::ROW_MAJOR;
create_subarray(array_->array_, ranges, subarray_layout, &subarray, false);
// We must compute range offsets before invoking
// `get_expanded_coordinates`.
subarray.compute_range_offsets();
// The flattened, inclusive range [1, 2] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 1, 2, 1, 2, {0, 1}, {0, 2});
// The flattened, inclusive range [4, 6] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 4, 6, 4, 6, {1, 0}, {1, 2});
// The flattened, inclusive range [8, 8] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 8, 8, 8, 8, {2, 0}, {2, 0});
// The flattened, inclusive range [1, 7] must have
// a starting coordinate of (0, 0) and an ending coordinate
// of (1, 3) to contain ranges [0, 7].
verify_expanded_coordinates_2D(&subarray, 1, 7, 0, 7, {0, 0}, {1, 3});
// The flattened, inclusive range [5, 10] must have
// a starting coordinate of (1, 0) and an ending coordinate
// of (2, 3) to contain ranges [4, 11].
verify_expanded_coordinates_2D(&subarray, 5, 10, 4, 11, {1, 0}, {2, 3});
close_array(ctx_, array_);
}
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test get_expanded_coordinates, col-major, 2D",
"[Subarray][2d][col_major][get_expanded_coordinates]") {
uint64_t domain[] = {1, 4};
uint64_t tile_extent_1 = 1;
uint64_t tile_extent_2 = 1;
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"x", "y"},
{TILEDB_UINT64, TILEDB_UINT64},
{domain, domain},
{&tile_extent_1, &tile_extent_2},
{"a"},
{TILEDB_INT32},
{1},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
TILEDB_ROW_MAJOR,
TILEDB_ROW_MAJOR,
1);
open_array(ctx_, array_, TILEDB_READ);
/**
* Populate the subarray with non-coalesced point ranges
* on each cell. This will populate the 2D subarray ranges as:
* 0 4 8 12
* 1 5 9 13
* 2 6 10 14
* 3 7 11 15
*/
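  // For reference: the flattened range index used by the checks below follows the column-major
  // layout, range_idx({x, y}) == x + 4 * y; e.g. coordinates {2, 1} map to flattened range 6
  // in the grid above.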
Subarray subarray;
std::vector<uint64_t> d1_ranges;
std::vector<uint64_t> d2_ranges;
for (uint64_t i = domain[0]; i <= domain[1]; ++i) {
d1_ranges.emplace_back(i);
d1_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
}
SubarrayRanges<uint64_t> ranges = {d1_ranges, d2_ranges};
Layout subarray_layout = Layout::COL_MAJOR;
create_subarray(array_->array_, ranges, subarray_layout, &subarray, false);
// We must compute range offsets before invoking
// `get_expanded_coordinates`.
subarray.compute_range_offsets();
// The flattened, inclusive range [1, 2] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 1, 2, 1, 2, {1, 0}, {2, 0});
// The flattened, inclusive range [4, 6] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 4, 6, 4, 6, {0, 1}, {2, 1});
// The flattened, inclusive range [8, 8] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 8, 8, 8, 8, {0, 2}, {0, 2});
// The flattened, inclusive range [1, 7] must have
// a starting coordinate of (0, 0) and an ending coordinate
// of (3, 1) to contain ranges [0, 7].
verify_expanded_coordinates_2D(&subarray, 1, 7, 0, 7, {0, 0}, {3, 1});
// The flattened, inclusive range [5, 10] must have
// a starting coordinate of (0, 1) and an ending coordinate
// of (3, 2) to contain ranges [4, 11].
verify_expanded_coordinates_2D(&subarray, 5, 10, 4, 11, {0, 1}, {3, 2});
close_array(ctx_, array_);
}
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test get_expanded_coordinates, unordered, 2D",
"[Subarray][2d][unordered][get_expanded_coordinates]") {
uint64_t domain[] = {1, 4};
uint64_t tile_extent_1 = 1;
uint64_t tile_extent_2 = 1;
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"x", "y"},
{TILEDB_UINT64, TILEDB_UINT64},
{domain, domain},
{&tile_extent_1, &tile_extent_2},
{"a"},
{TILEDB_INT32},
{1},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
TILEDB_ROW_MAJOR,
TILEDB_ROW_MAJOR,
1);
open_array(ctx_, array_, TILEDB_READ);
/**
* Populate the subarray with non-coalesced point ranges
* on each cell. This will populate the 2D subarray ranges as:
* 0 1 2 3
* 4 5 6 7
* 8 9 10 11
* 12 13 14 15
*/
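  // For reference: with an UNORDERED subarray layout the flattened range index used by the
  // checks below matches the row-major case, range_idx({x, y}) == 4 * x + y.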
Subarray subarray;
std::vector<uint64_t> d1_ranges;
std::vector<uint64_t> d2_ranges;
for (uint64_t i = domain[0]; i <= domain[1]; ++i) {
d1_ranges.emplace_back(i);
d1_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
}
SubarrayRanges<uint64_t> ranges = {d1_ranges, d2_ranges};
Layout subarray_layout = Layout::UNORDERED;
create_subarray(array_->array_, ranges, subarray_layout, &subarray, false);
// We must compute range offsets before invoking
// `get_expanded_coordinates`.
subarray.compute_range_offsets();
// The flattened, inclusive range [1, 2] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 1, 2, 1, 2, {0, 1}, {0, 2});
// The flattened, inclusive range [4, 6] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 4, 6, 4, 6, {1, 0}, {1, 2});
// The flattened, inclusive range [8, 8] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_2D(&subarray, 8, 8, 8, 8, {2, 0}, {2, 0});
// The flattened, inclusive range [1, 7] must have
// a starting coordinate of (0, 0) and an ending coordinate
// of (1, 3) to contain ranges [0, 7].
verify_expanded_coordinates_2D(&subarray, 1, 7, 0, 7, {0, 0}, {1, 3});
// The flattened, inclusive range [5, 10] must have
// a starting coordinate of (1, 0) and an ending coordinate
// of (2, 3) to contain ranges [4, 11].
verify_expanded_coordinates_2D(&subarray, 5, 10, 4, 11, {1, 0}, {2, 3});
close_array(ctx_, array_);
}
void verify_expanded_coordinates_3D(
Subarray* const subarray,
const uint64_t range_idx_start,
const uint64_t range_idx_end,
const uint64_t expected_range_idx_start,
const uint64_t expected_range_idx_end,
const std::vector<uint64_t>& expected_start_coords,
const std::vector<uint64_t>& expected_end_coords) {
std::vector<uint64_t> start_coords;
std::vector<uint64_t> end_coords;
subarray->get_expanded_coordinates(
range_idx_start, range_idx_end, &start_coords, &end_coords);
REQUIRE(start_coords == expected_start_coords);
REQUIRE(end_coords == expected_end_coords);
REQUIRE(subarray->range_idx(start_coords) == expected_range_idx_start);
REQUIRE(subarray->range_idx(end_coords) == expected_range_idx_end);
// Build a map from each inclusive range index between
// `range_idx_start` and `range_idx_end` that maps to a bool.
std::unordered_map<uint64_t, bool> range_idx_found;
for (uint64_t i = range_idx_start; i <= range_idx_end; ++i) {
range_idx_found[i] = false;
}
// Iterate through every coordinate between the start and end
// coordinate. If the flattened index is in `range_idx_found`,
// set the value to `true`.
for (uint64_t x = start_coords[0]; x <= end_coords[0]; ++x) {
for (uint64_t y = start_coords[1]; y <= end_coords[1]; ++y) {
for (uint64_t z = start_coords[2]; z <= end_coords[2]; ++z) {
const uint64_t range_idx = subarray->range_idx({x, y, z});
if (range_idx_found.count(range_idx) == 1) {
range_idx_found[range_idx] = true;
}
}
}
}
  // Verify all flattened ranges are contained within the 3D
  // space between `start_coords` and `end_coords`.
for (uint64_t i = range_idx_start; i <= range_idx_end; ++i) {
REQUIRE(range_idx_found[i] == true);
}
}
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test get_expanded_coordinates, row-major, 3D",
"[Subarray][3d][row_major][get_expanded_coordinates]") {
uint64_t domain[] = {1, 4};
uint64_t tile_extent_1 = 1;
uint64_t tile_extent_2 = 1;
uint64_t tile_extent_3 = 1;
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"x", "y", "z"},
{TILEDB_UINT64, TILEDB_UINT64, TILEDB_UINT64},
{domain, domain, domain},
{&tile_extent_1, &tile_extent_2, &tile_extent_3},
{"a"},
{TILEDB_INT32},
{1},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
TILEDB_ROW_MAJOR,
TILEDB_ROW_MAJOR,
1);
open_array(ctx_, array_, TILEDB_READ);
/**
* Populate the subarray with non-coalesced point ranges
* on each cell. This will populate the 3D subarray ranges as:
*
* z == 0
* 0 4 8 12
* 16 20 24 28
* 32 36 40 44
* 48 52 56 60
*
* z == 1
* 1 5 9 13
* 17 21 25 29
* 33 37 41 45
* 49 53 57 61
*
* z == 2
* 2 6 10 14
* 18 22 26 30
* 34 38 42 46
* 50 54 58 62
*
* z == 3
* 3 7 11 15
* 19 23 27 31
* 35 39 43 47
* 51 55 59 63
*/
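  // For reference: the flattened range index used by the checks below follows the row-major
  // layout, range_idx({x, y, z}) == 16 * x + 4 * y + z; e.g. coordinates {3, 2, 0} map to
  // flattened range 56 in the grids above.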
Subarray subarray;
std::vector<uint64_t> d1_ranges;
std::vector<uint64_t> d2_ranges;
std::vector<uint64_t> d3_ranges;
for (uint64_t i = domain[0]; i <= domain[1]; ++i) {
d1_ranges.emplace_back(i);
d1_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d3_ranges.emplace_back(i);
d3_ranges.emplace_back(i);
}
SubarrayRanges<uint64_t> ranges = {d1_ranges, d2_ranges, d3_ranges};
Layout subarray_layout = Layout::ROW_MAJOR;
create_subarray(array_->array_, ranges, subarray_layout, &subarray, false);
// We must compute range offsets before invoking
// `get_expanded_coordinates`.
subarray.compute_range_offsets();
// The flattened, inclusive range [0, 4] only expands
// on the last dimension.
verify_expanded_coordinates_3D(&subarray, 0, 4, 0, 7, {0, 0, 0}, {0, 1, 3});
// The flattened, inclusive range [56, 59] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_3D(
&subarray, 56, 59, 56, 59, {3, 2, 0}, {3, 2, 3});
// The flattened, inclusive range [16, 18] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_3D(
&subarray, 16, 18, 16, 18, {1, 0, 0}, {1, 0, 2});
// The flattened, inclusive range [37, 57] must have
// a starting coordinate of (2, 0, 0) and an ending coordinate
// of (3, 3, 3) to contain ranges [32, 63]. This ensures
// expansion along both the "y" and "z" dimension, leaving the
// "x" dimension untouched.
verify_expanded_coordinates_3D(
&subarray, 37, 57, 32, 63, {2, 0, 0}, {3, 3, 3});
close_array(ctx_, array_);
}
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test get_expanded_coordinates, col-major, 3D",
"[Subarray][3d][col_major][get_expanded_coordinates]") {
uint64_t domain[] = {1, 4};
uint64_t tile_extent_1 = 1;
uint64_t tile_extent_2 = 1;
uint64_t tile_extent_3 = 1;
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"x", "y", "z"},
{TILEDB_UINT64, TILEDB_UINT64, TILEDB_UINT64},
{domain, domain, domain},
{&tile_extent_1, &tile_extent_2, &tile_extent_3},
{"a"},
{TILEDB_INT32},
{1},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
TILEDB_ROW_MAJOR,
TILEDB_ROW_MAJOR,
1);
open_array(ctx_, array_, TILEDB_READ);
  /**
   * Populate the subarray with non-coalesced point ranges
   * on each cell. This will populate the 3D subarray ranges as
   * (rows: x, columns: y):
   *
   * z == 0
   * 0 4 8 12
   * 1 5 9 13
   * 2 6 10 14
   * 3 7 11 15
   *
   * z == 1
   * 16 20 24 28
   * 17 21 25 29
   * 18 22 26 30
   * 19 23 27 31
   *
   * z == 2
   * 32 36 40 44
   * 33 37 41 45
   * 34 38 42 46
   * 35 39 43 47
   *
   * z == 3
   * 48 52 56 60
   * 49 53 57 61
   * 50 54 58 62
   * 51 55 59 63
   */
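  // For reference: the flattened range index used by the checks below follows the column-major
  // layout, range_idx({x, y, z}) == x + 4 * y + 16 * z; e.g. coordinates {0, 2, 3} map to
  // flattened range 56 in the grids above.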
Subarray subarray;
std::vector<uint64_t> d1_ranges;
std::vector<uint64_t> d2_ranges;
std::vector<uint64_t> d3_ranges;
for (uint64_t i = domain[0]; i <= domain[1]; ++i) {
d1_ranges.emplace_back(i);
d1_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d3_ranges.emplace_back(i);
d3_ranges.emplace_back(i);
}
SubarrayRanges<uint64_t> ranges = {d1_ranges, d2_ranges, d3_ranges};
Layout subarray_layout = Layout::COL_MAJOR;
create_subarray(array_->array_, ranges, subarray_layout, &subarray, false);
// We must compute range offsets before invoking
// `get_expanded_coordinates`.
subarray.compute_range_offsets();
  // The flattened, inclusive range [0, 4] only expands
  // on the fastest-varying dimension (the first dimension
  // for the column-major layout).
verify_expanded_coordinates_3D(&subarray, 0, 4, 0, 7, {0, 0, 0}, {3, 1, 0});
// The flattened, inclusive range [56, 59] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_3D(
&subarray, 56, 59, 56, 59, {0, 2, 3}, {3, 2, 3});
// The flattened, inclusive range [16, 18] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_3D(
&subarray, 16, 18, 16, 18, {0, 0, 1}, {2, 0, 1});
// The flattened, inclusive range [37, 57] must have
// a starting coordinate of (0, 0, 2) and an ending coordinate
// of (3, 3, 3) to contain ranges [32, 63]. This ensures
// expansion along both the "x" and "y" dimension, leaving the
// "z" dimension untouched.
verify_expanded_coordinates_3D(
&subarray, 37, 57, 32, 63, {0, 0, 2}, {3, 3, 3});
close_array(ctx_, array_);
}
TEST_CASE_METHOD(
SubarrayFx,
"Subarray: Test get_expanded_coordinates, unordered, 3D",
"[Subarray][3d][unordered][get_expanded_coordinates]") {
uint64_t domain[] = {1, 4};
uint64_t tile_extent_1 = 1;
uint64_t tile_extent_2 = 1;
uint64_t tile_extent_3 = 1;
create_array(
ctx_,
array_name_,
TILEDB_DENSE,
{"x", "y", "z"},
{TILEDB_UINT64, TILEDB_UINT64, TILEDB_UINT64},
{domain, domain, domain},
{&tile_extent_1, &tile_extent_2, &tile_extent_3},
{"a"},
{TILEDB_INT32},
{1},
{tiledb::test::Compressor(TILEDB_FILTER_LZ4, -1)},
TILEDB_ROW_MAJOR,
TILEDB_ROW_MAJOR,
1);
open_array(ctx_, array_, TILEDB_READ);
/**
* Populate the subarray with non-coalesced point ranges
* on each cell. This will populate the 3D subarray ranges as:
*
* z == 0
* 0 4 8 12
* 16 20 24 28
* 32 36 40 44
* 48 52 56 60
*
* z == 1
* 1 5 9 13
* 17 21 25 29
* 33 37 41 45
* 49 53 57 61
*
* z == 2
* 2 6 10 14
* 18 22 26 30
* 34 38 42 46
* 50 54 58 62
*
* z == 3
* 3 7 11 15
* 19 23 27 31
* 35 39 43 47
* 51 55 59 63
*/
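  // For reference: with an UNORDERED subarray layout the flattened range index used by the
  // checks below matches the row-major case, range_idx({x, y, z}) == 16 * x + 4 * y + z.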
Subarray subarray;
std::vector<uint64_t> d1_ranges;
std::vector<uint64_t> d2_ranges;
std::vector<uint64_t> d3_ranges;
for (uint64_t i = domain[0]; i <= domain[1]; ++i) {
d1_ranges.emplace_back(i);
d1_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d2_ranges.emplace_back(i);
d3_ranges.emplace_back(i);
d3_ranges.emplace_back(i);
}
SubarrayRanges<uint64_t> ranges = {d1_ranges, d2_ranges, d3_ranges};
Layout subarray_layout = Layout::UNORDERED;
create_subarray(array_->array_, ranges, subarray_layout, &subarray, false);
// We must compute range offsets before invoking
// `get_expanded_coordinates`.
subarray.compute_range_offsets();
// The flattened, inclusive range [56, 59] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_3D(
&subarray, 56, 59, 56, 59, {3, 2, 0}, {3, 2, 3});
// The flattened, inclusive range [16, 18] does not expand
// the coordinates when calibrating.
verify_expanded_coordinates_3D(
&subarray, 16, 18, 16, 18, {1, 0, 0}, {1, 0, 2});
// The flattened, inclusive range [37, 57] must have
// a starting coordinate of (2, 0, 0) and an ending coordinate
// of (3, 3, 3) to contain ranges [32, 63]. This ensures
// expansion along both the "y" and "z" dimension, leaving the
// "x" dimension untouched.
verify_expanded_coordinates_3D(
&subarray, 37, 57, 32, 63, {2, 0, 0}, {3, 3, 3});
close_array(ctx_, array_);
}