max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
348 | <reponame>chamberone/Leaflet.PixiOverlay<gh_stars>100-1000
{"nom":"Nouméa","circ":"1ère circonscription","dpt":"Nouvelle-Calédonie","inscrits":62404,"abs":33630,"votants":28774,"blancs":1219,"nuls":457,"exp":27098,"res":[{"nuance":"DVD","nom":"<NAME>","voix":15376},{"nuance":"DVD","nom":"<NAME>","voix":11722}]} | 132 |
1,475 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
*/
package org.apache.geode.gradle.testing.isolation;
import java.io.InputStream;
import java.io.OutputStream;
import org.gradle.api.logging.Logger;
import org.gradle.api.logging.Logging;
/**
* Wraps a Java {@link Process} to execute a completion function when it terminates.
*/
public class CompletableProcess extends Process {
private static final Logger LOGGER = Logging.getLogger(CompletableProcess.class);
// Human-readable label used in log messages and toString().
private final String description;
// The wrapped process; all Process operations are forwarded to it.
private final Process delegate;
// Completion callback; set to null after it runs so it executes at most once.
private Runnable onCompletion;
/**
 * Creates a wrapper that runs {@code onCompletion} once after the process terminates.
 *
 * @param description label used in log messages and {@link #toString()}
 * @param delegate the process to wrap
 * @param onCompletion callback run once when the process finishes or is destroyed
 */
public CompletableProcess(String description, Process delegate, Runnable onCompletion) {
this.description = description;
this.delegate = delegate;
this.onCompletion = onCompletion;
LOGGER.debug("{} started", this);
}
/** Forwards to the wrapped process's standard input stream. */
@Override
public OutputStream getOutputStream() {
return delegate.getOutputStream();
}
/** Forwards to the wrapped process's standard output stream. */
@Override
public InputStream getInputStream() {
return delegate.getInputStream();
}
/** Forwards to the wrapped process's standard error stream. */
@Override
public InputStream getErrorStream() {
return delegate.getErrorStream();
}
/**
 * Waits for the wrapped process to terminate, then runs the completion callback
 * (if it has not already run). The callback runs even if the wait is interrupted.
 *
 * @return the exit value of the wrapped process
 * @throws InterruptedException if the current thread is interrupted while waiting
 */
@Override
public int waitFor() throws InterruptedException {
try {
LOGGER.debug("{} waiting for process to finish", this);
return delegate.waitFor();
} finally {
LOGGER.debug("{} finished", this);
cleanUp();
}
}
/** Returns the wrapped process's exit value (throws if it has not yet terminated). */
@Override
public int exitValue() {
int exitValue = delegate.exitValue();
LOGGER.debug("{} reporting exit value {}", this, exitValue);
return exitValue;
}
/**
 * Destroys the wrapped process and runs the completion callback
 * (if it has not already run).
 */
@Override
public void destroy() {
LOGGER.debug("Destroying {}", this);
delegate.destroy();
LOGGER.debug("{} destroyed", this);
cleanUp();
}
@Override
public String toString() {
return "CompletableProcess{" + description + '}';
}
// Runs the completion callback at most once; synchronized so that concurrent
// waitFor() and destroy() callers cannot both invoke it.
private synchronized void cleanUp() {
if (onCompletion == null) {
return;
}
LOGGER.debug("{} cleaning up", this);
onCompletion.run();
onCompletion = null;
LOGGER.debug("{} cleaned up", this);
}
}
| 830 |
357 | <gh_stars>100-1000
/*
PsychPlatform.h
PLATFORMS:
Only Windows
AUTHORS:
<NAME> mk <EMAIL>
HISTORY:
DESCRIPTION:
PsychPlatform.h contains constant definitions asserting conditions
specific to the Windows version of Screen. If you
use a conditional macro which branches according to one of the constants
in this file, then that conditional macro does not belong here.
Note that this should be the ONLY Psychtoolbox file which is conditionally
included by platform. Exceptions to this rule might be:
-project.def files included in VC++ files
-StdAfx pre-compiled header files included in VC++ files.
TO DO:
For now the project path specifies which platform version of this file to
include, with each version of this file defining constants which identify
the platform. A smarter way to do this would be to use a single version
of this file which detects constants defined within the compilation
environment, for example as a flag passed to the compiler.
*/
#ifndef PSYCH_PLATFORM_WIN32_H
#define PSYCH_PLATFORM_WIN32_H
#include "PsychPlatformConstants.h"
// These constants control build switches.
#define PSYCH_SYSTEM PSYCH_WINDOWS
// The target language binding may be overridden by the build system; default to MATLAB.
#ifndef PSYCH_LANGUAGE
#define PSYCH_LANGUAGE PSYCH_MATLAB
#endif
#define PSYCH_DEBUG PSYCH_ON
// Only needed on GNU/Octave + MinGW64, as of Octave-6.1. MSVC 2019 for Python
// and Matlab builds already defines a sufficiently high WINVER:
#ifdef PTBOCTAVE3MEX
// Need to define _WIN32_WINNT and WINVER as 0x0602, so we can use features
// added in Windows-8 and in the Win-8 SDK. This obviously needs at least
// Windows-8 as build- and runtime system, but as we only officially support
// Windows-10, this is not a problem.
// #warning Manually setting WINVER to 0x0602
#undef _WIN32_WINNT
#undef WINVER
#define _WIN32_WINNT 0x0602
#define WINVER 0x0602
#endif
// PSYCH_PLATFORM_WIN32_H
#endif
| 729 |
1,752 | <filename>MyPerf4J-Core/src/main/java/cn/myperf4j/core/AbstractMethodTagMaintainer.java
package cn.myperf4j.core;
import cn.myperf4j.base.MethodTag;
import java.lang.reflect.Method;
/**
* Created by LinShunkang on 2018/5/20
*/
public abstract class AbstractMethodTagMaintainer {
/**
 * Registers the given tag.
 *
 * @param methodTag the tag to register
 * @return the id assigned to the tag
 */
public abstract int addMethodTag(MethodTag methodTag);
/**
 * Creates and registers a tag for the given reflective method.
 *
 * @param method the method to tag
 * @return the id assigned to the tag
 */
public abstract int addMethodTag(Method method);
/**
 * Looks up a previously registered tag.
 *
 * @param methodId an id returned by one of the {@code addMethodTag} overloads
 * @return the tag registered under {@code methodId}
 */
public abstract MethodTag getMethodTag(int methodId);
/** @return the number of method tags currently registered */
public abstract int getMethodTagCount();
}
| 168 |
1,022 | {
"FindPackagesByIdCacheTimeInSeconds": 60,
"FindPackagesByIdCountCacheTimeInSeconds": 0,
"GetSpecificPackageCacheTimeInSeconds": 60,
"SearchCacheTimeInSeconds": 45
} | 59 |
468 | <reponame>benparsons/hexbin<filename>meta/data-warehouse.json
{
"name": "data-warehouse",
"author": "rockmedia-es",
"license": "CC BY-NC-SA 4.0",
"raster": "http://hexb.in/hexagons/data-warehouse.png",
"vector": "http://hexb.in/vector/data-warehouse.svg",
"description": "One warehouse to store all the data",
"order_online_url": "https://www.rockmedia.es"
} | 145 |
1,160 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 <NAME>
# Copyright (c) 2008-2021 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
"""Wrapper for Xext
Generated with:
tools/genwrappers.py xsync
Do not modify this file.
"""
import ctypes
from ctypes import *
import pyglet.lib
_lib = pyglet.lib.load_library('Xext')
_int_types = (c_int16, c_int32)
if hasattr(ctypes, 'c_int64'):
# Some builds of ctypes apparently do not have c_int64
# defined; it's a pretty good bet that these builds do not
# have 64-bit pointers.
_int_types += (ctypes.c_int64,)
for t in _int_types:
if sizeof(t) == sizeof(c_size_t):
c_ptrdiff_t = t
class c_void(Structure):
# c_void_p is a buggy return type, converting to int, so
# POINTER(None) == c_void_p is actually written as
# POINTER(c_void), so it can be treated as a real pointer.
_fields_ = [('dummy', c_int)]
# XXX DODGY relative import of xlib.py, which contains XID etc. definitions.
# The wrapped form ("import pyglet.window.xlib.xlib") is not used because
# Python cannot resolve that kind of recursive import, even though it refers
# to the same module.
from . import xlib
SYNC_MAJOR_VERSION = 3 # /usr/include/X11/extensions/sync.h:4901
SYNC_MINOR_VERSION = 0 # /usr/include/X11/extensions/sync.h:4902
X_SyncInitialize = 0 # /usr/include/X11/extensions/sync.h:4904
X_SyncListSystemCounters = 1 # /usr/include/X11/extensions/sync.h:4905
X_SyncCreateCounter = 2 # /usr/include/X11/extensions/sync.h:4906
X_SyncSetCounter = 3 # /usr/include/X11/extensions/sync.h:4907
X_SyncChangeCounter = 4 # /usr/include/X11/extensions/sync.h:4908
X_SyncQueryCounter = 5 # /usr/include/X11/extensions/sync.h:4909
X_SyncDestroyCounter = 6 # /usr/include/X11/extensions/sync.h:4910
X_SyncAwait = 7 # /usr/include/X11/extensions/sync.h:4911
X_SyncCreateAlarm = 8 # /usr/include/X11/extensions/sync.h:4912
X_SyncChangeAlarm = 9 # /usr/include/X11/extensions/sync.h:4913
X_SyncQueryAlarm = 10 # /usr/include/X11/extensions/sync.h:4914
X_SyncDestroyAlarm = 11 # /usr/include/X11/extensions/sync.h:4915
X_SyncSetPriority = 12 # /usr/include/X11/extensions/sync.h:4916
X_SyncGetPriority = 13 # /usr/include/X11/extensions/sync.h:4917
XSyncCounterNotify = 0 # /usr/include/X11/extensions/sync.h:4919
XSyncAlarmNotify = 1 # /usr/include/X11/extensions/sync.h:4920
XSyncAlarmNotifyMask = 2 # /usr/include/X11/extensions/sync.h:4921
XSyncNumberEvents = 2 # /usr/include/X11/extensions/sync.h:4923
XSyncBadCounter = 0 # /usr/include/X11/extensions/sync.h:4925
XSyncBadAlarm = 1 # /usr/include/X11/extensions/sync.h:4926
XSyncNumberErrors = 2 # /usr/include/X11/extensions/sync.h:4927
XSyncCACounter = 1 # /usr/include/X11/extensions/sync.h:4932
XSyncCAValueType = 2 # /usr/include/X11/extensions/sync.h:4933
XSyncCAValue = 4 # /usr/include/X11/extensions/sync.h:4934
XSyncCATestType = 8 # /usr/include/X11/extensions/sync.h:4935
XSyncCADelta = 16 # /usr/include/X11/extensions/sync.h:4936
XSyncCAEvents = 32 # /usr/include/X11/extensions/sync.h:4937
enum_anon_93 = c_int
XSyncAbsolute = 0
XSyncRelative = 1
XSyncValueType = enum_anon_93 # /usr/include/X11/extensions/sync.h:4945
enum_anon_94 = c_int
XSyncPositiveTransition = 0
XSyncNegativeTransition = 1
XSyncPositiveComparison = 2
XSyncNegativeComparison = 3
XSyncTestType = enum_anon_94 # /usr/include/X11/extensions/sync.h:4955
enum_anon_95 = c_int
XSyncAlarmActive = 0
XSyncAlarmInactive = 1
XSyncAlarmDestroyed = 2
XSyncAlarmState = enum_anon_95 # /usr/include/X11/extensions/sync.h:4964
XID = xlib.XID
XSyncCounter = XID # /usr/include/X11/extensions/sync.h:4967
XSyncAlarm = XID # /usr/include/X11/extensions/sync.h:4968
class struct__XSyncValue(Structure):
__slots__ = [
'hi',
'lo',
]
struct__XSyncValue._fields_ = [
('hi', c_int),
('lo', c_uint),
]
XSyncValue = struct__XSyncValue # /usr/include/X11/extensions/sync.h:4972
# /usr/include/X11/extensions/sync.h:4980
XSyncIntToValue = _lib.XSyncIntToValue
XSyncIntToValue.restype = None
XSyncIntToValue.argtypes = [POINTER(XSyncValue), c_int]
# /usr/include/X11/extensions/sync.h:4985
XSyncIntsToValue = _lib.XSyncIntsToValue
XSyncIntsToValue.restype = None
XSyncIntsToValue.argtypes = [POINTER(XSyncValue), c_uint, c_int]
Bool = xlib.Bool
# /usr/include/X11/extensions/sync.h:4991
XSyncValueGreaterThan = _lib.XSyncValueGreaterThan
XSyncValueGreaterThan.restype = Bool
XSyncValueGreaterThan.argtypes = [XSyncValue, XSyncValue]
# /usr/include/X11/extensions/sync.h:4996
XSyncValueLessThan = _lib.XSyncValueLessThan
XSyncValueLessThan.restype = Bool
XSyncValueLessThan.argtypes = [XSyncValue, XSyncValue]
# /usr/include/X11/extensions/sync.h:5001
XSyncValueGreaterOrEqual = _lib.XSyncValueGreaterOrEqual
XSyncValueGreaterOrEqual.restype = Bool
XSyncValueGreaterOrEqual.argtypes = [XSyncValue, XSyncValue]
# /usr/include/X11/extensions/sync.h:5006
XSyncValueLessOrEqual = _lib.XSyncValueLessOrEqual
XSyncValueLessOrEqual.restype = Bool
XSyncValueLessOrEqual.argtypes = [XSyncValue, XSyncValue]
# /usr/include/X11/extensions/sync.h:5011
XSyncValueEqual = _lib.XSyncValueEqual
XSyncValueEqual.restype = Bool
XSyncValueEqual.argtypes = [XSyncValue, XSyncValue]
# /usr/include/X11/extensions/sync.h:5016
XSyncValueIsNegative = _lib.XSyncValueIsNegative
XSyncValueIsNegative.restype = Bool
XSyncValueIsNegative.argtypes = [XSyncValue]
# /usr/include/X11/extensions/sync.h:5020
XSyncValueIsZero = _lib.XSyncValueIsZero
XSyncValueIsZero.restype = Bool
XSyncValueIsZero.argtypes = [XSyncValue]
# /usr/include/X11/extensions/sync.h:5024
XSyncValueIsPositive = _lib.XSyncValueIsPositive
XSyncValueIsPositive.restype = Bool
XSyncValueIsPositive.argtypes = [XSyncValue]
# /usr/include/X11/extensions/sync.h:5028
XSyncValueLow32 = _lib.XSyncValueLow32
XSyncValueLow32.restype = c_uint
XSyncValueLow32.argtypes = [XSyncValue]
# /usr/include/X11/extensions/sync.h:5032
XSyncValueHigh32 = _lib.XSyncValueHigh32
XSyncValueHigh32.restype = c_int
XSyncValueHigh32.argtypes = [XSyncValue]
# /usr/include/X11/extensions/sync.h:5036
XSyncValueAdd = _lib.XSyncValueAdd
XSyncValueAdd.restype = None
XSyncValueAdd.argtypes = [POINTER(XSyncValue), XSyncValue, XSyncValue, POINTER(c_int)]
# /usr/include/X11/extensions/sync.h:5043
XSyncValueSubtract = _lib.XSyncValueSubtract
XSyncValueSubtract.restype = None
XSyncValueSubtract.argtypes = [POINTER(XSyncValue), XSyncValue, XSyncValue, POINTER(c_int)]
# /usr/include/X11/extensions/sync.h:5050
XSyncMaxValue = _lib.XSyncMaxValue
XSyncMaxValue.restype = None
XSyncMaxValue.argtypes = [POINTER(XSyncValue)]
# /usr/include/X11/extensions/sync.h:5054
XSyncMinValue = _lib.XSyncMinValue
XSyncMinValue.restype = None
XSyncMinValue.argtypes = [POINTER(XSyncValue)]
class struct__XSyncSystemCounter(Structure):
__slots__ = [
'name',
'counter',
'resolution',
]
struct__XSyncSystemCounter._fields_ = [
('name', c_char_p),
('counter', XSyncCounter),
('resolution', XSyncValue),
]
XSyncSystemCounter = struct__XSyncSystemCounter # /usr/include/X11/extensions/sync.h:5131
class struct_anon_96(Structure):
__slots__ = [
'counter',
'value_type',
'wait_value',
'test_type',
]
struct_anon_96._fields_ = [
('counter', XSyncCounter),
('value_type', XSyncValueType),
('wait_value', XSyncValue),
('test_type', XSyncTestType),
]
XSyncTrigger = struct_anon_96 # /usr/include/X11/extensions/sync.h:5139
class struct_anon_97(Structure):
__slots__ = [
'trigger',
'event_threshold',
]
struct_anon_97._fields_ = [
('trigger', XSyncTrigger),
('event_threshold', XSyncValue),
]
XSyncWaitCondition = struct_anon_97 # /usr/include/X11/extensions/sync.h:5144
class struct_anon_98(Structure):
__slots__ = [
'trigger',
'delta',
'events',
'state',
]
struct_anon_98._fields_ = [
('trigger', XSyncTrigger),
('delta', XSyncValue),
('events', Bool),
('state', XSyncAlarmState),
]
XSyncAlarmAttributes = struct_anon_98 # /usr/include/X11/extensions/sync.h:5152
class struct_anon_99(Structure):
__slots__ = [
'type',
'serial',
'send_event',
'display',
'counter',
'wait_value',
'counter_value',
'time',
'count',
'destroyed',
]
Display = xlib.Display
Time = xlib.Time
struct_anon_99._fields_ = [
('type', c_int),
('serial', c_ulong),
('send_event', Bool),
('display', POINTER(Display)),
('counter', XSyncCounter),
('wait_value', XSyncValue),
('counter_value', XSyncValue),
('time', Time),
('count', c_int),
('destroyed', Bool),
]
XSyncCounterNotifyEvent = struct_anon_99 # /usr/include/X11/extensions/sync.h:5169
class struct_anon_100(Structure):
__slots__ = [
'type',
'serial',
'send_event',
'display',
'alarm',
'counter_value',
'alarm_value',
'time',
'state',
]
struct_anon_100._fields_ = [
('type', c_int),
('serial', c_ulong),
('send_event', Bool),
('display', POINTER(Display)),
('alarm', XSyncAlarm),
('counter_value', XSyncValue),
('alarm_value', XSyncValue),
('time', Time),
('state', XSyncAlarmState),
]
XSyncAlarmNotifyEvent = struct_anon_100 # /usr/include/X11/extensions/sync.h:5181
class struct_anon_101(Structure):
__slots__ = [
'type',
'display',
'alarm',
'serial',
'error_code',
'request_code',
'minor_code',
]
struct_anon_101._fields_ = [
('type', c_int),
('display', POINTER(Display)),
('alarm', XSyncAlarm),
('serial', c_ulong),
('error_code', c_ubyte),
('request_code', c_ubyte),
('minor_code', c_ubyte),
]
XSyncAlarmError = struct_anon_101 # /usr/include/X11/extensions/sync.h:5195
class struct_anon_102(Structure):
__slots__ = [
'type',
'display',
'counter',
'serial',
'error_code',
'request_code',
'minor_code',
]
struct_anon_102._fields_ = [
('type', c_int),
('display', POINTER(Display)),
('counter', XSyncCounter),
('serial', c_ulong),
('error_code', c_ubyte),
('request_code', c_ubyte),
('minor_code', c_ubyte),
]
XSyncCounterError = struct_anon_102 # /usr/include/X11/extensions/sync.h:5205
# /usr/include/X11/extensions/sync.h:5213
XSyncQueryExtension = _lib.XSyncQueryExtension
XSyncQueryExtension.restype = c_int
XSyncQueryExtension.argtypes = [POINTER(Display), POINTER(c_int), POINTER(c_int)]
# /usr/include/X11/extensions/sync.h:5219
XSyncInitialize = _lib.XSyncInitialize
XSyncInitialize.restype = c_int
XSyncInitialize.argtypes = [POINTER(Display), POINTER(c_int), POINTER(c_int)]
# /usr/include/X11/extensions/sync.h:5225
XSyncListSystemCounters = _lib.XSyncListSystemCounters
XSyncListSystemCounters.restype = POINTER(XSyncSystemCounter)
XSyncListSystemCounters.argtypes = [POINTER(Display), POINTER(c_int)]
# /usr/include/X11/extensions/sync.h:5230
XSyncFreeSystemCounterList = _lib.XSyncFreeSystemCounterList
XSyncFreeSystemCounterList.restype = None
XSyncFreeSystemCounterList.argtypes = [POINTER(XSyncSystemCounter)]
# /usr/include/X11/extensions/sync.h:5234
XSyncCreateCounter = _lib.XSyncCreateCounter
XSyncCreateCounter.restype = XSyncCounter
XSyncCreateCounter.argtypes = [POINTER(Display), XSyncValue]
# /usr/include/X11/extensions/sync.h:5239
XSyncSetCounter = _lib.XSyncSetCounter
XSyncSetCounter.restype = c_int
XSyncSetCounter.argtypes = [POINTER(Display), XSyncCounter, XSyncValue]
# /usr/include/X11/extensions/sync.h:5245
XSyncChangeCounter = _lib.XSyncChangeCounter
XSyncChangeCounter.restype = c_int
XSyncChangeCounter.argtypes = [POINTER(Display), XSyncCounter, XSyncValue]
# /usr/include/X11/extensions/sync.h:5251
XSyncDestroyCounter = _lib.XSyncDestroyCounter
XSyncDestroyCounter.restype = c_int
XSyncDestroyCounter.argtypes = [POINTER(Display), XSyncCounter]
# /usr/include/X11/extensions/sync.h:5256
XSyncQueryCounter = _lib.XSyncQueryCounter
XSyncQueryCounter.restype = c_int
XSyncQueryCounter.argtypes = [POINTER(Display), XSyncCounter, POINTER(XSyncValue)]
# /usr/include/X11/extensions/sync.h:5262
XSyncAwait = _lib.XSyncAwait
XSyncAwait.restype = c_int
XSyncAwait.argtypes = [POINTER(Display), POINTER(XSyncWaitCondition), c_int]
# /usr/include/X11/extensions/sync.h:5268
XSyncCreateAlarm = _lib.XSyncCreateAlarm
XSyncCreateAlarm.restype = XSyncAlarm
XSyncCreateAlarm.argtypes = [POINTER(Display), c_ulong, POINTER(XSyncAlarmAttributes)]
# /usr/include/X11/extensions/sync.h:5274
XSyncDestroyAlarm = _lib.XSyncDestroyAlarm
XSyncDestroyAlarm.restype = c_int
XSyncDestroyAlarm.argtypes = [POINTER(Display), XSyncAlarm]
# /usr/include/X11/extensions/sync.h:5279
XSyncQueryAlarm = _lib.XSyncQueryAlarm
XSyncQueryAlarm.restype = c_int
XSyncQueryAlarm.argtypes = [POINTER(Display), XSyncAlarm, POINTER(XSyncAlarmAttributes)]
# /usr/include/X11/extensions/sync.h:5285
XSyncChangeAlarm = _lib.XSyncChangeAlarm
XSyncChangeAlarm.restype = c_int
XSyncChangeAlarm.argtypes = [POINTER(Display), XSyncAlarm, c_ulong, POINTER(XSyncAlarmAttributes)]
# /usr/include/X11/extensions/sync.h:5292
XSyncSetPriority = _lib.XSyncSetPriority
XSyncSetPriority.restype = c_int
XSyncSetPriority.argtypes = [POINTER(Display), XID, c_int]
# /usr/include/X11/extensions/sync.h:5298
XSyncGetPriority = _lib.XSyncGetPriority
XSyncGetPriority.restype = c_int
XSyncGetPriority.argtypes = [POINTER(Display), XID, POINTER(c_int)]
__all__ = ['SYNC_MAJOR_VERSION', 'SYNC_MINOR_VERSION', 'X_SyncInitialize',
'X_SyncListSystemCounters', 'X_SyncCreateCounter', 'X_SyncSetCounter',
'X_SyncChangeCounter', 'X_SyncQueryCounter', 'X_SyncDestroyCounter',
'X_SyncAwait', 'X_SyncCreateAlarm', 'X_SyncChangeAlarm', 'X_SyncQueryAlarm',
'X_SyncDestroyAlarm', 'X_SyncSetPriority', 'X_SyncGetPriority',
'XSyncCounterNotify', 'XSyncAlarmNotify', 'XSyncAlarmNotifyMask',
'XSyncNumberEvents', 'XSyncBadCounter', 'XSyncBadAlarm', 'XSyncNumberErrors',
'XSyncCACounter', 'XSyncCAValueType', 'XSyncCAValue', 'XSyncCATestType',
'XSyncCADelta', 'XSyncCAEvents', 'XSyncValueType', 'XSyncAbsolute',
'XSyncRelative', 'XSyncTestType', 'XSyncPositiveTransition',
'XSyncNegativeTransition', 'XSyncPositiveComparison',
'XSyncNegativeComparison', 'XSyncAlarmState', 'XSyncAlarmActive',
'XSyncAlarmInactive', 'XSyncAlarmDestroyed', 'XSyncCounter', 'XSyncAlarm',
'XSyncValue', 'XSyncIntToValue', 'XSyncIntsToValue', 'XSyncValueGreaterThan',
'XSyncValueLessThan', 'XSyncValueGreaterOrEqual', 'XSyncValueLessOrEqual',
'XSyncValueEqual', 'XSyncValueIsNegative', 'XSyncValueIsZero',
'XSyncValueIsPositive', 'XSyncValueLow32', 'XSyncValueHigh32',
'XSyncValueAdd', 'XSyncValueSubtract', 'XSyncMaxValue', 'XSyncMinValue',
'XSyncSystemCounter', 'XSyncTrigger', 'XSyncWaitCondition',
'XSyncAlarmAttributes', 'XSyncCounterNotifyEvent', 'XSyncAlarmNotifyEvent',
'XSyncAlarmError', 'XSyncCounterError', 'XSyncQueryExtension',
'XSyncInitialize', 'XSyncListSystemCounters', 'XSyncFreeSystemCounterList',
'XSyncCreateCounter', 'XSyncSetCounter', 'XSyncChangeCounter',
'XSyncDestroyCounter', 'XSyncQueryCounter', 'XSyncAwait', 'XSyncCreateAlarm',
'XSyncDestroyAlarm', 'XSyncQueryAlarm', 'XSyncChangeAlarm',
'XSyncSetPriority', 'XSyncGetPriority']
| 6,669 |
2,189 | import os
import sys
import logging
import datetime
log = logging.getLogger("Cli_wallet")

def init_logger(_file_name):
    """Configure the module-level ``log`` with a stdout and a file handler.

    The log file is created in a ``logs`` directory next to ``_file_name``
    (``./logs`` when ``_file_name`` has no directory part) and is named
    ``<timestamp>_<basename>.log``.

    Args:
        _file_name: path of the script the log file is named after.
    """
    global log
    # Remove handlers from any previous call so re-initialisation does not
    # duplicate log output.
    log.handlers = []

    formatter = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(formatter)
    log.addHandler(stdout_handler)
    log.setLevel(logging.INFO)

    directory, base_name = os.path.split(_file_name)
    # os.path.join avoids the duplicated "/" separators the old manual
    # concatenation produced; makedirs(exist_ok=True) avoids a check-then-create race.
    log_dir = os.path.join(directory or ".", "logs")
    os.makedirs(log_dir, exist_ok=True)

    # Timestamp like "2021-01-31-12:34:56" (second resolution, space replaced).
    # NOTE(review): colons kept for backward compatibility with the old naming
    # scheme, but they are not valid in Windows file names — confirm target OS.
    now = str(datetime.datetime.now())[:-7].replace(' ', '-')
    log_path = os.path.join(log_dir, now + "_" + base_name)
    if not log_path.endswith(".log"):
        log_path += ".log"

    file_handler = logging.FileHandler(log_path)
    file_handler.setFormatter(formatter)
    log.addHandler(file_handler)
| 392 |
13,709 | #import "ElementBaseTransition.h"
#import "FloatTransition.h"
// A transition element that interpolates between two UIColor values.
// (Behaviour inferred from the initializer signature — confirm in the .m file.)
@interface ColorTransition : ElementBaseTransition
/// Designated initializer.
/// @param view the view the transition is applied to
/// @param from colour at the start of the transition
/// @param to colour at the end of the transition
/// @param startDelay delay before the transition begins, in seconds
/// @param duration length of the transition, in seconds
/// @param interpolator timing curve used to interpolate between the colours
- (instancetype)initWithView:(UIView *)view
from:(UIColor *)from
to:(UIColor *)to
startDelay:(NSTimeInterval)startDelay
duration:(NSTimeInterval)duration
interpolator:(id<Interpolator>)interpolator;
/// Colour at the start of the transition.
@property(nonatomic, readonly, strong) UIColor *from;
/// Colour at the end of the transition.
@property(nonatomic, readonly, strong) UIColor *to;
@end
| 245 |
1,847 | // Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
#pragma once
#include "schema_locator.hpp"
namespace krabs {
/**
 * <summary>
 * Additional ETW trace context passed to event callbacks
 * to enable processing.
 * </summary>
 */
struct trace_context
{
// Schema lookup helper made available to event callbacks (see schema_locator.hpp).
const schema_locator schema_locator;
/* Add additional trace context here. */
};
}
| 172 |
1,147 | /*
Copyright (c) 2005-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include "common/parallel_for_each_common.h"
#include "common/concepts_common.h"
#include <vector>
#include <iterator>
//! \file test_parallel_for_each.cpp
//! \brief Test for [algorithms.parallel_for_each]
//! Test forward access iterator support
//! \brief \ref error_guessing \ref interface
TEST_CASE("Forward iterator support") {
for ( auto concurrency_level : utils::concurrency_range() ) {
tbb::global_control control(tbb::global_control::max_allowed_parallelism, concurrency_level);
for(size_t depth = 0; depth <= depths_nubmer; ++depth) {
g_tasks_expected = 0;
for (size_t i=0; i < depth; ++i)
g_tasks_expected += FindNumOfTasks(g_depths[i].value());
TestIterator_Modifiable<utils::ForwardIterator<value_t>>(depth);
}
}
}
//! Test random access iterator support
//! \brief \ref error_guessing \ref interface
TEST_CASE("Random access iterator support") {
for ( auto concurrency_level : utils::concurrency_range() ) {
tbb::global_control control(tbb::global_control::max_allowed_parallelism, concurrency_level);
for(size_t depth = 0; depth <= depths_nubmer; ++depth) {
g_tasks_expected = 0;
for (size_t i=0; i < depth; ++i)
g_tasks_expected += FindNumOfTasks(g_depths[i].value());
TestIterator_Modifiable<value_t*>(depth);
}
}
}
//! Test const random access iterator support
//! \brief \ref error_guessing \ref interface
TEST_CASE("Const random access iterator support") {
for ( auto concurrency_level : utils::concurrency_range() ) {
tbb::global_control control(tbb::global_control::max_allowed_parallelism, concurrency_level);
for(size_t depth = 0; depth <= depths_nubmer; ++depth) {
g_tasks_expected = 0;
for (size_t i=0; i < depth; ++i)
g_tasks_expected += FindNumOfTasks(g_depths[i].value());
TestIterator_Const<utils::ConstRandomIterator<value_t>>(depth);
}
}
}
//! Test container based overload
//! \brief \ref error_guessing \ref interface
TEST_CASE("Container based overload - forward iterator based container") {
container_based_overload_test_case<utils::ForwardIterator>(/*expected_value*/1);
}
//! Test container based overload
//! \brief \ref error_guessing \ref interface
TEST_CASE("Container based overload - random access iterator based container") {
container_based_overload_test_case<utils::RandomIterator>(/*expected_value*/1);
}
// Test for iterators over values convertible to work item type
//! \brief \ref error_guessing \ref interface
TEST_CASE("Using with values convertible to work item type") {
for ( auto concurrency_level : utils::concurrency_range() ) {
tbb::global_control control(tbb::global_control::max_allowed_parallelism, concurrency_level);
using Iterator = size_t*;
for(size_t depth = 0; depth <= depths_nubmer; ++depth) {
g_tasks_expected = 0;
for (size_t i=0; i < depth; ++i)
g_tasks_expected += FindNumOfTasks(g_depths[i].value());
// Test for iterators over values convertible to work item type
TestIterator_Common<Iterator>(depth);
TestBody<FakeTaskGeneratorBody_RvalueRefVersion, Iterator>(depth);
TestBody<TaskGeneratorBody_RvalueRefVersion, Iterator>(depth);
}
}
}
//! Testing workers going to sleep
//! \brief \ref resource_usage \ref stress
TEST_CASE("That all workers sleep when no work") {
// Generate a burst of CPU-bound parallel work: increment every element of a
// large vector 1000 times.
const std::size_t N = 100000;
std::vector<std::size_t> vec(N, 0);
tbb::parallel_for_each(vec.begin(), vec.end(), [&](std::size_t& in) {
for (int i = 0; i < 1000; ++i) {
++in;
}
});
// After the work drains, verify (via the helper) that worker threads go idle
// rather than continuing to consume CPU time.
TestCPUUserTime(utils::get_platform_max_threads());
}
#if __TBB_CPP20_CONCEPTS_PRESENT
template <typename Iterator, typename Body>
concept can_call_parallel_for_each_with_iterator = requires( Iterator it, const Body& body, tbb::task_group_context ctx ) {
tbb::parallel_for_each(it, it, body);
tbb::parallel_for_each(it, it, body, ctx);
};
template <typename ContainerBasedSequence, typename Body>
concept can_call_parallel_for_each_with_cbs = requires( ContainerBasedSequence cbs,
const ContainerBasedSequence const_cbs,
const Body& body, tbb::task_group_context ctx ) {
tbb::parallel_for_each(cbs, body);
tbb::parallel_for_each(cbs, body, ctx);
tbb::parallel_for_each(const_cbs, body);
tbb::parallel_for_each(const_cbs, body, ctx);
};
using CorrectCBS = test_concepts::container_based_sequence::Correct;
template <typename Body>
concept can_call_parallel_for_each =
can_call_parallel_for_each_with_iterator<CorrectCBS::iterator, Body> &&
can_call_parallel_for_each_with_cbs<CorrectCBS, Body>;
template <typename Iterator>
using CorrectBody = test_concepts::parallel_for_each_body::Correct<decltype(*std::declval<Iterator>())>;
void test_pfor_each_iterator_constraints() {
using CorrectIterator = typename std::vector<int>::iterator; // random_access_iterator
using IncorrectIterator = std::ostream_iterator<int>; // output_iterator
static_assert(can_call_parallel_for_each_with_iterator<CorrectIterator, CorrectBody<CorrectIterator>>);
static_assert(!can_call_parallel_for_each_with_iterator<IncorrectIterator, CorrectBody<IncorrectIterator>>);
}
void test_pfor_each_container_based_sequence_constraints() {
using namespace test_concepts::container_based_sequence;
static_assert(can_call_parallel_for_each_with_cbs<Correct, CorrectBody<Correct::iterator>>);
static_assert(!can_call_parallel_for_each_with_cbs<NoBegin, CorrectBody<NoBegin::iterator>>);
static_assert(!can_call_parallel_for_each_with_cbs<NoEnd, CorrectBody<NoEnd::iterator>>);
}
void test_pfor_each_body_constraints() {
using namespace test_concepts::parallel_for_each_body;
static_assert(can_call_parallel_for_each<Correct<int>>);
static_assert(can_call_parallel_for_each<WithFeeder<int>>);
static_assert(!can_call_parallel_for_each<NoOperatorRoundBrackets<int>>);
static_assert(!can_call_parallel_for_each<WithFeederNoOperatorRoundBrackets<int>>);
static_assert(!can_call_parallel_for_each<OperatorRoundBracketsNonConst<int>>);
static_assert(!can_call_parallel_for_each<WithFeederOperatorRoundBracketsNonConst<int>>);
static_assert(!can_call_parallel_for_each<WrongInputOperatorRoundBrackets<int>>);
static_assert(!can_call_parallel_for_each<WithFeederWrongFirstInputOperatorRoundBrackets<int>>);
static_assert(!can_call_parallel_for_each<WithFeederWrongSecondInputOperatorRoundBrackets<int>>);
}
//! \brief \ref error_guessing
TEST_CASE("parallel_for_each constraints") {
test_pfor_each_iterator_constraints();
test_pfor_each_container_based_sequence_constraints();
test_pfor_each_body_constraints();
}
#endif // __TBB_CPP20_CONCEPTS_PRESENT
| 2,935 |
2,023 | <gh_stars>1000+
import os, zipfile
from cStringIO import StringIO
def extract( filename, dir ):
zf = zipfile.ZipFile( filename )
namelist = zf.namelist()
dirlist = filter( lambda x: x.endswith( '/' ), namelist )
filelist = filter( lambda x: not x.endswith( '/' ), namelist )
# make base
pushd = os.getcwd()
if not os.path.isdir( dir ):
os.mkdir( dir )
os.chdir( dir )
# create directory structure
dirlist.sort()
for dirs in dirlist:
dirs = dirs.split( '/' )
prefix = ''
for dir in dirs:
dirname = os.path.join( prefix, dir )
if dir and not os.path.isdir( dirname ):
os.mkdir( dirname )
prefix = dirname
# extract files
for fn in filelist:
try:
out = open( fn, 'wb' )
buffer = StringIO( zf.read( fn ))
buflen = 2 ** 20
datum = buffer.read( buflen )
while datum:
out.write( datum )
datum = buffer.read( buflen )
out.close()
finally:
print fn
os.chdir( pushd )
| 571 |
1,383 | <filename>src/chrono_irrlicht/ChIrrNodeProxyToAsset.h
// =============================================================================
// PROJECT CHRONO - http://projectchrono.org
//
// Copyright (c) 2014 projectchrono.org
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be found
// in the LICENSE file at the top level of the distribution and at
// http://projectchrono.org/license-chrono.txt.
//
// =============================================================================
#ifndef CHIRRNODEPROXYTOASSET_H
#define CHIRRNODEPROXYTOASSET_H
#include <irrlicht.h>
#include "chrono/assets/ChAsset.h"
#include "chrono/assets/ChGlyphs.h"
#include "chrono/assets/ChTriangleMeshShape.h"
#include "chrono/assets/ChLineShape.h"
#include "chrono/assets/ChPathShape.h"
#include "chrono/assets/ChSurfaceShape.h"
#include "chrono_irrlicht/ChApiIrr.h"
#define ESNT_CHIRRNODEPROXYTOASSET 1202
namespace chrono {
namespace irrlicht {
/// @addtogroup irrlicht_module
/// @{
/// Class for proxy to ChAsset, it is a node with mesh in Irrlicht system
/// and a shared pointer to the ChAsset to whom it corresponds.
/// Example: (with ascii art, with --> shared pointer, ...> raw pointer)
///
/// CHRONO side IRRLICHT side
/// ChBody <......................._
/// ChIrrNodeAsset --------> ChIrrNode
/// ChBoxShape <-------------- ChIrrNodeProxyToAsset
/// IMeshSceneNode
/// ChSphereShape <------------ ChIrrNodeProxyToAsset
/// IMeshSceneNode
class ChApiIrr ChIrrNodeProxyToAsset : public irr::scene::ISceneNode {
  public:
    ChIrrNodeProxyToAsset(std::shared_ptr<ChAsset> asset,  ///< Chrono visualization asset
                          irr::scene::ISceneNode* parent   ///< parent node in Irrlicht hierarchy
    );
    ~ChIrrNodeProxyToAsset() {}

    /// No-op: actual drawing is performed by the child Irrlicht mesh nodes.
    virtual void render() {}

    /// Axis-aligned bounding box of this proxy node.
    virtual const irr::core::aabbox3d<irr::f32>& getBoundingBox() const { return Box; }

    /// Clone this proxy (sharing the same asset) under a new parent/manager.
    ISceneNode* clone(ISceneNode* newParent, irr::scene::ISceneManager* newManager);

    /// Get the associated visualization asset.
    std::shared_ptr<ChAsset>& GetVisualizationAsset() { return visualization_asset; }

    /// Update to reflect possible changes in the associated asset.
    virtual void Update();

    /// Custom scene-node type id (ESNT_CHIRRNODEPROXYTOASSET).
    virtual irr::scene::ESCENE_NODE_TYPE getType() const {
        return (irr::scene::ESCENE_NODE_TYPE)ESNT_CHIRRNODEPROXYTOASSET;
    }

  private:
    irr::core::aabbox3d<irr::f32> Box;             ///< bounding box
    std::shared_ptr<ChAsset> visualization_asset;  ///< associated visualization asset
    bool initial_update;                           ///< flag forcing a first update

    // Asset-type-specific refresh helpers dispatched from Update().
    void UpdateTriangleMesh(std::shared_ptr<ChTriangleMeshShape> trianglemesh);
    void UpdateTriangleMeshFixedConnectivity(std::shared_ptr<ChTriangleMeshShape> trianglemesh);
    void UpdateGlyphs(std::shared_ptr<ChGlyphs> glyphs);
    void UpdateSurface(std::shared_ptr<ChSurfaceShape> surface);
    void UpdateLine(std::shared_ptr<geometry::ChLine> line, unsigned int nvertexes);
};
/// @} irrlicht_module
} // namespace irrlicht
} // end namespace chrono
#endif
| 1,200 |
2,663 | <reponame>lobap/Superalgos
{
"type": "Initial Targets",
"definition": {
"text": "The initial targets node holds the most basic definitions about the position to be taken: the target rate and the target size."
},
"paragraphs": [
{
"style": "Text",
"text": "Both definitions are required.",
"updated": 1609595544643
}
]
} | 169 |
401 | <gh_stars>100-1000
/*
* Copyright 2016, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.smalidea.debugging.value;
import com.intellij.openapi.project.Project;
import com.sun.jdi.PrimitiveValue;
import org.jf.smalidea.psi.impl.SmaliMethod;
/**
 * A {@link PrimitiveValue} whose underlying JDI value is resolved lazily.
 * Each primitive accessor first resolves the delegate via {@code getValue()}
 * (inherited from {@code LazyValue}) and then forwards the call to it.
 */
public class LazyPrimitiveValue<T extends PrimitiveValue> extends LazyValue<T> implements PrimitiveValue {
    public LazyPrimitiveValue(SmaliMethod method, Project project, int registerNumber, String type) {
        super(method, project, registerNumber, type);
    }

    @Override public boolean booleanValue() {
        final T delegate = getValue();
        return delegate.booleanValue();
    }

    @Override public byte byteValue() {
        final T delegate = getValue();
        return delegate.byteValue();
    }

    @Override public char charValue() {
        final T delegate = getValue();
        return delegate.charValue();
    }

    @Override public double doubleValue() {
        final T delegate = getValue();
        return delegate.doubleValue();
    }

    @Override public float floatValue() {
        final T delegate = getValue();
        return delegate.floatValue();
    }

    @Override public int intValue() {
        final T delegate = getValue();
        return delegate.intValue();
    }

    @Override public long longValue() {
        final T delegate = getValue();
        return delegate.longValue();
    }

    @Override public short shortValue() {
        final T delegate = getValue();
        return delegate.shortValue();
    }

    @Override public String toString() {
        final T delegate = getValue();
        return delegate.toString();
    }
}
| 861 |
679 | <filename>main/sw/source/ui/web/wview.cxx
/**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_sw.hxx"
#include <sfx2/msg.hxx>
#include <svl/srchitem.hxx>
#include <sfx2/dispatch.hxx>
#include <sfx2/templdlg.hxx>
#include <svx/srchdlg.hxx>
#include <basic/sbxobj.hxx>
#include <uivwimp.hxx>
#include <svx/fmshell.hxx>
#include <svx/extrusionbar.hxx>
#include <svx/fontworkbar.hxx>
#include <sfx2/objface.hxx>
#include <swmodule.hxx>
#include <unotxvw.hxx>
#include <swtypes.hxx>
#include <cmdid.h>
#include <globals.hrc>
#include <wrtsh.hxx>
#include <edtwin.hxx>
#include <wgrfsh.hxx>
#include <wfrmsh.hxx>
#include <wolesh.hxx>
#include <wtabsh.hxx>
#include <wlistsh.hxx>
#include <wformsh.hxx>
#include <wtextsh.hxx>
#include <barcfg.hxx>
#include <doc.hxx>
// EIGENTLICH nicht moeglich !!
#include <beziersh.hxx>
#include <drawsh.hxx>
#include <drwtxtsh.hxx>
#include <annotsh.hxx>
#include <wview.hxx>
#include <wdocsh.hxx>
#include <web.hrc>
#include <shells.hrc>
#define SwWebView
#define Frames
#define Graphics
#define OLEObjects
#define Controls
#define Text
#define Frame
#define Graphic
#define Object
#define Draw
#define DrawText
#define TextInTable
#define ListInText
#define ListInTable
#define Page
#include <swslots.hxx>
// Register SwWebView as the "Default" view factory for SwWebDocShell.
SFX_IMPL_NAMED_VIEWFACTORY(SwWebView, "Default")
{
    SFX_VIEW_REGISTRATION(SwWebDocShell);
}

// Interface registration: template (styles) and search child windows plus
// the web tools object bar.
SFX_IMPL_INTERFACE( SwWebView, SwView, SW_RES(RID_WEBTOOLS_TOOLBOX) )
{
    SFX_CHILDWINDOW_REGISTRATION(SfxTemplateDialogWrapper::GetChildWindowId());
    SFX_CHILDWINDOW_REGISTRATION(SvxSearchDialogWrapper::GetChildWindowId());
    SFX_OBJECTBAR_REGISTRATION( SFX_OBJECTBAR_TOOLS|
                                SFX_VISIBILITY_STANDARD|SFX_VISIBILITY_SERVER,
                                SW_RES(RID_WEBTOOLS_TOOLBOX) );
}

// RTTI registration: SwWebView derives from SwView.
TYPEINIT1(SwWebView,SwView)
/*-----------------22.01.97 14.27-------------------
--------------------------------------------------*/
// Constructor: everything is handled by the SwView base class.
SwWebView::SwWebView(SfxViewFrame* _pFrame, SfxViewShell* _pShell) :
    SwView(_pFrame, _pShell)
{
}
/*-----------------22.01.97 14.27-------------------
--------------------------------------------------*/
// Destructor: nothing to release beyond what SwView cleans up.
SwWebView::~SwWebView()
{
}
/*-----------------23.01.97 09.01-------------------
--------------------------------------------------*/
/** Rebuild the dispatcher shell stack to match the current selection.
 *
 *  Compares the current selection type against the one active on the last
 *  call; if it changed, the old shells are popped from the dispatcher and a
 *  new stack (OLE/frame/graphic/draw/text/table/... shells) is pushed.
 *  Also keeps the input context, the mouse pointer and the toolbar
 *  configuration in sync, and performs a deferred table update.
 *  (Comments translated from the original German.)
 */
void SwWebView::SelectShell()
{
    // Decide whether UpdateTable has to be called (done at the very end).
    sal_Bool bUpdateTable = sal_False;
    const SwFrmFmt* pCurTableFmt = GetWrtShell().GetTableFmt();
    if(pCurTableFmt && pCurTableFmt != GetLastTblFrmFmt())
    {
        bUpdateTable = sal_True; // can only be executed later
    }
    SetLastTblFrmFmt(pCurTableFmt);
    //SEL_TBL and SEL_TBL_CELLS can be OR-ed together!
    int nNewSelectionType = (GetWrtShell().GetSelectionType()
                                & ~nsSelectionType::SEL_TBL_CELLS);
    int _nSelectionType = GetSelectionType();
    if ( nNewSelectionType == _nSelectionType )
    {
        GetViewFrame()->GetBindings().InvalidateAll( sal_False );
        if ( _nSelectionType & nsSelectionType::SEL_OLE ||
             _nSelectionType & nsSelectionType::SEL_GRF )
            //For graphics and OLE the verb can of course change!
            ImpSetVerb( nNewSelectionType );
    }
    else
    {
        // DELETEZ(pxSelectionObj); //delete selection object
        SfxDispatcher &rDispatcher = *GetViewFrame()->GetDispatcher();
        SwToolbarConfigItem *pBarCfg = SW_MOD()->GetWebToolbarConfig();
        if( GetCurShell() )
        {
            rDispatcher.Flush(); // really delete all cached shells
            //Remember which toolbar was visible for the old selection
            sal_Int32 nId = rDispatcher.GetObjectBarId( SFX_OBJECTBAR_OBJECT );
            if ( nId )
                pBarCfg->SetTopToolbar( _nSelectionType, nId );
            // Pop everything down to (and including) the last Writer shell.
            SfxShell *pSfxShell;
            sal_uInt16 i;
            for ( i = 0; sal_True; ++i )
            {
                pSfxShell = rDispatcher.GetShell( i );
                if ( !( pSfxShell->ISA( SwBaseShell ) ||
                    pSfxShell->ISA( SwDrawTextShell ) || pSfxShell->ISA( SwAnnotationShell ) ) )
                    break;
            }
            pSfxShell = rDispatcher.GetShell( --i );
            ASSERT( pSfxShell, "My Shell ist lost in space" );
            rDispatcher.Pop( *pSfxShell, SFX_SHELL_POP_UNTIL | SFX_SHELL_POP_DELETE);
        }
        // Make sure a form shell is present on the stack.
        sal_Bool bInitFormShell = sal_False;
        if( !GetFormShell() )
        {
            bInitFormShell = sal_True;
            SetFormShell( new FmFormShell( this ) );
            rDispatcher.Push( *GetFormShell() );
        }
        sal_Bool bSetExtInpCntxt = sal_False;
        _nSelectionType = nNewSelectionType;
        SetSelectionType( _nSelectionType );
        ShellModes eShellMode;
        // Push the shell stack that matches the new selection type.
        if ( _nSelectionType & nsSelectionType::SEL_OLE )
        {
            eShellMode = SHELL_MODE_OBJECT;
            SetShell( new SwWebOleShell( *this ));
            rDispatcher.Push( *GetCurShell() );
        }
        else if ( _nSelectionType & nsSelectionType::SEL_FRM
            || _nSelectionType & nsSelectionType::SEL_GRF)
        {
            eShellMode = SHELL_MODE_FRAME;
            SetShell( new SwWebFrameShell( *this ));
            rDispatcher.Push( *GetCurShell() );
            if(_nSelectionType & nsSelectionType::SEL_GRF )
            {
                eShellMode = SHELL_MODE_GRAPHIC;
                SetShell( new SwWebGrfShell( *this ));
                rDispatcher.Push( *GetCurShell() );
            }
        }
        else if ( _nSelectionType & nsSelectionType::SEL_FRM )
        {
            eShellMode = SHELL_MODE_FRAME;
            SetShell( new SwWebFrameShell( *this ));
            rDispatcher.Push( *GetCurShell() );
        }
        else if ( _nSelectionType & nsSelectionType::SEL_DRW )
        {
            eShellMode = SHELL_MODE_DRAW;
            SetShell( new svx::ExtrusionBar( this ) );
            rDispatcher.Push( *GetCurShell() );
            eShellMode = SHELL_MODE_DRAW;
            SetShell( new svx::FontworkBar( this ) );
            rDispatcher.Push( *GetCurShell() );
            SetShell( new SwDrawShell( *this ));
            rDispatcher.Push( *GetCurShell() );
            if ( _nSelectionType & nsSelectionType::SEL_BEZ )
            {
                eShellMode = SHELL_MODE_BEZIER;
                SetShell( new SwBezierShell( *this ));
                rDispatcher.Push( *GetCurShell() );
            }
        }
        else if ( _nSelectionType & nsSelectionType::SEL_DRW_FORM )
        {
            eShellMode = SHELL_MODE_DRAW_FORM;
            SetShell( new SwWebDrawFormShell( *this ));
            rDispatcher.Push( *GetCurShell() );
        }
        else if ( _nSelectionType & nsSelectionType::SEL_DRW_TXT )
        {
            eShellMode = SHELL_MODE_DRAWTEXT;
            rDispatcher.Push( *(new SwBaseShell( *this )) );
            SetShell( new SwDrawTextShell( *this ));
            rDispatcher.Push( *GetCurShell() );
        }
        else if ( _nSelectionType & nsSelectionType::SEL_POSTIT )
        {
            eShellMode = SHELL_MODE_POSTIT;
            SetShell( new SwAnnotationShell( *this ) );
            rDispatcher.Push( *GetCurShell() );
        }
        else
        {
            bSetExtInpCntxt = sal_True;
            eShellMode = SHELL_MODE_TEXT;
            if ( _nSelectionType & nsSelectionType::SEL_NUM )
            {
                eShellMode = SHELL_MODE_LIST_TEXT;
                SetShell( new SwWebListShell( *this ));
                rDispatcher.Push( *GetCurShell() );
            }
            SetShell( new SwWebTextShell(*this));
            rDispatcher.Push( *GetCurShell() );
            if ( _nSelectionType & nsSelectionType::SEL_TBL )
            {
                eShellMode = eShellMode == SHELL_MODE_LIST_TEXT ? SHELL_MODE_TABLE_LIST_TEXT
                                                        : SHELL_MODE_TABLE_TEXT;
                SetShell( new SwWebTableShell( *this ));
                rDispatcher.Push( *GetCurShell() );
            }
        }
        ImpSetVerb( _nSelectionType );
        GetViewImpl()->SetShellMode(eShellMode);
        // Enable extended text input only for editable text selections.
        if( !GetDocShell()->IsReadOnly() )
        {
            if( bSetExtInpCntxt && GetWrtShell().HasReadonlySel() )
                bSetExtInpCntxt = sal_False;
            InputContext aCntxt( GetEditWin().GetInputContext() );
            aCntxt.SetOptions( bSetExtInpCntxt
                                ? (aCntxt.GetOptions() |
                                        ( INPUTCONTEXT_TEXT |
                                            INPUTCONTEXT_EXTTEXTINPUT ))
                                : (aCntxt.GetOptions() & ~
                                        ( INPUTCONTEXT_TEXT |
                                            INPUTCONTEXT_EXTTEXTINPUT )) );
            GetEditWin().SetInputContext( aCntxt );
        }
        //For the new selection activate the toolbar that was active the
        //last time
        //A Flush() must happen before, but according to MBA it does not
        //affect the UI and is not a performance problem
        // TODO/LATER: maybe now the Flush() command is superfluous?!
        rDispatcher.Flush();
        Point aPnt = GetEditWin().GetPointerPosPixel();
        aPnt = GetEditWin().PixelToLogic(aPnt);
        GetEditWin().UpdatePointer(aPnt);
        if ( bInitFormShell && GetWrtShell().GetDrawView() )
            GetFormShell()->SetView( PTR_CAST( FmFormView,
                GetWrtShell().GetDrawView()));
    }
    GetViewImpl()->GetUNOObject_Impl()->NotifySelChanged();
    //A good moment for communication with OLE objects?
    if ( GetDocShell()->GetDoc()->IsOLEPrtNotifyPending() )
        GetDocShell()->GetDoc()->PrtOLENotify( sal_False );
    //now the table update
    if(bUpdateTable)
        GetWrtShell().UpdateTable();
}
| 4,152 |
389 | /*
* Copyright 2014 <NAME>, Inc.
*/
package gw.config;
public abstract class BaseService implements IService
{
private boolean _inited = false;
public final boolean isInited()
{
return _inited;
}
public final void init()
{
doInit();
_inited = true;
}
public final void uninit()
{
doInit();
_inited = false;
}
protected void doInit() {
// for subclasses
}
protected void doUninit() {
// for subclasses
}
} | 172 |
1,025 | <reponame>tiropas/java-game-server
package org.menacheri.jetserver.server.netty;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.concurrent.Executors;
import org.jboss.netty.bootstrap.Bootstrap;
import org.jboss.netty.bootstrap.ConnectionlessBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelException;
import org.jboss.netty.channel.FixedReceiveBufferSizePredictorFactory;
import org.jboss.netty.channel.socket.nio.NioDatagramChannelFactory;
import org.menacheri.jetserver.concurrent.NamedThreadFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This server does UDP connection less broadcast. Since it does not store the
* connection, each call to a channel write must also contain the remote socket
* address <code>e.getChannel().write("Message", e.getRemoteAddress())</code>.
* Since it uses the same channel for all incoming connections, the handlers
* cannot be modified refer to <a
* href="http://www.jboss.org/netty/community.html#nabble-f685700">nabble
* post</a>
*
* @author <NAME>
*
*/
public class NettyUDPServer extends AbstractNettyServer
{
    private static final Logger LOG = LoggerFactory.getLogger(NettyUDPServer.class);

    /** Predictor deciding how large the receive buffers for datagrams are. */
    private FixedReceiveBufferSizePredictorFactory bufferSizePredictor;

    /** Command-line style arguments forwarded to bootstrap configuration. */
    private String[] args;

    /**
     * The connected channel for this server. This reference can be used to
     * shutdown this server.
     */
    private Channel channel;

    public NettyUDPServer()
    {
    }

    /** Starts the server on the given port, using the stored {@link #args}. */
    @Override
    public void startServer(int port) throws Exception
    {
        portNumber = port;
        startServer(args);
    }

    /** Starts the server on the port derived from the stored {@link #args}. */
    @Override
    public void startServer() throws Exception
    {
        startServer(args);
    }

    /** Resolves the port from {@code args} and binds on all interfaces. */
    public void startServer(String[] args) throws Exception
    {
        int portNumber = getPortNumber(args);
        InetSocketAddress socketAddress = new InetSocketAddress(portNumber);
        startServer(socketAddress);
    }

    /**
     * Creates the connectionless (datagram) bootstrap backed by a cached
     * thread pool whose threads are named "UDP-Server-Worker".
     */
    @Override
    public Bootstrap createServerBootstrap()
    {
        serverBootstrap = new ConnectionlessBootstrap(
                new NioDatagramChannelFactory(Executors
                        .newCachedThreadPool(new NamedThreadFactory(
                                "UDP-Server-Worker"))));
        return serverBootstrap;
    }

    /** Closes the bound channel (if any) before the base-class shutdown. */
    @Override
    public void stopServer() throws Exception
    {
        if(null != channel)
        {
            channel.close();
        }
        super.stopServer();
    }

    public FixedReceiveBufferSizePredictorFactory getBufferSizePredictor()
    {
        return bufferSizePredictor;
    }

    public void setBufferSizePredictor(
            FixedReceiveBufferSizePredictorFactory bufferSizePredictor)
    {
        this.bufferSizePredictor = bufferSizePredictor;
    }

    @Override
    public TransmissionProtocol getTransmissionProtocol()
    {
        return TRANSMISSION_PROTOCOL.UDP;
    }

    /**
     * Configures the bootstrap options (no broadcast, fixed 64 KiB buffers)
     * and binds the single shared datagram channel to {@code socketAddress}.
     * Rethrows any {@link ChannelException} after logging it.
     */
    @Override
    public void startServer(InetSocketAddress socketAddress)
    {
        this.socketAddress = socketAddress;
        //TODO these should be set from spring
        serverBootstrap.setOption("broadcast", "false");
        serverBootstrap.setOption("receiveBufferSizePredictorFactory",
                bufferSizePredictor);
        serverBootstrap.setOption("sendBufferSize", 65536);
        serverBootstrap.setOption("receiveBufferSize", 65536);
        configureServerBootStrap(args);
        try
        {
            channel = ((ConnectionlessBootstrap) serverBootstrap)
                    .bind(socketAddress);
        }
        catch (ChannelException e)
        {
            LOG.error("Unable to start UDP server due to error {}",e);
            throw e;
        }
    }

    public String[] getArgs()
    {
        return args;
    }

    public void setArgs(String[] args)
    {
        this.args = args;
    }

    @Override
    public String toString()
    {
        return "NettyUDPServer [args=" + Arrays.toString(args)
                + ", socketAddress=" + socketAddress + ", portNumber=" + portNumber
                + "]";
    }
}
| 1,228 |
14,668 | <filename>third_party/blink/renderer/platform/text/mathml_operator_dictionary_test.cc
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/platform/text/mathml_operator_dictionary.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/renderer/platform/wtf/text/character_names.h"
#include "third_party/blink/renderer/platform/wtf/text/string_builder.h"
namespace blink {
static const UChar32 category_a[]{
0x2190, 0x2191, 0x2192, 0x2193, 0x2194, 0x2195, 0x2196, 0x2197, 0x2198,
0x2199, 0x219C, 0x219D, 0x219E, 0x219F, 0x21A0, 0x21A1, 0x21A2, 0x21A3,
0x21A4, 0x21A5, 0x21A6, 0x21A7, 0x21A8, 0x21A9, 0x21AA, 0x21AB, 0x21AC,
0x21BC, 0x21BD, 0x21BE, 0x21BF, 0x21C0, 0x21C1, 0x21C2, 0x21C3, 0x21C4,
0x21C5, 0x21C6, 0x21C7, 0x21C8, 0x21C9, 0x21CA, 0x21CB, 0x21CC, 0x21D0,
0x21D1, 0x21D2, 0x21D3, 0x21D4, 0x21D5, 0x21D6, 0x21D7, 0x21D8, 0x21D9,
0x21DA, 0x21DB, 0x21DC, 0x21DD, 0x21E0, 0x21E1, 0x21E2, 0x21E3, 0x21E4,
0x21E5, 0x21E6, 0x21E7, 0x21E8, 0x21E9, 0x21EA, 0x21EB, 0x21EC, 0x21ED,
0x21EE, 0x21EF, 0x21F0, 0x21F3, 0x21F5, 0x21F6, 0x21FD, 0x21FE, 0x21FF,
0x27F0, 0x27F1, 0x27F5, 0x27F6, 0x27F7, 0x27F8, 0x27F9, 0x27FA, 0x27FB,
0x27FC, 0x27FD, 0x27FE, 0x27FF, 0x2952, 0x290A, 0x290B, 0x290C, 0x290D,
0x290E, 0x290F, 0x2910, 0x2912, 0x2913, 0x2921, 0x2922, 0x294E, 0x294F,
0x2950, 0x2951, 0x2952, 0x2953, 0x2954, 0x2955, 0x2956, 0x2957, 0x2958,
0x2959, 0x295A, 0x295B, 0x295C, 0x295D, 0x295E, 0x295F, 0x2960, 0x2961,
0x296E, 0x296F, 0x2B45, 0x2B46};
static const UChar32 category_b[]{
0x002B, 0x002D, 0x002F, 0x00B1, 0x00F7, 0x0322, 0x2044, 0x2212, 0x2213,
0x2214, 0x2215, 0x2216, 0x2218, 0x2224, 0x2227, 0x2228, 0x2229, 0x222A,
0x2236, 0x2238, 0x228C, 0x228D, 0x228E, 0x228F, 0x2293, 0x2294, 0x2295,
0x2296, 0x2298, 0x229D, 0x229E, 0x229F, 0x22BB, 0x22BC, 0x22BD, 0x22C4,
0x22C6, 0x22CE, 0x22CF, 0x22D2, 0x22D3, 0x2795, 0x2796, 0x2797, 0x27F4,
0x29BC, 0x29F6, 0x2A22, 0x2A23, 0x2A24, 0x2A25, 0x2A26, 0x2A27, 0x2A28,
0x2A29, 0x2A2A, 0x2A2B, 0x2A2C, 0x2A2D, 0x2A2E, 0x2A38, 0x2A39, 0x2A3A,
0x2A40, 0x2A41, 0x2A42, 0x2A43, 0x2A44, 0x2A45, 0x2A46, 0x2A47, 0x2A48,
0x2A49, 0x2A4A, 0x2A4B, 0x2A4C, 0x2A4D, 0x2A4E, 0x2A4F, 0x2A51, 0x2A52,
0x2A53, 0x2A54, 0x2A55, 0x2A56, 0x2A57, 0x2A58, 0x2A59, 0x2A5A, 0x2A5B,
0x2A5C, 0x2A5D, 0x2A5E, 0x2A5F, 0x2A60, 0x2A61, 0x2A62, 0x2A63, 0x2ADA,
0x2ADB, 0x2AFB, 0x2AFD, 0x2B32,
};
static const UChar32 category_c[]{
0x0025, 0x002A, 0x002E, 0x003F, 0x0040, 0x005E, 0x005F, 0x007C, 0x00B7,
0x00D7, 0x0323, 0x032B, 0x032F, 0x0332, 0x2022, 0x2043, 0x2206, 0x220E,
0x2217, 0x223F, 0x2240, 0x2297, 0x2299, 0x22A0, 0x22A1, 0x22C5, 0x22C7,
0x22C9, 0x22CA, 0x22CB, 0x22CC, 0x2305, 0x2306, 0x25A0, 0x25A1, 0x25AA,
0x25AB, 0x25AD, 0x25AE, 0x25AF, 0x25B0, 0x25B1, 0x27CB, 0x27CD, 0x2981,
0x2982, 0x2999, 0x299A, 0x29B5, 0x29C2, 0x29C3, 0x29C9, 0x29CA, 0x29CB,
0x29CC, 0x29CD, 0x29D8, 0x29D9, 0x29DB, 0x29DF, 0x29E0, 0x29E2, 0x29E7,
0x29E8, 0x29E9, 0x29EA, 0x29EB, 0x29EC, 0x29ED, 0x29F8, 0x29F9, 0x29FA,
0x29FB, 0x2A1D, 0x2A1E, 0x2A1F, 0x2A20, 0x2A21, 0x2A2F, 0x2A30, 0x2A31,
0x2A32, 0x2A33, 0x2A34, 0x2A35, 0x2A36, 0x2A37, 0x2A3B, 0x2A3C, 0x2A3D,
0x2A3F, 0x2A50, 0x2ADC, 0x2ADD, 0x2AFE,
};
static const UChar32 category_d[]{
0x0021, 0x002B, 0x002D, 0x00AC, 0x00B1, 0x0332, 0x2018, 0x201C, 0x2200,
0x2201, 0x2203, 0x2204, 0x2207, 0x2212, 0x2213, 0x221F, 0x2220, 0x2221,
0x2222, 0x223C, 0x22BE, 0x22BF, 0x2310, 0x2319, 0x2795, 0x2796, 0x27C0,
0x299B, 0x299C, 0x299D, 0x299E, 0x299F, 0x29A0, 0x29A1, 0x29A2, 0x29A3,
0x29A4, 0x29A5, 0x29A6, 0x29A7, 0x29A8, 0x29A9, 0x29AA, 0x29AB, 0x29AC,
0x29AD, 0x29AE, 0x29AF, 0x2AEC, 0x2AED,
};
static const UChar32 category_e[]{
0x0021, 0x0022, 0x0026, 0x0027, 0x0060, 0x00A8, 0x00B0, 0x00B2,
0x00B3, 0x00B4, 0x00B8, 0x00B9, 0x02CA, 0x02CB, 0x02D8, 0x02D9,
0x02DA, 0x02DD, 0x0311, 0x0320, 0x0325, 0x0327, 0x032A, 0x0332,
0x2019, 0x201A, 0x201B, 0x201D, 0x201E, 0x201F, 0x2032, 0x2033,
0x2034, 0x2035, 0x2036, 0x2037, 0x2057, 0x20DB, 0x20DC, 0x23CD,
};
static const UChar32 category_f[]{
0x0028, 0x005B, 0x007B, 0x007C, 0x2016, 0x2308, 0x230A, 0x2329, 0x2772,
0x27E6, 0x27E8, 0x27EA, 0x27EC, 0x27EE, 0x2980, 0x2983, 0x2985, 0x2987,
0x2989, 0x298B, 0x298D, 0x298F, 0x2991, 0x2993, 0x2995, 0x2997, 0x29FC,
};
static const UChar32 category_g[]{
0x0029, 0x005D, 0x007C, 0x007D, 0x2016, 0x2309, 0x230B, 0x232A, 0x2773,
0x27E7, 0x27E9, 0x27EB, 0x27ED, 0x27EF, 0x2980, 0x2984, 0x2986, 0x2988,
0x298A, 0x298C, 0x298E, 0x2990, 0x2992, 0x2994, 0x2996, 0x2998, 0x29FD,
};
static const UChar32 category_h[]{
0x222B, 0x222C, 0x222D, 0x222E, 0x222F, 0x2230, 0x2231, 0x2232, 0x2233,
0x2A0B, 0x2A0C, 0x2A0D, 0x2A0E, 0x2A0F, 0x2A10, 0x2A11, 0x2A12, 0x2A13,
0x2A14, 0x2A15, 0x2A16, 0x2A17, 0x2A18, 0x2A19, 0x2A1A, 0x2A1B, 0x2A1C};
static const UChar32 category_i[]{
0x005E, 0x005F, 0x007E, 0x00AF, 0x02C6, 0x02C7, 0x02C9, 0x02CD,
0x02DC, 0x02F7, 0x0302, 0x203E, 0x2322, 0x2323, 0x23B4, 0x23B5,
0x23DC, 0x23DD, 0x23DE, 0x23DF, 0x23E0, 0x23E1,
};
static const UChar32 category_j[]{
0x220F, 0x2210, 0x2211, 0x22C0, 0x22C1, 0x22C2, 0x22C3,
0x2A00, 0x2A01, 0x2A02, 0x2A03, 0x2A04, 0x2A05, 0x2A06,
0x2A07, 0x2A08, 0x2A09, 0x2A0A, 0x2AFC, 0x2AFF,
};
static const UChar32 category_k[]{
0x2145, 0x2146, 0x2202, 0x221A, 0x221B, 0x221C,
};
static const UChar32 category_l[]{
0x005C, 0x2061, 0x2062, 0x2063, 0x2064,
};
static const UChar32 category_m[]{
0x002C,
0x003A,
0x003B,
};
// Returns true if |character| appears in |table|.  The category tables above
// are kept in ascending order, which std::binary_search requires.
template <typename T, size_t N>
bool IsInCategory(const T (&table)[N], UChar32 character) {
  return std::binary_search(table, table + base::size(table), character);
}
// Builds a String containing the single code point |c| (as one or two UTF-16
// code units).
String FromUChar32(UChar32 c) {
  StringBuilder input;
  input.Append(c);
  return input.ToString();
}
// Walks every code point and checks that the infix form maps each category
// table to the expected dictionary category.  The reserved range
// [kCombiningMinusSignBelow, kGreekCapitalReversedDottedLunateSigmaSymbol]
// must always map to kNone.  Code points outside all tables are not checked.
TEST(MathOperatorDictionaryTest, Infix) {
  for (UChar32 ch = 0; ch < kMaxCodepoint; ch++) {
    String s = FromUChar32(ch);
    s.Ensure16Bit();
    if (ch >= kCombiningMinusSignBelow &&
        ch <= kGreekCapitalReversedDottedLunateSigmaSymbol) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kInfix) ==
                  MathMLOperatorDictionaryCategory::kNone);
    } else if (IsInCategory(category_a, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kInfix) ==
                  MathMLOperatorDictionaryCategory::kA);
    } else if (IsInCategory(category_b, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kInfix) ==
                  MathMLOperatorDictionaryCategory::kB);
    } else if (IsInCategory(category_c, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kInfix) ==
                  MathMLOperatorDictionaryCategory::kC);
    } else if (IsInCategory(category_l, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kInfix) ==
                  MathMLOperatorDictionaryCategory::kDorEorL);
    } else if (IsInCategory(category_m, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kInfix) ==
                  MathMLOperatorDictionaryCategory::kM);
    }
  }
}
// Same exhaustive walk for the prefix form; unlike the infix test, every
// code point outside the category tables must map to kNone.
TEST(MathOperatorDictionaryTest, Prefix) {
  for (UChar32 ch = 0; ch < kMaxCodepoint; ch++) {
    String s = FromUChar32(ch);
    s.Ensure16Bit();
    if (ch >= kCombiningMinusSignBelow &&
        ch <= kGreekCapitalReversedDottedLunateSigmaSymbol) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPrefix) ==
                  MathMLOperatorDictionaryCategory::kNone);
    } else if (IsInCategory(category_d, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPrefix) ==
                  MathMLOperatorDictionaryCategory::kDorEorL);
    } else if (IsInCategory(category_f, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPrefix) ==
                  MathMLOperatorDictionaryCategory::kForG);
    } else if (IsInCategory(category_h, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPrefix) ==
                  MathMLOperatorDictionaryCategory::kH);
    } else if (IsInCategory(category_j, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPrefix) ==
                  MathMLOperatorDictionaryCategory::kJ);
    } else if (IsInCategory(category_k, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPrefix) ==
                  MathMLOperatorDictionaryCategory::kK);
    } else {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPrefix) ==
                  MathMLOperatorDictionaryCategory::kNone);
    }
  }
}
// Exhaustive walk for the postfix form.  Two Arabic mathematical operators
// are special-cased into category I before the regular table lookups.
TEST(MathOperatorDictionaryTest, Postfix) {
  for (UChar32 ch = 0; ch < kMaxCodepoint; ch++) {
    String s = FromUChar32(ch);
    s.Ensure16Bit();
    if (ch >= kCombiningMinusSignBelow &&
        ch <= kGreekCapitalReversedDottedLunateSigmaSymbol) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPostfix) ==
                  MathMLOperatorDictionaryCategory::kNone);
    } else if (ch == kArabicMathematicalOperatorMeemWithHahWithTatweel ||
               ch == kArabicMathematicalOperatorHahWithDal) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPostfix) ==
                  MathMLOperatorDictionaryCategory::kI);
    } else if (IsInCategory(category_e, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPostfix) ==
                  MathMLOperatorDictionaryCategory::kDorEorL);
    } else if (IsInCategory(category_g, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPostfix) ==
                  MathMLOperatorDictionaryCategory::kForG);
    } else if (IsInCategory(category_i, ch)) {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPostfix) ==
                  MathMLOperatorDictionaryCategory::kI);
    } else {
      EXPECT_TRUE(FindCategory(s, MathMLOperatorDictionaryForm::kPostfix) ==
                  MathMLOperatorDictionaryCategory::kNone);
    }
  }
}
} // namespace blink
| 5,575 |
880 | <reponame>LinZong/logback-android
/**
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.qos.logback.core.property;
import ch.qos.logback.core.PropertyDefinerBase;
import ch.qos.logback.core.util.OptionHelper;
import java.io.File;
/**
* In conjunction with {@link ch.qos.logback.core.joran.action.PropertyAction} sets
* the named variable to "true" if the file specified by {@link #setPath(String) path}
* property exists, to "false" otherwise.
*
* @see #getPropertyValue()
*
* @author <NAME>uml;
*/
/**
 * Property definer that resolves to {@code "true"} when the file denoted by
 * the {@code path} property exists and {@code "false"} otherwise.  If no path
 * has been configured, an error is reported and {@code null} is returned.
 *
 * @author <NAME>uml;
 */
public class FileExistsPropertyDefiner extends PropertyDefinerBase {

    String path;

    /** Returns the configured file path (may be null if never set). */
    public String getPath() {
        return path;
    }

    /**
     * Sets the path of the file whose existence is probed.
     *
     * @param path the file path
     */
    public void setPath(String path) {
        this.path = path;
    }

    /**
     * Computes the property value: {@code "true"}/{@code "false"} for an
     * existing/missing file, or {@code null} (after reporting an error)
     * when no path was configured.
     */
    public String getPropertyValue() {
        if (!OptionHelper.isEmpty(path)) {
            return booleanAsStr(new File(path).exists());
        }
        addError("The \"path\" property must be set.");
        return null;
    }
}
| 567 |
1,083 | <filename>core/src/main/java/org/apache/carbondata/core/util/CarbonTimeStatisticsFactory.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.util;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.stats.DriverQueryStatisticsRecorderDummy;
import org.apache.carbondata.core.stats.DriverQueryStatisticsRecorderImpl;
import org.apache.carbondata.core.stats.QueryStatisticsRecorder;
import org.apache.carbondata.core.stats.QueryStatisticsRecorderDummy;
import org.apache.carbondata.core.stats.QueryStatisticsRecorderImpl;
/**
 * Static factory for CarbonData statistics recorders.  Depending on the
 * Carbon configuration flags, it hands out either real recorder
 * implementations or no-op "dummy" ones, so callers never need to check
 * whether statistics collection is enabled.
 */
public class CarbonTimeStatisticsFactory {
  // Raw config value of ENABLE_DATA_LOADING_STATISTICS ("true"/"false").
  private static String loadStatisticsInstanceType;
  // Shared load-statistics singleton (real or dummy).
  private static LoadStatistics loadStatisticsInstance;
  // Raw config value of ENABLE_QUERY_STATISTICS ("true"/"false").
  private static String driverRecorderType;
  // Shared driver-side query recorder singleton (real or dummy).
  private static QueryStatisticsRecorder driverRecorder;

  static {
    // Read the configuration once and build the shared singletons.
    CarbonTimeStatisticsFactory.updateTimeStatisticsUtilStatus();
    loadStatisticsInstance = genLoadStatisticsInstance();
    driverRecorder = genDriverRecorder();
  }

  // Caches the two feature flags from CarbonProperties.
  private static void updateTimeStatisticsUtilStatus() {
    loadStatisticsInstanceType = CarbonProperties.getInstance()
        .getProperty(CarbonCommonConstants.ENABLE_DATA_LOADING_STATISTICS,
            CarbonCommonConstants.ENABLE_DATA_LOADING_STATISTICS_DEFAULT);
    driverRecorderType = CarbonProperties.getInstance()
        .getProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
            CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT);
  }

  // Real implementation when data-loading statistics are enabled, else dummy.
  private static LoadStatistics genLoadStatisticsInstance() {
    if (loadStatisticsInstanceType.equalsIgnoreCase("true")) {
      return CarbonLoadStatisticsImpl.getInstance();
    } else {
      return CarbonLoadStatisticsDummy.getInstance();
    }
  }

  /** Returns the shared load-statistics collector (real or no-op). */
  public static LoadStatistics getLoadStatisticsInstance() {
    return loadStatisticsInstance;
  }

  // Real implementation when query statistics are enabled, else dummy.
  private static QueryStatisticsRecorder genDriverRecorder() {
    if (driverRecorderType.equalsIgnoreCase("true")) {
      return DriverQueryStatisticsRecorderImpl.getInstance();
    } else {
      return DriverQueryStatisticsRecorderDummy.getInstance();
    }
  }

  /** Returns the shared driver-side query recorder (real or no-op). */
  public static QueryStatisticsRecorder createDriverRecorder() {
    return driverRecorder;
  }

  /**
   * Creates a per-query executor-side recorder.  Note: unlike the driver
   * recorder, the flag is re-read on every call.
   */
  public static QueryStatisticsRecorder createExecutorRecorder(String queryId) {
    String queryStatisticsRecorderType = CarbonProperties.getInstance()
        .getProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
            CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT);
    if (queryStatisticsRecorderType.equalsIgnoreCase("true")) {
      return new QueryStatisticsRecorderImpl(queryId);
    } else {
      return new QueryStatisticsRecorderDummy();
    }
  }
}
984 | //
// UINavigationBar+Theming.h
// DynamicThemesExample
//
// Created by <NAME> on 1/2/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
@import UIKit;
NS_ASSUME_NONNULL_BEGIN
@interface UINavigationBar (Theming)

/// Returns the \c UIBarStyle appropriate for the given bar color
/// (presumably chosen from the color's brightness — confirm in implementation).
- (UIBarStyle)mtf_barStyleForColor:(UIColor *)color;

/// Applies the given color as the navigation bar's shadow color.
- (void)mtf_setShadowColor:(UIColor *)color;

@end
NS_ASSUME_NONNULL_END
| 147 |
358 | from ...survey import BaseSurvey
from ...utils.code_utils import deprecate_class
class Survey(BaseSurvey):
    """Base gravity survey.

    Wraps a single ``source_field`` whose first receiver supplies the
    receiver locations and requested data components. The former class-level
    ``receiver_locations``/``components`` attributes were removed: both were
    immediately shadowed by the identically-named properties below, so they
    were dead code.
    """

    rxType = None  #: receiver type

    def __init__(self, source_field, **kwargs):
        # The source field carries the receiver list; everything else is
        # derived from it via the properties below.
        self.source_field = source_field
        BaseSurvey.__init__(self, **kwargs)

    def eval(self, fields):
        """Identity mapping: the computed fields already are the predicted data."""
        return fields

    @property
    def nRx(self):
        """Number of receiver locations (rows of the locations array)."""
        return self.source_field.receiver_list[0].locations.shape[0]

    @property
    def receiver_locations(self):
        """Receiver locations of the first receiver, shape ``(nRx, dim)``."""
        return self.source_field.receiver_list[0].locations

    @property
    def nD(self):
        """Total number of data: locations times requested components."""
        return len(self.receiver_locations) * len(self.components)

    @property
    def components(self):
        """Data components requested by the first receiver (e.g. ``["gz"]``)."""
        return self.source_field.receiver_list[0].components

    @property
    def Qfx(self):
        """Cached interpolation matrix: mesh x-faces -> receiver locations.

        NOTE(review): ``self.prob`` is not set by this class — it is assumed
        to be attached externally by the simulation; confirm against callers.
        """
        if getattr(self, "_Qfx", None) is None:
            self._Qfx = self.prob.mesh.getInterpolationMat(
                self.receiver_locations, "Fx"
            )
        return self._Qfx

    @property
    def Qfy(self):
        """Cached interpolation matrix: mesh y-faces -> receiver locations."""
        if getattr(self, "_Qfy", None) is None:
            self._Qfy = self.prob.mesh.getInterpolationMat(
                self.receiver_locations, "Fy"
            )
        return self._Qfy

    @property
    def Qfz(self):
        """Cached interpolation matrix: mesh z-faces -> receiver locations."""
        if getattr(self, "_Qfz", None) is None:
            self._Qfz = self.prob.mesh.getInterpolationMat(
                self.receiver_locations, "Fz"
            )
        return self._Qfz

    def projectFields(self, u):
        r"""Project computed fields onto the data space.

        The field ``u["G"]`` is interpolated from mesh faces onto the
        receiver locations, one Cartesian component at a time:

        .. math::

            \mathbf{g}_{rec} = \mathbf{P} \mathbf{G}

        Returns a dict with keys ``"gx"``, ``"gy"`` and ``"gz"``.
        (Docstring is a raw string so the LaTeX backslashes are not treated
        as escape sequences.)
        """
        # TODO: There can be some different types of data like |B| or B
        gfx = self.Qfx * u["G"]
        gfy = self.Qfy * u["G"]
        gfz = self.Qfz * u["G"]
        fields = {"gx": gfx, "gy": gfy, "gz": gfz}
        return fields
@deprecate_class(removal_version="0.16.0", error=True)
class LinearSurvey(Survey):
    """Deprecated alias of :class:`Survey`.

    Kept only so old imports fail with a helpful message; ``error=True``
    suggests any use raises via ``deprecate_class`` (confirm in
    ``utils.code_utils``). Scheduled for removal in 0.16.0.
    """

    pass
| 1,049 |
1,761 | # Face recognition with LBP descriptors.
# See <NAME>'s "Face Recognition with Local Binary Patterns".
#
# Before running the example:
# 1) Download the AT&T faces database http://www.cl.cam.ac.uk/Research/DTG/attarchive/pub/data/att_faces.zip
# 2) Exract and copy the orl_faces directory to the SD card root.
#
# NOTE: This is just a PoC implementation of the paper mentioned above, it does Not work well in real life conditions.
import sensor, time, image
SUB = "s2"
NUM_SUBJECTS = 5
NUM_SUBJECTS_IMGS = 10

# Reference descriptor: LBP histogram of the first image of subject SUB.
reference = image.Image("orl_faces/%s/1.pgm" % (SUB)).mask_ellipse()
d0 = reference.find_lbp((0, 0, reference.width(), reference.height()))
reference = None  # drop the image so its memory can be reclaimed before the loop

print("")
for subject in range(1, NUM_SUBJECTS + 1):
    # Accumulate the descriptor distance over every remaining shot of this subject.
    total_dist = 0
    for shot in range(2, NUM_SUBJECTS_IMGS + 1):
        candidate = image.Image("orl_faces/s%d/%d.pgm" % (subject, shot)).mask_ellipse()
        d1 = candidate.find_lbp((0, 0, candidate.width(), candidate.height()))
        total_dist += image.match_descriptor(d0, d1)
    print("Average dist for subject %d: %d" % (subject, total_dist / NUM_SUBJECTS_IMGS))
| 408 |
357 | <filename>vmdir/common/opensslutil.c
/*
* Copyright © 2018 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an “AS IS” BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#include "includes.h"
/*
 * Computes the message digest of pData (dataSize bytes) with digestMethod
 * (e.g. EVP_sha256()). On success, *ppMD receives a newly allocated buffer
 * holding the digest and *pMDSize its length; the caller owns *ppMD and must
 * free it (VMDIR_SAFE_FREE_MEMORY).
 *
 * Uses the OpenSSL 1.0-style stack-allocated EVP_MD_CTX; the cleanup label
 * releases the context on every path.
 */
DWORD
VmDirComputeMessageDigest(
    const EVP_MD* digestMethod,
    const unsigned char* pData,
    size_t dataSize,
    unsigned char** ppMD,
    size_t* pMDSize
    )
{
    DWORD dwError = 0;
    EVP_MD_CTX mdCtx = {0};
    unsigned char md[EVP_MAX_MD_SIZE] = {0};
    unsigned int mdSize = 0;
    unsigned char* pMD = NULL;

    if (!digestMethod || !pData || !ppMD || !pMDSize)
    {
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_INVALID_PARAMETER);
    }

    /* init -> update -> final: standard EVP digest sequence */
    EVP_MD_CTX_init(&mdCtx);
    if (EVP_DigestInit_ex(&mdCtx, digestMethod, NULL) == 0)
    {
        VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "%s: EVP_DigestInit_ex returned 0", __FUNCTION__);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }
    if (EVP_DigestUpdate(&mdCtx, pData, dataSize) == 0)
    {
        VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "%s: EVP_DigestUpdate returned 0", __FUNCTION__);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }
    if (EVP_DigestFinal_ex(&mdCtx, md, &mdSize) == 0)
    {
        VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "%s: EVP_DigestFinal_ex returned 0", __FUNCTION__);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    /* copy the stack digest into a heap buffer the caller can keep */
    dwError = VmDirAllocateAndCopyMemory(md, mdSize, (PVOID*)&pMD);
    BAIL_ON_VMDIR_ERROR(dwError);

    *ppMD = pMD;
    *pMDSize = mdSize;

cleanup:
    EVP_MD_CTX_cleanup(&mdCtx);
    return dwError;

error:
    VMDIR_LOG_ERROR(
            VMDIR_LOG_MASK_ALL,
            "%s failed with error (%d)",
            __FUNCTION__,
            dwError);
    VMDIR_SAFE_FREE_MEMORY(pMD);
    goto cleanup;
}
/*
 * Parses a PEM-encoded RSA public key (SubjectPublicKeyInfo, i.e. a
 * "BEGIN PUBLIC KEY" block read via PEM_read_bio_RSA_PUBKEY) into a new
 * EVP_PKEY. On success the caller owns *ppPubKey and must EVP_PKEY_free it.
 *
 * Fix: the failure log for the PEM parse previously named
 * "PEM_read_RSA_PUBKEY", but the function called is PEM_read_bio_RSA_PUBKEY.
 */
DWORD
VmDirConvertPEMToPublicKey(
    PCSTR pszPEM,
    EVP_PKEY** ppPubKey
    )
{
    DWORD dwError = 0;
    int retVal = 0;
    BIO* bio = NULL;
    RSA* rsa = NULL;
    EVP_PKEY* pPubKey = NULL;

    if (IsNullOrEmptyString(pszPEM) || !ppPubKey)
    {
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_INVALID_PARAMETER);
    }

    /* feed the PEM text into a memory BIO for the PEM reader */
    bio = BIO_new(BIO_s_mem());
    if (!bio)
    {
        VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "%s: BIO_new returned NULL", __FUNCTION__);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    retVal = BIO_puts(bio, pszPEM);
    if (retVal <= 0)
    {
        VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "%s: BIO_puts returned %d", __FUNCTION__, retVal);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    PEM_read_bio_RSA_PUBKEY(bio, &rsa, NULL, NULL);
    if (!rsa)
    {
        VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "%s: PEM_read_bio_RSA_PUBKEY returned NULL", __FUNCTION__);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    pPubKey = EVP_PKEY_new();
    if (!pPubKey)
    {
        VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "%s: EVP_PKEY_new returned NULL", __FUNCTION__);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    /* on success, pPubKey takes ownership of rsa (no separate RSA_free) */
    retVal = EVP_PKEY_assign_RSA(pPubKey, rsa);
    if (retVal != 1)
    {
        VMDIR_LOG_ERROR(VMDIR_LOG_MASK_ALL, "%s: EVP_PKEY_assign_RSA returned %d", __FUNCTION__, retVal);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    *ppPubKey = pPubKey;

cleanup:
    BIO_free(bio);
    return dwError;

error:
    VMDIR_LOG_ERROR(
            VMDIR_LOG_MASK_ALL,
            "%s failed with error (%d)",
            __FUNCTION__,
            dwError);
    /* reached only before ownership transfer succeeded, so no double free */
    RSA_free(rsa);
    EVP_PKEY_free(pPubKey);
    goto cleanup;
}
/*
 * Verifies an RSA PKCS#1 v1.5 signature: pData is digested with digestMethod
 * and the result checked against pSignature using pPubKey. *pVerified is set
 * to TRUE/FALSE on a conclusive answer; any OpenSSL failure bails with
 * VMDIR_ERROR_SSL.
 *
 * Fix: the parameter validation previously tested "!pData || !pData"
 * (duplicated), leaving pSignature unchecked; it now validates pSignature.
 */
DWORD
VmDirVerifyRSASignature(
    EVP_PKEY* pPubKey,
    const EVP_MD* digestMethod,
    const unsigned char* pData,
    size_t dataSize,
    const unsigned char* pSignature,
    size_t signatureSize,
    PBOOLEAN pVerified
    )
{
    DWORD dwError = 0;
    int retVal = 0;
    unsigned char* pMd = NULL;
    size_t mdSize = 0;
    EVP_PKEY_CTX* pPubKeyCtx = NULL;

    if (!pPubKey || !digestMethod || !pData || !pSignature || !pVerified)
    {
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_INVALID_PARAMETER);
    }

    /* hash the message first; EVP_PKEY_verify checks the signature over the digest */
    dwError = VmDirComputeMessageDigest(digestMethod, pData, dataSize, &pMd, &mdSize);
    BAIL_ON_VMDIR_ERROR(dwError);

    pPubKeyCtx = EVP_PKEY_CTX_new(pPubKey, NULL);
    if (!pPubKeyCtx)
    {
        VMDIR_LOG_ERROR(
                VMDIR_LOG_MASK_ALL,
                "%s: EVP_PKEY_CTX_new returned NULL",
                __FUNCTION__);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    retVal = EVP_PKEY_verify_init(pPubKeyCtx);
    if (retVal <= 0)
    {
        VMDIR_LOG_ERROR(
                VMDIR_LOG_MASK_ALL,
                "%s: EVP_PKEY_verify_init returned %d",
                __FUNCTION__,
                retVal);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    retVal = EVP_PKEY_CTX_set_rsa_padding(pPubKeyCtx, RSA_PKCS1_PADDING);
    if (retVal <= 0)
    {
        VMDIR_LOG_ERROR(
                VMDIR_LOG_MASK_ALL,
                "%s: EVP_PKEY_CTX_set_rsa_padding returned %d",
                __FUNCTION__,
                retVal);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    retVal = EVP_PKEY_CTX_set_signature_md(pPubKeyCtx, digestMethod);
    if (retVal <= 0)
    {
        VMDIR_LOG_ERROR(
                VMDIR_LOG_MASK_ALL,
                "%s: EVP_PKEY_CTX_set_signature_md returned %d",
                __FUNCTION__,
                retVal);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

    /* 1 = valid signature, 0 = invalid signature, negative = error */
    retVal = EVP_PKEY_verify(pPubKeyCtx, pSignature, signatureSize, pMd, mdSize);
    if (retVal == 1)
    {
        *pVerified = TRUE;
    }
    else if (retVal == 0)
    {
        *pVerified = FALSE;
    }
    else
    {
        VMDIR_LOG_ERROR(
                VMDIR_LOG_MASK_ALL,
                "%s: EVP_PKEY_verify returned %d",
                __FUNCTION__,
                retVal);
        BAIL_WITH_VMDIR_ERROR(dwError, VMDIR_ERROR_SSL);
    }

cleanup:
    VMDIR_SAFE_FREE_MEMORY(pMd);
    EVP_PKEY_CTX_free(pPubKeyCtx);
    return dwError;

error:
    VMDIR_LOG_ERROR(
            VMDIR_LOG_MASK_ALL,
            "%s failed with error (%d)",
            __FUNCTION__,
            dwError);
    goto cleanup;
}
| 3,654 |
314 | <reponame>liutong-cnu/rocketmq-client-cpp
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <string.h>
#include <stdint.h>
#include "hmac.h"
#include "sha1.h"
#include "sha256.h"
#include "sha512.h"
#ifdef __cplusplus
namespace rocketmqSignature{
#endif
#define IPAD 0x36
#define OPAD 0x5c
/*
 * HMAC-SHA1: ret_buf <- H((K ^ opad) || H((K ^ ipad) || data)), per RFC 2104.
 * ret_buf must hold SHA1_DIGEST_SIZE (20) bytes. Returns 0 on success, -1 on
 * NULL arguments. Keys longer than the 64-byte SHA-1 block are first reduced
 * to their SHA-1 digest, as the spec requires.
 */
int hmac_sha1(const void *key, size_t key_len, const void *data, size_t data_len, void *ret_buf)
{
  uint32_t i;
  struct sha1_ctx inner;
  struct sha1_ctx outer;
  struct sha1_ctx key_hash;
  char ipad[64] = {0};
  char opad[64] = {0};
  char key_buf[SHA1_DIGEST_SIZE] = {0};
  char inner_buf[SHA1_DIGEST_SIZE] = {0};

  if (key == NULL || data == NULL || ret_buf == NULL) return -1;

  /* over-long keys are replaced by their digest */
  if (key_len > 64) {
    sha1_init_ctx(&key_hash);
    sha1_process_bytes(key, key_len, &key_hash);
    sha1_finish_ctx(&key_hash, key_buf);
    key = key_buf;
    key_len = SHA1_DIGEST_SIZE;
  }

  sha1_init_ctx (&inner);
  /* build the two padded keys; bytes past key_len use the bare pad constants
     (equivalent to XOR-ing a zero-extended key) */
  for (i = 0; i < 64; i++) {
    if (i < key_len) {
      ipad[i] = ((const char *)key)[i] ^ IPAD;
      opad[i] = ((const char *)key)[i] ^ OPAD;
    } else {
      ipad[i] = IPAD;
      opad[i] = OPAD;
    }
  }

  /* inner hash: H(ipad-key || data) */
  sha1_process_block (ipad, 64, &inner);
  sha1_process_bytes (data, data_len, &inner);
  sha1_finish_ctx (&inner, inner_buf);

  /* outer hash: H(opad-key || inner digest) */
  sha1_init_ctx (&outer);
  sha1_process_block (opad, 64, &outer);
  sha1_process_bytes (inner_buf, SHA1_DIGEST_SIZE, &outer);
  sha1_finish_ctx (&outer, ret_buf);

  return 0;
}
/*
 * HMAC-SHA256 (RFC 2104 construction with SHA-256). ret_buf must hold
 * SHA256_DIGEST_SIZE (32) bytes. Returns 0 on success, -1 on NULL arguments.
 * SHA-256 shares SHA-1's 64-byte block size, so the padding logic is the
 * same as hmac_sha1 above.
 */
int hmac_sha256(const void *key, size_t key_len, const void *data, size_t data_len, void *ret_buf)
{
  uint32_t i;
  struct sha256_ctx inner;
  struct sha256_ctx outer;
  struct sha256_ctx key_hash;
  char ipad[64] = {0};
  char opad[64] = {0};
  char key_buf[SHA256_DIGEST_SIZE] = {0};
  char inner_buf[SHA256_DIGEST_SIZE] = {0};

  if (key == NULL || data == NULL || ret_buf == NULL) return -1;

  /* over-long keys are replaced by their digest */
  if (key_len > 64) {
    sha256_init_ctx(&key_hash);
    sha256_process_bytes(key, key_len, &key_hash);
    sha256_finish_ctx(&key_hash, key_buf);
    key = key_buf;
    key_len = SHA256_DIGEST_SIZE;
  }

  sha256_init_ctx (&inner);
  /* build ipad/opad keys, zero-extending short keys */
  for (i = 0; i < 64; i++) {
    if (i < key_len) {
      ipad[i] = ((const char *)key)[i] ^ IPAD;
      opad[i] = ((const char *)key)[i] ^ OPAD;
    } else {
      ipad[i] = IPAD;
      opad[i] = OPAD;
    }
  }

  /* inner hash: H(ipad-key || data) */
  sha256_process_block (ipad, 64, &inner);
  sha256_process_bytes (data, data_len, &inner);
  sha256_finish_ctx (&inner, inner_buf);

  /* outer hash: H(opad-key || inner digest) */
  sha256_init_ctx (&outer);
  sha256_process_block (opad, 64, &outer);
  sha256_process_bytes (inner_buf, SHA256_DIGEST_SIZE, &outer);
  sha256_finish_ctx (&outer, ret_buf);

  return 0;
}
/*
 * HMAC-SHA512 (RFC 2104 construction with SHA-512). ret_buf must hold
 * SHA512_DIGEST_SIZE (64) bytes. Returns 0 on success, -1 on NULL arguments.
 * Note SHA-512's block size is 128 bytes, hence the larger pads and the
 * different key-length threshold compared to the SHA-1/SHA-256 variants.
 */
int hmac_sha512(const void *key, size_t key_len, const void *data, size_t data_len, void *ret_buf)
{
  uint32_t i;
  struct sha512_ctx inner;
  struct sha512_ctx outer;
  struct sha512_ctx key_hash;
  char ipad[128] = {0};
  char opad[128] = {0};
  char key_buf[SHA512_DIGEST_SIZE] = {0};
  char inner_buf[SHA512_DIGEST_SIZE] = {0};

  if (key == NULL || data == NULL || ret_buf == NULL) return -1;

  /* over-long keys are replaced by their digest */
  if (key_len > 128) {
    sha512_init_ctx(&key_hash);
    sha512_process_bytes(key, key_len, &key_hash);
    sha512_finish_ctx(&key_hash, key_buf);
    key = key_buf;
    key_len = SHA512_DIGEST_SIZE;
  }

  sha512_init_ctx (&inner);
  /* build ipad/opad keys, zero-extending short keys */
  for (i = 0; i < 128; i++) {
    if (i < key_len) {
      ipad[i] = ((const char *)key)[i] ^ IPAD;
      opad[i] = ((const char *)key)[i] ^ OPAD;
    } else {
      ipad[i] = IPAD;
      opad[i] = OPAD;
    }
  }

  /* inner hash: H(ipad-key || data) */
  sha512_process_block (ipad, 128, &inner);
  sha512_process_bytes (data, data_len, &inner);
  sha512_finish_ctx (&inner, inner_buf);

  /* outer hash: H(opad-key || inner digest) */
  sha512_init_ctx (&outer);
  sha512_process_block (opad, 128, &outer);
  sha512_process_bytes (inner_buf, SHA512_DIGEST_SIZE, &outer);
  sha512_finish_ctx (&outer, ret_buf);

  return 0;
}
#ifdef __cplusplus
}
#endif | 1,866 |
1,664 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.utils;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
/**
 * Compares module (service) versions.
 * Call {@link #parse(String)} to validate a version string and obtain a parsed
 * {@code ModuleVersion}; parse the other version the same way, then use
 * {@link #compareTo(ModuleVersion)} on the two parsed objects to get the
 * comparison result.
 */
public class ModuleVersion implements Comparable<ModuleVersion> {

  // Accepted formats. Dots are escaped so only a literal '.' separates the
  // numeric fields (previously '.' matched any character, so strings like
  // "1a2b3c4-b5" were accepted):
  //   N.N.N.N-hN-bN  (with hotfix number)
  //   N.N.N.N-bN     (without hotfix; hotfix defaults to 0)
  private static final String VERSION_WITH_HOTFIX_AND_BUILD_PATTERN =
      "^([0-9]+)\\.([0-9]+)\\.([0-9]+)\\.([0-9]+)-h([0-9]+)-b([0-9]+)";
  private static final String VERSION_WITH_BUILD_PATTERN =
      "^([0-9]+)\\.([0-9]+)\\.([0-9]+)\\.([0-9]+)-b([0-9]+)";

  // Compiled patterns for the two formats above.
  private static final Pattern PATTERN_WITH_HOTFIX = Pattern.compile(VERSION_WITH_HOTFIX_AND_BUILD_PATTERN);
  private static final Pattern PATTERN_WITHOUT_HOTFIX = Pattern.compile(VERSION_WITH_BUILD_PATTERN);

  // Version components, ordered from most to least significant.
  private int apacheMajor;
  private int apacheMinor;
  private int internalMinor;
  private int internalMaint;
  private int hotfix;
  private int build;

  public ModuleVersion(int apacheMajor, int apacheMinor, int internalMinor, int internalMaint, int hotfix, int build) {
    this.apacheMajor = apacheMajor;
    this.apacheMinor = apacheMinor;
    this.internalMinor = internalMinor;
    this.internalMaint = internalMaint;
    this.hotfix = hotfix;
    this.build = build;
  }

  /**
   * Validates and parses a module version string.
   *
   * @param moduleVersion version string in N.N.N.N-bN or N.N.N.N-hN-bN format
   * @return a {@code ModuleVersion} holding the parsed components; a missing
   *         hotfix segment is treated as hotfix 0
   * @throws IllegalArgumentException if the string is null, empty or malformed
   */
  public static ModuleVersion parse(String moduleVersion) {
    Matcher versionMatcher = validateModuleVersion(moduleVersion);
    ModuleVersion result = null;
    if (versionMatcher.pattern().pattern().equals(VERSION_WITH_HOTFIX_AND_BUILD_PATTERN)) {
      result = new ModuleVersion(Integer.parseInt(versionMatcher.group(1)), Integer.parseInt(versionMatcher.group(2)),
          Integer.parseInt(versionMatcher.group(3)), Integer.parseInt(versionMatcher.group(4)),
          Integer.parseInt(versionMatcher.group(5)), Integer.parseInt(versionMatcher.group(6)));
    } else {
      result = new ModuleVersion(Integer.parseInt(versionMatcher.group(1)), Integer.parseInt(versionMatcher.group(2)),
          Integer.parseInt(versionMatcher.group(3)), Integer.parseInt(versionMatcher.group(4)), 0,
          Integer.parseInt(versionMatcher.group(5)));
    }
    return result;
  }

  /**
   * Validates that the version is non-empty and matches one of the supported
   * formats, returning a matcher that has already matched.
   * Note: {@code find()} with the '^' anchor tolerates trailing text after a
   * valid prefix, which is kept for backward compatibility.
   *
   * @param version version string to check
   * @return a Matcher positioned on the successful match
   * @throws IllegalArgumentException if version is empty/null/not valid
   */
  private static Matcher validateModuleVersion(String version) {
    if (version == null || version.isEmpty()) {
      throw new IllegalArgumentException("Module version can't be empty or null");
    }

    String moduleVersion = version.trim();
    Matcher versionMatcher = PATTERN_WITH_HOTFIX.matcher(moduleVersion);
    if (!versionMatcher.find()) {
      versionMatcher = PATTERN_WITHOUT_HOTFIX.matcher(moduleVersion);
      if (!versionMatcher.find()) {
        throw new IllegalArgumentException("Wrong format for module version, should be N.N.N.N-bN or N.N.N.N-hN-bN");
      }
    }
    return versionMatcher;
  }

  @Override
  public int compareTo(ModuleVersion other) {
    // Compare component by component, most significant first.
    int result = this.apacheMajor - other.apacheMajor;
    if (result == 0) {
      result = this.apacheMinor - other.apacheMinor;
      if (result == 0) {
        result = this.internalMinor - other.internalMinor;
        if (result == 0) {
          result = this.internalMaint - other.internalMaint;
          if (result == 0) {
            result = this.hotfix - other.hotfix;
            if (result == 0) {
              result = this.build - other.build;
            }
          }
        }
      }
    }
    return result > 0 ? 1 : result < 0 ? -1 : 0;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;

    ModuleVersion that = (ModuleVersion) o;

    if (apacheMajor != that.apacheMajor) return false;
    if (apacheMinor != that.apacheMinor) return false;
    if (build != that.build) return false;
    if (hotfix != that.hotfix) return false;
    if (internalMaint != that.internalMaint) return false;
    if (internalMinor != that.internalMinor) return false;

    return true;
  }

  @Override
  public int hashCode() {
    int result = apacheMajor;
    result = 31 * result + apacheMinor;
    result = 31 * result + internalMinor;
    result = 31 * result + internalMaint;
    result = 31 * result + hotfix;
    result = 31 * result + build;
    return result;
  }
}
| 2,009 |
335 | <reponame>Safal08/Hacktoberfest-1
{
"word": "Exceptional",
"definitions": [
"An item in a company's accounts arising from its normal activity but much larger or smaller than usual."
],
"parts-of-speech": "Noun"
} | 89 |
836 | <reponame>kclamar/vedo
"""Mesh smoothing with two different methods"""
from vedo import Plotter, dataurl
plt = Plotter(N=2)
# Load a mesh and show it
vol = plt.load(dataurl+"embryo.tif")
m0 = vol.isosurface().normalize().lw(0.1).c("violet")
plt.show(m0, __doc__+"\nOriginal Mesh:", at=0)
plt.background([0.8, 1, 1], at=0) # set first renderer color
# Smooth the mesh
m1 = m0.clone().smooth(niter=20).color("lg")
plt.show(m1, "Polygons are smoothed:", at=1,
viewup='z', zoom=1.5, interactive=True).close() | 223 |
1,353 | /*
* Copyright (c) 2014, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.twelvemonkeys.imageio.plugins.sgi;
import javax.imageio.IIOException;
import javax.imageio.stream.ImageInputStream;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
/**
 * Parsed SGI image file header. Instances are created via {@link #read},
 * which consumes the fixed 512-byte header from an {@code ImageInputStream}.
 */
final class SGIHeader {
    // Fields mirror the on-disk header; see the struct layout in read().
    private int compression;
    private int bytesPerPixel;
    private int dimensions; // reported via toString() only; no accessor
    private int width;
    private int height;
    private int channels;
    private int minValue;
    private int maxValue;
    private String name;
    private int colorMode;

    public int getCompression() {
        return compression;
    }

    public int getBytesPerPixel() {
        return bytesPerPixel;
    }

    public int getWidth() {
        return width;
    }

    public int getHeight() {
        return height;
    }

    public int getChannels() {
        return channels;
    }

    public int getMinValue() {
        return minValue;
    }

    public int getMaxValue() {
        return maxValue;
    }

    public String getName() {
        return name;
    }

    public int getColorMode() {
        return colorMode;
    }

    @Override
    public String toString() {
        return "SGIHeader{" +
                "compression=" + compression +
                ", bytesPerPixel=" + bytesPerPixel +
                ", dimensions=" + dimensions +
                ", width=" + width +
                ", height=" + height +
                ", channels=" + channels +
                ", minValue=" + minValue +
                ", maxValue=" + maxValue +
                ", name='" + name + '\'' +
                ", colorMode=" + colorMode +
                '}';
    }

    /**
     * Reads and validates an SGI header from the stream.
     *
     * @param imageInput stream positioned at the start of the file
     * @return the parsed header; the stream is left just past the 512-byte header
     * @throws IIOException if the magic number does not match {@code SGI.MAGIC}
     * @throws IOException on any underlying read failure
     */
    public static SGIHeader read(final ImageInputStream imageInput) throws IOException {
        // typedef struct _SGIHeader
        // {
        //    SHORT Magic;             /* Identification number (474) */
        //    CHAR Storage;            /* Compression flag */
        //    CHAR Bpc;                /* Bytes per pixel */
        //    WORD Dimension;          /* Number of image dimensions */
        //    WORD XSize;              /* Width of image in pixels */
        //    WORD YSize;              /* Height of image in pixels */
        //    WORD ZSize;              /* Number of bit channels */
        //    LONG PixMin;             /* Smallest pixel value */
        //    LONG PixMax;             /* Largest pixel value */
        //    CHAR Dummy1[4];          /* Not used */
        //    CHAR ImageName[80];      /* Name of image */
        //    LONG ColorMap;           /* Format of pixel data */
        //    CHAR Dummy2[404];        /* Not used */
        // } SGIHEAD;

        short magic = imageInput.readShort();
        if (magic != SGI.MAGIC) {
            throw new IIOException(String.format("Not an SGI image. Expected SGI magic %04x, read %04x", SGI.MAGIC, magic));
        }

        SGIHeader header = new SGIHeader();

        header.compression = imageInput.readUnsignedByte();
        header.bytesPerPixel = imageInput.readUnsignedByte();
        header.dimensions = imageInput.readUnsignedShort();
        header.width = imageInput.readUnsignedShort();
        header.height = imageInput.readUnsignedShort();
        header.channels = imageInput.readUnsignedShort();
        header.minValue = imageInput.readInt();
        header.maxValue = imageInput.readInt();
        imageInput.readInt(); // Ignore (Dummy1)

        byte[] nameBytes = new byte[80];
        imageInput.readFully(nameBytes);
        header.name = toAsciiString(nameBytes);

        header.colorMode = imageInput.readInt();
        imageInput.skipBytes(404); // Dummy2 padding to the full 512-byte header

        return header;
    }

    /**
     * Decodes a NUL-padded ASCII field, keeping only the bytes before the
     * first 0. Uses {@link StandardCharsets#US_ASCII} instead of a
     * {@code Charset.forName} lookup.
     */
    private static String toAsciiString(final byte[] bytes) {
        // Find null-terminator
        int len = bytes.length;
        for (int i = 0; i < bytes.length; i++) {
            if (bytes[i] == 0) {
                len = i;
                break;
            }
        }

        return new String(bytes, 0, len, StandardCharsets.US_ASCII);
    }
}
| 2,190 |
#ifndef wren_core_h
#define wren_core_h

#include "wren_vm.h"

// This module defines the built-in classes and their primitive methods that
// are implemented directly in C code. Some languages try to implement as much
// of the core library itself in the primary language instead of in the host
// language.
//
// With Wren, we try to do as much of it in C as possible. Primitive methods
// are always faster than code written in Wren, and it minimizes startup time
// since we don't have to parse, compile, and execute Wren code.
//
// There is one limitation, though. Methods written in C cannot call Wren ones.
// They can only be the top of the callstack, and immediately return. This
// makes it difficult to have primitive methods that rely on polymorphic
// behavior. For example, `IO.write` should call `toString` on its argument,
// including user-defined `toString` methods on user-defined classes.

// Registers the built-in core classes and their primitive methods on [vm].
void wrenInitializeCore(WrenVM* vm);

#endif
849 | package milkman.ui.plugin.rest.postman.importers;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.milkman.rest.postman.schema.v1.PostmanCollection100;
import com.milkman.rest.postman.schema.v1.Request;
import milkman.domain.Collection;
import milkman.domain.Folder;
import milkman.domain.RequestContainer;
import milkman.ui.plugin.rest.domain.HeaderEntry;
import milkman.ui.plugin.rest.domain.RestBodyAspect;
import milkman.ui.plugin.rest.domain.RestHeaderAspect;
import milkman.ui.plugin.rest.domain.RestRequestContainer;
/**
 * Imports Postman v1.0 collection JSON into the milkman domain model.
 * Postman v1.0 has no nested folders, so folders are imported as a flat list.
 */
public class PostmanImporterV10 {

	/**
	 * Parses and converts a Postman v1.0 collection.
	 *
	 * @param json raw collection JSON
	 * @return the converted collection
	 * @throws IllegalArgumentException if the collection contains no requests
	 * @throws Exception on JSON parse failures
	 */
	public Collection importCollection(String json) throws Exception {
		PostmanCollection100 pmCollection = readJson(json, PostmanCollection100.class);
		if (pmCollection.getRequests().isEmpty()) {
			throw new IllegalArgumentException("Empty collection");
		}
		return convertToDomain(pmCollection);
	}

	/** Deserializes {@code json} into {@code type}, ignoring unknown properties. */
	private <T> T readJson(String json, Class<T> type) throws IOException {
		// JsonParseException/JsonMappingException are IOException subclasses,
		// so a single throws clause covers them.
		ObjectMapper mapper = new ObjectMapper();
		mapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES);
		return mapper.readValue(json, type);
	}

	/** Maps a parsed Postman collection onto the domain {@link Collection}. */
	private Collection convertToDomain(PostmanCollection100 pmCollection) {
		List<RequestContainer> requests = new LinkedList<>();
		for (Request container : pmCollection.getRequests()) {
			requests.addAll(convertToDomain(container));
		}

		// Postman v1.0 doesn't seem to support subfolders, so we just return a
		// flat list of all folders.
		List<Folder> folders = new LinkedList<>();
		if (pmCollection.getFolders() != null) {
			for (com.milkman.rest.postman.schema.v1.Folder postmanFolder : pmCollection.getFolders()) {
				folders.add(convertToDomain(postmanFolder));
			}
		}

		return new Collection(UUID.randomUUID().toString(), pmCollection.getName(), false, requests, folders);
	}

	/** Converts a Postman folder; only the ordered request ids are carried over. */
	private Folder convertToDomain(com.milkman.rest.postman.schema.v1.Folder postmanFolder) {
		List<String> requests = new LinkedList<>();
		if (postmanFolder.getOrder() != null) {
			requests.addAll(postmanFolder.getOrder());
		}
		// Fixed raw type: the child-folder list is now a typed empty list.
		return new Folder(postmanFolder.getId(), postmanFolder.getName(), new LinkedList<>(), requests);
	}

	/** Converts one Postman request into a REST request container with header and body aspects. */
	private List<RequestContainer> convertToDomain(Request pmItem) {
		List<RequestContainer> result = new LinkedList<>();

		RestRequestContainer request = new RestRequestContainer(pmItem.getName(), pmItem.getUrl(), pmItem.getMethod().toString());
		request.setId(pmItem.getId());
		request.setInStorage(true);

		// Headers; guarded because the source JSON may omit headerData entirely.
		RestHeaderAspect headers = new RestHeaderAspect();
		if (pmItem.getHeaderData() != null) {
			pmItem.getHeaderData().forEach(h -> headers.getEntries()
					.add(new HeaderEntry(UUID.randomUUID().toString(), h.getKey(), h.getValue(), true)));
		}
		request.addAspect(headers);

		// Raw body, if present.
		RestBodyAspect body = new RestBodyAspect();
		if (pmItem.getRawModeData() != null) {
			body.setBody(pmItem.getRawModeData());
		}
		request.addAspect(body);

		result.add(request);
		return result;
	}
}
| 1,171 |
476 | <gh_stars>100-1000
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator.scalar;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.function.Description;
import io.prestosql.spi.function.ScalarFunction;
import io.prestosql.spi.function.SqlType;
import io.prestosql.spi.function.TypeParameter;
import io.prestosql.spi.type.MapType;
import io.prestosql.spi.type.Type;
@Description("Creates an empty map")
@ScalarFunction("map")
public final class EmptyMapConstructor
{
    // Built once per bound map type in the constructor and returned for every call.
    private final Block emptyMap;

    public EmptyMapConstructor(@TypeParameter("map(unknown,unknown)") Type mapType)
    {
        // Build a block holding a single, empty map entry
        // (beginBlockEntry/closeEntry with nothing written), then extract that
        // entry; it becomes the shared empty-map value.
        BlockBuilder mapBlockBuilder = mapType.createBlockBuilder(null, 1);
        mapBlockBuilder.beginBlockEntry();
        mapBlockBuilder.closeEntry();
        emptyMap = ((MapType) mapType).getObject(mapBlockBuilder.build(), 0);
    }

    // Zero-argument map() SQL function: always returns the cached empty map.
    @SqlType("map(unknown,unknown)")
    public Block map()
    {
        return emptyMap;
    }
}
| 513 |
5,349 | # -*- coding: utf-8 -*-
import pytest
from sktime.benchmarking.strategies import TSCStrategy
from sktime.benchmarking.tasks import TSCTask
from sktime.datasets import load_gunpoint
from sktime.datasets import load_italy_power_demand
from sktime.classification.compose import ComposableTimeSeriesForestClassifier
# Shared classifier fixture; n_estimators is kept tiny so the tests run fast.
classifier = ComposableTimeSeriesForestClassifier(n_estimators=2)
# Dataset loaders the strategy test below is parametrized over.
DATASET_LOADERS = (load_gunpoint, load_italy_power_demand)
# Test output of time-series classification strategies
@pytest.mark.parametrize("dataset", DATASET_LOADERS)
def test_TSCStrategy(dataset):
    """Fit/predict round-trip: predictions match the target column's shape."""
    train_data = dataset(split="train")
    test_data = dataset(split="test")
    strategy = TSCStrategy(classifier)
    task = TSCTask(target="class_val")
    strategy.fit(task, train_data)
    predictions = strategy.predict(test_data)
    assert predictions.shape == test_data[task.target].shape
| 291 |
445 | <filename>src/distributions/categorical.cpp<gh_stars>100-1000
#include <c10/util/ArrayRef.h>
#include <torch/torch.h>
#include "cpprl/distributions/categorical.h"
#include "third_party/doctest.h"
namespace cpprl
{
// Builds a categorical distribution from EITHER probabilities OR logits
// (exactly one must be non-null). Whichever is given, both `probs` and
// `logits` members end up populated and mutually consistent.
Categorical::Categorical(const torch::Tensor *probs,
                         const torch::Tensor *logits)
{
    // XOR check: reject both-null and both-set.
    if ((probs == nullptr) == (logits == nullptr))
    {
        throw std::runtime_error("Either probs or logits is required, but not both");
    }

    if (probs != nullptr)
    {
        if (probs->dim() < 1)
        {
            throw std::runtime_error("Probabilities tensor must have at least one dimension");
        }
        // Normalize along the event (last) dimension.
        this->probs = *probs / probs->sum(-1, true);
        // 1.21e-7 is used as the epsilon to match PyTorch's Python results as closely
        // as possible; clamping also keeps log() finite below.
        this->probs = this->probs.clamp(1.21e-7, 1. - 1.21e-7);
        this->logits = torch::log(this->probs);
    }
    else
    {
        if (logits->dim() < 1)
        {
            throw std::runtime_error("Logits tensor must have at least one dimension");
        }
        // log-softmax: subtracting logsumexp normalizes the logits.
        this->logits = *logits - logits->logsumexp(-1, true);
        this->probs = torch::softmax(this->logits, -1);
    }

    // Cache shape info: last dim is the event count, leading dims (if any)
    // form the batch shape.
    param = probs != nullptr ? *probs : *logits;
    num_events = param.size(-1);
    if (param.dim() > 1)
    {
        batch_shape = param.sizes().vec();
        batch_shape.resize(batch_shape.size() - 1);
    }
}
torch::Tensor Categorical::entropy()
{
    // Shannon entropy H = -sum_i p_i * log(p_i), reduced over the event
    // (last) dimension.
    return -(logits * probs).sum(-1);
}
// Returns the log-probability of each category index in `value` by gathering
// from the stored (normalized) logits along the event dimension.
torch::Tensor Categorical::log_prob(torch::Tensor value)
{
    // gather() requires integral indices with a trailing singleton dim.
    value = value.to(torch::kLong).unsqueeze(-1);
    // Broadcast indices and logits to a common shape before gathering.
    auto broadcasted_tensors = torch::broadcast_tensors({value, logits});
    value = broadcasted_tensors[0];
    // Keep a single index per position along the event dimension.
    value = value.narrow(-1, 0, 1);
    return broadcasted_tensors[1].gather(-1, value).squeeze(-1);
}
// Draws samples (with replacement) shaped sample_shape + batch_shape;
// each element is a category index in [0, num_events).
torch::Tensor Categorical::sample(c10::ArrayRef<int64_t> sample_shape)
{
    // Output shape requested by the caller, extended by the batch shape.
    auto ext_sample_shape = extended_shape(sample_shape);
    auto param_shape = ext_sample_shape;
    param_shape.insert(param_shape.end(), {num_events});
    auto exp_probs = probs.expand(param_shape);
    // multinomial() wants a 2D (rows x categories) tensor, so flatten.
    torch::Tensor probs_2d;
    if (probs.dim() == 1 || probs.size(0) == 1)
    {
        probs_2d = exp_probs.view({-1, num_events});
    }
    else
    {
        // view() needs contiguous memory, and expand() generally produces
        // non-contiguous strides, so force a contiguous copy first.
        probs_2d = exp_probs.contiguous().view({-1, num_events});
    }
    // One draw per row, with replacement.
    auto sample_2d = torch::multinomial(probs_2d, 1, true);
    return sample_2d.contiguous().view(ext_sample_shape);
}
// Unit tests covering construction validation, sampling (range, shape,
// batched inputs), entropy() and log_prob().
TEST_CASE("Categorical")
{
    SUBCASE("Throws when provided both probs and logits")
    {
        auto tensor = torch::Tensor();
        CHECK_THROWS(Categorical(&tensor, &tensor));
    }

    SUBCASE("Sampled numbers are in the right range")
    {
        float probabilities[] = {0.2, 0.2, 0.2, 0.2, 0.2};
        auto probabilities_tensor = torch::from_blob(probabilities, {5});
        auto dist = Categorical(&probabilities_tensor, nullptr);

        auto output = dist.sample({100});
        // Categories are indices 0..4, so every draw must lie in that range.
        auto more_than_4 = output > 4;
        auto less_than_0 = output < 0;
        CHECK(!more_than_4.any().item().toInt());
        CHECK(!less_than_0.any().item().toInt());
    }

    SUBCASE("Sampled tensors are of the right shape")
    {
        float probabilities[] = {0.2, 0.2, 0.2, 0.2, 0.2};
        auto probabilities_tensor = torch::from_blob(probabilities, {5});
        auto dist = Categorical(&probabilities_tensor, nullptr);

        CHECK(dist.sample({20}).sizes().vec() == std::vector<int64_t>{20});
        CHECK(dist.sample({2, 20}).sizes().vec() == std::vector<int64_t>{2, 20});
        CHECK(dist.sample({1, 2, 3, 4, 5}).sizes().vec() == std::vector<int64_t>{1, 2, 3, 4, 5});
    }

    SUBCASE("Multi-dimensional input probabilities are handled correctly")
    {
        SUBCASE("Sampled tensors are of the right shape")
        {
            // Two independent 4-way categoricals -> a trailing batch dim of 2.
            float probabilities[2][4] = {{0.5, 0.5, 0.0, 0.0},
                                         {0.25, 0.25, 0.25, 0.25}};
            auto probabilities_tensor = torch::from_blob(probabilities, {2, 4});
            auto dist = Categorical(&probabilities_tensor, nullptr);

            CHECK(dist.sample({20}).sizes().vec() == std::vector<int64_t>{20, 2});
            CHECK(dist.sample({10, 5}).sizes().vec() == std::vector<int64_t>{10, 5, 2});
        }

        SUBCASE("Generated tensors have correct probabilities")
        {
            // Degenerate distributions: row 0 always picks index 1,
            // row 1 always picks index 3, so the sums are deterministic.
            float probabilities[2][4] = {{0, 1, 0, 0},
                                         {0, 0, 0, 1}};
            auto probabilities_tensor = torch::from_blob(probabilities, {2, 4});
            auto dist = Categorical(&probabilities_tensor, nullptr);

            auto output = dist.sample({5});
            auto sum = output.sum({0});

            CHECK(sum[0].item().toInt() == 5);
            CHECK(sum[1].item().toInt() == 15);
        }
    }

    SUBCASE("entropy()")
    {
        float probabilities[2][4] = {{0.5, 0.5, 0.0, 0.0},
                                     {0.25, 0.25, 0.25, 0.25}};
        auto probabilities_tensor = torch::from_blob(probabilities, {2, 4});
        auto dist = Categorical(&probabilities_tensor, nullptr);

        auto entropies = dist.entropy();

        SUBCASE("Returns correct values")
        {
            // ln(2) and ln(4) for the two rows respectively.
            CHECK(entropies[0].item().toDouble() ==
                  doctest::Approx(0.6931).epsilon(1e-3));
            CHECK(entropies[1].item().toDouble() ==
                  doctest::Approx(1.3863).epsilon(1e-3));
        }

        SUBCASE("Output tensor is the correct size")
        {
            CHECK(entropies.sizes().vec() == std::vector<int64_t>{2});
        }
    }

    SUBCASE("log_prob()")
    {
        float probabilities[2][4] = {{0.5, 0.5, 0.0, 0.0},
                                     {0.25, 0.25, 0.25, 0.25}};
        auto probabilities_tensor = torch::from_blob(probabilities, {2, 4});
        auto dist = Categorical(&probabilities_tensor, nullptr);

        float actions[2][2] = {{0, 1},
                               {2, 3}};
        auto actions_tensor = torch::from_blob(actions, {2, 2});

        auto log_probs = dist.log_prob(actions_tensor);
        INFO(log_probs << "\n");

        SUBCASE("Returns correct values")
        {
            // -15.9424 reflects the 1.21e-7 clamp applied to zero probability.
            CHECK(log_probs[0][0].item().toDouble() ==
                  doctest::Approx(-0.6931).epsilon(1e-3));
            CHECK(log_probs[0][1].item().toDouble() ==
                  doctest::Approx(-1.3863).epsilon(1e-3));
            CHECK(log_probs[1][0].item().toDouble() ==
                  doctest::Approx(-15.9424).epsilon(1e-3));
            CHECK(log_probs[1][1].item().toDouble() ==
                  doctest::Approx(-1.3863).epsilon(1e-3));
        }

        SUBCASE("Output tensor is correct size")
        {
            CHECK(log_probs.sizes().vec() == std::vector<int64_t>{2, 2});
        }
    }
}
} | 3,361 |
575 | <reponame>sarang-apps/darshan_browser
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "weblayer/test/stub_autofill_provider.h"
namespace weblayer {
// Test double that records every queried form by forwarding the FormData to
// the callback supplied at construction.
StubAutofillProvider::StubAutofillProvider(
    const base::RepeatingCallback<void(const autofill::FormData&)>&
        on_received_form_data)
    : on_received_form_data_(on_received_form_data) {}

StubAutofillProvider::~StubAutofillProvider() = default;

// Instead of producing autofill suggestions, hand the queried form to the
// test callback; all other parameters are ignored.
void StubAutofillProvider::OnQueryFormFieldAutofill(
    autofill::AutofillHandlerProxy* handler,
    int32_t id,
    const autofill::FormData& form,
    const autofill::FormFieldData& field,
    const gfx::RectF& bounding_box,
    bool /*unused_autoselect_first_suggestion*/) {
  on_received_form_data_.Run(form);
}
} // namespace weblayer
| 320 |
1,013 | <gh_stars>1000+
/*!
@authors <NAME> (<EMAIL>)
@date 2014-2020
@copyright BSD-3-Clause
*/
#include <gtest/gtest.h>
#include <pyclustering/interface/sync_interface.h>
#include <pyclustering/interface/hsyncnet_interface.h>
#include <pyclustering/interface/pyclustering_interface.h>
#include <pyclustering/interface/pyclustering_package.hpp>
#include <pyclustering/cluster/hsyncnet.hpp>
#include "utenv_utils.hpp"
using namespace pyclustering;
// Assert that the interface call produced a non-empty package, then release
// it so the test does not leak the allocation.
static void CHECK_FREE_PACKAGE(pyclustering_package * package) {
    ASSERT_NE(nullptr, package);
    ASSERT_TRUE(package->size > 0);

    free_pyclustering_package(package);
}
// Exercise the whole C-interface lifecycle of hsyncnet: create a network over
// a small two-cluster dataset, run the simulation, query every accessor on
// the resulting analyser, and destroy both objects.
TEST(utest_interface_hsyncnet, hsyncnet_api) {
    std::shared_ptr<pyclustering_package> sample = pack(dataset({ { 1 }, { 2 }, { 3 }, { 10 }, { 11 }, { 12 } }));

    void * network_pointer = hsyncnet_create_network(sample.get(), 3, (unsigned int) initial_type::EQUIPARTITION, 3, 0.1);
    ASSERT_NE(nullptr, network_pointer);

    void * analyser_pointer = hsyncnet_process(network_pointer, 0.995, (unsigned int) solve_type::FORWARD_EULER, true);
    ASSERT_NE(nullptr, analyser_pointer);

    // Each accessor must return a non-empty package (checked and freed by the
    // helper above).
    pyclustering_package * package = sync_dynamic_allocate_sync_ensembles(analyser_pointer, 0.1, sync_dynamic_get_size(analyser_pointer) - 1);
    CHECK_FREE_PACKAGE(package);

    package = sync_dynamic_allocate_correlation_matrix(analyser_pointer, sync_dynamic_get_size(analyser_pointer) - 1);
    CHECK_FREE_PACKAGE(package);

    package = sync_dynamic_get_time(analyser_pointer);
    CHECK_FREE_PACKAGE(package);

    package = sync_dynamic_get_output(analyser_pointer);
    CHECK_FREE_PACKAGE(package);

    hsyncnet_destroy_network(network_pointer);
    hsyncnet_analyser_destroy(analyser_pointer);
}
945 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.hive;
import org.apache.iotdb.hadoop.tsfile.IReaderSet;
import org.apache.iotdb.hadoop.tsfile.TSFInputSplit;
import org.apache.iotdb.hadoop.tsfile.TSFRecordReader;
import org.apache.iotdb.tsfile.read.TsFileSequenceReader;
import org.apache.iotdb.tsfile.read.common.Field;
import org.apache.iotdb.tsfile.read.common.RowRecord;
import org.apache.iotdb.tsfile.read.query.dataset.QueryDataSet;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.apache.iotdb.hadoop.tsfile.TSFRecordReader.getCurrentValue;
/**
 * Hive {@link RecordReader} that exposes the rows of a TsFile split as
 * {@code NullWritable -> MapWritable} pairs. The {@link QueryDataSet}s backing
 * the split are consumed in order, and emitted column names are lower-cased so
 * they match Hive's case-insensitive column handling.
 */
public class TSFHiveRecordReader implements RecordReader<NullWritable, MapWritable>, IReaderSet {

  private static final Logger logger = LoggerFactory.getLogger(TSFHiveRecordReader.class);

  /** all datasets corresponding to one specific split */
  private List<QueryDataSet> dataSetList = new ArrayList<>();

  /**
   * List for name of devices. The order corresponds to the order of dataSetList. Means that
   * deviceIdList[i] is the name of device for dataSetList[i].
   */
  private List<String> deviceIdList = new ArrayList<>();

  /** The index of QueryDataSet that is currently processed */
  private int currentIndex = 0;

  /** Whether the device-id column is included in emitted rows. */
  private boolean isReadDeviceId;

  /** Whether the timestamp column is included in emitted rows. */
  private boolean isReadTime;

  /** Underlying TsFile reader; injected via {@link #setReader}. */
  private TsFileSequenceReader reader;

  /** Measurement (column) names used when materializing row values. */
  private List<String> measurementIds;

  /**
   * Fills {@code value} with the next available row of the split.
   *
   * @return true if a row was produced, false once all datasets are exhausted
   * @throws IOException if the calling thread is interrupted while reading
   */
  @Override
  public boolean next(NullWritable key, MapWritable value) throws IOException {
    // Skip over exhausted datasets until one with remaining rows is found.
    while (currentIndex < dataSetList.size()) {
      if (!dataSetList.get(currentIndex).hasNext()) {
        currentIndex++;
      } else {
        RowRecord rowRecord = dataSetList.get(currentIndex).next();
        List<Field> fields = rowRecord.getFields();
        long timestamp = rowRecord.getTimestamp();
        try {
          MapWritable res = new MapWritable();
          // Hive column names are case-insensitive; normalize to lower case.
          getCurrentValue(
                  deviceIdList,
                  currentIndex,
                  timestamp,
                  isReadTime,
                  isReadDeviceId,
                  fields,
                  measurementIds)
              .forEach((k, v) -> res.put(new Text(k.toString().toLowerCase()), v));
          value.putAll(res);
        } catch (InterruptedException e) {
          // Restore the interrupt flag before surfacing the failure to Hive.
          Thread.currentThread().interrupt();
          throw new IOException(e.getMessage());
        }
        return true;
      }
    }
    return false;
  }

  @Override
  public NullWritable createKey() {
    return NullWritable.get();
  }

  @Override
  public MapWritable createValue() {
    return new MapWritable();
  }

  @Override
  public long getPos() {
    // can't know
    return 0;
  }

  /**
   * @param split must be a {@link TSFInputSplit}; any other type is a
   *     programming error and raises {@link InternalError}
   * @throws IOException if initializing the datasets for the split fails
   */
  public TSFHiveRecordReader(InputSplit split, JobConf job) throws IOException {
    if (split instanceof TSFInputSplit) {
      TSFRecordReader.initialize((TSFInputSplit) split, job, this, dataSetList, deviceIdList);
    } else {
      logger.error(
          "The InputSplit class is not {}, the class is {}",
          TSFInputSplit.class.getName(),
          split.getClass().getName());
      throw new InternalError(
          String.format(
              "The InputSplit class is not %s, the class is %s",
              TSFInputSplit.class.getName(), split.getClass().getName()));
    }
  }

  @Override
  public float getProgress() {
    return 0;
  }

  /** Drops the cached datasets and closes the underlying TsFile reader. */
  @Override
  public void close() throws IOException {
    dataSetList = null;
    deviceIdList = null;
    // The reader is only injected through setReader(); guard against it never
    // having been set so close() cannot fail with a NullPointerException.
    if (reader != null) {
      reader.close();
    }
  }

  @Override
  public void setReader(TsFileSequenceReader reader) {
    this.reader = reader;
  }

  @Override
  public void setMeasurementIds(List<String> measurementIds) {
    this.measurementIds = measurementIds;
  }

  @Override
  public void setReadDeviceId(boolean isReadDeviceId) {
    this.isReadDeviceId = isReadDeviceId;
  }

  @Override
  public void setReadTime(boolean isReadTime) {
    this.isReadTime = isReadTime;
  }
}
| 1,783 |
4,140 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2.objectinspector;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Map;
import java.util.TreeMap;
/*
* The equality is implemented fully, the implementation sorts the maps
* by their keys to provide a transitive compare.
*/
public class FullMapEqualComparer implements MapEqualComparer {

  /** Orders map keys through a single {@link ObjectInspector}. */
  private static class MapKeyComparator implements Comparator<Object> {
    private final ObjectInspector keyInspector;

    MapKeyComparator(ObjectInspector keyInspector) {
      this.keyInspector = keyInspector;
    }

    @Override
    public int compare(Object left, Object right) {
      return ObjectInspectorUtils.compare(left, keyInspector, right, keyInspector);
    }
  }

  /**
   * Compares two maps transitively: smaller map first; otherwise both key sets
   * are sorted and the (key, value) pairs are compared position by position.
   */
  @Override
  public int compare(Object o1, MapObjectInspector moi1, Object o2, MapObjectInspector moi2) {
    int size1 = moi1.getMapSize(o1);
    int size2 = moi2.getMapSize(o2);
    if (size1 != size2) {
      return size1 - size2;
    }

    ObjectInspector keyOi1 = moi1.getMapKeyObjectInspector();
    ObjectInspector keyOi2 = moi2.getMapKeyObjectInspector();
    ObjectInspector valueOi1 = moi1.getMapValueObjectInspector();
    ObjectInspector valueOi2 = moi2.getMapValueObjectInspector();

    Map<?, ?> map1 = moi1.getMap(o1);
    Map<?, ?> map2 = moi2.getMap(o2);

    // Sorting both key sets makes the comparison independent of iteration
    // order, which keeps the relation transitive.
    Object[] keys1 = map1.keySet().toArray();
    Arrays.sort(keys1, new MapKeyComparator(keyOi1));
    Object[] keys2 = map2.keySet().toArray();
    Arrays.sort(keys2, new MapKeyComparator(keyOi2));

    for (int i = 0; i < size1; ++i) {
      Object key1 = keys1[i];
      Object key2 = keys2[i];
      int keyCmp = ObjectInspectorUtils.compare(key1, keyOi1, key2, keyOi2, this);
      if (keyCmp != 0) {
        return keyCmp;
      }
      int valueCmp =
          ObjectInspectorUtils.compare(map1.get(key1), valueOi1, map2.get(key2), valueOi2, this);
      if (valueCmp != 0) {
        return valueCmp;
      }
    }

    return 0;
  }
}
| 1,020 |
1,056 | <filename>php/php.project/src/org/netbeans/modules/php/project/ui/actions/support/TestSingleMethodSupport.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.php.project.ui.actions.support;
import javax.swing.JEditorPane;
import javax.swing.text.Document;
import org.netbeans.modules.editor.NbEditorUtilities;
import org.netbeans.modules.php.api.editor.EditorSupport;
import org.netbeans.modules.php.api.editor.PhpBaseElement;
import org.netbeans.modules.php.api.editor.PhpType;
import org.netbeans.modules.php.api.phpmodule.PhpModule;
import org.netbeans.modules.php.project.PhpProject;
import org.netbeans.modules.php.project.util.PhpProjectUtils;
import org.netbeans.modules.php.spi.testing.PhpTestingProvider;
import org.netbeans.spi.project.SingleMethod;
import org.openide.cookies.EditorCookie;
import org.openide.filesystems.FileObject;
import org.openide.nodes.Node;
import org.openide.text.NbDocument;
import org.openide.util.Lookup;
import org.openide.util.Mutex;
/**
 * Helpers for running a single PHP test method: detecting whether a node lives
 * under a project's test directory, whether the caret in the active editor is
 * on a runnable test method, and resolving that method to a
 * {@link SingleMethod}.
 */
public final class TestSingleMethodSupport {

    /** Utility class; not instantiable. */
    private TestSingleMethodSupport() {
    }

    /**
     * @return true if the node's file belongs to a PHP project and sits under
     *         the project's test roots
     */
    public static boolean isTestClass(Node activatedNode) {
        FileObject fileObject = CommandUtils.getFileObject(activatedNode);
        if (fileObject == null) {
            return false;
        }
        PhpProject project = PhpProjectUtils.getPhpProject(fileObject);
        if (project == null) {
            return false;
        }
        if(CommandUtils.isUnderTests(project, fileObject, false)) {
            return true;
        }
        return false;
    }

    /**
     * @return true if the caret in the node's most recent editor pane is
     *         positioned on a recognized test method
     */
    public static boolean canHandle(Node activatedNode) {
        FileObject fileObject = CommandUtils.getFileObject(activatedNode);
        if (fileObject == null) {
            return false;
        }
        PhpProject project = PhpProjectUtils.getPhpProject(fileObject);
        if (project == null) {
            return false;
        }
        final EditorCookie editorCookie = activatedNode.getLookup().lookup(EditorCookie.class);
        if (editorCookie == null) {
            return false;
        }
        // Editor panes must be queried on the event thread.
        JEditorPane pane = Mutex.EVENT.readAccess(new Mutex.Action<JEditorPane>() {
            @Override
            public JEditorPane run() {
                return NbDocument.findRecentEditorPane(editorCookie);
            }
        });
        if (pane == null) {
            return false;
        }
        return getTestMethod(pane.getDocument(), pane.getCaret().getDot()) != null;
    }

    /**
     * Resolves the PHP method at the given caret offset to a runnable
     * {@link SingleMethod}.
     *
     * @return the encoded test method, or null when the element at the caret
     *         is not a method or no testing provider recognizes it as a test
     */
    public static SingleMethod getTestMethod(Document doc, int caret) {
        FileObject fileObject = NbEditorUtilities.getFileObject(doc);
        assert fileObject != null;
        EditorSupport editorSupport = Lookup.getDefault().lookup(EditorSupport.class);
        assert editorSupport != null;
        PhpBaseElement element = editorSupport.getElement(fileObject, caret);
        if (!(element instanceof PhpType.Method)) {
            return null;
        }
        PhpType.Method method = (PhpType.Method) element;
        PhpProject project = PhpProjectUtils.getPhpProject(fileObject);
        assert project != null;
        PhpModule phpModule = project.getPhpModule();
        // The first provider that claims both the file and the method wins.
        for (PhpTestingProvider testingProvider : project.getTestingProviders()) {
            if (testingProvider.isTestFile(phpModule, fileObject)
                    && testingProvider.isTestCase(phpModule, method)) {
                return new SingleMethod(fileObject, CommandUtils.encodeMethod(method.getPhpType().getFullyQualifiedName(), method.getName()));
            }
        }
        return null;
    }
}
| 1,615 |
636 | <filename>tests/test_archive.py
import hashlib
import io
from os import path
import pytest
import slackviewer
from slackviewer import archive
from slackviewer.utils.six import to_bytes
def test_SHA1_file():
    """archive.SHA1_file must match a naive read-whole-file SHA-1 digest."""
    target = path.join("tests", "testarchive.zip")
    salt = to_bytes(slackviewer.__version__)

    def reference_sha1(filename, extra=b''):
        """The original unoptimized method (reads whole file instead of chunks)"""
        with io.open(filename, 'rb') as f:
            return hashlib.sha1(f.read() + extra).hexdigest()

    assert archive.SHA1_file(target, salt) == reference_sha1(target, salt)
| 238 |
2,122 | <filename>web/src/main/java/com/sishuok/es/maintain/push/web/controller/PushController.java
/**
* Copyright (c) 2005-2012 https://github.com/zhangkaitao
*
* Licensed under the Apache License, Version 2.0 (the "License");
*/
package com.sishuok.es.maintain.push.web.controller;
import com.google.common.collect.Maps;
import com.sishuok.es.maintain.notification.service.NotificationApi;
import com.sishuok.es.maintain.push.service.PushService;
import com.sishuok.es.personal.message.service.MessageApi;
import com.sishuok.es.sys.user.entity.User;
import com.sishuok.es.sys.user.web.bind.annotation.CurrentUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletResponse;
import java.util.List;
import java.util.Map;
/**
 * 1. Real-time push to users: messages and notifications.
 * <p>User: <NAME>
 * <p>Date: 13-7-16 2:08 PM
 * <p>Version: 1.0
 */
@Controller
public class PushController {

    @Autowired
    private MessageApi messageApi;

    @Autowired
    private NotificationApi notificationApi;

    @Autowired
    private PushService pushService;

    /**
     * Fetches the hint data shown on the page: the unread-message count and
     * the latest notifications.
     *
     * @return a data map on the user's first poll, a deferred (long-polling)
     *         result on subsequent polls, or null when no user id is available
     */
    @RequestMapping(value = "/admin/polling")
    @ResponseBody
    public Object polling(HttpServletResponse resp, @CurrentUser User user) {

        resp.setHeader("Connection", "Keep-Alive");
        resp.addHeader("Cache-Control", "private");
        resp.addHeader("Pragma", "no-cache");

        Long userId = user.getId();
        if(userId == null) {
            return null;
        }

        // If this is the user's first poll (not yet marked online), answer
        // immediately with the current data instead of parking the request.
        if(!pushService.isOnline(userId)) {
            Long unreadMessageCount = messageApi.countUnread(userId);
            List<Map<String, Object>> notifications = notificationApi.topFiveNotification(user.getId());
            Map<String, Object> data = Maps.newHashMap();
            data.put("unreadMessageCount", unreadMessageCount);
            data.put("notifications", notifications);
            pushService.online(userId);
            return data;
        } else {
            // Long polling: hand back a deferred result managed by the push
            // service, completed when new data arrives.
            return pushService.newDeferredResult(userId);
        }
    }
}
| 982 |
348 | <gh_stars>100-1000
{"nom":"Censy","circ":"2ème circonscription","dpt":"Yonne","inscrits":43,"abs":16,"votants":27,"blancs":6,"nuls":0,"exp":21,"res":[{"nuance":"UDI","nom":"M. <NAME>","voix":11},{"nuance":"REM","nom":"<NAME>","voix":10}]} | 100 |
1,555 | <reponame>LightSun/mir
/* Verify unaligned address aliasing on Alpha EV[45]. */

extern void exit (int);
/* Declare abort explicitly: calling it without a declaration is an
   implicit-function-declaration error in C99 and later.  */
extern void abort (void);

static unsigned short x, y;

/* Store known patterns into two adjacent shorts.  */
void foo()
{
  x = 0x345;
  y = 0x567;
}

int main()
{
  foo ();

  /* Each store must land in its own variable; any cross-clobber from
     unaligned aliasing makes the comparison fail.  */
  if (x != 0x345 || y != 0x567)
    abort ();
  exit (0);
}
| 111 |
1,043 | <filename>SwiftExample/Pods/Headers/Private/KZPlayground/KZPComponent.h
//
// Created by <NAME>(http://twitter.com/merowing_) on 20/10/14.
//
//
//
@import Foundation;
// Contract for a playground component: every conformer must be able to wipe
// its state via +reset.
// NOTE(review): exact reset semantics inferred from the name and the
// @required marker — confirm against KZPlayground call sites.
@protocol KZPComponent <NSObject>
@required
+ (void)reset;
@end
335 | {
"word": "Attachment",
"definitions": [
"An extra part or extension that is or may be attached to something to perform a particular function.",
"A computer file appended to an email.",
"Affection, fondness, or sympathy for someone or something.",
"An affectionate relationship.",
"Temporary secondment to an organization.",
"The action of attaching something.",
"Legal seizure of property."
],
"parts-of-speech": "Noun"
} | 171 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.java.hints.threading;
import com.sun.source.tree.BlockTree;
import com.sun.source.tree.StatementTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.TryTree;
import com.sun.source.util.TreePath;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.netbeans.api.java.source.TreeMaker;
import org.netbeans.api.java.source.TreePathHandle;
import org.netbeans.api.java.source.WorkingCopy;
import org.netbeans.api.java.source.matching.Matcher;
import org.netbeans.api.java.source.matching.Occurrence;
import org.netbeans.api.java.source.matching.Pattern;
import org.netbeans.modules.java.hints.errors.Utilities;
import org.netbeans.spi.editor.hints.ErrorDescription;
import org.netbeans.spi.editor.hints.Fix;
import org.netbeans.spi.java.hints.ConstraintVariableType;
import org.netbeans.spi.java.hints.ErrorDescriptionFactory;
import org.netbeans.spi.java.hints.Hint;
import org.netbeans.spi.java.hints.HintContext;
import org.netbeans.spi.java.hints.JavaFix;
import org.netbeans.spi.java.hints.JavaFixUtilities;
import org.netbeans.spi.java.hints.TriggerPattern;
import org.netbeans.spi.java.hints.TriggerPatterns;
import org.openide.util.NbBundle;
/**
*
* @author sdedic
*/
/**
 * Hint that flags {@code Lock.lock()} calls whose matching {@code unlock()} is
 * not guarded by a {@code try/finally}, and {@code unlock()} calls that are
 * redundant because the lock is already released in the finally block.
 */
@Hint(displayName = "#DN_org.netbeans.modules.java.hints.threading.Tiny.unlockOutsideTryFinally", description = "#DESC_org.netbeans.modules.java.hints.threading.Tiny.unlockOutsideTryFinally", category="thread", suppressWarnings="LockAcquiredButNotSafelyReleased")
public class UnlockOutsideFinally {

    @NbBundle.Messages({
        "FIX_MoveUnlockToFinally=Move unlock() to finally",
        "FIX_RemoveRedundantUnlock=Remove extra unlock() call"
    })
    // Fix that removes a misplaced unlock() statement and, when finHandle is
    // non-null, re-inserts it as a child of the finally block.
    private static class MoveUnlockFix extends JavaFix {
        // Target finally block; null means the unlock() call is redundant and
        // is simply deleted.
        private TreePathHandle finHandle;

        public MoveUnlockFix(TreePathHandle handle, TreePathHandle finHandle) {
            super(handle);
            this.finHandle = finHandle;
        }

        @Override
        protected String getText() {
            return finHandle == null ? Bundle.FIX_RemoveRedundantUnlock() : Bundle.FIX_MoveUnlockToFinally();
        }

        @Override
        protected void performRewrite(TransformationContext ctx) throws Exception {
            TreePath finPath = null;
            if (finHandle != null) {
                finPath = finHandle.resolve(ctx.getWorkingCopy());
                if (finPath == null) {
                    // report ?
                    return;
                }
            }
            WorkingCopy wc = ctx.getWorkingCopy();
            TreeMaker mk = wc.getTreeMaker();
            TreePath p = ctx.getPath();
            // The handle may resolve to the unlock() expression itself; widen
            // it to the enclosing expression statement so a whole statement is
            // moved, or bail out if that is not possible.
            if (!StatementTree.class.isAssignableFrom(p.getLeaf().getKind().asInterface())) {
                if (p.getParentPath() != null &&
                    p.getParentPath().getLeaf().getKind() == Tree.Kind.EXPRESSION_STATEMENT) {
                    p = p.getParentPath();
                } else {
                    return;
                }
            }
            Utilities.removeStatement(wc, p);
            if (finPath != null) {
                Utilities.insertStatement(wc, finPath, null,
                        Collections.singletonList((StatementTree)p.getLeaf()),
                        null,
                        Utilities.INSERT_POS_CHILD
                );
            }
        }
    }

    @TriggerPatterns({
        @TriggerPattern(value="$lock.lock(); $otherStats$; try { $statements$; $lock.unlock(); $rest$; } catch $catches$ finally { $finstats$; } ",
                        constraints=@ConstraintVariableType(variable="$lock", type="java.util.concurrent.locks.Lock")),
        @TriggerPattern(value="$lock.lock(); $otherStats$; try { $statements$; $lock.unlock(); $rest$; } catch $catches$",
                        constraints=@ConstraintVariableType(variable="$lock", type="java.util.concurrent.locks.Lock")),
        @TriggerPattern(value="$lock.lock(); $otherStats$; try { $statements$; } catch $catches$ catch($excType $var) { $catchStats1$; $lock.unlock(); $catchStats2$; } catch $catches2$ finally { $finstmts$; }",
                        constraints=@ConstraintVariableType(variable="$lock", type="java.util.concurrent.locks.Lock")),
    })
    @NbBundle.Messages({
        "ERR_UnlockOutsideTryFinally=Lock.lock() not unlocked in finally",
        "FIX_UnlockOutsideTryFinally=Wrap by try-finally",
        "MSG_ExtraUnlock=Extra unlock() call; lock is already released in finally"
    })
    // Handles unlock() calls placed inside the try body (or a catch clause)
    // rather than in the finally block.
    public static ErrorDescription unlockInsideTry(HintContext ctx) {
        TreePath fin = ctx.getVariables().get("$lock$1");
        if (fin == null) {
            return null;
        }
        // Walk from the matched $lock occurrence up to the unlock() method
        // invocation node.
        TreePath parent = fin.getParentPath();
        if (parent.getLeaf().getKind() != Tree.Kind.MEMBER_SELECT) {
            return null;
        }
        parent = parent.getParentPath();
        if (parent == null || parent.getLeaf().getKind() != Tree.Kind.METHOD_INVOCATION) {
            return null;
        }
        // Find the enclosing try statement, stopping at method/class bounds.
        TreePath tPath = parent.getParentPath();
        while (tPath != null && tPath.getLeaf().getKind() != Tree.Kind.TRY) {
            if (tPath.getLeaf().getKind() == Tree.Kind.METHOD ||
                tPath.getLeaf().getKind() == Tree.Kind.CLASS) {
                return null;
            }
            tPath = tPath.getParentPath();
        }
        if (tPath == null) {
            return null;
        }
        TryTree tt = (TryTree)tPath.getLeaf();
        Fix f = null;
        String displayName = null;
        if (tt.getFinallyBlock() != null) {
            // If the same unlock() call already appears in the finally block,
            // the matched one is redundant and can just be removed.
            TreePath finBlockPath = new TreePath(tPath, tt.getFinallyBlock());
            Collection<? extends Occurrence> occ = Matcher.create(ctx.getInfo()).
                    setSearchRoot(finBlockPath).
                    match(
                        Pattern.createSimplePattern(parent)
                    );
            if (!occ.isEmpty()) {
                f = new MoveUnlockFix(
                        TreePathHandle.create(parent, ctx.getInfo()),
                        null).toEditorFix();
                displayName = Bundle.MSG_ExtraUnlock();
            }
        }
        if (f == null) {
            displayName = Bundle.ERR_UnlockOutsideTryFinally();
            f = new MoveUnlockFix(
                    TreePathHandle.create(parent, ctx.getInfo()),
                    TreePathHandle.create(tPath, ctx.getInfo())).toEditorFix();
        }
        return ErrorDescriptionFactory.forName(ctx, parent, displayName, f);
    }

    @TriggerPatterns({
        @TriggerPattern(value="$lock.lock(); $statements$; $lock.unlock();",
                        constraints=@ConstraintVariableType(variable="$lock", type="java.util.concurrent.locks.Lock")),
    })
    // Handles lock()/unlock() pairs with no try/finally at all: offers to wrap
    // the in-between statements in try { ... } finally { unlock(); }.
    public static ErrorDescription unlockOutsideTryFinally(HintContext ctx) {
        if (ctx.getMultiVariables().get("$statements$").isEmpty()) return null; //#186434
        String fixDisplayName = NbBundle.getMessage(Tiny.class, "FIX_UnlockOutsideTryFinally");
        String lockString = ctx.getVariables().containsKey("$lock") ? "$lock." : ""; // NOI18N
        Fix f= JavaFixUtilities.rewriteFix(ctx, fixDisplayName, ctx.getPath(),
                lockString + "lock(); try {$statements$;} finally {" + lockString + "unlock();}");
        String displayName = NbBundle.getMessage(Tiny.class, "ERR_UnlockOutsideTryFinally");
        //XXX:
        Tree mark;
        Tree matched = ctx.getPath().getLeaf();
        if (matched.getKind() == Tree.Kind.BLOCK) {
            // Highlight the first statement of the matched run inside the block.
            List<? extends StatementTree> s = ((BlockTree) matched).getStatements();
            int count = ctx.getMultiVariables().get("$$1$").size();
            mark = s.get(count);
        } else {
            mark = matched;
        }
        return ErrorDescriptionFactory.forName(ctx, mark, displayName, f);
    }
}
| 3,782 |
320 | import os
import tempfile
from scrapy.http import TextResponse
from pystock_crawler.spiders.yahoo import make_url, YahooSpider
from pystock_crawler.tests.base import TestCaseBase
class MakeURLTest(TestCaseBase):
    """make_url() builds Yahoo Finance CSV URLs: the end date maps to the
    d/e/f query fields and the start date to a/b/c, with months zero-based."""

    def test_no_dates(self):
        expected = ('http://ichart.finance.yahoo.com/table.csv?'
                    's=YHOO&d=&e=&f=&g=d&a=&b=&c=&ignore=.csv')
        self.assertEqual(make_url('YHOO'), expected)

    def test_only_start_date(self):
        expected = ('http://ichart.finance.yahoo.com/table.csv?'
                    's=GOOG&d=&e=&f=&g=d&a=10&b=22&c=2013&ignore=.csv')
        self.assertEqual(make_url('GOOG', start_date='20131122'), expected)

    def test_only_end_date(self):
        expected = ('http://ichart.finance.yahoo.com/table.csv?'
                    's=AAPL&d=10&e=22&f=2013&g=d&a=&b=&c=&ignore=.csv')
        self.assertEqual(make_url('AAPL', end_date='20131122'), expected)

    def test_start_and_end_dates(self):
        expected = ('http://ichart.finance.yahoo.com/table.csv?'
                    's=TSLA&d=10&e=22&f=2013&g=d&a=2&b=5&c=2012&ignore=.csv')
        self.assertEqual(make_url('TSLA', start_date='20120305', end_date='20131122'),
                         expected)
class YahooSpiderTest(TestCaseBase):
    """Tests for YahooSpider: start-URL construction from inline symbols or a
    symbol file, date validation, and parsing of Yahoo's CSV response."""

    def test_empty_creation(self):
        # Without a symbols argument the spider has nothing to crawl.
        spider = YahooSpider()
        self.assertEqual(list(spider.start_urls), [])

    def test_inline_symbols(self):
        # Symbols may be passed inline, singly or comma-separated.
        spider = YahooSpider(symbols='C')
        self.assertEqual(list(spider.start_urls), [make_url('C')])

        spider = YahooSpider(symbols='KO,DIS,ATVI')
        self.assertEqual(list(spider.start_urls), [
            make_url(symbol) for symbol in ('KO', 'DIS', 'ATVI')
        ])

    def test_symbol_file(self):
        try:
            # Create a mock file of a list of symbols
            with tempfile.NamedTemporaryFile('w', delete=False) as f:
                f.write('# Comment\nGOOG\tGoogle Inc.\nAAPL\nFB Facebook.com\n#comment\nAMZN\n')

            # Comment lines are skipped; only the first token of each line is
            # used as the symbol.
            spider = YahooSpider(symbols=f.name)
            self.assertEqual(list(spider.start_urls), [
                make_url(symbol) for symbol in ('GOOG', 'AAPL', 'FB', 'AMZN')
            ])
        finally:
            os.remove(f.name)

    def test_illegal_dates(self):
        # '12345678' is not a valid YYYYMMDD date (month 56, day 78).
        with self.assertRaises(ValueError):
            YahooSpider(startdate='12345678')

        with self.assertRaises(ValueError):
            YahooSpider(enddate='12345678')

    def test_parse(self):
        spider = YahooSpider()
        body = ('Date,Open,High,Low,Close,Volume,Adj Close\n'
                '2013-11-22,121.58,122.75,117.93,121.38,11096700,121.38\n'
                '2013-09-06,168.57,169.70,165.15,166.97,8619700,166.97\n'
                '2013-06-26,103.80,105.87,102.66,105.72,6602600,105.72\n')
        response = TextResponse(make_url('YHOO'), body=body)
        # Expect one item per CSV data row, in file order.
        items = list(spider.parse(response))
        self.assertEqual(len(items), 3)
        self.assert_item(items[0], {
            'symbol': 'YHOO',
            'date': '2013-11-22',
            'open': 121.58,
            'high': 122.75,
            'low': 117.93,
            'close': 121.38,
            'volume': 11096700,
            'adj_close': 121.38
        })
        self.assert_item(items[1], {
            'symbol': 'YHOO',
            'date': '2013-09-06',
            'open': 168.57,
            'high': 169.70,
            'low': 165.15,
            'close': 166.97,
            'volume': 8619700,
            'adj_close': 166.97
        })
        self.assert_item(items[2], {
            'symbol': 'YHOO',
            'date': '2013-06-26',
            'open': 103.80,
            'high': 105.87,
            'low': 102.66,
            'close': 105.72,
            'volume': 6602600,
            'adj_close': 105.72
        })
| 2,003 |
348 | {"nom":"Landerneau","circ":"5ème circonscription","dpt":"Finistère","inscrits":11254,"abs":4985,"votants":6269,"blancs":50,"nuls":24,"exp":6195,"res":[{"nuance":"DVD","nom":"M. <NAME>","voix":2165},{"nuance":"REM","nom":"Mme <NAME>","voix":1826},{"nuance":"SOC","nom":"Mme <NAME>","voix":796},{"nuance":"FI","nom":"Mme <NAME>","voix":613},{"nuance":"FN","nom":"Mme <NAME>","voix":290},{"nuance":"ECO","nom":"Mme <NAME>","voix":213},{"nuance":"REG","nom":"M. <NAME>","voix":86},{"nuance":"DLF","nom":"M. <NAME>","voix":60},{"nuance":"COM","nom":"M. <NAME>","voix":49},{"nuance":"EXG","nom":"Mme <NAME>","voix":31},{"nuance":"DVG","nom":"Mme <NAME>","voix":28},{"nuance":"REG","nom":"M. <NAME>","voix":27},{"nuance":"DIV","nom":"<NAME>","voix":11}]} | 300 |
2,868 | <gh_stars>1000+
/*
* Copyright (c) 2015-2017, Intel Corporation
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Intel Corporation nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef ACCEL_COMPILE_H
#define ACCEL_COMPILE_H
#include "ue2common.h"
#include "util/charreach.h"
#include "util/flat_containers.h"
union AccelAux;
namespace ue2 {
/** \brief Parameters describing an acceleration scheme to be compiled into an
 * AccelAux structure by buildAccelAux(). Defaults accept everything: offsets
 * are zero and every byte is a stop character for single-byte schemes. */
struct AccelInfo {
    AccelInfo() : single_offset(0U), double_offset(0U),
                  single_stops(CharReach::dot()) {}
    u32 single_offset; /**< offset correction to apply to single schemes */
    u32 double_offset; /**< offset correction to apply to double schemes */
    CharReach double_stop1; /**< single-byte accel stop literals for double
                             * schemes */
    flat_set<std::pair<u8, u8>> double_stop2; /**< double-byte accel stop
                                               * literals */
    CharReach single_stops; /**< escapes for single byte acceleration */
};
bool buildAccelAux(const AccelInfo &info, AccelAux *aux);
/* returns true if the escape set can be handled with a masked double_verm */
bool buildDvermMask(const flat_set<std::pair<u8, u8>> &escape_set,
u8 *m1_out = nullptr, u8 *m2_out = nullptr);
} // namespace ue2
#endif
| 899 |
12,536 | // Copyright 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <sys/errno.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <atomic>
#include "gtest/gtest.h"
#include "test/util/capability_util.h"
#include "test/util/file_descriptor.h"
#include "test/util/test_util.h"
#include "test/util/thread_util.h"
namespace gvisor {
namespace testing {
namespace {
// For this set of tests to run, they must be run with coverage enabled. On
// native Linux, this involves compiling the kernel with kcov enabled. For
// gVisor, we need to enable the Go coverage tool, e.g. bazel test --
// collect_coverage_data --instrumentation_filter=//pkg/... <test>.
constexpr char kcovPath[] = "/sys/kernel/debug/kcov";
constexpr int kSize = 4096;
constexpr int KCOV_INIT_TRACE = 0x80086301;
constexpr int KCOV_ENABLE = 0x6364;
constexpr int KCOV_DISABLE = 0x6365;

// Maps the shared kcov coverage buffer (kSize 64-bit slots) for the given fd.
// On failure the result compares equal to MAP_FAILED, matching mmap's
// convention, so callers can test the pointer directly.
uint64_t* KcovMmap(int fd) {
  void* const area = mmap(nullptr, kSize * sizeof(uint64_t),
                          PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
  return static_cast<uint64_t*>(area);
}
// Basic end-to-end kcov flow: init the trace buffer, mmap it, enable
// coverage, generate coverage via syscalls, validate the recorded PCs, and
// finally disable coverage again.
TEST(KcovTest, Kcov) {
  SKIP_IF(!ASSERT_NO_ERRNO_AND_VALUE(HaveCapability((CAP_DAC_OVERRIDE))));
  int fd;
  ASSERT_THAT(fd = open(kcovPath, O_RDWR),
              AnyOf(SyscallSucceeds(), SyscallFailsWithErrno(ENOENT)));
  // Kcov not available.
  SKIP_IF(errno == ENOENT);
  auto fd_closer = Cleanup([fd]() { close(fd); });
  ASSERT_THAT(ioctl(fd, KCOV_INIT_TRACE, kSize), SyscallSucceeds());
  uint64_t* area = KcovMmap(fd);
  ASSERT_TRUE(area != MAP_FAILED);
  ASSERT_THAT(ioctl(fd, KCOV_ENABLE, 0), SyscallSucceeds());
  for (int i = 0; i < 10; i++) {
    // Make some syscalls to generate coverage data.
    // Enabling kcov while it is already enabled must fail with EINVAL; the
    // failing ioctls themselves are the syscalls producing coverage.
    ASSERT_THAT(ioctl(fd, KCOV_ENABLE, 0), SyscallFailsWithErrno(EINVAL));
  }
  // The first 64-bit slot of the buffer holds the number of recorded PCs.
  uint64_t num_pcs = *(uint64_t*)(area);
  EXPECT_GT(num_pcs, 0);
  for (uint64_t i = 1; i <= num_pcs; i++) {
    // Verify that PCs are in the standard kernel range.
    EXPECT_GT(area[i], 0xffffffff7fffffffL);
  }
  ASSERT_THAT(ioctl(fd, KCOV_DISABLE, 0), SyscallSucceeds());
}
// mmap of the coverage buffer must be rejected before KCOV_INIT_TRACE has
// sized the trace region.
TEST(KcovTest, PrematureMmap) {
  SKIP_IF(!ASSERT_NO_ERRNO_AND_VALUE(HaveCapability((CAP_DAC_OVERRIDE))));
  int fd;
  ASSERT_THAT(fd = open(kcovPath, O_RDWR),
              AnyOf(SyscallSucceeds(), SyscallFailsWithErrno(ENOENT)));
  // Kcov not available.
  SKIP_IF(errno == ENOENT);
  auto fd_closer = Cleanup([fd]() { close(fd); });
  // Cannot mmap before KCOV_INIT_TRACE.
  uint64_t* area = KcovMmap(fd);
  ASSERT_TRUE(area == MAP_FAILED);
}
// Tests that multiple kcov fds can be used simultaneously: each fd owns an
// independent trace buffer, and enabling coverage on one (here, from a
// separate thread) must not interfere with the other.
TEST(KcovTest, MultipleFds) {
  SKIP_IF(!ASSERT_NO_ERRNO_AND_VALUE(HaveCapability((CAP_DAC_OVERRIDE))));
  int fd1;
  ASSERT_THAT(fd1 = open(kcovPath, O_RDWR),
              AnyOf(SyscallSucceeds(), SyscallFailsWithErrno(ENOENT)));
  // Kcov not available.
  SKIP_IF(errno == ENOENT);
  int fd2;
  ASSERT_THAT(fd2 = open(kcovPath, O_RDWR), SyscallSucceeds());
  auto fd_closer = Cleanup([fd1, fd2]() {
    close(fd1);
    close(fd2);
  });
  // t1 enables coverage on fd1; ScopedThread joins at end of scope, so fd1 is
  // fully set up on a different thread than fd2 below.
  auto t1 = ScopedThread([&] {
    ASSERT_THAT(ioctl(fd1, KCOV_INIT_TRACE, kSize), SyscallSucceeds());
    uint64_t* area = KcovMmap(fd1);
    ASSERT_TRUE(area != MAP_FAILED);
    ASSERT_THAT(ioctl(fd1, KCOV_ENABLE, 0), SyscallSucceeds());
  });
  ASSERT_THAT(ioctl(fd2, KCOV_INIT_TRACE, kSize), SyscallSucceeds());
  uint64_t* area = KcovMmap(fd2);
  ASSERT_TRUE(area != MAP_FAILED);
  ASSERT_THAT(ioctl(fd2, KCOV_ENABLE, 0), SyscallSucceeds());
}
// Tests behavior for two threads trying to use the same kcov fd. The four
// atomics below implement a strict lock-step handshake between t1 and t2:
//   t1 enables -> t2 observes EINVAL -> t1 disables -> t2 enables and exits
//   -> t1 re-enables without a fresh mmap.
TEST(KcovTest, MultipleThreads) {
  SKIP_IF(!ASSERT_NO_ERRNO_AND_VALUE(HaveCapability((CAP_DAC_OVERRIDE))));
  int fd;
  ASSERT_THAT(fd = open(kcovPath, O_RDWR),
              AnyOf(SyscallSucceeds(), SyscallFailsWithErrno(ENOENT)));
  // Kcov not available.
  SKIP_IF(errno == ENOENT);
  auto fd_closer = Cleanup([fd]() { close(fd); });
  // Test the behavior of multiple threads trying to use the same kcov fd
  // simultaneously.
  std::atomic<bool> t1_enabled(false), t1_disabled(false), t2_failed(false),
      t2_exited(false);
  auto t1 = ScopedThread([&] {
    ASSERT_THAT(ioctl(fd, KCOV_INIT_TRACE, kSize), SyscallSucceeds());
    uint64_t* area = KcovMmap(fd);
    ASSERT_TRUE(area != MAP_FAILED);
    ASSERT_THAT(ioctl(fd, KCOV_ENABLE, 0), SyscallSucceeds());
    t1_enabled = true;
    // After t2 has made sure that enabling kcov again fails, disable it.
    while (!t2_failed) {
      sched_yield();
    }
    ASSERT_THAT(ioctl(fd, KCOV_DISABLE, 0), SyscallSucceeds());
    t1_disabled = true;
    // Wait for t2 to enable kcov and then exit, after which we should be able
    // to enable kcov again, without needing to set up a new memory mapping.
    while (!t2_exited) {
      sched_yield();
    }
    ASSERT_THAT(ioctl(fd, KCOV_ENABLE, 0), SyscallSucceeds());
  });
  auto t2 = ScopedThread([&] {
    // Wait for t1 to enable kcov, and make sure that enabling kcov again fails.
    while (!t1_enabled) {
      sched_yield();
    }
    ASSERT_THAT(ioctl(fd, KCOV_ENABLE, 0), SyscallFailsWithErrno(EINVAL));
    t2_failed = true;
    // Wait for t1 to disable kcov, after which using fd should now succeed.
    while (!t1_disabled) {
      sched_yield();
    }
    uint64_t* area = KcovMmap(fd);
    ASSERT_TRUE(area != MAP_FAILED);
    ASSERT_THAT(ioctl(fd, KCOV_ENABLE, 0), SyscallSucceeds());
  });
  // t2_exited is only set after an explicit Join so that t1's final enable
  // happens strictly after t2's thread has terminated.
  t2.Join();
  t2_exited = true;
}
} // namespace
} // namespace testing
} // namespace gvisor
| 2,530 |
316 | import numpy as np
import pyclipper
#from shapely import geometry
from sl_utils import rbox_to_polygon, polygon_to_rbox
from ssd_metric import fscore
def evaluate_polygonal_results(ground_truth, detection_results, iou_thresh=0.5):
    """Evaluate polygonal text detection results and return TP, FP, FN counts.

    # Arguments
        ground_truth: List (one entry per sample) of ground truth polygon
            arrays with shape (objects, 4 x xy)
        detection_results: List of corresponding detection polygon arrays
            with shape (objects, 4 x xy)
        iou_thresh: Minimum intersection over union required to associate
            a detected polygon with a ground truth polygon.

    # Returns
        TP_sum: number of True Positive detections
        FP_sum: number of False Positive detections
        FN_sum: number of False Negative detections
    """
    # We do not sort by confidence here; all detections belong to the single
    # class "text".
    gt = ground_truth
    dt = detection_results
    TP = []
    FP = []
    FN_sum = 0
    for i in range(len(gt)):  # samples
        gt_polys = [np.reshape(gt[i][j, :], (-1, 2)) for j in range(len(gt[i]))]
        dt_polys = [np.reshape(dt[i][j, :], (-1, 2)) for j in range(len(dt[i]))]
        # Prepare polygons for pyclipper: it operates on integer coordinates
        # (hence the scale factor) and is much faster than shapely here.
        scale = 1e5
        gt_polys = [np.asarray(p * scale, dtype=np.int64) for p in gt_polys]
        dt_polys = [np.asarray(p * scale, dtype=np.int64) for p in dt_polys]
        num_dt = len(dt_polys)
        num_gt = len(gt_polys)
        TP_img = np.zeros(num_dt)
        FP_img = np.zeros(num_dt)
        # assignment[j] becomes True once ground truth polygon j is matched.
        # BUG FIX: np.bool was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin bool is the documented replacement.
        assignment = np.zeros(num_gt, dtype=bool)
        for k in range(num_dt):  # detections
            # EDGE CASE FIX: with no ground truth in this sample, np.argmax on
            # an empty array would raise; every detection is a false positive.
            if num_gt == 0:
                FP_img[k] = 1
                continue
            poly1 = dt_polys[k]
            gt_iou = []
            for j in range(num_gt):  # ground truth
                poly2 = gt_polys[j]
                # Intersection over union via pyclipper polygon clipping.
                pc = pyclipper.Pyclipper()
                pc.AddPath(poly1, pyclipper.PT_CLIP, True)
                pc.AddPath(poly2, pyclipper.PT_SUBJECT, True)
                I = pc.Execute(pyclipper.CT_INTERSECTION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD)
                if len(I) > 0:
                    U = pc.Execute(pyclipper.CT_UNION, pyclipper.PFT_EVENODD, pyclipper.PFT_EVENODD)
                    IoU = pyclipper.Area(I[0]) / pyclipper.Area(U[0])
                else:
                    IoU = 0.0
                gt_iou.append(IoU)
            gt_iou = np.array(gt_iou)
            max_gt_idx = np.argmax(gt_iou)
            # A detection is a true positive if its best-matching ground truth
            # exceeds the IoU threshold and has not been claimed yet.
            # TODO: resolve conflicts by highest IoU among competing
            # detections rather than first-come-first-served order.
            if gt_iou[max_gt_idx] > iou_thresh and not assignment[max_gt_idx]:
                TP_img[k] = 1
                assignment[max_gt_idx] = True
            else:
                FP_img[k] = 1
        # Unmatched ground truth polygons are false negatives.
        FN_sum += int(np.sum(np.logical_not(assignment)))
        TP.append(TP_img)
        FP.append(FP_img)
    # EDGE CASE FIX: np.concatenate raises on an empty list, so guard against
    # an empty sample list. (Previously-unreachable precision/recall printout
    # after the return statement has been removed.)
    TP_sum = np.sum(np.concatenate(TP)) if TP else 0
    FP_sum = np.sum(np.concatenate(FP)) if FP else 0
    return TP_sum, FP_sum, FN_sum
def evaluate_results(ground_truth, detection_results, image_size=(512,512), iou_thresh=0.5):
    """Scale normalized ground truth boxes to pixel coordinates, convert
    detected rboxes to polygons and delegate to evaluate_polygonal_results.
    """
    h, w = image_size
    pixel_scale = np.tile((w, h), 4)
    gt_pixel = [sample[:, 0:8] * pixel_scale for sample in ground_truth]
    dt_polygons = []
    for sample in detection_results:
        dt_polygons.append(np.array([rbox_to_polygon(det[:5]) for det in sample]))
    return evaluate_polygonal_results(gt_pixel, dt_polygons, iou_thresh)
| 2,517 |
636 | package cn.org.atool.fluent.mybatis.test1.ifs;
import cn.org.atool.fluent.mybatis.customize.mapper.StudentBatchMapper;
import cn.org.atool.fluent.mybatis.generator.shared2.entity.StudentEntity;
import cn.org.atool.fluent.mybatis.generator.shared2.mapper.StudentMapper;
import cn.org.atool.fluent.mybatis.generator.shared2.wrapper.StudentUpdate;
import cn.org.atool.fluent.mybatis.test1.BaseTest;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.test4j.hamcrest.matcher.string.StringMode;
import java.util.Arrays;
import java.util.List;
import static cn.org.atool.fluent.mybatis.utility.PoJoHelper.getFields;
/**
 * Tests applying SQL CASE WHEN expressions via fluent-mybatis {@code applyFunc},
 * including batched case-when updates keyed by id. Each test issues an update
 * and then verifies the generated SQL (and bound parameters) captured by the
 * test database recorder.
 */
public class CaseFuncTest extends BaseTest {
    @Autowired
    private StudentMapper mapper;
    @Autowired
    private StudentBatchMapper batchMapper;
    @Test
    public void test_applyFunc() throws Exception {
        // CASE WHEN with literal values inlined directly in the SQL fragment.
        StudentUpdate update = StudentUpdate.emptyUpdater()
            .set.address().applyFunc("case id " +
                "when 1 then 'address 1' " +
                "when 2 then 'address 2' " +
                "else 'address 3' end")
            .end()
            .where.id().eq(2).end();
        mapper.updateBy(update);
        // Verify the generated SQL statement
        db.sqlList().wantFirstSql()
            .eq("UPDATE fluent_mybatis.student " +
                    "SET `gmt_modified` = now(), " +
                    "`address` = case id when 1 then 'address 1' when 2 then 'address 2' else 'address 3' end " +
                    "WHERE `id` = ?",
                StringMode.SameAsSpace);
    }
    @Test
    public void test_mybatis_batch() {
        // Batch update through the plain MyBatis mapper for comparison.
        batchMapper.updateBatchByIds(Arrays.asList(
            new StudentEntity().setId(1L).setAddress("address 1").setAge(23),
            new StudentEntity().setId(2L).setAddress("address 2").setAge(24),
            new StudentEntity().setId(3L).setAddress("address 3").setAge(25)
        ));
        /* Verify the executed SQL statement */
        db.sqlList().wantFirstSql().eq("" +
                "update student " +
                "set address =case id when ? then ? when ? then ? when ? then ? end, " +
                "age =case id when ? then ? when ? then ? when ? then ? end " +
                "where id in ( ? , ? , ? )"
            , StringMode.SameAsSpace);
    }
    @Test
    public void test_fluentMybatisBatch() throws Exception {
        // CASE WHEN with placeholder parameters supplied to applyFunc.
        final String CaseWhen = "case id " +
            "when 1 then ? " +
            "when 2 then ? " +
            "else ? end";
        StudentUpdate update = StudentUpdate.emptyUpdater()
            .set.address().applyFunc(CaseWhen, "address 1", "address 2", "address 3")
            .set.age().applyFunc(CaseWhen, 23, 24, 25)
            .end()
            .where.id().in(new long[]{1L, 2L, 3L}).end();
        mapper.updateBy(update);
        // Verify the generated SQL statement
        db.sqlList().wantFirstSql()
            .eq("UPDATE fluent_mybatis.student " +
                    "SET `gmt_modified` = now(), " +
                    "`address` = case id when 1 then ? when 2 then ? else ? end, " +
                    "`age` = case id when 1 then ? when 2 then ? else ? end " +
                    "WHERE `id` IN (?, ?, ?)",
                StringMode.SameAsSpace);
        // Verify the bound parameter values
        db.sqlList().wantFirstPara()
            .eqReflect(new Object[]{"address 1", "address 2", "address 3", 23, 24, 25, 1L, 2L, 3L});
    }
    @Test
    public void test_fluentMybatisBatch2() throws Exception {
        // Same as above, but the placeholder values are extracted from
        // entities via getFields instead of being listed explicitly.
        List<StudentEntity> students = Arrays.asList(
            new StudentEntity().setId(1L).setAddress("address 1").setAge(23),
            new StudentEntity().setId(2L).setAddress("address 2").setAge(24),
            new StudentEntity().setId(3L).setAddress("address 3").setAge(25));
        final String CaseWhen = "case id " +
            "when 1 then ? " +
            "when 2 then ? " +
            "else ? end";
        StudentUpdate update = StudentUpdate.emptyUpdater()
            .set.address().applyFunc(CaseWhen, getFields(students, StudentEntity::getAddress))
            .set.age().applyFunc(CaseWhen, getFields(students, StudentEntity::getAge))
            .end()
            .where.id().in(getFields(students, StudentEntity::getId)).end();
        mapper.updateBy(update);
        // Verify the generated SQL statement
        db.sqlList().wantFirstSql()
            .eq("UPDATE fluent_mybatis.student " +
                    "SET `gmt_modified` = now(), " +
                    "`address` = case id when 1 then ? when 2 then ? else ? end, " +
                    "`age` = case id when 1 then ? when 2 then ? else ? end " +
                    "WHERE `id` IN (?, ?, ?)",
                StringMode.SameAsSpace);
        // Verify the bound parameter values
        db.sqlList().wantFirstPara()
            .eqReflect(new Object[]{"address 1", "address 2", "address 3", 23, 24, 25, 1L, 2L, 3L});
    }
}
| 2,327 |
3,212 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.record.sink.db;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnEnabled;
import org.apache.nifi.components.AllowableValue;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.context.PropertyContext;
import org.apache.nifi.controller.AbstractControllerService;
import org.apache.nifi.controller.ConfigurationContext;
import org.apache.nifi.controller.ControllerServiceInitializationContext;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.record.sink.RecordSinkService;
import org.apache.nifi.serialization.WriteResult;
import org.apache.nifi.serialization.record.DataType;
import org.apache.nifi.serialization.record.Record;
import org.apache.nifi.serialization.record.RecordField;
import org.apache.nifi.serialization.record.RecordSchema;
import org.apache.nifi.serialization.record.RecordSet;
import org.apache.nifi.serialization.record.util.DataTypeUtils;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLDataException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
@Tags({ "db", "jdbc", "database", "connection", "record" })
@CapabilityDescription("Provides a service to write records using a configured database connection.")
public class DatabaseRecordSink extends AbstractControllerService implements RecordSinkService {
    // --- Allowable values for the unmatched-field / unmatched-column behavior properties ---
    static final AllowableValue IGNORE_UNMATCHED_FIELD = new AllowableValue("Ignore Unmatched Fields", "Ignore Unmatched Fields",
            "Any field in the document that cannot be mapped to a column in the database is ignored");
    static final AllowableValue FAIL_UNMATCHED_FIELD = new AllowableValue("Fail on Unmatched Fields", "Fail on Unmatched Fields",
            "If the document has any field that cannot be mapped to a column in the database, the FlowFile will be routed to the failure relationship");
    static final AllowableValue IGNORE_UNMATCHED_COLUMN = new AllowableValue("Ignore Unmatched Columns",
            "Ignore Unmatched Columns",
            "Any column in the database that does not have a field in the document will be assumed to not be required.  No notification will be logged");
    static final AllowableValue WARNING_UNMATCHED_COLUMN = new AllowableValue("Warn on Unmatched Columns",
            "Warn on Unmatched Columns",
            "Any column in the database that does not have a field in the document will be assumed to not be required.  A warning will be logged");
    static final AllowableValue FAIL_UNMATCHED_COLUMN = new AllowableValue("Fail on Unmatched Columns",
            "Fail on Unmatched Columns",
            "A flow will fail if any column in the database that does not have a field in the document.  An error will be logged");

    // --- Property descriptors exposed by this controller service ---
    static final PropertyDescriptor DBCP_SERVICE = new PropertyDescriptor.Builder()
            .name("db-record-sink-dcbp-service")
            .displayName("Database Connection Pooling Service")
            .description("The Controller Service that is used to obtain a connection to the database for sending records.")
            .required(true)
            .identifiesControllerService(DBCPService.class)
            .build();
    static final PropertyDescriptor CATALOG_NAME = new PropertyDescriptor.Builder()
            .name("db-record-sink-catalog-name")
            .displayName("Catalog Name")
            .description("The name of the catalog that the statement should update. This may not apply for the database that you are updating. In this case, leave the field empty")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    static final PropertyDescriptor SCHEMA_NAME = new PropertyDescriptor.Builder()
            .name("db-record-sink-schema-name")
            .displayName("Schema Name")
            .description("The name of the schema that the table belongs to. This may not apply for the database that you are updating. In this case, leave the field empty")
            .required(false)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder()
            .name("db-record-sink-table-name")
            .displayName("Table Name")
            .description("The name of the table that the statement should affect.")
            .required(true)
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();
    static final PropertyDescriptor TRANSLATE_FIELD_NAMES = new PropertyDescriptor.Builder()
            .name("db-record-sink-translate-field-names")
            .displayName("Translate Field Names")
            .description("If true, the Processor will attempt to translate field names into the appropriate column names for the table specified. "
                    + "If false, the field names must match the column names exactly, or the column will not be updated")
            .allowableValues("true", "false")
            .defaultValue("true")
            .build();
    static final PropertyDescriptor UNMATCHED_FIELD_BEHAVIOR = new PropertyDescriptor.Builder()
            .name("db-record-sink-unmatched-field-behavior")
            .displayName("Unmatched Field Behavior")
            .description("If an incoming record has a field that does not map to any of the database table's columns, this property specifies how to handle the situation")
            .allowableValues(IGNORE_UNMATCHED_FIELD, FAIL_UNMATCHED_FIELD)
            .defaultValue(IGNORE_UNMATCHED_FIELD.getValue())
            .build();
    static final PropertyDescriptor UNMATCHED_COLUMN_BEHAVIOR = new PropertyDescriptor.Builder()
            .name("db-record-sink-unmatched-column-behavior")
            .displayName("Unmatched Column Behavior")
            .description("If an incoming record does not have a field mapping for all of the database table's columns, this property specifies how to handle the situation")
            .allowableValues(IGNORE_UNMATCHED_COLUMN, WARNING_UNMATCHED_COLUMN, FAIL_UNMATCHED_COLUMN)
            .defaultValue(FAIL_UNMATCHED_COLUMN.getValue())
            .build();
    static final PropertyDescriptor QUOTED_IDENTIFIERS = new PropertyDescriptor.Builder()
            .name("db-record-sink-quoted-identifiers")
            .displayName("Quote Column Identifiers")
            .description("Enabling this option will cause all column names to be quoted, allowing you to use reserved words as column names in your tables.")
            .allowableValues("true", "false")
            .defaultValue("false")
            .build();
    static final PropertyDescriptor QUOTED_TABLE_IDENTIFIER = new PropertyDescriptor.Builder()
            .name("db-record-sink-quoted-table-identifiers")
            .displayName("Quote Table Identifiers")
            .description("Enabling this option will cause the table name to be quoted to support the use of special characters in the table name.")
            .allowableValues("true", "false")
            .defaultValue("false")
            .build();
    static final PropertyDescriptor QUERY_TIMEOUT = new PropertyDescriptor.Builder()
            .name("db-record-sink-query-timeout")
            .displayName("Max Wait Time")
            .description("The maximum amount of time allowed for a running SQL statement "
                    + ", zero means there is no limit. Max time less than 1 second will be equal to zero.")
            .defaultValue("0 seconds")
            .required(true)
            .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();

    // Populated in init(); exposed via getSupportedPropertyDescriptors().
    private List<PropertyDescriptor> properties;
    // Captured in onEnabled() for later property evaluation in sendData().
    private volatile ConfigurationContext context;
    private volatile DBCPService dbcpService;
@Override
protected void init(final ControllerServiceInitializationContext context) {
final List<PropertyDescriptor> properties = new ArrayList<>();
properties.add(DBCP_SERVICE);
properties.add(CATALOG_NAME);
properties.add(SCHEMA_NAME);
properties.add(TABLE_NAME);
properties.add(TRANSLATE_FIELD_NAMES);
properties.add(UNMATCHED_FIELD_BEHAVIOR);
properties.add(UNMATCHED_COLUMN_BEHAVIOR);
properties.add(QUOTED_IDENTIFIERS);
properties.add(QUOTED_TABLE_IDENTIFIER);
properties.add(QUERY_TIMEOUT);
this.properties = Collections.unmodifiableList(properties);
}
    @Override
    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        // Unmodifiable list built once in init().
        return properties;
    }
    /**
     * Captures the configuration context and resolves the DBCP connection
     * pool service when the controller service is enabled.
     */
    @OnEnabled
    public void onEnabled(final ConfigurationContext context) {
        this.context = context;
        dbcpService = context.getProperty(DBCP_SERVICE).asControllerService(DBCPService.class);
    }
@Override
public WriteResult sendData(RecordSet recordSet, Map<String, String> attributes, boolean sendZeroResults) throws IOException {
Boolean originalAutoCommit = null;
Connection connection = null;
WriteResult writeResult = null;
try {
connection = dbcpService.getConnection(attributes);
originalAutoCommit = connection.getAutoCommit();
connection.setAutoCommit(false);
final DMLSettings settings = new DMLSettings(context);
final String catalog = context.getProperty(CATALOG_NAME).evaluateAttributeExpressions().getValue();
final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions().getValue();
final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions().getValue();
final int queryTimeout = context.getProperty(QUERY_TIMEOUT).evaluateAttributeExpressions().asTimePeriod(TimeUnit.SECONDS).intValue();
// Ensure the table name has been set, the generated SQL statements (and TableSchema cache) will need it
if (StringUtils.isEmpty(tableName)) {
throw new IOException("Cannot process because Table Name is null or empty");
}
TableSchema tableSchema = TableSchema.from(connection, catalog, schemaName, tableName, settings.translateFieldNames);
// build the fully qualified table name
final StringBuilder tableNameBuilder = new StringBuilder();
if (catalog != null) {
tableNameBuilder.append(catalog).append(".");
}
if (schemaName != null) {
tableNameBuilder.append(schemaName).append(".");
}
tableNameBuilder.append(tableName);
final String fqTableName = tableNameBuilder.toString();
RecordSchema recordSchema = recordSet.getSchema();
if (recordSchema == null) {
throw new IllegalArgumentException("No record schema specified!");
}
final SqlAndIncludedColumns sqlHolder;
sqlHolder = generateInsert(recordSchema, fqTableName, tableSchema, settings);
try (PreparedStatement ps = connection.prepareStatement(sqlHolder.getSql())) {
try {
ps.setQueryTimeout(queryTimeout); // timeout in seconds
} catch (SQLException se) {
// If the driver doesn't support query timeout, then assume it is "infinite". Allow a timeout of zero only
if (queryTimeout > 0) {
throw se;
}
}
Record currentRecord;
List<Integer> fieldIndexes = sqlHolder.getFieldIndexes();
int recordCount = 0;
while ((currentRecord = recordSet.next()) != null) {
Object[] values = currentRecord.getValues();
List<DataType> dataTypes = currentRecord.getSchema().getDataTypes();
if (values != null) {
if (fieldIndexes != null) {
for (int i = 0; i < fieldIndexes.size(); i++) {
final int currentFieldIndex = fieldIndexes.get(i);
final Object currentValue = values[currentFieldIndex];
final DataType dataType = dataTypes.get(currentFieldIndex);
final int sqlType = DataTypeUtils.getSQLTypeValue(dataType);
ps.setObject(i + 1, currentValue, sqlType);
}
} else {
// If there's no index map, assume all values are included and set them in order
for (int i = 0; i < values.length; i++) {
final Object currentValue = values[i];
final DataType dataType = dataTypes.get(i);
final int sqlType = DataTypeUtils.getSQLTypeValue(dataType);
ps.setObject(i + 1, currentValue, sqlType);
}
}
ps.addBatch();
}
recordCount++;
}
ps.executeBatch();
writeResult = WriteResult.of(recordCount, attributes);
}
} catch (IOException ioe) {
throw ioe;
} catch (Exception e) {
throw new IOException("Failed to write metrics using record writer: " + e.getMessage(), e);
} finally {
if (connection != null) {
if (originalAutoCommit != null) {
try {
connection.setAutoCommit(originalAutoCommit);
} catch (Exception e) {
getLogger().debug("Error restoring auto-commit", e);
}
}
try {
connection.close();
} catch (Exception e) {
getLogger().debug("Error closing connection", e);
}
}
}
return writeResult;
}
private static String normalizeColumnName(final String colName, final boolean translateColumnNames) {
return colName == null ? null : (translateColumnNames ? colName.toUpperCase().replace("_", "") : colName);
}
private Set<String> getNormalizedColumnNames(final RecordSchema schema, final boolean translateFieldNames) {
final Set<String> normalizedFieldNames = new HashSet<>();
if (schema != null) {
schema.getFieldNames().forEach((fieldName) -> normalizedFieldNames.add(normalizeColumnName(fieldName, translateFieldNames)));
}
return normalizedFieldNames;
}
    /**
     * Builds a parameterized INSERT statement for the given record schema and
     * table schema, honoring the configured unmatched field/column behavior.
     *
     * @return the SQL text plus the list of record field indexes that map to
     *         its parameters, in parameter order
     * @throws IllegalArgumentException if a required column has no matching field
     *         and the settings demand failure
     * @throws SQLException if a field cannot be mapped (and is not ignored) or
     *         no field maps to any column
     */
    private SqlAndIncludedColumns generateInsert(final RecordSchema recordSchema, final String tableName, final TableSchema tableSchema, final DMLSettings settings)
            throws IllegalArgumentException, SQLException {
        final Set<String> normalizedFieldNames = getNormalizedColumnNames(recordSchema, settings.translateFieldNames);
        // First pass: verify every required column is covered by some record field.
        for (final String requiredColName : tableSchema.getRequiredColumnNames()) {
            final String normalizedColName = normalizeColumnName(requiredColName, settings.translateFieldNames);
            if (!normalizedFieldNames.contains(normalizedColName)) {
                String missingColMessage = "Record does not have a value for the Required column '" + requiredColName + "'";
                if (settings.failUnmappedColumns) {
                    getLogger().error(missingColMessage);
                    throw new IllegalArgumentException(missingColMessage);
                } else if (settings.warningUnmappedColumns) {
                    getLogger().warn(missingColMessage);
                }
            }
        }
        final StringBuilder sqlBuilder = new StringBuilder();
        sqlBuilder.append("INSERT INTO ");
        if (settings.quoteTableName) {
            sqlBuilder.append(tableSchema.getQuotedIdentifierString())
                    .append(tableName)
                    .append(tableSchema.getQuotedIdentifierString());
        } else {
            sqlBuilder.append(tableName);
        }
        sqlBuilder.append(" (");
        // iterate over all of the fields in the record, building the SQL statement by adding the column names
        List<String> fieldNames = recordSchema.getFieldNames();
        final List<Integer> includedColumns = new ArrayList<>();
        if (fieldNames != null) {
            int fieldCount = fieldNames.size();
            AtomicInteger fieldsFound = new AtomicInteger(0);
            for (int i = 0; i < fieldCount; i++) {
                RecordField field = recordSchema.getField(i);
                String fieldName = field.getFieldName();
                final ColumnDescription desc = tableSchema.getColumns().get(normalizeColumnName(fieldName, settings.translateFieldNames));
                if (desc == null && !settings.ignoreUnmappedFields) {
                    throw new SQLDataException("Cannot map field '" + fieldName + "' to any column in the database");
                }
                if (desc != null) {
                    // Comma-separate all but the first included column.
                    if (fieldsFound.getAndIncrement() > 0) {
                        sqlBuilder.append(", ");
                    }
                    if (settings.escapeColumnNames) {
                        sqlBuilder.append(tableSchema.getQuotedIdentifierString())
                                .append(desc.getColumnName())
                                .append(tableSchema.getQuotedIdentifierString());
                    } else {
                        sqlBuilder.append(desc.getColumnName());
                    }
                    // Remember which record field feeds this statement parameter.
                    includedColumns.add(i);
                }
            }
            // complete the SQL statements by adding ?'s for all of the values to be escaped.
            sqlBuilder.append(") VALUES (");
            sqlBuilder.append(StringUtils.repeat("?", ",", includedColumns.size()));
            sqlBuilder.append(")");
            if (fieldsFound.get() == 0) {
                throw new SQLDataException("None of the fields in the record map to the columns defined by the " + tableName + " table");
            }
        }
        return new SqlAndIncludedColumns(sqlBuilder.toString(), includedColumns);
    }
    /**
     * Immutable snapshot of the configured DML-generation options, resolved
     * once per sendData() call from the property context.
     */
    private static class DMLSettings {
        // Normalize names (uppercase, strip underscores) when matching fields to columns?
        private final boolean translateFieldNames;
        // Silently skip record fields with no matching column?
        private final boolean ignoreUnmappedFields;
        // Is the unmatched column behaviour fail or warning?
        private final boolean failUnmappedColumns;
        private final boolean warningUnmappedColumns;
        // Escape column names?
        private final boolean escapeColumnNames;
        // Quote table name?
        private final boolean quoteTableName;
        private DMLSettings(PropertyContext context) {
            translateFieldNames = context.getProperty(TRANSLATE_FIELD_NAMES).asBoolean();
            ignoreUnmappedFields = IGNORE_UNMATCHED_FIELD.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_FIELD_BEHAVIOR).getValue());
            failUnmappedColumns = FAIL_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
            warningUnmappedColumns = WARNING_UNMATCHED_COLUMN.getValue().equalsIgnoreCase(context.getProperty(UNMATCHED_COLUMN_BEHAVIOR).getValue());
            escapeColumnNames = context.getProperty(QUOTED_IDENTIFIERS).asBoolean();
            quoteTableName = context.getProperty(QUOTED_TABLE_IDENTIFIER).asBoolean();
        }
    }
/**
* A holder class for a SQL prepared statement and a BitSet indicating which columns are being inserted (to determine which values from the record to set on the statement)
* A value of null for getIncludedColumns indicates that all columns/fields should be included.
*/
static class SqlAndIncludedColumns {
String sql;
List<Integer> fieldIndexes;
/**
* Constructor
*
* @param sql The prepared SQL statement (including parameters notated by ? )
* @param fieldIndexes A List of record indexes. The index of the list is the location of the record field in the SQL prepared statement
*/
SqlAndIncludedColumns(String sql, List<Integer> fieldIndexes) {
this.sql = sql;
this.fieldIndexes = fieldIndexes;
}
String getSql() {
return sql;
}
List<Integer> getFieldIndexes() {
return fieldIndexes;
}
}
/**
 * Schema of a single database table: its columns (keyed by normalized name),
 * the names of columns a record must supply, and the identifier quote string
 * reported by the JDBC driver.
 */
static class TableSchema {
    private final List<String> requiredColumnNames;
    private final Map<String, ColumnDescription> columns;
    private final String quotedIdentifierString;

    private TableSchema(final List<ColumnDescription> columnDescriptions, final boolean translateColumnNames, final String quotedIdentifierString) {
        this.columns = new HashMap<>();
        this.requiredColumnNames = new ArrayList<>();
        this.quotedIdentifierString = quotedIdentifierString;
        for (final ColumnDescription desc : columnDescriptions) {
            // Key by the normalized name so lookups match regardless of translation.
            columns.put(normalizeColumnName(desc.columnName, translateColumnNames), desc);
            if (desc.isRequired()) {
                requiredColumnNames.add(desc.columnName);
            }
        }
    }

    Map<String, ColumnDescription> getColumns() {
        return columns;
    }

    List<String> getRequiredColumnNames() {
        return requiredColumnNames;
    }

    String getQuotedIdentifierString() {
        return quotedIdentifierString;
    }

    /**
     * Reads the schema of {@code tableName} from the connection's database metadata.
     *
     * @throws SQLException if the table does not exist or metadata access fails
     */
    static TableSchema from(final Connection conn, final String catalog, final String schema, final String tableName,
                            final boolean translateColumnNames) throws SQLException {
        final DatabaseMetaData dmd = conn.getMetaData();

        // try-with-resources: the original leaked this ResultSet (getTables was
        // called inline in the condition and never closed).
        try (final ResultSet tables = dmd.getTables(catalog, schema, tableName, null)) {
            if (!tables.next()) {
                throw new SQLException("Table " + tableName + " does not exist in the database");
            }
        }

        try (final ResultSet colrs = dmd.getColumns(catalog, schema, tableName, "%")) {
            final List<ColumnDescription> cols = new ArrayList<>();
            while (colrs.next()) {
                cols.add(ColumnDescription.from(colrs));
            }
            return new TableSchema(cols, translateColumnNames, dmd.getIdentifierQuoteString());
        }
    }
}
/**
 * Description of one table column as reported by JDBC metadata: name, SQL type,
 * whether a value is required on insert, and the column size (null if unknown).
 */
protected static class ColumnDescription {
    private final String columnName;
    private final int dataType;
    private final boolean required;
    private final Integer columnSize;

    ColumnDescription(final String columnName, final int dataType, final boolean required, final Integer columnSize) {
        this.columnName = columnName;
        this.dataType = dataType;
        this.required = required;
        this.columnSize = columnSize;
    }

    /** @return the java.sql.Types code for this column. */
    public int getDataType() {
        return dataType;
    }

    /** @return the column size, or null when the driver reported 0 (unknown/not applicable). */
    public Integer getColumnSize() {
        return columnSize;
    }

    public String getColumnName() {
        return columnName;
    }

    /** @return true when the column is non-nullable, not auto-increment, and has no default. */
    public boolean isRequired() {
        return required;
    }

    /**
     * Builds a ColumnDescription from the current row of a
     * {@code DatabaseMetaData.getColumns()} result set.
     */
    public static ColumnDescription from(final ResultSet resultSet) throws SQLException {
        final ResultSetMetaData md = resultSet.getMetaData();
        List<String> columns = new ArrayList<>();
        for (int i = 1; i < md.getColumnCount() + 1; i++) {
            columns.add(md.getColumnName(i));
        }

        // COLUMN_DEF must be read first to work around Oracle bug, see NIFI-4279 for details
        final String defaultValue = resultSet.getString("COLUMN_DEF");
        final String columnName = resultSet.getString("COLUMN_NAME");
        final int dataType = resultSet.getInt("DATA_TYPE");
        final int colSize = resultSet.getInt("COLUMN_SIZE");

        // Per JDBC, IS_NULLABLE is "YES"/"NO", or empty when unknown; some drivers
        // return null instead of empty. Treat null like empty (assume nullable)
        // rather than throwing an NPE, which was the previous behavior for null.
        final String nullableValue = resultSet.getString("IS_NULLABLE");
        final boolean isNullable = nullableValue == null || nullableValue.isEmpty() || "YES".equalsIgnoreCase(nullableValue);

        // IS_AUTOINCREMENT is optional metadata; default to "NO" when the driver omits the column.
        String autoIncrementValue = "NO";
        if (columns.contains("IS_AUTOINCREMENT")) {
            autoIncrementValue = resultSet.getString("IS_AUTOINCREMENT");
        }
        final boolean isAutoIncrement = "YES".equalsIgnoreCase(autoIncrementValue);

        final boolean required = !isNullable && !isAutoIncrement && defaultValue == null;
        // Map a reported size of 0 to null ("unknown").
        return new ColumnDescription(columnName, dataType, required, colSize == 0 ? null : colSize);
    }
}
}
| 10,742 |
52,316 | <gh_stars>1000+
import asyncio
import unittest
import time
def tearDownModule():
    # Restore the default event loop policy so later test modules are unaffected.
    asyncio.set_event_loop_policy(None)
class SlowTask:
    """Task that runs for TASK_TIMEOUT seconds, swallowing cancellation requests."""

    TASK_TIMEOUT = 0.2

    def __init__(self):
        # Becomes True once run() has survived for the full duration.
        self.exited = False

    async def run(self):
        deadline = time.monotonic() + self.TASK_TIMEOUT
        remaining = deadline - time.monotonic()
        while remaining > 0:
            try:
                await asyncio.sleep(remaining)
            except asyncio.CancelledError:
                # Deliberately ignore cancellation to emulate an uncooperative task.
                pass
            remaining = deadline - time.monotonic()
        self.exited = True
class AsyncioWaitForTest(unittest.TestCase):
    """Regression tests for asyncio.wait_for wrapping a task that ignores cancellation."""

    async def atest_asyncio_wait_for_cancelled(self):
        # The wait_for wrapper itself is cancelled while the inner task refuses
        # to die; the inner task must still be allowed to run to completion.
        t = SlowTask()
        waitfortask = asyncio.create_task(asyncio.wait_for(t.run(), t.TASK_TIMEOUT * 2))
        await asyncio.sleep(0)
        waitfortask.cancel()
        await asyncio.wait({waitfortask})
        self.assertTrue(t.exited)

    def test_asyncio_wait_for_cancelled(self):
        asyncio.run(self.atest_asyncio_wait_for_cancelled())

    async def atest_asyncio_wait_for_timeout(self):
        # wait_for times out before the task finishes; the task swallows the
        # resulting cancellation, so wait_for must still wait for it to exit.
        t = SlowTask()
        try:
            await asyncio.wait_for(t.run(), t.TASK_TIMEOUT / 2)
        except asyncio.TimeoutError:
            pass
        self.assertTrue(t.exited)

    def test_asyncio_wait_for_timeout(self):
        asyncio.run(self.atest_asyncio_wait_for_timeout())
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 713 |
560 | <gh_stars>100-1000
/*
* Copyright (c) 2018 <NAME> <<EMAIL>>
* All Rights Reserved.
*/
package me.zhanghai.android.douya.ui;
import android.content.Context;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import com.google.android.material.textfield.ExpandedHintTextInputLayout;
import me.zhanghai.android.materialedittext.MaterialEditText;
import me.zhanghai.android.materialedittext.MaterialEditTextBackgroundDrawable;
/**
* @see me.zhanghai.android.materialedittext.MaterialTextInputLayout
*/
public class ExpandedHintMaterialTextInputLayout extends ExpandedHintTextInputLayout {
private MaterialEditTextBackgroundDrawable mEditTextBackground;
public ExpandedHintMaterialTextInputLayout(Context context) {
super(context);
}
public ExpandedHintMaterialTextInputLayout(Context context, AttributeSet attrs) {
super(context, attrs);
}
public ExpandedHintMaterialTextInputLayout(Context context, AttributeSet attrs,
int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@Override
public void addView(View child, int index, ViewGroup.LayoutParams params) {
super.addView(child, index, params);
if (child instanceof MaterialEditText) {
// Just throw a ClassCastException if the background of MaterialEditText is not the one
// automatically set.
mEditTextBackground = (MaterialEditTextBackgroundDrawable) child.getBackground();
}
}
@Override
public void setError(CharSequence error) {
super.setError(error);
if (mEditTextBackground != null) {
mEditTextBackground.setError(!TextUtils.isEmpty(error));
}
}
}
| 659 |
475 | <gh_stars>100-1000
/*
* Copyright (C) 2020 The zfoo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package com.zfoo.orm.model.accessor;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.ReplaceOneModel;
import com.zfoo.orm.OrmContext;
import com.zfoo.orm.model.entity.IEntity;
import com.zfoo.protocol.collection.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import static com.mongodb.client.model.Filters.eq;
import static com.mongodb.client.model.Filters.in;
/**
* @author jaysunxiao
* @version 3.0
*/
/**
 * MongoDB-backed implementation of {@link IAccessor}. Every operation resolves
 * the target collection from the entity class via {@code OrmContext.getOrmManager()}
 * and addresses documents by their {@code _id} (the entity's {@code id()}).
 *
 * @author jaysunxiao
 * @version 3.0
 */
public class MongodbAccessor implements IAccessor {

    private static final Logger logger = LoggerFactory.getLogger(MongodbAccessor.class);

    /**
     * Inserts a single entity.
     *
     * @return true when the driver reports an inserted id
     */
    @Override
    public <E extends IEntity<?>> boolean insert(E entity) {
        var entityClazz = (Class<E>) entity.getClass();
        var collection = OrmContext.getOrmManager().getCollection(entityClazz);
        var result = collection.insertOne(entity);
        return result.getInsertedId() != null;
    }

    /**
     * Inserts all entities with a single insertMany call.
     * The collection is resolved from the runtime class of the first element;
     * assumes all entities share that type — TODO confirm callers guarantee this.
     */
    @Override
    public <E extends IEntity<?>> void batchInsert(List<E> entities) {
        if (CollectionUtils.isEmpty(entities)) {
            return;
        }
        var entityClazz = (Class<E>) entities.get(0).getClass();
        var collection = OrmContext.getOrmManager().getCollection(entityClazz);
        collection.insertMany(entities);
    }

    /**
     * Replaces the stored document whose _id equals entity.id().
     *
     * @return true when a document was modified; false when nothing matched,
     *         the stored data was already identical, or an exception occurred
     */
    @Override
    public <E extends IEntity<?>> boolean update(E entity) {
        try {
            var entityClazz = (Class<E>) entity.getClass();
            var collection = OrmContext.getOrmManager().getCollection(entityClazz);
            var filter = Filters.eq("_id", entity.id());
            var result = collection.replaceOne(filter, entity);
            if (result.getModifiedCount() <= 0) {
                logger.warn("数据库[{}]中没有[id:{}]的字段,或者需要更新的数据和数据库中的相同", entityClazz.getSimpleName(), entity.id());
                return false;
            }
            return true;
        } catch (Throwable t) {
            // Deliberate catch-all: update failures are logged, never propagated.
            logger.error("更新update未知异常", t);
        }
        return false;
    }

    /**
     * Replaces each entity's document via one unordered bulk write; a mismatch
     * between requested and modified counts is logged, not thrown.
     */
    @Override
    public <E extends IEntity<?>> void batchUpdate(List<E> entities) {
        if (CollectionUtils.isEmpty(entities)) {
            return;
        }
        try {
            var entityClazz = (Class<E>) entities.get(0).getClass();
            var collection = OrmContext.getOrmManager().getCollection(entityClazz);
            var batchList = entities.stream()
                    .map(it -> new ReplaceOneModel<E>(Filters.eq("_id", it.id()), it))
                    .collect(Collectors.toList());
            // ordered(false): keep writing the remaining documents even if one fails.
            var result = collection.bulkWrite(batchList, new BulkWriteOptions().ordered(false));
            if (result.getModifiedCount() != entities.size()) {
                logger.error("在数据库[{}]的批量更新操作中需要更新的数量[{}]和最终更新的数量[{}]不相同"
                        , entityClazz.getSimpleName(), entities.size(), result.getModifiedCount());
            }
        } catch (Throwable t) {
            logger.error("批量更新batchUpdate未知异常", t);
        }
    }

    /**
     * Deletes the document whose _id equals entity.id().
     *
     * @return true when a document was actually deleted
     */
    @Override
    public <E extends IEntity<?>> boolean delete(E entity) {
        var entityClazz = (Class<E>) entity.getClass();
        var collection = OrmContext.getOrmManager().getCollection(entityClazz);
        var result = collection.deleteOne(eq("_id", entity.id()));
        return result.getDeletedCount() > 0;
    }

    /**
     * Deletes by primary key.
     *
     * @return true when a document was actually deleted
     */
    @Override
    public <E extends IEntity<?>> boolean delete(Object pk, Class<E> entityClazz) {
        var collection = OrmContext.getOrmManager().getCollection(entityClazz);
        var result = collection.deleteOne(eq("_id", pk));
        return result.getDeletedCount() > 0;
    }

    /** Deletes all given entities in one deleteMany keyed by their ids. */
    @Override
    public <E extends IEntity<?>> void batchDelete(List<E> entities) {
        if (CollectionUtils.isEmpty(entities)) {
            return;
        }
        var entityClazz = (Class<E>) entities.get(0).getClass();
        var collection = OrmContext.getOrmManager().getCollection(entityClazz);
        var ids = entities.stream().map(it -> (it).id()).collect(Collectors.toList());
        collection.deleteMany(in("_id", ids));
    }

    /** Deletes all documents whose _id is in the given primary-key list. */
    @Override
    public <E extends IEntity<?>> void batchDelete(List<?> pks, Class<E> entityClazz) {
        var collection = OrmContext.getOrmManager().getCollection(entityClazz);
        collection.deleteMany(in("_id", pks));
    }

    /**
     * Loads a single entity by primary key.
     *
     * @return the entity, or null when no document matches
     */
    @Override
    public <E extends IEntity<?>> E load(Object pk, Class<E> entityClazz) {
        var collection = OrmContext.getOrmManager().getCollection(entityClazz);
        var result = new ArrayList<E>(1);
        collection.find(eq("_id", pk)).forEach((Consumer<E>) document -> result.add(document));
        if (CollectionUtils.isEmpty(result)) {
            return null;
        }
        return result.get(0);
    }
}
| 2,301 |
1,561 | <filename>src/soter/ed25519/ge_p3_sub.c
/*
* Copyright (c) 2015 Cossack Labs Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ge_utils.h"
/* r = p - q, computed through the cached/completed point representations. */
void ge_p3_sub(ge_p3 *r, const ge_p3 *p, const ge_p3 *q)
{
    ge_cached cached_q;
    ge_p1p1 diff;

    /* Convert q to cached form, subtract, then bring the result back to P3. */
    ge_p3_to_cached(&cached_q, q);
    ge_sub(&diff, p, &cached_q);
    ge_p1p1_to_p3(r, &diff);
}
| 313 |
3,600 | package com.github.dreamhead.moco.mount;
import java.util.function.Predicate;
/**
 * A {@link Predicate} over strings used by Moco's mount support; exists to give
 * the predicate a domain-specific name.
 * NOTE(review): presumably evaluated against request paths or filenames — confirm at call sites.
 */
public interface MountPredicate extends Predicate<String> {
}
| 42 |
766 | /* Copyright 2013-2015 www.snakerflow.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test.task.interceptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.snaker.engine.SnakerInterceptor;
import org.snaker.engine.core.Execution;
import org.snaker.engine.entity.Task;
/**
* @author yuqs
* @since 1.0
*/
/**
 * Interceptor that logs, at INFO level, the tasks created during an execution
 * (task id, display name, creation time and actor ids).
 */
public class LocalTaskInterceptor implements SnakerInterceptor {

    private static final Logger log = LoggerFactory.getLogger(LocalTaskInterceptor.class);

    public void intercept(Execution execution) {
        // Skip all string building when INFO logging is disabled.
        if (!log.isInfoEnabled()) {
            return;
        }
        log.info("LocalTaskInterceptor start...");
        for (Task task : execution.getTasks()) {
            // StringBuilder: no synchronization needed for this method-local buffer
            // (the original used StringBuffer, whose locking is pure overhead here).
            StringBuilder buffer = new StringBuilder(100);
            buffer.append("创建任务[标识=").append(task.getId());
            buffer.append(",名称=").append(task.getDisplayName());
            buffer.append(",创建时间=").append(task.getCreateTime());
            buffer.append(",参与者={");
            if (task.getActorIds() != null) {
                for (String actor : task.getActorIds()) {
                    buffer.append(actor).append(";");
                }
            }
            buffer.append("}]");
            log.info(buffer.toString());
        }
        log.info("LocalTaskInterceptor finish...");
    }
}
| 588 |
348 | <reponame>chamberone/Leaflet.PixiOverlay<filename>docs/data/leg-t2/065/06501384.json
{"nom":"Sailhan","circ":"1ère circonscription","dpt":"Hautes-Pyrénées","inscrits":103,"abs":52,"votants":51,"blancs":4,"nuls":3,"exp":44,"res":[{"nuance":"REM","nom":"<NAME>","voix":31},{"nuance":"FI","nom":"Mme <NAME>","voix":13}]} | 135 |
2,392 | // This file is part of libigl, a simple c++ geometry processing library.
//
// Copyright (C) 2017 <NAME> <<EMAIL>>
//
// This Source Code Form is subject to the terms of the Mozilla Public License
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
#include "../../unique.h"
#include <CGAL/Exact_predicates_inexact_constructions_kernel.h>
#include <CGAL/Exact_predicates_exact_constructions_kernel.h>
#ifdef IGL_STATIC_LIBRARY
#undef IGL_STATIC_LIBRARY
#include "../../unique.cpp"
#endif
| 191 |
5,964 | <reponame>wenfeifei/miniblink49
#ifndef GDIPlusInit_h
#define GDIPlusInit_h

namespace blink {

// GDI+ image-encoder CLSIDs; NOTE(review): presumably populated by
// initGDIPlusClsids() — confirm in the implementation file.
extern CLSID s_bmpClsid;
extern CLSID s_jpgClsid;
extern CLSID s_pngClsid;

// Initializes the CLSIDs above; NOTE(review): return value presumably
// indicates success — confirm in the implementation file.
bool initGDIPlusClsids();

}

#endif // GDIPlusInit_h
1,510 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.fn.impl;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.categories.SqlFunctionTest;
import org.apache.drill.categories.UnlikelyTest;
import org.apache.drill.exec.planner.physical.PlannerSettings;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.ClusterFixtureBuilder;
import org.apache.drill.test.ClusterTest;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
/**
 * Verifies that casting an empty string yields NULL for numeric, decimal,
 * date/time, interval and to_number conversions when the
 * CAST_EMPTY_STRING_TO_NULL option is enabled (see DRILL-1874).
 */
@Category({UnlikelyTest.class, SqlFunctionTest.class})
public class TestCastEmptyStrings extends ClusterTest {

  @BeforeClass
  public static void setup() throws Exception {
    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
        // enable decimal data type
        .sessionOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY, true)
        // Enable the new cast functions (cast empty string "" to null)
        .systemOption(ExecConstants.CAST_EMPTY_STRING_TO_NULL, true);
    startCluster(builder);
  }

  @Test // see DRILL-1874
  public void testCastOptionalVarCharToNumeric() throws Exception {
    // Nullable varchar column: empty string row becomes NULL, others cast normally.
    testCastOptionalString("columns[0]", "int", "cp.`emptyStrings.csv`", null, 1, 2);
    testCastOptionalString("columns[0]", "bigint", "cp.`emptyStrings.csv`", null, 1L, 2L);
    testCastOptionalString("columns[0]", "float", "cp.`emptyStrings.csv`", null, 1.0f, 2.0f);
    testCastOptionalString("columns[0]", "double", "cp.`emptyStrings.csv`", null, 1.0, 2.0);
  }

  @Test // see DRILL-1874
  public void testCastRequiredVarCharToNumeric() throws Exception {
    // Non-nullable empty string literal must still cast to NULL.
    testCastEmptyString("int");
    testCastEmptyString("bigint");
    testCastEmptyString("float");
    testCastEmptyString("double");
  }

  @Test // see DRILL-1874
  public void testCastOptionalVarCharToDecimal() throws Exception {
    BigDecimal one = BigDecimal.valueOf(1L);
    BigDecimal two = BigDecimal.valueOf(2L);
    // Cover default precision plus each physical decimal width.
    testCastOptionalString("columns[0]", "decimal", "cp.`emptyStrings.csv`", null, one, two);
    testCastOptionalString("columns[0]", "decimal(9)", "cp.`emptyStrings.csv`", null, one, two);
    testCastOptionalString("columns[0]", "decimal(18)", "cp.`emptyStrings.csv`", null, one, two);
    testCastOptionalString("columns[0]", "decimal(28)", "cp.`emptyStrings.csv`", null, one, two);
    testCastOptionalString("columns[0]", "decimal(38)", "cp.`emptyStrings.csv`", null, one, two);
  }

  @Test // see DRILL-1874
  public void testCastRequiredVarCharToDecimal() throws Exception {
    testCastEmptyString("decimal");
    testCastEmptyString("decimal(18)");
    testCastEmptyString("decimal(28)");
    testCastEmptyString("decimal(38)");
  }

  @Test
  public void testCastRequiredVarCharToDateTime() throws Exception {
    testCastEmptyString("date");
    testCastEmptyString("time");
    testCastEmptyString("timestamp");
  }

  @Test
  public void testCastOptionalVarCharToDateTime() throws Exception {
    testCastOptionalString("dateCol", "date", "cp.`dateWithEmptyStrings.json`",
        null, null, LocalDate.of(1997, 12, 10));
    testCastOptionalString("timeCol", "time", "cp.`dateWithEmptyStrings.json`",
        null, null, LocalTime.of(7, 21, 39));
    testCastOptionalString("timestampCol", "timestamp", "cp.`dateWithEmptyStrings.json`",
        null, null, LocalDateTime.of(2003, 9, 11, 10, 1, 37));
  }

  @Test
  public void testCastRequiredVarCharToInterval() throws Exception {
    testCastEmptyString("interval year");
    testCastEmptyString("interval day");
    testCastEmptyString("interval month");
  }

  /**
   * Casts {@code column} from {@code table} to {@code asType} and checks the
   * resulting single column against {@code baselineValues} (unordered).
   */
  private void testCastOptionalString(String column, String asType, String table,
      Object... baselineValues) throws Exception {
    String query = String.format("select cast(%s as %s) c from %s", column, asType, table);
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("c")
        .baselineValuesForSingleColumn(baselineValues)
        .go();
  }

  /** Casts the literal empty string to {@code asType} and expects a single NULL. */
  private void testCastEmptyString(String asType) throws Exception {
    Object[] nullObj = new Object[] {null};
    String query = String.format("select cast('' as %s) c from (values(1))", asType);
    testBuilder()
        .sqlQuery(query)
        .unOrdered()
        .baselineColumns("c")
        .baselineValues(nullObj)
        .go();
  }

  @Test
  public void testCastOptionalVarCharToNumber() throws Exception {
    // to_number on a nullable column: empty string row becomes NULL.
    testBuilder()
        .sqlQuery("select to_number(columns[0], '#,##0.0') n from cp.`emptyStrings.csv`")
        .unOrdered()
        .baselineColumns("n")
        .baselineValuesForSingleColumn(null, 1.0, 2.0)
        .go();
  }

  @Test
  public void testCastRequiredVarCharToNumber() throws Exception {
    Object[] nullObj = new Object[] {null};
    testBuilder()
        .sqlQuery("select to_number('', '#,##0.0') n from (values(1))")
        .unOrdered()
        .baselineColumns("n")
        .baselineValues(nullObj)
        .go();
  }
}
| 2,075 |
911 | <filename>tests/test_decoded_packet_dict.py<gh_stars>100-1000
from wifipumpkin3.core.common.platforms import decoded
import unittest
# Expected output of decoded(): the same structure as the input below, with
# every bytes value replaced by its str form.
result = {
    "IP": {
        "version": 4,
        "src": "10.0.0.21",
        "dst": "192.168.3.11",
        "ihl": 5,
        "tos": 0,
    },
    "Headers": {
        "Connection": "Keep-Alive",
        "Method": "GET",
        "Path": "/generate_204",
        "Http-Version": "HTTP/1.1",
    },
}
class TestConfigPumpkinProxy(unittest.TestCase):
    """Checks that decoded() converts every bytes value in a packet dict to str."""

    def test_decoded_data(self):
        # Note: the original declared `global result`, which is unnecessary —
        # the module-level `result` is only read here, never rebound.
        data = {
            "IP": {
                "version": 4,
                "src": "10.0.0.21".encode(),
                "dst": "192.168.3.11".encode(),
                "ihl": 5,
                "tos": 0,
            },
            "Headers": {
                "Connection": "Keep-Alive".encode(),
                "Method": "GET".encode(),
                "Path": "/generate_204".encode(),
                "Http-Version": "HTTP/1.1".encode(),
            },
        }
        # decode byte array to str ascii
        with decoded(data) as data_decoded:
            self.data_decoded = data_decoded
        self.assertEqual(result, self.data_decoded)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| 697 |
456 | // SPDX-License-Identifier: BSD-3-Clause
// Copyright (c) 2004-2020 <NAME>
// All rights reserved.
#include <djvAV/DPX.h>
#include <djvAV/Cineon.h>
#include <djvSystem/FileIO.h>
using namespace djv::Core;
namespace djv
{
    namespace AV
    {
        namespace DPX
        {
            // Private implementation (pimpl) state for Read.
            struct Read::Private
            {
                Transfer transfer = Transfer::FilmPrint;
                Options options;
            };

            Read::Read() :
                _p(new Private)
            {}

            Read::~Read()
            {
                _finish();
            }

            // Factory: stores the DPX options, then runs base-class initialization.
            std::shared_ptr<Read> Read::create(
                const System::File::Info& fileInfo,
                const IO::ReadOptions& readOptions,
                const Options& options,
                const std::shared_ptr<System::TextSystem>& textSystem,
                const std::shared_ptr<System::ResourceSystem>& resourceSystem,
                const std::shared_ptr<System::LogSystem>& logSystem)
            {
                auto out = std::shared_ptr<Read>(new Read);
                out->_p->options = options;
                out->_init(fileInfo, readOptions, textSystem, resourceSystem, logSystem);
                return out;
            }

            // Reads only the header information for the file.
            IO::Info Read::_readInfo(const std::string& fileName)
            {
                auto io = System::File::IO::create();
                return _open(fileName, io);
            }

            // Reads the image data; pixel decoding is delegated to the Cineon reader.
            std::shared_ptr<Image::Data> Read::_readImage(const std::string& fileName)
            {
                auto io = System::File::IO::create();
                const auto info = _open(fileName, io);
                auto out = Cineon::Read::readImage(info, io);
                out->setPluginName(pluginName);
                return out;
            }

            // Opens the file and parses the DPX header into IO::Info
            // (DPX::read also determines the transfer function).
            IO::Info Read::_open(const std::string& fileName, const std::shared_ptr<System::File::IO>& io)
            {
                DJV_PRIVATE_PTR();
                io->open(fileName, System::File::Mode::Read);
                IO::Info info;
                info.videoSpeed = _speed;
                info.videoSequence = _sequence;
                info.video.push_back(Image::Info());
                DPX::read(io, info, p.transfer, _textSystem);
                return info;
            }

        } // namespace DPX
    } // namespace AV
} // namespace djv
| 1,258 |
// Documentation-style snippet: the cache instance is obtained elsewhere.
Cache cache = ...
// NOTE(review): the flags appear to restrict this put to the local node
// (skip remote lookup, skip loading from a cache store) — confirm against
// the Infinispan AdvancedCache/Flag documentation.
cache.getAdvancedCache()
   .withFlags(Flag.SKIP_REMOTE_LOOKUP, Flag.SKIP_CACHE_LOAD)
   .put("local", "only")
3,651 | package com.orientechnologies.orient.graph.sql;
import com.orientechnologies.orient.core.db.document.ODatabaseDocument;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.exception.OCommandExecutionException;
import com.orientechnologies.orient.core.metadata.schema.OClass;
import com.orientechnologies.orient.core.metadata.schema.OSchema;
import com.orientechnologies.orient.core.metadata.schema.OType;
import com.orientechnologies.orient.core.record.OVertex;
import com.orientechnologies.orient.core.sql.executor.ExecutionPlanPrintUtils;
import com.orientechnologies.orient.core.sql.executor.OResult;
import com.orientechnologies.orient.core.sql.executor.OResultSet;
import com.orientechnologies.orient.core.storage.ORecordDuplicatedException;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/** @author <NAME> (l.dellaquila-(at)-orientdb.com) */
public class OCreateEdgeStatementExecutionTest {
static ODatabaseDocument db;
@BeforeClass
public static void beforeClass() {
db = new ODatabaseDocumentTx("memory:OCreateEdgeStatementExecutionTest");
db.create();
OClass v = db.getMetadata().getSchema().getClass("V");
if (v == null) {
db.getMetadata().getSchema().createClass("V");
}
OClass e = db.getMetadata().getSchema().getClass("E");
if (e == null) {
db.getMetadata().getSchema().createClass("E");
}
}
@AfterClass
public static void afterClass() {
db.drop();
}
@Test
public void testCreateSingleEdge() {
OSchema schema = db.getMetadata().getSchema();
String vClass = "testCreateSingleEdgeV";
schema.createClass(vClass, schema.getClass("V"));
String eClass = "testCreateSingleEdgeE";
schema.createClass(eClass, schema.getClass("E"));
OVertex v1 = db.newVertex(vClass);
v1.setProperty("name", "v1");
v1.save();
OVertex v2 = db.newVertex(vClass);
v2.setProperty("name", "v2");
v2.save();
OResultSet createREs =
db.command(
"create edge " + eClass + " from " + v1.getIdentity() + " to " + v2.getIdentity());
ExecutionPlanPrintUtils.printExecutionPlan(createREs);
OResultSet result = db.query("select expand(out()) from " + v1.getIdentity());
Assert.assertNotNull(result);
Assert.assertTrue(result.hasNext());
OResult next = result.next();
Assert.assertNotNull(next);
Assert.assertEquals("v2", next.getProperty("name"));
result.close();
result = db.query("select expand(in()) from " + v2.getIdentity());
Assert.assertNotNull(result);
Assert.assertTrue(result.hasNext());
next = result.next();
Assert.assertNotNull(next);
Assert.assertEquals("v1", next.getProperty("name"));
result.close();
}
@Test
public void testCreateEdgeWithProperty() {
OSchema schema = db.getMetadata().getSchema();
String vClass = "testCreateEdgeWithPropertyV";
schema.createClass(vClass, schema.getClass("V"));
String eClass = "testCreateEdgeWithPropertyE";
schema.createClass(eClass, schema.getClass("E"));
OVertex v1 = db.newVertex(vClass);
v1.setProperty("name", "v1");
v1.save();
OVertex v2 = db.newVertex(vClass);
v2.setProperty("name", "v2");
v2.save();
OResultSet createREs =
db.command(
"create edge "
+ eClass
+ " from "
+ v1.getIdentity()
+ " to "
+ v2.getIdentity()
+ " set name = 'theEdge'");
ExecutionPlanPrintUtils.printExecutionPlan(createREs);
OResultSet result = db.query("select expand(outE()) from " + v1.getIdentity());
Assert.assertNotNull(result);
Assert.assertTrue(result.hasNext());
OResult next = result.next();
Assert.assertNotNull(next);
Assert.assertEquals("theEdge", next.getProperty("name"));
result.close();
}
@Test
public void testCreateTwoByTwo() {
OSchema schema = db.getMetadata().getSchema();
String vClass = "testCreateTwoByTwoV";
schema.createClass(vClass, schema.getClass("V"));
String eClass = "testCreateTwoByTwoE";
schema.createClass(eClass, schema.getClass("E"));
for (int i = 0; i < 4; i++) {
OVertex v1 = db.newVertex(vClass);
v1.setProperty("name", "v" + i);
v1.save();
}
OResultSet createREs =
db.command(
"create edge "
+ eClass
+ " from (select from "
+ vClass
+ " where name in ['v0', 'v1']) to (select from "
+ vClass
+ " where name in ['v2', 'v3'])");
ExecutionPlanPrintUtils.printExecutionPlan(createREs);
OResultSet result = db.query("select expand(out()) from " + vClass + " where name = 'v0'");
Assert.assertNotNull(result);
for (int i = 0; i < 2; i++) {
Assert.assertTrue(result.hasNext());
OResult next = result.next();
Assert.assertNotNull(next);
}
result.close();
result = db.query("select expand(in()) from " + vClass + " where name = 'v2'");
Assert.assertNotNull(result);
for (int i = 0; i < 2; i++) {
Assert.assertTrue(result.hasNext());
OResult next = result.next();
Assert.assertNotNull(next);
}
result.close();
}
@Test
public void testUpsert() {
OSchema schema = db.getMetadata().getSchema();
String vClass1 = "testUpsertV1";
OClass vclazz1 = schema.createClass(vClass1, schema.getClass("V"));
String vClass2 = "testUpsertV2";
OClass vclazz2 = schema.createClass(vClass2, schema.getClass("V"));
String eClass = "testUpsertE";
OClass eclazz = schema.createClass(eClass, schema.getClass("E"));
eclazz.createProperty("out", OType.LINK, vclazz1);
eclazz.createProperty("in", OType.LINK, vclazz2);
db.command("CREATE INDEX " + eClass + "out_in ON " + eclazz + " (out, in) UNIQUE");
for (int i = 0; i < 2; i++) {
OVertex v1 = db.newVertex(vClass1);
v1.setProperty("name", "v" + i);
v1.save();
}
for (int i = 0; i < 2; i++) {
OVertex v1 = db.newVertex(vClass2);
v1.setProperty("name", "v" + i);
v1.save();
}
db.command(
"CREATE EDGE "
+ eClass
+ " from (select from "
+ vClass1
+ " where name = 'v0') to (select from "
+ vClass2
+ " where name = 'v0') SET name = 'foo'")
.close();
OResultSet rs = db.query("SELECT FROM " + eClass);
Assert.assertTrue(rs.hasNext());
rs.next();
Assert.assertFalse(rs.hasNext());
rs.close();
db.command(
"CREATE EDGE "
+ eClass
+ " UPSERT from (select from "
+ vClass1
+ ") to (select from "
+ vClass2
+ ") SET name = 'bar'")
.close();
rs = db.query("SELECT FROM " + eclazz);
for (int i = 0; i < 4; i++) {
Assert.assertTrue(rs.hasNext());
OResult item = rs.next();
Assert.assertEquals("bar", item.getProperty("name"));
}
Assert.assertFalse(rs.hasNext());
rs.close();
}
@Test
public void testUpsertHashIndex() {
OSchema schema = db.getMetadata().getSchema();
String vClass1 = "testUpsertHashIndexV1";
OClass vclazz1 = schema.createClass(vClass1, schema.getClass("V"));
String vClass2 = "testUpsertHashIndexV2";
OClass vclazz2 = schema.createClass(vClass2, schema.getClass("V"));
String eClass = "testUpsertHashIndexE";
OClass eclazz = schema.createClass(eClass, schema.getClass("E"));
eclazz.createProperty("out", OType.LINK, vclazz1);
eclazz.createProperty("in", OType.LINK, vclazz2);
db.command("CREATE INDEX " + eClass + "out_in ON " + eclazz + " (out, in) UNIQUE");
for (int i = 0; i < 2; i++) {
OVertex v1 = db.newVertex(vClass1);
v1.setProperty("name", "v" + i);
v1.save();
}
for (int i = 0; i < 2; i++) {
OVertex v1 = db.newVertex(vClass2);
v1.setProperty("name", "v" + i);
v1.save();
}
db.command(
"CREATE EDGE "
+ eClass
+ " from (select from "
+ vClass1
+ " where name = 'v0') to (select from "
+ vClass2
+ " where name = 'v0')")
.close();
OResultSet rs = db.query("SELECT FROM " + eClass);
Assert.assertTrue(rs.hasNext());
rs.next();
Assert.assertFalse(rs.hasNext());
rs.close();
db.command(
"CREATE EDGE "
+ eClass
+ " UPSERT from (select from "
+ vClass1
+ ") to (select from "
+ vClass2
+ ")")
.close();
rs = db.query("SELECT FROM " + eclazz);
for (int i = 0; i < 4; i++) {
Assert.assertTrue(rs.hasNext());
rs.next();
}
Assert.assertFalse(rs.hasNext());
rs.close();
}
@Test
public void testBreakUniqueWithoutUpsert() {
OSchema schema = db.getMetadata().getSchema();
String vClass1 = "testBreakUniqueWithoutUpsertV1";
OClass vclazz1 = schema.createClass(vClass1, schema.getClass("V"));
String vClass2 = "testBreakUniqueWithoutUpsertV2";
OClass vclazz2 = schema.createClass(vClass2, schema.getClass("V"));
String eClass = "testBreakUniqueWithoutUpsertE";
OClass eclazz = schema.createClass(eClass, schema.getClass("E"));
eclazz.createProperty("out", OType.LINK, vclazz1);
eclazz.createProperty("in", OType.LINK, vclazz2);
db.command("CREATE INDEX " + eClass + "out_in ON " + eclazz + " (out, in) UNIQUE");
for (int i = 0; i < 2; i++) {
OVertex v1 = db.newVertex(vClass1);
v1.setProperty("name", "v" + i);
v1.save();
}
for (int i = 0; i < 2; i++) {
OVertex v1 = db.newVertex(vClass2);
v1.setProperty("name", "v" + i);
v1.save();
}
db.command(
"CREATE EDGE "
+ eClass
+ " from (select from "
+ vClass1
+ " where name = 'v0') to (select from "
+ vClass2
+ " where name = 'v0')")
.close();
OResultSet rs = db.query("SELECT FROM " + eClass);
Assert.assertTrue(rs.hasNext());
rs.next();
Assert.assertFalse(rs.hasNext());
rs.close();
try {
db.command(
"CREATE EDGE "
+ eClass
+ " from (select from "
+ vClass1
+ ") to (select from "
+ vClass2
+ ")")
.close();
Assert.fail();
} catch (ORecordDuplicatedException | OCommandExecutionException e) {
}
}
  /**
   * Verifies that CREATE EDGE ... UPSERT fails with OCommandExecutionException
   * when the target edge class has no supporting schema/unique index (note the
   * edge class is never created here — presumably UPSERT requires a unique
   * (out, in) index as in the sibling tests; confirm against the SQL docs).
   */
  @Test
  public void testUpsertNoIndex() {
    OSchema schema = db.getMetadata().getSchema();
    String vClass1 = "testUpsertNoIndexV1";
    OClass vclazz1 = schema.createClass(vClass1, schema.getClass("V"));
    String vClass2 = "testUpsertNoIndexV2";
    OClass vclazz2 = schema.createClass(vClass2, schema.getClass("V"));
    String eClass = "testUpsertNoIndexE";
    for (int i = 0; i < 2; i++) {
      OVertex v1 = db.newVertex(vClass1);
      v1.setProperty("name", "v" + i);
      v1.save();
    }
    for (int i = 0; i < 2; i++) {
      OVertex v1 = db.newVertex(vClass2);
      v1.setProperty("name", "v" + i);
      v1.save();
    }
    try {
      db.command(
          "CREATE EDGE "
              + eClass
              + " UPSERT from (select from "
              + vClass1
              + ") to (select from "
              + vClass2
              + ")")
          .close();
      Assert.fail();
    } catch (OCommandExecutionException e) {
      // expected: UPSERT is not usable without the required index/class setup
    }
  }
  /**
   * Checks that positional (?) parameters are substituted into the embedded
   * sub-queries of a CREATE EDGE statement, producing exactly one v0 -> v1
   * edge.
   */
  @Test
  public void testPositionalParams() {
    String vClass1 = "testPositionalParamsV";
    db.createVertexClass(vClass1);
    String eClass = "testPositionalParamsE";
    db.createEdgeClass(eClass);
    // Two vertices named "v0" and "v1" in the same class.
    for (int i = 0; i < 2; i++) {
      OVertex v1 = db.newVertex(vClass1);
      v1.setProperty("name", "v" + i);
      v1.save();
    }
    // "v0" binds to the first ? placeholder (from-side), "v1" to the second.
    db.command(
        "CREATE EDGE "
            + eClass
            + " from (select from "
            + vClass1
            + " WHERE name = ? ) to (select from "
            + vClass1
            + " WHERE name = ? )",
        "v0",
        "v1")
        .close();
    OResultSet result =
        db.query("select from " + eClass + " where out.name = 'v0' AND in.name = 'v1'");
    Assert.assertTrue(result.hasNext());
    result.close();
  }
}
| 5,858 |
471 | <reponame>THEGUY3ds/pirater12-luma-updater
/* Compiler.h
2015-08-02 : <NAME> : Public domain */
#ifndef __7Z_COMPILER_H
#define __7Z_COMPILER_H
/* Silences "unused variable/parameter" warnings portably: casting the
   expression to void evaluates it without using the result.  The commented
   alternative below (x=x) is kept for reference; it triggers self-assignment
   warnings on some compilers. */
#define UNUSED_VAR(x) (void)x;
/* #define UNUSED_VAR(x) x=x; */
#endif
| 112 |
2,258 | <filename>java/serving/src/test/java/feast/serving/it/ServingRedisGSRegistryIT.java
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright 2018-2021 The Feast Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package feast.serving.it;
import static org.junit.jupiter.api.Assertions.*;
import com.google.cloud.storage.*;
import com.google.cloud.storage.testing.RemoteStorageHelper;
import feast.proto.core.RegistryProto;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.DynamicPropertyRegistry;
import org.springframework.test.context.DynamicPropertySource;
/**
 * Integration test that reads the Feast registry from a Google Cloud Storage
 * bucket created for this test run and force-deleted afterwards.
 */
public class ServingRedisGSRegistryIT extends ServingBase {
  // Storage client bound to the GCP project from -DGCP_PROJECT (default "kf-feast").
  static Storage storage =
      RemoteStorageHelper.create()
          .getOptions()
          .toBuilder()
          .setProjectId(System.getProperty("GCP_PROJECT", "kf-feast"))
          .build()
          .getService();
  // Randomly named scratch bucket; holds registry.db for the duration of the run.
  static final String bucket = RemoteStorageHelper.generateBucketName();
  @DynamicPropertySource
  static void initialize(DynamicPropertyRegistry registry) {
    // Point the server at the GCS registry and poll it every second so that
    // updateRegistryFile() changes are picked up quickly.
    registry.add("feast.registry", () -> String.format("gs://%s/registry.db", bucket));
    registry.add("feast.registry-refresh-interval", () -> 1);
    ServingBase.initialize(registry);
  }
  // Uploads the registry proto to gs://<bucket>/registry.db and verifies the
  // round trip byte-for-byte.
  static void putToStorage(RegistryProto.Registry registry) {
    BlobId blobId = BlobId.of(bucket, "registry.db");
    storage.create(BlobInfo.newBuilder(blobId).build(), registry.toByteArray());
    assertArrayEquals(storage.get(blobId).getContent(), registry.toByteArray());
  }
  @BeforeAll
  static void setUp() {
    storage.create(BucketInfo.of(bucket));
    // registryProto appears to be inherited from ServingBase (not visible in
    // this file) — TODO confirm.
    putToStorage(registryProto);
  }
  @AfterAll
  static void tearDown() throws ExecutionException, InterruptedException {
    // Best-effort cleanup with a 5s deadline; deletes the bucket and contents.
    RemoteStorageHelper.forceDelete(storage, bucket, 5, TimeUnit.SECONDS);
  }
  @Override
  void updateRegistryFile(RegistryProto.Registry registry) {
    putToStorage(registry);
  }
  @TestConfiguration
  public static class GSRegistryConfig {
    @Bean
    Storage googleStorage() {
      return storage;
    }
  }
}
| 895 |
852 | import ROOT
class ElectronMVAID:
    """Lazily-initialized wrapper around the FWLite EGamma MVA estimator.

    The underlying ROOT estimator object is created immediately, but its
    ``initialize`` call (which loads the weight files) is deferred until the
    first evaluation.
    """

    # Maps the user-facing type string to the estimator's enum attribute name.
    _TYPE_TO_ATTR = {
        "Trig": "kTrig",
        "NonTrig": "kNonTrig",
        "TrigNoIP": "kTrigNoIP",
        "TrigCSA14": "kTrigCSA14",
        "NonTrigCSA14": "kNonTrigCSA14",
        "NonTrigPhys14": "kNonTrigPhys14",
    }

    def __init__(self, name, type, *xmls):
        self.name = name
        self.estimator = ROOT.heppy.EGammaMvaEleEstimatorFWLite()
        self.sxmls = ROOT.vector(ROOT.string)()
        for weight_file in xmls:
            self.sxmls.push_back(weight_file)
        # -1 is the "unrecognized" sentinel, exactly as in the if-chain this
        # dict lookup replaces.
        self.etype = -1
        attr = self._TYPE_TO_ATTR.get(type)
        if attr is not None:
            self.etype = getattr(self.estimator, attr)
        if self.etype == -1:
            raise RuntimeError("Unknown type %s" % type)
        self._init = False

    def __call__(self, ele, vtx, rho, full5x5=False, debug=False):
        """Evaluate the MVA; initializes the estimator on first use."""
        if not self._init:
            self.estimator.initialize(self.name, self.etype, True, self.sxmls)
            self._init = True
        return self.estimator.mvaValue(ele, vtx, rho, full5x5, debug)
# Pre-configured MVA ID instances, one per training / working point.  Weight
# files are resolved lazily on first call (see ElectronMVAID.__call__).
ElectronMVAID_Trig = ElectronMVAID("BDT", "Trig",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigV0_Cat1.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigV0_Cat2.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigV0_Cat3.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigV0_Cat4.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigV0_Cat5.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigV0_Cat6.weights.xml.gz",
)
ElectronMVAID_NonTrig = ElectronMVAID("BDT", "NonTrig",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_NonTrigV0_Cat1.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_NonTrigV0_Cat2.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_NonTrigV0_Cat3.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_NonTrigV0_Cat4.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_NonTrigV0_Cat5.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_NonTrigV0_Cat6.weights.xml.gz",
)
ElectronMVAID_TrigNoIP = ElectronMVAID("BDT", "TrigNoIP",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigNoIPV0_2012_Cat1.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigNoIPV0_2012_Cat2.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigNoIPV0_2012_Cat3.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigNoIPV0_2012_Cat4.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigNoIPV0_2012_Cat5.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/Electrons_BDTG_TrigNoIPV0_2012_Cat6.weights.xml.gz",
)
# CSA14 / Phys14 campaign variants: bx50/bx25 refer to 50ns vs 25ns bunch
# spacing scenarios encoded in the weight-file names.
ElectronMVAID_TrigCSA14bx50 = ElectronMVAID("BDT", "TrigCSA14",
    "EgammaAnalysis/ElectronTools/data/CSA14/TrigIDMVA_50ns_EB_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/CSA14/TrigIDMVA_50ns_EE_BDT.weights.xml.gz",
)
ElectronMVAID_TrigCSA14bx25 = ElectronMVAID("BDT", "TrigCSA14",
    "EgammaAnalysis/ElectronTools/data/CSA14/TrigIDMVA_25ns_EB_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/CSA14/TrigIDMVA_25ns_EE_BDT.weights.xml.gz",
)
ElectronMVAID_NonTrigCSA14bx25 = ElectronMVAID("BDT", "NonTrigCSA14",
    "EgammaAnalysis/ElectronTools/data/CSA14/EIDmva_EB_5_25ns_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/CSA14/EIDmva_EE_5_25ns_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/CSA14/EIDmva_EB_10_25ns_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/CSA14/EIDmva_EE_10_25ns_BDT.weights.xml.gz",
)
ElectronMVAID_NonTrigCSA14bx50 = ElectronMVAID("BDT", "NonTrigCSA14",
    "EgammaAnalysis/ElectronTools/data/CSA14/EIDmva_EB_5_50ns_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/CSA14/EIDmva_EE_5_50ns_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/CSA14/EIDmva_EB_10_50ns_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/CSA14/EIDmva_EE_10_50ns_BDT.weights.xml.gz",
)
ElectronMVAID_NonTrigPhys14 = ElectronMVAID("BDT", "NonTrigPhys14",
    "EgammaAnalysis/ElectronTools/data/PHYS14/EIDmva_EB1_5_oldscenario2phys14_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/PHYS14/EIDmva_EB2_5_oldscenario2phys14_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/PHYS14/EIDmva_EE_5_oldscenario2phys14_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/PHYS14/EIDmva_EB1_10_oldscenario2phys14_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/PHYS14/EIDmva_EB2_10_oldscenario2phys14_BDT.weights.xml.gz",
    "EgammaAnalysis/ElectronTools/data/PHYS14/EIDmva_EE_10_oldscenario2phys14_BDT.weights.xml.gz",
)
# Lookup table so callers can select an ID by configuration string.
ElectronMVAID_ByName = {
    'Trig':ElectronMVAID_Trig,
    'NonTrig':ElectronMVAID_NonTrig,
    'TrigNoIP':ElectronMVAID_TrigNoIP,
    'TrigCSA14bx50':ElectronMVAID_TrigCSA14bx50,
    'TrigCSA14bx25':ElectronMVAID_TrigCSA14bx25,
    'NonTrigCSA14bx25':ElectronMVAID_NonTrigCSA14bx25,
    'NonTrigCSA14bx50':ElectronMVAID_NonTrigCSA14bx50,
    'NonTrigPhys14':ElectronMVAID_NonTrigPhys14,
}
| 2,602 |
1,987 | <filename>social/backends/yandex.py
from social_core.backends.yandex import YandexOpenId, YandexOAuth2, YaruOAuth2
| 45 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.web.jsf.api.facesmodel;
import java.util.List;
import org.netbeans.modules.web.jsf.impl.facesmodel.JSFConfigQNames;
/**
* The "locale-config" element allows the app developer to
* declare the supported locales for this application.
*
* @author <NAME>
*/
public interface LocaleConfig extends ApplicationElement, IdentifiableElement {
    /**
     * Property name of the <default-locale> element.
     */
    public static final String DEFAULT_LOCALE = JSFConfigQNames.DEFAULT_LOCALE.getLocalName();
    /**
     * Property name of the <supported-locale> element.
     */
    public static final String SUPPORTED_LOCALE = JSFConfigQNames.SUPPORTED_LOCALE.getLocalName();
    /**
     * The "default-locale" element declares the default locale
     * for this application instance.
     *
     * @return the default locale
     */
    DefaultLocale getDefaultLocale();
    /**
     * The "default-locale" element declares the default locale
     * for this application instance.
     *
     * It must be specified as :language:[_:country:[_:variant:]]
     * without the colons, for example "ja_JP_SJIS". The
     * separators between the segments may be '-' or '_'.
     * @param locale the default locale
     */
    void setDefaultLocale(DefaultLocale locale);
    /**
     * The "supported-locale" element allows authors to declare
     * which locales are supported in this application instance.
     *
     * @return a list of supported locales
     */
    List<SupportedLocale> getSupportedLocales();
    /**
     * The "supported-locale" element allows authors to declare
     * which locales are supported in this application instance.
     *
     * It must be specified as :language:[_:country:[_:variant:]]
     * without the colons, for example "ja_JP_SJIS". The
     * separators between the segments may be '-' or '_'.
     * @param locale the supported locale to append
     */
    void addSupportedLocales(SupportedLocale locale);
    /**
     * The "supported-locale" element allows authors to declare
     * which locales are supported in this application instance.
     *
     * It must be specified as :language:[_:country:[_:variant:]]
     * without the colons, for example "ja_JP_SJIS". The
     * separators between the segments may be '-' or '_'.
     * @param index the position at which the locale will be inserted
     * @param locale the supported locale
     */
    void addSupportedLocales(int index, SupportedLocale locale);
    /**
     * Removes the given locale from the list of supported locales.
     * @param locale the supported locale to remove
     */
    void removeSupportedLocale(SupportedLocale locale);
}
| 1,048 |
923 | package core.webui.server.handlers.internals;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.nio.protocol.HttpAsyncExchange;
import org.apache.http.protocol.HttpContext;
import core.webui.server.handlers.AbstractSingleMethodHttpHandler;
import core.webui.webcommon.HttpServerUtilities;
import utilities.json.JSONUtility;
import utilities.json.Jsonizer;
/**
 * Handles GET requests asking for file-system path completions.  Given a
 * {@code path} query parameter, responds with a JSON object whose
 * {@code paths} array contains: nothing (path does not exist), the absolute
 * path itself (regular file), or the directory's entries (directory).
 */
public class GetPathSuggestionHandler extends AbstractSingleMethodHttpHandler {
	public GetPathSuggestionHandler() {
		super(AbstractSingleMethodHttpHandler.GET_METHOD);
	}

	@Override
	protected Void handleAllowedRequestWithBackend(HttpRequest request, HttpAsyncExchange exchange, HttpContext context) throws HttpException, IOException {
		Map<String, String> params = HttpServerUtilities.parseGetParameters(request.getRequestLine().getUri());
		if (params == null) {
			return HttpServerUtilities.prepareHttpResponse(exchange, 400, "Failed to parse GET parameters.");
		}
		String path = params.get("path");
		if (path == null) {
			return HttpServerUtilities.prepareHttpResponse(exchange, 400, "Path must be provided.");
		}
		if (path.isEmpty()) {
			path = "."; // empty input means "suggest from the working directory"
		}

		Path p = Paths.get(path);
		if (!Files.exists(p)) {
			return paths(exchange);
		}
		if (Files.isRegularFile(p)) {
			return paths(exchange, p.toAbsolutePath().toString());
		}
		if (Files.isDirectory(p)) {
			// listFiles() returns null on I/O error or permission denial;
			// treat that the same as "no suggestions" instead of throwing NPE.
			File[] files = p.toFile().listFiles();
			if (files == null) {
				return paths(exchange);
			}
			List<String> suggested = Arrays.stream(files).map(File::getAbsolutePath).collect(Collectors.toList());
			return paths(exchange, suggested);
		}
		return paths(exchange);
	}

	// Convenience overload for a fixed set of suggestions.
	private Void paths(HttpAsyncExchange exchange, String... paths) throws IOException {
		return paths(exchange, Arrays.asList(paths));
	}

	// Serializes the suggestions as {"paths": [...]} with HTTP 200.
	private Void paths(HttpAsyncExchange exchange, Iterable<String> paths) throws IOException {
		String data = JSONUtility.jsonToString(Jsonizer.jsonize(SuggestedPaths.of(paths)).getRootNode());
		return HttpServerUtilities.prepareHttpResponse(exchange, 200, data);
	}

	// JSON payload wrapper: a single "paths" array field.
	private static class SuggestedPaths {
		private List<String> paths;

		private static SuggestedPaths of(Iterable<String> paths) {
			SuggestedPaths output = new SuggestedPaths();
			output.paths = new ArrayList<>();
			paths.forEach(output.paths::add);
			return output;
		}
	}
}
| 858 |
304 | <filename>apps/app7_static_files/views.py
# coding: utf-8
from django.shortcuts import render
from django.conf import settings
def production(request):
    """Render the production static-files demo page.

    The full ``settings`` object is exposed to the template, plus a
    convenience key with STATIC_URL minus its trailing slash (used for the
    Apache configuration snippet).
    """
    stripped_static_url = settings.STATIC_URL.rstrip('/')
    template_context = {
        'settings': settings,
        'STRIPPED_STATIC_URL': stripped_static_url,
    }
    return render(request, 'app7_static_files/prod_static_files.html', template_context)
2,890 | package com.github.ltsopensource.zookeeper.lts;
import com.github.ltsopensource.core.cluster.Config;
import com.github.ltsopensource.zookeeper.ZkClient;
import com.github.ltsopensource.zookeeper.ZookeeperTransporter;
/**
 * {@link ZookeeperTransporter} implementation backed by LTS's own ZooKeeper
 * client.  Each call creates a fresh {@link LtsZkClient}; no caching is done
 * here.
 */
public class LtsZookeeperTransporter implements ZookeeperTransporter {
    // NOTE(review): connect() presumably overrides ZookeeperTransporter.connect;
    // if so, add @Override (interface not visible in this file — confirm).
    public ZkClient connect(Config config) {
        return new LtsZkClient(config);
    }
}
| 137 |
30,023 | <reponame>liangleslie/core
"""Config flow to configure the Tile integration."""
from __future__ import annotations
from collections.abc import Mapping
from typing import Any
from pytile import async_login
from pytile.errors import InvalidAuthError, TileError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.data_entry_flow import FlowResult
from homeassistant.helpers import aiohttp_client
from .const import DOMAIN, LOGGER
# Re-auth only asks for the password; the username is carried over from the
# existing config entry.
STEP_REAUTH_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_PASSWORD): str,
    }
)
# Initial setup asks for both credentials.
STEP_USER_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_USERNAME): str,
        vol.Required(CONF_PASSWORD): str,
    }
)
class TileFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a Tile config flow."""

    VERSION = 1

    def __init__(self) -> None:
        """Initialize the config flow."""
        self._password: str | None = None
        self._username: str | None = None

    async def _async_verify(self, step_id: str, schema: vol.Schema) -> FlowResult:
        """Attempt to authenticate the provided credentials.

        On failure, re-shows the given step's form with an error.  On success,
        either updates and reloads an existing entry (re-auth path) or creates
        a new config entry keyed by the username.
        """
        assert self._username
        assert self._password

        errors = {}
        session = aiohttp_client.async_get_clientsession(self.hass)

        try:
            await async_login(self._username, self._password, session=session)
        except InvalidAuthError:
            errors["base"] = "invalid_auth"
        except TileError as err:
            LOGGER.error("Unknown Tile error: %s", err)
            errors["base"] = "unknown"

        if errors:
            return self.async_show_form(
                step_id=step_id, data_schema=schema, errors=errors
            )

        # FIX: the source contained an invalid redaction placeholder here
        # ("<PASSWORD>._password"); the stored value must be self._password.
        data = {CONF_USERNAME: self._username, CONF_PASSWORD: self._password}

        if existing_entry := await self.async_set_unique_id(self._username):
            self.hass.config_entries.async_update_entry(existing_entry, data=data)
            self.hass.async_create_task(
                self.hass.config_entries.async_reload(existing_entry.entry_id)
            )
            return self.async_abort(reason="reauth_successful")

        return self.async_create_entry(title=self._username, data=data)

    async def async_step_import(self, import_config: dict[str, Any]) -> FlowResult:
        """Import a config entry from configuration.yaml."""
        return await self.async_step_user(import_config)

    async def async_step_reauth(self, config: Mapping[str, Any]) -> FlowResult:
        """Handle configuration by re-auth: remember the username, ask for password."""
        self._username = config[CONF_USERNAME]
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, str] | None = None
    ) -> FlowResult:
        """Handle re-auth completion."""
        if not user_input:
            return self.async_show_form(
                step_id="reauth_confirm", data_schema=STEP_REAUTH_SCHEMA
            )

        self._password = user_input[CONF_PASSWORD]
        return await self._async_verify("reauth_confirm", STEP_REAUTH_SCHEMA)

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle the start of the config flow."""
        if not user_input:
            return self.async_show_form(step_id="user", data_schema=STEP_USER_SCHEMA)

        await self.async_set_unique_id(user_input[CONF_USERNAME])
        self._abort_if_unique_id_configured()

        self._username = user_input[CONF_USERNAME]
        self._password = user_input[CONF_PASSWORD]

        return await self._async_verify("user", STEP_USER_SCHEMA)
| 1,543 |
776 | package ghissues.gh967;
import act.util.SingletonBase;
/**
 * Minimal singleton service; returns a fixed marker string so callers can
 * assert the bean was resolved (package name suggests a regression fixture
 * for GitHub issue 967 — confirm against the test that uses it).
 */
public class XyzService extends SingletonBase {
    /** @return the constant marker string "xyz" */
    public String doService() {
        return "xyz";
    }
}
| 63 |
1,172 | // Copyright (C) 2005 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.caja.parser.js;
import com.google.caja.lexer.FilePosition;
import com.google.caja.lexer.TokenConsumer;
import com.google.caja.parser.ParseTreeNode;
import com.google.caja.reporting.RenderContext;
import java.util.List;
import java.util.Collections;
/**
* Sometimes called a function literal or a closure, an expression that
* constructs a new function.
*
* <p>E.g.
* <code>function () { return 0; }</code>
*
* @author <EMAIL>
*/
public final class FunctionConstructor
    extends AbstractExpression implements NestedScope {
  private static final long serialVersionUID = 4183249730129328478L;
  // Local member variables are only changed in childrenChanged(),
  // so this class satisfies the immutability contract of the superclass.
  private Identifier identifier;
  private List<FormalParam> params;
  private Block body;
  /** @param value unused. This ctor is provided for reflection. */
  @ReflectiveCtor
  public FunctionConstructor(
      FilePosition pos, Void value, List<? extends ParseTreeNode> children) {
    super(pos, ParseTreeNode.class);
    createMutation().appendChildren(children).execute();
  }
  public FunctionConstructor(
      FilePosition pos, Identifier identifier, List<FormalParam> params,
      Block body) {
    super(pos, ParseTreeNode.class);
    // Child layout is [identifier, params..., body]; childrenChanged() below
    // relies on exactly this ordering.
    createMutation()
        .appendChild(identifier)
        .appendChildren(params)
        .appendChild(body)
        .execute();
  }
  @Override
  protected void childrenChanged() {
    super.childrenChanged();
    // Re-derive the cached views from the child list: first child is the
    // (possibly anonymous) identifier, last is the body, the rest are params.
    List<? extends ParseTreeNode> children = children();
    int n = children.size();
    this.identifier = (Identifier) children.get(0);
    this.params = Collections.<FormalParam>unmodifiableList(
        childrenPart(1, n - 1, FormalParam.class));
    // Defensive re-check: childrenPart is trusted to filter, but a wrong child
    // type here would otherwise surface much later during rendering.
    for (ParseTreeNode p : params) {
      if (!(p instanceof FormalParam)) {
        throw new ClassCastException(p.getClass().getName());
      }
    }
    this.body = (Block) children().get(n - 1);
  }
  public List<FormalParam> getParams() { return params; }
  public Block getBody() { return body; }
  public Identifier getIdentifier() { return identifier; }
  public String getIdentifierName() { return identifier.getName(); }
  // A function literal has no constant value.
  @Override
  public Object getValue() { return null; }
  // As a condition, a function object is always truthy.
  @Override
  public Boolean conditionResult() { return true; }
  public void render(RenderContext rc) {
    TokenConsumer out = rc.getOut();
    out.mark(getFilePosition());
    out.consume("function");
    // Anonymous functions simply omit the name token.
    String name = identifier.getName();
    if (null != name) {
      out.consume(name);
    }
    renderActuals(rc);
    renderBody(rc);
  }
  // Renders the comma-separated parameter list, including the parentheses.
  void renderActuals(RenderContext rc) {
    TokenConsumer out = rc.getOut();
    out.consume("(");
    boolean seen = false;
    for (FormalParam e : params) {
      if (seen) {
        out.consume(",");
      } else {
        seen = true;
      }
      e.render(rc);
    }
    out.consume(")");
  }
  void renderBody(RenderContext rc) {
    body.renderBlock(rc, false);
  }
  public String typeOf() { return "function"; }
}
| 1,199 |
3,645 | /*
* audio resampling
* Copyright (c) 2004-2012 <NAME> <<EMAIL>>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef SWRESAMPLE_RESAMPLE_H
#define SWRESAMPLE_RESAMPLE_H
#include "libavutil/log.h"
#include "libavutil/samplefmt.h"
#include "swresample_internal.h"
/* Persistent state for one audio resampling instance.  Field semantics are
   defined by resample.c; the trailing notes below are summaries — confirm
   against that file before relying on them. */
typedef struct ResampleContext {
    const AVClass *av_class;
    uint8_t *filter_bank;              /* packed polyphase filter coefficients */
    int filter_length;
    int filter_alloc;
    int ideal_dst_incr;                /* target output increment (exact ratio) */
    int dst_incr;
    int dst_incr_div;
    int dst_incr_mod;
    int index;                         /* integer part of the input position */
    int frac;                          /* fractional part of the input position */
    int src_incr;
    int compensation_distance;         /* samples left of drift compensation */
    int phase_count;
    int linear;                        /* nonzero: interpolate between phases */
    enum SwrFilterType filter_type;
    double kaiser_beta;
    double factor;                     /* resampling ratio */
    enum AVSampleFormat format;
    int felem_size;                    /* bytes per filter coefficient element */
    int filter_shift;
    int phase_count_compensation; /* desired phase_count when compensation is enabled */
    /* Format/arch-specific kernels, installed by swri_resample_dsp_*init(). */
    struct {
        void (*resample_one)(void *dst, const void *src,
                             int n, int64_t index, int64_t incr);
        int (*resample)(struct ResampleContext *c, void *dst,
                        const void *src, int n, int update_ctx);
    } dsp;
} ResampleContext;
void swri_resample_dsp_init(ResampleContext *c);
void swri_resample_dsp_x86_init(ResampleContext *c);
void swri_resample_dsp_arm_init(ResampleContext *c);
#endif /* SWRESAMPLE_RESAMPLE_H */
| 758 |
1,104 | <gh_stars>1000+
package backtype.storm.serialization;
import backtype.storm.task.TopologyContext;
import backtype.storm.tuple.MessageId;
import backtype.storm.tuple.Tuple;
import backtype.storm.utils.ThreadResourceManager;
import backtype.storm.utils.WritableUtils;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
 * Thread-safe tuple deserializer: Kryo instances are not thread-safe, so each
 * calling thread borrows a per-thread {@link Worker} from a resource manager.
 */
public class KryoTupleDeserializer implements ITupleDeserializer {
    ThreadResourceManager<Worker> _manager;
    public KryoTupleDeserializer(final Map conf, final TopologyContext context) {
        _manager = new ThreadResourceManager<Worker>(new ThreadResourceManager.ResourceFactory<Worker>() {
            @Override
            public Worker makeResource() {
                return new Worker(conf, context);
            }
        });
    }
    @Override
    public Tuple deserialize(byte[] ser) {
        // Borrow a worker, deserialize, and always return it to the pool.
        Worker worker = _manager.acquire();
        try {
            return worker.deserialize(ser);
        } finally {
            _manager.release(worker);
        }
    }
    /**
     * Single-threaded deserializer holding the actual Kryo state.  The wire
     * format read here (in order): varint taskId, varint streamId, MessageId,
     * then the Kryo-encoded value list.
     */
    public static class Worker implements ITupleDeserializer {
        TopologyContext _context;
        KryoValuesDeserializer _kryo;
        SerializationFactory.IdDictionary _ids;
        public Worker(Map conf, TopologyContext context) {
            _kryo = new KryoValuesDeserializer(conf);
            _context = context;
            _ids = new SerializationFactory.IdDictionary(context.getRawTopology());
        }
        public Tuple deserialize(byte[] ser) {
            try {
                ByteArrayInputStream bin = new ByteArrayInputStream(ser);
                DataInputStream in = new DataInputStream(bin);
                int taskId = WritableUtils.readVInt(in);
                int streamId = WritableUtils.readVInt(in);
                // Numeric ids are translated back to names via the topology.
                String componentName = _context.getComponentId(taskId);
                String streamName = _ids.getStreamName(componentName, streamId);
                MessageId id = MessageId.deserialize(in);
                List<Object> values = _kryo.deserializeFrom(bin);
                return new Tuple(_context, values, taskId, streamName, id);
            } catch(IOException e) {
                // In-memory streams should not fail; rethrow unchecked.
                throw new RuntimeException(e);
            }
        }
    }
}
| 1,010 |
2,633 | <reponame>afxcn/unit<gh_stars>1000+
/*
* Copyright (C) <NAME>
* Copyright (C) NGINX, Inc.
*/
#include <nxt_main.h>
/* Built-in services, selected at configure time.  Each entry maps a
   (type, name) pair to an implementation; nxt_service_get() falls back to
   this table when no dynamic services array is supplied. */
static const nxt_service_t nxt_services[] = {
#if (NXT_HAVE_KQUEUE)
    { "engine", "kqueue", &nxt_kqueue_engine },
#endif
#if (NXT_HAVE_EPOLL_EDGE)
    { "engine", "epoll", &nxt_epoll_edge_engine },
    { "engine", "epoll_edge", &nxt_epoll_edge_engine },
    { "engine", "epoll_level", &nxt_epoll_level_engine },
#elif (NXT_HAVE_EPOLL)
    { "engine", "epoll", &nxt_epoll_level_engine },
    { "engine", "epoll_level", &nxt_epoll_level_engine },
#endif
#if (NXT_HAVE_EVENTPORT)
    { "engine", "eventport", &nxt_eventport_engine },
#endif
#if (NXT_HAVE_DEVPOLL)
    { "engine", "devpoll", &nxt_devpoll_engine },
    { "engine", "/dev/poll", &nxt_devpoll_engine },
#endif
#if (NXT_HAVE_POLLSET)
    { "engine", "pollset", &nxt_pollset_engine },
#endif
    /* poll and select are always available. */
    { "engine", "poll", &nxt_poll_engine },
    { "engine", "select", &nxt_select_engine },
#if (NXT_HAVE_OPENSSL)
    { "SSL/TLS", "OpenSSL", &nxt_openssl_lib },
    { "SSL/TLS", "openssl", &nxt_openssl_lib },
#endif
#if (NXT_HAVE_GNUTLS)
    { "SSL/TLS", "GnuTLS", &nxt_gnutls_lib },
    { "SSL/TLS", "gnutls", &nxt_gnutls_lib },
#endif
#if (NXT_HAVE_CYASSL)
    { "SSL/TLS", "CyaSSL", &nxt_cyassl_lib },
    { "SSL/TLS", "cyassl", &nxt_cyassl_lib },
#endif
};
/*
 * Creates a dynamic services array in the given memory pool, seeded with a
 * copy of every built-in entry from nxt_services[].  Returns NULL if either
 * the array or an element allocation fails.
 */
nxt_array_t *
nxt_services_init(nxt_mp_t *mp)
{
    nxt_uint_t           n;
    nxt_array_t          *services;
    nxt_service_t        *s;
    const nxt_service_t  *service;

    services = nxt_array_create(mp, 32, sizeof(nxt_service_t));
    if (nxt_slow_path(services == NULL)) {
        return NULL;
    }

    for (service = nxt_services, n = nxt_nitems(nxt_services);
         n != 0;
         service++, n--)
    {
        s = nxt_array_add(services);
        if (nxt_slow_path(s == NULL)) {
            return NULL;
        }

        *s = *service;
    }

    return services;
}
/*
 * Appends a service to the dynamic array unless an entry with the same
 * (type, name) pair already exists, in which case the duplicate is logged
 * and NXT_ERROR returned.  Also returns NXT_ERROR on allocation failure.
 */
nxt_int_t
nxt_service_add(nxt_array_t *services, const nxt_service_t *service)
{
    nxt_uint_t     n;
    nxt_service_t  *s;

    s = services->elts;

    for (n = services->nelts; n != 0; s++, n--) {

        if (nxt_strcmp(s->type, service->type) == 0
            && nxt_strcmp(s->name, service->name) == 0)
        {
            nxt_thread_log_alert("service \"%s:%s\" is duplicate",
                                 service->type, service->name);
            return NXT_ERROR;
        }
    }

    s = nxt_array_add(services);
    if (nxt_slow_path(s == NULL)) {
        return NXT_ERROR;
    }

    *s = *service;

    return NXT_OK;
}
/*
 * Looks up a service implementation by type and optional name.  When name is
 * NULL, the first entry of the requested type wins.  Falls back to the
 * built-in nxt_services[] table when the dynamic array is NULL.  Returns the
 * implementation pointer, or NULL (with an alert logged) when nothing
 * matches.
 */
const void *
nxt_service_get(nxt_array_t *services, const char *type, const char *name)
{
    nxt_uint_t n;
    const nxt_service_t *s;

    if (services != NULL) {
        s = services->elts;
        n = services->nelts;

    } else {
        s = nxt_services;
        n = nxt_nitems(nxt_services);
    }

    while (n != 0) {
        if (nxt_strcmp(s->type, type) == 0) {

            if (name == NULL) {
                return s->service;
            }

            if (nxt_strcmp(s->name, name) == 0) {
                return s->service;
            }
        }

        s++;
        n--;
    }

    /* NOTE(review): when name is NULL this passes NULL for the %s argument;
       presumably the nxt log formatter tolerates that — confirm. */
    nxt_thread_log_alert("service \"%s%s%s\" not found",
                         type, (name != NULL) ? ":" : "", name);

    return NULL;
}
| 1,827 |
1,444 |
package mage.cards.k;
import java.util.UUID;
import mage.MageInt;
import mage.abilities.keyword.BandingAbility;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
/**
*
* @author L_J
*/
public final class KjeldoranWarrior extends CardImpl {
    // {W} 1/1 Human Warrior creature with banding.
    public KjeldoranWarrior (UUID ownerId, CardSetInfo setInfo) {
        super(ownerId,setInfo,new CardType[]{CardType.CREATURE},"{W}");
        this.subtype.add(SubType.HUMAN);
        this.subtype.add(SubType.WARRIOR);
        this.power = new MageInt(1);
        this.toughness = new MageInt(1);
        // Banding
        this.addAbility(BandingAbility.getInstance());
    }
    // Copy constructor required by the engine's card-copying machinery.
    public KjeldoranWarrior (final KjeldoranWarrior card) {
        super(card);
    }
    @Override
    public KjeldoranWarrior copy() {
        return new KjeldoranWarrior(this);
    }
}
| 367 |
7,892 | <gh_stars>1000+
/**********************************************************************
Audacity: A Digital Audio Editor
AVFrameWrapper.cpp
<NAME>
**********************************************************************/
#include "AVFrameWrapper.h"
#include "FFmpegFunctions.h"
// Allocates a fresh AVFrame through the dynamically loaded FFmpeg entry
// points; mAVFrame stays null if allocation fails (the destructor tolerates
// that).
AVFrameWrapper::AVFrameWrapper(const FFmpegFunctions& ffmpeg) noexcept
   : mFFmpeg(ffmpeg)
{
   mAVFrame = mFFmpeg.av_frame_alloc();
}
// Returns the raw AVFrame pointer (may be null); ownership stays with the
// wrapper.
AVFrame* AVFrameWrapper::GetWrappedValue() noexcept
{
   return mAVFrame;
}
// Const overload of the accessor above.
const AVFrame* AVFrameWrapper::GetWrappedValue() const noexcept
{
   return mAVFrame;
}
// Frees the frame via av_frame_free, which also nulls the pointer; the guard
// covers the allocation-failure case from the constructor.
AVFrameWrapper::~AVFrameWrapper()
{
   if (mAVFrame != nullptr)
      mFFmpeg.av_frame_free(&mAVFrame);
}
| 232 |
5,169 | {
"name": "UIAlertController+Show",
"version": "0.1.2",
"summary": "Show UIAlertControllers from anywhere.",
"description": "Light-weight extension to UIAlertController that adds 'show' method for presenting Alerts / Action Sheets from anywhere",
"homepage": "https://github.com/hightower/UIAlertController-Show",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"<NAME>": "<EMAIL>",
"<NAME>": "<EMAIL>"
},
"social_media_url": "http://twitter.com/erikwithfriends",
"platforms": {
"ios": "8.0"
},
"source": {
"git": "https://github.com/hightower/UIAlertController-Show.git",
"tag": "0.1.2"
},
"source_files": "UIAlertController+Show/UIAlertController+Show.swift",
"frameworks": [
"Foundation",
"UIKit"
],
"requires_arc": true
}
| 310 |
548 | # Copyright 2019 ZTE corporation. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
The CMD script
"""
import subprocess
import argparse
import os
import time
from grpc._channel import _InactiveRpcError
from adlik_serving import PredictContext # pylint: disable=import-error
def _test_connect_of_serving(args):
    """Return True when the serving endpoint accepts a PredictContext, else False.

    A grpc _InactiveRpcError means the server is not (yet) reachable.
    """
    try:
        PredictContext(os.getenv('MODEL_NAME'), url=args.url, protocol=args.protocol, verbose=True)
    except _InactiveRpcError:
        return False
    return True
def _main(args):
    """Compile the model, start the serving process, run the client, clean up.

    The serving process is killed in a ``finally`` block: previously an
    exception in the wait loop or the client run skipped ``process.kill()``,
    and ``Popen.__exit__`` would then block forever waiting for the serving
    process to exit on its own.
    """
    compile_command = ['sh', '-c', args.compile_script]
    serving_command = ['sh', '-c', args.serving_script]
    client_command = ['sh', '-c', args.client_script]
    subprocess.run(compile_command)
    with subprocess.Popen(serving_command) as process:
        try:
            # Poll until the serving endpoint accepts connections.
            while not _test_connect_of_serving(args):
                time.sleep(1)
            subprocess.run(client_command)
        finally:
            process.kill()
if __name__ == '__main__':
    # CLI: three shell scripts (compile / serve / client) plus optional
    # endpoint URL and protocol forwarded to the serving client.
    ARGS_PARSER = argparse.ArgumentParser()
    ARGS_PARSER.add_argument('-s', '--serving-script', type=str, required=True, help='The serving script')
    ARGS_PARSER.add_argument('-c', '--client-script', type=str, required=True, help='The client script')
    ARGS_PARSER.add_argument('-cs', '--compile-script', type=str, required=True, help='The compile script')
    ARGS_PARSER.add_argument('-u', '--url', type=str, required=False, default='localhost:8500',
                             help='Server URL. Default is localhost:8500.')
    ARGS_PARSER.add_argument('-i', '--protocol', type=str, required=False, default='grpc',
                             help='Protocol ("http"/"grpc") used to ' + 'communicate with service. Default is "grpc".')
    PARSE_ARGS = ARGS_PARSER.parse_args()
    _main(PARSE_ARGS)
| 693 |
363 | #include "ik/retcodes.h"
| 11 |
1,405 | package com.tendcloud.tenddata;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPOutputStream;
import org.apache.http.HttpHost;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
/**
 * Minimal HTTP uploader for analytics payloads (decompiled code; obfuscated
 * names kept so other decompiled callers keep working).
 */
public final class o {
    private static final String a = "http://tdcv3.talkingdata.net"; // upload host
    private static final String b = "/g/d";                         // upload path
    private static final int c = 60000;                             // timeout (ms) when u.b() is true
    private static final boolean d = true;
    /**
     * Builds an HttpClient whose connect/socket timeouts depend on u.b()
     * (60 s when true, 120 s otherwise) and, when u.c() reports a proxy is
     * needed, routes through the proxy host from u.d().
     */
    static DefaultHttpClient a() {
        HttpHost d2;
        int i = c;
        boolean b2 = u.b();
        new SchemeRegistry().register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));
        BasicHttpParams basicHttpParams = new BasicHttpParams();
        HttpConnectionParams.setConnectionTimeout(basicHttpParams, b2 ? c : 120000);
        if (!b2) {
            i = 120000;
        }
        HttpConnectionParams.setSoTimeout(basicHttpParams, i);
        DefaultHttpClient defaultHttpClient = new DefaultHttpClient(basicHttpParams);
        if (!b2 && u.c() && (d2 = u.d()) != null) {
            defaultHttpClient.getParams().setParameter("http.route.default-proxy", d2);
        }
        return defaultHttpClient;
    }
    /**
     * GZips the serialized record and uploads it; returns false on any
     * compression/serialization failure instead of letting the checked
     * IOException escape (the original did not declare it and so did not
     * compile).
     */
    static boolean a(ah ahVar) {
        try {
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            GZIPOutputStream gZIPOutputStream = new GZIPOutputStream(byteArrayOutputStream);
            new p(gZIPOutputStream).a(ahVar);
            gZIPOutputStream.finish();
            gZIPOutputStream.flush();
            return a(b, byteArrayOutputStream.toByteArray(), d);
        } catch (IOException e) {
            return false;
        }
    }
    /**
     * POSTs {@code bArr} to {@code a + str}; true only on HTTP 200.
     * Best-effort: any failure is reported as false.
     */
    static boolean a(String str, byte[] bArr, boolean z) {
        DefaultHttpClient a2 = a();
        try {
            HttpPost httpPost = new HttpPost(a + str);
            ByteArrayEntity byteArrayEntity = new ByteArrayEntity(bArr);
            byteArrayEntity.setContentType("application/unpack_chinar");
            httpPost.setEntity(byteArrayEntity);
            if (a2.execute(httpPost).getStatusLine().getStatusCode() == 200) {
                return d;
            }
            return false;
        } catch (Exception e) {
            // Missing in the original: the method fell off the end after the
            // catch (missing return statement -> compile error).
            return false;
        }
    }
}
| 980 |
3,361 | /*
* Copyright 2015 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.physical_web.collection;
import static org.junit.Assert.*;
import org.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.skyscreamer.jsonassert.JSONAssert;
/**
* SimpleUrlDevice unit test class.
*/
public class UrlDeviceTest {
  private static final String ID1 = "id1";
  private static final String ID2 = "id2";
  private static final String URL1 = "http://example.com";
  private static final String URL2 = "http://physical-web.org";
  private static final double RANK1 = 0.5d;
  private static final double RANK2 = 0.9d;
  private UrlDevice mUrlDevice1;
  private JSONObject jsonObject1;
  @Before
  public void setUp() {
    mUrlDevice1 = new UrlDevice.Builder(ID1, URL1)
        .addExtra("key", "value")
        .build();
    jsonObject1 = new JSONObject("{"
        + "    \"id\": \"" + ID1 + "\","
        + "    \"url\": \"" + URL1 + "\","
        + "    \"extra\": {"
        + "        \"key\": \"value\""
        + "    }"
        + "}");
  }
  // NOTE: JUnit's assertEquals takes (expected, actual); the original had
  // the arguments reversed throughout, which produces misleading failure
  // messages.  Fixed below.
  @Test
  public void getIdReturnsId() {
    assertEquals(ID1, mUrlDevice1.getId());
  }
  @Test
  public void getUrlReturnsUrl() {
    assertEquals(URL1, mUrlDevice1.getUrl());
  }
  @Test
  public void jsonSerializeWorks() {
    JSONAssert.assertEquals(jsonObject1, mUrlDevice1.jsonSerialize(), true);
  }
  @Test
  public void jsonDeserializeWorks() {
    UrlDevice urlDevice = UrlDevice.jsonDeserialize(jsonObject1);
    assertNotNull(urlDevice);
    assertEquals(ID1, urlDevice.getId());
    assertEquals(URL1, urlDevice.getUrl());
  }
  @Test
  public void deviceIsEqualToItself() {
    assertEquals(mUrlDevice1, mUrlDevice1);
  }
  @Test
  public void alikeDevicesAreEqual() {
    UrlDevice urlDevice = new UrlDevice.Builder(ID1, URL1)
        .addExtra("key", "value")
        .build();
    assertEquals(mUrlDevice1, urlDevice);
  }
  @Test
  public void unalikeDevicesAreNotEqual() {
    UrlDevice urlDevice1 = new UrlDevice(ID1, URL1);
    UrlDevice urlDevice2 = new UrlDevice(ID1, URL2); // same id, different url
    UrlDevice urlDevice3 = new UrlDevice(ID2, URL1); // same url, different id
    assertNotEquals(urlDevice1, urlDevice2);
    assertNotEquals(urlDevice1, urlDevice3);
  }
  @Test
  public void compareDeviceToItselfReturnsZero() {
    assertEquals(0, mUrlDevice1.compareTo(mUrlDevice1));
  }
  @Test
  public void compareDeviceToUnalikeDeviceReturnsNonZero() {
    UrlDevice urlDevice2 = new UrlDevice(ID2, URL1); // different device ID
    UrlDevice urlDevice3 = new UrlDevice(ID1, URL2); // different URL
    assertTrue(mUrlDevice1.compareTo(urlDevice2) < 0); // "id1" < "id2"
    assertTrue(urlDevice2.compareTo(mUrlDevice1) > 0);
    assertTrue(mUrlDevice1.compareTo(urlDevice3) < 0); // "example.com" < "physical-web.org"
    assertTrue(urlDevice3.compareTo(mUrlDevice1) > 0);
  }
}
| 1,249 |
14,425 | <gh_stars>1000+
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.gridmix;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
import org.junit.Test;
import static org.junit.Assert.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.WritableUtils;
/**
 * Tests for {@link GridmixRecord} and {@link GridmixKey}: records must
 * serialize to exactly the requested byte size, round-trip through
 * (de)serialization, sort identically via object comparison and raw-byte
 * comparators, and (for keys) carry reduce-spec metadata intact.
 * Each helper seeds a Random and logs the seed so failures are reproducible.
 */
public class TestGridmixRecord {
  private static final Logger LOG = LoggerFactory.getLogger(TestGridmixRecord.class);
  /**
   * For every size in [min, max): serialize x at that exact size and verify
   * the written length, the record's reported size, and the size observed
   * after deserializing into y all agree, both buffer-by-buffer and from one
   * continuous stream.
   */
  static void lengthTest(GridmixRecord x, GridmixRecord y, int min,
      int max) throws Exception {
    final Random r = new Random();
    final long seed = r.nextLong();
    r.setSeed(seed);
    LOG.info("length: " + seed);
    final DataInputBuffer in = new DataInputBuffer();
    final DataOutputBuffer out1 = new DataOutputBuffer();
    final DataOutputBuffer out2 = new DataOutputBuffer();
    for (int i = min; i < max; ++i) {
      setSerialize(x, r.nextLong(), i, out1);
      // check write
      assertEquals(i, out1.getLength());
      // write to stream
      x.write(out2);
      // check read
      in.reset(out1.getData(), 0, out1.getLength());
      y.readFields(in);
      assertEquals(i, x.getSize());
      assertEquals(i, y.getSize());
    }
    // check stream read
    in.reset(out2.getData(), 0, out2.getLength());
    for (int i = min; i < max; ++i) {
      y.readFields(in);
      assertEquals(i, y.getSize());
    }
  }
  /**
   * Writes records of every size in [min, max) with random seeds, then
   * deserializes them and re-serializes into a second buffer; the two byte
   * streams must be identical (serialization is a faithful replay).
   */
  static void randomReplayTest(GridmixRecord x, GridmixRecord y, int min,
      int max) throws Exception {
    final Random r = new Random();
    final long seed = r.nextLong();
    r.setSeed(seed);
    LOG.info("randReplay: " + seed);
    final DataOutputBuffer out1 = new DataOutputBuffer();
    for (int i = min; i < max; ++i) {
      final int s = out1.getLength();
      x.setSeed(r.nextLong());
      x.setSize(i);
      x.write(out1);
      assertEquals(i, out1.getLength() - s);
    }
    final DataInputBuffer in = new DataInputBuffer();
    in.reset(out1.getData(), 0, out1.getLength());
    final DataOutputBuffer out2 = new DataOutputBuffer();
    // deserialize written records, write to separate buffer
    for (int i = min; i < max; ++i) {
      final int s = in.getPosition();
      y.readFields(in);
      assertEquals(i, in.getPosition() - s);
      y.write(out2);
    }
    // verify written contents match
    assertEquals(out1.getLength(), out2.getLength());
    // assumes that writes will grow buffer deterministically
    assertEquals("Bad test", out1.getData().length, out2.getData().length);
    assertArrayEquals(out1.getData(), out2.getData());
  }
  /**
   * Two records given the same seed and size must be equal, hash equally,
   * and produce byte-identical serialized forms, for every size above the
   * record's fixed overhead.
   */
  static void eqSeedTest(GridmixRecord x, GridmixRecord y, int max)
      throws Exception {
    final Random r = new Random();
    final long s = r.nextLong();
    r.setSeed(s);
    LOG.info("eqSeed: " + s);
    assertEquals(x.fixedBytes(), y.fixedBytes());
    final int min = x.fixedBytes() + 1;
    final DataOutputBuffer out1 = new DataOutputBuffer();
    final DataOutputBuffer out2 = new DataOutputBuffer();
    for (int i = min; i < max; ++i) {
      final long seed = r.nextLong();
      setSerialize(x, seed, i, out1);
      setSerialize(y, seed, i, out2);
      assertEquals(x, y);
      assertEquals(x.hashCode(), y.hashCode());
      // verify written contents match
      assertEquals(out1.getLength(), out2.getLength());
      // assumes that writes will grow buffer deterministically
      assertEquals("Bad test", out1.getData().length, out2.getData().length);
      assertArrayEquals(out1.getData(), out2.getData());
    }
  }
  /**
   * Verifies that the raw-bytes comparator agrees with both object
   * compareTo() and a direct byte comparison of the serialized forms, and
   * that a record compares equal to a second serialized copy of itself.
   */
  static void binSortTest(GridmixRecord x, GridmixRecord y, int min,
      int max, WritableComparator cmp) throws Exception {
    final Random r = new Random();
    final long s = r.nextLong();
    r.setSeed(s);
    LOG.info("sort: " + s);
    final DataOutputBuffer out1 = new DataOutputBuffer();
    final DataOutputBuffer out2 = new DataOutputBuffer();
    for (int i = min; i < max; ++i) {
      final long seed1 = r.nextLong();
      setSerialize(x, seed1, i, out1);
      assertEquals(0, x.compareSeed(seed1, Math.max(0, i - x.fixedBytes())));
      final long seed2 = r.nextLong();
      setSerialize(y, seed2, i, out2);
      assertEquals(0, y.compareSeed(seed2, Math.max(0, i - x.fixedBytes())));
      // for eq sized records, ensure byte cmp where req
      final int chk = WritableComparator.compareBytes(
          out1.getData(), 0, out1.getLength(),
          out2.getData(), 0, out2.getLength());
      assertEquals(Integer.signum(chk), Integer.signum(x.compareTo(y)));
      assertEquals(Integer.signum(chk), Integer.signum(cmp.compare(
          out1.getData(), 0, out1.getLength(),
          out2.getData(), 0, out2.getLength())));
      // write second copy, compare eq
      final int s1 = out1.getLength();
      x.write(out1);
      assertEquals(0, cmp.compare(out1.getData(), 0, s1,
            out1.getData(), s1, out1.getLength() - s1));
      final int s2 = out2.getLength();
      y.write(out2);
      assertEquals(0, cmp.compare(out2.getData(), 0, s2,
            out2.getData(), s2, out2.getLength() - s2));
      assertEquals(Integer.signum(chk), Integer.signum(cmp.compare(out1.getData(), 0, s1,
            out2.getData(), s2, out2.getLength() - s2)));
    }
  }
  /**
   * Round-trips REDUCE_SPEC keys: the reduce input/output record counts and
   * output bytes set on key a must survive serialization into key b, with
   * equality, ordering and hashCode preserved.
   */
  static void checkSpec(GridmixKey a, GridmixKey b) throws Exception {
    final Random r = new Random();
    final long s = r.nextLong();
    r.setSeed(s);
    LOG.info("spec: " + s);
    final DataInputBuffer in = new DataInputBuffer();
    final DataOutputBuffer out = new DataOutputBuffer();
    a.setType(GridmixKey.REDUCE_SPEC);
    b.setType(GridmixKey.REDUCE_SPEC);
    for (int i = 0; i < 100; ++i) {
      final int in_rec = r.nextInt(Integer.MAX_VALUE);
      a.setReduceInputRecords(in_rec);
      final int out_rec = r.nextInt(Integer.MAX_VALUE);
      a.setReduceOutputRecords(out_rec);
      final int out_bytes = r.nextInt(Integer.MAX_VALUE);
      a.setReduceOutputBytes(out_bytes);
      final int min = WritableUtils.getVIntSize(in_rec)
                    + WritableUtils.getVIntSize(out_rec)
                    + WritableUtils.getVIntSize(out_bytes)
                    + WritableUtils.getVIntSize(0);
      assertEquals(min + 2, a.fixedBytes()); // meta + vint min
      final int size = r.nextInt(1024) + a.fixedBytes() + 1;
      setSerialize(a, r.nextLong(), size, out);
      assertEquals(size, out.getLength());
      assertTrue(a.equals(a));
      assertEquals(0, a.compareTo(a));
      in.reset(out.getData(), 0, out.getLength());
      b.readFields(in);
      assertEquals(size, b.getSize());
      assertEquals(in_rec, b.getReduceInputRecords());
      assertEquals(out_rec, b.getReduceOutputRecords());
      assertEquals(out_bytes, b.getReduceOutputBytes());
      assertTrue(a.equals(b));
      assertEquals(0, a.compareTo(b));
      assertEquals(a.hashCode(), b.hashCode());
    }
  }
  /**
   * Seeds the record, sets its target serialized size, and writes it into a
   * freshly reset buffer.
   */
  static void setSerialize(GridmixRecord x, long seed, int size,
      DataOutputBuffer out) throws IOException {
    x.setSeed(seed);
    x.setSize(size);
    out.reset();
    x.write(out);
  }
  /** Runs all helpers against REDUCE_SPEC keys. */
  @Test
  public void testKeySpec() throws Exception {
    final int min = 6;
    final int max = 300;
    final GridmixKey a = new GridmixKey(GridmixKey.REDUCE_SPEC, 1, 0L);
    final GridmixKey b = new GridmixKey(GridmixKey.REDUCE_SPEC, 1, 0L);
    lengthTest(a, b, min, max);
    randomReplayTest(a, b, min, max);
    binSortTest(a, b, min, max, new GridmixKey.Comparator());
    // 2 fixed GR bytes, 1 type, 3 spec
    eqSeedTest(a, b, max);
    checkSpec(a, b);
  }
  /** Runs the generic helpers against DATA-typed keys. */
  @Test
  public void testKeyData() throws Exception {
    final int min = 2;
    final int max = 300;
    final GridmixKey a = new GridmixKey(GridmixKey.DATA, 1, 0L);
    final GridmixKey b = new GridmixKey(GridmixKey.DATA, 1, 0L);
    lengthTest(a, b, min, max);
    randomReplayTest(a, b, min, max);
    binSortTest(a, b, min, max, new GridmixKey.Comparator());
    // 2 fixed GR bytes, 1 type
    eqSeedTest(a, b, 300);
  }
  /** Runs the generic helpers against plain GridmixRecords. */
  @Test
  public void testBaseRecord() throws Exception {
    final int min = 1;
    final int max = 300;
    final GridmixRecord a = new GridmixRecord();
    final GridmixRecord b = new GridmixRecord();
    lengthTest(a, b, min, max);
    randomReplayTest(a, b, min, max);
    binSortTest(a, b, min, max, new GridmixRecord.Comparator());
    // 2 fixed GR bytes
    eqSeedTest(a, b, 300);
  }
  /** Standalone entry point: runs each test, printing failures; exit -1 on any failure. */
  public static void main(String[] argv) throws Exception {
    boolean fail = false;
    final TestGridmixRecord test = new TestGridmixRecord();
    try { test.testKeySpec(); } catch (Exception e) {
      fail = true;
      e.printStackTrace();
    }
    try {test.testKeyData(); } catch (Exception e) {
      fail = true;
      e.printStackTrace();
    }
    try {test.testBaseRecord(); } catch (Exception e) {
      fail = true;
      e.printStackTrace();
    }
    System.exit(fail ? -1 : 0);
  }
  /** Debug helper: dumps the serialized bytes of both records to stdout. */
  static void printDebug(GridmixRecord a, GridmixRecord b) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    a.write(out);
    System.out.println("A " +
        Arrays.toString(Arrays.copyOf(out.getData(), out.getLength())));
    out.reset();
    b.write(out);
    System.out.println("B " +
        Arrays.toString(Arrays.copyOf(out.getData(), out.getLength())));
  }
}
| 4,036 |
488 | #ifndef DATA_H
#define DATA_H 1
/* nmemonic*/
#define TOPLEVEL 1
/* Define a structure to hold*/
/* any one possible value*/
/* One storage slot per netCDF constant type; the active member is selected
   by the nctype tag in the enclosing Constant. */
typedef union Constvalue {
    struct Datalist* compoundv; /* NC_COMPOUND*/
    char charv; /* NC_CHAR*/
    signed char int8v; /* NC_BYTE*/
    unsigned char uint8v; /* NC_UBYTE*/
    short int16v; /* NC_SHORT*/
    unsigned short uint16v; /* NC_USHORT*/
    int int32v; /* NC_INT*/
    unsigned int uint32v; /* NC_UINT*/
    long long int64v; /* NC_INT64*/
    unsigned long long uint64v; /* NC_UINT64*/
    float floatv; /* NC_FLOAT*/
    double doublev; /* NC_DOUBLE*/
    struct Stringv { /* NC_STRING*/
	int len;
	char* stringv;
	/*struct Datalist* charlist;*/
    } stringv;
    struct Opaquev { /* NC_OPAQUE*/
        int len; /* length as originally written (rounded to even number)*/
        char* stringv; /*as constant was written*/
	/* (padded to even # chars >= 16)*/
	/* without leading 0x*/
    } opaquev;
    struct Symbol* enumv; /* NC_ECONST*/
} Constvalue;
/* A single tagged constant value parsed from the input. */
typedef struct Constant {
    nc_type nctype; /* tag: which Constvalue member is active */
    int lineno; /* source line the constant came from (presumably for diagnostics; cf. srcline()) */
    Constvalue value;
    int filled; /* was this originally NC_FILLVALUE? */
} Constant;
/* A counted, growable sequence of Constants.  All datalists are additionally
   chained together via `next`, and NC_VLEN bookkeeping rides along in `vlen`. */
typedef struct Datalist {
    struct Datalist* next; /* chain of all known datalists*/
    int readonly; /* data field is shared with another Datalist*/
    size_t length; /* |data| */
    size_t nelems; /* # of elements in the datalist;
                      should only differ from length when using
                      certain complex structures with scalar fields
                      (see datalist constant rules in ncgen man page */
    size_t alloc; /* track total allocated space for data field*/
    Constant* data; /* actual list of constants constituting the datalist*/
    /* Track various values associated with the datalist*/
    /* (used to be in Constvalue.compoundv)*/
    struct Vlen {
        struct Symbol* schema; /* type/var that defines structure of this*/
        unsigned int count; /* # of vlen basetype instances*/
        unsigned int uid; /* unique id for NC_VLEN*/
    } vlen;
} Datalist;
/* Define a structure to track
location of current read point in the Datalist sequence
In effect, we are parsing the data sequence.
Push and pop of data sources is supported (see srcpush() below).*/
typedef struct Datasrc {
    Constant* data; /* duplicate pointer; so do not free.*/
    int index; /* current read position — assumed cursor into data; NOTE(review):
                  the FASTDATASRC srcpeek macro references ds->max, which this
                  struct does not declare — confirm which field is intended */
    int length; /* number of constants available in data — TODO confirm */
    int spliced; /* Was this list spliced into our parent ? */
    struct Datasrc* prev; /* linked list for debugging */
} Datasrc;
/* Define a holder for passing a start/count array */
struct Vlendata {
    char* data; /* raw vlen payload — semantics defined by the consumer; confirm */
    unsigned long count; /* number of elements in data */
};
extern struct Vlendata* vlendata;
/* Convenience*/
/* Push `src` as a nested source iff its current constant is a sublist,
   recording whether a push happened in `iscmpd` so SRCPOP can undo it
   symmetrically. */
#define SRCPUSH(iscmpd,src) {if(((iscmpd)=issublist(src))) {srcpush(src);}}
#define SRCPOP(iscmpd,src) {if((iscmpd)) {srcpop(src);}}
int issublist(Datasrc* src);
int isstring(Datasrc* src);
int isfillvalue(Datasrc* src);
int istype(Datasrc* src, nc_type);
int isstringable(nc_type nctype);
#ifdef ENABLE_BINARY
/* from: cdfdata.c */
void bindata_array(struct Symbol*,Bytebuffer*,Datasrc*,Odometer*,int,Datalist*);
void bindata_attrdata(struct Symbol* asym, Bytebuffer*);
void bindata_vardata(struct Symbol* vsym, Bytebuffer*);
void bindata_vlenconstants(List*);
void bindata_basetype(struct Symbol*,struct Datasrc*,Bytebuffer*,struct Datalist*);
#endif
#ifdef ENABLE_C
/* from: cdata.c */
void cdata_attrdata(struct Symbol* asym, Bytebuffer*);
void cdata_array(struct Symbol*,Bytebuffer*,Datasrc*,Odometer*,int,Datalist*);
void cdata_basetype(struct Symbol*,struct Datasrc*,Bytebuffer*,struct Datalist*);
void cdata_vlenconstants(List*,Bytebuffer*);
char* cdata_const(Constant*);
#endif
#ifdef ENABLE_F77
/* from: f77data.c */
void f77data_attrdata(struct Symbol* asym, Bytebuffer*);
void f77data_array(struct Symbol*,Bytebuffer*,Datasrc*,Odometer*,int,Datalist*);
void f77data_basetype(struct Symbol*,struct Datasrc*,Bytebuffer*,struct Datalist*);
char* f77data_const(Constant* ci);
void f77quotestring(Bytebuffer* databuf);
#endif
#ifdef ENABLE_CML
/* from: cmldata.c */
void gencml_attrdata(struct Symbol* asym, Bytebuffer*);
void gencml_scalardata(struct Symbol* vsym, Bytebuffer*);
void gencml_arraydata(struct Symbol* vsym, Bytebuffer*);
void gencml_vlenconstants(List*, Bytebuffer*);
void gencml_fillvalue(struct Symbol*, Datalist*, Datasrc*, Bytebuffer*);
void xquotestring(Bytebuffer* databuf);
char* xconst(Constant* ci);
#endif
#ifdef ENABLE_JAVA
/* from: jdata.c */
void jdata_array(struct Symbol*,Bytebuffer*,Datasrc*,Odometer*,int,Datalist*);
void jdata_basetype(struct Symbol*,struct Datasrc*,Bytebuffer*,struct Datalist*);
char* jdata_const(Constant* ci);
void jquotestring(Bytebuffer* databuf, char);
#endif
/* from: data.c */
Constant gen_string(unsigned long, Datasrc*);
int stringimplode(Constant* con);
Constant cloneconstant(Constant* con); /* shallow clone*/
Constant gen_stringall(unsigned long size, Datasrc* src, unsigned long);
Datasrc* datalist2src(Datalist* list);
Datasrc* const2src(Constant*);
Constant list2const(Datalist*);
void freedatasrc(Datasrc* src);
void srcpush(Datasrc*);
void srcpushlist(Datasrc* src, Datalist* cmpd);
void srcpop(Datasrc*);
void srcmove(Datasrc*,size_t);
void srcsetfill(Datasrc* ds, Datalist* list);
Datalist* datalistclone(Datalist* dl);
Datalist* datalistconcat(Datalist* dl1, Datalist* dl2);
Datalist* datalistappend(Datalist* dl, Constant* con);
Datalist* datalistreplace(Datalist* dl, unsigned int index, Constant* con);
Constant* srcnext(Datasrc*);
int srclast(Datasrc*); /* are we at the last entry ? */
int srcmore(Datasrc*);
int srcline(Datasrc* ds);
void srcsplice(Datasrc* ds, Datalist* list);
void alignbuffer(struct Constant* prim, Bytebuffer* buf);
/* Code dump support procedures */
void bbindent(Bytebuffer*,const int);
void bbprintf(Bytebuffer*,const char *fmt, ...);
void bbprintf0(Bytebuffer*,const char *fmt, ...);
/* Following dump to codebuffer */
void codeprintf(const char *fmt, ...);
void codedump(Bytebuffer*);
void codepartial(const char*);
void codeline(const char*);
void codelined(int n,const char*);
void codeflush(void); /* flush codebuffer to stdout */
void commify(Bytebuffer* buf);
char* word(char* p, Bytebuffer* buf);
/* Provide buffers for language based generators */
extern Bytebuffer* codebuffer; /* buffer over the std output */
extern Bytebuffer* stmt; /* single stmt text generation */
#ifdef FASTDATASRC
#define srcpeek(ds) ((ds)==NULL || (ds)->index >= (ds)->max?NULL:(ds)->data+(ds)->index)
#else
Constant* srcpeek(Datasrc*);
#endif
/* Aliases */
#define srcincr(src) srcnext(src)
#define srcget(src) srcpeek(src)
extern Constant nullconstant;
extern Constant fillconstant;
/* From genchar.c */
void gen_charattr(struct Symbol* asym, Bytebuffer* databuf);
void gen_chararray(struct Symbol*, Bytebuffer*, Datasrc*, struct Odometer*, int);
void gen_charfield(Datasrc* src, struct Odometer*, int index, Bytebuffer* databuf);
void gen_charvlen(Datasrc*, Bytebuffer*);
int collectstring(struct Constant* con, size_t declsize, Bytebuffer* databuf);
#endif /*DATA_H*/
| 2,787 |
2,757 | <reponame>BearerPipelineTest/google-ctf<filename>2020/quals/reversing-sprint/asm.py
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
# Usage: python3 asm.py source.s scaffold.c output.c
# Read the assembly source; strip ';' comments, commas and surrounding
# whitespace, then drop blank lines.
assembly = open(sys.argv[1]).readlines()
assembly = [line.split(";")[0].strip().replace(",", "") for line in assembly]
assembly = [line for line in assembly if line]
# Symbol table: label name -> program-counter value.
label_to_pc = {}
# 64 KiB output image, NUL-filled; instructions are laid down as characters.
code = ["\x00"] * 65536
def reg_to_int(r):
    """Map a register name like 'r3' to its encoding: r<k> -> 2*k + 8."""
    assert r[0] == 'r'
    return 2 * int(r[1:]) + 8
def get_const(label):
    """Resolve `label` to a 16-bit value rendered as a decimal string.

    Numeric literals (any base accepted by int(x, 0)) are returned unpadded;
    symbolic labels are looked up in label_to_pc and zero-padded to 5 digits.
    On pass 0 an unknown label resolves to 0 (addresses are still being
    discovered); on pass 1 it is a hard error (KeyError).
    """
    try:
        n = int(label, 0)
    except ValueError:
        # Not a numeric literal -> symbolic label.  (The original used a bare
        # `except:`, which would also swallow KeyboardInterrupt/SystemExit.)
        if passnum == 1:
            n = label_to_pc[label]
        else:
            n = label_to_pc.get(label, 0)
        return "{:05d}".format(n % 2**16)
    return str(n % 2**16)
def dst_to_num(dst):
    """Translate a destination operand name to its printf argument number."""
    if dst[0] == 'r':
        return 2 * int(dst[1:]) + 9
    special = {"dptr": 7, "*dptr": 6}
    if dst in special:
        return special[dst]
    print("Oops")
    raise Exception("Invalid dst: " + dst)
def src_to_str(src):
    """Translate a source operand into its printf-format fragment.

    Registers and dptr forms become positional '*N$' references; anything
    else is treated as a constant/label and resolved via get_const().
    """
    if src == "dptr":
        return "*6$"
    if src == "*dptr":
        return "*5$"
    if src[0] == 'r':
        return "*{}$".format(2 * int(src[1:]) + 8)
    return get_const(src)
# Two-pass assembly: pass 0 discovers label addresses (unknown labels resolve
# to 0), pass 1 re-assembles with the completed label_to_pc table so all
# forward references get correct values.
for passnum in range(2):
    pc = 0
    print("Pass #" + str(passnum))
    for i, line in enumerate(assembly):
        # Implicit label "_<i>" records the address of source line i; used to
        # compute fall-through targets ("_<i+1>").
        label_to_pc["_" + str(i)] = pc
        if ":" in line:
            # A label.
            name = line.split(":")[0].strip()
            label_to_pc[name] = pc
        elif line[0] == ".":
            # Directive.
            line = line.split()
            name = line[0]
            args = line[1:]
            if name == ".org":
                # Set the assembly origin (absolute pc).
                pc = int(args[0], 0)
            elif name == ".equ":
                # Bind a symbolic constant.
                label, val = args
                label_to_pc[label] = int(val, 0)
            elif name == ".db":
                # Emit raw bytes.
                for a in args:
                    code[pc] = chr(int(a, 0)%256)
                    pc += 1
            elif name == ".dw":
                # Emit 16-bit words, little-endian.
                for a in args:
                    code[pc] = chr(int(a, 0)&255)
                    code[pc+1] = chr(int(a, 0)>>8)
                    pc += 2
            else:
                print("Oops")
                raise Exception("Unknown directive: " + name)
        else:
            # An instruction: compiled into a printf format string.
            line = line.split()
            name = line[0]
            args = line[1:]
            #print(name, args)
            if name == "jnz":
                # Special case.
                # Conditional jump: pad-width arithmetic encodes the branch
                # target relative to the fall-through address A.
                reg, where = args
                reg = reg_to_int(reg)
                A = int(get_const("_" + str(i+1)))
                B = int(get_const(where))
                first = (B-A-1) % 2**16
                second = (A-2-first) % 2**16
                ins = "%{reg:02d}$c%1${first:05d}s%2$c%4$s%1${second:05d}s%3$hn"
                ins = ins.format(reg=reg, first=first, second=second)
            elif name == "jmp":
                # Unconditional jump: write the target into the pc slot.
                tgt, = args
                tgt = get_const(tgt)
                ins = "%1${tgt}s%3$hn".format(tgt=tgt)
            else:
                # Ordinary instruction: first advance the stored pc to the
                # fall-through address (pad to `next`, then pad the
                # complement so total printed width is 0 mod 2**16)...
                next = int(get_const("_" + str(i+1)))  # NOTE: shadows builtin next()
                compl = 2**16 - next
                ins = "%1${next:05d}s%3$hn%1${compl:05d}s"
                ins = ins.format(next=next, compl=compl)
                ap = ""
                # ...then append the data-movement payload.
                if name == "mov":
                    dst, src = args
                    dst = dst_to_num(dst)
                    src = src_to_str(src)
                    ap = "%1${src}s%{dst}$hn".format(src=src, dst=dst)
                elif name == "add":
                    dst, src1, src2 = args
                    dst = dst_to_num(dst)
                    src1 = src_to_str(src1)
                    src2 = src_to_str(src2)
                    ap = "%1${src1}s%1${src2}s%{dst}$hn"
                    ap = ap.format(src1=src1, src2=src2, dst=dst)
                else:
                    print("Oops")
                    raise Exception("Unknown opcode: " + name)
                ins += ap
            #print("Asm:", ins)
            # Lay the instruction text into the image; instructions are
            # NUL-terminated (hence the +1).
            for j, c in enumerate(ins):
                code[pc+j] = c
            pc += len(ins) + 1 # For NUL
# Hex-escape the used prefix of the image for embedding in a C string literal.
full = ""
for c in "".join(code).rstrip("\x00"):
    full += "\\x{:02x}".format(ord(c))
#print("Final code:")
#print(full)
# Splice the program into the scaffold and emit it, plus an address map
# (one "addr: source-line" entry per assembly line) for debugging.
scaffold = open(sys.argv[2]).read()
open(sys.argv[3], "w").write(scaffold.replace("PROG_HERE", full))
open(sys.argv[3] + ".map", "w").write("".join(
    "{:04x}".format(label_to_pc["_" + str(i)]) + ": " + assembly[i] + "\n"
    for i in range(len(assembly))))
| 2,774 |
575 | <reponame>iridium-browser/iridium-browser
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROMEOS_SERVICES_LIBASSISTANT_DEVICE_SETTINGS_CONTROLLER_H_
#define CHROMEOS_SERVICES_LIBASSISTANT_DEVICE_SETTINGS_CONTROLLER_H_
#include <memory>
#include <string>
#include <vector>
#include "base/component_export.h"
#include "base/memory/weak_ptr.h"
#include "base/sequenced_task_runner.h"
#include "chromeos/assistant/internal/action/assistant_action_observer.h"
#include "chromeos/services/libassistant/assistant_manager_observer.h"
#include "chromeos/services/libassistant/public/mojom/device_settings_delegate.mojom.h"
#include "mojo/public/cpp/bindings/pending_remote.h"
#include "mojo/public/cpp/bindings/remote.h"
namespace assistant {
namespace api {
namespace client_op {
class ModifySettingArgs;
class GetDeviceSettingsArgs;
} // namespace client_op
} // namespace api
} // namespace assistant
namespace chromeos {
namespace assistant {
struct DeviceSetting;
} // namespace assistant
} // namespace chromeos
namespace chromeos {
namespace libassistant {
class Setting;
// Observes Libassistant device-setting actions (modify / query requests) and
// the lifecycle of the assistant manager, forwarding setting operations to
// the browser side through the mojom::DeviceSettingsDelegate remote.
class DeviceSettingsController
    : public AssistantManagerObserver,
      public chromeos::assistant::action::AssistantActionObserver {
 public:
  DeviceSettingsController();
  DeviceSettingsController(DeviceSettingsController&) = delete;
  DeviceSettingsController& operator=(DeviceSettingsController&) = delete;
  ~DeviceSettingsController() override;
  // Connects the delegate used to apply/query settings in the browser.
  void Bind(mojo::PendingRemote<mojom::DeviceSettingsDelegate> delegate);
  // chromeos::assistant::action::AssistantActionObserver implementation:
  void OnModifyDeviceSetting(
      const ::assistant::api::client_op::ModifySettingArgs& setting) override;
  void OnGetDeviceSettings(
      int interaction_id,
      const ::assistant::api::client_op::GetDeviceSettingsArgs& setting)
      override;
  // AssistantManagerObserver implementation:
  void OnAssistantManagerCreated(
      assistant_client::AssistantManager* assistant_manager,
      assistant_client::AssistantManagerInternal* assistant_manager_internal)
      override;
  void OnDestroyingAssistantManager(
      assistant_client::AssistantManager* assistant_manager,
      assistant_client::AssistantManagerInternal* assistant_manager_internal)
      override;
  // Returns which of the given device settings are supported or not.
  std::vector<chromeos::assistant::DeviceSetting> GetSupportedDeviceSettings(
      const ::assistant::api::client_op::GetDeviceSettingsArgs& args) const;
 private:
  bool IsSettingSupported(const std::string& setting_id) const;
  void AddSetting(std::unique_ptr<Setting> setting);
  // Registered per-setting handlers; consulted by IsSettingSupported().
  std::vector<std::unique_ptr<Setting>> settings_;
  // Raw pointer owned elsewhere; set/cleared by the AssistantManagerObserver
  // callbacks above.
  assistant_client::AssistantManagerInternal* assistant_manager_internal_ =
      nullptr;
  mojo::Remote<mojom::DeviceSettingsDelegate> remote_;
  // NOTE(review): appears to be the task runner the remote is bound/used on —
  // confirm against the .cc file.
  scoped_refptr<base::SequencedTaskRunner> mojom_task_runner_;
  base::WeakPtrFactory<DeviceSettingsController> weak_factory_{this};
};
} // namespace libassistant
} // namespace chromeos
#endif // CHROMEOS_SERVICES_LIBASSISTANT_DEVICE_SETTINGS_CONTROLLER_H_
| 983 |
971 | package com.ucar.datalink.manager.core.utils.timer;
/**
* Created by lubiao on 2016/12/12.
*/
/**
 * Doubly-linked-list node binding a {@link TimerTask} to its expiration time
 * and to the {@link TimerTaskList} bucket currently holding it.  The linkage
 * fields are volatile because entries migrate between lists concurrently
 * (see {@link #remove()}).  Ordering is by expiration time.
 */
class TimerTaskEntry implements Comparable<TimerTaskEntry> {
    private volatile TimerTaskList list;
    private volatile TimerTaskEntry next;
    private volatile TimerTaskEntry prev;
    private final TimerTask timerTask;
    private final Long expirationMs;
    TimerTaskEntry(TimerTask timerTask, long expirationMs) {
        // if this timerTask is already held by an existing timer task entry,
        // setTimerTaskEntry will remove it.
        if (timerTask != null) {
            timerTask.setTimerTaskEntry(this);
        }
        this.timerTask = timerTask;
        this.expirationMs = expirationMs;
    }
    /**
     * True once the task has been re-bound to a different entry (the
     * constructor above re-binds on re-scheduling), making this entry stale.
     */
    boolean cancelled() {
        return timerTask.getTimerTaskEntry() != this;
    }
    /**
     * Unlinks this entry from whatever list currently holds it, retrying if a
     * concurrent migration moves it to another list mid-removal.
     */
    void remove() {
        TimerTaskList currentList = this.list;
        // If remove is called when another thread is moving the entry parseFrom a task entry list to another,
        // this may fail to remove the entry due to the change of value of list. Thus, we retry until the list becomes null.
        // In a rare case, this thread sees null and exits the loop, but the other thread insert the entry to another list later.
        while (currentList != null) {
            currentList.remove(this);
            currentList = list;
        }
    }
    @Override
    public int compareTo(TimerTaskEntry o) {
        return this.expirationMs.compareTo(o.expirationMs);
    }
    public TimerTaskList getList() {
        return list;
    }
    public void setList(TimerTaskList list) {
        this.list = list;
    }
    public TimerTaskEntry getNext() {
        return next;
    }
    public void setNext(TimerTaskEntry next) {
        this.next = next;
    }
    public TimerTaskEntry getPrev() {
        return prev;
    }
    public void setPrev(TimerTaskEntry prev) {
        this.prev = prev;
    }
    public TimerTask getTimerTask() {
        return timerTask;
    }
    public Long getExpirationMs() {
        return expirationMs;
    }
}
| 782 |
3,508 | package com.fishercoder.solutions;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * LeetCode 448: Find All Numbers Disappeared in an Array.
 * Given nums containing values in [1, n], return the values in [1, n] that
 * never appear.  Results are produced in ascending order for both solutions.
 */
public class _448 {
    public static class Solution1 {
        /**
         * Counting solution.
         * Time: O(n + max(nums)); Space: O(max(nums)).
         * Seeds a count table with every value in [1, max], tallies nums,
         * and reports the values whose count stayed zero.
         */
        public List<Integer> findDisappearedNumbers(int[] nums) {
            int max = nums.length;
            for (int i : nums) {
                max = Math.max(max, i);
            }
            Map<Integer, Integer> counts = new HashMap<>();
            for (int i = 1; i <= max; i++) {
                counts.put(i, 0);
            }
            for (int i : nums) {
                counts.merge(i, 1, Integer::sum);
            }
            List<Integer> result = new ArrayList<>();
            // entrySet avoids the keySet+get double lookup of the original.
            for (Map.Entry<Integer, Integer> entry : counts.entrySet()) {
                if (entry.getValue() == 0) {
                    result.add(entry.getKey());
                }
            }
            return result;
        }
    }
    public static class Solution2 {
        /**
         * In-place marking solution.
         * Time: O(n); Space: O(1) extra — but note the input array is
         * mutated (values may be negated).
         * For each value v, flips nums[v - 1] negative; indices left
         * positive correspond to missing values i + 1.
         */
        public List<Integer> findDisappearedNumbers(int[] nums) {
            for (int i = 0; i < nums.length; i++) {
                int val = Math.abs(nums[i]) - 1;
                if (nums[val] > 0) {
                    nums[val] = -nums[val];
                }
            }
            List<Integer> result = new ArrayList<>();
            for (int i = 0; i < nums.length; i++) {
                if (nums[i] > 0) {
                    result.add(i + 1);
                }
            }
            return result;
        }
    }
}
6,098 | <filename>h2o-genmodel/src/main/java/hex/genmodel/algos/tree/ScoreTree.java
package hex.genmodel.algos.tree;
import java.io.Serializable;
/**
 * Strategy interface for scoring a single observation against one serialized
 * decision tree.  Serializable so scorers can travel with generated models.
 */
public interface ScoreTree extends Serializable {
  /**
   * Scores {@code row} against the byte-serialized {@code tree}.
   *
   * @param tree serialized tree to walk (encoding is implementation-defined —
   *             confirm against the concrete ScoreTree implementation)
   * @param row input features, one double per column
   * @param computeLeafAssignment when true, presumably folds the reached-leaf
   *                              identity into the returned value — TODO confirm
   * @param domains per-column categorical domains; NOTE(review): usage depends
   *                on the implementation — confirm
   * @return the tree's score for this row
   */
  double scoreTree(byte[] tree, double[] row, boolean computeLeafAssignment, String[][] domains);
}
| 93 |
1,474 | /*
* Copyright 2020 FormDev Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.formdev.flatlaf.ui;
import java.awt.Dimension;
import java.awt.Image;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import com.formdev.flatlaf.util.MultiResolutionImageSupport;
import com.formdev.flatlaf.util.ScaledImageIcon;
/**
* @author <NAME>
*/
public class FlatTitlePaneIcon
	extends ScaledImageIcon
{
	// candidate images at various sizes; best match is chosen per request
	private final List<Image> images;

	/** @since 1.2 */
	public FlatTitlePaneIcon( List<Image> images, Dimension size ) {
		super( null, size.width, size.height );
		this.images = images;
	}

	@Override
	protected Image getResolutionVariant( int destImageWidth, int destImageHeight ) {
		// collect all images, resolving multi-resolution images to the variant
		// closest to the requested size
		List<Image> allImages = new ArrayList<>();
		for( Image image : images ) {
			if( MultiResolutionImageSupport.isMultiResolutionImage( image ) )
				allImages.add( MultiResolutionImageSupport.getResolutionVariant( image, destImageWidth, destImageHeight ) );
			else
				allImages.add( image );
		}

		if( allImages.size() == 1 )
			return allImages.get( 0 );

		// sort images by width; comparingInt() uses Integer.compare() and so
		// avoids the int-subtraction overflow idiom of the original comparator
		allImages.sort( Comparator.comparingInt( image -> image.getWidth( null ) ) );

		// return the smallest image that covers the requested size
		for( Image image : allImages ) {
			if( destImageWidth <= image.getWidth( null ) &&
				destImageHeight <= image.getHeight( null ) )
				return image;
		}

		// no image is large enough -> use largest image
		return allImages.get( allImages.size() - 1 );
	}
}
| 655 |
313 | <reponame>hz-ants/LabelFusion-docker2-<filename>scripts/registration/testFGR.py
from director import objectmodel as om
from director import ioUtils
from director import visualization as vis
from director import transformUtils
from director import filterUtils
from director import segmentation
from director import applogic
from director.debugVis import DebugData
from director import viewbehaviors
from director import vtkAll as vtk
from director import vtkNumpy as vnp
from director import segmentation
from director.shallowCopy import shallowCopy
import os
import numpy as np
import subprocess
import PythonQt
from PythonQt import QtGui, QtCore
def computePointFeatureHistograms(polyData, searchRadius=0.10):
    '''Compute FPFH point features for polyData and return a shallow copy
    of the filter output. searchRadius is the neighborhood radius used by
    the estimator.'''
    estimator = vtk.vtkPCLFPFHEstimation()
    estimator.SetInput(polyData)
    estimator.SetSearchRadius(searchRadius)
    estimator.Update()
    return shallowCopy(estimator.GetOutput())
def removeFile(filename):
    '''Delete filename if it exists as a regular file; otherwise do nothing.'''
    if not os.path.isfile(filename):
        return
    os.remove(filename)
def renameFeaturesFile(newName):
    '''Rename the features.bin file (produced in the current directory by the
    feature-computation step) to newName.'''
    src = 'features.bin'
    assert os.path.isfile(src)
    os.rename(src, newName)
def loadTransformFromFile(filename):
    '''Read a transform from a text file and return it as a vtkTransform.

    The first line of the file is treated as a header and skipped; each
    remaining line is a whitespace-separated row of floats forming the
    transform matrix.
    '''
    # use a context manager so the file handle is closed deterministically
    # (the original open(...).readlines() leaked the handle)
    with open(filename) as f:
        lines = f.readlines()
    data = np.array([[float(x) for x in line.split()] for line in lines[1:]])
    return transformUtils.getTransformFromNumpy(data)
def showTransformedData(name):
    '''Apply the transform stored in output.txt to the named object's polyData
    and show the transformed result as "<name> transformed".'''
    sourceObj = om.findObjectByName(name)
    transform = loadTransformFromFile('output.txt')
    transformed = filterUtils.transformPolyData(sourceObj.polyData, transform)
    vis.showPolyData(transformed, name + ' transformed')
def testAlign(modelName, sceneName):
    '''Align the named scene object to the named model object.

    For each of the two objects this downsamples the point cloud, estimates
    normals, computes FPFH features (written to features.bin) and renames the
    result to <type>_features.bin. It then runs runFGR.sh -- which is
    expected to read the two feature files and write the resulting transform
    to output.txt (TODO confirm against the script) -- and finally shows the
    transformed scene data.
    '''
    # remove stale outputs from any previous run
    removeFile('model_features.bin')
    removeFile('scene_features.bin')
    removeFile('features.bin')
    for objType, objName in [('model', modelName), ('scene', sceneName)]:
        print 'process', objName
        pd = om.findObjectByName(objName).polyData
        # downsample to a 5mm voxel grid before normal/feature estimation
        pd = segmentation.applyVoxelGrid(pd, leafSize=0.005)
        print 'compute normals...'
        pd = segmentation.normalEstimation(pd, searchRadius=0.05, useVoxelGrid=False, voxelGridLeafSize=0.01)
        print 'compute features...'
        # writes features.bin into the current directory
        computePointFeatureHistograms(pd, searchRadius=0.10)
        renameFeaturesFile(objType + '_features.bin')
    print 'run registration...'
    subprocess.check_call(['bash', 'runFGR.sh'])
    # visualize the scene data under the transform from output.txt
    showTransformedData(sceneName)
    print 'done.'
# Initialize the segmentation context (viewAxis=2 selects the z axis).
segmentation.SegmentationContext.initWithUser(0.0, vtk.vtkTransform(), viewAxis=2)
# Load the robot mesh, move it up 2m and rotate 90 deg about x, drop its
# normals, and add it to the view (hidden by default).
robotMesh = ioUtils.readPolyData('robot-mesh.vtp')
t = transformUtils.frameFromPositionAndRPY([0.0, 0.0, 2.0], [90,0,0])
robotMesh = filterUtils.transformPolyData(robotMesh, t)
robotMesh.GetPointData().SetNormals(None)
obj = vis.showPolyData(robotMesh, 'robot mesh', visible=False)
# Subdivide the mesh to densify it before sampling model points from it.
subd = vtk.vtkLoopSubdivisionFilter()
subd.SetInput(robotMesh)
subd.SetNumberOfSubdivisions(3)
subd.Update()
subdividedMesh = subd.GetOutput()
# Sample the model point cloud from the subdivided mesh (5mm voxel grid).
modelPoints = segmentation.applyVoxelGrid(subdividedMesh, leafSize=0.005)
vis.showPolyData(modelPoints, 'model points')
print 'model pts:', modelPoints.GetNumberOfPoints()
# Load the scene point cloud to register against.
pointCloud = ioUtils.readPolyData('pointcloud.vtp')
obj = vis.showPolyData(pointCloud, 'pointcloud original')
# Disabled: perturb the scene cloud with a known transform to exercise the
# registration, and show the downsampled scene points.
'''
t = transformUtils.frameFromPositionAndRPY([0.2, 0.3, 0.4], [10,14,15])
pointCloud = filterUtils.transformPolyData(pointCloud, t)
scenePoints = segmentation.applyVoxelGrid(pointCloud, leafSize=0.005)
vis.showPolyData(scenePoints, 'scene points')
print 'scene pts:', scenePoints.GetNumberOfPoints()
'''
# Run FGR registration of the original scene cloud against the model points.
testAlign('model points', 'pointcloud original')
# Disabled: debug visualization of normals/feature glyphs on the model points.
'''
pd = modelPoints
print 'compute normals...'
pd = segmentation.normalEstimation(pd, searchRadius=0.05, useVoxelGrid=False, voxelGridLeafSize=0.01)
computePointFeatureHistograms(pd)
glyphs = segmentation.applyArrowGlyphs(pd, computeNormals=False)
vis.showPolyData(glyphs, 'glyphs', visible=False)
'''
| 1,342 |
8,629 | #include <Common/config.h>
#if USE_YAML_CPP
#include "gtest_helper_functions.h"
#include <base/scope_guard.h>
#include <Common/Config/YAMLParser.h>
#include <Common/Config/ConfigHelper.h>
#include <Poco/AutoPtr.h>
#include "Poco/DOM/Document.h"
#include <gtest/gtest.h>
using namespace DB;
// Parsing a path that does not exist must throw a DB::Exception rather than
// returning an empty document.
TEST(Common, YamlParserInvalidFile)
{
    ASSERT_THROW(YAMLParser::parse("some-non-existing-file.yaml"), Exception);
}
// A YAML mapping whose value is a sequence of mappings ("keys list") must be
// converted to nested XML elements under the implicit <clickhouse> root,
// with each sequence entry flattened to a repeated child element.
TEST(Common, YamlParserProcessKeysList)
{
    auto yaml_file = getFileWithContents("keys-list.yaml", R"YAML(
operator:
    access_management: "1"
    networks:
      - ip: "10.1.6.168"
      - ip: "::1"
      - ip: "127.0.0.1"
)YAML");
    // remove the temporary fixture file even if an assertion fails
    SCOPE_EXIT({ yaml_file->remove(); });

    Poco::AutoPtr<Poco::XML::Document> xml = YAMLParser::parse(yaml_file->path());
    auto *p_node = xml->getNodeByPath("/clickhouse");
    EXPECT_EQ(xmlNodeAsString(p_node), R"CONFIG(<clickhouse>
<operator>
<access_management>1</access_management>
<networks>
<ip>10.1.6.168</ip>
<ip>::1</ip>
<ip>127.0.0.1</ip>
</networks>
</operator>
</clickhouse>
)CONFIG");
}
// A YAML sequence of scalar values ("values list") must be converted to a
// repeated XML element, one per value; an empty string value becomes an
// empty element.
TEST(Common, YamlParserProcessValuesList)
{
    auto yaml_file = getFileWithContents("values-list.yaml", R"YAML(
rules:
  - apiGroups: [""]
    resources:
      - nodes
      - nodes/proxy
      - services
      - endpoints
      - pods
)YAML");
    // remove the temporary fixture file even if an assertion fails
    SCOPE_EXIT({ yaml_file->remove(); });

    Poco::AutoPtr<Poco::XML::Document> xml = YAMLParser::parse(yaml_file->path());
    auto *p_node = xml->getNodeByPath("/clickhouse");
    EXPECT_EQ(xmlNodeAsString(p_node), R"CONFIG(<clickhouse>
<rules>
<apiGroups></apiGroups>
<resources>nodes</resources>
<resources>nodes/proxy</resources>
<resources>services</resources>
<resources>endpoints</resources>
<resources>pods</resources>
</rules>
</clickhouse>
)CONFIG");
}
#endif
| 761 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.php.editor.model.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.netbeans.modules.csl.api.OffsetRange;
import org.netbeans.modules.php.editor.api.PhpElementKind;
import org.netbeans.modules.php.editor.model.CodeMarker;
import org.netbeans.modules.php.editor.model.FileScope;
import org.netbeans.modules.php.editor.model.IndexScope;
import org.netbeans.modules.php.editor.model.ModelElement;
import org.netbeans.modules.php.editor.model.ModelUtils;
import org.netbeans.modules.php.editor.model.NamespaceScope;
import org.netbeans.modules.php.editor.parser.PHPParseResult;
import org.openide.filesystems.FileObject;
import org.openide.util.Union2;
/**
*
* @author <NAME>
*/
final class FileScopeImpl extends ScopeImpl implements FileScope {

    private PHPParseResult info;
    // code markers collected for this file; wrapped in a synchronized list
    private final List<CodeMarkerImpl> codeMarkers = Collections.synchronizedList(new ArrayList<CodeMarkerImpl>());

    FileScopeImpl(PHPParseResult info) {
        this(info, "program"); //NOI18N
    }

    private FileScopeImpl(PHPParseResult info, String name) {
        super(
                null,
                name,
                Union2.<String, FileObject>createSecond(fileObjectFor(info)),
                new OffsetRange(0, 0),
                PhpElementKind.PROGRAM,
                false);
        this.info = info;
    }

    // file object of the parsed source, or null if info is null
    private static FileObject fileObjectFor(PHPParseResult info) {
        return info != null ? info.getSnapshot().getSource().getFileObject() : null;
    }

    void addCodeMarker(CodeMarkerImpl marker) {
        codeMarkers.add(marker);
    }

    List<? extends CodeMarker> getMarkers() {
        return codeMarkers;
    }

    void clearMarkers() {
        codeMarkers.clear();
    }

    /**
     * @return the indexScope
     */
    @Override
    public IndexScope getIndexScope() {
        return info.getModel().getIndexScope();
    }

    @Override
    public Collection<? extends NamespaceScope> getDeclaredNamespaces() {
        // keep only namespace declarations among this scope's elements
        ElementFilter<NamespaceScope> namespacesOnly = new ElementFilter<NamespaceScope>() {
            @Override
            public boolean isAccepted(ModelElement element) {
                return element.getPhpElementKind().equals(PhpElementKind.NAMESPACE_DECLARATION);
            }
        };
        return filter(getElements(), namespacesOnly);
    }

    @Override
    public NamespaceScope getDefaultDeclaredNamespace() {
        // first declared namespace that reports itself as the default one
        Collection<? extends NamespaceScope> defaults = ModelUtils.filter(getDeclaredNamespaces(), new ModelUtils.ElementFilter<NamespaceScope>() {
            @Override
            public boolean isAccepted(NamespaceScope ns) {
                return ns.isDefaultNamespace();
            }
        });
        return ModelUtils.getFirst(defaults);
    }
}
| 1,241 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.