max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
454 |
<gh_stars>100-1000
package io.vertx.up.atom;
/**
 * Plain mutable data holder describing a basic user account
 * (username, password, age and email), with standard accessors.
 *
 * NOTE(review): {@link #toString()} includes the raw password; confirm this
 * value is never written to production logs.
 */
public class User {

    private String username;
    private String password;
    private Integer age;
    private String email;

    public String getEmail() {
        return this.email;
    }

    public void setEmail(final String email) {
        this.email = email;
    }

    public String getUsername() {
        return this.username;
    }

    public void setUsername(final String username) {
        this.username = username;
    }

    public String getPassword() {
        return this.password;
    }

    public void setPassword(final String password) {
        this.password = password;
    }

    public Integer getAge() {
        return this.age;
    }

    public void setAge(final Integer age) {
        this.age = age;
    }

    @Override
    public String toString() {
        // %s renders null fields as "null", exactly like the previous
        // concatenation-based implementation did.
        return String.format(
            "User{username='%s', password='%s', age=%s, email='%s'}",
            this.username, this.password, this.age, this.email);
    }
}
| 434 |
386 |
<gh_stars>100-1000
##########################################################################
#
# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
## Registers a type id for an extension class. This makes TypeId.className
# available and also checks that no other type is trying to use the same id.
# It raises a RuntimeError if a conflicting type is already registered.
def __registerTypeId( typeId, typeName, baseTypeId ) :

	assert( type( typeId ) is IECore.TypeId )
	assert( type( typeName ) is str )
	assert( type( baseTypeId ) is IECore.TypeId )

	# if the name is already taken it must map to the same id, otherwise bail
	if hasattr( IECore.TypeId, typeName ):
		if getattr( IECore.TypeId, typeName ) != typeId:
			raise RuntimeError( "Type \"%s\" is already registered." % typeName )
		return

	# the id itself must not already be claimed under another name
	if typeId in IECore.TypeId.values :
		raise RuntimeError( "TypeId \"%d\" is already registered as \"%s\"." % (typeId, IECore.TypeId.values[typeId] ) )

	# extend the TypeId enum with the new name/id pair
	setattr( IECore.TypeId, typeName, typeId )
	IECore.TypeId.values[ int( typeId ) ] = typeId

	# register the new type id
	IECore.RunTimeTyped.registerType( typeId, typeName, baseTypeId )
# Next dynamic type id to hand out; lazily initialised to FirstDynamicTypeId
# (300000) on first use by registerRunTimeTyped().
__nextDynamicRunTimeTypedId = None
## This function adds the necessary function definitions to a python
# class for it to properly implement the RunTimeTyped interface. It should
# be called once for all python classes inheriting from RunTimeTyped. It also
# calls registerTypeId() for you.
# typId is optional and if not defined, this function will associate a dynamic Id
# in the range FirstDynamicTypeId and LastDynamicTypeId from TypeIds.h.
# It's necessary to specify type Id for Object derived class or anything that
# is serializable.
# If typeName is not specified then the name of the class itself is used - you may wish
# to provide an explicit typeName in order to prefix the name with a module name.
def registerRunTimeTyped( typ, typId = None, typeName = None ) :

	if typeName is None :
		typeName = typ.__name__

	runTypedBaseClass = next( c for c in typ.__bases__ if issubclass( c, IECore.RunTimeTyped ) )

	# constants below are the same as in TypeIds.h
	FirstDynamicTypeId = 300000
	LastDynamicTypeId = 399999

	# check if overwriting registration.
	if not hasattr( IECore.TypeId, typeName ) :

		if typId is None :

			# no explicit id supplied - allocate the next free dynamic one
			global __nextDynamicRunTimeTypedId

			if __nextDynamicRunTimeTypedId is None :
				__nextDynamicRunTimeTypedId = FirstDynamicTypeId
			elif __nextDynamicRunTimeTypedId > LastDynamicTypeId:
				raise Exception( "Too many dynamic RunTimeTyped registered classes! You must change TypeIds.h and rebuild Cortex." )

			typId = __nextDynamicRunTimeTypedId
			__nextDynamicRunTimeTypedId += 1

		__registerTypeId( IECore.TypeId( typId ), typeName, IECore.TypeId( runTypedBaseClass.staticTypeId() ) )

	else :
		# check if the new type Id is compatible with the previously registered one.
		prevTypId = getattr( IECore.TypeId, typeName )

		if prevTypId in range( FirstDynamicTypeId, LastDynamicTypeId+1 ) :
			if not typId is None :
				raise Exception( "Trying to set a type ID for %s previously registered as a dynamic type Id!" % typeName )
		else :
			if typId is None :
				raise Exception( "Trying to re-register type %s as dynamic type Id!" % typeName )
			elif typId != prevTypId :
				raise Exception( "Trying to re-register %s under different type Id: %s != %s" % ( typeName, str(typId), prevTypId ) )

		# necessary when the typeid is defined in IECore/TypeIds.h and bound in TypeIdBinding.cpp, but then
		# the class for that typeid is implemented in python (currently ClassParameter does this).
		if IECore.RunTimeTyped.typeNameFromTypeId( prevTypId )=="" :
			IECore.RunTimeTyped.registerType( prevTypId, typeName, IECore.TypeId( runTypedBaseClass.staticTypeId() ) )

	# retrieve the correct value from the enum
	tId = getattr( IECore.TypeId, typeName )

	# add the typeId and typeName method overrides
	typ.typeId = lambda x : tId
	typ.typeName = lambda x: typeName

	# add the staticTypeId, staticTypeName, baseTypeId, and baseTypeName overrides
	typ.staticTypeId = staticmethod( lambda : tId )
	typ.staticTypeName = staticmethod( lambda : typeName )
	typ.baseTypeId = staticmethod( lambda : runTypedBaseClass.staticTypeId() )
	typ.baseTypeName = staticmethod( lambda : runTypedBaseClass.staticTypeName() )

	# add the inheritsFrom method override; accepts a type name or a TypeId,
	# and a base class or list of base classes to test against
	def inheritsFrom( t, baseClass ) :

		if type( t ) is str :
			if type( baseClass ) is list :
				for base in baseClass :
					if base.staticTypeName() == t :
						return True
			else:
				if baseClass.staticTypeName() == t :
					return True
		else :
			if type( baseClass ) is list :
				for base in baseClass :
					if base.staticTypeId() == IECore.TypeId( t ) :
						return True
			else:
				if baseClass.staticTypeId() == IECore.TypeId( t ) :
					return True

		# not matched directly - recurse into the base class(es)
		if type( baseClass ) is list :
			for base in baseClass:
				if base.inheritsFrom( t ):
					return True
		else:
			return baseClass.inheritsFrom( t )

		return False

	typ.inheritsFrom = staticmethod( lambda t : inheritsFrom( t, runTypedBaseClass ) )

	# add the isInstanceOf method override
	def isInstanceOf( self, t, baseClass ) :

		if type( t ) is str :
			if self.staticTypeName() == t :
				return True
		else :
			if IECore.TypeId( t ) == self.staticTypeId() :
				return True

		return inheritsFrom( t, baseClass )

	typ.isInstanceOf = lambda self, t : isInstanceOf( self, t, runTypedBaseClass )
| 2,289 |
829 |
/*
* Generated by class-dump 3.3.4 (64 bit).
*
* class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2011 by <NAME>.
*/
#import "IDEIndexDBSQLStream.h"
#import <sqlite3.h>
@class IDEIndexDatabase, NSMutableSet, NSObject/*<OS_dispatch_queue>*/;
/*
 * Connection object owning a single SQLite handle into the IDE index database.
 * NOTE(review): this header was produced by class-dump; the roles described in
 * the comments below are inferred from member/selector names and should be
 * confirmed against the actual implementation.
 */
@interface IDEIndexDBConnection : IDEIndexDBSQLStream
{
    IDEIndexDatabase *_database;             // presumably the owning database (see -database)
    NSObject<OS_dispatch_queue> *_runQueue;  // dispatch queue SQL work appears to run on
    struct sqlite3 *_dbHandle;               // raw SQLite connection handle
    id _cancelCallback;                      // likely a block consulted by -shouldCancel
    long long _tempTableCount;               // presumably feeds -newTempTableName
    NSMutableSet *_tempTables;               // temp tables currently alive on this connection
    BOOL _closing;                           // presumably set while shutting down
    void *_checkpointInfo;
    int _inUseCount;
    int _collectionCount;
}
+ (void)initialize;
- (id)database;
//- (void).cxx_destruct;
- (void)dealloc;
- (void)close;
- (void)wait;
- (void)reportSQLiteError:(int)arg1 function:(id)arg2 message:(const char *)arg3 info:(id)arg4;
- (void)finalizeSQLiteStmt:(struct sqlite3_stmt **)arg1;
// NOTE(review): the '(void)' parameter types on forEachRow: below are
// class-dump artifacts — the real parameters are most likely block types
// that class-dump could not express.
- (void)runSQLiteStmt:(struct sqlite3_stmt **)arg1 sql:(id)arg2 bindings:(id)arg3 forEachRow:(void)arg4 whenDone:(id)arg5;
- (void)cancelWhen:(id)arg1;
- (BOOL)shouldCancel;
- (void)shutdown;
- (void)runSQLite:(id)arg1;
- (void)willSubmitTransaction:(id)arg1;
- (void)explainQueryPlan:(id)arg1;
- (int)doSQLChanges:(id)arg1 withBindings:(id)arg2;
- (void)doSQLQuery:(id)arg1 withBindings:(id)arg2 forEachRow:(void)arg3;
// Temp-table lifecycle.
- (void)didDropTempTable:(id)arg1;
- (void)didCreateTempTable:(id)arg1;
- (id)newTempTableWithName:(id)arg1 schema:(id)arg2;
- (id)newTempTableWithSchema:(id)arg1;
- (id)newTempTableName;
- (void)doBlock:(id)arg1;
- (id)dbConnection;
- (void)setAutoCheckpointThreshold:(int)arg1;
- (id)initWithDatabase:(id)arg1 create:(BOOL)arg2 backgroundPriority:(BOOL)arg3;
@end
| 681 |
386 |
##########################################################################
#
# Copyright (c) 2007-2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import sys
import unittest
import IECore
class TestRunTimeTyped( unittest.TestCase ) :
	"""Exercises the RunTimeTyped bindings and the python-side
	registerRunTimeTyped() machinery."""

	def test( self ) :

		i = IECore.IntData( 10 )

		self.assertTrue( i.isInstanceOf( "IntData" ) )
		self.assertTrue( i.isInstanceOf( "Data" ) )
		self.assertTrue( i.isInstanceOf( "Object" ) )
		self.assertTrue( i.isInstanceOf( "RunTimeTyped" ) )

		self.assertTrue( i.isInstanceOf( IECore.TypeId.IntData ) )
		self.assertTrue( i.isInstanceOf( IECore.TypeId.Data ) )
		self.assertTrue( i.isInstanceOf( IECore.TypeId.Object ) )
		self.assertTrue( i.isInstanceOf( IECore.TypeId.RunTimeTyped ) )

		self.assertEqual( i.baseTypeId(), IECore.TypeId.Data )

		self.assertTrue( IECore.IntData.inheritsFrom( "Data" ) )
		self.assertTrue( IECore.IntData.inheritsFrom( "Object" ) )
		self.assertTrue( IECore.IntData.inheritsFrom( "RunTimeTyped" ) )

		self.assertTrue( IECore.IntData.inheritsFrom( IECore.TypeId.Data ) )
		self.assertTrue( IECore.IntData.inheritsFrom( IECore.TypeId.Object ) )
		self.assertTrue( IECore.IntData.inheritsFrom( IECore.TypeId.RunTimeTyped ) )

		self.assertTrue( IECore.Data.inheritsFrom( IECore.TypeId.Object ) )
		self.assertTrue( IECore.Data.inheritsFrom( IECore.TypeId.RunTimeTyped ) )

		self.assertTrue( IECore.Object.inheritsFrom( IECore.TypeId.RunTimeTyped ) )

		self.assertEqual( IECore.RunTimeTyped.typeNameFromTypeId( IECore.TypeId.IntData ), "IntData" )
		self.assertEqual( IECore.RunTimeTyped.typeIdFromTypeName( "IntData" ), IECore.TypeId.IntData )

	def testStaticTypeBindings( self ) :

		import IECore

		typeNames={}
		typeIds = {}

		# Make that all classes derived from RunTimeTyped correctly bind staticTypeName and staticTypeId
		for typeName in dir(IECore):

			t = getattr(IECore, typeName)

			baseClasses = []

			# Iteratively expand base classes all the way to the top
			if hasattr(t, "__bases__"):
				baseClasses = list( t.__bases__ )
				i=0
				while i < len(baseClasses):
					for x in baseClasses[i].__bases__:
						baseClasses.extend([x])
					i = i + 1

			if IECore.RunTimeTyped in baseClasses:

				baseIds = IECore.RunTimeTyped.baseTypeIds( t.staticTypeId() )
				baseIds = set( [ int(x) for x in baseIds ] )

				self.assertTrue( hasattr(t, "staticTypeName") )
				self.assertTrue( hasattr(t, "staticTypeId") )
				self.assertTrue( hasattr(t, "baseTypeId") )
				self.assertTrue( hasattr(t, "baseTypeName") )

				self.assertTrue( t.staticTypeId() in IECore.TypeId.values.keys() )

				# Make sure that no 2 IECore classes provide the same typeId or typeName
				if t.staticTypeName() in typeNames:
					raise RuntimeError( "'%s' does not have a unique RunTimeTyped static type name (conflicts with '%s')." % ( t.__name__ , typeNames[t.staticTypeName()] ))

				if t.staticTypeId() in typeIds:
					raise RuntimeError( "'%s' does not have a unique RunTimeTyped static type id (conflicts with '%s')." % (t.__name__ , typeIds[t.staticTypeId()] ))

				self.assertEqual( IECore.RunTimeTyped.typeNameFromTypeId( t.staticTypeId() ), t.staticTypeName() )
				self.assertEqual( IECore.RunTimeTyped.typeIdFromTypeName( t.staticTypeName() ), t.staticTypeId() )

				for base in baseClasses :
					if issubclass( base, IECore.RunTimeTyped ) :
						self.assertNotEqual( base.staticTypeId(), t.staticTypeId() )
						if not base.staticTypeId() in IECore.RunTimeTyped.baseTypeIds( t.staticTypeId() ):
							raise Exception( "'%s' does not have '%s' in its RunTimeTyped base classes, even though Python says it derives from it." % (t.staticTypeName(), base.staticTypeName() ))

				typeNames[t.staticTypeName()] = t.__name__
				typeIds[t.staticTypeId()] = t.__name__

	def testRegisterRunTimeTyped( self ) :

		# should raise because given type ID is different than the FileSequenceParameter type id
		self.assertRaises( Exception, IECore.registerRunTimeTyped, IECore.FileSequenceParameter, 100009 )

		# should raise because SequenceLsOp is registered with dynamic type id.
		self.assertRaises( Exception, IECore.registerRunTimeTyped, IECore.SequenceLsOp, 100009 )

		# should raise because FileSequenceParameter is registered with a non-dynamic type id
		self.assertRaises( Exception, IECore.registerRunTimeTyped, IECore.FileSequenceParameter )

		# should not raise because SequenceLsOp was already registered with a dynamic type id
		IECore.registerRunTimeTyped( IECore.SequenceLsOp )

		self.assertEqual( IECore.TypeId.OptionalCompoundParameter, IECore.OptionalCompoundParameter.staticTypeId() )
		self.assertEqual( IECore.TypeId.OptionalCompoundParameter, IECore.OptionalCompoundParameter( "", "" ).typeId() )

		self.assertTrue( IECore.OptionalCompoundParameter.inheritsFrom( "CompoundParameter" ) )
		self.assertTrue( IECore.OptionalCompoundParameter.inheritsFrom( IECore.TypeId.CompoundParameter ) )
		self.assertTrue( IECore.OptionalCompoundParameter.inheritsFrom( "Parameter" ) )
		self.assertTrue( IECore.OptionalCompoundParameter.inheritsFrom( IECore.TypeId.Parameter ) )
		self.assertTrue( IECore.OptionalCompoundParameter.inheritsFrom( "RunTimeTyped" ) )
		self.assertTrue( IECore.OptionalCompoundParameter.inheritsFrom( IECore.TypeId.RunTimeTyped ) )

		self.assertTrue( IECore.OptionalCompoundParameter( "", "" ).isInstanceOf( "OptionalCompoundParameter" ) )
		self.assertTrue( IECore.OptionalCompoundParameter( "", "" ).isInstanceOf( IECore.TypeId.OptionalCompoundParameter ) )
		self.assertTrue( IECore.OptionalCompoundParameter( "", "" ).isInstanceOf( "CompoundParameter" ) )
		self.assertTrue( IECore.OptionalCompoundParameter( "", "" ).isInstanceOf( IECore.TypeId.CompoundParameter ) )

		self.assertRaises( TypeError, IECore.OptionalCompoundParameter( "", "" ).isInstanceOf, 10 )

	def testTypeNameFromRunTimeTypedTypeId( self ) :

		self.assertEqual( IECore.RunTimeTyped.typeIdFromTypeName( "RunTimeTyped" ), IECore.TypeId.RunTimeTyped )

	def testRunTimeTypedTypeIdFromTypeName( self ) :

		self.assertEqual( IECore.RunTimeTyped.typeNameFromTypeId( IECore.TypeId.RunTimeTyped ), "RunTimeTyped" )

	def testInheritsFromWithTwoArguments( self ) :

		self.assertTrue( IECore.RunTimeTyped.inheritsFrom( IECore.TypeId.Object, IECore.TypeId.RunTimeTyped ) )
		self.assertTrue( IECore.RunTimeTyped.inheritsFrom( "Object", "RunTimeTyped" ) )

		self.assertTrue( IECore.RunTimeTyped.inheritsFrom( IECore.TypeId.CompoundObject, IECore.TypeId.Object ) )
		self.assertTrue( IECore.RunTimeTyped.inheritsFrom( "CompoundObject", "Object" ) )

		self.assertFalse( IECore.RunTimeTyped.inheritsFrom( IECore.TypeId.CompoundObject, IECore.TypeId.Writer ) )
		self.assertFalse( IECore.RunTimeTyped.inheritsFrom( "CompoundObject", "Writer" ) )

	def testRegisterPrefixedTypeName( self ) :

		class Prefixed( IECore.Op ) :
			def __init__( self ) :
				IECore.Op.__init__( self, "", IECore.IntParameter( "result", "" ) )

		prefixedTypeName = "SomeModuleName::Prefixed"
		IECore.registerRunTimeTyped( Prefixed, typeName = prefixedTypeName )

		self.assertEqual( Prefixed.staticTypeName(), prefixedTypeName )
		self.assertEqual( IECore.RunTimeTyped.typeIdFromTypeName( Prefixed.staticTypeName() ), Prefixed.staticTypeId() )

		p = Prefixed()
		self.assertEqual( p.typeName(), prefixedTypeName )
		self.assertEqual( p.typeId(), IECore.RunTimeTyped.typeIdFromTypeName( Prefixed.staticTypeName() ) )

		self.assertTrue( p.isInstanceOf( IECore.Op.staticTypeId() ) )
		self.assertTrue( p.isInstanceOf( IECore.Parameterised.staticTypeId() ) )
		self.assertTrue( p.isInstanceOf( IECore.RunTimeTyped.staticTypeId() ) )

	def testClassInPlaceOfTypeId( self ) :

		# check that we can pass the python class itself
		# where C++ would like a TypeId.
		self.assertTrue( IECore.Data.inheritsFrom( IECore.RunTimeTyped ) )
		self.assertTrue( IECore.Data.inheritsFrom( IECore.Object ) )
		self.assertFalse( IECore.Data.inheritsFrom( IECore.Parameter ) )
		self.assertRaises( Exception, IECore.Data.inheritsFrom, dict )

		# check that the converter mechanism doesn't both the
		# reference count for the class objects
		r = sys.getrefcount( IECore.Data )
		IECore.IntData.inheritsFrom( IECore.Data )
		self.assertEqual( r, sys.getrefcount( IECore.Data ) )
if __name__ == "__main__":
	unittest.main()
| 3,523 |
1,016 |
<reponame>peter-ls/kylo
package com.thinkbiganalytics.feedmgr.support;
/*-
* #%L
* thinkbig-feed-manager-controller
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
/**
 * Utility to interact with zip files held entirely in memory as byte arrays.
 */
public class ZipFileUtil {

    /**
     * Validates the entry names contained in a zip file. All comparisons are
     * case insensitive.
     *
     * @param zipFile            raw bytes of the zip file
     * @param validNames         names that must all be present (null treated as empty)
     * @param requiredNames      names that must additionally be present (may be null/empty)
     * @param matchAllValidNames when true the zip must contain exactly as many
     *                           entries as there are valid names
     * @return true when the zip is non-empty and satisfies all constraints
     * @throws IOException if the bytes cannot be read as a zip stream
     */
    public static boolean validateZipEntries(byte[] zipFile, Set<String> validNames, Set<String> requiredNames, boolean matchAllValidNames) throws IOException {
        if (validNames == null) {
            validNames = new HashSet<>();
        }
        List<String> validNamesList = validNames.stream().map(String::toLowerCase).collect(Collectors.toList());
        // getFileNames never returns null, so an emptiness check is sufficient
        Set<String> fileNames = getFileNames(zipFile).stream().map(String::toLowerCase).collect(Collectors.toSet());
        boolean isValid = !fileNames.isEmpty() && validNamesList.stream().allMatch(fileNames::contains);
        if (isValid && matchAllValidNames) {
            isValid = fileNames.size() == validNames.size();
        }
        if (isValid && requiredNames != null && !requiredNames.isEmpty()) {
            // BUGFIX: required names must be lowercased before comparing against
            // the lowercased file names, otherwise the documented case-insensitive
            // matching silently fails for mixed-case required names.
            isValid = requiredNames.stream().map(String::toLowerCase).allMatch(fileNames::contains);
        }
        return isValid;
    }

    /**
     * Reads the remaining data of the current zip entry into a UTF-8 string.
     *
     * @param buffer scratch buffer used while copying (contents overwritten)
     * @param zis    stream positioned at the entry to read
     * @param entry  the current entry (unused; kept for API compatibility)
     * @return the entry's data decoded as UTF-8
     * @throws IOException if reading from the stream fails
     */
    public static String zipEntryToString(byte[] buffer, ZipInputStream zis, ZipEntry entry) throws IOException {
        // consume all the data from this entry
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int len;
        while ((len = zis.read(buffer)) > 0) {
            out.write(buffer, 0, len);
        }
        // explicit charset - the platform default is not guaranteed to be UTF-8
        return new String(out.toByteArray(), StandardCharsets.UTF_8);
    }

    /**
     * Validates that all valid names and all required names are present in the
     * zip (case insensitive), without requiring an exact entry count match.
     *
     * @param zipFile       raw bytes of the zip file
     * @param validNames    names that must all be present (null treated as empty)
     * @param requiredNames names that must additionally be present
     * @return true when the zip satisfies the constraints
     * @throws IOException if the bytes cannot be read as a zip stream
     */
    public static boolean validateZipEntriesWithRequiredEntries(byte[] zipFile, Set<String> validNames, Set<String> requiredNames) throws IOException {
        return validateZipEntries(zipFile, validNames, requiredNames, false);
    }

    /**
     * Validates that all valid names are present in the zip (case insensitive).
     *
     * @param zipFile            raw bytes of the zip file
     * @param validNames         names that must all be present (null treated as empty)
     * @param matchAllValidNames when true the zip must contain exactly as many
     *                           entries as there are valid names
     * @return true when the zip satisfies the constraints
     * @throws IOException if the bytes cannot be read as a zip stream
     */
    public static boolean validateZipEntries(byte[] zipFile, Set<String> validNames, boolean matchAllValidNames) throws IOException {
        return validateZipEntries(zipFile, validNames, null, matchAllValidNames);
    }

    /**
     * Gets the file names in a zip file.
     *
     * @param zipFile raw bytes of the zip file
     * @return the set of entry names (never null; empty for an empty archive)
     * @throws IOException if the bytes cannot be read as a zip stream
     */
    public static Set<String> getFileNames(byte[] zipFile) throws IOException {
        Set<String> fileNames = new HashSet<>();
        // try-with-resources closes the stream even if reading fails
        try (ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(zipFile))) {
            ZipEntry entry;
            while ((entry = zis.getNextEntry()) != null) {
                fileNames.add(entry.getName());
            }
        }
        return fileNames;
    }

    /**
     * Adds an entry to a zip file.
     *
     * @param zip      the zip file which will have the content added
     * @param file     the string to add to the zip (written as UTF-8)
     * @param fileName the name of the new zip entry
     * @return the zip file with the newly added content
     * @throws IOException if reading or writing the zip streams fails
     */
    public static byte[] addToZip(byte[] zip, String file, String fileName) throws IOException {
        byte[] buffer = new byte[1024];
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(zip));
             ZipOutputStream zos = new ZipOutputStream(baos)) {
            ZipEntry entry;
            while ((entry = zis.getNextEntry()) != null) {
                // Copy each existing entry. A fresh ZipEntry is used so stale
                // size/compression metadata from the source archive cannot
                // conflict with the recompressed data written here.
                zos.putNextEntry(new ZipEntry(entry.getName()));
                int len;
                while ((len = zis.read(buffer)) > 0) {
                    zos.write(buffer, 0, len);
                }
                zos.closeEntry();
            }
            // append the new entry last
            zos.putNextEntry(new ZipEntry(fileName));
            zos.write(file.getBytes(StandardCharsets.UTF_8));
            zos.closeEntry();
        }
        return baos.toByteArray();
    }
}
| 2,024 |
1,144 |
package de.metas.common.util.time;
import lombok.NonNull;
import javax.annotation.Nullable;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
/*
* #%L
* de-metas-common-util
* %%
* Copyright (C) 2020 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
/**
 * Central access point for "the current time", indirected through a
 * {@link TimeSource} so tests can freeze or shift the clock via
 * {@link #setTimeSource(TimeSource)} / {@link #setFixedTimeSource(ZonedDateTime)}.
 */
public class SystemTime
{
	private static final TimeSource defaultTimeSource = new SystemTimeSource();

	// Overriding source, if any; null means "use the real system clock".
	@Nullable
	private static TimeSource timeSource;

	private static TimeSource getTimeSource()
	{
		final TimeSource override = timeSource;
		if (override != null)
		{
			return override;
		}
		return defaultTimeSource;
	}

	/**
	 * After invocation of this method, the time returned will be the system time again.
	 */
	public static void resetTimeSource()
	{
		timeSource = null;
	}

	/**
	 * @param newTimeSource the given TimeSource will be used for the time returned by the
	 *            methods of this class (unless it is null).
	 */
	public static void setTimeSource(@NonNull final TimeSource newTimeSource)
	{
		timeSource = newTimeSource;
	}

	public static void setFixedTimeSource(@NonNull final ZonedDateTime date)
	{
		setTimeSource(FixedTimeSource.ofZonedDateTime(date));
	}

	/**
	 * @param zonedDateTime ISO 8601 date time format (see {@link ZonedDateTime#parse(CharSequence)}).
	 *            e.g. 2018-02-28T13:13:13+01:00[Europe/Berlin]
	 */
	public static void setFixedTimeSource(@NonNull final String zonedDateTime)
	{
		final ZonedDateTime parsed = ZonedDateTime.parse(zonedDateTime);
		setTimeSource(FixedTimeSource.ofZonedDateTime(parsed));
	}

	/** @return current epoch milliseconds as reported by the active time source */
	public static long millis()
	{
		return getTimeSource().millis();
	}

	/** @return the time zone of the active time source */
	public static ZoneId zoneId()
	{
		return getTimeSource().zoneId();
	}

	public static GregorianCalendar asGregorianCalendar()
	{
		final GregorianCalendar calendar = new GregorianCalendar();
		calendar.setTimeInMillis(millis());
		return calendar;
	}

	public static Date asDate()
	{
		return new Date(millis());
	}

	public static Timestamp asTimestamp()
	{
		return new Timestamp(millis());
	}

	/**
	 * Same as {@link #asTimestamp()} but the returned date will be truncated to DAY.
	 */
	public static Timestamp asDayTimestamp()
	{
		final GregorianCalendar calendar = asGregorianCalendar();
		calendar.set(Calendar.HOUR_OF_DAY, 0);
		calendar.set(Calendar.MINUTE, 0);
		calendar.set(Calendar.SECOND, 0);
		calendar.set(Calendar.MILLISECOND, 0);
		return new Timestamp(calendar.getTimeInMillis());
	}

	public static Instant asInstant()
	{
		return Instant.ofEpochMilli(millis());
	}

	public static LocalDateTime asLocalDateTime()
	{
		return asZonedDateTime().toLocalDateTime();
	}

	@NonNull
	public static LocalDate asLocalDate()
	{
		return asLocalDate(zoneId());
	}

	@NonNull
	public static LocalDate asLocalDate(@NonNull final ZoneId zoneId)
	{
		return asZonedDateTime(zoneId).toLocalDate();
	}

	public static ZonedDateTime asZonedDateTime()
	{
		return asZonedDateTime(zoneId());
	}

	public static ZonedDateTime asZonedDateTimeAtStartOfDay()
	{
		return asZonedDateTime(zoneId()).truncatedTo(ChronoUnit.DAYS);
	}

	public static ZonedDateTime asZonedDateTimeAtEndOfDay(@NonNull final ZoneId zoneId)
	{
		// last representable instant of "today" in the given zone
		final LocalDate today = asZonedDateTime(zoneId).toLocalDate();
		return today.atTime(LocalTime.MAX).atZone(zoneId);
	}

	public static ZonedDateTime asZonedDateTime(@NonNull final ZoneId zoneId)
	{
		return asInstant().atZone(zoneId);
	}
}
| 1,413 |
1,288 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "multiprocess_func_list.h"
#include <map>
// Helper functions to maintain mapping of "test name"->test func.
// The information is accessed via a global map.
namespace multi_process_function_list {
namespace {

// Pairs a test's main entry point with its optional setup hook.
struct ProcessFunctions {
  ProcessFunctions() : main(NULL), setup(NULL) {}
  ProcessFunctions(TestMainFunctionPtr main, SetupFunctionPtr setup)
      : main(main),
        setup(setup) {
  }
  TestMainFunctionPtr main;  // runs the child-process test body; may be NULL
  SetupFunctionPtr setup;    // runs before |main| if non-NULL
};

typedef std::map<std::string, ProcessFunctions> MultiProcessTestMap;

// Retrieve a reference to the global 'func name' -> func ptr map.
// The function-local static avoids static-initialization-order problems
// with the AppendMultiProcessTest objects that populate the map.
MultiProcessTestMap& GetMultiprocessFuncMap() {
  static MultiProcessTestMap test_name_to_func_ptr_map;
  return test_name_to_func_ptr_map;
}

}  // namespace
// Registers (or overwrites) the functions for |test_name| in the global map.
// NOTE(review): instances appear to be created at static-initialization time
// by registration macros in the header — confirm there.
AppendMultiProcessTest::AppendMultiProcessTest(
    std::string test_name,
    TestMainFunctionPtr main_func_ptr,
    SetupFunctionPtr setup_func_ptr) {
  GetMultiprocessFuncMap()[test_name] =
      ProcessFunctions(main_func_ptr, setup_func_ptr);
}
int InvokeChildProcessTest(std::string test_name) {
MultiProcessTestMap& func_lookup_table = GetMultiprocessFuncMap();
MultiProcessTestMap::iterator it = func_lookup_table.find(test_name);
if (it != func_lookup_table.end()) {
const ProcessFunctions& process_functions = it->second;
if (process_functions.setup)
(*process_functions.setup)();
if (process_functions.main)
return (*process_functions.main)();
}
return -1;
}
} // namespace multi_process_function_list
| 567 |
781 |
<gh_stars>100-1000
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
package Windows.UI.Xaml.Controls;
import android.content.Context;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import run.ace.Utils;
// A container for an item in a ListBox.
// Wraps arbitrary content in a RelativeLayout so it can be hosted inside the
// Android list widget backing a XAML ListBox.
public class ListBoxItem extends RelativeLayout implements IHaveProperties, IFireEvents {
    public ListBoxItem(Context context) {
        super(context);
        // Stretch horizontally by default
        this.setGravity(android.view.Gravity.FILL_HORIZONTAL);
    }

    // Convenience setter mirroring the XAML ContentControl.Content property;
    // delegates to the generic property path below.
    public void setContent(Object content) {
        setProperty("ContentControl.Content", content);
    }

    // IHaveProperties.setProperty
    public void setProperty(String propertyName, Object propertyValue)
    {
        // Handle the Content property specially if it's a primitive
        // so it is formatted as expected inside a list.
        // Otherwise, defer to the generic handling for all ViewGroups.
        if (propertyName.equals("ContentControl.Content") &&
            Utils.isBoxedNumberOrString(propertyValue)) {
            // Inflate the stock single-line list-item layout and show the
            // primitive value as its text.
            android.widget.TextView content = (android.widget.TextView)android.view.LayoutInflater.from(getContext())
                .inflate(android.R.layout.simple_list_item_activated_1, this, false);
            content.setText(propertyValue.toString());
            this.addView(content);
        }
        else if (!ViewGroupHelper.setProperty(this, propertyName, propertyValue)) {
            // Fail loudly on unknown properties rather than ignoring them.
            throw new RuntimeException("Unhandled property for " + this.getClass().getSimpleName() + ": " + propertyName);
        }
    }

    // IFireEvents.addEventHandler
    public void addEventHandler(final String eventName, final Handle handle) {
        // TODO
    }

    // IFireEvents.removeEventHandler
    public void removeEventHandler(String eventName) {
        // TODO
    }
}
| 563 |
552 |
<reponame>asheraryam/ETEngine<gh_stars>100-1000
#include "stdafx.h"
#include "AssetDatabase.h"
#include <rttr/registration>
#include "AssetRegistration.h"
namespace et {
namespace core {
// reflection
// Registers AssetDatabase and its nested AssetCache with rttr so their
// contents can be (de)serialized by name.
RTTR_REGISTRATION
{
	using namespace rttr;

	registration::class_<AssetDatabase::AssetCache>("asset cache")
		.property("cache", &AssetDatabase::AssetCache::cache);

	registration::class_<AssetDatabase>("asset database")
		.property("packages", &AssetDatabase::packages)
		.property("caches", &AssetDatabase::caches);
}
//-----------------------------------
// AssetDatabase::GetValidAssetTypes
//
// Collect the concrete asset types matching 'type': either the type itself,
// or every derived class that is an asset data type
//
std::vector<rttr::type> AssetDatabase::GetValidAssetTypes(rttr::type const type, bool const reportErrors)
{
	std::vector<rttr::type> assetTypes;

	if (IsAssetDataType(type))
	{
		assetTypes.push_back(type);
		return assetTypes;
	}

	for (rttr::type const derivedType : type.get_derived_classes())
	{
		if (IsAssetDataType(derivedType)) // only leaf asset types
		{
			assetTypes.push_back(derivedType);
		}
	}

	if (reportErrors)
	{
		ET_ASSERT(!assetTypes.empty(), "Couldn't find asset type derived from '%s'!", type.get_name().data());
	}

	return assetTypes;
}
//====================
// Asset Cache
//====================
//---------------------------------
// AssetDatabase::AssetCache::GetType
//
// Data type of the assets held by this cache, derived from the first entry;
// an empty cache yields the type of nullptr.
//
rttr::type AssetDatabase::AssetCache::GetType() const
{
	return cache.empty() ? rttr::type::get(nullptr) : cache.front()->GetType();
}
//--------------------------------------
// AssetDatabase::AssetCache::GetAssetType
//
// Reflected type of the asset objects themselves, derived from the first
// entry; an empty cache yields the type of nullptr.
//
rttr::type AssetDatabase::AssetCache::GetAssetType() const
{
	return cache.empty() ? rttr::type::get(nullptr) : rttr::type::get(*(cache.front()));
}
//===================
// Asset Database
//===================
//---------------------------------
// AssetDatabase::d-tor
//
// Delete all asset pointers, but only when this database owns them;
// non-owning databases leave the assets untouched.
//
AssetDatabase::~AssetDatabase()
{
	if (!m_OwnsAssets)
	{
		return;
	}

	for (AssetCache& cache : caches)
	{
		for (I_Asset* asset : cache.cache)
		{
			delete asset;
			// (the previous null-assignment to the loop copy was a dead
			// store - it never touched the stored pointer; clearing the
			// containers below is what actually drops the dangling pointers)
		}
	}

	caches.clear();
}
//---------------------------------
// AssetDatabase::GetAsset
//
// Look an asset up by ID across every cache. Returns nullptr when absent,
// optionally asserting in that case.
//
I_Asset* AssetDatabase::GetAsset(HashString const assetId, bool const reportErrors) const
{
	// linear scan over all caches and their contents
	for (AssetCache const& cache : caches)
	{
		for (I_Asset* const asset : cache.cache)
		{
			if (asset->GetId() == assetId)
			{
				return asset;
			}
		}
	}

	// not present in any cache
	if (reportErrors)
	{
		ET_ASSERT(false, "Couldn't find asset with ID '%s'!", assetId.ToStringDbg());
	}

	return nullptr;
}
//---------------------------------
// AssetDatabase::GetAsset
//
// Look an asset up by ID, restricted to the cache whose data type matches
// 'type' (or any asset type derived from it). Returns nullptr when absent,
// optionally asserting in that case.
//
I_Asset* AssetDatabase::GetAsset(HashString const assetId, rttr::type const type, bool const reportErrors) const
{
	for (rttr::type const assetType : GetValidAssetTypes(type, reportErrors))
	{
		// locate the (first) cache holding this data type
		auto const foundCacheIt = std::find_if(caches.cbegin(), caches.cend(),
			[assetType](AssetCache const& cache)
			{
				return cache.GetType() == assetType;
			});

		if (foundCacheIt == caches.cend())
		{
			continue;
		}

		// scan that cache for the requested ID
		for (I_Asset* const asset : foundCacheIt->cache)
		{
			if (asset->GetId() == assetId)
			{
				return asset;
			}
		}
	}

	if (reportErrors)
	{
		ET_ASSERT(false, "Couldn't find asset with ID '%s'!", assetId.ToStringDbg());
	}

	return nullptr;
}
//---------------------------------
// AssetDatabase::IterateAllAssets
//
// Invoke 'func' once for every asset in every cache.
//
void AssetDatabase::IterateAllAssets(I_AssetDatabase::T_AssetFunc const& func)
{
	for (AssetCache& cache : caches)
	{
		std::for_each(cache.cache.begin(), cache.cache.end(), func);
	}
}
//---------------------------------
// AssetDatabase::Flush
//
// Force unloading of every asset that is still loaded but no longer
// referenced by anything.
//
void AssetDatabase::Flush()
{
	for (AssetDatabase::AssetCache& cache : caches)
	{
		for (I_Asset* const asset : cache.cache)
		{
			bool const unreferenced = (asset->GetRefCount() <= 0u);
			if (unreferenced && asset->IsLoaded())
			{
				asset->Unload(true);
			}
		}
	}
}
} // namespace core
} // namespace et
| 1,664 |
1,738 |
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
#include "CryLegacy_precompiled.h"
#include "ComponentEntityAttributes.h"
#include "Serialization/IArchive.h"
#include "Serialization/IArchiveHost.h"
#include <Components/IComponentSerialization.h>
DECLARE_DEFAULT_COMPONENT_FACTORY(CComponentEntityAttributes, IComponentEntityAttributes)
namespace
{
	// Adapter that serializes every attribute of an attribute array through a
	// Serialization::IArchive; shared by the XML load and save paths below.
	struct SEntityAttributesSerializer
	{
		SEntityAttributesSerializer(TEntityAttributeArray& _attributes)
			: attributes(_attributes)
		{}

		void Serialize(Serialization::IArchive& archive)
		{
			// serialize each attribute under its own name/label; null slots
			// are skipped
			for (size_t iAttribute = 0, attributeCount = attributes.size(); iAttribute < attributeCount; ++iAttribute)
			{
				IEntityAttribute* pAttribute = attributes[iAttribute].get();
				if (pAttribute != NULL)
				{
					archive(*pAttribute, pAttribute->GetName(), pAttribute->GetLabel());
				}
			}
		}

		TEntityAttributeArray& attributes;
	};
}
//////////////////////////////////////////////////////////////////////////
// Intentionally empty - this component ignores entity events.
void CComponentEntityAttributes::ProcessEvent(SEntityEvent& event) {}
//////////////////////////////////////////////////////////////////////////
// Intentionally empty - no per-instance initialization is required.
void CComponentEntityAttributes::Initialize(SComponentInitializer const& inititializer) {}
//////////////////////////////////////////////////////////////////////////
// Intentionally empty - nothing to tear down.
void CComponentEntityAttributes::Done() {}
//////////////////////////////////////////////////////////////////////////
// Intentionally empty - this component performs no per-frame work.
void CComponentEntityAttributes::UpdateComponent(SEntityUpdateContext& context) {}
//////////////////////////////////////////////////////////////////////////
// Seed the attribute array from the entity class defaults (only if nothing
// has populated it yet), then register this component's serialization
// callbacks with the entity's serialization component.
bool CComponentEntityAttributes::InitComponent(IEntity* pEntity, SEntitySpawnParams& params)
{
	if (m_attributes.empty() == true)
	{
		EntityAttributeUtils::CloneAttributes(params.pClass->GetEntityAttributes(), m_attributes);
	}

	pEntity->GetComponent<IComponentSerialization>()->Register<CComponentEntityAttributes>(SerializationOrder::Attributes, *this, &CComponentEntityAttributes::Serialize, &CComponentEntityAttributes::SerializeXML, &CComponentEntityAttributes::NeedSerialize, &CComponentEntityAttributes::GetSignature);
	return true;
}
//////////////////////////////////////////////////////////////////////////
// Intentionally empty - nothing to do when the entity is reloaded.
void CComponentEntityAttributes::Reload(IEntity* pEntity, SEntitySpawnParams& params) {}
//////////////////////////////////////////////////////////////////////////
// Load or save the attribute array from/to an "Attributes" child of the
// entity's XML node. Saving is skipped entirely when there are no attributes.
void CComponentEntityAttributes::SerializeXML(XmlNodeRef& entityNodeXML, bool loading)
{
	if (loading)
	{
		XmlNodeRef attributesNodeXML = entityNodeXML->findChild("Attributes");
		if (attributesNodeXML)
		{
			SEntityAttributesSerializer serializer(m_attributes);
			Serialization::LoadXmlNode(serializer, attributesNodeXML);
		}
	}
	else if (!m_attributes.empty())
	{
		SEntityAttributesSerializer serializer(m_attributes);
		XmlNodeRef attributesNodeXML = Serialization::SaveXmlNode(serializer, "Attributes");
		if (attributesNodeXML)
		{
			entityNodeXML->addChild(attributesNodeXML);
		}
	}
}
//////////////////////////////////////////////////////////////////////////
// Intentionally empty - binary serialization is opted out via NeedSerialize.
void CComponentEntityAttributes::Serialize(TSerialize serialize) {}
//////////////////////////////////////////////////////////////////////////
// Attributes are persisted through XML only, so no binary serialization.
bool CComponentEntityAttributes::NeedSerialize()
{
	return false;
}
//////////////////////////////////////////////////////////////////////////
// Always reports success without writing any signature data.
bool CComponentEntityAttributes::GetSignature(TSerialize signature)
{
	return true;
}
//////////////////////////////////////////////////////////////////////////
// Report only this object's own footprint to the sizer.
void CComponentEntityAttributes::GetMemoryUsage(ICrySizer* pSizer) const
{
	pSizer->AddObject(this, sizeof(*this));
}
//////////////////////////////////////////////////////////////////////////
// Copy the given attributes into this component. A source attribute whose
// name matches the attribute already stored at the same index is copied into
// it in place; otherwise the source is cloned into the slot. Null source
// entries leave the destination slot untouched.
//
// Fixes a potential null dereference: the old code called
// strcmp(pSrc->GetName(), ...) before ever checking pSrc for NULL, crashing
// when a source slot was empty but the destination slot was not.
void CComponentEntityAttributes::SetAttributes(const TEntityAttributeArray& attributes)
{
	const size_t attributeCount = attributes.size();
	m_attributes.resize(attributeCount);
	for (size_t iAttribute = 0; iAttribute < attributeCount; ++iAttribute)
	{
		IEntityAttribute* pSrc = attributes[iAttribute].get();
		if (pSrc == NULL)
		{
			// nothing to copy for this slot
			continue;
		}

		IEntityAttribute* pDst = m_attributes[iAttribute].get();
		if ((pDst != NULL) && (strcmp(pSrc->GetName(), pDst->GetName()) == 0))
		{
			// same attribute already present - overwrite its value in place
			Serialization::CloneBinary(*pDst, *pSrc);
		}
		else
		{
			m_attributes[iAttribute] = pSrc->Clone();
		}
	}
}
//////////////////////////////////////////////////////////////////////////
// Mutable access to the stored attribute array.
TEntityAttributeArray& CComponentEntityAttributes::GetAttributes()
{
	return m_attributes;
}
//////////////////////////////////////////////////////////////////////////
// Read-only access to the stored attribute array.
const TEntityAttributeArray& CComponentEntityAttributes::GetAttributes() const
{
	return m_attributes;
}
| 1,689 |
577 |
<reponame>Shicheng-Guo/TDC
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
# Adapted by TDC.
import os
import torch
import numpy as np
import pandas as pd
import pickle
def get_dataset_class(dataset_name):
    """Return the dataset class with the given name.

    Raises:
        NotImplementedError: if no module-level name matches ``dataset_name``.
    """
    try:
        return globals()[dataset_name]
    except KeyError:
        raise NotImplementedError("Dataset not found: {}".format(dataset_name))
def num_environments(dataset_name):
    # Number of environments (domains) declared by the named dataset class.
    return len(get_dataset_class(dataset_name).ENVIRONMENTS)
class MultipleDomainDataset:
    """Base class for datasets split into several domains/environments.

    Subclasses populate ``self.datasets`` with one dataset per environment
    and override the class-level constants below.
    """
    N_STEPS = 5001           # Default, subclasses may override
    CHECKPOINT_FREQ = 100    # Default, subclasses may override
    N_WORKERS = 0            # Default, subclasses may override
    ENVIRONMENTS = None      # Subclasses should override
    INPUT_SHAPE = None       # Subclasses should override

    def __getitem__(self, index):
        # Return the dataset for the index-th environment.
        return self.datasets[index]

    def __len__(self):
        # Number of environments.
        return len(self.datasets)
# Amino-acid vocabulary for protein sequences; '?' doubles as the
# padding/unknown token.
amino_char = ['?', 'A', 'C', 'B', 'E', 'D', 'G', 'F', 'I', 'H', 'K', 'M', 'L', 'O',
       'N', 'Q', 'P', 'S', 'R', 'U', 'T', 'W', 'V', 'Y', 'X', 'Z']

# SMILES character vocabulary for drug strings; '?' doubles as the
# padding/unknown token.
smiles_char = ['?', '#', '%', ')', '(', '+', '-', '.', '1', '0', '3', '2', '5', '4',
       '7', '6', '9', '8', '=', 'A', 'C', 'B', 'E', 'D', 'G', 'F', 'I',
       'H', 'K', 'M', 'L', 'O', 'N', 'P', 'S', 'R', 'U', 'T', 'W', 'V',
       'Y', '[', 'Z', ']', '_', 'a', 'c', 'b', 'e', 'd', 'g', 'f', 'i',
       'h', 'm', 'l', 'o', 'n', 's', 'r', 'u', 't', 'y']

# Fixed sequence lengths that trans_protein / trans_drug pad or truncate to.
MAX_SEQ_PROTEIN = 1000
MAX_SEQ_DRUG = 100

from sklearn.preprocessing import OneHotEncoder
# One-hot encoders fitted over the fixed vocabularies above.
enc_protein = OneHotEncoder().fit(np.array(amino_char).reshape(-1, 1))
enc_drug = OneHotEncoder().fit(np.array(smiles_char).reshape(-1, 1))

def protein_2_embed(x):
    # One-hot encode a protein character list -> (vocab_size, seq_len) array.
    return enc_protein.transform(np.array(x).reshape(-1,1)).toarray().T

def drug_2_embed(x):
    # One-hot encode a SMILES character list -> (vocab_size, seq_len) array.
    return enc_drug.transform(np.array(x).reshape(-1,1)).toarray().T
def trans_protein(x):
    """Upper-case a protein string, replace characters outside ``amino_char``
    with '?', and pad (with '?') or truncate to exactly MAX_SEQ_PROTEIN.
    Returns the result as a list of single characters."""
    chars = [c if c in amino_char else '?' for c in x.upper()]
    if len(chars) >= MAX_SEQ_PROTEIN:
        return chars[:MAX_SEQ_PROTEIN]
    return chars + ['?'] * (MAX_SEQ_PROTEIN - len(chars))
def trans_drug(x):
    """Replace characters outside ``smiles_char`` with '?', and pad (with
    '?') or truncate to exactly MAX_SEQ_DRUG. Case is preserved (SMILES is
    case-sensitive). Returns the result as a list of single characters."""
    chars = [c if c in smiles_char else '?' for c in x]
    if len(chars) >= MAX_SEQ_DRUG:
        return chars[:MAX_SEQ_DRUG]
    return chars + ['?'] * (MAX_SEQ_DRUG - len(chars))
from torch.utils import data
class dti_tensor_dataset(data.Dataset):
    """torch Dataset over a dataframe carrying ``Drug_Enc``/``Target_Enc``
    (pre-encoded character lists) and label column ``Y``; yields one-hot
    encoded (drug, target, label) triples."""
    def __init__(self, df):
        self.df = df

    def __len__(self):
        return len(self.df)

    def __getitem__(self, index):
        # pre-encoded character lists for this row
        d = self.df.iloc[index].Drug_Enc
        t = self.df.iloc[index].Target_Enc
        # expand to one-hot arrays of shape (vocab_size, seq_len)
        d = drug_2_embed(d)
        t = protein_2_embed(t)
        y = self.df.iloc[index].Y
        return d, t, y
class TdcDtiDg(MultipleDomainDataset):
    """Drug-target-interaction domain-generalization dataset built on the TDC
    ``DTI_DG_Group`` / ``BindingDB_Patent`` benchmark, with one domain per
    patent year: 2013-2018 for training, 2019-2021 for test."""
    def __init__(self, root, test_envs, hparams):
        super().__init__()
        # NOTE(review): this local shadows the class-level ENVIRONMENTS and is
        # never read afterwards - presumably it was meant to set the class
        # attribute; confirm before relying on ENVIRONMENTS elsewhere.
        ENVIRONMENTS = [str(i) for i in list(range(2013, 2022))]
        TRAIN_ENV = [str(i) for i in list(range(2013, 2019))]
        TEST_ENV = ['2019', '2020', '2021']
        #TRAIN_ENV = ['2019', '2020']
        #TEST_ENV = ['2021']
        # NOTE(review): drug_2_embed produces (len(smiles_char)=63, 100) and
        # protein_2_embed produces (len(amino_char)=26, 1000); these tuples
        # look swapped relative to that - confirm the intended convention.
        self.input_shape = [(26, 100), (63, 1000)]
        self.num_classes = 1

        if root is None:
            raise ValueError('Data directory not specified!')

        ## create a datasets object
        self.datasets = []

        from tdc import BenchmarkGroup
        self.group = BenchmarkGroup(name = 'DTI_DG_Group', path = root)
        benchmark = self.group.get('BindingDB_Patent')
        train_val, test, name = benchmark['train_val'], benchmark['test'], benchmark['name']

        # encode each unique drug string once, then map onto every row
        unique_drug = pd.Series(train_val['Drug'].unique()).apply(trans_drug)
        unique_dict_drug = dict(zip(train_val['Drug'].unique(), unique_drug))
        train_val['Drug_Enc'] = [unique_dict_drug[i] for i in train_val['Drug']]

        # likewise for each unique target protein
        unique_target = pd.Series(train_val['Target'].unique()).apply(trans_protein)
        unique_dict_target = dict(zip(train_val['Target'].unique(), unique_target))
        train_val['Target_Enc'] = [unique_dict_target[i] for i in train_val['Target']]

        # one domain dataset per training year
        for i in TRAIN_ENV:
            df_data = train_val[train_val.Year == int(i)]
            self.datasets.append(dti_tensor_dataset(df_data))
            print('Year ' + i + ' loaded...')

        # repeat the encoding for the test split
        unique_drug = pd.Series(test['Drug'].unique()).apply(trans_drug)
        unique_dict_drug = dict(zip(test['Drug'].unique(), unique_drug))
        test['Drug_Enc'] = [unique_dict_drug[i] for i in test['Drug']]

        unique_target = pd.Series(test['Target'].unique()).apply(trans_protein)
        unique_dict_target = dict(zip(test['Target'].unique(), unique_target))
        test['Target_Enc'] = [unique_dict_target[i] for i in test['Target']]

        # one domain dataset per test year
        for i in TEST_ENV:
            df_data = test[test.Year == int(i)]
            self.datasets.append(dti_tensor_dataset(df_data))
            print('Year ' + i + ' loaded...')
| 2,346 |
346 |
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// Do not edit this file. It is machine generated.
{
"schemas/variants-schema.json.": "Opciones de variante",
"schemas/variants-schema.json.patternProperties.base\\$": "Mezcla base para todas las variantes (no implementada para las Herramientas de CMake)",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.description": "Descripción de esta opción",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.default": "La configuración predeterminada de esta opción",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices": "Un valor de opción posible",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.short": "Descripción simple de una sola palabra de este valor",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.long": "Descripción más extensa de lo que hace este valor",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.buildType": "El valor de CMAKE_BUILD_TYPE para esta variante",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.linkage": "Establece el valor de BUILD_SHARED en la línea de comandos de CMake",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.env": "Variables de entorno que se establecen durante la configuración de CMake.",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.env.patternProperties.^\\w+$": "Valor de la variable de entorno",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.settings": "Los argumentos -D que se van a pasar a CMake para esta variante.",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.generator": "El generador de CMake que se va a usar para esta variante.",
"schemas/variants-schema.json.patternProperties.^\\w+$.properties.choices.patternProperties.^\\w+$.properties.toolset": "El conjunto de herramientas del generador que se va a usar para esta variante."
}
| 835 |
949 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.log4j.varia;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.ServerSocket;
import java.net.Socket;
import org.apache.log4j.RollingFileAppender;
import org.apache.log4j.helpers.LogLog;
/**
This appender listens on a socket on the port specified by the
<b>Port</b> property for a "RollOver" message. When such a message
is received, the underlying log file is rolled over and an
acknowledgment message is sent back to the process initiating the
roll over.
<p>This method of triggering roll over has the advantage of being
operating system independent, fast and reliable.
<p>A simple application {@link Roller} is provided to initiate the
roll over.
<p>Note that the initiator is not authenticated. Anyone can trigger
a rollover. In production environments, it is recommended that you
add some form of protection to prevent undesired rollovers.
   @author Ceki Gülcü
@since version 0.9.0 */
public class ExternallyRolledFileAppender extends RollingFileAppender {

  /**
     The string constant sent to initiate a roll over.   Current value of
     this string constant is <b>RollOver</b>.
  */
  static final public String ROLL_OVER = "RollOver";

  /**
     The string constant sent to acknowledge a roll over. Current value of
      this string constant is <b>OK</b>.
  */
  static final public String OK = "OK";

  // TCP port to listen on for roll-over requests; 0 leaves the listener off
  int port = 0;
  // background accept thread, (re)created by activateOptions
  HUP hup;

  /**
     The default constructor does nothing but calls its super-class
     constructor.  */
  public
  ExternallyRolledFileAppender() {
  }

  /**
     The <b>Port</b> property is used for setting the port for
     listening to external roll over messages.
  */
  public
  void setPort(int port) {
    this.port = port;
  }

  /**
     Returns value of the <b>Port</b> option.
  */
  public
  int getPort() {
    return port;
  }

  /**
     Start listening on the port specified by a preceding call to
     {@link #setPort}.  */
  public
  void activateOptions() {
    super.activateOptions();
    if(port != 0) {
      // restart the listener if one is already running
      if(hup != null) {
        hup.interrupt();
      }
      hup = new HUP(this, port);
      hup.setDaemon(true);
      hup.start();
    }
  }
}
/**
 * Background thread that accepts connections on the configured port and
 * spawns one {@link HUPNode} per client to process roll-over requests.
 */
class HUP extends Thread {
  int port;
  ExternallyRolledFileAppender er;

  HUP(ExternallyRolledFileAppender er, int port) {
    this.er = er;
    this.port = port;
  }

  public
  void run() {
    while(!isInterrupted()) {
      ServerSocket serverSocket = null;
      try {
        serverSocket = new ServerSocket(port);
        while(true) {
          Socket socket = serverSocket.accept();
          LogLog.debug("Connected to client at " + socket.getInetAddress());
          new Thread(new HUPNode(socket, er), "ExternallyRolledFileAppender-HUP").start();
        }
      } catch(InterruptedIOException e) {
        Thread.currentThread().interrupt();
        e.printStackTrace();
      } catch(IOException e) {
        e.printStackTrace();
      } catch(RuntimeException e) {
        e.printStackTrace();
      } finally {
        // Fix: the listen socket was previously never closed, so every trip
        // through the outer loop after a failure leaked a socket and could
        // leave the port bound, making the subsequent ServerSocket fail too.
        if (serverSocket != null) {
          try {
            serverSocket.close();
          } catch (IOException ignored) {
            // nothing sensible to do while tearing the listener down
          }
        }
      }
    }
  }
}
/**
 * Handles a single client connection: reads one UTF string and, when it is
 * the {@link ExternallyRolledFileAppender#ROLL_OVER} command, rolls the
 * appender's log file and acknowledges with {@link ExternallyRolledFileAppender#OK}.
 */
class HUPNode implements Runnable {

  Socket socket;
  DataInputStream dis;
  DataOutputStream dos;
  ExternallyRolledFileAppender er;

  public
  HUPNode(Socket socket, ExternallyRolledFileAppender er) {
    this.socket = socket;
    this.er = er;
    try {
      dis = new DataInputStream(socket.getInputStream());
      dos = new DataOutputStream(socket.getOutputStream());
    } catch(InterruptedIOException e) {
      Thread.currentThread().interrupt();
      e.printStackTrace();
    } catch(IOException e) {
      e.printStackTrace();
    } catch(RuntimeException e) {
      e.printStackTrace();
    }
    // NOTE(review): if stream creation fails above, dis/dos stay null and
    // run() will throw a NullPointerException; the socket is also never
    // closed on the error paths - confirm whether this leak matters here.
  }

  public void run() {
    try {
      String line = dis.readUTF();
      LogLog.debug("Got external roll over signal.");
      if(ExternallyRolledFileAppender.ROLL_OVER.equals(line)) {
        // serialize roll-overs against other triggers on the same appender
        synchronized(er) {
          er.rollOver();
        }
        dos.writeUTF(ExternallyRolledFileAppender.OK);
      }
      else {
        dos.writeUTF("Expecting [RollOver] string.");
      }
      // closing the stream also releases the underlying socket on this path
      dos.close();
    } catch(InterruptedIOException e) {
      Thread.currentThread().interrupt();
      LogLog.error("Unexpected exception. Exiting HUPNode.", e);
    } catch(IOException e) {
      LogLog.error("Unexpected exception. Exiting HUPNode.", e);
    } catch(RuntimeException e) {
      LogLog.error("Unexpected exception. Exiting HUPNode.", e);
    }
  }
}
| 1,758 |
672 |
<filename>ZZZ_OtherDemo/00-dyld-832.7.3/unit-tests/test-cases/threaded-flat-lookup/foo.c<gh_stars>100-1000
/* Empty placeholder symbols; per the file path this is a dyld unit-test
   fixture (threaded-flat-lookup), where only the exported names matter. */
void foo() {}
void bar() {}
void baz() {}
| 67 |
853 |
<reponame>blin00/tinyssh
/*
20130604
<NAME>
Public domain.
*/
#include "numtostr.h"
/*
The 'numtostr(strbuf,n)' converts number 'n' into a 0-terminated string.
The caller must allocate at least NUMTOSTR_LEN bytes for 'strbuf'; every
byte past the digits is zero-filled. Passing a null 'strbuf' uses an
internal static buffer (not thread-safe). Ready for 128-bit integers.
*/
char *numtostr(char *strbuf, long long n) {
    static char staticbuf[NUMTOSTR_LEN];
    unsigned long long magnitude;
    unsigned long long probe;
    long long pos;
    long long len = 0;
    int negative = (n < 0);

    if (!strbuf) strbuf = staticbuf; /* not thread-safe */

    /* work on the magnitude; negating through unsigned is safe for LLONG_MIN */
    magnitude = negative ? -(unsigned long long)n : (unsigned long long)n;

    /* count characters needed: digits plus an optional sign */
    probe = magnitude;
    do {
        ++len;
        probe /= 10;
    } while (probe);
    if (negative) ++len;

    /* emit digits back-to-front, then the sign */
    pos = len;
    do {
        strbuf[--pos] = (char)('0' + (magnitude % 10));
        magnitude /= 10;
    } while (magnitude);
    if (negative) strbuf[0] = '-';

    /* zero-fill the remainder of the caller's NUMTOSTR_LEN-byte buffer,
       which also 0-terminates the string */
    while (len < NUMTOSTR_LEN) strbuf[len++] = 0;

    return strbuf;
}
| 436 |
1,380 |
/*
* sophia database
* sphia.org
*
* Copyright (c) <NAME>
* BSD License
*/
#include <libss.h>
#include <libsf.h>
#include <libsr.h>
#include <libsv.h>
#include <libsd.h>
/* iterator interface vtable binding the generic ssiterif operations to the
   sd_indexiter_* implementations */
ssiterif sd_indexiter =
{
	.close = sd_indexiter_close,
	.has   = sd_indexiter_has,
	.of    = sd_indexiter_of,
	.next  = sd_indexiter_next
};
| 146 |
380 |
<reponame>geralltf/mini-tor
#pragma once
#include "key.h"
#include "../common.h"
#include <mini/byte_buffer.h>
#include <mini/stack_buffer.h>
#include <windows.h>
#include <wincrypt.h>
namespace mini::crypto::capi {
//
// AES key importable into the CryptoAPI RSA/AES provider. Holds the raw key
// material in a CryptoAPI-compatible blob plus an IV buffer sized like the key.
//
template <
  cipher_mode AES_MODE,
  size_type KEY_SIZE
>
class aes_key
  : public key
{
  MINI_MAKE_NONCOPYABLE(aes_key);

  // only the standard AES key widths are representable
  static_assert(
    KEY_SIZE == 128 ||
    KEY_SIZE == 192 ||
    KEY_SIZE == 256,
    "valid AES key sizes are: 128, 192, 256");

public:
  static constexpr size_type key_size = KEY_SIZE;
  static constexpr size_type key_size_in_bytes = KEY_SIZE / 8;
  static constexpr cipher_mode mode = AES_MODE;

  aes_key(
    void
    );

  // construct and immediately import the given raw key bytes
  aes_key(
    const byte_buffer_ref key
    );

  aes_key(
    aes_key&& other
    );

  aes_key&
  operator=(
    aes_key&& other
    );

  void
  swap(
    aes_key& other
    );

  //
  // import.
  //

  // (re)load raw key material into the internal blob
  void
  import(
    const byte_buffer_ref key
    );

  // initialization-vector accessors
  byte_buffer_ref
  get_iv(
    void
    ) const;

  void
  set_iv(
    const byte_buffer_ref iv
    );

public:
  // CryptoAPI key blob layout: BLOBHEADER + key length + raw key bytes
  struct blob
  {
    struct provider_type
    {
      static constexpr auto get_handle = &provider::get_rsa_aes_handle;
    };

    BLOBHEADER header;
    DWORD size;
    BYTE key[key_size_in_bytes];
  };

private:
  // map the key width onto the matching CryptoAPI algorithm identifier
  static constexpr ALG_ID _key_alg = key_size == 128
    ? CALG_AES_128 : key_size == 192
    ? CALG_AES_192 : key_size == 256
    ? CALG_AES_256 : 0;

  blob _blob;
  byte_type _iv[key_size_in_bytes];
};
//
// AES
//
// Cipher object owning an aes_key and exposing encrypt/decrypt for the
// block-cipher mode given by AES_MODE (implementations in aes.inl).
//
template <
  cipher_mode AES_MODE,
  size_type KEY_SIZE
>
class aes
{
  MINI_MAKE_NONCOPYABLE(aes);

public:
  static constexpr size_type key_size = KEY_SIZE;
  static constexpr size_type key_size_in_bytes = KEY_SIZE / 8;
  static constexpr cipher_mode mode = AES_MODE;

  using key = aes_key<AES_MODE, KEY_SIZE>;

  aes(
    void
    ) = default;

  // construct with (and take ownership of) an already-imported key
  aes(
    key&& k
    );

  ~aes(
    void
    ) = default;

  // (re)initialize with a new key
  void
  init(
    key&& k
    );

  // in-place and out-of-place encryption variants
  void
  encrypt_inplace(
    mutable_byte_buffer_ref buffer
    );

  void
  encrypt(
    const byte_buffer_ref input,
    mutable_byte_buffer_ref output
    );

  byte_buffer
  encrypt(
    const byte_buffer_ref input
    );

  // in-place and out-of-place decryption variants
  void
  decrypt_inplace(
    mutable_byte_buffer_ref buffer
    );

  void
  decrypt(
    const byte_buffer_ref input,
    mutable_byte_buffer_ref output
    );

  byte_buffer
  decrypt(
    const byte_buffer_ref input
    );

  // one-shot conveniences that consume the key
  static byte_buffer
  encrypt(
    key&& k,
    const byte_buffer_ref input
    );

  static byte_buffer
  decrypt(
    key&& k,
    const byte_buffer_ref input
    );

private:
  key _key;
};
//
// AES-CTR
//
// Counter-mode specialization layered on the ECB primitive: keystream bytes
// are derived from an encrypted counter block (see aes.inl), so encryption
// and decryption are the same operation.
//
template <
  size_type KEY_SIZE
>
class aes<cipher_mode::ctr, KEY_SIZE>
  : private aes<cipher_mode::ecb, KEY_SIZE>
{
  MINI_MAKE_NONCOPYABLE(aes);

  using base_type = aes<cipher_mode::ecb, KEY_SIZE>;

public:
  static constexpr size_type key_size = KEY_SIZE;
  static constexpr size_type key_size_in_bytes = KEY_SIZE / 8;
  static constexpr cipher_mode mode = cipher_mode::ctr;

  using key = aes_key<cipher_mode::ctr, KEY_SIZE>;

  aes(
    void
    ) = default;

  aes(
    key&& k
    );

  ~aes(
    void
    ) = default;

  void
  init(
    key&& k
    );

  void
  encrypt_inplace(
    mutable_byte_buffer_ref buffer
    );

  void
  encrypt(
    const byte_buffer_ref input,
    mutable_byte_buffer_ref output
    );

  byte_buffer
  encrypt(
    const byte_buffer_ref input
    );

  //
  // encrypt/decrypt in ctr mode are the same.
  //

  void
  decrypt_inplace(
    mutable_byte_buffer_ref buffer
    );

  void
  decrypt(
    const byte_buffer_ref input,
    mutable_byte_buffer_ref output
    );

  byte_buffer
  decrypt(
    const byte_buffer_ref input
    );

  static byte_buffer
  crypt(
    key&& k,
    const byte_buffer_ref input
    );

private:
  // keystream generation helpers (defined in aes.inl)
  byte_type
  next_keystream_byte(
    void
    );

  void
  update_counter(
    void
    );

  void
  encrypt_counter(
    void
    );

  void
  increment_counter(
    void
    );

  // counter-mode state; _keystream_pointer indexes the next unused byte
  stack_byte_buffer<key_size_in_bytes> _counter;
  stack_byte_buffer<key_size_in_bytes> _counter_out;
  size_type _keystream_pointer;
};
}
#include "aes.inl"
| 2,242 |
4,140 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.optimizer.calcite.translator.opconventer;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.OperatorFactory;
import org.apache.hadoop.hive.ql.exec.RowSchema;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil;
import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan;
import org.apache.hadoop.hive.ql.optimizer.calcite.translator.opconventer.HiveOpConverter.OpAttr;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
/**
 * Translates a Calcite {@link HiveTableScan} rel node into a Hive
 * {@link TableScanOperator}: rebuilds the column metadata (virtual columns,
 * partition columns and regular columns), records which columns are actually
 * needed, and registers the new operator under a unique table alias.
 */
class HiveTableScanVisitor extends HiveRelNodeVisitor<HiveTableScan> {
  HiveTableScanVisitor(HiveOpConverter hiveOpConverter) {
    super(hiveOpConverter);
  }

  /**
   * TODO: 1. PPD needs to get pushed in to TS.
   */
  @Override
  OpAttr visit(HiveTableScan scanRel) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Translating operator rel#" + scanRel.getId() + ":" + scanRel.getRelTypeName()
          + " with row type: [" + scanRel.getRowType() + "]");
    }

    RelOptHiveTable ht = (RelOptHiveTable) scanRel.getTable();

    // 1. Setup TableScan Desc
    // 1.1 Build col details used by scan
    ArrayList<ColumnInfo> colInfos = new ArrayList<ColumnInfo>();
    List<VirtualColumn> virtualCols = new ArrayList<VirtualColumn>();
    List<Integer> neededColumnIDs = new ArrayList<Integer>();
    List<String> neededColumnNames = new ArrayList<String>();
    Set<Integer> vcolsInCalcite = new HashSet<Integer>();

    List<String> partColNames = new ArrayList<String>();
    Map<Integer, VirtualColumn> vColsMap = HiveCalciteUtil.getVColsMap(ht.getVirtualCols(),
        ht.getNoOfNonVirtualCols());
    Map<Integer, ColumnInfo> posToPartColInfo = ht.getPartColInfoMap();
    Map<Integer, ColumnInfo> posToNonPartColInfo = ht.getNonPartColInfoMap();

    List<Integer> neededColIndxsFrmReloptHT = scanRel.getNeededColIndxsFrmReloptHT();
    List<String> scanColNames = scanRel.getRowType().getFieldNames();
    String tableAlias = scanRel.getConcatQbIDAlias();

    String colName;
    ColumnInfo colInfo;
    VirtualColumn vc;

    // classify every field of the scan's row type as virtual, partition, or
    // regular, and remember which of them the scan actually needs
    for (int index = 0; index < scanRel.getRowType().getFieldList().size(); index++) {
      colName = scanColNames.get(index);
      if (vColsMap.containsKey(index)) {
        vc = vColsMap.get(index);
        virtualCols.add(vc);
        colInfo = new ColumnInfo(vc.getName(), vc.getTypeInfo(), tableAlias, true, vc.getIsHidden());
        vcolsInCalcite.add(index);
      } else if (posToPartColInfo.containsKey(index)) {
        partColNames.add(colName);
        colInfo = posToPartColInfo.get(index);
        vcolsInCalcite.add(index);
      } else {
        colInfo = posToNonPartColInfo.get(index);
      }
      colInfos.add(colInfo);
      if (neededColIndxsFrmReloptHT.contains(index)) {
        neededColumnIDs.add(index);
        neededColumnNames.add(colName);
      }
    }

    // 1.2 Create TableScanDesc
    TableScanDesc tsd = new TableScanDesc(tableAlias, virtualCols, ht.getHiveTableMD());

    // 1.3. Set Partition cols in TSDesc
    tsd.setPartColumns(partColNames);

    // 1.4. Set needed cols in TSDesc
    tsd.setNeededColumnIDs(neededColumnIDs);
    tsd.setNeededColumns(neededColumnNames);

    // 2. Setup TableScan
    TableScanOperator ts = (TableScanOperator) OperatorFactory.get(
        hiveOpConverter.getSemanticAnalyzer().getOpContext(), tsd, new RowSchema(colInfos));

    //now that we let Calcite process subqueries we might have more than one
    // tablescan with same alias. Make the alias unique before registering it.
    if (hiveOpConverter.getTopOps().get(tableAlias) != null) {
      tableAlias = tableAlias + hiveOpConverter.getUniqueCounter();
    }
    hiveOpConverter.getTopOps().put(tableAlias, ts);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Generated " + ts + " with row schema: [" + ts.getSchema() + "]");
    }

    return new OpAttr(tableAlias, vcolsInCalcite, ts);
  }
}
| 1,839 |
414 |
///
/// anax
/// An open source C++ entity system.
///
/// Copyright (C) 2013-2014 <NAME> (<EMAIL>)
///
/// Permission is hereby granted, free of charge, to any person obtaining a copy
/// of this software and associated documentation files (the "Software"), to deal
/// in the Software without restriction, including without limitation the rights
/// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
/// copies of the Software, and to permit persons to whom the Software is
/// furnished to do so, subject to the following conditions:
///
/// The above copyright notice and this permission notice shall be included in
/// all copies or substantial portions of the Software.
///
/// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
/// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
/// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
/// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
/// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
/// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/// THE SOFTWARE.
///
#ifndef ANAX_HPP
#define ANAX_HPP
/// \brief A namespace that contains all classes/functions for the anax library
///
/// This namespace contains all you need to use the anax library.
///
/// \author <NAME>
namespace anax
{
}
/// \mainpage
///
/// \section welcome Welcome
///
/// Welcome to the anax documentation, here you may find
/// detailed documentation on how to use specific classes/methods for the library.
///
/// \section resources Extra-resources
/// You may find more resources to learn about the anax library on
/// the anax's wiki, hosted on GitHub
/// <a href="https://github.com/miguelishawt/anax/wiki"> here</a>.
#define ANAX_VERSION_MAJOR 1
#define ANAX_VERSION_MINOR 1
#define ANAX_PATCH_NUMBER 1
#define ANAX_VERSION_NUMBER ANAX_VERSION_MAJOR.ANAX_VERSION_MINOR.ANAX_PATCH_NUMBER
#include <anax/World.hpp>
#endif // ANAX_HPP
| 587 |
466 |
//---------------------------------------------------------------------------
// Greenplum Database
// Copyright (C) 2012 EMC Corp
//
// @filename:
// CXformDelete2DML.h
//
// @doc:
// Transform Logical Delete to Logical DML
//---------------------------------------------------------------------------
#ifndef GPOPT_CXformDelete2DML_H
#define GPOPT_CXformDelete2DML_H
#include "gpos/base.h"
#include "gpopt/xforms/CXformExploration.h"
namespace gpopt
{
using namespace gpos;
//---------------------------------------------------------------------------
// @class:
// CXformDelete2DML
//
// @doc:
// Transform Logical Delete to Logical DML
//
//---------------------------------------------------------------------------
// Exploration transform rewriting a logical Delete operator into the
// generic logical DML representation; copy construction is disallowed.
class CXformDelete2DML : public CXformExploration
{
private:
// private copy ctor (declared, never defined — copying is forbidden)
CXformDelete2DML(const CXformDelete2DML &);
public:
// ctor
explicit CXformDelete2DML(CMemoryPool *mp);
// dtor
virtual ~CXformDelete2DML()
{
}
// ident accessors
virtual EXformId
Exfid() const
{
return ExfDelete2DML;
}
// return a string for xform name
virtual const CHAR *
SzId() const
{
return "CXformDelete2DML";
}
// compute xform promise for a given expression handle
virtual EXformPromise Exfp(CExpressionHandle &exprhdl) const;
// actual transform
virtual void Transform(CXformContext *pxfctxt, CXformResult *pxfres,
CExpression *pexpr) const;
};	// class CXformDelete2DML
} // namespace gpopt
#endif // !GPOPT_CXformDelete2DML_H
// EOF
| 498 |
2,151 |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_RENDERER_CORE_PAGE_SCROLLING_SCROLL_CUSTOMIZATION_CALLBACKS_H_
#define THIRD_PARTY_BLINK_RENDERER_CORE_PAGE_SCROLLING_SCROLL_CUSTOMIZATION_CALLBACKS_H_
#include "base/macros.h"
#include "third_party/blink/renderer/core/core_export.h"
#include "third_party/blink/renderer/core/dom/element.h"
#include "third_party/blink/renderer/platform/heap/handle.h"
#include "third_party/blink/renderer/platform/wtf/hash_map.h"
namespace blink {
class ScrollStateCallback;
// Per-element registry of scroll customization callbacks for the
// "distribute" and "apply" scroll phases. Keys are WeakMember<Element>, so
// entries are dropped automatically when the element is collected.
class CORE_EXPORT ScrollCustomizationCallbacks
    : public GarbageCollected<ScrollCustomizationCallbacks> {
 public:
  ScrollCustomizationCallbacks() = default;

  // Registers/fetches the callback run during the "distribute scroll" phase.
  void SetDistributeScroll(Element*, ScrollStateCallback*);
  ScrollStateCallback* GetDistributeScroll(Element*);

  // Registers/fetches/removes the callback run during the "apply scroll"
  // phase.
  void SetApplyScroll(Element*, ScrollStateCallback*);
  void RemoveApplyScroll(Element*);
  ScrollStateCallback* GetApplyScroll(Element*);

  // Per-element flag recording whether the element is currently inside a
  // scroll phase. NOTE(review): presumably used to detect re-entrancy —
  // confirm against callers.
  bool InScrollPhase(Element*) const;
  void SetInScrollPhase(Element*, bool);

  // Traces all heap-backed maps for the garbage collector.
  void Trace(blink::Visitor* visitor) {
    visitor->Trace(apply_scroll_callbacks_);
    visitor->Trace(distribute_scroll_callbacks_);
    visitor->Trace(in_scrolling_phase_);
  }

 private:
  using ScrollStateCallbackList =
      HeapHashMap<WeakMember<Element>, Member<ScrollStateCallback>>;

  ScrollStateCallbackList apply_scroll_callbacks_;
  ScrollStateCallbackList distribute_scroll_callbacks_;
  HeapHashMap<WeakMember<Element>, bool> in_scrolling_phase_;

  DISALLOW_COPY_AND_ASSIGN(ScrollCustomizationCallbacks);
};
} // namespace blink
#endif // THIRD_PARTY_BLINK_RENDERER_CORE_PAGE_SCROLLING_SCROLL_CUSTOMIZATION_CALLBACKS_H_
| 616 |
1,238 |
#include "EditLexer.h"
#include "EditStyleX.h"
// https://www.perl.org/
// https://perldoc.perl.org/index-language.html
// https://learn.perl.org/docs/keywords.html
// Keyword lists consumed by Scintilla's Perl lexer (SCLEX_PERL).
// Slot 0: built-in functions, operators and special tokens; slot 1: keywords
// after which a '/' must start a regex; the remaining slots are unused (NULL).
static KEYWORDLIST Keywords_Perl = {{
"__DATA__ __END__ __FILE__ __LINE__ __PACKAGE__ abs accept alarm and atan2 AUTOLOAD BEGIN "
"bind binmode bless break caller chdir CHECK chmod chomp chop chown chr chroot close closedir cmp "
"connect continue CORE cos crypt dbmclose dbmopen default defined delete DESTROY die do "
"dump each else elsif END endgrent endhostent endnetent endprotoent endpwent endservent eof "
"eq EQ eval exec exists exit exp fcntl fileno flock for foreach fork format formline ge GE "
"getc getgrent getgrgid getgrnam gethostbyaddr gethostbyname gethostent getlogin "
"getnetbyaddr getnetbyname getnetent getpeername getpgrp getppid getpriority getprotobyname "
"getprotobynumber getprotoent getpwent getpwnam getpwuid getservbyname getservbyport "
"getservent getsockname getsockopt given glob gmtime goto grep gt GT hex if index INIT int "
"ioctl join keys kill last lc lcfirst le LE length link listen local localtime lock log "
"lstat lt LT map mkdir msgctl msgget msgrcv msgsnd my ne NE next no not NULL oct open "
"opendir or ord our pack package pipe pop pos print printf prototype push qu quotemeta rand "
"read readdir readline readlink readpipe recv redo ref rename require reset return reverse "
"rewinddir rindex rmdir say scalar seek seekdir select semctl semget semop send setgrent "
"sethostent setnetent setpgrp setpriority setprotoent setpwent setservent setsockopt shift "
"shmctl shmget shmread shmwrite shutdown sin sleep socket socketpair sort splice split "
"sprintf sqrt srand stat state study sub substr symlink syscall sysopen sysread sysseek "
"system syswrite tell telldir tie tied time times truncate uc ucfirst umask undef UNITCHECK "
"unless unlink unpack unshift untie until use utime values vec wait waitpid wantarray warn "
"when while write xor"
, // keywords that forces /PATTERN/ at all times
"elsif if split while"
, NULL, NULL, NULL, NULL, NULL, NULL, NULL
, NULL, NULL, NULL, NULL, NULL, NULL, NULL
}};
// Maps SCE_PL_* lexer states to Notepad2 display styles (localized style
// name plus default color/typography attributes).
static EDITSTYLE Styles_Perl[] = {
EDITSTYLE_DEFAULT,
{ SCE_PL_COMMENTLINE, NP2StyleX_Comment, L"fore:#608060" },
{ SCE_PL_WORD, NP2StyleX_Keyword, L"bold; fore:#FF8000" },
{ MULTI_STYLE(SCE_PL_STRING, SCE_PL_CHARACTER, 0, 0), NP2StyleX_String, L"fore:#008000" },
{ SCE_PL_NUMBER, NP2StyleX_Number, L"fore:#FF0000" },
{ SCE_PL_OPERATOR, NP2StyleX_Operator, L"fore:#B000B0" },
{ MULTI_STYLE(SCE_PL_SCALAR, SCE_PL_STRING_VAR, 0, 0), NP2StyleX_ScalarVar, L"fore:#0000FF" },
{ SCE_PL_ARRAY, NP2StyleX_ArrayVar, L"fore:#FF0080" },
{ SCE_PL_HASH, NP2StyleX_HashVar, L"fore:#B000B0" },
{ SCE_PL_SYMBOLTABLE, NP2StyleX_SymbolTableVar, L"fore:#3A6EA5" },
{ SCE_PL_REGEX, NP2StyleX_Regex_mre, L"fore:#006633; back:#FFF1A8" },
{ SCE_PL_REGSUBST, NP2StyleX_Substitution_re, L"fore:#006633; back:#FFF1A8" },
{ SCE_PL_BACKTICKS, NP2StyleX_Backticks, L"fore:#E24000; back:#FFF1A8" },
{ SCE_PL_HERE_DELIM, NP2StyleX_HeredocDelimiter, L"fore:#648000" },
{ SCE_PL_HERE_Q, NP2StyleX_HeredocSingleQuoted, L"fore:#648000" },
{ SCE_PL_HERE_QQ, NP2StyleX_HeredocDoubleQuoted, L"fore:#648000" },
{ SCE_PL_HERE_QX, NP2StyleX_HeredocBackticks, L"fore:#E24000; back:#FFF1A8" },
{ SCE_PL_STRING_Q, NP2StyleX_SingleQuotedString_q, L"fore:#008000" },
{ SCE_PL_STRING_QQ, NP2StyleX_DoubleQuotedString_qq, L"fore:#008000" },
{ SCE_PL_STRING_QX, NP2StyleX_Backticks_qx, L"fore:#E24000; back:#FFF1A8" },
{ SCE_PL_STRING_QR, NP2StyleX_Regex_qr, L"fore:#006633; back:#FFF1A8" },
{ SCE_PL_STRING_QW, NP2StyleX_Array_qw, L"fore:#003CE6" },
{ SCE_PL_SUB_PROTOTYPE, NP2StyleX_Prototype, L"fore:#800080; back:#FFE2FF" },
{ SCE_PL_FORMAT_IDENT, NP2StyleX_FormatIdentifier, L"bold; fore:#648000; back:#FFF1A8" },
{ SCE_PL_FORMAT, NP2StyleX_FormatBody, L"fore:#648000; back:#FFF1A8" },
{ SCE_PL_POD, NP2StyleX_PODCommon, L"fore:#A46000; back:#FFFFC0; eolfilled" },
{ SCE_PL_POD_VERB, NP2StyleX_PODVerbatim, L"fore:#A46000; back:#FFFFC0; eolfilled" },
{ SCE_PL_DATASECTION, NP2StyleX_DataSection, L"fore:#A46000; back:#FFFFC0; eolfilled" },
{ SCE_PL_ERROR, NP2StyleX_ParsingError, L"fore:#C80000; back:#FFFF80" },
// Intentionally unmapped lexer states:
// {SCE_PL_XLAT},
//{ SCE_PL_PUNCTUATION, EDITSTYLE_HOLE(L"Symbols / Punctuation (not used)"), L"" },
//{ SCE_PL_PREPROCESSOR, EDITSTYLE_HOLE(L"Preprocessor (not used)"), L"" },
//{ SCE_PL_LONGQUOTE, EDITSTYLE_HOLE(L"Long Quote (qq, qr, qw, qx) (not used)"), L"" },
};
// Lexer registration for Perl: Scintilla lexer id, Notepad2 lexer id,
// default scheme settings, display name, associated file extensions,
// keyword lists and style table defined above.
EDITLEXER lexPerl = {
SCLEX_PERL, NP2LEX_PERL,
SCHEME_SETTINGS_DEFAULT,
EDITLEXER_HOLE(L"Perl Script", Styles_Perl),
L"pl; pm; cgi; pod; plx; stp",
&Keywords_Perl,
Styles_Perl
};
| 2,033 |
2,151 |
<gh_stars>1000+
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ui/display/manager/display_change_observer.h"
#include <string>
#include "base/test/scoped_feature_list.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/display/display_switches.h"
#include "ui/display/manager/display_configurator.h"
#include "ui/display/manager/fake_display_snapshot.h"
#include "ui/display/manager/managed_display_info.h"
#include "ui/display/types/display_mode.h"
#include "ui/gfx/geometry/rect.h"
#include "ui/gfx/geometry/size.h"
namespace display {
namespace {
// Derives the device scale factor for a display of the given physical
// diagonal (inches) and pixel resolution. Pixels are assumed to be square,
// so the pixel diagonal follows from the Pythagorean theorem.
float ComputeDeviceScaleFactor(float diagonal_inch,
                               const gfx::Rect& resolution) {
  const float width_px = resolution.width();
  const float height_px = resolution.height();
  const float diagonal_px =
      std::sqrt(std::pow(width_px, 2) + std::pow(height_px, 2));
  const float dots_per_inch = diagonal_px / diagonal_inch;
  return DisplayChangeObserver::FindDeviceScaleFactor(dots_per_inch);
}
std::unique_ptr<DisplayMode> MakeDisplayMode(int width,
int height,
bool is_interlaced,
float refresh_rate) {
return std::make_unique<DisplayMode>(gfx::Size(width, height), is_interlaced,
refresh_rate);
}
} // namespace
// Test fixture that disables the display-zoom-setting feature before each
// test, so the legacy ui-scale based mode-list behavior is exercised.
class DisplayChangeObserverTest : public testing::Test {
public:
DisplayChangeObserverTest() = default;
void SetUp() override {
// Disable kEnableDisplayZoomSetting for the lifetime of each test.
scoped_feature_list_.InitAndDisableFeature(
features::kEnableDisplayZoomSetting);
testing::Test::SetUp();
}
private:
base::test::ScopedFeatureList scoped_feature_list_;
DISALLOW_COPY_AND_ASSIGN(DisplayChangeObserverTest);
};
// External displays: the mode list is de-duplicated per resolution
// (expected: non-interlaced preferred, highest refresh rate kept) and
// returned sorted by ascending resolution.
TEST_F(DisplayChangeObserverTest, GetExternalManagedDisplayModeList) {
std::unique_ptr<DisplaySnapshot> display_snapshot =
FakeDisplaySnapshot::Builder()
.SetId(123)
.SetNativeMode(MakeDisplayMode(1920, 1200, false, 60))
// All non-interlaced (as would be seen with different refresh rates).
.AddMode(MakeDisplayMode(1920, 1080, false, 80))
.AddMode(MakeDisplayMode(1920, 1080, false, 70))
.AddMode(MakeDisplayMode(1920, 1080, false, 60))
// Interlaced vs non-interlaced.
.AddMode(MakeDisplayMode(1280, 720, true, 60))
.AddMode(MakeDisplayMode(1280, 720, false, 60))
// Interlaced only.
.AddMode(MakeDisplayMode(1024, 768, true, 70))
.AddMode(MakeDisplayMode(1024, 768, true, 60))
// Mixed.
.AddMode(MakeDisplayMode(1024, 600, true, 60))
.AddMode(MakeDisplayMode(1024, 600, false, 70))
.AddMode(MakeDisplayMode(1024, 600, false, 60))
// Just one interlaced mode.
.AddMode(MakeDisplayMode(640, 480, true, 60))
.Build();
ManagedDisplayInfo::ManagedDisplayModeList display_modes =
DisplayChangeObserver::GetExternalManagedDisplayModeList(
*display_snapshot);
// One survivor per resolution, sorted ascending by size.
ASSERT_EQ(6u, display_modes.size());
EXPECT_EQ("640x480", display_modes[0].size().ToString());
EXPECT_TRUE(display_modes[0].is_interlaced());
EXPECT_EQ(display_modes[0].refresh_rate(), 60);
EXPECT_EQ("1024x600", display_modes[1].size().ToString());
EXPECT_FALSE(display_modes[1].is_interlaced());
EXPECT_EQ(display_modes[1].refresh_rate(), 70);
EXPECT_EQ("1024x768", display_modes[2].size().ToString());
EXPECT_TRUE(display_modes[2].is_interlaced());
EXPECT_EQ(display_modes[2].refresh_rate(), 70);
EXPECT_EQ("1280x720", display_modes[3].size().ToString());
EXPECT_FALSE(display_modes[3].is_interlaced());
EXPECT_EQ(display_modes[3].refresh_rate(), 60);
EXPECT_EQ("1920x1080", display_modes[4].size().ToString());
EXPECT_FALSE(display_modes[4].is_interlaced());
EXPECT_EQ(display_modes[4].refresh_rate(), 80);
EXPECT_EQ("1920x1200", display_modes[5].size().ToString());
EXPECT_FALSE(display_modes[5].is_interlaced());
EXPECT_EQ(display_modes[5].refresh_rate(), 60);
}
// A snapshot that advertises no display modes yields an empty external list.
TEST_F(DisplayChangeObserverTest, GetEmptyExternalManagedDisplayModeList) {
FakeDisplaySnapshot display_snapshot(
123, gfx::Point(), gfx::Size(), DISPLAY_CONNECTION_TYPE_UNKNOWN, false,
false, false, std::string(), {}, nullptr, nullptr, 0, gfx::Size());
ManagedDisplayInfo::ManagedDisplayModeList display_modes =
DisplayChangeObserver::GetExternalManagedDisplayModeList(
display_snapshot);
EXPECT_EQ(0u, display_modes.size());
}
// Internal displays: instead of exposing the raw snapshot modes, the list
// contains ui-scale variants of the native resolution (expected here:
// 0.5, 0.6, 0.75, 1.0 native, 1.125).
TEST_F(DisplayChangeObserverTest, GetInternalManagedDisplayModeList) {
std::unique_ptr<DisplaySnapshot> display_snapshot =
FakeDisplaySnapshot::Builder()
.SetId(123)
.SetNativeMode(MakeDisplayMode(1366, 768, false, 60))
.AddMode(MakeDisplayMode(1024, 768, false, 60))
.AddMode(MakeDisplayMode(800, 600, false, 60))
.AddMode(MakeDisplayMode(600, 600, false, 56.2))
.AddMode(MakeDisplayMode(640, 480, false, 59.9))
.Build();
ManagedDisplayInfo info(1, "", false);
info.SetBounds(gfx::Rect(0, 0, 1366, 768));
ManagedDisplayInfo::ManagedDisplayModeList display_modes =
DisplayChangeObserver::GetInternalManagedDisplayModeList(
info, *display_snapshot);
ASSERT_EQ(5u, display_modes.size());
EXPECT_EQ("1366x768", display_modes[0].size().ToString());
EXPECT_FALSE(display_modes[0].native());
EXPECT_NEAR(display_modes[0].ui_scale(), 0.5, 0.01);
EXPECT_EQ(display_modes[0].refresh_rate(), 60);
EXPECT_EQ("1366x768", display_modes[1].size().ToString());
EXPECT_FALSE(display_modes[1].native());
EXPECT_NEAR(display_modes[1].ui_scale(), 0.6, 0.01);
EXPECT_EQ(display_modes[1].refresh_rate(), 60);
EXPECT_EQ("1366x768", display_modes[2].size().ToString());
EXPECT_FALSE(display_modes[2].native());
EXPECT_NEAR(display_modes[2].ui_scale(), 0.75, 0.01);
EXPECT_EQ(display_modes[2].refresh_rate(), 60);
EXPECT_EQ("1366x768", display_modes[3].size().ToString());
EXPECT_TRUE(display_modes[3].native());
EXPECT_NEAR(display_modes[3].ui_scale(), 1.0, 0.01);
EXPECT_EQ(display_modes[3].refresh_rate(), 60);
EXPECT_EQ("1366x768", display_modes[4].size().ToString());
EXPECT_FALSE(display_modes[4].native());
EXPECT_NEAR(display_modes[4].ui_scale(), 1.125, 0.01);
EXPECT_EQ(display_modes[4].refresh_rate(), 60);
}
// HiDPI internal panel (device scale factor 2.0): the full set of ui-scales
// 0.5 .. 2.0 is produced, all at the native resolution; only the 2.0 entry
// is marked native.
TEST_F(DisplayChangeObserverTest, GetInternalHiDPIManagedDisplayModeList) {
// Data picked from peppy.
std::unique_ptr<DisplaySnapshot> display_snapshot =
FakeDisplaySnapshot::Builder()
.SetId(123)
.SetNativeMode(MakeDisplayMode(2560, 1700, false, 60))
.AddMode(MakeDisplayMode(2048, 1536, false, 60))
.AddMode(MakeDisplayMode(1920, 1440, false, 60))
.Build();
ManagedDisplayInfo info(1, "", false);
info.SetBounds(gfx::Rect(0, 0, 2560, 1700));
info.set_device_scale_factor(2.0f);
ManagedDisplayInfo::ManagedDisplayModeList display_modes =
DisplayChangeObserver::GetInternalManagedDisplayModeList(
info, *display_snapshot);
ASSERT_EQ(8u, display_modes.size());
EXPECT_EQ("2560x1700", display_modes[0].size().ToString());
EXPECT_FALSE(display_modes[0].native());
EXPECT_NEAR(display_modes[0].ui_scale(), 0.5, 0.01);
EXPECT_EQ(display_modes[0].refresh_rate(), 60);
EXPECT_EQ("2560x1700", display_modes[1].size().ToString());
EXPECT_FALSE(display_modes[1].native());
EXPECT_NEAR(display_modes[1].ui_scale(), 0.625, 0.01);
EXPECT_EQ(display_modes[1].refresh_rate(), 60);
EXPECT_EQ("2560x1700", display_modes[2].size().ToString());
EXPECT_FALSE(display_modes[2].native());
EXPECT_NEAR(display_modes[2].ui_scale(), 0.8, 0.01);
EXPECT_EQ(display_modes[2].refresh_rate(), 60);
EXPECT_EQ("2560x1700", display_modes[3].size().ToString());
EXPECT_FALSE(display_modes[3].native());
EXPECT_NEAR(display_modes[3].ui_scale(), 1.0, 0.01);
EXPECT_EQ(display_modes[3].refresh_rate(), 60);
EXPECT_EQ("2560x1700", display_modes[4].size().ToString());
EXPECT_FALSE(display_modes[4].native());
EXPECT_NEAR(display_modes[4].ui_scale(), 1.125, 0.01);
EXPECT_EQ(display_modes[4].refresh_rate(), 60);
EXPECT_EQ("2560x1700", display_modes[5].size().ToString());
EXPECT_FALSE(display_modes[5].native());
EXPECT_NEAR(display_modes[5].ui_scale(), 1.25, 0.01);
EXPECT_EQ(display_modes[5].refresh_rate(), 60);
EXPECT_EQ("2560x1700", display_modes[6].size().ToString());
EXPECT_FALSE(display_modes[6].native());
EXPECT_NEAR(display_modes[6].ui_scale(), 1.5, 0.01);
EXPECT_EQ(display_modes[6].refresh_rate(), 60);
EXPECT_EQ("2560x1700", display_modes[7].size().ToString());
EXPECT_TRUE(display_modes[7].native());
EXPECT_NEAR(display_modes[7].ui_scale(), 2.0, 0.01);
EXPECT_EQ(display_modes[7].refresh_rate(), 60);
}
// Internal panel with device scale factor 1.25: expected ui-scales are
// 0.5, 0.625, 0.8, 1.0 (native) and 1.25, all at the native resolution.
TEST_F(DisplayChangeObserverTest, GetInternalManagedDisplayModeList1_25) {
// Data picked from peppy.
std::unique_ptr<DisplaySnapshot> display_snapshot =
FakeDisplaySnapshot::Builder()
.SetId(123)
.SetNativeMode(MakeDisplayMode(1920, 1080, false, 60))
.Build();
ManagedDisplayInfo info(1, "", false);
info.SetBounds(gfx::Rect(0, 0, 1920, 1080));
info.set_device_scale_factor(1.25);
ManagedDisplayInfo::ManagedDisplayModeList display_modes =
DisplayChangeObserver::GetInternalManagedDisplayModeList(
info, *display_snapshot);
ASSERT_EQ(5u, display_modes.size());
EXPECT_EQ("1920x1080", display_modes[0].size().ToString());
EXPECT_FALSE(display_modes[0].native());
EXPECT_NEAR(display_modes[0].ui_scale(), 0.5, 0.01);
EXPECT_EQ(display_modes[0].refresh_rate(), 60);
EXPECT_EQ("1920x1080", display_modes[1].size().ToString());
EXPECT_FALSE(display_modes[1].native());
EXPECT_NEAR(display_modes[1].ui_scale(), 0.625, 0.01);
EXPECT_EQ(display_modes[1].refresh_rate(), 60);
EXPECT_EQ("1920x1080", display_modes[2].size().ToString());
EXPECT_FALSE(display_modes[2].native());
EXPECT_NEAR(display_modes[2].ui_scale(), 0.8, 0.01);
EXPECT_EQ(display_modes[2].refresh_rate(), 60);
EXPECT_EQ("1920x1080", display_modes[3].size().ToString());
EXPECT_TRUE(display_modes[3].native());
EXPECT_NEAR(display_modes[3].ui_scale(), 1.0, 0.01);
EXPECT_EQ(display_modes[3].refresh_rate(), 60);
EXPECT_EQ("1920x1080", display_modes[4].size().ToString());
EXPECT_FALSE(display_modes[4].native());
EXPECT_NEAR(display_modes[4].ui_scale(), 1.25, 0.01);
EXPECT_EQ(display_modes[4].refresh_rate(), 60);
}
// External 4K display: beyond the de-duplicated per-resolution list, the
// native 3840x2160 mode appears three times — with device scale factors
// 2.0 and 1.25 (non-native copies) and once as the native mode.
TEST_F(DisplayChangeObserverTest, GetExternalManagedDisplayModeList4K) {
std::unique_ptr<DisplaySnapshot> display_snapshot =
FakeDisplaySnapshot::Builder()
.SetId(123)
.SetNativeMode(MakeDisplayMode(3840, 2160, false, 30))
.AddMode(MakeDisplayMode(1920, 1200, false, 60))
// All non-interlaced (as would be seen with different refresh rates).
.AddMode(MakeDisplayMode(1920, 1080, false, 80))
.AddMode(MakeDisplayMode(1920, 1080, false, 70))
.AddMode(MakeDisplayMode(1920, 1080, false, 60))
// Interlaced vs non-interlaced.
.AddMode(MakeDisplayMode(1280, 720, true, 60))
.AddMode(MakeDisplayMode(1280, 720, false, 60))
// Interlaced only.
.AddMode(MakeDisplayMode(1024, 768, true, 70))
.AddMode(MakeDisplayMode(1024, 768, true, 60))
// Mixed.
.AddMode(MakeDisplayMode(1024, 600, true, 60))
.AddMode(MakeDisplayMode(1024, 600, false, 70))
.AddMode(MakeDisplayMode(1024, 600, false, 60))
// Just one interlaced mode.
.AddMode(MakeDisplayMode(640, 480, true, 60))
.Build();
ManagedDisplayInfo::ManagedDisplayModeList display_modes =
DisplayChangeObserver::GetExternalManagedDisplayModeList(
*display_snapshot);
ManagedDisplayInfo info(1, "", false);
info.SetManagedDisplayModes(display_modes); // Sort as external display.
display_modes = info.display_modes();
ASSERT_EQ(9u, display_modes.size());
EXPECT_EQ("640x480", display_modes[0].size().ToString());
EXPECT_TRUE(display_modes[0].is_interlaced());
EXPECT_EQ(display_modes[0].refresh_rate(), 60);
EXPECT_EQ("1024x600", display_modes[1].size().ToString());
EXPECT_FALSE(display_modes[1].is_interlaced());
EXPECT_EQ(display_modes[1].refresh_rate(), 70);
EXPECT_EQ("1024x768", display_modes[2].size().ToString());
EXPECT_TRUE(display_modes[2].is_interlaced());
EXPECT_EQ(display_modes[2].refresh_rate(), 70);
EXPECT_EQ("1280x720", display_modes[3].size().ToString());
EXPECT_FALSE(display_modes[3].is_interlaced());
EXPECT_EQ(display_modes[3].refresh_rate(), 60);
EXPECT_EQ("1920x1080", display_modes[4].size().ToString());
EXPECT_FALSE(display_modes[4].is_interlaced());
EXPECT_EQ(display_modes[4].refresh_rate(), 80);
EXPECT_EQ("3840x2160", display_modes[5].size().ToString());
EXPECT_FALSE(display_modes[5].is_interlaced());
EXPECT_FALSE(display_modes[5].native());
EXPECT_EQ(display_modes[5].refresh_rate(), 30);
EXPECT_EQ(display_modes[5].device_scale_factor(), 2.0);
EXPECT_EQ("1920x1200", display_modes[6].size().ToString());
EXPECT_FALSE(display_modes[6].is_interlaced());
EXPECT_EQ(display_modes[6].refresh_rate(), 60);
EXPECT_EQ("3840x2160", display_modes[7].size().ToString());
EXPECT_FALSE(display_modes[7].is_interlaced());
EXPECT_FALSE(display_modes[7].native());
EXPECT_EQ(display_modes[7].refresh_rate(), 30);
EXPECT_EQ(display_modes[7].device_scale_factor(), 1.25);
EXPECT_EQ("3840x2160", display_modes[8].size().ToString());
EXPECT_FALSE(display_modes[8].is_interlaced());
EXPECT_TRUE(display_modes[8].native());
EXPECT_EQ(display_modes[8].refresh_rate(), 30);
}
// Maps representative physical size / resolution pairs to the expected
// bucketed device scale factors, and checks out-of-range DPI values clamp
// to the nearest bucket.
TEST_F(DisplayChangeObserverTest, FindDeviceScaleFactor) {
// 19.5" 1600x900
EXPECT_EQ(1.0f, ComputeDeviceScaleFactor(19.5f, gfx::Rect(1600, 900)));
// 21.5" 1920x1080
EXPECT_EQ(1.0f, ComputeDeviceScaleFactor(21.5f, gfx::Rect(1920, 1080)));
// 12.1" 1280x800
EXPECT_EQ(1.0f, ComputeDeviceScaleFactor(12.1f, gfx::Rect(1280, 800)));
// 13.3" 1920x1080
EXPECT_EQ(1.25f, ComputeDeviceScaleFactor(13.3f, gfx::Rect(1920, 1080)));
// 14" 1920x1080
EXPECT_EQ(1.25f, ComputeDeviceScaleFactor(14.0f, gfx::Rect(1920, 1080)));
// 11.6" 1920x1080
EXPECT_EQ(1.6f, ComputeDeviceScaleFactor(11.6f, gfx::Rect(1920, 1080)));
// 12.02" 2160x1440
EXPECT_EQ(1.6f, ComputeDeviceScaleFactor(12.02f, gfx::Rect(2160, 1440)));
// 12.85" 2560x1700
EXPECT_EQ(2.0f, ComputeDeviceScaleFactor(12.85f, gfx::Rect(2560, 1700)));
// 12.3" 2400x1600
EXPECT_EQ(2.0f, ComputeDeviceScaleFactor(12.3f, gfx::Rect(2400, 1600)));
// Erroneous values should still work.
EXPECT_EQ(1.0f, DisplayChangeObserver::FindDeviceScaleFactor(-100.0f));
EXPECT_EQ(1.0f, DisplayChangeObserver::FindDeviceScaleFactor(0.0f));
EXPECT_EQ(2.0f, DisplayChangeObserver::FindDeviceScaleFactor(10000.0f));
}
// When the snapshot's native mode is interlaced but a non-interlaced mode
// with the same size exists, both are kept and the interlaced one retains
// the native flag.
TEST_F(DisplayChangeObserverTest,
FindExternalDisplayNativeModeWhenOverwritten) {
std::unique_ptr<DisplaySnapshot> display_snapshot =
FakeDisplaySnapshot::Builder()
.SetId(123)
.SetNativeMode(MakeDisplayMode(1920, 1080, true, 60))
.AddMode(MakeDisplayMode(1920, 1080, false, 60))
.Build();
ManagedDisplayInfo::ManagedDisplayModeList display_modes =
DisplayChangeObserver::GetExternalManagedDisplayModeList(
*display_snapshot);
ASSERT_EQ(2u, display_modes.size());
EXPECT_EQ("1920x1080", display_modes[0].size().ToString());
EXPECT_FALSE(display_modes[0].is_interlaced());
EXPECT_FALSE(display_modes[0].native());
EXPECT_EQ(display_modes[0].refresh_rate(), 60);
EXPECT_EQ("1920x1080", display_modes[1].size().ToString());
EXPECT_TRUE(display_modes[1].is_interlaced());
EXPECT_TRUE(display_modes[1].native());
EXPECT_EQ(display_modes[1].refresh_rate(), 60);
}
} // namespace display
| 6,682 |
14,668 |
<gh_stars>1000+
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_SSL_HTTPS_ONLY_MODE_NAVIGATION_THROTTLE_H_
#define CHROME_BROWSER_SSL_HTTPS_ONLY_MODE_NAVIGATION_THROTTLE_H_
#include <memory>
#include "base/timer/timer.h"
#include "components/security_interstitials/content/security_blocking_page_factory.h"
#include "content/public/browser/navigation_throttle.h"
#include "url/gurl.h"
class PrefService;
// HttpsOnlyModeNavigationThrottle is responsible for observing HTTPS-Only Mode
// navigations that have been upgraded by HttpsOnlyModeUpgradeInterceptor,
// timing them out if they take too long, and handling failure by loading the
// HTTPS-Only Mode interstitial.
//
// Metadata about the navigation state (as it pertains to HTTPS-Only Mode)
// shared between HttpsOnlyModeUpgradeInterceptor and
// HttpsOnlyModeNavigationThrottle is stored in an HttpsOnlyModeTabHelper set
// as user-data on the WebContents in which the navigation occurs. (Such
// metadata might ordinarily be added to ChromeNavigationUIData, but the
// Interceptor only receives a clone of the data, so it can't be used as a
// channel between these classes.)
class HttpsOnlyModeNavigationThrottle : public content::NavigationThrottle {
public:
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
enum class Event {
// Navigation was upgraded from HTTP to HTTPS at some point (either the
// initial request or after a redirect).
kUpgradeAttempted = 0,
// Navigation succeeded after being upgraded to HTTPS.
kUpgradeSucceeded = 1,
// Navigation failed after being upgraded to HTTPS.
kUpgradeFailed = 2,
// kUpgradeCertError, kUpgradeNetError, and kUpgradeTimedOut are subsets of
// kUpgradeFailed. kUpgradeFailed should also be recorded whenever these
// events are recorded.
// Navigation failed due to a cert error.
kUpgradeCertError = 3,
// Navigation failed due to a net error.
kUpgradeNetError = 4,
// Navigation failed due to timing out.
kUpgradeTimedOut = 5,
kMaxValue = kUpgradeTimedOut,
};
// Conditionally creates a throttle for |handle|. NOTE(review): presumably
// returns nullptr when HTTPS-Only Mode is disabled in |prefs| — confirm in
// the implementation.
static std::unique_ptr<HttpsOnlyModeNavigationThrottle>
MaybeCreateThrottleFor(
content::NavigationHandle* handle,
std::unique_ptr<SecurityBlockingPageFactory> blocking_page_factory,
PrefService* prefs);
HttpsOnlyModeNavigationThrottle(
content::NavigationHandle* handle,
std::unique_ptr<SecurityBlockingPageFactory> blocking_page_factory);
~HttpsOnlyModeNavigationThrottle() override;
// Not copyable or assignable.
HttpsOnlyModeNavigationThrottle(const HttpsOnlyModeNavigationThrottle&) =
delete;
HttpsOnlyModeNavigationThrottle& operator=(
const HttpsOnlyModeNavigationThrottle&) = delete;
// content::NavigationThrottle:
content::NavigationThrottle::ThrottleCheckResult WillRedirectRequest()
override;
content::NavigationThrottle::ThrottleCheckResult WillFailRequest() override;
content::NavigationThrottle::ThrottleCheckResult WillProcessResponse()
override;
const char* GetNameForLogging() override;
// Overrides the fallback timeout used when upgraded navigations stall.
static void set_timeout_for_testing(int timeout_in_seconds);
private:
// Builds the HTTPS-Only Mode interstitial shown on upgrade failure.
std::unique_ptr<SecurityBlockingPageFactory> blocking_page_factory_;
};
#endif // CHROME_BROWSER_SSL_HTTPS_ONLY_MODE_NAVIGATION_THROTTLE_H_
| 1,074 |
1,431 |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.ooxml;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.time.ZoneId;
import java.util.Calendar;
import java.util.Date;
import java.util.Optional;
import org.apache.poi.POIDataSamples;
import org.apache.poi.ooxml.POIXMLProperties.CoreProperties;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.openxml4j.opc.OPCPackage;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.util.IOUtils;
import org.apache.poi.util.LocaleUtil;
import org.apache.poi.xssf.XSSFTestDataSamples;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.apache.poi.xwpf.XWPFTestDataSamples;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
/**
* Test setting extended and custom OOXML properties
*/
public final class TestPOIXMLProperties {
private XWPFDocument sampleDoc;
private XWPFDocument sampleNoThumb;
private POIXMLProperties _props;
private CoreProperties _coreProperties;
/**
 * Loads the two sample .docx files used by the tests and caches the
 * properties / core properties of the first one. Runs before every test.
 */
@BeforeEach
void setUp() throws IOException {
sampleDoc = XWPFTestDataSamples.openSampleDocument("documentProperties.docx");
sampleNoThumb = XWPFTestDataSamples.openSampleDocument("SampleDoc.docx");
assertNotNull(sampleDoc);
assertNotNull(sampleNoThumb);
_props = sampleDoc.getProperties();
_coreProperties = _props.getCoreProperties();
assertNotNull(_props);
}
/** Closes both sample documents after each test to release resources. */
@AfterEach
void closeResources() throws Exception {
sampleDoc.close();
sampleNoThumb.close();
}
/**
 * Round-trips application name and version through the underlying (low
 * level) CTProperties of a workbook's extended properties, verifying both
 * the usermodel accessors and the raw XML beans after write/read-back.
 */
@Test
void testWorkbookExtendedProperties() throws Exception {
XSSFWorkbook workbook = new XSSFWorkbook();
POIXMLProperties props = workbook.getProperties();
assertNotNull(props);
POIXMLProperties.ExtendedProperties properties =
props.getExtendedProperties();
org.openxmlformats.schemas.officeDocument.x2006.extendedProperties.CTProperties
ctProps = properties.getUnderlyingProperties();
String appVersion = "3.5 beta";
String application = "POI";
ctProps.setApplication(application);
ctProps.setAppVersion(appVersion);
// Serialize and reload to prove the values survive a save cycle.
XSSFWorkbook newWorkbook =
XSSFTestDataSamples.writeOutAndReadBack(workbook);
workbook.close();
assertNotSame(workbook, newWorkbook);
POIXMLProperties newProps = newWorkbook.getProperties();
assertNotNull(newProps);
POIXMLProperties.ExtendedProperties newProperties =
newProps.getExtendedProperties();
assertEquals(application, newProperties.getApplication());
assertEquals(appVersion, newProperties.getAppVersion());
org.openxmlformats.schemas.officeDocument.x2006.extendedProperties.CTProperties
newCtProps = newProperties.getUnderlyingProperties();
assertEquals(application, newCtProps.getApplication());
assertEquals(appVersion, newCtProps.getAppVersion());
newWorkbook.close();
}
/**
 * Same round-trip as above but through the high-level getter/setter API of
 * ExtendedProperties, including the default application name check.
 */
@Test
void testWorkbookExtendedPropertiesGettersSetters() throws Exception {
XSSFWorkbook workbook = new XSSFWorkbook();
POIXMLProperties props = workbook.getProperties();
assertNotNull(props);
POIXMLProperties.ExtendedProperties properties =
props.getExtendedProperties();
String appVersion = "3.5 beta";
String application = "POI Modified";
// New workbooks start with the default application name.
assertEquals("Apache POI", properties.getApplication());
properties.setApplication(application);
assertEquals(properties.getApplication(), application);
assertNull(properties.getAppVersion());
properties.setAppVersion(appVersion);
assertEquals(properties.getAppVersion(), appVersion);
XSSFWorkbook newWorkbook =
XSSFTestDataSamples.writeOutAndReadBack(workbook);
workbook.close();
assertNotSame(workbook, newWorkbook);
POIXMLProperties newProps = newWorkbook.getProperties();
assertNotNull(newProps);
POIXMLProperties.ExtendedProperties newProperties =
newProps.getExtendedProperties();
assertEquals(application, newProperties.getApplication());
assertEquals(appVersion, newProperties.getAppVersion());
newWorkbook.close();
}
/**
 * Test usermodel API for setting custom properties: adds string, int,
 * double and boolean properties, verifies that duplicate names are
 * rejected, and checks the serialized low-level property entries (fmtid,
 * name, typed value, pid) after a write/read-back cycle.
 */
@Test
void testCustomProperties() throws Exception {
try (XSSFWorkbook wb1 = new XSSFWorkbook()) {
POIXMLProperties.CustomProperties customProps = wb1.getProperties().getCustomProperties();
customProps.addProperty("test-1", "string val");
customProps.addProperty("test-2", 1974);
customProps.addProperty("test-3", 36.6);
//adding a duplicate
IllegalArgumentException e = assertThrows(IllegalArgumentException.class, () -> customProps.addProperty("test-3", 36.6));
assertEquals("A property with this name already exists in the custom properties", e.getMessage());
customProps.addProperty("test-4", true);
try (XSSFWorkbook wb2 = XSSFTestDataSamples.writeOutAndReadBack(wb1)) {
org.openxmlformats.schemas.officeDocument.x2006.customProperties.CTProperties ctProps =
wb2.getProperties().getCustomProperties().getUnderlyingProperties();
assertEquals(4, ctProps.sizeOfPropertyArray());
org.openxmlformats.schemas.officeDocument.x2006.customProperties.CTProperty p;
p = ctProps.getPropertyArray(0);
assertEquals("{D5CDD505-2E9C-101B-9397-08002B2CF9AE}", p.getFmtid());
assertEquals("test-1", p.getName());
assertEquals("string val", p.getLpwstr());
assertEquals(2, p.getPid());
p = ctProps.getPropertyArray(1);
assertEquals("{D5CDD505-2E9C-101B-9397-08002B2CF9AE}", p.getFmtid());
assertEquals("test-2", p.getName());
assertEquals(1974, p.getI4());
assertEquals(3, p.getPid());
p = ctProps.getPropertyArray(2);
assertEquals("{D5CDD505-2E9C-101B-9397-08002B2CF9AE}", p.getFmtid());
assertEquals("test-3", p.getName());
assertEquals(36.6, p.getR8(), 0);
assertEquals(4, p.getPid());
p = ctProps.getPropertyArray(3);
assertEquals("{D5CDD505-2E9C-101B-9397-08002B2CF9AE}", p.getFmtid());
assertEquals("test-4", p.getName());
assertTrue(p.getBool());
assertEquals(5, p.getPid());
}
}
}
@Test
void testDocumentProperties() {
String category = _coreProperties.getCategory();
assertEquals("test", category);
String contentStatus = "Draft";
_coreProperties.setContentStatus(contentStatus);
assertEquals("Draft", contentStatus);
Date created = _coreProperties.getCreated();
// the original file contains a following value: 2009-07-20T13:12:00Z
assertTrue(dateTimeEqualToUTCString(created, "2009-07-20T13:12:00Z"));
String creator = _coreProperties.getCreator();
assertEquals("<NAME>", creator);
String subject = _coreProperties.getSubject();
assertEquals("Greetings", subject);
String title = _coreProperties.getTitle();
assertEquals("Hello World", title);
}
    /**
     * Verifies that a created-date set through the usermodel survives a
     * write/read round trip of the document.
     */
    @Test
    void testTransitiveSetters() throws IOException {
        XWPFDocument doc = new XWPFDocument();
        CoreProperties cp = doc.getProperties().getCoreProperties();
        Date dateCreated = LocaleUtil.getLocaleCalendar(2010, 6, 15, 10, 0, 0).getTime();
        cp.setCreated(Optional.of(dateCreated));
        assertEquals(dateCreated, cp.getCreated());
        XWPFDocument doc2 = XWPFTestDataSamples.writeOutAndReadBack(doc);
        doc.close();
        // the date must still be present after serialization
        cp = doc2.getProperties().getCoreProperties();
        Date dt3 = cp.getCreated();
        assertEquals(dateCreated, dt3);
        doc2.close();
    }
    /**
     * Verifies that the revision property only accepts numeric values: a
     * non-numeric value ("20xx") is ignored by the setter, leaving the last
     * valid revision in place.
     */
    @Test
    void testGetSetRevision() {
        String revision = _coreProperties.getRevision();
        assertTrue(Integer.parseInt(revision) > 1, "Revision number is 1");
        _coreProperties.setRevision("20");
        assertEquals("20", _coreProperties.getRevision());
        // invalid (non-numeric) revisions are silently dropped
        _coreProperties.setRevision("20xx");
        assertEquals("20", _coreProperties.getRevision());
    }
    /** Checks reading and overwriting the "last modified by user" core property. */
    @Test
    void testLastModifiedByUserProperty() {
        String lastModifiedByUser = _coreProperties.getLastModifiedByUser();
        assertEquals("<NAME>", lastModifiedByUser);
        _coreProperties.setLastModifiedByUser("Test User");
        assertEquals("Test User", _coreProperties.getLastModifiedByUser());
    }
public static boolean dateTimeEqualToUTCString(Date dateTime, String utcString) {
Calendar utcCalendar = LocaleUtil.getLocaleCalendar(LocaleUtil.TIMEZONE_UTC);
utcCalendar.setTimeInMillis(dateTime.getTime());
String dateTimeUtcString = utcCalendar.get(Calendar.YEAR) + "-" +
zeroPad((utcCalendar.get(Calendar.MONTH)+1)) + "-" +
zeroPad(utcCalendar.get(Calendar.DAY_OF_MONTH)) + "T" +
zeroPad(utcCalendar.get(Calendar.HOUR_OF_DAY)) + ":" +
zeroPad(utcCalendar.get(Calendar.MINUTE)) + ":" +
zeroPad(utcCalendar.get(Calendar.SECOND)) + "Z";
return utcString.equals(dateTimeUtcString);
}
    /**
     * Exercises the thumbnail accessors: presence/absence of the thumbnail
     * part, its filename and image stream, and adding/replacing a thumbnail.
     */
    @Disabled("Fails to add some of the thumbnails, needs more investigation")
    @Test
    void testThumbnails() throws Exception {
        POIXMLProperties noThumbProps = sampleNoThumb.getProperties();
        assertNotNull(_props.getThumbnailPart());
        assertNull(noThumbProps.getThumbnailPart());
        assertNotNull(_props.getThumbnailFilename());
        assertNull(noThumbProps.getThumbnailFilename());
        assertNotNull(_props.getThumbnailImage());
        assertNull(noThumbProps.getThumbnailImage());
        assertEquals("/thumbnail.jpeg", _props.getThumbnailFilename());
        // Adding / changing
        ByteArrayInputStream imageData = new ByteArrayInputStream(new byte[1]);
        noThumbProps.setThumbnail("Testing.png", imageData);
        assertNotNull(noThumbProps.getThumbnailPart());
        assertEquals("/Testing.png", noThumbProps.getThumbnailFilename());
        assertNotNull(noThumbProps.getThumbnailImage());
        assertEquals(1, IOUtils.toByteArray(noThumbProps.getThumbnailImage()).length);
        // NOTE(review): this expects the original part name ("/Testing.png")
        // to be kept while the image bytes change -- presumably intentional,
        // but the test is disabled; confirm before re-enabling.
        imageData = new ByteArrayInputStream(new byte[2]);
        noThumbProps.setThumbnail("Testing2.png", imageData);
        assertNotNull(noThumbProps.getThumbnailPart());
        assertEquals("/Testing.png", noThumbProps.getThumbnailFilename());
        assertNotNull(noThumbProps.getThumbnailImage());
        assertEquals(2, IOUtils.toByteArray(noThumbProps.getThumbnailImage()).length);
    }
private static String zeroPad(long i) {
if (i >= 0 && i <= 9) {
return "0" + i;
} else {
return String.valueOf(i);
}
}
    /**
     * Verifies that adding a custom property assigns the next sequential pid
     * and stores the string value.
     */
    @Test
    void testAddProperty() throws IOException {
        try (XWPFDocument doc = XWPFTestDataSamples.openSampleDocument("documentProperties.docx")) {
            POIXMLProperties.CustomProperties cps = doc.getProperties().getCustomProperties();
            assertEquals(1, cps.getLastPid());
            cps.addProperty("prop1", "abc");
            assertEquals(2, cps.getLastPid());
            assertEquals(2, cps.getProperty("prop1").getPid());
            assertEquals("abc", cps.getProperty("prop1").getLpwstr());
        }
    }
    /**
     * Ensures properties of an OOXML "strict" workbook can be parsed and the
     * created date is read back with the expected year.
     */
    @Test
    void testOoxmlStrict() throws Exception {
        POIDataSamples _ssTests = POIDataSamples.getSpreadSheetInstance();
        try (OPCPackage pkg = OPCPackage.open(_ssTests.openResourceAsStream("sample.strict.xlsx"))) {
            POIXMLProperties props = new POIXMLProperties(pkg);
            assertNotNull(props.getCoreProperties().getCreated());
            assertEquals(2007, props.getCoreProperties().getCreated().toInstant().atZone(ZoneId.of("UTC")).getYear());
        }
    }
    /**
     * Regression test for bug 60977: writing a workbook with custom
     * properties multiple times must keep the properties document parseable
     * on each read-back.
     */
    @Test
    void testBug60977() throws IOException {
        try (final XSSFWorkbook workbook = new XSSFWorkbook()) {
            final Sheet sheet = workbook.createSheet("sheet");
            final Row row = sheet.createRow(0);
            final Cell cell = row.createCell(0);
            cell.setCellValue("cell");
            final POIXMLProperties properties = workbook.getProperties();
            final POIXMLProperties.CustomProperties customProperties = properties.getCustomProperties();
            final String propName = "Project";
            final String propValue = "Some name";
            customProperties.addProperty(propName, propValue);
            // in the unit-test just try to write out the file more than once and see if we can still parse it
            XSSFWorkbook wbBack = XSSFTestDataSamples.writeOutAndReadBack(workbook);
            assertNotNull(wbBack);
            // properties documents are read lazily, so we have to access them to verify they parse properly
            assertNotNull(wbBack.getProperties(), "First writeOutAndReadBack");
            assertEquals(propValue, wbBack.getProperties().getCustomProperties().getProperty(propName).getLpwstr(), "First prop check");
            customProperties.addProperty(propName + "1", propValue);
            wbBack = XSSFTestDataSamples.writeOutAndReadBack(workbook);
            assertNotNull(wbBack);
            // properties documents are read lazily, so we have to access them to verify they parse properly
            assertNotNull(wbBack.getProperties(), "Second writeOutAndReadBack");
            assertEquals(propValue, wbBack.getProperties().getCustomProperties().getProperty(propName).getLpwstr(), "Second prop check");
            assertEquals(propValue, wbBack.getProperties().getCustomProperties().getProperty(propName + "1").getLpwstr(), "Second prop check1");
            wbBack = XSSFTestDataSamples.writeOutAndReadBack(workbook);
            assertNotNull(wbBack);
            // properties documents are read lazily, so we have to access them to verify they parse properly
            assertNotNull(wbBack.getProperties(), "Third writeOutAndReadBack");
            assertEquals(propValue, wbBack.getProperties().getCustomProperties().getProperty(propName).getLpwstr(), "Third prop check");
            assertEquals(propValue, wbBack.getProperties().getCustomProperties().getProperty(propName + "1").getLpwstr(), "Third prop check1");
        }
    }
    /** Setting a non-parseable created date string must be rejected. */
    @Test
    void testSetInvalidCreatedDate() throws IOException {
        try (XWPFDocument doc = new XWPFDocument()) {
            CoreProperties cp = doc.getProperties().getCoreProperties();
            assertThrows(InvalidFormatException.class, () -> cp.setCreated("not a date"));
        }
    }
    /** Setting a non-parseable last-printed date string must be rejected. */
    @Test
    void testSetInvalidLastPrintedDate() throws IOException {
        try (XWPFDocument doc = new XWPFDocument()) {
            CoreProperties cp = doc.getProperties().getCoreProperties();
            assertThrows(InvalidFormatException.class, () -> cp.setLastPrinted("not a date"));
        }
    }
    /** Setting a non-parseable modified date string must be rejected. */
    @Test
    void testSetInvalidModifiedDate() throws IOException {
        try (XWPFDocument doc = new XWPFDocument()) {
            CoreProperties cp = doc.getProperties().getCoreProperties();
            assertThrows(InvalidFormatException.class, () -> cp.setModified("not a date"));
        }
    }
}
| 6,903 |
737 |
<reponame>etnrlz/rtbkit
/* exp_test.cc
<NAME>, 29 November 2004
Copyright (c) 2004 <NAME>. All rights reserved.
$Source$
Test to find the maximum value we can take an exp of.
*/
#include <cmath>
#include <iostream>
#include <iomanip>
#include <numeric>
#include "jml/arch/demangle.h"
using namespace std;
// Binary-searches the largest argument for which std::exp() still yields a
// finite value for the given floating point type, and prints it.
template<class Float>
void test()
{
    Float minv = std::numeric_limits<Float>::min();
    Float maxv = std::numeric_limits<Float>::max();
    Float v = (minv + maxv) * (Float)0.5;

    while (minv != maxv) {
        Float e = std::exp(v);
        // std::isfinite is the portable spelling: after #include <cmath> the
        // unqualified name is not guaranteed to be declared.
        if (std::isfinite(e)) minv = v;
        else maxv = v;
        Float oldv = v;
        v = (minv + maxv) * (Float)0.5;
        if (v == oldv) break;   // interval can no longer shrink
    }

    cout << "maximum exp arg for " << demangle(typeid(Float).name()) << " is "
         << std::setprecision(std::numeric_limits<Float>::digits10 + 2)
         << v << endl;
}
// Entry point: probe each standard floating point type in turn.
int main(int argc, char ** argv)
{
    (void)argc;   // command-line arguments are not used
    (void)argv;
    test<float>();
    test<double>();
    test<long double>();
    return 0;     // explicit success status (previously implicit)
}
| 443 |
1,310 |
<reponame>akrzemi1/Mach7
#include <iostream>
#include <utility>
#include "type_switchN-patterns.hpp"
#include "patterns/primitive.hpp"
//------------------------------------------------------------------------------
typedef std::pair<double,double> loc;
//------------------------------------------------------------------------------
// An Algebraic Data Type implemented through inheritance
// Root of the shape hierarchy. The virtual destructor makes the type
// polymorphic, which the type switch below relies on, and allows safe
// deletion through a base pointer.
struct Shape
{
    virtual ~Shape() {}
};
//------------------------------------------------------------------------------
// Circle variant of the Shape ADT: center point plus radius.
struct Circle : Shape
{
    Circle(const loc& c, const double& r) : center(c), radius(r) {}

    const loc& get_center() const { return center; }

    loc center;     // center as an (x,y) pair
    double radius;  // radius length
};
//------------------------------------------------------------------------------
// Square variant of the Shape ADT: upper-left corner plus side length.
struct Square : Shape
{
    Square(const loc& c, const double& s) : upper_left(c), side(s) {}

    loc upper_left; // upper-left corner as an (x,y) pair
    double side;    // side length
};
//------------------------------------------------------------------------------
// Triangle variant of the Shape ADT: its three corner points.
struct Triangle : Shape
{
    Triangle(const loc& a, const loc& b, const loc& c) : first(a), second(b), third(c) {}

    loc first;
    loc second;
    loc third;
};
//------------------------------------------------------------------------------
// Classifies a single shape through the Mach7 type switch and prints a
// one-letter tag ("C"/"S"/"T", or "other" for unrecognized subclasses).
void do_match(const Shape* s0)
{
    const char* text = "unknown";

    // Variable patterns: each matches (and would bind) one concrete subclass.
    mch::var<const Circle&> c;
    mch::var<const Square&> s;
    mch::var<const Triangle&> t;

    Match(s0)
    {
    Case(c) text = "C"; break;
    Case(s) text = "S"; break;
    Case(t) text = "T"; break;
    Otherwise() text = "other"; break;
    }
    EndMatch

    std::cout << text << std::endl;
}
//------------------------------------------------------------------------------
// Two-argument open multi-dispatch via the Mach7 type switch. Note that the
// (c,c) and (s,s) combinations are deliberately left uncovered (commented
// out) and therefore fall through to "other".
void do_match(const Shape* s0, const Shape* s1)
{
    const char* text = "unknown";

    mch::var<const Circle&> c;
    mch::var<const Square&> s;
    mch::var<const Triangle&> t;

    Match(s0,s1)
    {
    //Case(c, c) text = "C,C"; break;
    Case(c, s) text = "C,S"; break;
    Case(s, c) text = "S,C"; break;
    //Case(s, s) text = "S,S"; break;
    Case(s, t) text = "S,T"; break;
    Case(t, s) text = "T,S"; break;
    Case(t, t) text = "T,T"; break;
    Case(t, c) text = "T,C"; break;
    Case(c, t) text = "C,T"; break;
    Otherwise() text = "other"; break;
    }
    EndMatch

    std::cout << text << std::endl;
}
//------------------------------------------------------------------------------
// Three-argument open multi-dispatch. A few (s,c,*) combinations are
// deliberately commented out and thus report "other".
void do_match(Shape* s0, Shape* s1, Shape* s2)
{
    const char* text = "unknown";

    mch::var<const Circle&> c;
    mch::var<const Square&> s;
    mch::var<const Triangle&> t;

    Match(s0,s1,s2)
    {
    Case(c, c, c) text = "C,C,C"; break;
    Case(c, c, s) text = "C,C,S"; break;
    Case(c, s, c) text = "C,S,C"; break;
    Case(c, s, s) text = "C,S,S"; break;
    Case(c, s, t) text = "C,S,T"; break;
    Case(c, t, s) text = "C,T,S"; break;
    Case(c, t, t) text = "C,T,T"; break;
    Case(c, t, c) text = "C,T,C"; break;
    Case(c, c, t) text = "C,C,T"; break;
    //Case(s, c, c) text = "S,C,C"; break;
    //Case(s, c, s) text = "S,C,S"; break;
    Case(s, s, c) text = "S,S,C"; break;
    Case(s, s, s) text = "S,S,S"; break;
    Case(s, s, t) text = "S,S,T"; break;
    Case(s, t, s) text = "S,T,S"; break;
    Case(s, t, t) text = "S,T,T"; break;
    Case(s, t, c) text = "S,T,C"; break;
    //Case(s, c, t) text = "S,C,T"; break;
    Case(t, c, c) text = "T,C,C"; break;
    Case(t, c, s) text = "T,C,S"; break;
    Case(t, s, c) text = "T,S,C"; break;
    Case(t, s, s) text = "T,S,S"; break;
    Case(t, s, t) text = "T,S,T"; break;
    Case(t, t, s) text = "T,T,S"; break;
    Case(t, t, t) text = "T,T,T"; break;
    Case(t, t, c) text = "T,T,C"; break;
    Case(t, c, t) text = "T,C,T"; break;
    Otherwise() text = "other"; break;
    }
    EndMatch

    std::cout << text << std::endl;
}
//------------------------------------------------------------------------------
// Four-argument open multi-dispatch: all 81 subclass combinations are
// enumerated explicitly.
void do_match(Shape* s0, Shape* s1, Shape* s2, Shape* s3)
{
    const char* text = "unknown";

    mch::var<const Circle&> c;
    mch::var<const Square&> s;
    mch::var<const Triangle&> t;

    Match(s0,s1,s2,s3)
    {
    Case(c, c, c, c) text = "C,C,C,C"; break;
    Case(c, c, c, s) text = "C,C,C,S"; break;
    Case(c, c, s, c) text = "C,C,S,C"; break;
    Case(c, c, s, s) text = "C,C,S,S"; break;
    Case(c, c, s, t) text = "C,C,S,T"; break;
    Case(c, c, t, s) text = "C,C,T,S"; break;
    Case(c, c, t, t) text = "C,C,T,T"; break;
    Case(c, c, t, c) text = "C,C,T,C"; break;
    Case(c, c, c, t) text = "C,C,C,T"; break;
    Case(c, s, c, c) text = "C,S,C,C"; break;
    Case(c, s, c, s) text = "C,S,C,S"; break;
    Case(c, s, s, c) text = "C,S,S,C"; break;
    Case(c, s, s, s) text = "C,S,S,S"; break;
    Case(c, s, s, t) text = "C,S,S,T"; break;
    Case(c, s, t, s) text = "C,S,T,S"; break;
    Case(c, s, t, t) text = "C,S,T,T"; break;
    Case(c, s, t, c) text = "C,S,T,C"; break;
    Case(c, s, c, t) text = "C,S,C,T"; break;
    Case(c, t, c, c) text = "C,T,C,C"; break;
    Case(c, t, c, s) text = "C,T,C,S"; break;
    Case(c, t, s, c) text = "C,T,S,C"; break;
    Case(c, t, s, s) text = "C,T,S,S"; break;
    Case(c, t, s, t) text = "C,T,S,T"; break;
    Case(c, t, t, s) text = "C,T,T,S"; break;
    Case(c, t, t, t) text = "C,T,T,T"; break;
    Case(c, t, t, c) text = "C,T,T,C"; break;
    Case(c, t, c, t) text = "C,T,C,T"; break;
    Case(s, c, c, c) text = "S,C,C,C"; break;
    Case(s, c, c, s) text = "S,C,C,S"; break;
    Case(s, c, s, c) text = "S,C,S,C"; break;
    Case(s, c, s, s) text = "S,C,S,S"; break;
    Case(s, c, s, t) text = "S,C,S,T"; break;
    Case(s, c, t, s) text = "S,C,T,S"; break;
    Case(s, c, t, t) text = "S,C,T,T"; break;
    Case(s, c, t, c) text = "S,C,T,C"; break;
    Case(s, c, c, t) text = "S,C,C,T"; break;
    Case(s, s, c, c) text = "S,S,C,C"; break;
    Case(s, s, c, s) text = "S,S,C,S"; break;
    Case(s, s, s, c) text = "S,S,S,C"; break;
    Case(s, s, s, s) text = "S,S,S,S"; break;
    Case(s, s, s, t) text = "S,S,S,T"; break;
    Case(s, s, t, s) text = "S,S,T,S"; break;
    Case(s, s, t, t) text = "S,S,T,T"; break;
    Case(s, s, t, c) text = "S,S,T,C"; break;
    Case(s, s, c, t) text = "S,S,C,T"; break;
    Case(s, t, c, c) text = "S,T,C,C"; break;
    Case(s, t, c, s) text = "S,T,C,S"; break;
    Case(s, t, s, c) text = "S,T,S,C"; break;
    Case(s, t, s, s) text = "S,T,S,S"; break;
    Case(s, t, s, t) text = "S,T,S,T"; break;
    Case(s, t, t, s) text = "S,T,T,S"; break;
    Case(s, t, t, t) text = "S,T,T,T"; break;
    Case(s, t, t, c) text = "S,T,T,C"; break;
    Case(s, t, c, t) text = "S,T,C,T"; break;
    Case(t, c, c, c) text = "T,C,C,C"; break;
    Case(t, c, c, s) text = "T,C,C,S"; break;
    Case(t, c, s, c) text = "T,C,S,C"; break;
    Case(t, c, s, s) text = "T,C,S,S"; break;
    Case(t, c, s, t) text = "T,C,S,T"; break;
    Case(t, c, t, s) text = "T,C,T,S"; break;
    Case(t, c, t, t) text = "T,C,T,T"; break;
    Case(t, c, t, c) text = "T,C,T,C"; break;
    Case(t, c, c, t) text = "T,C,C,T"; break;
    Case(t, s, c, c) text = "T,S,C,C"; break;
    Case(t, s, c, s) text = "T,S,C,S"; break;
    Case(t, s, s, c) text = "T,S,S,C"; break;
    Case(t, s, s, s) text = "T,S,S,S"; break;
    Case(t, s, s, t) text = "T,S,S,T"; break;
    Case(t, s, t, s) text = "T,S,T,S"; break;
    Case(t, s, t, t) text = "T,S,T,T"; break;
    Case(t, s, t, c) text = "T,S,T,C"; break;
    Case(t, s, c, t) text = "T,S,C,T"; break;
    Case(t, t, c, c) text = "T,T,C,C"; break;
    Case(t, t, c, s) text = "T,T,C,S"; break;
    Case(t, t, s, c) text = "T,T,S,C"; break;
    Case(t, t, s, s) text = "T,T,S,S"; break;
    Case(t, t, s, t) text = "T,T,S,T"; break;
    Case(t, t, t, s) text = "T,T,T,S"; break;
    Case(t, t, t, t) text = "T,T,T,T"; break;
    Case(t, t, t, c) text = "T,T,T,C"; break;
    Case(t, t, c, t) text = "T,T,C,T"; break;
    Otherwise() text = "other"; break;
    }
    EndMatch

    std::cout << text << std::endl;
}
//------------------------------------------------------------------------------
int main()
{
Shape* c = new Circle(loc(1,1),7);
Shape* s = new Square(loc(1,1),2);
Shape* t = new Triangle(loc(1,1),loc(1,0),loc(0,0));
Shape* shapes[] = {c,s,t};
for (size_t n = 0; n < 3; n++)
{
// 1 argument
for (size_t i = 0; i < 3; ++i)
do_match(shapes[i]);
// 2 arguments
for (size_t i = 0; i < 3; ++i)
for (size_t j = 0; j < 3; ++j)
do_match(shapes[i], shapes[j]);
// 3 arguments
for (size_t i = 0; i < 3; ++i)
for (size_t j = 0; j < 3; ++j)
for (size_t k = 0; k < 3; ++k)
do_match(shapes[i], shapes[j], shapes[k]);
// 4 arguments
for (size_t i = 0; i < 3; ++i)
for (size_t j = 0; j < 3; ++j)
for (size_t k = 0; k < 3; ++k)
for (size_t l = 0; l < 3; ++l)
do_match(shapes[i], shapes[j], shapes[k], shapes[l]);
}
}
//------------------------------------------------------------------------------
| 5,401 |
909 |
# Copyright 2021 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
from cumm.conv.bases import ConvEnum
from cumm.gemm.core.metaarray import MetaArray, seq
from cumm import dtypes
import pccm
from cumm.gemm.layout import TensorGeneric, to_stride
from cumm.common import TensorView, TensorViewHashKernel, TensorViewKernel, ThrustLib, GemmBasic
from cumm.gemm import codeops
from typing import List
from cumm.conv.params import ConvProblem
from cumm.gemm.mask_iters import MaskTileIterator, MaskTileIteratorParams
import numpy as np
from cumm.gemm import (thread_map)
class IndiceMaxPool(pccm.Class):
    """pccm code generator emitting CUDA kernels and host launchers for
    sparse indice max pooling (forward and backward).

    The generated launchers use a 2D configuration: threads in X cover
    feature channels, threads in Y cover the gathered (in, out) index pairs.
    """
    # TODO optimize this function
    def __init__(self):
        super().__init__()
        self.add_dependency(TensorViewKernel, TensorView, GemmBasic)

    @pccm.cuda.cuda_global_function
    def forward_kernel(self):
        """CUDA kernel: per gathered pair, out[out_idx, j] = max(out[out_idx, j], in[in_idx, j])."""
        code = pccm.FunctionCode()
        code.targ("T")
        code.arg("out_features", f"T*")
        code.arg("in_features", f"const T*")
        code.arg("out_indices", "const int*")
        code.arg("in_indices", "const int*")
        code.arg("size", "int")
        code.arg("num_features", "int")
        code.raw(f"""
        for (int i : tv::KernelLoopY<int>(size)) {{
            int in_idx = in_indices[i];
            int out_idx = out_indices[i];
            auto in_ptr = in_features + in_idx * num_features;
            auto out_ptr = out_features + out_idx * num_features;
            for (int j : tv::KernelLoopX<int>(num_features)) {{
                auto in = in_ptr[j];
                auto out = out_ptr[j];
                if (in > out){{
                    out_ptr[j] = in;
                }}
            }}
        }}
        """)
        return code

    @pccm.cuda.cuda_global_function
    def backward_kernel(self):
        """CUDA kernel: routes output gradients back to the input positions
        whose value equals the pooled maximum (accumulates into din)."""
        code = pccm.FunctionCode()
        code.targ("T")
        code.arg("out_features", f"const T*")
        code.arg("in_features", f"const T*")
        code.arg("dout_features", f"const T*")
        code.arg("din_features", f"T*")
        code.arg("out_indices", "const int*")
        code.arg("in_indices", "const int*")
        code.arg("size", "int")
        code.arg("num_features", "int")
        code.raw(f"""
        for (int i : tv::KernelLoopY<int>(size)) {{
            int in_idx_offset = in_indices[i] * num_features;
            int out_idx_offset = out_indices[i] * num_features;
            auto in_ptr = in_features + in_idx_offset;
            auto out_ptr = out_features + out_idx_offset;
            auto din_ptr = din_features + in_idx_offset;
            auto dout_ptr = dout_features + out_idx_offset;
            for (int j : tv::KernelLoopX<int>(num_features)) {{
                auto in = in_ptr[j];
                auto out = out_ptr[j];
                if (in == out){{
                    din_ptr[j] = din_ptr[j] + dout_ptr[j];
                }}
            }}
        }}
        """)
        return code

    @pccm.cuda.static_function
    def forward(self):
        """Host launcher for ``forward_kernel``: picks the largest feature
        tile (threads.x) from {512,...,16} not exceeding out.dim(1), with
        at most 512 threads per block, then dispatches on dtype."""
        code = pccm.FunctionCode()
        code.arg("out", "tv::Tensor")
        code.arg("in", "tv::Tensor")
        code.arg("out_inds", "tv::Tensor")
        code.arg("in_inds", "tv::Tensor")
        code.arg("stream", "std::uintptr_t", "0")
        code.raw(f"""
        auto nhot = out_inds.dim(0);
        auto cudastream = reinterpret_cast<cudaStream_t>(stream);
        tv::dispatch<float, double, tv::half_t, tv::bfloat16_t>(out.dtype(), [&](auto I){{
            using T = TV_DECLTYPE(I);
            constexpr int MaxThreads = 512;
            tv::cuda::Launch launcher(1);
            bool found = tv::dispatch_int_noexcept<512, 256, 128, 64, 32, 16>(out.dim(1), [](int my, int expect){{return my >= expect;}}, [&](auto V){{
                // if out.dim(1) > value in list above, run this function.
                // if a value is found, other value won't be executed.
                int NumFeatures = TV_DECLTYPE(V)::value;
                int Num0 = MaxThreads / NumFeatures;
                dim3 blocks(tv::div_up(out.dim(1), NumFeatures), tv::div_up(nhot, Num0));
                dim3 threads(NumFeatures, Num0);
                launcher = tv::cuda::Launch(blocks, threads, cudastream);
            }});
            if (!found){{
                int NumFeatures = 16;
                int Num0 = MaxThreads / NumFeatures;
                dim3 blocks(tv::div_up(out.dim(1), NumFeatures), tv::div_up(nhot, Num0));
                dim3 threads(NumFeatures, Num0);
                launcher = tv::cuda::Launch(blocks, threads, cudastream);
            }}
            launcher(forward_kernel<T>, out.data_ptr<T>(), in.data_ptr<const T>(),
                out_inds.data_ptr<const int>(), in_inds.data_ptr<const int>(), nhot, out.dim(1));
        }});
        """)
        return code

    @pccm.cuda.static_function
    def backward(self):
        """Host launcher for ``backward_kernel``; uses the same launch
        configuration strategy as :meth:`forward`."""
        code = pccm.FunctionCode()
        code.arg("out", "tv::Tensor")
        code.arg("in", "tv::Tensor")
        code.arg("dout", "tv::Tensor")
        code.arg("din", "tv::Tensor")
        code.arg("out_inds", "tv::Tensor")
        code.arg("in_inds", "tv::Tensor")
        code.arg("stream", "std::uintptr_t", "0")
        code.raw(f"""
        auto nhot = out_inds.dim(0);
        auto cudastream = reinterpret_cast<cudaStream_t>(stream);
        tv::dispatch<float, double, tv::half_t, tv::bfloat16_t>(out.dtype(), [&](auto I){{
            using T = TV_DECLTYPE(I);
            constexpr int MaxThreads = 512;
            tv::cuda::Launch launcher(1);
            bool found = tv::dispatch_int_noexcept<512, 256, 128, 64, 32, 16>(out.dim(1), [](int my, int expect){{return my >= expect;}}, [&](auto V){{
                // if out.dim(1) > value in list above, run this function.
                // if a value is found, other value won't be executed.
                int NumFeatures = TV_DECLTYPE(V)::value;
                int Num0 = MaxThreads / NumFeatures;
                dim3 blocks(tv::div_up(out.dim(1), NumFeatures), tv::div_up(nhot, Num0));
                dim3 threads(NumFeatures, Num0);
                launcher = tv::cuda::Launch(blocks, threads, cudastream);
            }});
            if (!found){{
                int NumFeatures = 16;
                int Num0 = MaxThreads / NumFeatures;
                dim3 blocks(tv::div_up(out.dim(1), NumFeatures), tv::div_up(nhot, Num0));
                dim3 threads(NumFeatures, Num0);
                launcher = tv::cuda::Launch(blocks, threads, cudastream);
            }}
            launcher(backward_kernel<T>, out.data_ptr<const T>(), in.data_ptr<const T>(),
                dout.data_ptr<const T>(), din.data_ptr<T>(),
                out_inds.data_ptr<const int>(), in_inds.data_ptr<const int>(), nhot, out.dim(1));
        }});
        """)
        return code
| 3,527 |
7,137 |
package io.onedev.server.entitymanager.impl;
import java.util.Collection;
import java.util.HashSet;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.hibernate.criterion.Restrictions;
import io.onedev.server.entitymanager.PullRequestQueryPersonalizationManager;
import io.onedev.server.model.Project;
import io.onedev.server.model.PullRequestQueryPersonalization;
import io.onedev.server.model.User;
import io.onedev.server.model.support.NamedQuery;
import io.onedev.server.persistence.annotation.Sessional;
import io.onedev.server.persistence.annotation.Transactional;
import io.onedev.server.persistence.dao.BaseEntityManager;
import io.onedev.server.persistence.dao.Dao;
import io.onedev.server.persistence.dao.EntityCriteria;
@Singleton
public class DefaultPullRequestQueryPersonalizationManager extends BaseEntityManager<PullRequestQueryPersonalization> 
		implements PullRequestQueryPersonalizationManager {

	@Inject
	public DefaultPullRequestQueryPersonalizationManager(Dao dao) {
		super(dao);
	}

	/** Looks up the personalization a user has for a given project, if any. */
	@Sessional
	@Override
	public PullRequestQueryPersonalization find(Project project, User user) {
		EntityCriteria<PullRequestQueryPersonalization> criteria = newCriteria();
		// The personalization is keyed by (project, user): both must match.
		criteria.add(Restrictions.eq("project", project));
		criteria.add(Restrictions.eq("user", user));
		criteria.setCacheable(true);
		return find(criteria);
	}

	/**
	 * Persists the personalization after pruning query watches that refer to
	 * queries which no longer exist; deletes the record entirely once it
	 * holds neither queries nor watches.
	 */
	@Transactional
	@Override
	public void save(PullRequestQueryPersonalization personalization) {
		// Names of all queries that may legitimately be watched: the user's
		// personal queries plus the project-wide ones.
		Collection<String> watchableNames = new HashSet<>();
		personalization.getQueries()
				.forEach(it -> watchableNames.add(NamedQuery.PERSONAL_NAME_PREFIX + it.getName()));
		personalization.getProject().getPullRequestSetting().getNamedQueries(true)
				.forEach(it -> watchableNames.add(NamedQuery.GLOBAL_NAME_PREFIX + it.getName()));
		personalization.getQueryWatchSupport().getQueryWatches().keySet().retainAll(watchableNames);

		boolean nothingPersonalized = personalization.getQueryWatchSupport().getQueryWatches().isEmpty()
				&& personalization.getQueries().isEmpty();
		if (nothingPersonalized) {
			// Drop the row instead of storing an empty personalization.
			if (!personalization.isNew())
				delete(personalization);
		} else {
			super.save(personalization);
		}
	}

}
| 715 |
6,304 |
/*
* Copyright 2021 Google LLC
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
/*
* This GM creates the same gradients as the Chromium test fillrect_gradient:
* http://osscs/chromium/chromium/src/+/main:third_party/blink/web_tests/fast/canvas/fillrect_gradient.html
*/
#include "gm/gm.h"
#include "include/core/SkCanvas.h"
#include "include/core/SkColor.h"
#include "include/core/SkPaint.h"
#include "include/core/SkPoint.h"
#include "include/core/SkRect.h"
#include "include/core/SkRefCnt.h"
#include "include/core/SkScalar.h"
#include "include/core/SkShader.h"
#include "include/core/SkSize.h"
#include "include/core/SkString.h"
#include "include/core/SkTileMode.h"
#include "include/effects/SkGradientShader.h"
// Grid layout of the GM: each gradient configuration occupies one row with
// two cells (linear rendering, then radial), separated by padding.
const int kCellSize = 50;
const int kNumColumns = 2;
const int kNumRows = 9;
const int kPadSize = 10;
// Recreates Chromium's fillrect_gradient layout test: nine gradient stop
// configurations, each drawn once as a linear and once as a radial gradient.
class FillrectGradientGM : public skiagm::GM {
public:
    FillrectGradientGM() {}

protected:
    // One gradient color stop: offset in [0,1] plus its color.
    struct GradientStop {
        float pos;
        SkColor color;
    };

    SkString onShortName() override {
        return SkString("fillrect_gradient");
    }

    SkISize onISize() override {
        return SkISize::Make(kNumColumns * (kCellSize + kPadSize),
                             kNumRows * (kCellSize + kPadSize));
    }

    // Draws the stops twice -- as a vertical linear gradient in the current
    // cell and as a radial gradient in the next column -- then advances the
    // canvas translation down to the next row.
    void drawGradient(SkCanvas* canvas, std::initializer_list<GradientStop> stops) {
        std::vector<SkColor> colors;
        std::vector<SkScalar> positions;
        colors.reserve(stops.size());
        positions.reserve(stops.size());

        for (const GradientStop& stop : stops) {
            colors.push_back(stop.color);
            positions.push_back(stop.pos);
        }

        // Vertical axis of the linear gradient (top to bottom of the cell).
        static constexpr SkPoint points[] = {
            SkPoint::Make(kCellSize, 0),
            SkPoint::Make(kCellSize, kCellSize),
        };

        // Draw the gradient linearly.
        sk_sp<SkShader> shader = SkGradientShader::MakeLinear(points,
                                                              colors.data(),
                                                              positions.data(),
                                                              colors.size(),
                                                              SkTileMode::kClamp);
        SkPaint paint;
        paint.setShader(shader);
        canvas->drawRect(SkRect::MakeXYWH(0, 0, kCellSize, kCellSize), paint);

        canvas->save();
        canvas->translate(kCellSize + kPadSize, 0);

        // Draw the gradient radially.
        shader = SkGradientShader::MakeRadial(SkPoint::Make(kCellSize / 2, kCellSize / 2),
                                              kCellSize / 2,
                                              colors.data(),
                                              positions.data(),
                                              colors.size(),
                                              SkTileMode::kClamp);
        paint.setShader(shader);
        canvas->drawRect(SkRect::MakeXYWH(0, 0, kCellSize, kCellSize), paint);

        canvas->restore();
        canvas->translate(0, kCellSize + kPadSize);
    }

    void onDraw(SkCanvas* canvas) override {
        // Simple gradient: Green to white
        this->drawGradient(canvas, {{0.0f, SK_ColorGREEN}, {1.0f, SK_ColorWHITE}});
        // Multiple sections: Green to white to red
        this->drawGradient(canvas,
                           {{0.0f, SK_ColorGREEN}, {0.5f, SK_ColorWHITE}, {1.0f, SK_ColorRED}});
        // No stops at 0.0 or 1.0: Larger green to white to larger red
        this->drawGradient(canvas,
                           {{0.4f, SK_ColorGREEN}, {0.5f, SK_ColorWHITE}, {0.6f, SK_ColorRED}});
        // Only one stop, at zero: Solid red
        this->drawGradient(canvas, {{0.0f, SK_ColorRED}});
        // Only one stop, at 1.0: Solid red
        this->drawGradient(canvas, {{1.0f, SK_ColorRED}});
        // Only one stop, in the middle: Solid red
        this->drawGradient(canvas, {{0.5f, SK_ColorRED}});
        // Disjoint gradients (multiple stops at the same offset)
        // Blue to white in the top (inner) half, red to yellow in the bottom (outer) half
        this->drawGradient(canvas,
                           {{0.0f, SK_ColorBLUE},
                            {0.5f, SK_ColorWHITE},
                            {0.5f, SK_ColorRED},
                            {1.0f, SK_ColorYELLOW}});
        // Ignored stops: Blue to white, red to yellow (same as previous)
        this->drawGradient(canvas,
                           {{0.0f, SK_ColorBLUE},
                            {0.5f, SK_ColorWHITE},
                            {0.5f, SK_ColorGRAY},
                            {0.5f, SK_ColorCYAN},
                            {0.5f, SK_ColorRED},
                            {1.0f, SK_ColorYELLOW}});
        // Unsorted stops: Blue to white, red to yellow
        // Unlike Chrome, we don't sort the stops, so this renders differently than the prior cell.
        this->drawGradient(canvas,
                           {{0.5f, SK_ColorWHITE},
                            {0.5f, SK_ColorGRAY},
                            {1.0f, SK_ColorYELLOW},
                            {0.5f, SK_ColorCYAN},
                            {0.5f, SK_ColorRED},
                            {0.0f, SK_ColorBLUE}});
    }

private:
    using INHERITED = skiagm::GM;
};

DEF_GM(return new FillrectGradientGM;)
| 2,855 |
1,010 |
<filename>hazelcast-jet-sql/src/test/java/com/hazelcast/jet/sql/impl/connector/infoschema/MappingsTableTest.java
/*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.sql.impl.connector.infoschema;
import com.hazelcast.jet.sql.impl.schema.Mapping;
import org.junit.Test;
import java.util.List;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.assertj.core.api.Assertions.assertThat;
public class MappingsTableTest {

    @Test
    public void test_rows() {
        // given: a single mapping registered under "table-schema"
        Mapping tableMapping = new Mapping(
                "table-name",
                "table-external-name",
                "table-type",
                emptyList(),
                singletonMap("key", "value")
        );
        MappingsTable mappingsTable =
                new MappingsTable("catalog", null, "table-schema", singletonList(tableMapping));

        // when
        List<Object[]> rows = mappingsTable.rows();

        // then: exactly one row mirroring the mapping fields, with the
        // options serialized as JSON
        Object[] expectedRow = {
                "catalog",
                "table-schema",
                "table-name",
                "table-external-name",
                "table-type",
                "{\"key\":\"value\"}"
        };
        assertThat(rows).containsExactly(expectedRow);
    }
}
| 749 |
1,814 |
<filename>library/src/main/java/io/github/luizgrp/sectionedrecyclerviewadapter/Section.java
package io.github.luizgrp.sectionedrecyclerviewadapter;
import android.view.View;
import android.view.ViewGroup;
import java.util.List;
import androidx.annotation.LayoutRes;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
/**
* Abstract Section to be used with {@link SectionedRecyclerViewAdapter}.
*/
@SuppressWarnings("WeakerAccess")
public abstract class Section {
public enum State {
LOADING,
LOADED,
FAILED,
EMPTY
}
    // Current lifecycle state; starts as LOADED.
    private State state = State.LOADED;

    // Visibility flag for the whole section. NOTE(review): presumably the
    // adapter skips invisible sections entirely -- confirm against
    // SectionedRecyclerViewAdapter.
    private boolean visible = true;

    @SuppressWarnings({"PMD.BeanMembersShouldSerialize", "PMD.AvoidFieldNameMatchingMethodName"})
    private boolean hasHeader;
    @SuppressWarnings({"PMD.BeanMembersShouldSerialize", "PMD.AvoidFieldNameMatchingMethodName"})
    private boolean hasFooter;

    // Layout resources for the section's view types; each may be null when
    // the matching *ViewWillBeProvided flag below is set instead.
    @LayoutRes
    private final Integer itemResourceId;
    @LayoutRes
    private final Integer headerResourceId;
    @LayoutRes
    private final Integer footerResourceId;
    @LayoutRes
    private final Integer loadingResourceId;
    @LayoutRes
    private final Integer failedResourceId;
    @LayoutRes
    private final Integer emptyResourceId;

    // When true, the corresponding view is supplied programmatically rather
    // than inflated from a layout resource.
    private final boolean itemViewWillBeProvided;
    private final boolean headerViewWillBeProvided;
    private final boolean footerViewWillBeProvided;
    private final boolean loadingViewWillBeProvided;
    private final boolean failedViewWillBeProvided;
    private final boolean emptyViewWillBeProvided;
    /**
     * Create a Section object based on {@link SectionParameters}.
     *
     * @param sectionParameters section parameters; must not be null
     */
    public Section(@NonNull final SectionParameters sectionParameters) {
        this.itemResourceId = sectionParameters.itemResourceId;
        this.headerResourceId = sectionParameters.headerResourceId;
        this.footerResourceId = sectionParameters.footerResourceId;
        this.loadingResourceId = sectionParameters.loadingResourceId;
        this.failedResourceId = sectionParameters.failedResourceId;
        this.emptyResourceId = sectionParameters.emptyResourceId;
        this.itemViewWillBeProvided = sectionParameters.itemViewWillBeProvided;
        this.headerViewWillBeProvided = sectionParameters.headerViewWillBeProvided;
        this.footerViewWillBeProvided = sectionParameters.footerViewWillBeProvided;
        this.loadingViewWillBeProvided = sectionParameters.loadingViewWillBeProvided;
        this.failedViewWillBeProvided = sectionParameters.failedViewWillBeProvided;
        this.emptyViewWillBeProvided = sectionParameters.emptyViewWillBeProvided;

        // A header/footer exists when either a layout resource was given or
        // the view will be provided programmatically.
        this.hasHeader = this.headerResourceId != null || this.headerViewWillBeProvided;
        this.hasFooter = this.footerResourceId != null || this.footerViewWillBeProvided;
    }
/**
* Set the State of this Section.
*
* @param state state of this section
*/
public final void setState(final State state) {
switch (state) {
case LOADING:
if (loadingResourceId == null && !loadingViewWillBeProvided) {
throw new IllegalStateException(
"Resource id for 'loading state' should be provided or 'loadingViewWillBeProvided' should be set");
}
break;
case FAILED:
if (failedResourceId == null && !failedViewWillBeProvided) {
throw new IllegalStateException("Resource id for 'failed state' should be provided or 'failedViewWillBeProvided' should be set");
}
break;
case EMPTY:
if (emptyResourceId == null && !emptyViewWillBeProvided) {
throw new IllegalStateException("Resource id for 'empty state' should be provided or 'emptyViewWillBeProvided' should be set");
}
break;
default:
break;
}
this.state = state;
}
/**
* Return the current State of this Section.
*
* @return current state of this section
*/
public final State getState() {
return state;
}
/**
* Check if this Section is visible.
*
* @return true if this Section is visible
*/
public final boolean isVisible() {
return visible;
}
/**
* Set if this Section is visible.
*
* @param visible true if this Section is visible
*/
public final void setVisible(final boolean visible) {
this.visible = visible;
}
/**
* Check if this Section has a header.
*
* @return true if this Section has a header
*/
public final boolean hasHeader() {
return hasHeader;
}
/**
* Set if this Section has header.
*
* @param hasHeader true if this Section has a header
*/
public final void setHasHeader(final boolean hasHeader) {
this.hasHeader = hasHeader;
}
/**
* Check if this Section has a footer.
*
* @return true if this Section has a footer
*/
public final boolean hasFooter() {
return hasFooter;
}
/**
* Set if this Section has footer.
*
* @param hasFooter true if this Section has a footer
*/
public final void setHasFooter(final boolean hasFooter) {
this.hasFooter = hasFooter;
}
/**
* Return whether the item view is provided through {@link #getItemView(ViewGroup)}.
* If false, the item view is inflated using the resource from {@link #getItemResourceId()}.
*
* @return whether the item view is provided through {@link #getItemView(ViewGroup)}.
*/
public final boolean isItemViewWillBeProvided() {
return itemViewWillBeProvided;
}
/**
* Return the layout resource id of the item.
*
* @return layout resource id of the item
*/
public final Integer getItemResourceId() {
return itemResourceId;
}
/**
* Return whether the header view is provided through {@link #getHeaderView(ViewGroup)}.
* If false, the header view is inflated using the resource from
* {@link #getHeaderResourceId()}.
*
* @return whether the header view is provided through {@link #getHeaderView(ViewGroup)}.
*/
public final boolean isHeaderViewWillBeProvided() {
return headerViewWillBeProvided;
}
/**
* Return the layout resource id of the header.
*
* @return layout resource id of the header
*/
public final Integer getHeaderResourceId() {
return headerResourceId;
}
/**
* Return whether the footer view is provided through {@link #getFooterView(ViewGroup)}.
* If false, the footer view is inflated using the resource from
* {@link #getFooterResourceId()}.
*
* @return whether the footer view is provided through {@link #getFooterView(ViewGroup)}.
*/
public final boolean isFooterViewWillBeProvided() {
return footerViewWillBeProvided;
}
/**
* Return the layout resource id of the footer.
*
* @return layout resource id of the footer
*/
public final Integer getFooterResourceId() {
return footerResourceId;
}
/**
* Return whether the loading view is provided through {@link #getLoadingView(ViewGroup)}.
* If false, the loading view is inflated using the resource from
* {@link #getLoadingResourceId()}.
*
* @return whether the loading view is provided through {@link #getLoadingView(ViewGroup)}.
*/
public final boolean isLoadingViewWillBeProvided() {
return loadingViewWillBeProvided;
}
/**
* Return the layout resource id of the loading view.
*
* @return layout resource id of the loading view
*/
public final Integer getLoadingResourceId() {
return loadingResourceId;
}
/**
* Return whether the failed view is provided through {@link #getFailedView(ViewGroup)}.
* If false, the failed view is inflated using the resource from
* {@link #getFailedResourceId()}.
*
* @return whether the failed view is provided through {@link #getFailedView(ViewGroup)}.
*/
public final boolean isFailedViewWillBeProvided() {
return failedViewWillBeProvided;
}
/**
* Return the layout resource id of the failed view.
*
* @return layout resource id of the failed view
*/
public final Integer getFailedResourceId() {
return failedResourceId;
}
/**
* Return whether the empty view is provided through {@link #getEmptyView(ViewGroup)}.
* If false, the empty view is inflated using the resource from
* {@link #getEmptyResourceId()}.
*
* @return whether the empty view is provided through {@link #getEmptyView(ViewGroup)}.
*/
public final boolean isEmptyViewWillBeProvided() {
return emptyViewWillBeProvided;
}
/**
* Return the layout resource id of the empty view.
*
* @return layout resource id of the empty view
*/
public final Integer getEmptyResourceId() {
return emptyResourceId;
}
/**
* Return the total of items of this Section, including content items (according to the section
* state) plus header and footer.
*
* @return total of items of this section
*/
public final int getSectionItemsTotal() {
int contentItemsTotal;
switch (state) {
case LOADED:
contentItemsTotal = getContentItemsTotal();
break;
case LOADING:
case FAILED:
case EMPTY:
contentItemsTotal = 1;
break;
default:
throw new IllegalStateException("Invalid state");
}
return contentItemsTotal + (hasHeader ? 1 : 0) + (hasFooter ? 1 : 0);
}
/**
* Return the total of items of this Section.
*
* @return total of items of this Section
*/
public abstract int getContentItemsTotal();
/**
* Creates the View for a single Item. This must be implemented if and only if
* {@link #isItemViewWillBeProvided()} is true.
*
* @param parent The parent view. Note that there is no need to attach the new view.
* @return View for an Item of this Section.
*/
public View getItemView(@SuppressWarnings("unused") final ViewGroup parent) {
throw new UnsupportedOperationException(
"You need to implement getItemView() if you set itemViewWillBeProvided");
}
/**
* Return the ViewHolder for a single Item of this Section.
*
* @param view View created by getItemView or inflated resource returned by getItemResourceId
* @return ViewHolder for the Item of this Section
*/
public abstract RecyclerView.ViewHolder getItemViewHolder(View view);
/**
* Bind the data to the ViewHolder for an Item of this Section.
*
* @param holder ViewHolder for the Item of this Section
* @param position position of the item in the Section, not in the RecyclerView
*/
public abstract void onBindItemViewHolder(RecyclerView.ViewHolder holder, int position);
/**
* Bind the data to the ViewHolder for an Item of this Section.
*
* @param holder ViewHolder for the Item of this Section
* @param position position of the item in the Section, not in the RecyclerView
* @param payloads A non-null and non-empty list of merged payloads.
*/
public void onBindItemViewHolder(final RecyclerView.ViewHolder holder, final int position,
@SuppressWarnings("unused") final List<Object> payloads) {
this.onBindItemViewHolder(holder, position);
}
/**
* Creates the View for the Header. This must be implemented if and only if
* {@link #isHeaderViewWillBeProvided()} is true.
*
* @param parent The parent view. Note that there is no need to attach the new view.
* @return View for the Header of this Section.
*/
public View getHeaderView(@SuppressWarnings("unused") final ViewGroup parent) {
throw new UnsupportedOperationException(
"You need to implement getHeaderView() if you set headerViewWillBeProvided");
}
/**
* Return the ViewHolder for the Header of this Section.
*
* @param view View inflated by resource returned by getHeaderResourceId
* @return ViewHolder for the Header of this Section
*/
public RecyclerView.ViewHolder getHeaderViewHolder(final View view) {
throw new UnsupportedOperationException(
"You need to implement getHeaderViewHolder() if you set headerResourceId");
}
/**
* Bind the data to the ViewHolder for the Header of this Section.
*
* @param holder ViewHolder for the Header of this Section
*/
@SuppressWarnings("PMD.EmptyMethodInAbstractClassShouldBeAbstract")
public void onBindHeaderViewHolder(final RecyclerView.ViewHolder holder) {
// Nothing to bind here.
}
/**
* Bind the data to the ViewHolder for the Header of this Section.
*
* @param holder ViewHolder for the Header of this Section
* @param payloads A non-null and non-empty list of merged payloads.
*/
@SuppressWarnings("PMD.EmptyMethodInAbstractClassShouldBeAbstract")
public void onBindHeaderViewHolder(final RecyclerView.ViewHolder holder,
@SuppressWarnings("unused") final List<Object> payloads) {
this.onBindHeaderViewHolder(holder);
}
/**
* Creates the View for the Footer. This must be implemented if and only if
* {@link #isFooterViewWillBeProvided()} is true.
*
* @param parent The parent view. Note that there is no need to attach the new view.
* @return View for the Footer of this Section.
*/
public View getFooterView(@SuppressWarnings("unused") final ViewGroup parent) {
throw new UnsupportedOperationException(
"You need to implement getFooterView() if you set footerViewWillBeProvided");
}
/**
* Return the ViewHolder for the Footer of this Section.
*
* @param view View inflated by resource returned by getFooterResourceId
* @return ViewHolder for the Footer of this Section
*/
public RecyclerView.ViewHolder getFooterViewHolder(final View view) {
throw new UnsupportedOperationException(
"You need to implement getFooterViewHolder() if you set footerResourceId");
}
/**
* Bind the data to the ViewHolder for the Footer of this Section.
*
* @param holder ViewHolder for the Footer of this Section
*/
@SuppressWarnings("PMD.EmptyMethodInAbstractClassShouldBeAbstract")
public void onBindFooterViewHolder(final RecyclerView.ViewHolder holder) {
// Nothing to bind here.
}
/**
* Bind the data to the ViewHolder for the Footer of this Section.
*
* @param holder ViewHolder for the Footer of this Section
* @param payloads A non-null and non-empty list of merged payloads.
*/
@SuppressWarnings("PMD.EmptyMethodInAbstractClassShouldBeAbstract")
public void onBindFooterViewHolder(final RecyclerView.ViewHolder holder,
@SuppressWarnings("unused") final List<Object> payloads) {
this.onBindFooterViewHolder(holder);
}
/**
* Creates the View for the Loading state. This must be implemented if and only if
* {@link #isLoadingViewWillBeProvided()} is true.
*
* @param parent The parent view. Note that there is no need to attach the new view.
* @return View for the Loading state of this Section.
*/
public View getLoadingView(@SuppressWarnings("unused") final ViewGroup parent) {
throw new UnsupportedOperationException(
"You need to implement getLoadingView() if you set loadingViewWillBeProvided");
}
/**
* Return the ViewHolder for the Loading state of this Section.
*
* @param view View inflated by resource returned by getItemResourceId
* @return ViewHolder for the Loading state of this Section
*/
public RecyclerView.ViewHolder getLoadingViewHolder(final View view) {
throw new UnsupportedOperationException(
"You need to implement getLoadingViewHolder() if you set loadingResourceId");
}
/**
* Bind the data to the ViewHolder for Loading state of this Section.
*
* @param holder ViewHolder for the Loading state of this Section
*/
@SuppressWarnings({"EmptyMethod", "PMD.EmptyMethodInAbstractClassShouldBeAbstract"})
public void onBindLoadingViewHolder(@SuppressWarnings("unused") final RecyclerView.ViewHolder holder) {
// Nothing to bind here.
}
/**
* Bind the data to the ViewHolder for Loading state of this Section.
*
* @param holder ViewHolder for the Loading state of this Section
* @param payloads A non-null and non-empty list of merged payloads.
*/
@SuppressWarnings({"EmptyMethod", "PMD.EmptyMethodInAbstractClassShouldBeAbstract"})
public void onBindLoadingViewHolder(final RecyclerView.ViewHolder holder,
@SuppressWarnings("unused") final List<Object> payloads) {
this.onBindLoadingViewHolder(holder);
}
/**
* Creates the View for the Failed state. This must be implemented if and only if
* {@link #isFailedViewWillBeProvided()} is true.
*
* @param parent The parent view. Note that there is no need to attach the new view.
* @return View for the Failed state of this Section.
*/
public View getFailedView(@SuppressWarnings("unused") final ViewGroup parent) {
throw new UnsupportedOperationException(
"You need to implement getFailedView() if you set failedViewWillBeProvided");
}
/**
* Return the ViewHolder for the Failed state of this Section.
*
* @param view View inflated by resource returned by getItemResourceId
* @return ViewHolder for the Failed of this Section
*/
public RecyclerView.ViewHolder getFailedViewHolder(final View view) {
throw new UnsupportedOperationException(
"You need to implement getFailedViewHolder() if you set failedResourceId");
}
/**
* Bind the data to the ViewHolder for the Failed state of this Section.
*
* @param holder ViewHolder for the Failed state of this Section
*/
@SuppressWarnings({"EmptyMethod", "PMD.EmptyMethodInAbstractClassShouldBeAbstract"})
public void onBindFailedViewHolder(final RecyclerView.ViewHolder holder) {
// Nothing to bind here.
}
/**
* Bind the data to the ViewHolder for the Failed state of this Section.
*
* @param holder ViewHolder for the Failed state of this Section
* @param payloads A non-null and non-empty list of merged payloads.
*/
@SuppressWarnings({"EmptyMethod", "PMD.EmptyMethodInAbstractClassShouldBeAbstract"})
public void onBindFailedViewHolder(final RecyclerView.ViewHolder holder,
@SuppressWarnings("unused") final List<Object> payloads) {
this.onBindFailedViewHolder(holder);
}
/**
* Creates the View for the Empty state. This must be implemented if and only if
* {@link #isEmptyViewWillBeProvided()} is true.
*
* @param parent The parent view. Note that there is no need to attach the new view.
* @return View for the Empty state of this Section.
*/
public View getEmptyView(@SuppressWarnings("unused") final ViewGroup parent) {
throw new UnsupportedOperationException(
"You need to implement getEmptyView() if you set emptyViewWillBeProvided");
}
/**
* Return the ViewHolder for the Empty state of this Section.
*
* @param view View inflated by resource returned by getItemResourceId
* @return ViewHolder for the Empty of this Section
*/
public RecyclerView.ViewHolder getEmptyViewHolder(final View view) {
throw new UnsupportedOperationException(
"You need to implement getEmptyViewHolder() if you set emptyResourceId");
}
/**
* Bind the data to the ViewHolder for the Empty state of this Section.
*
* @param holder ViewHolder for the Empty state of this Section
*/
@SuppressWarnings({"EmptyMethod", "PMD.EmptyMethodInAbstractClassShouldBeAbstract"})
public void onBindEmptyViewHolder(@SuppressWarnings("unused") final RecyclerView.ViewHolder holder) {
// Nothing to bind here.
}
/**
* Bind the data to the ViewHolder for the Empty state of this Section.
*
* @param holder ViewHolder for the Empty state of this Section
* @param payloads A non-null and non-empty list of merged payloads.
*/
@SuppressWarnings({"EmptyMethod", "PMD.EmptyMethodInAbstractClassShouldBeAbstract"})
public void onBindEmptyViewHolder(final RecyclerView.ViewHolder holder,
@SuppressWarnings("unused") final List<Object> payloads) {
this.onBindEmptyViewHolder(holder);
}
}
| 7,826 |
563 |
<reponame>RRua/droidbot<gh_stars>100-1000
import json
import logging
import subprocess
import time
from .input_event import EventLog
from .input_policy import UtgBasedInputPolicy, UtgNaiveSearchPolicy, UtgGreedySearchPolicy, \
UtgReplayPolicy, \
ManualPolicy, \
POLICY_NAIVE_DFS, POLICY_GREEDY_DFS, \
POLICY_NAIVE_BFS, POLICY_GREEDY_BFS, \
POLICY_REPLAY, POLICY_MEMORY_GUIDED, \
POLICY_MANUAL, POLICY_MONKEY, POLICY_NONE
# Fallback configuration used when the caller does not override these values.
DEFAULT_POLICY = POLICY_GREEDY_DFS
DEFAULT_EVENT_INTERVAL = 1  # seconds to wait between consecutive input events
DEFAULT_EVENT_COUNT = 100000000  # effectively unbounded number of events
DEFAULT_TIMEOUT = -1  # presumably means "no timeout" -- confirm against callers
class UnknownInputException(Exception):
    """Raised when an input event or policy cannot be recognized."""
    pass
class InputManager(object):
    """
    This class manages all events to send during app running.

    It owns the input policy (or drives monkey/manual modes directly),
    spaces events by ``event_interval``, and records each event via
    :class:`EventLog`.
    """

    def __init__(self, device, app, policy_name, random_input,
                 event_count, event_interval,
                 script_path=None, profiling_method=None, master=None,
                 replay_output=None):
        """
        manage input event sent to the target device
        :param device: instance of Device
        :param app: instance of App
        :param policy_name: policy of generating events, string
        :param random_input: whether the policy should randomize its input
        :param event_count: maximum number of events to send
        :param event_interval: seconds to wait between two events
        :param script_path: optional path to a DroidBot script (JSON file)
        :param profiling_method: optional profiling method forwarded to EventLog
        :param master: optional master endpoint forwarded to UTG-based policies
        :param replay_output: output dir of a previous run, used by the replay policy
        :return:
        """
        self.logger = logging.getLogger('InputEventManager')
        self.enabled = True
        self.device = device
        self.app = app
        self.policy_name = policy_name
        self.random_input = random_input
        self.events = []
        self.policy = None
        self.script = None
        self.event_count = event_count
        self.event_interval = event_interval
        self.replay_output = replay_output
        self.monkey = None

        if script_path is not None:
            # Use a context manager so the script file is always closed
            # (previously the file handle was leaked).
            with open(script_path, 'r') as f:
                script_dict = json.load(f)
            from .input_script import DroidBotScript
            self.script = DroidBotScript(script_dict)

        self.policy = self.get_input_policy(device, app, master)
        self.profiling_method = profiling_method

    def get_input_policy(self, device, app, master):
        """Instantiate the input policy matching ``self.policy_name``.

        Returns None for the "none" and "monkey" policies, which are
        handled directly in :meth:`start`.
        """
        if self.policy_name == POLICY_NONE:
            input_policy = None
        elif self.policy_name == POLICY_MONKEY:
            input_policy = None
        elif self.policy_name in [POLICY_NAIVE_DFS, POLICY_NAIVE_BFS]:
            input_policy = UtgNaiveSearchPolicy(device, app, self.random_input, self.policy_name)
        elif self.policy_name in [POLICY_GREEDY_DFS, POLICY_GREEDY_BFS]:
            input_policy = UtgGreedySearchPolicy(device, app, self.random_input, self.policy_name)
        elif self.policy_name == POLICY_MEMORY_GUIDED:
            # Imported lazily; input_policy2 has heavier dependencies.
            from .input_policy2 import MemoryGuidedPolicy
            input_policy = MemoryGuidedPolicy(device, app, self.random_input)
        elif self.policy_name == POLICY_REPLAY:
            input_policy = UtgReplayPolicy(device, app, self.replay_output)
        elif self.policy_name == POLICY_MANUAL:
            input_policy = ManualPolicy(device, app)
        else:
            self.logger.warning("No valid input policy specified. Using policy \"none\".")
            input_policy = None
        if isinstance(input_policy, UtgBasedInputPolicy):
            input_policy.script = self.script
            input_policy.master = master
        return input_policy

    def add_event(self, event):
        """
        add one event to the event list
        :param event: the event to be added, should be subclass of AppEvent
        :return:
        """
        if event is None:
            return
        self.events.append(event)

        event_log = EventLog(self.device, self.app, event, self.profiling_method)
        event_log.start()
        # Wait at least one interval, and keep waiting while the device asks
        # us to pause event sending.
        while True:
            time.sleep(self.event_interval)
            if not self.device.pause_sending_event:
                break
        event_log.stop()

    def start(self):
        """
        start sending event
        """
        self.logger.info("start sending events, policy is %s" % self.policy_name)
        try:
            if self.policy is not None:
                self.policy.start(self)
            elif self.policy_name == POLICY_NONE:
                self.device.start_app(self.app)
                if self.event_count == 0:
                    return
                while self.enabled:
                    time.sleep(1)
            elif self.policy_name == POLICY_MONKEY:
                throttle = self.event_interval * 1000
                monkey_cmd = "adb -s %s shell monkey %s --ignore-crashes --ignore-security-exceptions" \
                             " --throttle %d -v %d" % \
                             (self.device.serial,
                              "" if self.app.get_package_name() is None else "-p " + self.app.get_package_name(),
                              throttle,
                              self.event_count)
                self.monkey = subprocess.Popen(monkey_cmd.split(),
                                               stdout=subprocess.PIPE,
                                               stderr=subprocess.PIPE)
                # stdout is a binary stream, so the EOF sentinel must be b''.
                # With the old str sentinel '' the iterator never terminated
                # and this loop spun forever once monkey exited.
                for monkey_out_line in iter(self.monkey.stdout.readline, b''):
                    if not isinstance(monkey_out_line, str):
                        monkey_out_line = monkey_out_line.decode()
                    self.logger.info(monkey_out_line)
                # may be disturbed from outside
                if self.monkey is not None:
                    self.monkey.wait()
            elif self.policy_name == POLICY_MANUAL:
                self.device.start_app(self.app)
                while self.enabled:
                    keyboard_input = input("press ENTER to save current state, type q to exit...")
                    if keyboard_input.startswith('q'):
                        break
                    state = self.device.get_current_state()
                    if state is not None:
                        state.save2dir()
        except KeyboardInterrupt:
            pass
        self.stop()
        self.logger.info("Finish sending events")

    def stop(self):
        """
        stop sending event
        """
        if self.monkey:
            if self.monkey.returncode is None:
                # Monkey subprocess is still running; terminate it and also
                # kill the on-device monkey process if one is found.
                self.monkey.terminate()
            self.monkey = None
            pid = self.device.get_app_pid("com.android.commands.monkey")
            if pid is not None:
                self.device.adb.shell("kill -9 %d" % pid)
        self.enabled = False
| 3,255 |
423 |
package com.googlecode.javaewah;
/*
* Copyright 2009-2016, <NAME>, <NAME>, <NAME>, <NAME>, Google Inc., <NAME>, <NAME>, <NAME>, <NAME>
* Licensed under the Apache License, Version 2.0.
*/
/**
 * The IntIterator interface is used to iterate over a stream of integers.
 *
 * @author <NAME>
 * @since 0.2.0
 */
public interface IntIterator {

    /**
     * Is there more?
     *
     * @return true, if there is more, false otherwise
     */
    boolean hasNext();

    /**
     * Return the next integer. Callers should check {@link #hasNext()} first;
     * behavior when the iterator is exhausted is unspecified by this interface.
     *
     * @return the integer
     */
    int next();
}
| 207 |
854 |
<reponame>rakhi2001/ecom7<gh_stars>100-1000
__________________________________________________________________________________________________
sample 5 ms submission
class Solution {
    /**
     * Returns the length of the longest contiguous subarray with an equal
     * number of 0s and 1s.
     *
     * Tracks a running "balance" (+1 for each 1, -1 for each 0); two indices
     * with the same balance bound a balanced subarray. The earliest index of
     * each balance is stored in an array offset by n, avoiding a hash map.
     */
    public int findMaxLength(int[] nums) {
        final int n = nums.length;
        // firstSeen[balance + n] = earliest index where that balance occurred;
        // -2 marks "never seen"; balance 0 is seeded at virtual index -1.
        int[] firstSeen = new int[2 * n + 1];
        java.util.Arrays.fill(firstSeen, -2);
        firstSeen[n] = -1;

        int balance = 0;
        int best = 0;
        for (int i = 0; i < n; i++) {
            balance += (nums[i] == 0) ? -1 : 1;
            int slot = balance + n;
            if (firstSeen[slot] == -2) {
                firstSeen[slot] = i;
            } else {
                best = Math.max(best, i - firstSeen[slot]);
            }
        }
        return best;
    }
}
__________________________________________________________________________________________________
sample 52188 kb submission
class Solution {
    /**
     * Returns the length of the longest contiguous subarray containing an
     * equal number of 0s and 1s.
     *
     * Fixes over the previous version: the input array is no longer mutated
     * (it used to rewrite every 0 to -1 in place, a surprising side effect
     * for callers), the dead {@code endIndex} bookkeeping is removed, and
     * each prefix sum is looked up in the map only once.
     *
     * @param nums array of 0s and 1s
     * @return length of the longest balanced subarray (0 if none exists)
     */
    public int findMaxLength(int[] nums) {
        // firstIndex maps a running balance to the earliest index at which it
        // occurred; balance 0 is seeded at virtual index -1 so a balanced
        // prefix is handled uniformly.
        Map<Integer, Integer> firstIndex = new HashMap<>();
        firstIndex.put(0, -1);

        int sum = 0;
        int maxLen = 0;
        for (int i = 0; i < nums.length; i++) {
            sum += (nums[i] == 0) ? -1 : 1;
            Integer seen = firstIndex.get(sum);
            if (seen == null) {
                firstIndex.put(sum, i);
            } else {
                maxLen = Math.max(maxLen, i - seen);
            }
        }
        return maxLen;
    }
}
__________________________________________________________________________________________________
| 851 |
3,222 |
<gh_stars>1000+
package org.apache.maven.repository.legacy.resolver.transform;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.List;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.DefaultRepositoryRequest;
import org.apache.maven.artifact.repository.RepositoryRequest;
import org.apache.maven.artifact.repository.metadata.ArtifactRepositoryMetadata;
import org.apache.maven.artifact.repository.metadata.Metadata;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadata;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadataManager;
import org.apache.maven.artifact.repository.metadata.RepositoryMetadataResolutionException;
import org.apache.maven.artifact.repository.metadata.SnapshotArtifactRepositoryMetadata;
import org.apache.maven.artifact.repository.metadata.Versioning;
import org.apache.maven.artifact.resolver.ArtifactNotFoundException;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.repository.legacy.WagonManager;
import org.codehaus.plexus.component.annotations.Requirement;
import org.codehaus.plexus.logging.AbstractLogEnabled;
/**
 * Describes a version transformation during artifact resolution.
 *
 * @author <a href="mailto:<EMAIL>"><NAME></a>
 * TODO try and refactor to remove abstract methods - not particular happy about current design
 */
public abstract class AbstractVersionTransformation
    extends AbstractLogEnabled
    implements ArtifactTransformation
{
    // Resolves and merges repository metadata (maven-metadata.xml) for artifacts.
    @Requirement
    protected RepositoryMetadataManager repositoryMetadataManager;

    @Requirement
    protected WagonManager wagonManager;

    /**
     * Wraps the repositories into a {@link RepositoryRequest} and delegates to
     * the request-based overload.
     *
     * @param artifact the artifact being resolved
     * @param remoteRepositories remote repositories to consult
     * @param localRepository the local repository
     * @throws ArtifactResolutionException if resolution fails
     * @throws ArtifactNotFoundException if the artifact cannot be found
     */
    public void transformForResolve( Artifact artifact, List<ArtifactRepository> remoteRepositories,
                                     ArtifactRepository localRepository )
        throws ArtifactResolutionException, ArtifactNotFoundException
    {
        RepositoryRequest request = new DefaultRepositoryRequest();
        request.setLocalRepository( localRepository );
        request.setRemoteRepositories( remoteRepositories );
        transformForResolve( artifact, request );
    }

    /**
     * Convenience overload that wraps the repositories into a
     * {@link RepositoryRequest} before resolving the version.
     *
     * @param artifact the artifact whose version is being resolved
     * @param localRepository the local repository
     * @param remoteRepositories remote repositories to consult
     * @return the resolved version string
     * @throws RepositoryMetadataResolutionException if metadata resolution fails
     */
    protected String resolveVersion( Artifact artifact, ArtifactRepository localRepository,
                                     List<ArtifactRepository> remoteRepositories )
        throws RepositoryMetadataResolutionException
    {
        RepositoryRequest request = new DefaultRepositoryRequest();
        request.setLocalRepository( localRepository );
        request.setRemoteRepositories( remoteRepositories );
        return resolveVersion( artifact, request );
    }

    /**
     * Resolves the concrete version for the given artifact from repository
     * metadata, falling back to the artifact's base version when the metadata
     * carries no versioning information. As a side effect, the resolved
     * metadata is attached to the artifact via {@code artifact.addMetadata}.
     *
     * @param artifact the artifact whose version is being resolved
     * @param request repository request describing local/remote repositories
     * @return the resolved version string (never null)
     * @throws RepositoryMetadataResolutionException if metadata resolution fails
     */
    protected String resolveVersion( Artifact artifact, RepositoryRequest request )
        throws RepositoryMetadataResolutionException
    {
        RepositoryMetadata metadata;
        // Don't use snapshot metadata for LATEST (which isSnapshot returns true for)
        if ( !artifact.isSnapshot() || Artifact.LATEST_VERSION.equals( artifact.getBaseVersion() ) )
        {
            metadata = new ArtifactRepositoryMetadata( artifact );
        }
        else
        {
            metadata = new SnapshotArtifactRepositoryMetadata( artifact );
        }

        repositoryMetadataManager.resolve( metadata, request );

        artifact.addMetadata( metadata );

        Metadata repoMetadata = metadata.getMetadata();
        String version = null;
        if ( repoMetadata != null && repoMetadata.getVersioning() != null )
        {
            // Subclasses decide how to derive the version (e.g. from snapshot
            // or release versioning information).
            version = constructVersion( repoMetadata.getVersioning(), artifact.getBaseVersion() );
        }

        if ( version == null )
        {
            // use the local copy, or if it doesn't exist - go to the remote repo for it
            version = artifact.getBaseVersion();
        }

        // TODO also do this logging for other metadata?
        // TODO figure out way to avoid duplicated message
        if ( getLogger().isDebugEnabled() )
        {
            if ( !version.equals( artifact.getBaseVersion() ) )
            {
                String message = artifact.getArtifactId() + ": resolved to version " + version;
                if ( artifact.getRepository() != null )
                {
                    message += " from repository " + artifact.getRepository().getId();
                }
                else
                {
                    message += " from local repository";
                }
                getLogger().debug( message );
            }
            else
            {
                // Locally installed file is newer, don't use the resolved version
                getLogger().debug( artifact.getArtifactId() + ": using locally installed snapshot" );
            }
        }
        return version;
    }

    /**
     * Derives a concrete version from the given versioning metadata.
     *
     * @param versioning versioning section of the resolved repository metadata
     * @param baseVersion the artifact's base version
     * @return the derived version, or null if none could be determined
     */
    protected abstract String constructVersion( Versioning versioning, String baseVersion );
}
| 1,984 |
335 |
{
"word": "Horn",
"definitions": [
"A hard permanent outgrowth, often curved and pointed, found in pairs on the heads of cattle, sheep, goats, giraffes, etc. and consisting of a core of bone encased in keratinized skin.",
"A woolly keratinized outgrowth, occurring singly or one behind another, on the snout of a rhinoceros.",
"A deer's antler.",
"A projection resembling a horn on the head of another animal, e.g. a snail's tentacle or the tuft of a horned owl.",
"A pair of horns as an emblem of a cuckold.",
"Marital infidelity.",
"The substance of which horns are composed.",
"A receptacle made of horn, such as a drinking container or powder flask.",
"A horn-shaped projection or object.",
"A sharp promontory or mountain peak.",
"Cape Horn.",
"An arm or branch of a river or bay.",
"Each of the extremities of a crescent moon.",
"An erect penis.",
"A wind instrument, conical in shape or wound into a spiral, originally made from an animal horn (now typically brass) and played by lip vibration.",
"(in jazz and popular music) any wind instrument.",
"A device sounding a warning or other signal."
],
"parts-of-speech": "Noun"
}
| 444 |
1,599 |
<gh_stars>1000+
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Phase Estimators."""
from .phase_estimator import PhaseEstimator
from .phase_estimation import PhaseEstimation
from .phase_estimation_result import PhaseEstimationResult
from .phase_estimation_scale import PhaseEstimationScale
from .hamiltonian_phase_estimation import HamiltonianPhaseEstimation
from .hamiltonian_phase_estimation_result import HamiltonianPhaseEstimationResult
from .ipe import IterativePhaseEstimation
# Names re-exported as the public API of the phase-estimation package.
__all__ = [
    "PhaseEstimator",
    "PhaseEstimation",
    "PhaseEstimationResult",
    "PhaseEstimationScale",
    "HamiltonianPhaseEstimation",
    "HamiltonianPhaseEstimationResult",
    "IterativePhaseEstimation",
]
| 327 |
4,140 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.session;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hadoop.hive.ql.security.authorization.PrivilegeRegistry;
public class CreateTableAutomaticGrant {
// Per-principal automatic grants parsed from configuration:
// principal name -> privileges to grant on newly created tables.
// Each map may be null when the corresponding config value is empty.
private Map<String, List<PrivilegeGrantInfo>> userGrants;
private Map<String, List<PrivilegeGrantInfo>> groupGrants;
private Map<String, List<PrivilegeGrantInfo>> roleGrants;
// the owner can change, also owner might appear in user grants as well
// so keep owner privileges separate from userGrants
private List<PrivilegeGrantInfo> ownerGrant;
public static CreateTableAutomaticGrant create(HiveConf conf)
throws HiveException {
CreateTableAutomaticGrant grants = new CreateTableAutomaticGrant();
grants.userGrants = getGrantMap(HiveConf.getVar(conf,
HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_USER_GRANTS));
grants.groupGrants = getGrantMap(HiveConf.getVar(conf,
HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_GROUP_GRANTS));
grants.roleGrants = getGrantMap(HiveConf.getVar(conf,
HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_ROLE_GRANTS));
grants.ownerGrant = getGrantorInfoList(HiveConf.getVar(conf,
HiveConf.ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS));
return grants;
}
private static Map<String, List<PrivilegeGrantInfo>> getGrantMap(String grantMapStr)
throws HiveException {
if (grantMapStr != null && !grantMapStr.trim().equals("")) {
String[] grantArrayStr = grantMapStr.split(";");
Map<String, List<PrivilegeGrantInfo>> grantsMap = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String grantStr : grantArrayStr) {
String[] principalListAndPrivList = grantStr.split(":");
if (principalListAndPrivList.length != 2
|| principalListAndPrivList[0] == null
|| principalListAndPrivList[0].trim().equals("")) {
throw new HiveException(
"Can not understand the config privilege definition " + grantStr);
}
String userList = principalListAndPrivList[0];
String privList = principalListAndPrivList[1];
List<PrivilegeGrantInfo> grantInfoList = getGrantorInfoList(privList);
if(grantInfoList != null) {
String[] users = userList.split(",");
for (String user : users) {
grantsMap.put(user, grantInfoList);
}
}
}
return grantsMap;
}
return null;
}
private static List<PrivilegeGrantInfo> getGrantorInfoList(String privList)
throws HiveException {
if (privList == null || privList.trim().equals("")) {
return null;
}
validatePrivilege(privList);
String[] grantArray = privList.split(",");
List<PrivilegeGrantInfo> grantInfoList = new ArrayList<PrivilegeGrantInfo>();
String grantor = SessionState.getUserFromAuthenticator();
for (String grant : grantArray) {
grantInfoList.add(new PrivilegeGrantInfo(grant, -1, grantor,
PrincipalType.USER, true));
}
return grantInfoList;
}
private static void validatePrivilege(String ownerGrantsInConfig)
throws HiveException {
String[] ownerGrantArray = ownerGrantsInConfig.split(",");
// verify the config
for (String ownerGrant : ownerGrantArray) {
Privilege prive = PrivilegeRegistry.getPrivilege(ownerGrant);
if (prive == null) {
throw new HiveException("Privilege " + ownerGrant + " is not found.");
}
}
}
public Map<String, List<PrivilegeGrantInfo>> getUserGrants() {
Map<String, List<PrivilegeGrantInfo>> curUserGrants = new HashMap<String, List<PrivilegeGrantInfo>>();
String owner = SessionState.getUserFromAuthenticator();
if (owner != null && ownerGrant != null) {
curUserGrants.put(owner, ownerGrant);
}
if (userGrants != null) {
curUserGrants.putAll(userGrants);
}
return curUserGrants;
}
public Map<String, List<PrivilegeGrantInfo>> getGroupGrants() {
return groupGrants;
}
public Map<String, List<PrivilegeGrantInfo>> getRoleGrants() {
return roleGrants;
}
}
| 1,826 |
412 |
<gh_stars>100-1000
int main()
{
  // ∀ binds very weakly.
  // In particular, 'i' below is in the scope of the quantifier.
  // NOTE(review): this appears to be a parser regression test — the whole
  // conditional '1 ? 1 : i' must be parsed as the quantified predicate,
  // not just its first operand.
  __CPROVER_assert(∀ int i; 1 ? 1 : i, "∀ binds weakly");
}
| 73 |
1,829 |
<filename>spring-framework-samples/spring-framework-3.2.x-sample/src/main/java/thinking/in/spring/boot/samples/spring3/service/CalculatingService.java<gh_stars>1000+
package thinking.in.spring.boot.samples.spring3.service;
/**
 * Calculating service.
 *
 * @author <a href="mailto:<EMAIL>">Mercy</a>
 * @version 1.0.0
 * @since 1.0.0
 */
public interface CalculatingService {

    /**
     * Sums the given values.
     *
     * @param values the values to accumulate
     * @return the accumulated sum
     */
    Integer sum(Integer... values);
}
| 223 |
14,668 |
<gh_stars>1000+
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/bookmarks/test/mock_bookmark_model_observer.h"
namespace bookmarks {
// Constructor and destructor are intentionally empty; they are defined
// out-of-line (presumably to keep GMock-generated code out of the header —
// confirm against the header if this matters).
MockBookmarkModelObserver::MockBookmarkModelObserver() {}
MockBookmarkModelObserver::~MockBookmarkModelObserver() {}
} // namespace bookmarks
| 129 |
718 |
<reponame>plutoyuxie/mmgeneration<filename>configs/lsgan/lsgan_lsgan-archi_lr-1e-4_lsun-bedroom_128_b64x1_10m.py
# Inherit model, dataset, and runtime defaults from the shared base configs.
_base_ = [
    '../_base_/models/lsgan/lsgan_128x128.py',
    '../_base_/datasets/unconditional_imgs_128x128.py',
    '../_base_/default_runtime.py'
]

# define dataset
# you must set `samples_per_gpu` and `imgs_root`
data = dict(
    samples_per_gpu=64, train=dict(imgs_root='./data/lsun/bedroom_train'))

# Same Adam settings for generator and discriminator (lr 1e-4, betas 0.5/0.99).
optimizer = dict(
    generator=dict(type='Adam', lr=0.0001, betas=(0.5, 0.99)),
    discriminator=dict(type='Adam', lr=0.0001, betas=(0.5, 0.99)))

# adjust running config
# No learning-rate schedule hook is registered.
lr_config = None
checkpoint_config = dict(interval=10000, by_epoch=False, max_keep_ckpts=20)
custom_hooks = [
    dict(
        type='VisualizeUnconditionalSamples',
        output_dir='training_samples',
        interval=10000)
]

# Periodic FID evaluation on 50k generated samples during training.
evaluation = dict(
    type='GenerativeEvalHook',
    interval=10000,
    metrics=dict(
        type='FID', num_images=50000, inception_pkl=None, bgr2rgb=True),
    sample_kwargs=dict(sample_model='orig'))

total_iters = 160000

# use ddp wrapper for faster training
use_ddp_wrapper = True
find_unused_parameters = False

runner = dict(
    type='DynamicIterBasedRunner',
    is_dynamic_ddp=False,  # Note that this flag should be False.
    pass_training_status=True)

# Metrics available for offline evaluation.
metrics = dict(
    ms_ssim10k=dict(type='MS_SSIM', num_images=10000),
    swd16k=dict(type='SWD', num_images=16384, image_shape=(3, 128, 128)),
    fid50k=dict(type='FID', num_images=50000, inception_pkl=None))
| 638 |
1,338 |
<reponame>Kirishikesan/haiku<gh_stars>1000+
/*
* Copyright 2004-2010, Haiku Inc. All Rights Reserved.
* Distributed under the terms of the MIT License.
*/
#ifndef _USB_HID_PAGE_SIMULATION_H
#define _USB_HID_PAGE_SIMULATION_H
/* Reference:
* HID Usage Page 0x02: SIMULATION
* HID Usage Tables Ver. 1.12
* http://www.usb.org/developers/devclass_docs/Hut1_12.pdf
*/
// Usage IDs
// Usage IDs from the Simulation Controls page (0x02) of the HID Usage Tables.
// Entries without an explicit value continue sequentially from the previous
// one; the jumps (0x20, 0xb0, 0xc0) mirror the reserved gaps in the table.
enum {
	B_HID_UID_SIM_FLIGHT_SIMULATION_DEVICE = 0x1,
	B_HID_UID_SIM_AUTOMOBILE_SIMULATION_DEVICE,
	B_HID_UID_SIM_TANK_SIMULATION_DEVICE,
	B_HID_UID_SIM_SPACESHIP_SIMULATION_DEVICE,
	B_HID_UID_SIM_SUBMARINE_SIMULATION_DEVICE,
	B_HID_UID_SIM_SAILING_SIMULATION_DEVICE,
	B_HID_UID_SIM_MOTORCYCLE_SIMULATION_DEVICE,
	B_HID_UID_SIM_SPORTS_SIMULATION_DEVICE,
	B_HID_UID_SIM_AIRPLANE_SIMULATION_DEVICE,
	B_HID_UID_SIM_HELICOPTER_SIMULATION_DEVICE,
	B_HID_UID_SIM_MAGIC_CARPET_SIMULATION_DEVICE,
	B_HID_UID_SIM_BICYCLE_SIMULATION_DEVICE,

	B_HID_UID_SIM_FLIGHT_CONTROL_STICK = 0x20,
	B_HID_UID_SIM_FLIGHT_STICK,
	B_HID_UID_SIM_CYCLIC_CONTROL,
	B_HID_UID_SIM_CYCLIC_TRIM,
	B_HID_UID_SIM_FLIGHT_YOKE,
	B_HID_UID_SIM_TRACK_CONTROL,

	B_HID_UID_SIM_AILERON = 0xb0,
	B_HID_UID_SIM_AILERON_TRIM,
	B_HID_UID_SIM_ANTI_TORQUE_CONTROL,
	B_HID_UID_SIM_AUTOPILOT_ENABLE,
	B_HID_UID_SIM_CHAFF_RELEASE,
	B_HID_UID_SIM_COLLECTIVE_CONTROL,
	B_HID_UID_SIM_DIVE_BREAK,
	B_HID_UID_SIM_ELECTRONIC_COUNTERMEASURES,
	B_HID_UID_SIM_ELEVATOR,
	B_HID_UID_SIM_ELEVATOR_TRIM,
	B_HID_UID_SIM_RUDDER,
	B_HID_UID_SIM_THROTTLE,
	B_HID_UID_SIM_FLIGHT_COMMUNICATIONS,
	B_HID_UID_SIM_FLARE_RELEASE,
	B_HID_UID_SIM_LANDING_GEAR,
	B_HID_UID_SIM_TOE_BRAKE,

	B_HID_UID_SIM_TRIGGER = 0xc0,
	B_HID_UID_SIM_WEAPONS_ARM,
	B_HID_UID_SIM_WEAPONS_SELECT,
	B_HID_UID_SIM_WING_FLAPS,
	B_HID_UID_SIM_ACCELERATOR,
	B_HID_UID_SIM_BRAKE,
	B_HID_UID_SIM_CLUTCH,
	B_HID_UID_SIM_SHIFTER,
	B_HID_UID_SIM_STEERING,
	B_HID_UID_SIM_TURRET_DIRECTION,
	B_HID_UID_SIM_BARREL_ELEVATION,
	B_HID_UID_SIM_DIVE_PLANE,
	B_HID_UID_SIM_BALLAST,
	B_HID_UID_SIM_BICYCLE_CRANK,
	B_HID_UID_SIM_HANDLE_BARS,
	B_HID_UID_SIM_FRONT_BRAKE,
	B_HID_UID_SIM_REAR_BRAKE
};
#endif // _USB_HID_PAGE_SIMULATION_H
| 1,161 |
2,151 |
<filename>include/llvm/CodeGen/GlobalISel/GISelAccessor.h
//===-- GISelAccessor.h - GISel Accessor ------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// This file declares the API to access the various APIs related
/// to GlobalISel.
//
//===----------------------------------------------------------------------===/
#ifndef LLVM_CODEGEN_GLOBALISEL_GISELACCESSOR_H
#define LLVM_CODEGEN_GLOBALISEL_GISELACCESSOR_H
namespace llvm {
class CallLowering;
class InstructionSelector;
class LegalizerInfo;
class RegisterBankInfo;
/// The goal of this helper class is to gather the accessor to all
/// the APIs related to GlobalISel.
/// It should be derived to feature an actual accessor to the GISel APIs.
/// The reason why this is not simply done into the subtarget is to avoid
/// spreading ifdefs around.
struct GISelAccessor {
  virtual ~GISelAccessor() {}
  /// The default implementations return nullptr, meaning "this API is not
  /// available"; subclasses providing actual GlobalISel support override them.
  virtual const CallLowering *getCallLowering() const { return nullptr;}
  virtual const InstructionSelector *getInstructionSelector() const {
    return nullptr;
  }
  virtual const LegalizerInfo *getLegalizerInfo() const { return nullptr; }
  virtual const RegisterBankInfo *getRegBankInfo() const { return nullptr;}
};
} // End namespace llvm;
#endif
| 421 |
4,348 |
/***
Copyright (c) 2008-2016 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License.
Covered in detail in the book _The Busy Coder's Guide to Android Development_
https://commonsware.com/Android
*/
package com.commonsware.android.rx;
import android.content.Context;
import android.content.res.Resources;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.widget.DividerItemDecoration;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import android.widget.Toast;
import org.xmlpull.v1.XmlPullParser;
import java.util.ArrayList;
import io.reactivex.Observable;
import io.reactivex.ObservableEmitter;
import io.reactivex.ObservableOnSubscribe;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
/**
 * Fragment that reads words from an XML resource on a background scheduler,
 * upper-cases them, and appends them to a RecyclerView list on the main thread.
 */
public class RxDemoFragment extends Fragment {
  private ArrayList<String> model=new ArrayList<>();
  private RVArrayAdapter adapter;
  // Subscription handle; disposed in onDestroy() to stop background work.
  private Disposable sub=null;

  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    adapter=new RVArrayAdapter(model, getLayoutInflater());

    // Parse on the I/O scheduler, map on that same thread, deliver results
    // (and the completion toast) on the Android main thread.
    Observable<String> observable=Observable
      .create(new WordSource(getActivity()))
      .subscribeOn(Schedulers.io())
      .map(s -> (s.toUpperCase()))
      .observeOn(AndroidSchedulers.mainThread())
      .doOnComplete(() ->
        Toast.makeText(getActivity(), R.string.done, Toast.LENGTH_SHORT).show());

    // onNext appends to the adapter; onError shows the message in a toast.
    sub=observable.subscribe(s -> adapter.add(s),
      error ->
        Toast
          .makeText(getActivity(), error.getMessage(), Toast.LENGTH_LONG)
          .show());
  }

  @Nullable
  @Override
  public View onCreateView(@NonNull LayoutInflater inflater,
                           @Nullable ViewGroup container,
                           @Nullable Bundle savedInstanceState) {
    return inflater.inflate(R.layout.main, container, false);
  }

  @Override
  public void onViewCreated(View v, Bundle savedInstanceState) {
    super.onViewCreated(v, savedInstanceState);

    RecyclerView rv=v.findViewById(android.R.id.list);

    rv.setLayoutManager(new LinearLayoutManager(getActivity()));
    rv.addItemDecoration(new DividerItemDecoration(getActivity(),
      DividerItemDecoration.VERTICAL));
    rv.setAdapter(adapter);
  }

  @Override
  public void onDestroy() {
    // Dispose of the subscription if it is still live, so the background
    // parse cannot touch a destroyed fragment.
    if (sub!=null && !sub.isDisposed()) {
      sub.dispose();
    }

    super.onDestroy();
  }

  /**
   * Observable source that emits the "word" attribute of each <word> element
   * in R.xml.words, then completes; parse errors are forwarded via onError.
   */
  private static class WordSource implements ObservableOnSubscribe<String> {
    private final Resources resources;

    WordSource(Context ctxt) {
      resources=ctxt.getResources();
    }

    @Override
    public void subscribe(ObservableEmitter<String> emitter) {
      try {
        XmlPullParser xpp=resources.getXml(R.xml.words);

        while (xpp.getEventType()!=XmlPullParser.END_DOCUMENT) {
          if (xpp.getEventType()==XmlPullParser.START_TAG) {
            if (xpp.getName().equals("word")) {
              // Emits the first attribute of the <word> tag.
              emitter.onNext(xpp.getAttributeValue(0));
            }
          }

          xpp.next();
        }

        emitter.onComplete();
      }
      catch (Exception e) {
        emitter.onError(e);
      }
    }
  }

  /** Minimal RecyclerView adapter over a list of strings, one row per word. */
  private static class RVArrayAdapter extends RecyclerView.Adapter<RowHolder> {
    private final ArrayList<String> words;
    private final LayoutInflater inflater;

    private RVArrayAdapter(ArrayList<String> words,
                           LayoutInflater inflater) {
      this.words=words;
      this.inflater=inflater;
    }

    @NonNull
    @Override
    public RowHolder onCreateViewHolder(@NonNull ViewGroup parent,
                                        int viewType) {
      View row=inflater.inflate(android.R.layout.simple_list_item_1, parent, false);

      return new RowHolder(row);
    }

    @Override
    public void onBindViewHolder(@NonNull RowHolder holder,
                                 int position) {
      holder.bind(words.get(position));
    }

    @Override
    public int getItemCount() {
      return words.size();
    }

    // Appends a word and notifies only the inserted position.
    private void add(String word) {
      words.add(word);
      notifyItemInserted(words.size()-1);
    }
  }

  /** Holds the single TextView of a simple_list_item_1 row. */
  private static class RowHolder extends RecyclerView.ViewHolder {
    private final TextView title;

    RowHolder(View itemView) {
      super(itemView);

      title=itemView.findViewById(android.R.id.text1);
    }

    public void bind(String text) {
      title.setText(text);
    }
  }
}
| 2,017 |
6,304 |
<gh_stars>1000+
/*
* Copyright 2012 Google Inc.
*
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*/
#include "gm/gm.h"
#include "include/core/SkBitmap.h"
#include "include/core/SkCanvas.h"
#include "include/core/SkFont.h"
#include "include/core/SkImageFilter.h"
#include "include/core/SkPaint.h"
#include "include/core/SkRect.h"
#include "include/core/SkScalar.h"
#include "include/core/SkSize.h"
#include "include/core/SkString.h"
#include "include/core/SkTypeface.h"
#include "include/core/SkTypes.h"
#include "include/effects/SkImageFilters.h"
#include "tools/ToolUtils.h"
#define WIDTH 700
#define HEIGHT 560
namespace skiagm {
// GM that exercises the morphology image filters: a text bitmap is drawn
// through Dilate/Erode filters at several radii, with and without a crop rect.
class MorphologyGM : public GM {
public:
    MorphologyGM() {
        this->setBGColor(0xFF000000);
    }

protected:
    SkString onShortName() override {
        return SkString("morphology");
    }

    // Builds the 135x135 source image: white "ABC" / "XYZ" text on black.
    void onOnceBeforeDraw() override {
        auto surf = SkSurface::MakeRasterN32Premul(135, 135);
        SkFont font(ToolUtils::create_portable_typeface(), 64.0f);
        SkPaint paint;
        paint.setColor(0xFFFFFFFF);
        surf->getCanvas()->drawString("ABC", 10, 55, font, paint);
        surf->getCanvas()->drawString("XYZ", 10, 110, font, paint);
        fImage = surf->makeImageSnapshot();
    }

    SkISize onISize() override {
        return SkISize::Make(WIDTH, HEIGHT);
    }

    // Draws fImage at (x, y), clipped to the image bounds, with the given
    // (filter-carrying) paint.
    void drawClippedBitmap(SkCanvas* canvas, const SkPaint& paint, int x, int y) {
        canvas->save();
        canvas->translate(SkIntToScalar(x), SkIntToScalar(y));
        canvas->clipIRect(fImage->bounds());
        canvas->drawImage(fImage, 0, 0, SkSamplingOptions(), &paint);
        canvas->restore();
    }

    void onDraw(SkCanvas* canvas) override {
        struct {
            int fWidth, fHeight;
            int fRadiusX, fRadiusY;
        } samples[] = {
            { 140, 140, 0, 0 },
            { 140, 140, 0, 2 },
            { 140, 140, 2, 0 },
            { 140, 140, 2, 2 },
            { 24, 24, 25, 25 },
        };
        SkPaint paint;
        SkIRect cropRect = SkIRect::MakeXYWH(25, 20, 100, 80);

        // Rows (j): bit 0 selects Erode vs. Dilate, bit 1 enables the crop
        // rect. Columns (i) sweep the radius samples above.
        for (unsigned j = 0; j < 4; ++j) {
            for (unsigned i = 0; i < SK_ARRAY_COUNT(samples); ++i) {
                const SkIRect* cr = j & 0x02 ? &cropRect : nullptr;
                if (j & 0x01) {
                    paint.setImageFilter(SkImageFilters::Erode(
                            samples[i].fRadiusX, samples[i].fRadiusY, nullptr, cr));
                } else {
                    paint.setImageFilter(SkImageFilters::Dilate(
                            samples[i].fRadiusX, samples[i].fRadiusY, nullptr, cr));
                }
                this->drawClippedBitmap(canvas, paint, i * 140, j * 140);
            }
        }
    }

private:
    sk_sp<SkImage> fImage;

    using INHERITED = GM;
};
//////////////////////////////////////////////////////////////////////////////
DEF_GM(return new MorphologyGM;)
} // namespace skiagm
| 1,413 |
15,947 |
<filename>tests/cli/commands/test_rotate_fernet_key_command.py<gh_stars>1000+
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest import mock
from cryptography.fernet import Fernet
from airflow.cli import cli_parser
from airflow.cli.commands import rotate_fernet_key_command
from airflow.hooks.base import BaseHook
from airflow.models import Connection, Variable
from airflow.utils.session import provide_session
from tests.test_utils.config import conf_vars
from tests.test_utils.db import clear_db_connections, clear_db_variables
class TestRotateFernetKeyCommand(unittest.TestCase):
    """Tests for the ``airflow rotate-fernet-key`` CLI command."""

    @classmethod
    def setUpClass(cls):
        cls.parser = cli_parser.get_parser()

    def setUp(self) -> None:
        # Start from a clean DB: no connections (not even defaults), no variables.
        clear_db_connections(add_default_connections_back=False)
        clear_db_variables()

    def tearDown(self) -> None:
        clear_db_connections(add_default_connections_back=False)
        clear_db_variables()

    @provide_session
    def test_should_rotate_variable(self, session):
        """Rotating re-encrypts variables stored under the old key and leaves
        unencrypted variables readable."""
        fernet_key1 = Fernet.generate_key()
        fernet_key2 = Fernet.generate_key()
        var1_key = f"{__file__}_var1"
        var2_key = f"{__file__}_var2"

        # Create unencrypted variable
        with conf_vars({('core', 'fernet_key'): ''}), mock.patch('airflow.models.crypto._fernet', None):
            Variable.set(key=var1_key, value="value")

        # Create encrypted variable
        with conf_vars({('core', 'fernet_key'): fernet_key1.decode()}), mock.patch(
            'airflow.models.crypto._fernet', None
        ):
            Variable.set(key=var2_key, value="value")

        # Rotate fernet key: key2 becomes primary, key1 kept for decryption.
        with conf_vars(
            {('core', 'fernet_key'): ','.join([fernet_key2.decode(), fernet_key1.decode()])}
        ), mock.patch('airflow.models.crypto._fernet', None):
            args = self.parser.parse_args(['rotate-fernet-key'])
            rotate_fernet_key_command.rotate_fernet_key(args)

        # Assert correctness using a new fernet key
        with conf_vars({('core', 'fernet_key'): fernet_key2.decode()}), mock.patch(
            'airflow.models.crypto._fernet', None
        ):
            var1 = session.query(Variable).filter(Variable.key == var1_key).first()
            # Unencrypted variable should be unchanged
            assert Variable.get(key=var1_key) == 'value'
            assert var1._val == 'value'
            assert Variable.get(key=var2_key) == 'value'

    @provide_session
    def test_should_rotate_connection(self, session):
        """Rotating re-encrypts connection passwords stored under the old key
        and leaves unencrypted connections readable."""
        fernet_key1 = Fernet.generate_key()
        fernet_key2 = Fernet.generate_key()
        var1_key = f"{__file__}_var1"
        var2_key = f"{__file__}_var2"

        # Create an unencrypted connection
        with conf_vars({('core', 'fernet_key'): ''}), mock.patch('airflow.models.crypto._fernet', None):
            session.add(Connection(conn_id=var1_key, uri="mysql://user:pass@localhost"))
            session.commit()

        # Create an encrypted connection
        with conf_vars({('core', 'fernet_key'): fernet_key1.decode()}), mock.patch(
            'airflow.models.crypto._fernet', None
        ):
            session.add(Connection(conn_id=var2_key, uri="mysql://user:pass@localhost"))
            session.commit()

        # Rotate fernet key: key2 becomes primary, key1 kept for decryption.
        with conf_vars(
            {('core', 'fernet_key'): ','.join([fernet_key2.decode(), fernet_key1.decode()])}
        ), mock.patch('airflow.models.crypto._fernet', None):
            args = self.parser.parse_args(['rotate-fernet-key'])
            rotate_fernet_key_command.rotate_fernet_key(args)

        # Assert correctness using a new fernet key
        with conf_vars({('core', 'fernet_key'): fernet_key2.decode()}), mock.patch(
            'airflow.models.crypto._fernet', None
        ):
            # The unencrypted connection's password should be unchanged
            conn1: Connection = BaseHook.get_connection(var1_key)
            assert conn1.password == '<PASSWORD>'
            assert conn1._password == '<PASSWORD>'
            assert BaseHook.get_connection(var2_key).password == '<PASSWORD>'
| 2,014 |
1,085 |
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.jdbc.impl;
import java.io.InputStream;
import java.io.Reader;
import java.sql.NClob;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLTimeoutException;
import java.sql.SQLXML;
import java.util.Properties;
import java.util.TimeZone;
import org.apache.calcite.avatica.AvaticaConnection;
import org.apache.calcite.avatica.AvaticaStatement;
import org.apache.calcite.avatica.Helper;
import org.apache.calcite.avatica.Meta;
import org.apache.calcite.avatica.Meta.StatementHandle;
import org.apache.calcite.avatica.QueryState;
import com.dremio.exec.client.DremioClient;
import com.dremio.exec.client.ServerMethod;
import com.dremio.exec.proto.UserProtos.CreatePreparedStatementResp;
import com.dremio.exec.proto.UserProtos.RequestStatus;
import com.dremio.exec.rpc.RpcFuture;
/**
* Implementation of {@link net.hydromatic.avatica.AvaticaFactory} for Dremio and
* JDBC 4.1 (corresponds to JDK 1.7).
*/
// Note: Must be public so net.hydromatic.avatica.UnregisteredDriver can
// (reflectively) call no-args constructor.
public class DremioJdbc41Factory extends DremioFactory {
  private static final org.slf4j.Logger logger =
      org.slf4j.LoggerFactory.getLogger(DremioJdbc41Factory.class);

  /** Creates a factory for JDBC version 4.1. */
  // Note: Must be public so net.hydromatic.avatica.UnregisteredDriver can
  // (reflectively) call this constructor.
  public DremioJdbc41Factory() {
    this(4, 1);
  }

  /** Creates a JDBC factory with given major/minor version number. */
  protected DremioJdbc41Factory(int major, int minor) {
    super(major, minor);
  }

  @Override
  DremioConnectionImpl newConnection(DriverImpl driver,
                                     DremioFactory factory,
                                     String url,
                                     Properties info) throws SQLException {
    return new DremioConnectionImpl(driver, factory, url, info);
  }

  @Override
  public DremioDatabaseMetaDataImpl newDatabaseMetaData(AvaticaConnection connection) {
    return new DremioDatabaseMetaDataImpl(connection);
  }

  @Override
  public DremioStatementImpl newStatement(AvaticaConnection connection,
                                          StatementHandle h,
                                          int resultSetType,
                                          int resultSetConcurrency,
                                          int resultSetHoldability) {
    return new DremioStatementImpl((DremioConnectionImpl) connection,
                                   h,
                                   resultSetType,
                                   resultSetConcurrency,
                                   resultSetHoldability);
  }

  /**
   * Creates a prepared statement: server-side when the server supports it and
   * it is not disabled by configuration, otherwise a client-side fallback
   * (with a null server prepared statement).
   */
  @Override
  public DremioJdbc41PreparedStatement newPreparedStatement(AvaticaConnection connection,
                                                            StatementHandle h,
                                                            Meta.Signature signature,
                                                            int resultSetType,
                                                            int resultSetConcurrency,
                                                            int resultSetHoldability)
      throws SQLException {
    DremioConnectionImpl dremioConnection = (DremioConnectionImpl) connection;
    DremioClient client = dremioConnection.getClient();
    if (dremioConnection.getConfig().isServerPreparedStatementDisabled() || !client.getSupportedMethods().contains(ServerMethod.PREPARED_STATEMENT)) {
      // fallback to client side prepared statement
      return new DremioJdbc41PreparedStatement(dremioConnection, h, signature, null, resultSetType, resultSetConcurrency, resultSetHoldability);
    }
    return newServerPreparedStatement(dremioConnection, h, signature, resultSetType,
        resultSetConcurrency, resultSetHoldability);
  }

  /**
   * Asks the server to prepare {@code signature.sql} and wraps the result.
   * TIMEOUT maps to SQLTimeoutException; FAILED and unknown statuses map to
   * SQLException.
   */
  private DremioJdbc41PreparedStatement newServerPreparedStatement(DremioConnectionImpl connection,
                                                                   StatementHandle h,
                                                                   Meta.Signature signature,
                                                                   int resultSetType,
                                                                   int resultSetConcurrency,
                                                                   int resultSetHoldability)
    throws SQLException {
    String sql = signature.sql;

    try {
      RpcFuture<CreatePreparedStatementResp> respFuture = connection.getClient().createPreparedStatement(signature.sql);

      CreatePreparedStatementResp resp;
      try {
        resp = respFuture.get();
      } catch (InterruptedException e) {
        // Preserve evidence that the interruption occurred so that code higher up
        // on the call stack can learn of the interruption and respond to it if it
        // wants to.
        Thread.currentThread().interrupt();

        throw new SQLException( "Interrupted", e );
      }

      final RequestStatus status = resp.getStatus();
      if (status != RequestStatus.OK) {
        final String errMsgFromServer = resp.getError() != null ? resp.getError().getMessage() : "";

        if (status == RequestStatus.TIMEOUT) {
          logger.error("Request timed out to create prepare statement: {}", errMsgFromServer);
          throw new SQLTimeoutException("Failed to create prepared statement: " + errMsgFromServer);
        }

        if (status == RequestStatus.FAILED) {
          logger.error("Failed to create prepared statement: {}", errMsgFromServer);
          throw new SQLException("Failed to create prepared statement: " + resp.getError());
        }

        logger.error("Failed to create prepared statement. Unknown status: {}, Error: {}", status, errMsgFromServer);
        throw new SQLException(String.format(
            "Failed to create prepared statement. Unknown status: %s, Error: %s", status, errMsgFromServer));
      }

      return new DremioJdbc41PreparedStatement(connection,
                                               h,
                                               signature,
                                               resp.getPreparedStatement(),
                                               resultSetType,
                                               resultSetConcurrency,
                                               resultSetHoldability);
    } catch (SQLException e) {
      // Already a proper SQLException (including the timeout/failed cases
      // above): rethrow unchanged.
      throw e;
    } catch (RuntimeException e) {
      throw Helper.INSTANCE.createException("Error while preparing statement [" + sql + "]", e);
    } catch (Exception e) {
      throw Helper.INSTANCE.createException("Error while preparing statement [" + sql + "]", e);
    }
  }

  @Override
  public DremioResultSetImpl newResultSet(AvaticaStatement statement,
                                          QueryState state,
                                          Meta.Signature signature,
                                          TimeZone timeZone,
                                          Meta.Frame firstFrame) throws SQLException {
    final ResultSetMetaData metaData = newResultSetMetaData(statement, signature);
    return new DremioResultSetImpl(statement, state, signature, metaData, timeZone, firstFrame);
  }

  @Override
  public ResultSetMetaData newResultSetMetaData(AvaticaStatement statement,
                                                Meta.Signature signature) {
    return new DremioResultSetMetaDataImpl(statement, null, signature);
  }

  /**
   * JDBC 4.1 version of {@link DremioPreparedStatementImpl}.
   */
  private static class DremioJdbc41PreparedStatement extends DremioPreparedStatementImpl {
    DremioJdbc41PreparedStatement(DremioConnectionImpl connection,
                                  StatementHandle h,
                                  Meta.Signature signature,
                                  com.dremio.exec.proto.UserProtos.PreparedStatement pstmt,
                                  int resultSetType,
                                  int resultSetConcurrency,
                                  int resultSetHoldability) throws SQLException {
      super(connection, h, signature, pstmt,
            resultSetType, resultSetConcurrency, resultSetHoldability);
    }

    // All the setters below simply delegate to the parameter site for the
    // given index.
    // These don't need throwIfClosed(), since getParameter already calls it.

    @Override
    public void setRowId(int parameterIndex, RowId x) throws SQLException {
      getSite(parameterIndex).setRowId(x);
    }

    @Override
    public void setNString(int parameterIndex, String value) throws SQLException {
      getSite(parameterIndex).setNString(value);
    }

    @Override
    public void setNCharacterStream(int parameterIndex, Reader value,
                                    long length) throws SQLException {
      getSite(parameterIndex).setNCharacterStream(value, length);
    }

    @Override
    public void setNClob(int parameterIndex, NClob value) throws SQLException {
      getSite(parameterIndex).setNClob(value);
    }

    @Override
    public void setClob(int parameterIndex, Reader reader,
                        long length) throws SQLException {
      getSite(parameterIndex).setClob(reader, length);
    }

    @Override
    public void setBlob(int parameterIndex, InputStream inputStream,
                        long length) throws SQLException {
      getSite(parameterIndex).setBlob(inputStream, length);
    }

    @Override
    public void setNClob(int parameterIndex, Reader reader,
                         long length) throws SQLException {
      getSite(parameterIndex).setNClob(reader, length);
    }

    @Override
    public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
      getSite(parameterIndex).setSQLXML(xmlObject);
    }

    @Override
    public void setAsciiStream(int parameterIndex, InputStream x,
                               long length) throws SQLException {
      getSite(parameterIndex).setAsciiStream(x, length);
    }

    @Override
    public void setBinaryStream(int parameterIndex, InputStream x,
                                long length) throws SQLException {
      getSite(parameterIndex).setBinaryStream(x, length);
    }

    @Override
    public void setCharacterStream(int parameterIndex, Reader reader,
                                   long length) throws SQLException {
      getSite(parameterIndex).setCharacterStream(reader, length);
    }

    @Override
    public void setAsciiStream(int parameterIndex,
                               InputStream x) throws SQLException {
      getSite(parameterIndex).setAsciiStream(x);
    }

    @Override
    public void setBinaryStream(int parameterIndex,
                                InputStream x) throws SQLException {
      getSite(parameterIndex).setBinaryStream(x);
    }

    @Override
    public void setCharacterStream(int parameterIndex,
                                   Reader reader) throws SQLException {
      getSite(parameterIndex).setCharacterStream(reader);
    }

    @Override
    public void setNCharacterStream(int parameterIndex,
                                    Reader value) throws SQLException {
      getSite(parameterIndex).setNCharacterStream(value);
    }

    @Override
    public void setClob(int parameterIndex, Reader reader) throws SQLException {
      getSite(parameterIndex).setClob(reader);
    }

    @Override
    public void setBlob(int parameterIndex,
                        InputStream inputStream) throws SQLException {
      getSite(parameterIndex).setBlob(inputStream);
    }

    @Override
    public void setNClob(int parameterIndex, Reader reader) throws SQLException {
      getSite(parameterIndex).setNClob(reader);
    }
  }
}
// End DremioJdbc41Factory.java
| 5,224 |
352 |
<filename>examples/core/composer.json
{
"require": {
"monolog/monolog": "2.*"
},
"require-dev": {
"roave/security-advisories": "dev-master"
}
}
| 71 |
367 |
<gh_stars>100-1000
from typing import List
class ModuleFilter:
    """Filter log records by the emitting module's name.

    A record is kept unless its module name starts with one of the excluded
    prefixes; ``include_list`` prefixes take precedence over ``exclude_list``,
    which allows re-including a sub-module of an otherwise excluded package.
    """

    def __init__(self, exclude_list: List[str] = None, include_list: List[str] = None) -> None:
        """Configure the filter.

        Args:
            exclude_list: module-name prefixes to reject. Defaults to no
                exclusions when omitted (the original docstring claimed a
                ``[]`` default, which is what ``None`` normalizes to here).
            include_list: module-name prefixes to keep even when a broader
                prefix of them is excluded. Defaults to no inclusions.

        Usage:
            ModuleFilter(["uvicorn"])  # exclude all logs from modules starting with "uvicorn"
            ModuleFilter(["uvicorn"], ["uvicorn.access"])  # exclude "uvicorn*" except "uvicorn.access*"
        """
        # Stored as tuples so the multi-prefix form of str.startswith can be
        # used below instead of an explicit loop.
        self._exclude_prefixes = tuple(exclude_list or [])
        self._include_prefixes = tuple(include_list or [])

    def filter(self, record) -> bool:
        """Return True if ``record`` should be kept.

        ``record`` is a mapping with a ``"name"`` entry holding the module
        name (e.g. a loguru record — confirm against the logging setup).
        """
        name: str = record["name"]
        # str.startswith with an empty tuple is always False, so an empty
        # include/exclude list leaves the record's fate to the next check.
        if name.startswith(self._include_prefixes):
            return True
        if name.startswith(self._exclude_prefixes):
            return False
        return True
| 480 |
722 |
<gh_stars>100-1000
from lixian_plugins.api import command
from lixian_cli_parser import command_line_parser, command_line_option
from lixian_cli_parser import with_parser
from lixian_cli import parse_login
from lixian_commands.util import create_client
@command(name='get-torrent', usage='get .torrent by task id or info hash')
@command_line_parser()
@with_parser(parse_login)
@command_line_option('rename', default=True)
def get_torrent(args):
	'''
	usage: lx get-torrent [info-hash|task-id]...

	Downloads the .torrent file for each argument. A 40-hex-char argument is
	treated as a BitTorrent info hash, an all-digit argument as a task id
	(resolved to its info hash first). With --rename (default) the output
	file is named after the torrent's internal name; otherwise after the
	info hash. Note: this is Python 2 code (print statement below).
	'''
	client = create_client(args)
	for id in args:
		id = id.lower()
		import re
		if re.match(r'[a-fA-F0-9]{40}$', id):
			# Argument is already an info hash.
			torrent = client.get_torrent_file_by_info_hash(id)
		elif re.match(r'\d+$', id):
			# Argument is a task id: look up the task to get its info hash.
			import lixian_query
			task = lixian_query.get_task_by_id(client, id)
			id = task['bt_hash']
			id = id.lower()
			torrent = client.get_torrent_file_by_info_hash(id)
		else:
			raise NotImplementedError()
		if args.rename:
			import lixian_hash_bt
			from lixian_encoding import default_encoding
			# Decode the torrent's internal name using its declared encoding
			# (default utf-8), then re-encode for the local console/filesystem.
			info = lixian_hash_bt.bdecode(torrent)['info']
			name = info['name'].decode(info.get('encoding', 'utf-8')).encode(default_encoding)
			import re
			# Strip characters that are invalid in Windows filenames.
			name = re.sub(r'[\\/:*?"<>|]', '-', name)
		else:
			name = id
		path = name + '.torrent'
		print path
		with open(path, 'wb') as output:
			output.write(torrent)
| 556 |
940 |
/*
* b2ether driver -- derived from DDK packet driver sample
*
* Basilisk II (C) 1997-1999 <NAME>
*
* Windows platform specific code copyright (C) <NAME>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "stdarg.h"
#include "ntddk.h"
#include "ntiologc.h"
#include "ndis.h"
#include "ntddpack.h"
#include "b2ether.h"
#undef DBG
#define DBG 0
#include "debug.h"
NTSTATUS
DriverEntry(
IN PDRIVER_OBJECT DriverObject,
IN PUNICODE_STRING RegistryPath
);
NTSTATUS
PacketReadRegistry(
IN PWSTR *MacDriverName,
IN PWSTR *PacketDriverName,
IN PUNICODE_STRING RegistryPath
);
NTSTATUS
PacketCreateSymbolicLink(
IN PUNICODE_STRING DeviceName,
IN BOOLEAN Create
);
NTSTATUS
PacketQueryRegistryRoutine(
IN PWSTR ValueName,
IN ULONG ValueType,
IN PVOID ValueData,
IN ULONG ValueLength,
IN PVOID Context,
IN PVOID EntryContext
);
#if DBG
ULONG PacketDebugFlag = PACKET_DEBUG_LOUD;
#endif
PDEVICE_EXTENSION GlobalDeviceExtension;
/*
 * DriverEntry -- driver initialization.
 *
 * Registers the "PacketDriver" protocol with NDIS, installs the IRP
 * dispatch routines, reads the adapter ("Bind") and device ("Export")
 * MULTI_SZ name lists from the registry, and creates one device object
 * per name pair.  Returns STATUS_SUCCESS if at least one device object
 * was created; otherwise frees the registry string buffers, deregisters
 * the protocol, and fails.
 */
NTSTATUS DriverEntry(
    IN PDRIVER_OBJECT DriverObject,
    IN PUNICODE_STRING RegistryPath
    )
{
    NDIS_PROTOCOL_CHARACTERISTICS ProtocolChar;
    UNICODE_STRING MacDriverName;
    UNICODE_STRING UnicodeDeviceName;
    PDEVICE_OBJECT DeviceObject = NULL;
    PDEVICE_EXTENSION DeviceExtension = NULL;
    NTSTATUS Status = STATUS_SUCCESS;
    /* NOTE(review): ErrorCode is assigned here but never used afterwards. */
    NTSTATUS ErrorCode = STATUS_SUCCESS;
    NDIS_STRING ProtoName = NDIS_STRING_CONST("PacketDriver");
    ULONG DevicesCreated=0;
    PWSTR BindString;
    PWSTR ExportString;
    PWSTR BindStringSave;
    PWSTR ExportStringSave;
    NDIS_HANDLE NdisProtocolHandle;
    IF_LOUD(DbgPrint("\n\nPacket: DriverEntry\n");)
    /* Fill in the NDIS 3.0 protocol characteristics and register. */
    RtlZeroMemory(&ProtocolChar,sizeof(NDIS_PROTOCOL_CHARACTERISTICS));
    ProtocolChar.MajorNdisVersion = 3;
    ProtocolChar.MinorNdisVersion = 0;
    ProtocolChar.Reserved = 0;
    ProtocolChar.OpenAdapterCompleteHandler = PacketOpenAdapterComplete;
    ProtocolChar.CloseAdapterCompleteHandler = PacketCloseAdapterComplete;
    ProtocolChar.SendCompleteHandler = PacketSendComplete;
    ProtocolChar.TransferDataCompleteHandler = PacketTransferDataComplete;
    ProtocolChar.ResetCompleteHandler = PacketResetComplete;
    ProtocolChar.RequestCompleteHandler = PacketRequestComplete;
    ProtocolChar.ReceiveHandler = PacketReceiveIndicate;
    ProtocolChar.ReceiveCompleteHandler = PacketReceiveComplete;
    ProtocolChar.StatusHandler = PacketStatus;
    ProtocolChar.StatusCompleteHandler = PacketStatusComplete;
    ProtocolChar.Name = ProtoName;
    NdisRegisterProtocol(
        &Status,
        &NdisProtocolHandle,
        &ProtocolChar,
        sizeof(NDIS_PROTOCOL_CHARACTERISTICS));
    if (Status != NDIS_STATUS_SUCCESS) {
        IF_LOUD(DbgPrint("Packet: Failed to register protocol with NDIS\n");)
        return Status;
    }
    //
    // Set up the device driver entry points.
    //
    DriverObject->MajorFunction[IRP_MJ_CREATE] = PacketOpen;
    DriverObject->MajorFunction[IRP_MJ_CLOSE] = PacketClose;
    DriverObject->MajorFunction[IRP_MJ_READ] = PacketRead;
    DriverObject->MajorFunction[IRP_MJ_WRITE] = PacketWrite;
    DriverObject->MajorFunction[IRP_MJ_CLEANUP] = PacketCleanup;
    DriverObject->MajorFunction[IRP_MJ_DEVICE_CONTROL] = PacketIoControl;
    DriverObject->DriverUnload = PacketUnload;
    //
    // Get the name of the Packet driver and the name of the MAC driver
    // to bind to from the registry
    //
    Status=PacketReadRegistry(
        &BindString,
        &ExportString,
        RegistryPath
        );
    if (Status != STATUS_SUCCESS) {
        IF_LOUD(DbgPrint("Perf: Failed to read registry\n");)
        goto RegistryError;
    }
    /* Keep the buffer heads: ownership passes to the first device's
     * extension below, and PacketUnload frees them from there. */
    BindStringSave = BindString;
    ExportStringSave = ExportString;
    // create a device object for each entry
    while (*BindString!= UNICODE_NULL && *ExportString!= UNICODE_NULL) {
        // Create a counted unicode string for both null terminated strings
        RtlInitUnicodeString(
            &MacDriverName,
            BindString
            );
        RtlInitUnicodeString(
            &UnicodeDeviceName,
            ExportString
            );
        // Advance to the next string of the MULTI_SZ string
        BindString += (MacDriverName.Length+sizeof(UNICODE_NULL))/sizeof(WCHAR);
        ExportString += (UnicodeDeviceName.Length+sizeof(UNICODE_NULL))/sizeof(WCHAR);
        IF_LOUD(DbgPrint("Packet: DeviceName=%ws MacName=%ws\n",UnicodeDeviceName.Buffer,MacDriverName.Buffer);)
        // Create the device object
        Status = IoCreateDevice(
                    DriverObject,
                    sizeof(DEVICE_EXTENSION),
                    &UnicodeDeviceName,
                    FILE_DEVICE_PROTOCOL,
                    0,
                    FALSE,
                    &DeviceObject
                    );
        if (Status != STATUS_SUCCESS) {
            IF_LOUD(DbgPrint("Perf: IoCreateDevice() failed:\n");)
            break;
        }
        DevicesCreated++;
        DeviceObject->Flags |= DO_DIRECT_IO;
        DeviceExtension  =  (PDEVICE_EXTENSION) DeviceObject->DeviceExtension;
        DeviceExtension->DeviceObject = DeviceObject;
        // Save the the name of the MAC driver to open in the Device Extension
        DeviceExtension->AdapterName=MacDriverName;
        /* Only the first device extension records the MULTI_SZ buffer
         * heads, so PacketUnload frees each buffer exactly once. */
        if (DevicesCreated == 1) {
            DeviceExtension->BindString = BindStringSave;
            DeviceExtension->ExportString = ExportStringSave;
        }
        DeviceExtension->NdisProtocolHandle=NdisProtocolHandle;
    }
    if (DevicesCreated > 0) {
        /* Partial success (some devices created before a failure) is
         * still reported as success. */
        return STATUS_SUCCESS;
    }
    /* No devices were created: release the registry buffers before
     * backing out the protocol registration. */
    ExFreePool(BindStringSave);
    ExFreePool(ExportStringSave);
RegistryError:
    NdisDeregisterProtocol(
        &Status,
        NdisProtocolHandle
        );
    Status=STATUS_UNSUCCESSFUL;
    return(Status);
}
/*
 * PacketUnload -- driver unload routine.
 *
 * Walks the device list, freeing the registry string buffers stored on
 * the (first) device extension, deletes every device object, and
 * finally deregisters the protocol from NDIS.
 *
 * NOTE(review): if the driver was loaded with an empty device list,
 * NdisProtocolHandle is used uninitialized below -- confirm DriverEntry
 * guarantees at least one device on success.
 */
VOID PacketUnload( IN PDRIVER_OBJECT DriverObject )
{
    PDEVICE_OBJECT DeviceObject;
    PDEVICE_OBJECT OldDeviceObject;
    PDEVICE_EXTENSION DeviceExtension;
    NDIS_HANDLE NdisProtocolHandle;
    NDIS_STATUS Status;
    IF_LOUD(DbgPrint("Packet: Unload\n");)
    DeviceObject = DriverObject->DeviceObject;
    while (DeviceObject != NULL) {
        DeviceExtension = DeviceObject->DeviceExtension;
        /* Handle is identical on every extension; keep the last seen. */
        NdisProtocolHandle = DeviceExtension->NdisProtocolHandle;
        /* Only the first device's extension owns these buffers (set in
         * DriverEntry); they are NULL on every other extension. */
        if (DeviceExtension->BindString != NULL) {
            ExFreePool(DeviceExtension->BindString);
        }
        if (DeviceExtension->ExportString != NULL) {
            ExFreePool(DeviceExtension->ExportString);
        }
        /* Capture the next pointer before the current object is deleted. */
        OldDeviceObject=DeviceObject;
        DeviceObject=DeviceObject->NextDevice;
        IoDeleteDevice(OldDeviceObject);
    }
    NdisDeregisterProtocol( &Status, NdisProtocolHandle );
}
/*
 * PacketIoControl -- IRP_MJ_DEVICE_CONTROL dispatch routine.
 *
 * Pulls a free INTERNAL_REQUEST off the open instance's request list
 * and uses it to forward the caller's IOCTL to NDIS:
 *   - IOCTL_PROTOCOL_RESET: queues the IRP and issues NdisReset;
 *   - IOCTL_PROTOCOL_SET_OID / QUERY_OID: validates the buffer sizes
 *     and issues an NdisRequest.
 * The IRP is marked pending; PacketRequestComplete (or
 * PacketResetComplete) completes it later.  Fails immediately when no
 * INTERNAL_REQUEST is available.
 */
NTSTATUS PacketIoControl( IN PDEVICE_OBJECT DeviceObject, IN PIRP Irp )
{
    POPEN_INSTANCE      Open;
    PIO_STACK_LOCATION  IrpSp;
    PLIST_ENTRY         RequestListEntry;
    PINTERNAL_REQUEST   pRequest;
    ULONG               FunctionCode;
    NDIS_STATUS         Status;
    IF_LOUD(DbgPrint("Packet: IoControl\n");)
    IrpSp = IoGetCurrentIrpStackLocation(Irp);
    FunctionCode=IrpSp->Parameters.DeviceIoControl.IoControlCode;
    Open=IrpSp->FileObject->FsContext;
    /* Grab a preallocated request block; fail if the pool is exhausted. */
    RequestListEntry=ExInterlockedRemoveHeadList(&Open->RequestList,&Open->RequestSpinLock);
    if (RequestListEntry == NULL) {
        Irp->IoStatus.Status = STATUS_UNSUCCESSFUL;
        Irp->IoStatus.Information = 0;
        IoCompleteRequest(Irp, IO_NO_INCREMENT);
        return STATUS_UNSUCCESSFUL;
    }
    pRequest=CONTAINING_RECORD(RequestListEntry,INTERNAL_REQUEST,ListElement);
    pRequest->Irp=Irp;
    IoMarkIrpPending(Irp);
    Irp->IoStatus.Status = STATUS_PENDING;
    IF_LOUD(DbgPrint("Packet: Function code is %08lx  buff size=%08lx  %08lx\n",FunctionCode,IrpSp->Parameters.DeviceIoControl.InputBufferLength,IrpSp->Parameters.DeviceIoControl.OutputBufferLength);)
    if (FunctionCode == IOCTL_PROTOCOL_RESET) {
        IF_LOUD(DbgPrint("Packet: IoControl - Reset request\n");)
        /* Park the IRP on the reset list; PacketResetComplete finds it there. */
        ExInterlockedInsertTailList(
            &Open->ResetIrpList,
            &Irp->Tail.Overlay.ListEntry,
            &Open->RequestSpinLock);
        NdisReset( &Status, Open->AdapterHandle );
        if (Status != NDIS_STATUS_PENDING) {
            IF_LOUD(DbgPrint("Packet: IoControl - ResetComplte being called\n");)
            PacketResetComplete( Open, Status );
        }
    } else {
        // See if it is an Ndis request
        PPACKET_OID_DATA    OidData=Irp->AssociatedIrp.SystemBuffer;
        /* The size checks are ordered so OidData->Length is only read
         * after the buffer is known to hold at least a PACKET_OID_DATA
         * header (the && chain short-circuits). */
        if (((FunctionCode == IOCTL_PROTOCOL_SET_OID) || (FunctionCode == IOCTL_PROTOCOL_QUERY_OID))
            &&
            (IrpSp->Parameters.DeviceIoControl.InputBufferLength == IrpSp->Parameters.DeviceIoControl.OutputBufferLength)
            &&
            (IrpSp->Parameters.DeviceIoControl.InputBufferLength >= sizeof(PACKET_OID_DATA))
            &&
            (IrpSp->Parameters.DeviceIoControl.InputBufferLength >= sizeof(PACKET_OID_DATA)-1+OidData->Length)) {
            IF_LOUD(DbgPrint("Packet: IoControl: Request: Oid=%08lx, Length=%08lx\n",OidData->Oid,OidData->Length);)
            if (FunctionCode == IOCTL_PROTOCOL_SET_OID) {
                pRequest->Request.RequestType=NdisRequestSetInformation;
                pRequest->Request.DATA.SET_INFORMATION.Oid=OidData->Oid;
                pRequest->Request.DATA.SET_INFORMATION.InformationBuffer=OidData->Data;
                pRequest->Request.DATA.SET_INFORMATION.InformationBufferLength=OidData->Length;
            } else {
                pRequest->Request.RequestType=NdisRequestQueryInformation;
                pRequest->Request.DATA.QUERY_INFORMATION.Oid=OidData->Oid;
                pRequest->Request.DATA.QUERY_INFORMATION.InformationBuffer=OidData->Data;
                pRequest->Request.DATA.QUERY_INFORMATION.InformationBufferLength=OidData->Length;
            }
            NdisRequest(
                &Status,
                Open->AdapterHandle,
                &pRequest->Request
                );
        } else { // buffer too small
            Status=NDIS_STATUS_FAILURE;
            pRequest->Request.DATA.SET_INFORMATION.BytesRead=0;
            pRequest->Request.DATA.QUERY_INFORMATION.BytesWritten=0;
        }
        /* Synchronous completion: invoke the completion handler directly. */
        if (Status != NDIS_STATUS_PENDING) {
            IF_LOUD(DbgPrint("Packet: Calling RequestCompleteHandler\n");)
            PacketRequestComplete(
                Open,
                &pRequest->Request,
                Status
                );
        }
    }
    return(STATUS_PENDING);
}
/*
 * PacketRequestComplete -- NDIS request completion handler.
 *
 * Copies the byte count produced by the adapter back into the caller's
 * PACKET_OID_DATA, returns the INTERNAL_REQUEST to the open instance's
 * free list, and completes the pending IRP with the NDIS status.
 */
VOID PacketRequestComplete(
    IN NDIS_HANDLE   ProtocolBindingContext,
    IN PNDIS_REQUEST NdisRequest,
    IN NDIS_STATUS   Status
    )
{
    POPEN_INSTANCE      Open;
    PIO_STACK_LOCATION  IrpSp;
    PIRP                Irp;
    PINTERNAL_REQUEST   pRequest;
    UINT                FunctionCode;
    PPACKET_OID_DATA    OidData;
    IF_LOUD(DbgPrint("Packet: RequestComplete\n");)
    Open= (POPEN_INSTANCE)ProtocolBindingContext;
    /* Recover the wrapper (and thus the IRP) from the embedded request. */
    pRequest=CONTAINING_RECORD(NdisRequest,INTERNAL_REQUEST,Request);
    Irp=pRequest->Irp;
    IrpSp = IoGetCurrentIrpStackLocation(Irp);
    FunctionCode=IrpSp->Parameters.DeviceIoControl.IoControlCode;
    OidData=Irp->AssociatedIrp.SystemBuffer;
    /* Report how much data NDIS actually consumed/produced. */
    if (FunctionCode == IOCTL_PROTOCOL_SET_OID) {
        OidData->Length=pRequest->Request.DATA.SET_INFORMATION.BytesRead;
    } else {
        if (FunctionCode == IOCTL_PROTOCOL_QUERY_OID) {
            OidData->Length=pRequest->Request.DATA.QUERY_INFORMATION.BytesWritten;
        }
    }
    Irp->IoStatus.Information=IrpSp->Parameters.DeviceIoControl.InputBufferLength;
    /* Recycle the request block for the next IOCTL. */
    ExInterlockedInsertTailList(
        &Open->RequestList,
        &pRequest->ListElement,
        &Open->RequestSpinLock);
    Irp->IoStatus.Status = Status;
    IoCompleteRequest(Irp, IO_NO_INCREMENT);
}
/*
 * PacketStatus -- NDIS status-indication callback.  This driver has no
 * use for status indications, so the handler is a logging-only stub.
 */
VOID PacketStatus(
    IN NDIS_HANDLE   ProtocolBindingContext,
    IN NDIS_STATUS   Status,
    IN PVOID         StatusBuffer,
    IN UINT          StatusBufferSize
    )
{
    IF_LOUD(DbgPrint("Packet: Status Indication\n");)
}
/* PacketStatusComplete -- companion no-op for the status stub above. */
VOID PacketStatusComplete( IN NDIS_HANDLE  ProtocolBindingContext )
{
    IF_LOUD(DbgPrint("Packet: StatusIndicationComplete\n");)
}
#if 0
/*
 * PacketCreateSymbolicLink -- create or delete a \DosDevices\ alias for
 * a \Device\ name so user mode can open the device.  Currently compiled
 * out (#if 0).
 *
 * NOTE(review): before re-enabling, fix two latent issues: (1) Status
 * is returned uninitialized when the pool allocation fails; (2) the
 * name-suffix arithmetic mixes byte and WCHAR offsets -- verify the
 * (DeviceName->Buffer + sizeof("\\Device")) expression lands on the
 * intended suffix.
 */
NTSTATUS PacketCreateSymbolicLink(
    IN  PUNICODE_STRING  DeviceName,
    IN  BOOLEAN          Create
    )
{
    UNICODE_STRING UnicodeDosDeviceName;
    NTSTATUS       Status;
    if (DeviceName->Length < sizeof(L"\\Device\\")) {
        return STATUS_UNSUCCESSFUL;
    }
    RtlInitUnicodeString(&UnicodeDosDeviceName,NULL);
    UnicodeDosDeviceName.MaximumLength=DeviceName->Length+sizeof(L"\\DosDevices")+sizeof(UNICODE_NULL);
    UnicodeDosDeviceName.Buffer=ExAllocatePool(
                                    NonPagedPool,
                                    UnicodeDosDeviceName.MaximumLength
                                    );
    if (UnicodeDosDeviceName.Buffer != NULL) {
        RtlZeroMemory( UnicodeDosDeviceName.Buffer, UnicodeDosDeviceName.MaximumLength );
        RtlAppendUnicodeToString( &UnicodeDosDeviceName, L"\\DosDevices\\" );
        RtlAppendUnicodeToString( &UnicodeDosDeviceName, (DeviceName->Buffer+(sizeof("\\Device"))) );
        IF_LOUD(DbgPrint("Packet: DosDeviceName is %ws\n",UnicodeDosDeviceName.Buffer);)
        if (Create) {
            Status=IoCreateSymbolicLink(&UnicodeDosDeviceName,DeviceName);
        } else {
            Status=IoDeleteSymbolicLink(&UnicodeDosDeviceName);
        }
        ExFreePool(UnicodeDosDeviceName.Buffer);
    }
    return Status;
}
#endif
/*
 * PacketReadRegistry -- read the "Bind" and "Export" MULTI_SZ values
 * from <RegistryPath>\Parameters\Linkage.
 *
 * On success, *MacDriverName and *PacketDriverName point to NonPagedPool
 * copies of the two value buffers (made by PacketQueryRegistryRoutine);
 * the caller owns and must free them.
 */
NTSTATUS PacketReadRegistry(
    IN  PWSTR              *MacDriverName,
    IN  PWSTR              *PacketDriverName,
    IN  PUNICODE_STRING     RegistryPath
    )
{
    NTSTATUS   Status;
    /* Entry [4] stays zeroed and terminates the table. */
    RTL_QUERY_REGISTRY_TABLE ParamTable[5];
    PWSTR      Bind       = L"Bind";        // LAURI: \Device\W30NT1
    PWSTR      Export     = L"Export";      // \Device\appletalk\W30NT1\0\0
    PWSTR      Parameters = L"Parameters";
    PWSTR      Linkage    = L"Linkage";
    PWCHAR     Path;
    /* NUL-terminated copy of the registry path (counted string -> C string). */
    Path=ExAllocatePool( PagedPool, RegistryPath->Length+sizeof(WCHAR) );
    if (!Path) return STATUS_INSUFFICIENT_RESOURCES;
    RtlZeroMemory( Path, RegistryPath->Length+sizeof(WCHAR) );
    RtlCopyMemory( Path, RegistryPath->Buffer, RegistryPath->Length );
    IF_LOUD(DbgPrint("Packet: Reg path is %ws\n",RegistryPath->Buffer);)
    RtlZeroMemory( ParamTable, sizeof(ParamTable) );
    // change to the parameters key
    ParamTable[0].QueryRoutine = NULL;
    ParamTable[0].Flags = RTL_QUERY_REGISTRY_SUBKEY;
    ParamTable[0].Name = Parameters;
    // change to the linkage key
    ParamTable[1].QueryRoutine = NULL;
    ParamTable[1].Flags = RTL_QUERY_REGISTRY_SUBKEY;
    ParamTable[1].Name = Linkage;
    // Get the name of the mac driver we should bind to
    ParamTable[2].QueryRoutine = PacketQueryRegistryRoutine;
    ParamTable[2].Flags = RTL_QUERY_REGISTRY_REQUIRED | RTL_QUERY_REGISTRY_NOEXPAND;
    ParamTable[2].Name = Bind;
    ParamTable[2].EntryContext = (PVOID)MacDriverName;
    ParamTable[2].DefaultType = REG_MULTI_SZ;
    // Get the name that we should use for the driver object
    ParamTable[3].QueryRoutine = PacketQueryRegistryRoutine;
    ParamTable[3].Flags = RTL_QUERY_REGISTRY_REQUIRED | RTL_QUERY_REGISTRY_NOEXPAND;
    ParamTable[3].Name = Export;
    ParamTable[3].EntryContext = (PVOID)PacketDriverName;
    ParamTable[3].DefaultType = REG_MULTI_SZ;
    Status=RtlQueryRegistryValues(
               RTL_REGISTRY_ABSOLUTE,
               Path,
               ParamTable,
               NULL,
               NULL
               );
    ExFreePool(Path);
    return Status;
}
/*
 * PacketQueryRegistryRoutine -- RtlQueryRegistryValues callback.
 *
 * Accepts only REG_MULTI_SZ values.  Copies the value data into a fresh
 * NonPagedPool buffer and stores the buffer pointer through
 * EntryContext (a PWSTR* supplied by PacketReadRegistry); the caller
 * owns the copy.
 */
NTSTATUS PacketQueryRegistryRoutine(
    IN PWSTR     ValueName,
    IN ULONG     ValueType,
    IN PVOID     ValueData,
    IN ULONG     ValueLength,
    IN PVOID     Context,
    IN PVOID     EntryContext
    )
{
    PUCHAR Copy;
    IF_LOUD(DbgPrint("Perf: QueryRegistryRoutine\n");)
    if (ValueType != REG_MULTI_SZ) {
        return STATUS_OBJECT_NAME_NOT_FOUND;
    }
    Copy = ExAllocatePool(NonPagedPool, ValueLength);
    if (Copy == NULL) {
        return STATUS_INSUFFICIENT_RESOURCES;
    }
    RtlCopyMemory(Copy, ValueData, ValueLength);
    *((PUCHAR *)EntryContext) = Copy;
    return STATUS_SUCCESS;
}
| 7,701 |
313 |
<gh_stars>100-1000
package erjang.epmd;
import java.io.*;
import java.net.*;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.nio.*;
import java.nio.channels.*;
/**
 * A small NIO-based TCP server skeleton used by Erjang's epmd
 * implementation: it listens on a single loopback port with one
 * selector and dispatches accept/read/write/connect readiness to the
 * PacketConnection handlers created by subclasses via
 * {@code newConnection}.  Derived from the pkwnet "StreamSwitch"
 * message-switch example.
 *
 * @author PKWooster
 * @version 1.0 June 14,2004
 */
public abstract class PacketServer {

	static final Logger log = Logger.getLogger("erjang.epmd");

	private ServerSocket ss; // the listening socket
	private ServerSocketChannel sschan; // the listening channel
	private Selector selector; // the only selector

	/**
	 * Binds a non-blocking server socket to 127.0.0.1:port and runs the
	 * selector loop forever, dispatching accept/read/write/connect
	 * readiness to the per-connection handlers.
	 *
	 * @param port TCP port to listen on (loopback interface only)
	 */
	protected void listen(int port) {
		log.info("listening on port=" + port);
		try {
			sschan = ServerSocketChannel.open();
			sschan.configureBlocking(false);
			ss = sschan.socket();
			ss.bind(new InetSocketAddress(InetAddress.getByAddress(new byte[] {127,0,0,1}), port));
			selector = Selector.open();
			sschan.register(selector, SelectionKey.OP_ACCEPT);
		} catch (IOException ie) {
			ie.printStackTrace();
			System.exit(0);
		}

		while (true) {
			int n = 0;
			// now we select any pending io
			try {
				n = selector.select();
			} catch (Exception e) {
				log.severe("select failed: " + e.getMessage());
				log.log(Level.FINE, "details: ", e);
			}
			log.fine("select n=" + n);

			// process any selected keys
			Iterator<SelectionKey> it = selector.selectedKeys().iterator();
			while (it.hasNext()) {
				SelectionKey key = it.next();
				// Bug fix: a handled key must always be removed from the
				// selected-key set.  The original removed it only when
				// interestOps() == 0, so stale keys were re-dispatched with
				// outdated readyOps on every pass of the outer loop.
				it.remove();
				if (!key.isValid()) {
					continue; // cancelled while queued; readyOps() would throw
				}
				int kro = key.readyOps();
				log.fine("kro=" + kro);
				if ((kro & SelectionKey.OP_READ) == SelectionKey.OP_READ)
					doRead(key);
				if ((kro & SelectionKey.OP_WRITE) == SelectionKey.OP_WRITE)
					doWrite(key);
				if ((kro & SelectionKey.OP_ACCEPT) == SelectionKey.OP_ACCEPT)
					doAccept(key);
				if ((kro & SelectionKey.OP_CONNECT) == SelectionKey.OP_CONNECT)
					doConnect(key);
			}
		}
	}

	/** Accepts a pending connection and registers it for reading. */
	private void doAccept(SelectionKey sk) {
		ServerSocketChannel sc = (ServerSocketChannel) sk.channel();
		log.fine("accept");
		try {
			SocketChannel usc = sc.accept();
			if (usc == null) return; // spurious wakeup: nothing to accept
			usc.configureBlocking(false);
			Socket sock = usc.socket();
			String nm = sock.getInetAddress() + ":" + sock.getPort();
			log.info("connection from " + nm);
			sock.setKeepAlive(true);
			SelectionKey dsk = usc.register(selector, SelectionKey.OP_READ, null);
			PacketConnection conn = newConnection(dsk); // contains socket i/o code
			conn.setName(nm);
			dsk.attach(conn); // link it to the key so we can find it
		} catch (IOException re) {
			log.severe("registration error: " + re.getMessage());
			log.log(Level.FINE, "details: ", re);
		}
	}

	/** Factory for the per-connection handler attached to each accepted key. */
	protected abstract PacketConnection newConnection(SelectionKey dsk);

	/** Dispatches a read-ready key to its connection handler. */
	private void doRead(SelectionKey sk) {
		((PacketConnection) sk.attachment()).doRead();
	}

	/** Dispatches a write-ready key to its connection handler. */
	private void doWrite(SelectionKey sk) {
		((PacketConnection) sk.attachment()).doWrite();
	}

	/** Dispatches a connect-ready key to its connection handler. */
	private void doConnect(SelectionKey sk) {
		((PacketConnection) sk.attachment()).doConnect();
	}
}
| 1,427 |
540 |
<reponame>adil8531/stripe-java
package com.stripe.param.issuing;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.google.common.collect.ImmutableMap;
import com.stripe.param.common.EmptyParam;
import java.util.Map;
import org.junit.jupiter.api.Test;
class TransactionUpdateParamsTest {

  /** Casts the untyped "metadata" entry back to a string map for assertions. */
  @SuppressWarnings("unchecked")
  private static Map<String, String> metadataOf(Map<String, Object> untyped) {
    return (Map<String, String>) untyped.get("metadata");
  }

  @Test
  public void testSetMetadataEmpty() {
    // EmptyParam.EMPTY serializes the metadata key as absent.
    TransactionUpdateParams params =
        TransactionUpdateParams.builder().setMetadata(EmptyParam.EMPTY).build();
    assertNull(params.toMap().get("metadata"));
  }

  @Test
  public void testSetMetadataMap() {
    Map<String, String> metadata = ImmutableMap.of("key_1", "value_1", "key_2", "value_2");
    Map<String, Object> untyped =
        TransactionUpdateParams.builder().setMetadata(metadata).build().toMap();
    assertTrue(untyped.get("metadata") instanceof Map<?, ?>);
    assertEquals("value_1", metadataOf(untyped).get("key_1"));
    assertEquals("value_2", metadataOf(untyped).get("key_2"));
  }

  @Test
  public void testPutMetadata() {
    // Null values must round-trip as null entries.
    Map<String, Object> untyped =
        TransactionUpdateParams.builder()
            .putMetadata("key_1", "value_1")
            .putMetadata("key_2", "value_2")
            .putMetadata("key_null", null)
            .build()
            .toMap();
    assertTrue(untyped.get("metadata") instanceof Map<?, ?>);
    assertEquals("value_1", metadataOf(untyped).get("key_1"));
    assertEquals("value_2", metadataOf(untyped).get("key_2"));
    assertNull(metadataOf(untyped).get("key_null"));
  }

  @Test
  public void testPutAllMetadata() {
    // Successive putAllMetadata calls accumulate rather than replace.
    Map<String, String> first = ImmutableMap.of("key_1", "value_1");
    Map<String, String> second = ImmutableMap.of("key_2", "value_2");
    Map<String, Object> untyped =
        TransactionUpdateParams.builder()
            .putAllMetadata(first)
            .putAllMetadata(second)
            .build()
            .toMap();
    assertTrue(untyped.get("metadata") instanceof Map<?, ?>);
    assertEquals("value_1", metadataOf(untyped).get("key_1"));
    assertEquals("value_2", metadataOf(untyped).get("key_2"));
  }
}
| 1,076 |
307 |
#ifndef _DLGASTEROIDFIELDEDITOR_H
#define _DLGASTEROIDFIELDEDITOR_H
/*
* Created by Ian "Goober5000" Warfield and "z64555" for the FreeSpace2 Source
* Code Project.
*
* You may not sell or otherwise commercially exploit the source or things you
* create based on the source.
*/
#include "base/wxFRED_base.h"
#include <wx/wx.h>
// Editor dialog for a mission's asteroid field settings.  Derives from the
// wxFormBuilder-generated base class and supplies the event handlers.
class dlgAsteroidFieldEditor : public fredBase::dlgAsteroidFieldEditor
{
public:
	dlgAsteroidFieldEditor( wxWindow* parent, wxWindowID id );
protected:
	// Declared but not implemented: pre-C++11 idiom making the dialog
	// non-default-constructible and non-copyable.
	dlgAsteroidFieldEditor( void );
	dlgAsteroidFieldEditor( const dlgAsteroidFieldEditor& T );
	// Handlers for dlgAsteroidFieldEditor
	void OnClose( wxCloseEvent& event );
	void OnEnable( wxCommandEvent& event );
	void OnContentType( wxCommandEvent& event );
	void OnMode( wxCommandEvent& event );
	void OnInnerBoxEnable( wxCommandEvent& event );
	void OnOK( wxCommandEvent& event );
	void OnCancel( wxCommandEvent& event );
private:
};
#endif // _DLGASTEROIDFIELDEDITOR_H
| 338 |
417 |
//
// NextViewController.h
// ZYBannerViewDemo
//
// Created by 张志延 on 15/12/6.
// Copyright (c) 2015年 mrdream. All rights reserved.
//
#import <UIKit/UIKit.h>
// Plain view controller used as the navigation destination in the banner
// demo; it declares no API beyond what UIViewController provides.
@interface NextViewController : UIViewController
@end
| 91 |
973 |
/*_##########################################################################
_##
_## Copyright (C) 2013-2019 Pcap4J.org
_##
_##########################################################################
*/
package org.pcap4j.packet;
import static org.pcap4j.util.ByteArrays.BYTE_SIZE_IN_BYTES;
import static org.pcap4j.util.ByteArrays.SHORT_SIZE_IN_BYTES;
import java.io.Serializable;
import java.net.Inet6Address;
import java.util.ArrayList;
import java.util.List;
import org.pcap4j.packet.factory.PacketFactories;
import org.pcap4j.packet.namednumber.IcmpV6Code;
import org.pcap4j.packet.namednumber.IcmpV6Type;
import org.pcap4j.packet.namednumber.IpNumber;
import org.pcap4j.packet.namednumber.IpV6NeighborDiscoveryOptionType;
import org.pcap4j.util.ByteArrays;
/**
* @author <NAME>
* @since pcap4j 0.9.15
*/
public final class IcmpV6CommonPacket extends AbstractPacket {
/** */
private static final long serialVersionUID = 7643067752830062365L;
private final IcmpV6CommonHeader header;
private final Packet payload;
/**
* A static factory method. This method validates the arguments by {@link
* ByteArrays#validateBounds(byte[], int, int)}, which may throw exceptions undocumented here.
*
* @param rawData rawData
* @param offset offset
* @param length length
* @return a new IcmpV6CommonPacket object.
* @throws IllegalRawDataException if parsing the raw data fails.
*/
public static IcmpV6CommonPacket newPacket(byte[] rawData, int offset, int length)
throws IllegalRawDataException {
ByteArrays.validateBounds(rawData, offset, length);
return new IcmpV6CommonPacket(rawData, offset, length);
}
private IcmpV6CommonPacket(byte[] rawData, int offset, int length)
throws IllegalRawDataException {
this.header = new IcmpV6CommonHeader(rawData, offset, length);
int payloadLength = length - header.length();
if (payloadLength > 0) {
this.payload =
PacketFactories.getFactory(Packet.class, IcmpV6Type.class)
.newInstance(rawData, offset + header.length(), payloadLength, header.getType());
} else {
this.payload = null;
}
}
private IcmpV6CommonPacket(Builder builder) {
if (builder == null || builder.type == null || builder.code == null) {
StringBuilder sb = new StringBuilder();
sb.append("builder: ")
.append(builder)
.append(" builder.type: ")
.append(builder.type)
.append(" builder.code: ")
.append(builder.code);
throw new NullPointerException(sb.toString());
}
if (builder.correctChecksumAtBuild) {
if (builder.srcAddr == null || builder.dstAddr == null) {
StringBuilder sb = new StringBuilder();
sb.append("builder.srcAddr: ")
.append(builder.srcAddr)
.append(" builder.dstAddr: ")
.append(builder.dstAddr);
throw new NullPointerException(sb.toString());
}
}
this.payload = builder.payloadBuilder != null ? builder.payloadBuilder.build() : null;
this.header = new IcmpV6CommonHeader(builder, payload.getRawData());
}
@Override
public IcmpV6CommonHeader getHeader() {
return header;
}
@Override
public Packet getPayload() {
return payload;
}
@Override
public Builder getBuilder() {
return new Builder(this);
}
  /**
   * Checks whether this packet's stored ICMPv6 checksum is valid for the given
   * IPv6 pseudo-header addresses.
   *
   * @param srcAddr source IPv6 address used in the pseudo header (non-null)
   * @param dstAddr destination IPv6 address used in the pseudo header (non-null)
   * @param acceptZero whether a stored checksum of zero (i.e. the sender did
   *     not compute one) should be treated as valid
   * @return true if the packet represented by this object has a valid checksum; false otherwise.
   */
  public boolean hasValidChecksum(Inet6Address srcAddr, Inet6Address dstAddr, boolean acceptZero) {
    if (srcAddr == null || dstAddr == null) {
      StringBuilder sb = new StringBuilder();
      sb.append("srcAddr: ").append(srcAddr).append(" dstAddr: ").append(dstAddr);
      throw new NullPointerException(sb.toString());
    }
    if (!srcAddr.getClass().isInstance(dstAddr)) {
      StringBuilder sb = new StringBuilder();
      sb.append("srcAddr: ").append(srcAddr).append(" dstAddr: ").append(dstAddr);
      throw new IllegalArgumentException(sb.toString());
    }
    // A missing payload contributes no bytes to the checksum.
    byte[] payloadData = payload != null ? payload.getRawData() : new byte[0];
    // Summing the header (which still contains the stored checksum field)
    // together with the payload yields 0 exactly when the stored checksum
    // is correct.
    short calculatedChecksum =
        header.calcChecksum(srcAddr, dstAddr, header.getRawData(), payloadData);
    if (calculatedChecksum == 0) {
      return true;
    }
    if (header.checksum == 0 && acceptZero) {
      return true;
    }
    return false;
  }
/**
* @author <NAME>
* @since pcap4j 0.9.15
*/
public static final class Builder extends AbstractBuilder
implements ChecksumBuilder<IcmpV6CommonPacket> {
private IcmpV6Type type;
private IcmpV6Code code;
private short checksum;
private Packet.Builder payloadBuilder;
private Inet6Address srcAddr;
private Inet6Address dstAddr;
private boolean correctChecksumAtBuild;
/** */
public Builder() {}
private Builder(IcmpV6CommonPacket packet) {
this.type = packet.header.type;
this.code = packet.header.code;
this.checksum = packet.header.checksum;
this.payloadBuilder = packet.payload != null ? packet.payload.getBuilder() : null;
}
/**
* @param type type
* @return this Builder object for method chaining.
*/
public Builder type(IcmpV6Type type) {
this.type = type;
return this;
}
/**
* @param code code
* @return this Builder object for method chaining.
*/
public Builder code(IcmpV6Code code) {
this.code = code;
return this;
}
/**
* @param checksum checksum
* @return this Builder object for method chaining.
*/
public Builder checksum(short checksum) {
this.checksum = checksum;
return this;
}
@Override
public Builder payloadBuilder(Packet.Builder payloadBuilder) {
this.payloadBuilder = payloadBuilder;
return this;
}
@Override
public Packet.Builder getPayloadBuilder() {
return payloadBuilder;
}
/**
* used for checksum calculation.
*
* @param srcAddr srcAddr
* @return this Builder object for method chaining.
*/
public Builder srcAddr(Inet6Address srcAddr) {
this.srcAddr = srcAddr;
return this;
}
/**
* used for checksum calculation.
*
* @param dstAddr dstAddr
* @return this Builder object for method chaining.
*/
public Builder dstAddr(Inet6Address dstAddr) {
this.dstAddr = dstAddr;
return this;
}
@Override
public Builder correctChecksumAtBuild(boolean correctChecksumAtBuild) {
this.correctChecksumAtBuild = correctChecksumAtBuild;
return this;
}
@Override
public IcmpV6CommonPacket build() {
return new IcmpV6CommonPacket(this);
}
}
/**
* @author <NAME>
* @since pcap4j 0.9.15
*/
public static final class IcmpV6CommonHeader extends AbstractHeader {
/*
* 0 15
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Type | Code |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
* | Checksum |
* +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*
*/
/** */
private static final long serialVersionUID = -7473322861606186L;
private static final int TYPE_OFFSET = 0;
private static final int TYPE_SIZE = BYTE_SIZE_IN_BYTES;
private static final int CODE_OFFSET = TYPE_OFFSET + TYPE_SIZE;
private static final int CODE_SIZE = BYTE_SIZE_IN_BYTES;
private static final int CHECKSUM_OFFSET = CODE_OFFSET + CODE_SIZE;
private static final int CHECKSUM_SIZE = SHORT_SIZE_IN_BYTES;
private static final int ICMPV6_COMMON_HEADER_SIZE = CHECKSUM_OFFSET + CHECKSUM_SIZE;
private static final int ICMPV6_PSEUDO_HEADER_SIZE = 40;
private final IcmpV6Type type;
private final IcmpV6Code code;
private final short checksum;
private IcmpV6CommonHeader(byte[] rawData, int offset, int length)
throws IllegalRawDataException {
if (length < ICMPV6_COMMON_HEADER_SIZE) {
StringBuilder sb = new StringBuilder(80);
sb.append("The data is too short to build an ICMPv6 common header(")
.append(ICMPV6_COMMON_HEADER_SIZE)
.append(" bytes). data: ")
.append(ByteArrays.toHexString(rawData, " "))
.append(", offset: ")
.append(offset)
.append(", length: ")
.append(length);
throw new IllegalRawDataException(sb.toString());
}
this.type = IcmpV6Type.getInstance(ByteArrays.getByte(rawData, TYPE_OFFSET + offset));
this.code =
IcmpV6Code.getInstance(type.value(), ByteArrays.getByte(rawData, CODE_OFFSET + offset));
this.checksum = ByteArrays.getShort(rawData, CHECKSUM_OFFSET + offset);
}
private IcmpV6CommonHeader(Builder builder, byte[] payload) {
this.type = builder.type;
this.code = builder.code;
if (builder.correctChecksumAtBuild) {
if (PacketPropertiesLoader.getInstance().icmpV6CalcChecksum()) {
this.checksum =
calcChecksum(builder.srcAddr, builder.dstAddr, buildRawData(true), payload);
} else {
this.checksum = (short) 0;
}
} else {
this.checksum = builder.checksum;
}
}
    /**
     * Computes the ICMPv6 checksum over header + payload followed by the
     * IPv6 pseudo header (src addr, dst addr, upper-layer length, next
     * header).  Because the ones'-complement sum is insensitive to 16-bit
     * word order, the pseudo-header fields need not appear in exact
     * RFC byte order, only at the right even/odd byte positions.
     */
    private short calcChecksum(
        Inet6Address srcAddr, Inet6Address dstAddr, byte[] header, byte[] payload) {
      byte[] data;
      int destPos;
      int totalLength = payload.length + length();
      // Pad the ICMPv6 message to an even byte count before appending the
      // 40-byte pseudo header, so 16-bit word alignment is preserved.
      if ((totalLength % 2) != 0) {
        data = new byte[totalLength + 1 + ICMPV6_PSEUDO_HEADER_SIZE];
        destPos = totalLength + 1;
      } else {
        data = new byte[totalLength + ICMPV6_PSEUDO_HEADER_SIZE];
        destPos = totalLength;
      }
      System.arraycopy(header, 0, data, 0, header.length);
      System.arraycopy(payload, 0, data, header.length, payload.length);
      // pseudo header
      System.arraycopy(srcAddr.getAddress(), 0, data, destPos, srcAddr.getAddress().length);
      destPos += srcAddr.getAddress().length;
      System.arraycopy(dstAddr.getAddress(), 0, data, destPos, dstAddr.getAddress().length);
      destPos += dstAddr.getAddress().length;
      // Skip 3 zero bytes so the next-header byte lands in the low half of
      // its 16-bit word, then write the 16-bit upper-layer length.
      destPos += 3;
      data[destPos] = IpNumber.ICMPV6.value();
      destPos++;
      System.arraycopy(
          ByteArrays.toByteArray((short) totalLength), 0, data, destPos, SHORT_SIZE_IN_BYTES);
      destPos += SHORT_SIZE_IN_BYTES;
      return ByteArrays.calcChecksum(data);
    }
/** @return type */
public IcmpV6Type getType() {
return type;
}
/** @return code */
public IcmpV6Code getCode() {
return code;
}
/** @return checksum */
public short getChecksum() {
return checksum;
}
@Override
protected List<byte[]> getRawFields() {
return getRawFields(false);
}
private List<byte[]> getRawFields(boolean zeroInsteadOfChecksum) {
List<byte[]> rawFields = new ArrayList<byte[]>();
rawFields.add(ByteArrays.toByteArray(type.value()));
rawFields.add(ByteArrays.toByteArray(code.value()));
rawFields.add(ByteArrays.toByteArray(zeroInsteadOfChecksum ? (short) 0 : checksum));
return rawFields;
}
private byte[] buildRawData(boolean zeroInsteadOfChecksum) {
return ByteArrays.concatenate(getRawFields(zeroInsteadOfChecksum));
}
@Override
public int length() {
return ICMPV6_COMMON_HEADER_SIZE;
}
@Override
protected String buildString() {
StringBuilder sb = new StringBuilder();
String ls = System.getProperty("line.separator");
sb.append("[ICMPv6 Common Header (").append(length()).append(" bytes)]").append(ls);
sb.append(" Type: ").append(type).append(ls);
sb.append(" Code: ").append(code).append(ls);
sb.append(" Checksum: 0x").append(ByteArrays.toHexString(checksum, "")).append(ls);
return sb.toString();
}
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    // NOTE(review): isInstance() also accepts subclasses, which can make
    // equals() asymmetric if this class is ever subclassed — confirm this
    // matches the project-wide convention.
    if (!this.getClass().isInstance(obj)) {
      return false;
    }
    IcmpV6CommonHeader other = (IcmpV6CommonHeader) obj;
    return checksum == other.checksum && type.equals(other.type) && code.equals(other.code);
  }
@Override
protected int calcHashCode() {
int result = 17;
result = 31 * result + type.hashCode();
result = 31 * result + code.hashCode();
result = 31 * result + checksum;
return result;
}
}
/**
 * The interface representing an IPv6 neighbor discovery option. If you use {@link
 * org.pcap4j.packet.factory.propertiesbased.PropertiesBasedPacketFactory
 * PropertiesBasedPacketFactory}, classes which implement this interface must implement the
 * following method: {@code public static IpV6NeighborDiscoveryOption newInstance(byte[] rawData,
 * int offset, int length) throws IllegalRawDataException}
 *
 * @author <NAME>
 * @since pcap4j 0.9.15
 */
public interface IpV6NeighborDiscoveryOption extends Serializable {
  /** @return the neighbor discovery option type identifier */
  public IpV6NeighborDiscoveryOptionType getType();
  /** @return the length of this option in bytes */
  public int length();
  /** @return the serialized (wire-format) bytes of this option */
  public byte[] getRawData();
}
}
| 5,237 |
3,609 |
<reponame>peppingdore/tracy<filename>profiler/src/NativeWindow.hpp
#ifndef __NATIVEWINDOW_HPP__
#define __NATIVEWINDOW_HPP__
// Returns an opaque pointer to the native handle of the profiler's main
// window. The concrete type behind the void* depends on the platform
// backend — see the implementation (TODO confirm per platform).
void* GetMainWindowNative();
#endif
453 |
<gh_stars>100-1000
// Copyright (c) the JPEG XL Project Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
#ifndef TOOLS_FLICKER_TEST_TEST_WINDOW_H_
#define TOOLS_FLICKER_TEST_TEST_WINDOW_H_
#include <QByteArray>
#include <QDir>
#include <QMainWindow>
#include <QStringList>
#include <QTextStream>
#include "tools/comparison_viewer/image_loading.h"
#include "tools/flicker_test/parameters.h"
#include "tools/flicker_test/ui_test_window.h"
namespace jxl {
// Qt main window that drives a flicker-test session: it shows images from an
// original and an altered folder, records which side the user clicked and the
// click delay, and streams the results to an output file.
class FlickerTestWindow : public QMainWindow {
  Q_OBJECT
 public:
  explicit FlickerTestWindow(FlickerTestParameters parameters,
                             QWidget* parent = nullptr);
  ~FlickerTestWindow() override = default;
  // False if construction decided the test cannot run (set by the ctor).
  bool proceedWithTest() const { return proceed_; }
 private slots:
  // Records one test answer: which side held the original, which side the
  // user clicked, and how long the click took (milliseconds).
  void processTestResult(const QString& imageName, SplitView::Side originalSide,
                         SplitView::Side clickedSide, int clickDelayMSecs);
 private:
  // Advances to the next image in remainingImages_.
  void nextImage();
  Ui::FlickerTestWindow ui_;
  bool proceed_ = true;
  const QByteArray monitorProfile_;  // presumably an ICC profile — confirm
  FlickerTestParameters parameters_;
  QDir originalFolder_, alteredFolder_;
  QFile outputFile_;
  QTextStream outputStream_;   // writes results into outputFile_
  QStringList remainingImages_;
};
#endif // TOOLS_FLICKER_TEST_TEST_WINDOW_H_
| 492 |
1,444 |
<filename>Mage.Sets/src/mage/cards/p/PredatoryHunger.java
package mage.cards.p;
import java.util.UUID;
import mage.constants.SubType;
import mage.target.common.TargetCreaturePermanent;
import mage.abilities.Ability;
import mage.abilities.common.SpellCastOpponentTriggeredAbility;
import mage.abilities.effects.common.AttachEffect;
import mage.abilities.effects.common.counter.AddCountersAttachedEffect;
import mage.constants.Outcome;
import mage.target.TargetPermanent;
import mage.abilities.keyword.EnchantAbility;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.counters.CounterType;
import mage.filter.StaticFilters;
/**
*
* @author TheElk801
*/
/**
 * Predatory Hunger — {G} Aura.
 *
 * <p>Enchant creature. Whenever an opponent casts a creature spell, put a
 * +1/+1 counter on enchanted creature.
 */
public final class PredatoryHunger extends CardImpl {
    public PredatoryHunger(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.ENCHANTMENT}, "{G}");
        this.subtype.add(SubType.AURA);
        // Enchant creature
        TargetPermanent auraTarget = new TargetCreaturePermanent();
        this.getSpellAbility().addTarget(auraTarget);
        this.getSpellAbility().addEffect(new AttachEffect(Outcome.BoostCreature));
        Ability ability = new EnchantAbility(auraTarget.getTargetName());
        this.addAbility(ability);
        // Whenever an opponent casts a creature spell, put a +1/+1 counter on enchanted creature.
        this.addAbility(new SpellCastOpponentTriggeredAbility(
                new AddCountersAttachedEffect(
                        CounterType.P1P1.createInstance(),
                        "enchanted creature"
                ), StaticFilters.FILTER_SPELL_A_CREATURE, false
        ));
    }
    // Copy constructor used by the game engine when duplicating card objects.
    private PredatoryHunger(final PredatoryHunger card) {
        super(card);
    }
    @Override
    public PredatoryHunger copy() {
        return new PredatoryHunger(this);
    }
}
| 690 |
2,701 |
#include "crypto_stream_aes128ctr.h"
#ifdef __GNUC__
# pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#endif
/* Generates outlen bytes of AES-128-CTR keystream into out, using nonce n
 * and key k. Expands the key once (beforenm), then streams (afternm).
 * NOTE(review): the expanded key schedule d is left on the stack unwiped —
 * confirm whether a memzero is wanted here. */
int
crypto_stream_aes128ctr(unsigned char *out, unsigned long long outlen,
                        const unsigned char *n, const unsigned char *k)
{
    unsigned char d[crypto_stream_aes128ctr_BEFORENMBYTES];
    crypto_stream_aes128ctr_beforenm(d, k);
    crypto_stream_aes128ctr_afternm(out, outlen, n, d);
    return 0;
}
/* XORs inlen bytes of in with the AES-128-CTR keystream (nonce n, key k)
 * into out; same one-shot key expansion as above. */
int
crypto_stream_aes128ctr_xor(unsigned char *out, const unsigned char *in,
                            unsigned long long inlen, const unsigned char *n,
                            const unsigned char *k)
{
    unsigned char d[crypto_stream_aes128ctr_BEFORENMBYTES];
    crypto_stream_aes128ctr_beforenm(d, k);
    crypto_stream_aes128ctr_xor_afternm(out, in, inlen, n, d);
    return 0;
}
| 376 |
877 |
<reponame>reactjs-alex-alex2006hw/react-native-worker-040<filename>android/src/main/java/co/apptailor/Worker/JSService.java
//package co.apptailor.Worker;
//
//import android.app.Service;
//import android.content.Intent;
//import android.os.Handler;
//import android.os.IBinder;
//import android.os.Looper;
//import android.support.annotation.Nullable;
//import android.util.Log;
//
//import com.facebook.react.bridge.ReactApplicationContext;
//import com.facebook.react.common.ApplicationHolder;
//import com.facebook.react.devsupport.DevInternalSettings;
//import com.facebook.react.devsupport.DevServerHelper;
//import com.facebook.soloader.SoLoader;
//
//import java.io.File;
//
//public class JSService extends Service {
// private static final String TAG = "JSService";
// private JSWorker worker;
//
// @Nullable
// @Override
// public IBinder onBind(Intent intent) {
// return null;
// }
//
// @Override
// public void onCreate() {
// super.onCreate();
// SoLoader.init(this, /* native exopackage */ false);
//// try {
//// ApplicationHolder.getApplication();
//// }
//// catch (AssertionError err) {
//// ApplicationHolder.setApplication(getApplication());
//// }
// }
//
// @Override
// public void onDestroy() {
// super.onDestroy();
// clean();
// }
//
// @Override
// public int onStartCommand(Intent intent, int flags, int startId) {
// Log.d(TAG, "Starting background JS Service");
//
// DevInternalSettings devInternalSettings = new DevInternalSettings(this, new StubDevSupportManager());
// devInternalSettings.setHotModuleReplacementEnabled(false);
// devInternalSettings.setElementInspectorEnabled(false);
// devInternalSettings.setReloadOnJSChangeEnabled(false);
//
// DevServerHelper devServerHelper = new DevServerHelper(devInternalSettings);
//
// String bundleName = "src/service.bundle";
// String bundleSlug = bundleName.replaceAll("/", "_");
//
// final File bundleFile = new File(this.getFilesDir(), bundleSlug);
// worker = new JSWorker(bundleName, devServerHelper.getSourceUrl(bundleName), bundleFile.getAbsolutePath());
//
// final Handler mainHandler = new Handler(Looper.getMainLooper());
// final ReactApplicationContext context = new ReactApplicationContext(getApplicationContext());
//
// devServerHelper.downloadBundleFromURL(new DevServerHelper.BundleDownloadCallback() {
// @Override
// public void onSuccess() {
// mainHandler.post(new Runnable() {
// @Override
// public void run() {
// try {
// worker.runFromContext(context);
// } catch (Exception e) {
// Log.d(TAG, "Error while running service bundle");
// e.printStackTrace();
// }
// }
// });
// }
//
// @Override
// public void onFailure(Exception cause) {
// Log.d(TAG, "Error while downloading service bundle");
// cause.printStackTrace();
// }
// }, bundleName, bundleFile);
//
// return Service.START_STICKY;
// }
//
// private void clean() {
// if (worker != null) {
// worker.terminate();
// worker = null;
// }
// }
//}
| 1,498 |
852 |
<filename>TopQuarkAnalysis/TopEventProducers/python/tqafInputFiles_cff.py
# CMSSW configuration fragment: re-exports the PAT RelVal ttbar AODSIM input
# file list under the TQAF (Top Quark Analysis Framework) name.
import FWCore.ParameterSet.Config as cms
from PhysicsTools.PatAlgos.patInputFiles_cff import filesRelValProdTTbarAODSIM
# Alias kept for TQAF configs that refer to the sample as relValTTbar.
relValTTbar = filesRelValProdTTbarAODSIM
1,567 |
<gh_stars>1000+
/*
* Copyright (C) 2017-2019 Alibaba Group Holding Limited
*/
/******************************************************************************
* @file drv_gpio.h
* @brief header file for gpio driver
* @version V1.0
* @date 02. June 2017
******************************************************************************/
#ifndef _CSI_GPIO_H_
#define _CSI_GPIO_H_
#include <stdint.h>
#include <stdbool.h>
#include <drv_common.h>
#ifdef __cplusplus
extern "C" {
#endif
/// definition for gpio pin handle.
typedef void *gpio_pin_handle_t;
/****** GPIO specific error codes *****/
typedef enum {
GPIO_ERROR_MODE = (DRV_ERROR_SPECIFIC + 1), ///< Specified Mode not supported
GPIO_ERROR_DIRECTION, ///< Specified direction not supported
GPIO_ERROR_IRQ_MODE, ///< Specified irq mode not supported
} gpio_error_e;
/*----- GPIO Control Codes: Mode -----*/
typedef enum {
GPIO_MODE_PULLNONE = 0, ///< pull none for input
GPIO_MODE_PULLUP, ///< pull up for input
GPIO_MODE_PULLDOWN, ///< pull down for input
GPIO_MODE_OPEN_DRAIN, ///< open drain mode for output
GPIO_MODE_PUSH_PULL, ///< push-pull mode for output
} gpio_mode_e;
/*----- GPIO Control Codes: Mode Parameters: Data Bits -----*/
typedef enum {
GPIO_DIRECTION_INPUT = 0, ///< gpio as input
GPIO_DIRECTION_OUTPUT, ///< gpio as output
} gpio_direction_e;
/*----- GPIO Control Codes: Mode Parameters: Parity -----*/
typedef enum {
GPIO_IRQ_MODE_RISING_EDGE = 0, ///< interrupt mode for rising edge
GPIO_IRQ_MODE_FALLING_EDGE, ///< interrupt mode for falling edge
GPIO_IRQ_MODE_DOUBLE_EDGE, ///< interrupt mode for double edge
GPIO_IRQ_MODE_LOW_LEVEL, ///< interrupt mode for low level
GPIO_IRQ_MODE_HIGH_LEVEL, ///< interrupt mode for high level
} gpio_irq_mode_e;
typedef void (*gpio_event_cb_t)(int32_t idx); ///< gpio Event call back.
/**
\brief Initialize GPIO handle.
\param[in] gpio_pin gpio pin idx.
\param[in] cb_event event callback function \ref gpio_event_cb_t
\return gpio_pin_handle
*/
gpio_pin_handle_t csi_gpio_pin_initialize(int32_t gpio_pin, gpio_event_cb_t cb_event);
/**
\brief De-initialize GPIO pin handle.stops operation and releases the software resources used by the handle.
\param[in] handle gpio pin handle to operate.
\return error code
*/
int32_t csi_gpio_pin_uninitialize(gpio_pin_handle_t handle);
/**
\brief control gpio power.
\param[in] handle gpio handle to operate.
\param[in] state power state.\ref csi_power_stat_e.
\return error code
*/
int32_t csi_gpio_power_control(gpio_pin_handle_t handle, csi_power_stat_e state);
/**
\brief config pin mode
\param[in] pin gpio pin handle to operate.
\param[in] mode \ref gpio_mode_e
\return error code
*/
int32_t csi_gpio_pin_config_mode(gpio_pin_handle_t handle,
gpio_mode_e mode);
/**
\brief config pin direction
\param[in] pin gpio pin handle to operate.
\param[in] dir \ref gpio_direction_e
\return error code
*/
int32_t csi_gpio_pin_config_direction(gpio_pin_handle_t handle,
gpio_direction_e dir);
/**
\brief config pin
\param[in] pin gpio pin handle to operate.
\param[in] mode \ref gpio_mode_e
\param[in] dir \ref gpio_direction_e
\return error code
*/
int32_t csi_gpio_pin_config(gpio_pin_handle_t handle,
gpio_mode_e mode,
gpio_direction_e dir);
/**
\brief Set one or zero to the selected GPIO pin.
\param[in] pin gpio pin handle to operate.
\param[in] value value to be set
\return error code
*/
int32_t csi_gpio_pin_write(gpio_pin_handle_t handle, bool value);
/**
\brief Get the value of selected GPIO pin.
\param[in] pin gpio pin handle to operate.
\param[out] value buffer to store the pin value
\return error code
*/
int32_t csi_gpio_pin_read(gpio_pin_handle_t handle, bool *value);
/**
\brief set GPIO interrupt mode.
\param[in] pin gpio pin handle to operate.
\param[in] mode irq mode to be set
\param[in] enable enable flag
\return error code
*/
int32_t csi_gpio_pin_set_irq(gpio_pin_handle_t handle, gpio_irq_mode_e mode, bool enable);
#ifdef __cplusplus
}
#endif
#endif /* _CSI_GPIO_H_ */
| 2,094 |
575 |
<reponame>Ron423c/chromium
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/offline_pages/android/evaluation/offline_page_evaluation_bridge.h"
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "base/android/callback_android.h"
#include "base/android/jni_android.h"
#include "base/android/jni_array.h"
#include "base/android/jni_string.h"
#include "base/bind.h"
#include "base/sequenced_task_runner.h"
#include "base/task/post_task.h"
#include "chrome/android/chrome_jni_headers/OfflinePageEvaluationBridge_jni.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/offline_pages/android/background_scheduler_bridge.h"
#include "chrome/browser/offline_pages/android/evaluation/evaluation_test_scheduler.h"
#include "chrome/browser/offline_pages/background_loader_offliner.h"
#include "chrome/browser/offline_pages/offline_page_model_factory.h"
#include "chrome/browser/offline_pages/request_coordinator_factory.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/profiles/profile_android.h"
#include "chrome/common/chrome_constants.h"
#include "components/offline_pages/core/background/offliner.h"
#include "components/offline_pages/core/background/offliner_policy.h"
#include "components/offline_pages/core/background/request_coordinator.h"
#include "components/offline_pages/core/background/request_notifier.h"
#include "components/offline_pages/core/background/request_queue.h"
#include "components/offline_pages/core/background/request_queue_store.h"
#include "components/offline_pages/core/background/save_page_request.h"
#include "components/offline_pages/core/downloads/download_notifying_observer.h"
#include "components/offline_pages/core/offline_page_item.h"
#include "components/offline_pages/core/offline_page_model.h"
#include "content/public/browser/browser_context.h"
using base::android::ConvertJavaStringToUTF8;
using base::android::ConvertUTF16ToJavaString;
using base::android::ConvertUTF8ToJavaString;
using base::android::JavaParamRef;
using base::android::JavaRef;
using base::android::ScopedJavaGlobalRef;
using base::android::ScopedJavaLocalRef;
namespace network {
class NetworkQualityTracker;
}
namespace offline_pages {
namespace android {
namespace {
const char kNativeTag[] = "OPNative";
const base::FilePath::CharType kTestRequestQueueDirname[] =
FILE_PATH_LITERAL("Offline Pages/test_request_queue");
// Appends a Java OfflinePageItem for each entry of |offline_pages| to the
// Java list wrapped by |j_result_obj|.
void JNI_OfflinePageEvaluationBridge_ToJavaOfflinePageList(
    JNIEnv* env,
    const JavaRef<jobject>& j_result_obj,
    const std::vector<OfflinePageItem>& offline_pages) {
  for (const OfflinePageItem& offline_page : offline_pages) {
    Java_OfflinePageEvaluationBridge_createOfflinePageAndAddToList(
        env, j_result_obj,
        ConvertUTF8ToJavaString(env, offline_page.url.spec()),
        offline_page.offline_id,
        ConvertUTF8ToJavaString(env, offline_page.client_id.name_space),
        ConvertUTF8ToJavaString(env, offline_page.client_id.id),
        ConvertUTF16ToJavaString(env, offline_page.title),
        ConvertUTF8ToJavaString(env, offline_page.file_path.value()),
        offline_page.file_size, offline_page.creation_time.ToJavaTime(),
        offline_page.access_count, offline_page.last_access_time.ToJavaTime(),
        ConvertUTF8ToJavaString(env, offline_page.request_origin));
  }
}
// Converts one native SavePageRequest into its Java counterpart.
ScopedJavaLocalRef<jobject>
JNI_OfflinePageEvaluationBridge_ToJavaSavePageRequest(
    JNIEnv* env,
    const SavePageRequest& request) {
  return Java_OfflinePageEvaluationBridge_createSavePageRequest(
      env, static_cast<int>(request.request_state()), request.request_id(),
      ConvertUTF8ToJavaString(env, request.url().spec()),
      ConvertUTF8ToJavaString(env, request.client_id().name_space),
      ConvertUTF8ToJavaString(env, request.client_id().id));
}
// Builds a Java SavePageRequest[] mirroring |requests|.
ScopedJavaLocalRef<jobjectArray>
JNI_OfflinePageEvaluationBridge_CreateJavaSavePageRequests(
    JNIEnv* env,
    std::vector<std::unique_ptr<SavePageRequest>> requests) {
  ScopedJavaLocalRef<jclass> save_page_request_clazz = base::android::GetClass(
      env, "org/chromium/chrome/browser/offlinepages/SavePageRequest");
  jobjectArray joa = env->NewObjectArray(
      requests.size(), save_page_request_clazz.obj(), nullptr);
  base::android::CheckException(env);
  for (size_t i = 0; i < requests.size(); i++) {
    // Convert through a const reference; the original copied every
    // SavePageRequest by value just to hand it to the converter.
    const SavePageRequest& request = *requests[i];
    ScopedJavaLocalRef<jobject> j_save_page_request =
        JNI_OfflinePageEvaluationBridge_ToJavaSavePageRequest(env, request);
    env->SetObjectArrayElement(joa, i, j_save_page_request.obj());
  }
  return ScopedJavaLocalRef<jobjectArray>(env, joa);
}
// Completion callback for OfflinePageModel::GetAllPages: fills the Java list
// and invokes the Java callback with it.
void GetAllPagesCallback(
    const ScopedJavaGlobalRef<jobject>& j_result_obj,
    const ScopedJavaGlobalRef<jobject>& j_callback_obj,
    const OfflinePageModel::MultipleOfflinePageItemResult& result) {
  JNIEnv* env = base::android::AttachCurrentThread();
  JNI_OfflinePageEvaluationBridge_ToJavaOfflinePageList(env, j_result_obj,
                                                        result);
  base::android::RunObjectCallbackAndroid(j_callback_obj, j_result_obj);
}
// Completion callback for RequestCoordinator::GetAllRequests: converts the
// requests to a Java array and invokes the Java callback.
void OnGetAllRequestsDone(
    const ScopedJavaGlobalRef<jobject>& j_callback_obj,
    std::vector<std::unique_ptr<SavePageRequest>> all_requests) {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jobjectArray> j_result_obj =
      JNI_OfflinePageEvaluationBridge_CreateJavaSavePageRequests(
          env, std::move(all_requests));
  base::android::RunObjectCallbackAndroid(j_callback_obj, j_result_obj);
}
// Completion callback for RequestCoordinator::RemoveRequests: reports only
// the number of removal results back to Java.
void OnRemoveRequestsDone(const ScopedJavaGlobalRef<jobject>& j_callback_obj,
                          const MultipleItemStatuses& removed_request_results) {
  base::android::RunIntCallbackAndroid(
      j_callback_obj, static_cast<int>(removed_request_results.size()));
}
// Builds a RequestCoordinator wired to the evaluation test scheduler, backed
// by an on-disk request queue under the profile directory.
std::unique_ptr<KeyedService> GetTestingRequestCoordinator(
    content::BrowserContext* context,
    std::unique_ptr<OfflinerPolicy> policy,
    std::unique_ptr<Offliner> offliner) {
  scoped_refptr<base::SequencedTaskRunner> background_task_runner =
      base::CreateSequencedTaskRunner({base::MayBlock()});
  Profile* profile = Profile::FromBrowserContext(context);
  base::FilePath queue_store_path =
      profile->GetPath().Append(kTestRequestQueueDirname);
  std::unique_ptr<RequestQueueStore> queue_store(
      new RequestQueueStore(background_task_runner, queue_store_path));
  std::unique_ptr<RequestQueue> queue(new RequestQueue(std::move(queue_store)));
  std::unique_ptr<android::EvaluationTestScheduler> scheduler(
      new android::EvaluationTestScheduler());
  // BUG FIX: capture the raw pointer *before* std::move(scheduler) below.
  // The original called scheduler.get() after the move, so the callback was
  // bound to nullptr (a moved-from unique_ptr is empty).
  android::EvaluationTestScheduler* scheduler_ptr = scheduler.get();
  network::NetworkQualityTracker* network_quality_tracker =
      g_browser_process->network_quality_tracker();
  std::unique_ptr<RequestCoordinator> request_coordinator =
      std::make_unique<RequestCoordinator>(
          std::move(policy), std::move(offliner), std::move(queue),
          std::move(scheduler), network_quality_tracker);
  // The coordinator now owns the scheduler, so |scheduler_ptr| stays valid
  // for the coordinator's lifetime and Unretained() is safe.
  request_coordinator->SetInternalStartProcessingCallbackForTest(
      base::BindRepeating(
          &android::EvaluationTestScheduler::ImmediateScheduleCallback,
          base::Unretained(scheduler_ptr)));
  return std::move(request_coordinator);
}
// Factory used for testing: pairs a BackgroundLoaderOffliner with the
// evaluation test coordinator above.
std::unique_ptr<KeyedService> GetTestBackgroundLoaderRequestCoordinator(
    content::BrowserContext* context) {
  std::unique_ptr<OfflinerPolicy> policy(new OfflinerPolicy());
  std::unique_ptr<Offliner> offliner(new BackgroundLoaderOffliner(
      context, policy.get(),
      OfflinePageModelFactory::GetForBrowserContext(context),
      nullptr));  // no need to connect LoadTerminationListener for harness.
  return GetTestingRequestCoordinator(context, std::move(policy),
                                      std::move(offliner));
}
// Returns the profile's regular coordinator, or — when the evaluation
// scheduler is requested — installs the testing factory and uses that.
RequestCoordinator* GetRequestCoordinator(Profile* profile,
                                          bool use_evaluation_scheduler) {
  if (!use_evaluation_scheduler) {
    return RequestCoordinatorFactory::GetForBrowserContext(profile);
  }
  return static_cast<RequestCoordinator*>(
      RequestCoordinatorFactory::GetInstance()->SetTestingFactoryAndUse(
          profile,
          base::BindRepeating(&GetTestBackgroundLoaderRequestCoordinator)));
}
} // namespace
// JNI entry point: creates the native bridge for |j_profile| and returns it
// as a pointer-sized handle (0 on failure). Ownership passes to Java until
// Destroy() is called.
static jlong JNI_OfflinePageEvaluationBridge_CreateBridgeForProfile(
    JNIEnv* env,
    const JavaParamRef<jobject>& obj,
    const JavaParamRef<jobject>& j_profile,
    const jboolean j_use_evaluation_scheduler) {
  Profile* profile = ProfileAndroid::FromProfileAndroid(j_profile);
  OfflinePageModel* offline_page_model =
      OfflinePageModelFactory::GetForBrowserContext(profile);
  RequestCoordinator* request_coordinator = GetRequestCoordinator(
      profile, static_cast<bool>(j_use_evaluation_scheduler));
  if (offline_page_model == nullptr || request_coordinator == nullptr)
    return 0;
  OfflinePageEvaluationBridge* bridge = new OfflinePageEvaluationBridge(
      env, obj, profile, offline_page_model, request_coordinator);
  return reinterpret_cast<jlong>(bridge);
}
// Registers this bridge as observer and log client of both the page model
// and the request coordinator, and notifies Java if the model is already
// loaded.
OfflinePageEvaluationBridge::OfflinePageEvaluationBridge(
    JNIEnv* env,
    const JavaParamRef<jobject>& obj,
    content::BrowserContext* browser_context,
    OfflinePageModel* offline_page_model,
    RequestCoordinator* request_coordinator)
    : weak_java_ref_(env, obj),
      browser_context_(browser_context),
      offline_page_model_(offline_page_model),
      request_coordinator_(request_coordinator) {
  DCHECK(offline_page_model_);
  DCHECK(request_coordinator_);
  NotifyIfDoneLoading();
  offline_page_model_->AddObserver(this);
  request_coordinator_->AddObserver(this);
  offline_page_model_->GetLogger()->SetClient(this);
  request_coordinator_->GetLogger()->SetClient(this);
}
OfflinePageEvaluationBridge::~OfflinePageEvaluationBridge() {}
// Called from Java to tear down the native side: unregisters the observers
// and then self-deletes (the handle on the Java side becomes invalid).
void OfflinePageEvaluationBridge::Destroy(JNIEnv* env,
                                          const JavaParamRef<jobject>&) {
  offline_page_model_->RemoveObserver(this);
  request_coordinator_->RemoveObserver(this);
  delete this;
}
// Implement OfflinePageModel::Observer
void OfflinePageEvaluationBridge::OfflinePageModelLoaded(
    OfflinePageModel* model) {
  DCHECK_EQ(offline_page_model_, model);
  NotifyIfDoneLoading();
}
// Page add/delete notifications are intentionally ignored by this bridge.
void OfflinePageEvaluationBridge::OfflinePageAdded(
    OfflinePageModel* model,
    const OfflinePageItem& added_page) {}
void OfflinePageEvaluationBridge::OfflinePageDeleted(
    const OfflinePageItem& item) {}
// Implement RequestCoordinator::Observer
// Each handler forwards the event to Java, bailing out if the Java-side
// bridge object has already been garbage collected (weak ref is null).
void OfflinePageEvaluationBridge::OnAdded(const SavePageRequest& request) {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = weak_java_ref_.get(env);
  if (obj.is_null())
    return;
  Java_OfflinePageEvaluationBridge_savePageRequestAdded(
      env, obj,
      JNI_OfflinePageEvaluationBridge_ToJavaSavePageRequest(env, request));
}
void OfflinePageEvaluationBridge::OnCompleted(
    const SavePageRequest& request,
    RequestNotifier::BackgroundSavePageResult status) {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = weak_java_ref_.get(env);
  if (obj.is_null())
    return;
  Java_OfflinePageEvaluationBridge_savePageRequestCompleted(
      env, obj,
      JNI_OfflinePageEvaluationBridge_ToJavaSavePageRequest(env, request),
      static_cast<int>(status));
}
void OfflinePageEvaluationBridge::OnChanged(const SavePageRequest& request) {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = weak_java_ref_.get(env);
  if (obj.is_null())
    return;
  Java_OfflinePageEvaluationBridge_savePageRequestChanged(
      env, obj,
      JNI_OfflinePageEvaluationBridge_ToJavaSavePageRequest(env, request));
}
// Progress updates are not surfaced to Java.
void OfflinePageEvaluationBridge::OnNetworkProgress(
    const SavePageRequest& request,
    int64_t received_bytes) {}
// Forwards a native log line to the Java-side logger, tagged "OPNative".
void OfflinePageEvaluationBridge::CustomLog(const std::string& message) {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = weak_java_ref_.get(env);
  if (obj.is_null())
    return;
  Java_OfflinePageEvaluationBridge_log(env, obj,
                                       ConvertUTF8ToJavaString(env, kNativeTag),
                                       ConvertUTF8ToJavaString(env, message));
}
// Asynchronously fills |j_result_obj| with all saved pages and invokes
// |j_callback_obj| with it when done.
void OfflinePageEvaluationBridge::GetAllPages(
    JNIEnv* env,
    const JavaParamRef<jobject>& obj,
    const JavaParamRef<jobject>& j_result_obj,
    const JavaParamRef<jobject>& j_callback_obj) {
  DCHECK(j_result_obj);
  DCHECK(j_callback_obj);
  ScopedJavaGlobalRef<jobject> j_result_ref(j_result_obj);
  ScopedJavaGlobalRef<jobject> j_callback_ref(j_callback_obj);
  offline_page_model_->GetAllPages(
      base::BindOnce(&GetAllPagesCallback, j_result_ref, j_callback_ref));
}
// Kicks off immediate background processing; returns whether it started.
bool OfflinePageEvaluationBridge::PushRequestProcessing(
    JNIEnv* env,
    const JavaParamRef<jobject>& obj,
    const JavaParamRef<jobject>& j_callback_obj) {
  ScopedJavaGlobalRef<jobject> j_callback_ref(j_callback_obj);
  DCHECK(request_coordinator_);
  // NOTE(review): the callback is invoked with |false| up front and then
  // again by StartImmediateProcessing — so Java sees it fire twice. Confirm
  // this priming call is intentional.
  base::android::RunBooleanCallbackAndroid(j_callback_obj, false);
  return request_coordinator_->StartImmediateProcessing(base::BindRepeating(
      &base::android::RunBooleanCallbackAndroid, j_callback_ref));
}
// Queues a save-page-later request for |j_url| under the given client id.
void OfflinePageEvaluationBridge::SavePageLater(
    JNIEnv* env,
    const JavaParamRef<jobject>& obj,
    const JavaParamRef<jstring>& j_url,
    const JavaParamRef<jstring>& j_namespace,
    const JavaParamRef<jstring>& j_client_id,
    jboolean user_requested) {
  offline_pages::ClientId client_id;
  client_id.name_space = ConvertJavaStringToUTF8(env, j_namespace);
  client_id.id = ConvertJavaStringToUTF8(env, j_client_id);
  RequestCoordinator::SavePageLaterParams params;
  params.url = GURL(ConvertJavaStringToUTF8(env, j_url));
  params.client_id = client_id;
  params.user_requested = static_cast<bool>(user_requested);
  request_coordinator_->SavePageLater(params);
}
// Asynchronously reports the queued requests to |j_callback_obj|.
void OfflinePageEvaluationBridge::GetRequestsInQueue(
    JNIEnv* env,
    const JavaParamRef<jobject>& obj,
    const JavaParamRef<jobject>& j_callback_obj) {
  ScopedJavaGlobalRef<jobject> j_callback_ref(j_callback_obj);
  request_coordinator_->GetAllRequests(
      base::BindOnce(&OnGetAllRequestsDone, j_callback_ref));
}
// Removes the given request ids from the queue; the callback receives the
// number of removal results.
void OfflinePageEvaluationBridge::RemoveRequestsFromQueue(
    JNIEnv* env,
    const JavaParamRef<jobject>& obj,
    const JavaParamRef<jlongArray>& j_request_ids,
    const JavaParamRef<jobject>& j_callback_obj) {
  std::vector<int64_t> request_ids;
  base::android::JavaLongArrayToInt64Vector(env, j_request_ids, &request_ids);
  ScopedJavaGlobalRef<jobject> j_callback_ref(j_callback_obj);
  request_coordinator_->RemoveRequests(
      request_ids, base::BindOnce(&OnRemoveRequestsDone, j_callback_ref));
}
// Tells the Java side that the offline page model finished loading (no-op if
// the Java object is gone).
void OfflinePageEvaluationBridge::NotifyIfDoneLoading() const {
  JNIEnv* env = base::android::AttachCurrentThread();
  ScopedJavaLocalRef<jobject> obj = weak_java_ref_.get(env);
  if (obj.is_null())
    return;
  Java_OfflinePageEvaluationBridge_offlinePageModelLoaded(env, obj);
}
} // namespace android
} // namespace offline_pages
| 5,427 |
778 |
// | / |
// ' / __| _` | __| _ \ __|
// . \ | ( | | ( |\__ `
// _|\_\_| \__,_|\__|\___/ ____/
// Multi-Physics
//
// License: BSD License
// Kratos default license: kratos/license.txt
//
// Main authors: <NAME>
//
#if !defined (KRATOS_TRILINOS_SOLVER_UTILITIES_H_INCLUDED)
#define KRATOS_TRILINOS_SOLVER_UTILITIES_H_INCLUDED
// External includes
#include "Teuchos_ParameterList.hpp"
// Project includes
#include "includes/define.h"
#include "includes/kratos_parameters.h"
namespace Kratos {
namespace TrilinosSolverUtilities {
// Copies the scalar entries of a Kratos Parameters object into a Teuchos
// parameter list. Only string, int, bool and double values are mapped;
// entries of any other kind (arrays, sub-parameters, ...) are silently
// skipped.
void SetTeuchosParameters(const Parameters rSettings, Teuchos::ParameterList& rParameterlist)
{
    for (auto it = rSettings.begin(); it != rSettings.end(); ++it) {
        if (it->IsString()) rParameterlist.set(it.name(), it->GetString());
        else if (it->IsInt()) rParameterlist.set(it.name(), it->GetInt());
        else if (it->IsBool()) rParameterlist.set(it.name(), it->GetBool());
        else if (it->IsDouble()) rParameterlist.set(it.name(), it->GetDouble());
    }
}
} // namespace TrilinosSolverUtilities.
} // namespace Kratos.
#endif // KRATOS_TRILINOS_SOLVER_UTILITIES_H_INCLUDED defined
| 533 |
2,151 |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ui/accessibility/ax_host_delegate.h"
#include "ui/accessibility/ax_tree_id_registry.h"
namespace ui {
// Default constructor: asks the registry to mint a fresh tree id for this
// delegate and registers it in one step.
AXHostDelegate::AXHostDelegate()
    : tree_id_(AXTreeIDRegistry::GetInstance()->GetOrCreateAXTreeID(this)) {}
// Constructor for a pre-assigned id: registers this delegate under the
// caller-supplied |tree_id|.
AXHostDelegate::AXHostDelegate(int32_t tree_id) : tree_id_(tree_id) {
  AXTreeIDRegistry::GetInstance()->SetDelegateForID(this, tree_id);
}
// Unregisters the id so the registry never holds a dangling delegate pointer.
AXHostDelegate::~AXHostDelegate() {
  AXTreeIDRegistry::GetInstance()->RemoveAXTreeID(tree_id_);
}
} // namespace ui
| 236 |
335 |
{
"word": "Voluptuary",
"definitions": [
"a person whose chief interests are luxury and the gratification of sensual appetites"
],
"parts-of-speech": "Noun"
}
| 70 |
2,114 |
<reponame>rahogata/Jest<gh_stars>1000+
package io.searchbox.action;
import com.google.gson.Gson;
import io.searchbox.client.JestResult;
/**
* @author <NAME>
*/
/**
 * Base class for document-targeted Elasticsearch actions whose response is
 * parsed into a plain {@link JestResult} (no action-specific result type).
 */
public abstract class GenericResultAbstractDocumentTargetedAction extends AbstractDocumentTargetedAction<JestResult> {
    public GenericResultAbstractDocumentTargetedAction(Builder builder) {
        super(builder);
    }
    /** Wraps the raw HTTP response into a generic {@link JestResult}. */
    @Override
    public JestResult createNewElasticSearchResult(String responseBody, int statusCode, String reasonPhrase, Gson gson) {
        return createNewElasticSearchResult(new JestResult(gson), responseBody, statusCode, reasonPhrase, gson);
    }
}
| 208 |
4,119 |
<filename>ignite/metrics/ssim.py
from typing import Callable, Sequence, Union
import torch
import torch.nn.functional as F
from ignite.exceptions import NotComputableError
from ignite.metrics.metric import Metric, reinit__is_reduced, sync_all_reduce
__all__ = ["SSIM"]
class SSIM(Metric):
"""
Computes Structual Similarity Index Measure
Args:
data_range: Range of the image. Typically, ``1.0`` or ``255``.
kernel_size: Size of the kernel. Default: (11, 11)
sigma: Standard deviation of the gaussian kernel.
Argument is used if ``gaussian=True``. Default: (1.5, 1.5)
k1: Parameter of SSIM. Default: 0.01
k2: Parameter of SSIM. Default: 0.03
gaussian: ``True`` to use gaussian kernel, ``False`` to use uniform kernel
output_transform: A callable that is used to transform the
:class:`~ignite.engine.engine.Engine`'s ``process_function``'s output into the
form expected by the metric.
device: specifies which device updates are accumulated on. Setting the metric's
device to be the same as your ``update`` arguments ensures the ``update`` method is non-blocking. By
default, CPU.
Examples:
To use with ``Engine`` and ``process_function``, simply attach the metric instance to the engine.
The output of the engine's ``process_function`` needs to be in the format of
``(y_pred, y)`` or ``{'y_pred': y_pred, 'y': y, ...}``.
``y_pred`` and ``y`` can be un-normalized or normalized image tensors. Depending on that, the user might need
to adjust ``data_range``. ``y_pred`` and ``y`` should have the same shape.
.. testcode::
metric = SSIM(data_range=1.0)
metric.attach(default_evaluator, 'ssim')
preds = torch.rand([4, 3, 16, 16])
target = preds * 0.75
state = default_evaluator.run([[preds, target]])
print(state.metrics['ssim'])
.. testoutput::
0.9218971...
.. versionadded:: 0.4.2
"""
def __init__(
self,
data_range: Union[int, float],
kernel_size: Union[int, Sequence[int]] = (11, 11),
sigma: Union[float, Sequence[float]] = (1.5, 1.5),
k1: float = 0.01,
k2: float = 0.03,
gaussian: bool = True,
output_transform: Callable = lambda x: x,
device: Union[str, torch.device] = torch.device("cpu"),
):
if isinstance(kernel_size, int):
self.kernel_size = [kernel_size, kernel_size] # type: Sequence[int]
elif isinstance(kernel_size, Sequence):
self.kernel_size = kernel_size
else:
raise ValueError("Argument kernel_size should be either int or a sequence of int.")
if isinstance(sigma, float):
self.sigma = [sigma, sigma] # type: Sequence[float]
elif isinstance(sigma, Sequence):
self.sigma = sigma
else:
raise ValueError("Argument sigma should be either float or a sequence of float.")
if any(x % 2 == 0 or x <= 0 for x in self.kernel_size):
raise ValueError(f"Expected kernel_size to have odd positive number. Got {kernel_size}.")
if any(y <= 0 for y in self.sigma):
raise ValueError(f"Expected sigma to have positive number. Got {sigma}.")
super(SSIM, self).__init__(output_transform=output_transform, device=device)
self.gaussian = gaussian
self.c1 = (k1 * data_range) ** 2
self.c2 = (k2 * data_range) ** 2
self.pad_h = (self.kernel_size[0] - 1) // 2
self.pad_w = (self.kernel_size[1] - 1) // 2
self._kernel = self._gaussian_or_uniform_kernel(kernel_size=self.kernel_size, sigma=self.sigma)
    @reinit__is_reduced
    def reset(self) -> None:
        """Reset the accumulators to their initial (empty) state."""
        # Not a tensor because batch size is not known in advance.
        self._sum_of_batchwise_ssim = 0.0  # type: Union[float, torch.Tensor]
        self._num_examples = 0
        # Rebuild the kernel on reset; presumably keeps it in sync with
        # self._device if that changed — TODO confirm.
        self._kernel = self._gaussian_or_uniform_kernel(kernel_size=self.kernel_size, sigma=self.sigma)
def _uniform(self, kernel_size: int) -> torch.Tensor:
max, min = 2.5, -2.5
ksize_half = (kernel_size - 1) * 0.5
kernel = torch.linspace(-ksize_half, ksize_half, steps=kernel_size, device=self._device)
for i, j in enumerate(kernel):
if min <= j <= max:
kernel[i] = 1 / (max - min)
else:
kernel[i] = 0
return kernel.unsqueeze(dim=0) # (1, kernel_size)
def _gaussian(self, kernel_size: int, sigma: float) -> torch.Tensor:
ksize_half = (kernel_size - 1) * 0.5
kernel = torch.linspace(-ksize_half, ksize_half, steps=kernel_size, device=self._device)
gauss = torch.exp(-0.5 * (kernel / sigma).pow(2))
return (gauss / gauss.sum()).unsqueeze(dim=0) # (1, kernel_size)
def _gaussian_or_uniform_kernel(self, kernel_size: Sequence[int], sigma: Sequence[float]) -> torch.Tensor:
if self.gaussian:
kernel_x = self._gaussian(kernel_size[0], sigma[0])
kernel_y = self._gaussian(kernel_size[1], sigma[1])
else:
kernel_x = self._uniform(kernel_size[0])
kernel_y = self._uniform(kernel_size[1])
return torch.matmul(kernel_x.t(), kernel_y) # (kernel_size, 1) * (1, kernel_size)
    @reinit__is_reduced
    def update(self, output: Sequence[torch.Tensor]) -> None:
        """Accumulate per-sample SSIM for one ``(y_pred, y)`` batch.

        Both tensors must have the same dtype and the same ``(B, C, H, W)``
        shape; the per-sample SSIM means are summed into the accumulator.
        """
        y_pred, y = output[0].detach(), output[1].detach()

        if y_pred.dtype != y.dtype:
            raise TypeError(
                f"Expected y_pred and y to have the same data type. Got y_pred: {y_pred.dtype} and y: {y.dtype}."
            )

        if y_pred.shape != y.shape:
            raise ValueError(
                f"Expected y_pred and y to have the same shape. Got y_pred: {y_pred.shape} and y: {y.shape}."
            )

        if len(y_pred.shape) != 4 or len(y.shape) != 4:
            raise ValueError(
                f"Expected y_pred and y to have BxCxHxW shape. Got y_pred: {y_pred.shape} and y: {y.shape}."
            )

        channel = y_pred.size(1)
        if len(self._kernel.shape) < 4:
            # Lazily expand the base 2D kernel to (C, 1, kh, kw) for a
            # depthwise (groups=channel) convolution on the first batch seen.
            self._kernel = self._kernel.expand(channel, 1, -1, -1).to(device=y_pred.device)

        # Reflection padding keeps the conv output at the input spatial size.
        y_pred = F.pad(y_pred, [self.pad_w, self.pad_w, self.pad_h, self.pad_h], mode="reflect")
        y = F.pad(y, [self.pad_w, self.pad_w, self.pad_h, self.pad_h], mode="reflect")

        # Compute the five required local means in a single depthwise conv by
        # stacking E[x], E[y], E[x^2], E[y^2], E[x*y] inputs along the batch dim.
        input_list = torch.cat([y_pred, y, y_pred * y_pred, y * y, y_pred * y])
        outputs = F.conv2d(input_list, self._kernel, groups=channel)
        # Only the first five slices are meaningful; ``range(len(outputs))``
        # also produces trailing empty slices, which are never read.
        output_list = [outputs[x * y_pred.size(0) : (x + 1) * y_pred.size(0)] for x in range(len(outputs))]

        mu_pred_sq = output_list[0].pow(2)
        mu_target_sq = output_list[1].pow(2)
        mu_pred_target = output_list[0] * output_list[1]

        # Local (co)variances via E[x^2] - E[x]^2 etc.
        sigma_pred_sq = output_list[2] - mu_pred_sq
        sigma_target_sq = output_list[3] - mu_target_sq
        sigma_pred_target = output_list[4] - mu_pred_target

        # Numerator and denominator factors of the standard SSIM index.
        a1 = 2 * mu_pred_target + self.c1
        a2 = 2 * sigma_pred_target + self.c2
        b1 = mu_pred_sq + mu_target_sq + self.c1
        b2 = sigma_pred_sq + sigma_target_sq + self.c2

        ssim_idx = (a1 * a2) / (b1 * b2)
        # Mean over (C, H, W) gives one SSIM value per sample; accumulate in
        # float64 on the metric's device.
        self._sum_of_batchwise_ssim += torch.mean(ssim_idx, (1, 2, 3), dtype=torch.float64).to(self._device)
        self._num_examples += y.shape[0]
@sync_all_reduce("_sum_of_batchwise_ssim", "_num_examples")
def compute(self) -> torch.Tensor:
if self._num_examples == 0:
raise NotComputableError("SSIM must have at least one example before it can be computed.")
return torch.sum(self._sum_of_batchwise_ssim / self._num_examples) # type: ignore[arg-type]
| 3,513 |
461 |
/* Shared definitions for a reader/writer process pair exchanging data
 * through shared-memory buffers. */
#ifndef MAIN_H_
#define MAIN_H_

#include<stdio.h>
#include<stdint.h>

/* Number of buffers in the shared-memory ring. */
#define BUFFERNUM 1024
/* Payload bytes carried by each buffer. */
#define DATASIZE 1000

/* Buffer status bit flags.  NOTE(review): the per-flag meanings below are
 * inferred from the names -- confirm against read_pro()/write_pro(). */
#define STATUS_PENDING 0x01 /* presumably: slot awaiting data */
#define STATUS_READ 0x02    /* presumably: slot filled by the reader */
#define STATUS_WRITTEN 0x04 /* presumably: slot drained by the writer */
#define STATUS_ALL 0x08     /* presumably: end-of-stream marker */
#define STATUS_HEAD 0x10    /* presumably: slot is the ring head */
#define STATUS_TAIL 0x20    /* presumably: slot is the ring tail */

/* Bytes reserved at the start of each buffer for header metadata. */
#define SIZE_HEADER 5

/* Reader process entry point: consumes inFile into shared memory.
 * idShmHead/idShmTail: shared-memory segment ids; idSem: semaphore id. */
int read_pro(FILE *inFile, int idShmHead, int idShmTail, int idSem);
/* Writer process entry point: drains shared memory into outFile. */
int write_pro(FILE *outFile, int idShmHead, int idShmTail, int idSem);

#endif
| 206 |
864 |
<gh_stars>100-1000
/**********************************************************************************************************************
This file is part of the Control Toolbox (https://github.com/ethz-adrl/control-toolbox), copyright by ETH Zurich.
Licensed under the BSD-2 license (see LICENSE file in main directory)
**********************************************************************************************************************/
#pragma once
#include <type_traits>
#include <ct/optcon/problem/ContinuousOptConProblem.h>
#include <ct/optcon/solver/OptConSolver.h>
#include "MpcSettings.h"
#include "MpcTimeKeeper.h"
#include "policyhandler/PolicyHandler.h"
#include "timehorizon/MpcTimeHorizon.h"
#include <ct/optcon/solver/NLOptConSolver.hpp>
#include "policyhandler/default/StateFeedbackPolicyHandler.h"
//#define DEBUG_PRINT_MPC //! use this flag to enable debug printouts in the MPC implementation
namespace ct {
namespace optcon {
/** \defgroup MPC MPC
*
* \brief Model Predictive Control Module
*/
/**
* \ingroup MPC
 *
* \brief Main MPC class.
*
* This MPC class allows to use any solver that derives from the OptConSolver base class in Model-Predictive-Control fashion.
* MPC will automatically construct the solver
*
* Main assumptions:
* This MPC class is deliberately designed such that the time-keeping is managed by itself. The main assumption is that the controller
* which is designed here, gets applied to the system instantaneously after the run() call is executed. Furthermore, we assume that all Optimal Control Problems start at
* time zero. This also applies to the cost- and the constraint functionals which are to be provided by the user.
*
* Sidenotes:
* between the calls to run(), the user can arbitrarily modify his cost-functions, etc. in order to change the problem.
*
* @param OPTCON_SOLVER
* the optimal control solver to be employed, for example SLQ or DMS
*
*/
template <typename OPTCON_SOLVER>
class MPC
{
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
static const size_t STATE_DIM = OPTCON_SOLVER::STATE_D;
static const size_t CONTROL_DIM = OPTCON_SOLVER::CONTROL_D;
static const size_t P_DIM = OPTCON_SOLVER::POS_DIM;
static const size_t V_DIM = OPTCON_SOLVER::VEL_DIM;
using Scalar_t = typename OPTCON_SOLVER::Scalar_t;
using Policy_t = typename OPTCON_SOLVER::Policy_t;
using OptConProblem_t = ContinuousOptConProblem<STATE_DIM, CONTROL_DIM, Scalar_t>;
//! MPC solver constructor
/*!
*
* @param problem
* the optimal control problem set up by the user
* @param solverSettings
* settings class/struct for the optimal control solver of choice. Be sure to tune the solver settings such that they are suitable for MPC!
* @param mpcsettings
* mpc-specific settings, see class MpcSettings.h
* @param customPolicyHandler
* user-provided custom policy handler, which derives from base class 'PolicyHandler'.
* If not specified, MPC will use one of its default implementations or throw an error if there is no default match.
* @param customTimeHorizon
* user-provided custom time horizon strategy, which derives from base class 'MpcTimeHorizon'.
* If not specified, MPC will use one of its default implementations or throw an error if there is no default match.
*/
MPC(const OptConProblem_t& problem,
const typename OPTCON_SOLVER::Settings_t& solverSettings,
const mpc_settings& mpcsettings = mpc_settings(),
std::shared_ptr<PolicyHandler<Policy_t, STATE_DIM, CONTROL_DIM, Scalar_t>> customPolicyHandler = nullptr,
std::shared_ptr<tpl::MpcTimeHorizon<Scalar_t>> customTimeHorizon = nullptr);
//! Allows access to the solver member, required mainly for unit testing.
/*!
* @return reference to the optimal control problem solver
*/
OPTCON_SOLVER& getSolver();
//! Additional method to insert a custom time horizon strategy, independent from the constructor
/*!
* @param timeHorizonStrategy
* the time horizon strategy provided by the user
*/
void setTimeHorizonStrategy(std::shared_ptr<tpl::MpcTimeHorizon<Scalar_t>> timeHorizonStrategy);
//! set a new initial guess for the policy
/**
* @param initGuess
*/
void setInitialGuess(const Policy_t& initGuess);
//! Check if final time horizon for this task was reached
bool timeHorizonReached();
//! retrieve the time that elapsed since the first successful solve() call to an Optimal Control Problem
/*!
* @param the external time stamp
* @return time elapsed, the returned time can be used externally, for example to update cost functions
*/
const Scalar_t timeSinceFirstSuccessfulSolve(const Scalar_t& extTime);
//! perform forward integration of the measured system state, to compensate for expected or already occurred time lags
/*!
* State forward integration
* @param t_forward_start
* time where forward integration starts
* @param t_forward_stop
* time where forward integration stops
* @param x_start
* initial state for forward integration, gets overwritten with forward-integrated state
* @param forwardIntegrationController
* (optional) external controller for forward integration
*
* \warning The effect of the integration will vanish one the MPC frequency is higher than the sampling frequency
*/
void doForwardIntegration(const Scalar_t& t_forward_start,
const Scalar_t& t_forward_stop,
core::StateVector<STATE_DIM, Scalar_t>& x_start,
const std::shared_ptr<core::Controller<STATE_DIM, CONTROL_DIM, Scalar_t>> forwardIntegrationController =
nullptr);
/*!
* Prepare MPC iteration
* @param ext_ts the current external time
*/
void prepareIteration(const Scalar_t& ext_ts);
//! finish MPC iteration
/*!
* @param x
* current system state
* @param x_ts
* time stamp of the current state (external time in seconds)
* @param newPolicy
* the new policy calculated based on above state, the timing info and the underlying OptConProblem
* @param newPolicy_ts
* time stamp of the resulting policy. This indicates when the policy is supposed to start being applied, relative to
* the user-provided state-timestamp x_ts.
* @param forwardIntegrationController
* optional input: in some scenarios, we wish to use a different kind controller for forward integrating the system than the one we are optimizing
* Such a controller can be handed over here as additional argument. If set to empty, MPC uses its own optimized controller from
* the last iteration, thus assuming perfect control trajectory tracking.
* @return true if solve was successful, false otherwise.
*/
bool finishIteration(const core::StateVector<STATE_DIM, Scalar_t>& x,
const Scalar_t x_ts,
Policy_t& newPolicy,
Scalar_t& newPolicy_ts,
const std::shared_ptr<core::Controller<STATE_DIM, CONTROL_DIM, Scalar_t>> forwardIntegrationController =
nullptr);
//! reset the mpc problem and provide new problem time horizon (mandatory)
void resetMpc(const Scalar_t& newTimeHorizon);
//! update the mpc settings in all instances (main class, time keeper class, etc)
/*!
* update the mpc settings in all instances
* @param settings
* the new mpc settings provided by the user.
*/
void updateSettings(const mpc_settings& settings);
//! printout simple statistical data
void printMpcSummary();
private:
//! state forward propagation (for delay compensation)
/*!
* Perform forward integration about the given prediction horizon.
* - uses an arbitrary controller given, which is important for hierarchical setups where the actual controller may be refined further
* @param startTime
* time where forward integration starts w.r.t. the current policy
* @param stopTime
* time where forward integration stops w.r.t. the current policy
* @param state
* state to be forward propagated
* @param controller
* the controller to be used for forward propagation
*/
void integrateForward(const Scalar_t startTime,
const Scalar_t stopTime,
core::StateVector<STATE_DIM, Scalar_t>& state,
const std::shared_ptr<core::Controller<STATE_DIM, CONTROL_DIM, Scalar_t>>& controller);
void checkSettings(const mpc_settings& settings);
//! timings for pre-integration
Scalar_t t_forward_start_;
Scalar_t t_forward_stop_;
//! optimal control solver employed for mpc
OPTCON_SOLVER solver_;
//! mpc settings
mpc_settings mpc_settings_;
//! dynamics instance for forward integration
typename OPTCON_SOLVER::OptConProblem_t::DynamicsPtr_t dynamics_;
//! integrator for forward integration
ct::core::Integrator<STATE_DIM, Scalar_t> forwardIntegrator_;
//! true for first run
bool firstRun_;
//! counter which gets incremented at every call of the run() method
size_t runCallCounter_;
//! policy handler, which takes care of warm-starting
std::shared_ptr<PolicyHandler<Policy_t, STATE_DIM, CONTROL_DIM, Scalar_t>> policyHandler_;
//! currently optimal policy, initial guess respectively
Policy_t currentPolicy_;
//! time horizon strategy, e.g. receding horizon optimal control
std::shared_ptr<tpl::MpcTimeHorizon<Scalar_t>> timeHorizonStrategy_;
//! time keeper
tpl::MpcTimeKeeper<Scalar_t> timeKeeper_;
};
} //namespace optcon
} //namespace ct
| 3,017 |
438 |
/* FractionAtom.java
* =========================================================================
* This file is originally part of the JMathTeX Library - http://jmathtex.sourceforge.net
*
* Copyright (C) 2004-2007 Universiteit Gent
* Copyright (C) 2009 <NAME>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or (at
* your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* A copy of the GNU General Public License can be found in the file
* LICENSE.txt provided with the source distribution of this program (see
* the META-INF directory in the source jar). This license can also be
* found on the GNU website at http://www.gnu.org/licenses/gpl.html.
*
* If you did not receive a copy of the GNU General Public License along
* with this program, contact the lead developer, or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*
*/
package org.scilab.forge.jlatexmath.core;
/**
* An atom representing a fraction.
*/
public class FractionAtom extends Atom {
// whether the default thickness should not be used for the fraction line
private boolean noDefault = false;
// unit used for the thickness of the fraction line
private int unit;
// alignment settings for the numerator and denominator
private int numAlign = TeXConstants.ALIGN_CENTER,
denomAlign = TeXConstants.ALIGN_CENTER;
// the atoms representing the numerator and denominator
private Atom numerator, denominator;
// thickness of the fraction line
private float thickness;
// thickness of the fraction line relative to the default thickness
private float defFactor;
// whether the "defFactor" value should be used
private boolean defFactorSet = false;
/**
* Uses the default thickness for the fraction line
*
* @param num
* the numerator
* @param den
* the denominator
*/
public FractionAtom(Atom num, Atom den) {
this(num, den, true);
}
/**
* Uses the default thickness for the fraction line
*
* @param num
* the numerator
* @param den
* the denominator
* @param rule
* whether the fraction line should be drawn
*/
public FractionAtom(Atom num, Atom den, boolean rule) {
this(num, den, !rule, TeXConstants.UNIT_PIXEL, 0f);
}
/**
* Depending on noDef, the given thickness and unit will be used (<-> the
* default thickness).
*
* @param num
* the numerator
* @param den
* the denominator
* @param noDef
* whether the default thickness should not be used for the
* fraction line
* @param unit
* a unit constant for the line thickness
* @param t
* the thickness of the fraction line (in the given unit)
* @throws InvalidUnitException
* if the given integer is not a valid unit constant
*/
public FractionAtom(Atom num, Atom den, boolean noDef, int unit, float t)
throws InvalidUnitException {
// check unit
SpaceAtom.checkUnit(unit);
// unit ok
numerator = num;
denominator = den;
noDefault = noDef;
thickness = t;
this.unit = unit;
type = TeXConstants.TYPE_INNER;
}
/**
* Uses the default thickness for the fraction line.
*
* @param num
* the numerator
* @param den
* the denominator
* @param rule
* whether the fraction line should be drawn
* @param numAlign
* alignment of the numerator
* @param denomAlign
* alignment of the denominator
*/
public FractionAtom(Atom num, Atom den, boolean rule, int numAlign,
int denomAlign) {
this(num, den, rule);
this.numAlign = checkAlignment(numAlign);
this.denomAlign = checkAlignment(denomAlign);
}
/**
* The thickness of the fraction line will be "defFactor" times the default
* thickness.
*
* @param num
* the numerator
* @param den
* the denominator
* @param defFactor
* the thickness of the fraction line relative to the default
* thickness
* @param numAlign
* alignment of the numerator
* @param denomAlign
* alignment of the denominator
*/
public FractionAtom(Atom num, Atom den, float defFactor, int numAlign,
int denomAlign) {
this(num, den, true, numAlign, denomAlign);
this.defFactor = defFactor;
defFactorSet = true;
}
/**
* The thickness of the fraction line is determined by the given value "t"
* in the given unit.
*
* @param num
* the numerator
* @param den
* the denominator
* @param unit
* a unit constant for the line thickness
* @param t
* the thickness of the fraction line (in the given unit)
* @param numAlign
* alignment of the numerator
* @param denomAlign
* alignment of the denominator
*/
public FractionAtom(Atom num, Atom den, int unit, float t, int numAlign,
int denomAlign) {
this(num, den, unit, t);
this.numAlign = checkAlignment(numAlign);
this.denomAlign = checkAlignment(denomAlign);
}
/**
* The thickness of the fraction line is determined by the given value "t"
* in the given unit.
*
* @param num
* the numerator
* @param den
* the denominator
* @param unit
* a unit constant for the line thickness
* @param t
* the thickness of the fraction line (in the given unit)
*/
public FractionAtom(Atom num, Atom den, int unit, float t) {
this(num, den, true, unit, t);
}
// Checks if the alignment constant is valid.
// If not, a default value will be used.
private int checkAlignment(int align) {
if (align == TeXConstants.ALIGN_LEFT
|| align == TeXConstants.ALIGN_RIGHT)
return align;
else
return TeXConstants.ALIGN_CENTER;
}
public Box createBox(TeXEnvironment env) {
TeXFont tf = env.getTeXFont();
int style = env.getStyle();
// set thickness to default if default value should be used
float drt = tf.getDefaultRuleThickness(style);
if (noDefault)
// convert the thickness to pixels
thickness *= SpaceAtom.getFactor(unit, env);
else
thickness = (defFactorSet ? defFactor * drt : drt);
// create equal width boxes (in appropriate styles)
Box num = (numerator == null ? new StrutBox(0, 0, 0, 0) : numerator
.createBox(env.numStyle()));
Box denom = (denominator == null ? new StrutBox(0, 0, 0, 0)
: denominator.createBox(env.denomStyle()));
if (num.getWidth() < denom.getWidth())
num = new HorizontalBox(num, denom.getWidth(), numAlign);
else
denom = new HorizontalBox(denom, num.getWidth(), denomAlign);
// calculate default shift amounts
float shiftUp, shiftDown;
if (style < TeXConstants.STYLE_TEXT) {
shiftUp = tf.getNum1(style);
shiftDown = tf.getDenom1(style);
} else {
shiftDown = tf.getDenom2(style);
if (thickness > 0)
shiftUp = tf.getNum2(style);
else
shiftUp = tf.getNum3(style);
}
// upper part of vertical box = numerator
VerticalBox vBox = new VerticalBox();
vBox.add(num);
// calculate clearance clr, adjust shift amounts and create vertical box
float clr, delta, axis = tf.getAxisHeight(style);
if (thickness > 0) { // WITH fraction rule
// clearance clr
if (style < TeXConstants.STYLE_TEXT)
clr = 3 * thickness;
else
clr = thickness;
// adjust shift amounts
delta = thickness / 2;
float kern1 = shiftUp - num.getDepth() - (axis + delta), kern2 = axis
- delta - (denom.getHeight() - shiftDown);
float delta1 = clr - kern1, delta2 = clr - kern2;
if (delta1 > 0) {
shiftUp += delta1;
kern1 += delta1;
}
if (delta2 > 0) {
shiftDown += delta2;
kern2 += delta2;
}
// fill vertical box
vBox.add(new StrutBox(0, kern1, 0, 0));
vBox.add(new HorizontalRule(thickness, num.getWidth(), 0));
vBox.add(new StrutBox(0, kern2, 0, 0));
} else { // WITHOUT fraction rule
// clearance clr
if (style < TeXConstants.STYLE_TEXT)
clr = 7 * drt;
else
clr = 3 * drt;
// adjust shift amounts
float kern = shiftUp - num.getDepth()
- (denom.getHeight() - shiftDown);
delta = (clr - kern) / 2;
if (delta > 0) {
shiftUp += delta;
shiftDown += delta;
kern += 2 * delta;
}
// fill vertical box
vBox.add(new StrutBox(0, kern, 0, 0));
}
// finish vertical box
vBox.add(denom);
vBox.setHeight(shiftUp + num.getHeight());
vBox.setDepth(shiftDown + denom.getDepth());
// \nulldelimiterspace is set by default to 1.2pt = 0.12em)
float f = new SpaceAtom(TeXConstants.UNIT_EM, 0.12f, 0, 0).createBox(
env).getWidth();
return new HorizontalBox(vBox, vBox.getWidth() + 2 * f,
TeXConstants.ALIGN_CENTER);
}
}
| 3,393 |
1,006 |
<gh_stars>1000+
/****************************************************************************
* arch/arm/src/lc823450/lc823450_dma.h
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership. The
* ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
#ifndef __ARCH_ARM_SRC_LC823450_LC823450_DMA_H
#define __ARCH_ARM_SRC_LC823450_LC823450_DMA_H
/****************************************************************************
* Included Files
****************************************************************************/
#include <nuttx/config.h>
#include <sys/types.h>
#include <arch/irq.h>
#include "chip.h"
/****************************************************************************
* Pre-processor Definitions
****************************************************************************/
#define LC823450_DMA_ITC (1 << 31)
#define LC823450_DMA_SRCWIDTH_BYTE (0 << 18)
#define LC823450_DMA_SRCWIDTH_HWORD (1 << 18)
#define LC823450_DMA_SRCWIDTH_WORD (2 << 18)
#define LC823450_DMA_DSTWIDTH_BYTE (0 << 21)
#define LC823450_DMA_DSTWIDTH_HWORD (1 << 21)
#define LC823450_DMA_DSTWIDTH_WORD (2 << 21)
#define LC823450_DMA_SRCINC (1 << 26)
#define LC823450_DMA_DSTINC (1 << 27)
#define LC823450_DMA_SBS_SHIFT 12
#define LC823450_DMA_DBS_SHIFT 15
#define LC823450_DMA_BS_1 0
#define LC823450_DMA_BS_4 1
#define LC823450_DMA_BS_8 2
#define LC823450_DMA_BS_16 3
#define LC823450_DMA_BS_32 4
#define LC823450_DMA_BS_64 5
#define LC823450_DMA_BS_128 6
#define LC823450_DMA_BS_256 7
#define LC823450_DMA_TRANSSIZE_MASK (0xfff << 0)
#define LC823450_DMA_MAX_TRANSSIZE 0xff0
/* HighPriority */
#define DMA_CHANNEL_SIOTX 0
#define DMA_CHANNEL_UART1RX 1
#define DMA_CHANNEL_UART1TX 2
#define DMA_CHANNEL_USBDEV 3
#define DMA_CHANNEL_AUDIOWR 4
#define DMA_CHANNEL_AUDIORD 5
#if 0
#define DMA_CHANNEL_??? 6
#endif
#define DMA_CHANNEL_VIRTUAL 7
#define DMA_CHANNEL_NUM 8
#define DMA_REQUEST_UART0RX 0
#define DMA_REQUEST_UART0TX 1
#define DMA_REQUEST_UART1RX 2
#define DMA_REQUEST_UART1TX 3
#define DMA_REQUEST_UART2RX 4
#define DMA_REQUEST_UART2TX 5
#define DMA_REQUEST_SIORX 6
#define DMA_REQUEST_SIOTX 7
#define DMA_REQUEST_AUDIOBUF0 8
#define DMA_REQUEST_AUDIOBUF1 9
#define DMA_REQUEST_AUDIOBUF2 10
#define DMA_REQUEST_AUDIOBUF3 11
#define DMA_REQUEST_AUDIOBUF4 12
#define DMA_REQUEST_AUDIOBUF5 13
#define DMA_REQUEST_AUDIOBUF6 14
#define DMA_REQUEST_AUDIOBUF7 15
#define DMA_REQUEST_USBDEV 22
/****************************************************************************
* Public Types
****************************************************************************/
struct lc823450_dma_llist
{
uint32_t srcaddr;
uint32_t dstaddr;
uint32_t nextlli;
uint32_t ctrl;
};
typedef void *DMA_HANDLE;
typedef void (*dma_callback_t)(DMA_HANDLE handle, void *arg, int result);
/****************************************************************************
* Public Data
****************************************************************************/
#undef EXTERN
#if defined(__cplusplus)
#define EXTERN extern "C"
extern "C"
{
#else
#define EXTERN extern
#endif
/****************************************************************************
* Public Function Prototypes
****************************************************************************/
void lc823450_dmaconfigure(uint8_t dmarequest, bool alternate);
DMA_HANDLE lc823450_dmachannel(int ch);
void lc823450_dmafree(DMA_HANDLE handle);
void lc823450_dmarequest(DMA_HANDLE handle, uint8_t dmarequest);
int lc823450_dmasetup(DMA_HANDLE handle, uint32_t control,
uint32_t srcaddr, uint32_t destaddr, size_t nxfrs);
int lc823450_dmallsetup(DMA_HANDLE handle, uint32_t control,
uint32_t srcaddr, uint32_t destaddr,
size_t nxfrs, uint32_t llist);
void lc823450_dmareauest_dir(DMA_HANDLE handle, uint8_t dmarequest,
int m2p);
int lc823450_dmastart(DMA_HANDLE handle, dma_callback_t callback,
void *arg);
void lc823450_dmastop(DMA_HANDLE handle);
int lc823450_dmaremain(DMA_HANDLE handle);
#undef EXTERN
#if defined(__cplusplus)
}
#endif
#endif /* __ARCH_ARM_SRC_LC823450_LC823450_DMA_H */
| 2,324 |
1,405 |
package a.a.a.a.a.a;
import a.a.a.a.a.a;
import a.a.a.a.a.a.e.k;
import a.a.a.a.a.a.e.l;
import a.a.a.a.a.a.e.o;
import a.a.a.a.a.h;
import a.a.a.a.a.i;
import a.a.a.a.a.n;
import java.util.Vector;
// NOTE(review): obfuscated class -- all role comments below are reconstructed
// from structure and should be confirmed against the pre-obfuscation sources.
// The shape matches an asynchronous callback dispatcher: a dedicated worker
// thread ("Micro Client Callback") drains two queues (d: inbound messages,
// e: completed delivery tokens) and invokes the user callback (b) for each.
// The int arguments passed to m.a(...) look like trace-point ids.
public final class c implements Runnable {

    /* renamed from: a reason: collision with root package name */
    // Capacity bound for the inbound message queue (back-pressure threshold).
    private static int f3a = 10;

    private a b;                      // user callback -- TODO confirm
    private a c;                      // owning client/comms object -- TODO confirm
    private Vector d;                 // queued inbound messages (o instances)
    private Vector e;                 // queued delivery tokens (h instances)
    private boolean f = false;        // worker-running flag
    private boolean g = false;        // quiescing flag (set by c())
    private Object h = new Object();  // monitor: stop handshake with the worker
    private Thread i;                 // the worker thread
    private Object j = new Object();  // monitor: "work available" signal
    private Object k = new Object();  // monitor: queue space / quiesce signal
    private boolean l = false;        // true while a message is being dispatched
    private a.a.a.a.a.a.d.a m;        // tracer/logger

    c(a.a.a.a.a.a.d.a aVar, a aVar2) {
        this.m = aVar;
        this.c = aVar2;
        this.d = new Vector(f3a);
        this.e = new Vector(f3a);
    }

    // Start the worker thread; no-op if it is already running.
    public final void a() {
        if (!this.f) {
            this.f = true;
            this.g = false;
            this.i = new Thread(this, "Micro Client Callback");
            this.i.start();
        }
    }

    // Enqueue an inbound message.  Blocks on monitor k while the queue is at
    // capacity (unless quiescing), then signals the worker via monitor j.
    public final void a(o oVar) {
        if (this.b != null) {
            synchronized (this.k) {
                if (!this.g && this.d.size() >= f3a) {
                    try {
                        this.m.a(709);
                        // Wait for the worker to drain a slot (it notifies k
                        // after every loop iteration).
                        this.k.wait();
                    } catch (InterruptedException e2) {
                        // Swallowed: the flags are re-checked below.
                    }
                }
            }
            if (!this.g) {
                this.d.addElement(oVar);
                synchronized (this.j) {
                    this.m.a(710);
                    this.j.notifyAll();
                }
            }
        }
    }

    // Install the user callback.
    public final void a(a aVar) {
        this.b = aVar;
    }

    // Enqueue a completed delivery token and wake the worker.
    public final void a(h hVar) {
        if (this.b != null) {
            this.e.addElement(hVar);
            synchronized (this.j) {
                if (this.m.a()) {
                    this.m.a(715, new Object[]{hVar});
                }
                this.j.notifyAll();
            }
        }
    }

    // Report a connection failure to the user callback.
    public final void a(Throwable th) {
        if (this.b != null) {
            this.m.a(708, null, th);
            this.b.a(th);
        }
    }

    // Stop the worker.  When called from another thread, wakes the worker
    // (monitor j) and waits on monitor h until the worker acknowledges exit.
    public final void b() {
        if (this.f) {
            this.m.a(700);
            this.f = false;
            if (!Thread.currentThread().equals(this.i)) {
                try {
                    synchronized (this.h) {
                        synchronized (this.j) {
                            this.m.a(701);
                            this.j.notifyAll();
                        }
                        this.m.a(702);
                        this.h.wait();
                    }
                } catch (InterruptedException e2) {
                    // Swallowed: stop is best-effort.
                }
            }
            this.m.a(703);
        }
    }

    // Quiesce: stop accepting/dispatching new messages; releases any producer
    // blocked in a(o) and waits for an in-flight dispatch (l) to finish.
    public final void c() {
        this.g = true;
        synchronized (this.k) {
            this.m.a(711);
            this.k.notifyAll();
        }
        synchronized (this.k) {
            if (this.l) {
                try {
                    this.m.a(712);
                    this.k.wait();
                } catch (InterruptedException e2) {
                    // Swallowed: quiesce is best-effort.
                }
            }
        }
    }

    /* access modifiers changed from: protected */
    // Expose the worker thread (used by the owner, presumably for identity checks).
    public final Thread d() {
        return this.i;
    }

    // Worker loop: sleep on monitor j until work arrives, dispatch one token
    // and/or one message per iteration, then notify monitor k (queue space /
    // quiesce progress).  On exit, clears the queue and signals monitor h.
    public final void run() {
        while (this.f) {
            try {
                synchronized (this.j) {
                    if (this.d.isEmpty() && this.e.isEmpty()) {
                        this.m.a(704);
                        this.j.wait();
                    }
                }
            } catch (InterruptedException e2) {
                // Swallowed: loop condition is re-checked.
            }
            if (this.f) {
                // Deliver at most one completed token per iteration.
                if (!this.e.isEmpty() && this.b != null) {
                    h hVar = (h) this.e.elementAt(0);
                    this.e.removeElementAt(0);
                    if (this.m.a()) {
                        this.m.a(705, new Object[]{hVar});
                    }
                    this.b.a(hVar);
                }
                // Dispatch at most one inbound message per iteration.
                if (!this.d.isEmpty()) {
                    if (this.g) {
                        // Quiescing: drop everything still queued.
                        this.d.clear();
                    } else if (this.c.a()) {
                        this.l = true;
                        o oVar = (o) this.d.elementAt(0);
                        this.d.removeElementAt(0);
                        if (this.c.a() && this.b != null) {
                            String g2 = oVar.g();
                            n a2 = g2 != null ? this.c.a(g2) : null;
                            try {
                                if (this.m.a()) {
                                    this.m.a(713, new Object[]{a2.a(), new Integer(oVar.i())});
                                }
                                this.b.a(a2, oVar.h());
                                // The d()==1 / d()==2 branches look like QoS 1
                                // (ack) vs QoS 2 (rec/comp) handling -- confirm.
                                if (oVar.h().d() == 1) {
                                    this.c.b(new k(oVar));
                                } else if (oVar.h().d() == 2) {
                                    this.c.a(oVar);
                                    this.c.b(new l(oVar));
                                }
                            } catch (Exception e3) {
                                // Callback failure is reported to the owner.
                                this.m.a(714, null, e3);
                                this.c.a(new i(e3));
                            }
                        }
                        this.l = false;
                    }
                }
                synchronized (this.k) {
                    // Wake blocked producers and any quiesce waiter.
                    this.m.a(706);
                    this.k.notifyAll();
                }
            }
        }
        this.d.clear();
        synchronized (this.h) {
            // Acknowledge shutdown to b().
            this.m.a(707);
            this.h.notifyAll();
        }
    }
}
| 3,750 |
410 |
<filename>src/Commands/DeselectSurfaceCmd.h
#pragma once
#include "SurfaceManager.h"
#include "BaseCmd.h"
#include "BaseSurface.h"
class ofxPiMapper;
namespace ofx {
namespace piMapper {
// Undoable command that clears the current surface selection in the
// SurfaceManager.  undo() restores the previously selected surface and,
// presumably, its selected vertex -- confirm against the .cpp implementation.
class DeselectSurfaceCmd : public BaseUndoCmd {
	public:
		DeselectSurfaceCmd(SurfaceManager * sm);
		void exec();
		void undo();

	private:
		SurfaceManager * _surfaceManager;
		BaseSurface * _surface;      // surface selected before exec()
		int _selectedVertexIndex;    // vertex selection to restore on undo()
};
} // namespace piMapper
} // namespace ofx
| 176 |
590 |
#ifndef __R328_IRQ_H__
#define __R328_IRQ_H__
#ifdef __cplusplus
extern "C" {
#endif
#define R328_IRQ_NMI (64)
#define R328_IRQ_DMA (66)
#define R328_IRQ_VAD_WAKE (67)
#define R328_IRQ_VAD_DATA (68)
#define R328_IRQ_USB_DEVICE (69)
#define R328_IRQ_USB_EHCI (70)
#define R328_IRQ_USB_OHCI (71)
#define R328_IRQ_GPIOB (75)
#define R328_IRQ_GPIOE (76)
#define R328_IRQ_GPIOG (78)
#define R328_IRQ_GPIOH (79)
#define R328_IRQ_GPADC (80)
#define R328_IRQ_THERMAL (81)
#define R328_IRQ_LRADC (82)
#define R328_IRQ_OWA (83)
#define R328_IRQ_DMIC (84)
#define R328_IRQ_MSI (86)
#define R328_IRQ_SMC (87)
#define R328_IRQ_WDOG (88)
#define R328_IRQ_CCU_FERR (89)
#define R328_IRQ_BUS_TIMEOUT (90)
#define R328_IRQ_PSI (91)
#define R328_IRQ_LEDC (92)
#define R328_IRQ_AUDIO_DAC (93)
#define R328_IRQ_AUDIO_ADC (94)
#define R328_IRQ_CE_NS (97)
#define R328_IRQ_CE (98)
#define R328_IRQ_I2S0 (99)
#define R328_IRQ_I2S1 (100)
#define R328_IRQ_I2S2 (101)
#define R328_IRQ_TWI0 (102)
#define R328_IRQ_TWI1 (103)
#define R328_IRQ_SMHC1 (106)
#define R328_IRQ_UART0 (108)
#define R328_IRQ_UART1 (109)
#define R328_IRQ_UART2 (110)
#define R328_IRQ_UART3 (111)
#define R328_IRQ_SPI0 (113)
#define R328_IRQ_SPI1 (114)
#define R328_IRQ_HSTIME0 (115)
#define R328_IRQ_HSTIME1 (116)
#define R328_IRQ_TIME0 (117)
#define R328_IRQ_TIME1 (118)
#define R328_IRQ_C0_CTI0 (160)
#define R328_IRQ_C0_CTI1 (161)
#define R328_IRQ_C0_COMMTX0 (162)
#define R328_IRQ_C0_COMMTX1 (163)
#define R328_IRQ_C0_COMMRX0 (164)
#define R328_IRQ_C0_COMMRX1 (165)
#define R328_IRQ_C0_PMU0 (166)
#define R328_IRQ_C0_PMU1 (167)
#define R328_IRQ_C0_AXI_ERROR (168)
#define R328_IRQ_GPIOB0 (192)
#define R328_IRQ_GPIOB1 (193)
#define R328_IRQ_GPIOB2 (194)
#define R328_IRQ_GPIOB3 (195)
#define R328_IRQ_GPIOB4 (196)
#define R328_IRQ_GPIOB5 (197)
#define R328_IRQ_GPIOB6 (198)
#define R328_IRQ_GPIOB7 (199)
#define R328_IRQ_GPIOB8 (200)
#define R328_IRQ_GPIOB9 (201)
#define R328_IRQ_GPIOB10 (202)
#define R328_IRQ_GPIOB11 (203)
#define R328_IRQ_GPIOB12 (204)
#define R328_IRQ_GPIOB13 (205)
#define R328_IRQ_GPIOE0 (224)
#define R328_IRQ_GPIOE1 (225)
#define R328_IRQ_GPIOE2 (226)
#define R328_IRQ_GPIOE3 (227)
#define R328_IRQ_GPIOE4 (228)
#define R328_IRQ_GPIOE5 (229)
#define R328_IRQ_GPIOE6 (230)
#define R328_IRQ_GPIOG0 (256)
#define R328_IRQ_GPIOG1 (257)
#define R328_IRQ_GPIOG2 (258)
#define R328_IRQ_GPIOG3 (259)
#define R328_IRQ_GPIOG4 (260)
#define R328_IRQ_GPIOG5 (261)
#define R328_IRQ_GPIOG6 (262)
#define R328_IRQ_GPIOG7 (263)
#define R328_IRQ_GPIOG8 (264)
#define R328_IRQ_GPIOG9 (265)
#define R328_IRQ_GPIOG10 (266)
#define R328_IRQ_GPIOG11 (267)
#define R328_IRQ_GPIOG12 (268)
#define R328_IRQ_GPIOG13 (269)
#define R328_IRQ_GPIOG14 (270)
#define R328_IRQ_GPIOG15 (271)
#define R328_IRQ_GPIOH0 (288)
#define R328_IRQ_GPIOH1 (289)
#define R328_IRQ_GPIOH2 (290)
#define R328_IRQ_GPIOH3 (291)
#define R328_IRQ_GPIOH4 (292)
#define R328_IRQ_GPIOH5 (293)
#define R328_IRQ_GPIOH6 (294)
#define R328_IRQ_GPIOH7 (295)
#define R328_IRQ_GPIOH8 (296)
#define R328_IRQ_GPIOH9 (297)
#ifdef __cplusplus
}
#endif
#endif /* __R328_IRQ_H__ */
| 1,913 |
400 |
import cv2
import random
import numpy as np
from PIL import Image
from PIL import ImageOps
from PIL import ImageFilter
from torch import Tensor
from typing import List
from typing import Tuple
from typing import Optional
from skimage.transform import resize
from torchvision.transforms import transforms
from torchvision.transforms import InterpolationMode
from .A import *
from .pt import *
from .general import *
from .....data import Compose
from .....data import Transforms
from .....misc.toolkit import min_max_normalize
from .....misc.toolkit import imagenet_normalize
@Transforms.register("for_generation")
class TransformForGeneration(Compose):
def __init__(
self,
img_size: Optional[int] = None,
*,
inverse: bool = False,
to_gray: bool = False,
to_rgb: bool = False,
):
transform_list: List[Transforms] = []
if img_size is not None:
transform_list.extend([Resize(img_size), ToArray()])
if to_rgb:
if to_gray:
msg = "should not use `to_rgb` and `to_gray` at the same time"
raise ValueError(msg)
transform_list.append(ToRGB())
elif to_gray:
transform_list.append(ToGray())
transform_list.extend([ToTensor(), N1To1()])
if inverse:
transform_list.append(InverseN1To1())
super().__init__(transform_list)
@Transforms.register("for_imagenet")
class TransformForImagenet(Compose):
def __init__(self, img_size: int = 224): # type: ignore
super().__init__([AResize(img_size), ANormalize(), ToTensor()])
@Transforms.register("ssl")
class SSLTransform(Transforms):
class Augmentation:
class GaussianBlur:
def __init__(
self,
p: float = 0.5,
radius_min: float = 0.1,
radius_max: float = 2.0,
):
self.prob = p
self.radius_min = radius_min
self.radius_max = radius_max
def __call__(self, img: Image) -> Image:
if random.random() > self.prob:
return img
r = random.uniform(self.radius_min, self.radius_max)
return img.filter(ImageFilter.GaussianBlur(radius=r))
class Solarization:
def __init__(self, p: float):
self.p = p
def __call__(self, img: Image) -> Image:
if random.random() > self.p:
return img
return ImageOps.solarize(img)
def __init__(
self,
img_size: int,
to_gray: bool,
local_crops_number: int,
local_crops_scale: Tuple[float, float],
global_crops_scale: Tuple[float, float],
):
self.to_gray = ToGray().fn if to_gray else None
flip_and_color_jitter = transforms.Compose(
[
transforms.RandomHorizontalFlip(p=0.5),
ColorJitter(p=0.8),
transforms.RandomGrayscale(p=0.2),
]
)
normalize = transforms.Compose(
[
transforms.ToTensor(),
transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
]
)
# global crop 1
self.global_transform1 = transforms.Compose(
[
transforms.RandomResizedCrop(
img_size,
scale=global_crops_scale,
interpolation=InterpolationMode.BICUBIC,
),
flip_and_color_jitter,
self.GaussianBlur(1.0),
normalize,
]
)
# global crop 2
self.global_transform2 = transforms.Compose(
[
transforms.RandomResizedCrop(
img_size,
scale=global_crops_scale,
interpolation=InterpolationMode.BICUBIC,
),
flip_and_color_jitter,
self.GaussianBlur(0.1),
self.Solarization(0.2),
normalize,
]
)
# local crop
self.local_crops_number = local_crops_number
self.local_transform = transforms.Compose(
[
transforms.RandomResizedCrop(
int(img_size * 3 / 7),
scale=local_crops_scale,
interpolation=InterpolationMode.BICUBIC,
),
flip_and_color_jitter,
self.GaussianBlur(0.5),
normalize,
]
)
def __call__(self, image: Image) -> Image:
image = image.convert("RGB")
crops = [self.global_transform1(image), self.global_transform2(image)]
for _ in range(self.local_crops_number):
crops.append(self.local_transform(image))
if self.to_gray is not None:
crops = [self.to_gray(crop) for crop in crops]
return crops
def __init__(
self,
img_size: int,
to_gray: bool = False,
local_crops_number: int = 8,
local_crops_scale: Tuple[float, float] = (0.05, 0.4),
global_crops_scale: Tuple[float, float] = (0.4, 1.0),
):
super().__init__()
self.fn = self.Augmentation(
img_size,
to_gray,
local_crops_number,
local_crops_scale,
global_crops_scale,
)
@property
def need_batch_process(self) -> bool:
return False
@Transforms.register("ssl_test")
class SSLTestTransform(Transforms):
def __init__(self, img_size: int, to_gray: bool = False):
super().__init__()
self.img_size = img_size
self.to_gray = ToGray().fn if to_gray else None
self.larger_size = int(round(img_size * 8.0 / 7.0))
def fn(self, img: Image.Image) -> Tensor:
img = img.convert("RGB")
img.thumbnail((self.larger_size, self.larger_size), Image.ANTIALIAS)
img_arr = np.array(img)
resized_img = resize(img_arr, (self.img_size, self.img_size), mode="constant")
resized_img = resized_img.astype(np.float32)
img_arr = min_max_normalize(resized_img)
img_arr = imagenet_normalize(img_arr)
if self.to_gray is not None:
img_arr = self.to_gray(img_arr)
return img_arr.transpose([2, 0, 1])
@property
def need_batch_process(self) -> bool:
return False
@Transforms.register("a_bundle")
class ABundleTransform(Compose):
def __init__(
self,
*,
resize_size: int = 320,
crop_size: Optional[int] = 288,
p: float = 0.5,
label_alias: Optional[str] = None,
):
transform_list: List[Transforms]
transform_list = [AResize(resize_size, label_alias=label_alias)]
if crop_size is not None:
transform_list.append(ARandomCrop(crop_size, label_alias=label_alias))
transform_list.extend(
[
AHFlip(p, label_alias=label_alias),
AVFlip(p, label_alias=label_alias),
AShiftScaleRotate(p, cv2.BORDER_CONSTANT, label_alias=label_alias),
ARGBShift(p=p, label_alias=label_alias),
ASolarize(p=p, label_alias=label_alias),
AGaussianBlur(p=p, label_alias=label_alias),
AHueSaturationValue(p=p, label_alias=label_alias),
ARandomBrightnessContrast(p=p, label_alias=label_alias),
ANormalize(label_alias=label_alias),
AToTensor(label_alias=label_alias),
]
)
super().__init__(transform_list)
@Transforms.register("a_bundle_test")
class ABundleTestTransform(Compose):
def __init__(self, *, resize_size: int = 320, label_alias: Optional[str] = None):
super().__init__(
[
AResize(resize_size, label_alias=label_alias),
ANormalize(label_alias=label_alias),
AToTensor(label_alias=label_alias),
]
)
@Transforms.register("style_transfer")
class StyleTransferTransform(Compose):
def __init__(
self,
*,
resize_size: int = 512,
crop_size: int = 256,
label_alias: Optional[str] = None,
):
super().__init__(
[
AResize(resize_size, label_alias=label_alias),
ARandomCrop(crop_size, label_alias=label_alias),
AToRGB(),
AToTensor(label_alias=label_alias),
]
)
@Transforms.register("style_transfer_test")
class StyleTransferTestTransform(Compose):
def __init__(self, *, resize_size: int = 256, label_alias: Optional[str] = None):
super().__init__(
[
AResize(resize_size, label_alias=label_alias),
AToRGB(),
AToTensor(label_alias=label_alias),
]
)
@Transforms.register("clf")
class ClassificationTransform(Compose):
def __init__(
self,
*,
p: float = 0.5,
resize_size: int = 512,
label_alias: Optional[str] = None,
):
if label_alias is not None:
raise ValueError("`label_alias` should not be provided in `Classification`")
super().__init__(
[
AResize(int(resize_size * 1.2)),
ToRGB(),
ARandomCrop(resize_size),
AHFlip(p),
AToTensor(),
ColorJitter(p=min(1.0, p * 1.6)),
RandomErase(p=p),
Normalize(),
]
)
@Transforms.register("clf_test")
class ClassificationTestTransform(Compose):
def __init__(self, *, resize_size: int = 512, label_alias: Optional[str] = None):
if label_alias is not None:
raise ValueError("`label_alias` should not be provided in `Classification`")
super().__init__([AResize(resize_size), ToRGB(), ANormalize(), AToTensor()])
__all__ = [
"TransformForGeneration",
"TransformForImagenet",
"SSLTransform",
"SSLTestTransform",
"ABundleTransform",
"ABundleTestTransform",
"StyleTransferTransform",
"StyleTransferTestTransform",
"ClassificationTransform",
"ClassificationTestTransform",
]
| 5,511 |
641 |
<gh_stars>100-1000
package com.docker.atsea.repositories;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import com.docker.atsea.model.Product;
/**
 * Spring Data JPA repository for {@link Product} entities.
 */
@Repository
@Transactional
public interface ProductRepository extends JpaRepository<Product, Long> {
    /**
     * Finds a product by its name (Spring Data derived query).
     *
     * @param name the product name to look up
     * @return the matching product, or {@code null} if none is found
     */
    Product findByName(String name);
}
| 131 |
3,102 |
#if 0
// RUN: not %clang_cc1 -verify %s 2>&1 | FileCheck %s
// Please note that all comments are inside "#if 0" blocks so that
// VerifyDiagnosticConsumer sees no comments while processing this
// test-case (and hence no expected-* directives).
#endif
#include "verify2.h"
#error source
#if 0
// expected-error {{should be ignored}}
// CHECK: error: no expected directives found: consider use of 'expected-no-diagnostics'
// CHECK-NEXT: error: 'error' diagnostics seen but not expected:
// CHECK-NEXT: Line 5: header
// CHECK-NEXT: Line 10: source
// CHECK-NEXT: 3 errors generated.
#endif
#ifdef CHECK2
// RUN: not %clang_cc1 -DCHECK2 -verify %s 2>&1 | FileCheck -check-prefix=CHECK2 %s
// The following checks that -verify can match "any line" in an included file.
// The location of the diagnostic need therefore only match in the file, not to
// a specific line number. This is useful where -verify is used as a testing
// tool for 3rd-party libraries where headers may change and the specific line
// number of a diagnostic in a header is not important.
// [email protected]:* {{header}}
// [email protected]:* {{unknown}}
// CHECK2: error: 'error' diagnostics expected but not seen:
// CHECK2-NEXT: File {{.*}}verify2.h Line * (directive at {{.*}}verify2.c:32): unknown
// CHECK2-NEXT: error: 'error' diagnostics seen but not expected:
// CHECK2-NEXT: File {{.*}}verify2.c Line 10: source
// CHECK2-NEXT: 2 errors generated.
#endif
| 486 |
742 |
from __future__ import division, print_function, absolute_import
import time
import datetime
import sys
import traceback
import socket
import threading
import os
import signal
import atexit
import platform
import random
import math
from .runtime import min_version, runtime_info, register_signal
from .utils import timestamp, generate_uuid
from .config import Config
from .config_loader import ConfigLoader
from .message_queue import MessageQueue
from .frame_cache import FrameCache
from .reporters.process_reporter import ProcessReporter
from .reporters.profile_reporter import ProfileReporter, ProfilerConfig
from .reporters.error_reporter import ErrorReporter
from .reporters.span_reporter import SpanReporter
from .profilers.cpu_profiler import CPUProfiler
from .profilers.allocation_profiler import AllocationProfiler
from .profilers.block_profiler import BlockProfiler
class Span(object):
    """Handle for an active profiling span.

    Returned by Agent.profile(); call stop() (or use the span as a
    context manager) to finish it and report its duration.
    """
    def __init__(self, stop_func = None):
        # Callable invoked when the span is stopped; None makes stop() a no-op.
        self.stop_func = stop_func
    def stop(self):
        """Finish the span by invoking the stop callback, if any."""
        if self.stop_func:
            self.stop_func()
    def __enter__(self):
        # Return the span itself so `with agent.profile() as span:` binds
        # a usable object (previously this returned None).
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        self.stop()
class Agent(object):
    """StackImpact Python agent.

    Owns the reporting pipeline (config loader, message queue, frame
    cache) and the CPU/allocation/block profile reporters for the
    current process.  Create once, then call start(**options).
    """
    AGENT_VERSION = "1.2.6"
    SAAS_DASHBOARD_ADDRESS = "https://agent-api.stackimpact.com"
    def __init__(self, **kwargs):
        # Lifecycle flags.
        self.agent_started = False
        self.agent_destroyed = False
        self.profiler_active = False
        self.span_active = False
        # Callable queued for execution on the main thread; dispatched by
        # the SIGUSR2 handler installed in start() (see run_in_main_thread).
        self.main_thread_func = None
        # Run identity; assigned in start().
        self.run_ts = None
        self.run_id = None
        self.config = Config(self)
        self.config_loader = ConfigLoader(self)
        self.message_queue = MessageQueue(self)
        self.frame_cache = FrameCache(self)
        self.process_reporter = ProcessReporter(self)
        self.error_reporter = ErrorReporter(self)
        self.span_reporter = SpanReporter(self)
        # The three profile reporters share identical timing parameters.
        config = ProfilerConfig()
        config.log_prefix = 'CPU profiler'
        config.max_profile_duration = 20
        config.max_span_duration = 5
        config.max_span_count = 30
        config.span_interval = 20
        config.report_interval = 120
        self.cpu_reporter = ProfileReporter(self, CPUProfiler(self), config)
        config = ProfilerConfig()
        config.log_prefix = 'Allocation profiler'
        config.max_profile_duration = 20
        config.max_span_duration = 5
        config.max_span_count = 30
        config.span_interval = 20
        config.report_interval = 120
        self.allocation_reporter = ProfileReporter(self, AllocationProfiler(self), config)
        config = ProfilerConfig()
        config.log_prefix = 'Block profiler'
        config.max_profile_duration = 20
        config.max_span_duration = 5
        config.max_span_count = 30
        config.span_interval = 20
        config.report_interval = 120
        self.block_reporter = ProfileReporter(self, BlockProfiler(self), config)
        # Option dict; populated from start(**kwargs).
        self.options = None
    def get_option(self, name, default_val=None):
        """Return option `name` given to start(), or `default_val`."""
        if name not in self.options:
            return default_val
        else:
            return self.options[name]
    def start(self, **kwargs):
        """Validate options, install signal/exit handlers and start the
        agent's subsystems.

        Raises on unsupported interpreter or missing required options
        (`agent_key`, `app_name`).  Safe to call twice (second call is
        a no-op); a destroyed agent cannot be restarted.
        """
        if not min_version(2, 7) and not min_version(3, 4):
            raise Exception('Supported Python versions 2.6 or higher and 3.4 or higher')
        if platform.python_implementation() != 'CPython':
            raise Exception('Supported Python interpreter is CPython')
        if self.agent_destroyed:
            self.log('Destroyed agent cannot be started')
            return
        if self.agent_started:
            return
        self.options = kwargs
        if 'auto_profiling' not in self.options:
            self.options['auto_profiling'] = True
        if 'dashboard_address' not in self.options:
            self.options['dashboard_address'] = self.SAAS_DASHBOARD_ADDRESS
        if 'agent_key' not in self.options:
            raise Exception('missing option: agent_key')
        if 'app_name' not in self.options:
            raise Exception('missing option: app_name')
        if 'host_name' not in self.options:
            self.options['host_name'] = socket.gethostname()
        self.run_id = generate_uuid()
        self.run_ts = timestamp()
        self.config_loader.start()
        self.message_queue.start()
        self.frame_cache.start()
        self.cpu_reporter.setup()
        self.allocation_reporter.setup()
        self.block_reporter.setup()
        self.span_reporter.setup()
        self.error_reporter.setup()
        self.process_reporter.setup()
        # execute main_thread_func in main thread on signal
        def _signal_handler(signum, frame):
            if(self.main_thread_func):
                func = self.main_thread_func
                self.main_thread_func = None
                try:
                    func()
                except Exception:
                    self.exception()
            return True
        if not runtime_info.OS_WIN:
            register_signal(signal.SIGUSR2, _signal_handler)
        if self.get_option('auto_destroy') is None or self.get_option('auto_destroy') is True:
            # destroy agent on exit
            def _exit_handler(*arg):
                if not self.agent_started or self.agent_destroyed:
                    return
                try:
                    self.message_queue.flush()
                    self.destroy()
                except Exception:
                    self.exception()
            atexit.register(_exit_handler)
            if not runtime_info.OS_WIN:
                register_signal(signal.SIGQUIT, _exit_handler, once = True)
                register_signal(signal.SIGINT, _exit_handler, once = True)
                register_signal(signal.SIGTERM, _exit_handler, once = True)
                register_signal(signal.SIGHUP, _exit_handler, once = True)
        self.agent_started = True
        self.log('Agent started')
    def enable(self):
        """Start all reporters if the agent is currently disabled."""
        if not self.config.is_agent_enabled():
            self.cpu_reporter.start()
            self.allocation_reporter.start()
            self.block_reporter.start()
            self.span_reporter.start()
            self.error_reporter.start()
            self.process_reporter.start()
            self.config.set_agent_enabled(True)
    def disable(self):
        """Stop all reporters if the agent is currently enabled."""
        if self.config.is_agent_enabled():
            self.cpu_reporter.stop()
            self.allocation_reporter.stop()
            self.block_reporter.stop()
            self.span_reporter.stop()
            self.error_reporter.stop()
            self.process_reporter.stop()
            self.config.set_agent_enabled(False)
    def profile(self, name='Default'):
        """Begin a manual span and return a Span handle.

        At most one span is active at a time; a no-op Span is returned
        when the agent is not started or another span is active.  One of
        the active profile reporters is picked at random to profile the
        span.
        """
        if not self.agent_started or self.span_active:
            return Span(None)
        self.span_active = True
        selected_reporter = None
        active_reporters = []
        if self.cpu_reporter.started:
            active_reporters.append(self.cpu_reporter)
        if self.allocation_reporter.started:
            active_reporters.append(self.allocation_reporter)
        if self.block_reporter.started:
            active_reporters.append(self.block_reporter)
        if len(active_reporters) > 0:
            # Random choice among the active reporters.
            selected_reporter = active_reporters[int(math.floor(random.random() * len(active_reporters)))]
            if not selected_reporter.start_profiling(True, True):
                selected_reporter = None
        start_timestamp = time.time()
        def stop_func():
            # Runs when the returned Span is stopped.
            if selected_reporter:
                selected_reporter.stop_profiling()
            duration = time.time() - start_timestamp
            self.span_reporter.record_span(name, duration)
            if not self.get_option('auto_profiling'):
                # Manual mode: report and flush immediately.
                self.config_loader.load(True)
                if selected_reporter:
                    selected_reporter.report(True);
                self.message_queue.flush(True)
            self.span_active = False
        return Span(stop_func)
    def _start_profiler(self, reporter):
        # Manual-profiling entry point; no-op when auto profiling is on.
        if not self.agent_started or self.get_option('auto_profiling'):
            return
        self.span_active = True
        reporter.start()
        reporter.start_profiling(True, False)
    def _stop_profiler(self, reporter):
        # Manual-profiling exit point; reports and flushes synchronously.
        if not self.agent_started or self.get_option('auto_profiling'):
            return
        reporter.stop_profiling()
        reporter.report(False)
        reporter.stop()
        self.message_queue.flush(False)
        self.span_active = False
    def start_cpu_profiler(self):
        """Manually start the CPU profiler (auto_profiling=False only)."""
        self._start_profiler(self.cpu_reporter)
    def stop_cpu_profiler(self):
        """Manually stop the CPU profiler and report its data."""
        self._stop_profiler(self.cpu_reporter)
    def start_allocation_profiler(self):
        """Manually start the allocation profiler."""
        self._start_profiler(self.allocation_reporter)
    def stop_allocation_profiler(self):
        """Manually stop the allocation profiler and report its data."""
        self._stop_profiler(self.allocation_reporter)
    def start_block_profiler(self):
        """Manually start the block profiler."""
        self._start_profiler(self.block_reporter)
    def stop_block_profiler(self):
        """Manually stop the block profiler and report its data."""
        self._stop_profiler(self.block_reporter)
    def destroy(self):
        """Permanently stop and tear down all subsystems (irreversible)."""
        if not self.agent_started:
            self.log('Agent has not been started')
            return
        if self.agent_destroyed:
            return
        self.config_loader.stop()
        self.message_queue.stop()
        self.frame_cache.stop()
        self.cpu_reporter.stop()
        self.allocation_reporter.stop()
        self.block_reporter.stop()
        self.error_reporter.stop()
        self.span_reporter.stop()
        self.process_reporter.stop()
        self.cpu_reporter.destroy()
        self.allocation_reporter.destroy()
        self.block_reporter.destroy()
        self.error_reporter.destroy()
        self.span_reporter.destroy()
        self.process_reporter.destroy()
        self.agent_destroyed = True
        self.log('Agent destroyed')
    def log_prefix(self):
        """Timestamped prefix used by log/error output."""
        return '[' + datetime.datetime.now().strftime('%H:%M:%S.%f') + '] StackImpact ' + self.AGENT_VERSION + ':'
    def log(self, message):
        """Print a debug message when the `debug` option is set."""
        if self.get_option('debug'):
            print(self.log_prefix(), message)
    def print_err(self, *args, **kwargs):
        """print() to stderr."""
        print(*args, file=sys.stderr, **kwargs)
    def error(self, message):
        """Print an error message to stderr when `debug` is set."""
        if self.get_option('debug'):
            self.print_err(self.log_prefix(), message)
    def exception(self):
        """Print the current exception traceback when `debug` is set."""
        if self.get_option('debug'):
            traceback.print_exc()
    def delay(self, timeout, func, *args):
        """Run `func(*args)` once after `timeout` seconds; returns the Timer."""
        def func_wrapper():
            try:
                func(*args)
            except Exception:
                self.exception()
        t = threading.Timer(timeout, func_wrapper, ())
        t.start()
        return t
    def schedule(self, timeout, interval, func, *args):
        """Run `func(*args)` after `timeout` seconds and then roughly every
        `interval` seconds; returns a cancelable TimerWraper."""
        tw = TimerWraper()
        def func_wrapper():
            start = time.time()
            try:
                func(*args)
            except Exception:
                self.exception()
            with tw.cancel_lock:
                if not tw.canceled:
                    # Re-arm, compensating for the time `func` took to run.
                    tw.timer = threading.Timer(abs(interval - (time.time() - start)), func_wrapper, ())
                    tw.timer.start()
        tw.timer = threading.Timer(timeout, func_wrapper, ())
        tw.timer.start()
        return tw
    def run_in_thread(self, func):
        """Run `func` in a fresh daemon-less thread; returns the Thread."""
        def func_wrapper():
            try:
                func()
            except Exception:
                self.exception()
        t = threading.Thread(target=func_wrapper)
        t.start()
        return t
    def run_in_main_thread(self, func):
        """Queue `func` to run on the main thread via SIGUSR2.

        Returns False if another function is already queued.
        """
        if self.main_thread_func:
            return False
        self.main_thread_func = func
        os.kill(os.getpid(), signal.SIGUSR2)
        return True
class TimerWraper(object):
    """Cancelable wrapper around a repeatedly re-armed threading.Timer.

    Agent.schedule() replaces `timer` on every tick; `cancel_lock`
    guards against re-arming after cancellation.
    """
    def __init__(self):
        self.timer = None
        self.cancel_lock = threading.Lock()
        self.canceled = False
    def cancel(self):
        """Mark the wrapper canceled and stop the pending timer, if any."""
        with self.cancel_lock:
            self.canceled = True
            # Guard: cancel() before the first timer is armed previously
            # raised AttributeError on None.
            if self.timer is not None:
                self.timer.cancel()
| 5,514 |
1,352 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#include "shell/commands.h"
// Shell command: send a cluster recovery request to the meta server.
// Nodes to recover come from either -s/--node_list_str (comma-separated
// ip:port list) or -f/--node_list_file (one ip:port per line; lines
// starting with '#' or ';' are comments).  Returns false to make the
// caller print usage on bad arguments; argument *content* errors are
// reported and return true.
bool recover(command_executor *e, shell_context *sc, arguments args)
{
    static struct option long_options[] = {{"node_list_file", required_argument, 0, 'f'},
                                           {"node_list_str", required_argument, 0, 's'},
                                           {"wait_seconds", required_argument, 0, 'w'},
                                           {"skip_bad_nodes", no_argument, 0, 'b'},
                                           {"skip_lost_partitions", no_argument, 0, 'l'},
                                           {"output", required_argument, 0, 'o'},
                                           {0, 0, 0, 0}};
    std::string node_list_file;
    std::string node_list_str;
    int wait_seconds = 100;
    std::string output_file;
    bool skip_bad_nodes = false;
    bool skip_lost_partitions = false;
    // getopt state must be reset between shell commands.
    optind = 0;
    while (true) {
        int option_index = 0;
        int c;
        c = getopt_long(args.argc, args.argv, "f:s:w:o:bl", long_options, &option_index);
        if (c == -1)
            break;
        switch (c) {
        case 'f':
            node_list_file = optarg;
            break;
        case 's':
            node_list_str = optarg;
            break;
        case 'w':
            if (!dsn::buf2int32(optarg, wait_seconds)) {
                fprintf(stderr, "parse %s as wait_seconds failed\n", optarg);
                return false;
            }
            break;
        case 'o':
            output_file = optarg;
            break;
        case 'b':
            skip_bad_nodes = true;
            break;
        case 'l':
            skip_lost_partitions = true;
            break;
        default:
            return false;
        }
    }
    if (wait_seconds <= 0) {
        fprintf(stderr, "invalid wait_seconds %d, should be positive number\n", wait_seconds);
        return false;
    }
    // Exactly one node source must be given.
    if (node_list_str.empty() && node_list_file.empty()) {
        fprintf(stderr, "should specify one of node_list_file/node_list_str\n");
        return false;
    }
    if (!node_list_str.empty() && !node_list_file.empty()) {
        fprintf(stderr, "should only specify one of node_list_file/node_list_str\n");
        return false;
    }
    std::vector<dsn::rpc_address> node_list;
    if (!node_list_str.empty()) {
        // Parse the comma-separated ip:port list.
        std::vector<std::string> tokens;
        dsn::utils::split_args(node_list_str.c_str(), tokens, ',');
        if (tokens.empty()) {
            fprintf(stderr, "can't parse node from node_list_str\n");
            return true;
        }
        for (std::string &token : tokens) {
            dsn::rpc_address node;
            if (!node.from_string_ipv4(token.c_str())) {
                fprintf(stderr, "parse %s as a ip:port node failed\n", token.c_str());
                return true;
            }
            node_list.push_back(node);
        }
    } else {
        // Parse the node list file, skipping blanks and comment lines.
        std::ifstream file(node_list_file);
        if (!file) {
            fprintf(stderr, "open file %s failed\n", node_list_file.c_str());
            return true;
        }
        std::string str;
        int lineno = 0;
        while (std::getline(file, str)) {
            lineno++;
            boost::trim(str);
            if (str.empty() || str[0] == '#' || str[0] == ';')
                continue;
            dsn::rpc_address node;
            if (!node.from_string_ipv4(str.c_str())) {
                fprintf(stderr,
                        "parse %s at file %s line %d as ip:port failed\n",
                        str.c_str(),
                        node_list_file.c_str(),
                        lineno);
                return true;
            }
            node_list.push_back(node);
        }
        if (node_list.empty()) {
            fprintf(stderr, "no node specified in file %s\n", node_list_file.c_str());
            return true;
        }
    }
    dsn::error_code ec = sc->ddl_client->do_recovery(
        node_list, wait_seconds, skip_bad_nodes, skip_lost_partitions, output_file);
    if (!output_file.empty()) {
        std::cout << "recover complete with err = " << ec.to_string() << std::endl;
    }
    return true;
}
dsn::rpc_address diagnose_recommend(const ddd_partition_info &pinfo);
// Recommend a node to become the new primary of a DDD partition, based on
// the last two dropped replicas: a candidate must be alive, collected, and
// have a last committed decree not behind the partition's; ties on decree
// are broken by the larger ballot.  Returns an invalid address when no safe
// candidate exists.
dsn::rpc_address diagnose_recommend(const ddd_partition_info &pinfo)
{
    if (pinfo.config.last_drops.size() < 2)
        return dsn::rpc_address();
    std::vector<dsn::rpc_address> last_two_nodes(pinfo.config.last_drops.end() - 2,
                                                 pinfo.config.last_drops.end());
    std::vector<ddd_node_info> last_dropped;
    for (auto &node : last_two_nodes) {
        auto it = std::find_if(pinfo.dropped.begin(),
                               pinfo.dropped.end(),
                               [&node](const ddd_node_info &r) { return r.node == node; });
        // FIX: the original dereferenced `it` unconditionally, which is
        // undefined behavior when the node is absent from `dropped`.
        if (it != pinfo.dropped.end() && it->is_alive && it->is_collected)
            last_dropped.push_back(*it);
    }
    if (last_dropped.size() == 1) {
        const ddd_node_info &ninfo = last_dropped.back();
        if (ninfo.last_committed_decree >= pinfo.config.last_committed_decree)
            return ninfo.node;
    } else if (last_dropped.size() == 2) {
        const ddd_node_info &secondary = last_dropped.front();
        const ddd_node_info &latest = last_dropped.back();
        // Select a best node to be the new primary, following the rule:
        // - choose the node with the largest last committed decree
        // - if last committed decree is the same, choose node with the largest ballot
        if (latest.last_committed_decree == secondary.last_committed_decree &&
            latest.last_committed_decree >= pinfo.config.last_committed_decree)
            return latest.ballot >= secondary.ballot ? latest.node : secondary.node;
        if (latest.last_committed_decree > secondary.last_committed_decree &&
            latest.last_committed_decree >= pinfo.config.last_committed_decree)
            return latest.node;
        if (secondary.last_committed_decree > latest.last_committed_decree &&
            secondary.last_committed_decree >= pinfo.config.last_committed_decree)
            return secondary.node;
    }
    return dsn::rpc_address();
}
// Shell command: list DDD ("dead-dead-dead") partitions and optionally
// propose a new primary for each.  Flags: -g limits to one gpid or app,
// -d enables diagnosis, -a auto-accepts the recommended primary, -s skips
// interactive prompts, -o writes the report to a file instead of stdout.
bool ddd_diagnose(command_executor *e, shell_context *sc, arguments args)
{
    static struct option long_options[] = {{"gpid", required_argument, 0, 'g'},
                                           {"diagnose", no_argument, 0, 'd'},
                                           {"auto_diagnose", no_argument, 0, 'a'},
                                           {"skip_prompt", no_argument, 0, 's'},
                                           {"output", required_argument, 0, 'o'},
                                           {0, 0, 0, 0}};
    std::string out_file;
    dsn::gpid id(-1, -1);
    bool diagnose = false;
    bool auto_diagnose = false;
    bool skip_prompt = false;
    // getopt state must be reset between shell commands.
    optind = 0;
    while (true) {
        int option_index = 0;
        int c;
        c = getopt_long(args.argc, args.argv, "g:daso:", long_options, &option_index);
        if (c == -1)
            break;
        switch (c) {
        case 'g':
            int pid;
            if (id.parse_from(optarg)) {
                // app_id.partition_index
            } else if (sscanf(optarg, "%d", &pid) == 1) {
                // app_id
                id.set_app_id(pid);
            } else {
                fprintf(stderr, "ERROR: invalid gpid %s\n", optarg);
                return false;
            }
            break;
        case 'd':
            diagnose = true;
            break;
        case 'a':
            auto_diagnose = true;
            break;
        case 's':
            skip_prompt = true;
            break;
        case 'o':
            out_file = optarg;
            break;
        default:
            return false;
        }
    }
    std::vector<ddd_partition_info> ddd_partitions;
    ::dsn::error_code ret = sc->ddl_client->ddd_diagnose(id, ddd_partitions);
    if (ret != dsn::ERR_OK) {
        fprintf(stderr, "ERROR: DDD diagnose failed with err = %s\n", ret.to_string());
        return true;
    }
    // Route output to the file if given, otherwise stdout.
    std::streambuf *buf;
    std::ofstream of;
    if (!out_file.empty()) {
        of.open(out_file);
        buf = of.rdbuf();
    } else {
        buf = std::cout.rdbuf();
    }
    std::ostream out(buf);
    out << "Total " << ddd_partitions.size() << " ddd partitions:" << std::endl;
    out << std::endl;
    int proposed_count = 0;
    int i = 0;
    for (const ddd_partition_info &pinfo : ddd_partitions) {
        out << "(" << ++i << ") " << pinfo.config.pid.to_string() << std::endl;
        out << "    config: ballot(" << pinfo.config.ballot << "), "
            << "last_committed(" << pinfo.config.last_committed_decree << ")" << std::endl;
        out << "    ----" << std::endl;
        // The last two entries of last_drops are the most recently dropped
        // replicas; mark them in the listing below.
        dsn::rpc_address latest_dropped, secondary_latest_dropped;
        if (pinfo.config.last_drops.size() > 0)
            latest_dropped = pinfo.config.last_drops[pinfo.config.last_drops.size() - 1];
        if (pinfo.config.last_drops.size() > 1)
            secondary_latest_dropped = pinfo.config.last_drops[pinfo.config.last_drops.size() - 2];
        int j = 0;
        for (const ddd_node_info &n : pinfo.dropped) {
            char time_buf[30];
            ::dsn::utils::time_ms_to_string(n.drop_time_ms, time_buf);
            out << "    dropped[" << j++ << "]: "
                << "node(" << n.node.to_string() << "), "
                << "drop_time(" << time_buf << "), "
                << "alive(" << (n.is_alive ? "true" : "false") << "), "
                << "collected(" << (n.is_collected ? "true" : "false") << "), "
                << "ballot(" << n.ballot << "), "
                << "last_committed(" << n.last_committed_decree << "), "
                << "last_prepared(" << n.last_prepared_decree << ")";
            if (n.node == latest_dropped)
                out << "  <== the latest";
            else if (n.node == secondary_latest_dropped)
                out << "  <== the secondary latest";
            out << std::endl;
        }
        out << "    ----" << std::endl;
        j = 0;
        for (const ::dsn::rpc_address &r : pinfo.config.last_drops) {
            out << "    last_drops[" << j++ << "]: "
                << "node(" << r.to_string() << ")";
            if (j == (int)pinfo.config.last_drops.size() - 1)
                out << "  <== the secondary latest";
            else if (j == (int)pinfo.config.last_drops.size())
                out << "  <== the latest";
            out << std::endl;
        }
        out << "    ----" << std::endl;
        out << "    ddd_reason: " << pinfo.reason << std::endl;
        if (diagnose) {
            out << "    ----" << std::endl;
            dsn::rpc_address primary = diagnose_recommend(pinfo);
            out << "    recommend_primary: "
                << (primary.is_invalid() ? "none" : primary.to_string());
            if (primary == latest_dropped)
                out << "  <== the latest";
            else if (primary == secondary_latest_dropped)
                out << "  <== the secondary latest";
            out << std::endl;
            // Interactive confirmation unless -s/-a was given.
            bool skip_this = false;
            if (!primary.is_invalid() && !auto_diagnose && !skip_prompt) {
                do {
                    std::cout << "    > Are you sure to use the recommend primary? [y/n/s(skip)]: ";
                    char c;
                    std::cin >> c;
                    if (c == 'y') {
                        break;
                    } else if (c == 'n') {
                        primary.set_invalid();
                        break;
                    } else if (c == 's') {
                        skip_this = true;
                        std::cout << "    > You have choosed to skip diagnosing this partition."
                                  << std::endl;
                        break;
                    }
                } while (true);
            }
            // No recommendation accepted: ask the operator for a node.
            if (primary.is_invalid() && !skip_prompt && !skip_this) {
                do {
                    std::cout << "    > Please input the primary node: ";
                    std::string addr;
                    std::cin >> addr;
                    if (primary.from_string_ipv4(addr.c_str())) {
                        break;
                    } else {
                        std::cout << "    > Sorry, you have input an invalid node address."
                                  << std::endl;
                    }
                } while (true);
            }
            // Send the ASSIGN_PRIMARY proposal to the meta server.
            if (!primary.is_invalid() && !skip_this) {
                dsn::replication::configuration_balancer_request request;
                request.gpid = pinfo.config.pid;
                request.action_list = {
                    new_proposal_action(primary, primary, config_type::CT_ASSIGN_PRIMARY)};
                request.force = false;
                dsn::error_code err = sc->ddl_client->send_balancer_proposal(request);
                out << "    propose_request: propose -g " << request.gpid.to_string()
                    << " -p ASSIGN_PRIMARY -t " << primary.to_string() << " -n "
                    << primary.to_string() << std::endl;
                out << "    propose_response: " << err.to_string() << std::endl;
                proposed_count++;
            } else {
                out << "    propose_request: none" << std::endl;
            }
        }
        out << std::endl;
        out << "Proposed count: " << proposed_count << "/" << ddd_partitions.size() << std::endl;
        out << std::endl;
    }
    std::cout << "Diagnose ddd done." << std::endl;
    return true;
}
| 7,535 |
1,178 |
#ifndef _XT_U32_H
#define _XT_U32_H 1
#include <linux/types.h>
/* Operators combining successive extracted 32-bit words in a u32 test. */
enum xt_u32_ops {
	XT_U32_AND,
	XT_U32_LEFTSH,
	XT_U32_RIGHTSH,
	XT_U32_AT,
};
/* One extraction step: operand `number`, combined via `nextop`
 * (an xt_u32_ops value) with the following step. */
struct xt_u32_location_element {
	__u32 number;
	__u8 nextop;
};
/* Inclusive [min, max] range the computed value is matched against. */
struct xt_u32_value_element {
	__u32 min;
	__u32 max;
};
/*
 * Any way to allow for an arbitrary number of elements?
 * For now, I settle with a limit of 10 each.
 */
#define XT_U32_MAXSIZE 10
/* One test: a chain of location steps plus the accepted value ranges;
 * `nnums`/`nvalues` give the used lengths of the fixed-size arrays. */
struct xt_u32_test {
	struct xt_u32_location_element location[XT_U32_MAXSIZE+1];
	struct xt_u32_value_element value[XT_U32_MAXSIZE+1];
	__u8 nnums;
	__u8 nvalues;
};
/* Match data for the u32 extension; NOTE(review): presumably all `ntests`
 * tests must match and `invert` negates the result -- confirm against the
 * kernel-side xt_u32 match implementation. */
struct xt_u32 {
	struct xt_u32_test tests[XT_U32_MAXSIZE+1];
	__u8 ntests;
	__u8 invert;
};
#endif /* _XT_U32_H */
| 330 |
2,542 |
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
using namespace std;
using namespace Common;
using namespace ServiceModel;
// Default-constructs a description with an empty name and empty OS.
ImageTypeDescription::ImageTypeDescription()
    : Name(),
    Os(L"")
{
}
// Case-insensitive equality on both Name and Os.
bool ImageTypeDescription::operator== (ImageTypeDescription const & other) const
{
    return (StringUtility::AreEqualCaseInsensitive(Name, other.Name) &&
        StringUtility::AreEqualCaseInsensitive(Os, other.Os));
}
// Negation of operator==.
bool ImageTypeDescription::operator!= (ImageTypeDescription const & other) const
{
    return !(*this == other);
}
// Writes a human-readable trace representation of this description.
void ImageTypeDescription::WriteTo(TextWriter & w, FormatOptions const &) const
{
    w.Write("ImageTypeDescription { ");
    w.Write("Name = {0}, ", Name);
    w.Write("Os = {0}, ", Os);
    w.Write("}");
}
// Parses an empty <Image> element: required Name attribute, optional Os.
void ImageTypeDescription::ReadFromXml(
    XmlReaderUPtr const & xmlReader)
{
    xmlReader->StartElement(
        *SchemaNames::Element_Image,
        *SchemaNames::Namespace);
    this->Name = xmlReader->ReadAttributeValue(*SchemaNames::Attribute_Name);
    if (xmlReader->HasAttribute(*SchemaNames::Attribute_Os))
    {
        this->Os = xmlReader->ReadAttributeValue(*SchemaNames::Attribute_Os);
    }
    // Read the rest of the empty element
    xmlReader->ReadElement();
}
// Serializes this description as an <Image Name="..." Os="..."/> element.
// Returns the first failing ErrorCode, or the result of WriteEndElement.
Common::ErrorCode ImageTypeDescription::WriteToXml(XmlWriterUPtr const & xmlWriter)
{   //<Image>
    // BUG FIX: this previously opened an Element_EnvironmentVariable element,
    // which does not round-trip with ReadFromXml (that parses Element_Image).
    ErrorCode er = xmlWriter->WriteStartElement(*SchemaNames::Element_Image, L"", *SchemaNames::Namespace);
    if (!er.IsSuccess())
    {
        return er;
    }
    er = xmlWriter->WriteAttribute(*SchemaNames::Attribute_Name, this->Name);
    if (!er.IsSuccess())
    {
        return er;
    }
    er = xmlWriter->WriteAttribute(*SchemaNames::Attribute_Os, this->Os);
    if (!er.IsSuccess())
    {
        return er;
    }
    //</Image>
    return xmlWriter->WriteEndElement();
}
// Resets both fields to empty strings.
void ImageTypeDescription::clear()
{
    this->Name.clear();
    this->Os.clear();
}
| 733 |
344 |
from gitlabber.format import PrintFormat
import pytest
import re
def test_format_parse():
    """argparse('JSON') should resolve to the PrintFormat.JSON member."""
    assert PrintFormat.JSON == PrintFormat.argparse("JSON")
def test_format_string():
    """Stringifying a PrintFormat member yields its lowercase name."""
    assert str(PrintFormat.JSON) == "json"
def test_repr():
    """repr() of a member should follow the '<PrintFormat: {...}>' format."""
    retval = repr(PrintFormat.JSON)
    match = re.match("^<PrintFormat: ({.*})>$", retval)
    # BUG FIX: the match result was computed but never checked, so this test
    # could never fail. Assert that the repr format actually matches.
    assert match is not None
def test_format_invalid():
    # For an unknown name, argparse() appears to return the raw input string
    # unchanged (presumably so argparse can report the invalid choice itself)
    # — TODO confirm against PrintFormat.argparse's implementation.
    assert "invalid_value" == PrintFormat.argparse("invalid_value")
| 162 |
14,668 |
<filename>chrome/browser/chromeos/policy/dlp/mock_dlp_content_manager.h
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_CHROMEOS_POLICY_DLP_MOCK_DLP_CONTENT_MANAGER_H_
#define CHROME_BROWSER_CHROMEOS_POLICY_DLP_MOCK_DLP_CONTENT_MANAGER_H_
#include "chrome/browser/chromeos/policy/dlp/dlp_content_manager.h"
#include "testing/gmock/include/gmock/gmock.h"
namespace policy {
// GMock test double for DlpContentManager, allowing tests to set expectations
// on the manager's notification hooks and restriction checks.
class MockDlpContentManager : public DlpContentManager {
 public:
  MockDlpContentManager();
  ~MockDlpContentManager() override;

  // Mocks DlpContentManager::OnConfidentialityChanged.
  MOCK_METHOD(void,
              OnConfidentialityChanged,
              (content::WebContents*, const DlpContentRestrictionSet&),
              (override));
  // Mocks DlpContentManager::OnWebContentsDestroyed.
  MOCK_METHOD(void,
              OnWebContentsDestroyed,
              (content::WebContents*),
              (override));
  // Mocks DlpContentManager::OnVisibilityChanged.
  MOCK_METHOD(void, OnVisibilityChanged, (content::WebContents*), (override));
  // Mocks DlpContentManager::CheckScreenShareRestriction; the verdict is
  // reported asynchronously through |callback|.
  MOCK_METHOD(void,
              CheckScreenShareRestriction,
              (const content::DesktopMediaID& media_id,
               const std::u16string& application_title,
               OnDlpRestrictionCheckedCallback callback),
              (override));
};
} // namespace policy
#endif // CHROME_BROWSER_CHROMEOS_POLICY_DLP_MOCK_DLP_CONTENT_MANAGER_H_
| 573 |
615 |
/* ************************************************************************
* Copyright 2013 Advanced Micro Devices, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ************************************************************************/
#ifndef BLAS_KGEN_LEGACY_H_
#define BLAS_KGEN_LEGACY_H_
#include "../blas_kgen.h"
/**
 * @internal
 * @brief Block multiplier flags
 * @ingroup BLAS_MAJOR_GENS
 */
typedef enum BlkmulFlags {
    BLKMUL_NO_FLAGS, /**< No flags */
    BLKMUL_TRANSPOSE = 0x01, /**< Transpose result */
    BLKMUL_IMAGE_PACKED = 0x02, /**< Data in image are packed */
    /**
     * Accumulate multiplication results to a
     * private location provided by caller
     */
    BLKMUL_OUTPUT_PRIVATE = 0x04,
    BLKMUL_SKEW_ROW = 0x08, /**< Use skew over block rows */
    BLKMUL_SKEW_COLUMN = 0x10, /**< Use skew over block columns */
    BLKMUL_INLINE = 0x20, /**< Generate an inline version */
    BLKMUL_TRANSPOSED_B = 0x40, /**< Block B is transposed */
    /** Don't use "&" operation in cyclic address evaluation, use always "%" */
    BLKMUL_AVOID_AND = 0x80
} BlkMulFlags;
/**
 * @internal
 * @brief Block multiplier core
 * @ingroup BLAS_MAJOR_GENS
 */
typedef enum BlkmulCore {
    /** Use separate multiplication and summation implemented by hand */
    BLKMUL_SEPARATE_MULADD,
    /** Use the 'dot' function */
    BLKMUL_DOT,
    /** Use the 'mad' function */
    BLKMUL_MAD
} BlkmulCore;
/**
 * @internal
 * @brief Argument names for the inline version of the block
 *        multiplier
 * @ingroup BLAS_MAJOR_GENS
 */
typedef struct BlkmulArgNames {
    const char *coordA; /**< Matrix A start coordinates */
    const char *coordB; /**< Matrix B start coordinates */
    const char *skewRow; /**< Skew over rows */
    const char *skewCol; /**< Skew over columns */
    const char *k; /**< Counter name in the loop over K */
    const char *vectBoundK; /**< Bound in the loop over K */
} BlkmulArgNames;
/**
 * @internal
 * @brief Options for matrix block multiplication
 *        generator
 * @ingroup BLAS_MAJOR_GENS
 */
typedef struct BlkMulOpts {
    /** OpenCL memory object type storing matrix (whole or its blocks) A */
    CLMemType aMobj;
    /** OpenCL memory object type storing matrix (whole or its blocks) B */
    CLMemType bMobj;
    BlkMulFlags flags; /**< Specific flags */
    BlkmulCore core; /**< Multiply and add core */
    /** List of argument names for the inline version */
    BlkmulArgNames argNames;
} BlkMulOpts;
/* Declares the BLAS specific enums within the given kernel generator context. */
void
declareBlasEnums(struct KgenContext *ctx);
/**
* @internal
* @brief Matrix block multiplication generator
*
* @param[out] ctx Generator context
* @param[in] subdims Subproblem dimensions; the first level reflects
* dimensions of the large blocks processed with the
* whole work group, and the second level
* reflects sizes of immediately multiplied small
* blocks within the single work item
* @param[in] dtype Data type the multiplying function will be
* generated for
* @param[in] opts Block multiplication options
*
* Generated functions have the following definitions: \n
*\n
* For the buffer based version:
* @code
* void
* funcName(
* <type> alpha,
* LPtr A,
* LPtr B,
* LPtr C,
* [,int2 skewRow]
* [,int skewCol]);
* @endcode
*
* Function naming rule:
* (type prefix)gemmBlock[Transp]_<width>_<height>
*\n
* It's assumed A, B and C point to start of data to be
* processed during this step.
*\n
* For the image based version: \n
* @code
* void
* funcName(
* <type> alpha,
* __read_only image2d_t A,
* int2 coordA,
* __read_only image2d_t B,
* int2 coordB,
* LPtr C,
* [,int2 skewRow],
* [,int skewCol]);
* @endcode
*
* Where coordA and coordB mean start image coordinates to fetch data from.
*\n
* For the image based version a mixed variant is possible when
* either A or B blocks are passed through the local memory.
*\n
* The 'skewRow' and 'skewCol' are optional arguments if the
* 'BLKMUL_SKEW_ROW' and "BLKMUL_SKEW_COLUMN" flag is specified
* respectively. 'y' field of the row skew is for the block A, and the
* 'x' one is for the block B.
*\n
* Output result can be put directly into a private location provided by the
* caller instead of the local one. It is achieved with 'BLKMUL_OUTPUT_PRIVATE'
* flag using.
*\n
* Pointer to this location should have the following types depending on the type
* of processed data: \n
* - float4 - for float
* - float2 - for complex float
* - double2 - for double and complex double
*\n\n
* Alpha is not taken in this case.
*\n
* The multiplier can be generated as well in the form of the dedicated
* function as in the inline form inserted to a kernel. \n In case of inline
* version the block multiplier becomes in fact the tile multiplier. In this
* case the caller should provide iteration over K.
*
* @return 0 on success, -EOVERFLOW on source buffer overflowing
*/
/**
* @internal
* @defgroup BLAS_MAJOR_GENS BLAS specific generators
* @ingroup MAJOR_GENS
*/
/*@{*/
/* Matrix block multiplication generator — see the documentation block above
 * for the full contract. Returns 0 on success, -EOVERFLOW on source buffer
 * overflowing. */
int
blkMulGen(
    struct KgenContext *ctx,
    const SubproblemDim subdims[2],
    DataType dtype,
    const BlkMulOpts *opts);
/* Result-update generator; the "Old" suffix suggests this is the legacy
 * variant retained for this legacy kgen module — confirm against callers. */
int
updateResultGenOld(
    struct KgenContext *ctx,
    const BlasGenSettings *gset,
    UpdateResultOp op,
    UpdateResultFlags flags,
    const UpresVarNames *uvarNames);
/*@}*/
#endif /* BLAS_KGEN_LEGACY_H_ */
| 2,209 |
2,291 |
{
"id" : 65,
"status" : "Accepted",
"summary" : "Ability to add \"OnZoomChanged\" event listener to OpenStreetMapView instance.",
"labels" : [ "Type-Enhancement", "Priority-Medium" ],
"stars" : 0,
"commentCount" : 5,
"comments" : [ {
"id" : 0,
"commenterId" : 3456884467250674452,
"content" : "Please add an ability to be notified on zoom level change outside of OpenStreetMapView class. Just implement ability to add \"OnZoomChanged\" event listener to OpenStreetMapView instance. This event should be fired after:\r\n- zoomIn;\r\n- zoomOut;\r\n- setZoomLevel;\r\n- any other methods, which change map zoom level?\r\n",
"timestamp" : 1277392517,
"attachments" : [ ]
}, {
"id" : 1,
"commenterId" : 3456884467250674452,
"content" : "Ability to handle this event in Overlays will be also useful (for example, in custom overlay, which behavior depends on current zoom level).",
"timestamp" : 1277392656,
"attachments" : [ ]
}, {
"id" : 2,
"commenterId" : 8937367184059112911,
"content" : "See also issue 55.",
"timestamp" : 1277401776,
"attachments" : [ ]
}, {
"id" : 3,
"commenterId" : 8937367184059112911,
"content" : "",
"timestamp" : 1278600042,
"attachments" : [ ]
}, {
"id" : 4,
"commenterId" : 8937367184059112911,
"content" : "Do the fixes for issue 55 also fix this?",
"timestamp" : 1284970465,
"attachments" : [ ]
} ]
}
| 571 |
872 |
<reponame>WswSummer15/light-reading-cloud
package cn.zealon.readingcloud.book.controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Category REST endpoints, rooted at the "category" path.
 * Currently an empty placeholder: no handler methods are implemented yet.
 *
 * @author: zealon
 * @since: 2019/4/3
 */
@RestController
@RequestMapping("category")
public class CategoryController {
}
| 124 |
451 |
<reponame>kikislater/micmac
#include <assert.h>
#include "NavigationData.h"
// -------------------------------------------------------------------------------
// Appends a navigation slot to the ephemeris data and bumps the per-PRN
// packet counter (PRN_COUNT is indexed by PRN-1).
// -------------------------------------------------------------------------------
void NavigationData::addNavigationSlot(NavigationSlot &slot){
	// Add slot to nav data
	this->navSlots.push_back(slot);
	// Increment PRN packet counter
	this->PRN_COUNT[slot.getPRN()-1]++;
}
// -------------------------------------------------------------------------------
// Tests whether the ephemeris data contain an entry satisfying:
//   (1) the PRN matches
//   (2) the packet date is <= the requested date
//   (3) the packet date is the most recent such date
// and that entry is no older than COEFF_SECURITY*RINEX_NAV_INTERVAL_SEC.
// -------------------------------------------------------------------------------
bool NavigationData::hasEphemeris(int PRN, GPSTime time){
	if ((PRN < 1) || (PRN > 36)){
		return false;
	}
	// NOTE(review): the valid-PRN upper bound is 36 here but 32 in
	// getNavigationSlot — confirm which range is intended.
	// BUG FIX: the index used to be a size_t counted down with "idx >= 0",
	// which is always true for an unsigned type; with an empty slot list or
	// no matching slot, idx wrapped around and .at(idx) threw
	// std::out_of_range instead of returning false.
	for (long idx = (long)this->navSlots.size() - 1; idx >= 0; idx--){
		if (this->navSlots.at(idx).getPRN() != PRN) continue;  // Condition 1
		if (this->navSlots.at(idx).getTime() > time) continue; // Condition 2
		// Condition 3: scanning backwards, the first hit is the most recent.
		return (time - this->navSlots.at(idx).getTime() <= COEFF_SECURITY*RINEX_NAV_INTERVAL_SEC);
	}
	return false;
}
// -------------------------------------------------------------------------------
// String overload (identifiers like "G12"): first checks that the
// constellation letter matches this dataset, then delegates to the
// integer-PRN overload with the two-digit PRN number.
// -------------------------------------------------------------------------------
bool NavigationData::hasEphemeris(std::string PRN, GPSTime time){
	if (PRN.substr(0,1) != this->constellation) return false;
	return this->hasEphemeris(std::stoi(PRN.substr(1,2)), time);
}
// -------------------------------------------------------------------------------
// Returns the ephemeris packet satisfying:
//   (1) the PRN matches
//   (2) the packet date is <= the requested date
//   (3) the packet date is the most recent such date
// Aborts (assert) on an invalid PRN or when no recent-enough packet exists.
// -------------------------------------------------------------------------------
NavigationSlot& NavigationData::getNavigationSlot(int PRN, GPSTime time){
	if ((PRN < 1) || (PRN > 32)){
		std::cout << "ERROR: [" << this->constellation << Utils::formatNumber(PRN,"%02d");
		std::cout << "] is not a valid GNSS PRN number" << std::endl;
		assert (false);
	}
	// BUG FIX: idx was a size_t counted down with "idx >= 0", which is always
	// true for an unsigned type; with an empty slot list or no matching slot,
	// idx wrapped around and .at(idx) threw std::out_of_range instead of
	// reaching the error report below. A signed index fixes the underflow.
	// (Also removed an unused local NavigationSlot.)
	long idx;
	bool found = false;
	for (idx = (long)this->navSlots.size() - 1; idx >= 0; idx--){
		if (this->navSlots.at(idx).getPRN() != PRN) continue;  // Condition 1
		if (this->navSlots.at(idx).getTime() > time) continue; // Condition 2
		found = true; break;                                   // Condition 3
	}
	if ((!found) || (time - this->navSlots.at(idx).getTime() > COEFF_SECURITY*RINEX_NAV_INTERVAL_SEC)){
		std::cout << PRN << std::endl;
		std::cout << "ERROR: GNSS time [" << time << "] is out of rinex nav file range for PRN [";
		std::cout << this->constellation << Utils::formatNumber(PRN,"%02d") << "]" << std::endl;
		assert (false);
	}
	return this->navSlots.at(idx);
}
// -------------------------------------------------------------------------------
// Calcul des positions des satellite à partir d'un slot de données de rinex .nav
// L'argument pseudorange permet de déduire le temps de propagation du signal
// -------------------------------------------------------------------------------
std::vector<ECEFCoords> NavigationData::computeSatellitePos(std::vector<std::string> PRN, GPSTime t, std::vector<double> psr){
std::vector<ECEFCoords> XYZ;
for (unsigned i=0; i<PRN.size(); i++){
XYZ.push_back(computeSatellitePos(PRN.at(i), t, psr.at(i)));
}
return XYZ;
}
// -------------------------------------------------------------------------------
// Calcul des erreur d'horloge de tous les satellites
// L'argument pseudorange permet de déduire le temps de propagation du signal
// -------------------------------------------------------------------------------
std::vector<double> NavigationData::computeSatelliteClockError(std::vector<std::string> PRN, GPSTime t, std::vector<double> psr){
std::vector<double> T;
for (unsigned i=0; i<PRN.size(); i++){
T.push_back(computeSatelliteClockError(PRN.at(i), t, psr.at(i)));
}
return T;
}
// -------------------------------------------------------------------------------
// Retrieval of the ephemeris packet (string overload, "G12"-style identifier):
// delegates to the integer-PRN overload with the two-digit PRN number.
// -------------------------------------------------------------------------------
NavigationSlot& NavigationData::getNavigationSlot(std::string PRN, GPSTime time){
	return this->getNavigationSlot(std::stoi(PRN.substr(1,2)), time);
}
// -------------------------------------------------------------------------------
// Satellite position from a rinex .nav data slot. The pseudorange argument
// is used to derive the signal propagation time.
// -------------------------------------------------------------------------------
ECEFCoords NavigationData::computeSatellitePos(int PRN, GPSTime time, double pseudorange){
	return this->getNavigationSlot(PRN, time).computeSatellitePos(time, pseudorange);
}
// -------------------------------------------------------------------------------
// Same as above; string overload for "G12"-style identifiers.
// -------------------------------------------------------------------------------
ECEFCoords NavigationData::computeSatellitePos(std::string PRN, GPSTime time, double pseudorange){
	return this->computeSatellitePos(std::stoi(PRN.substr(1,2)), time, pseudorange);
}
// -------------------------------------------------------------------------------
// Satellite position without a pseudorange (propagation time of zero).
// -------------------------------------------------------------------------------
ECEFCoords NavigationData::computeSatellitePos(int PRN, GPSTime time){
	return this->computeSatellitePos(PRN, time, 0);
}
// -------------------------------------------------------------------------------
// Same as above; string overload for "G12"-style identifiers.
// -------------------------------------------------------------------------------
ECEFCoords NavigationData::computeSatellitePos(std::string PRN, GPSTime time){
	return this->computeSatellitePos(std::stoi(PRN.substr(1,2)), time);
}
// -------------------------------------------------------------------------------
// Satellite clock error. The pseudorange argument is used to derive the
// signal propagation time.
// -------------------------------------------------------------------------------
double NavigationData::computeSatelliteClockError(int PRN, GPSTime time, double pseudorange){
	return this->getNavigationSlot(PRN, time).computeSatelliteClockError(time, pseudorange);
}
// -------------------------------------------------------------------------------
// Same as above; string overload for "G12"-style identifiers.
// -------------------------------------------------------------------------------
double NavigationData::computeSatelliteClockError(std::string PRN, GPSTime time, double pseudorange){
	return this->computeSatelliteClockError(std::stoi(PRN.substr(1,2)), time, pseudorange);
}
// -------------------------------------------------------------------------------
// Satellite clock error without a pseudorange (propagation time of zero).
// -------------------------------------------------------------------------------
double NavigationData::computeSatelliteClockError(int PRN, GPSTime time){
	return this->computeSatelliteClockError(PRN, time, 0);
}
// -------------------------------------------------------------------------------
// Same as above; string overload for "G12"-style identifiers.
// -------------------------------------------------------------------------------
double NavigationData::computeSatelliteClockError(std::string PRN, GPSTime time){
	return this->computeSatelliteClockError(std::stoi(PRN.substr(1,2)), time);
}
// -------------------------------------------------------------------------------
// ECEF velocity of a satellite by finite differences of its position.
// Uses a central difference (O(h^2)) when ephemeris exists at t-1s and t+1s;
// otherwise falls back to a one-sided forward or backward difference (O(h)).
// Returns a default-constructed ECEFCoords when neither neighbouring epoch
// has usable ephemeris.
// -------------------------------------------------------------------------------
ECEFCoords NavigationData::computeSatelliteSpeed(std::string PRN, GPSTime time){
	// Central difference in O(h^2): (pos(t+1s) - pos(t-1s)) / 2
	if ((this->hasEphemeris(PRN, time.addSeconds(-1))) && (this->hasEphemeris(PRN, time.addSeconds(+1)))){
		ECEFCoords sat_pos_bwd = this->computeSatellitePos(PRN, time.addSeconds(-1));
		ECEFCoords sat_pos_fwd = this->computeSatellitePos(PRN, time.addSeconds(+1));
		ECEFCoords speed = sat_pos_fwd - sat_pos_bwd;
		speed.scalar(0.5);
		return speed;
	}
	// One-sided finite difference in O(h)
	ECEFCoords sat_pos_median = this->computeSatellitePos(PRN, time);
	// Forward difference: pos(t+1s) - pos(t)
	if (this->hasEphemeris(PRN, time.addSeconds(+1))){
		ECEFCoords sat_pos_fwd = this->computeSatellitePos(PRN, time.addSeconds(+1));
		ECEFCoords speed = sat_pos_fwd - sat_pos_median;
		return speed;
	}
	// Backward difference: pos(t) - pos(t-1s)
	if (this->hasEphemeris(PRN, time.addSeconds(-1))){
		ECEFCoords sat_pos_bwd = this->computeSatellitePos(PRN, time.addSeconds(-1));
		ECEFCoords speed = sat_pos_median - sat_pos_bwd;
		return speed;
	}
	// No usable neighbouring epoch: return a default-constructed value.
	ECEFCoords null;
	return null;
}
// -------------------------------------------------------------------------------
// Vitesses (ECEF) d'un groupe de satellites par différence finie centrée
// -------------------------------------------------------------------------------
std::vector<ECEFCoords> NavigationData::computeSatelliteSpeed(std::vector<std::string> PRN, GPSTime time){
std::vector<ECEFCoords> speeds;
for (unsigned i=0; i<PRN.size(); i++){
speeds.push_back(this->computeSatelliteSpeed(PRN.at(i), time));
}
return speeds;
}
| 3,352 |
345 |
<gh_stars>100-1000
/**
* Attribution-ShareAlike 3.0 Unported (CC BY-SA 3.0)
* https://creativecommons.org/licenses/by-sa/3.0/
* https://codegolf.stackexchange.com/a/48333
* https://codegolf.stackexchange.com/users/10588/ilya-gazman
*/
package org.deepfake_http.thirdparty;
public class RemoveCommentsUtils {
	/* Parser states for the comment-stripping state machine. */
	public static final int DEFAULT = 1;
	public static final int ESCAPE = 2;
	public static final int STRING = 3;
	public static final int ONE_LINE_COMMENT = 4;
	public static final int MULTI_LINE_COMMENT = 5;

	/**
	 * Strips line ("//") and block ("slash-star") comments from source text
	 * while preserving string literals, including escaped quotes inside them.
	 * The newline terminating a line comment is consumed along with the comment.
	 *
	 * NOTE: character literals are not tracked, so a quote or comment opener
	 * inside a char literal (e.g. '"') can confuse the state machine.
	 *
	 * @param s source text; must not be null
	 * @return the text with comments removed
	 */
	public static String removeComments(String s) {
		// PERF FIX: accumulate into a StringBuilder instead of repeated
		// String concatenation, which was O(n^2) in the input length.
		StringBuilder out = new StringBuilder(s.length());
		int mod = DEFAULT;
		for (int i = 0; i < s.length(); i++) {
			// Two-character lookahead window (one char at end of input).
			String substring = s.substring(i, Math.min(i + 2, s.length()));
			char c = s.charAt(i);
			switch (mod) {
			case DEFAULT: // plain code
				mod = substring.equals("/*") ? MULTI_LINE_COMMENT : substring.equals("//") ? ONE_LINE_COMMENT : c == '"' ? STRING : DEFAULT;
				break;
			case STRING: // inside a string literal
				mod = c == '"' ? DEFAULT : c == '\\' ? ESCAPE : STRING;
				break;
			case ESCAPE: // escaped character inside a string literal
				mod = STRING;
				break;
			case ONE_LINE_COMMENT: // skip until (and including) the newline
				mod = c == '\n' ? DEFAULT : ONE_LINE_COMMENT;
				continue;
			case MULTI_LINE_COMMENT: // skip until the closing delimiter
				mod = substring.equals("*/") ? DEFAULT : MULTI_LINE_COMMENT;
				i += mod == DEFAULT ? 1 : 0; // also skip the trailing '/'
				continue;
			}
			// States 1..3 are "real code"; comment states fall out via continue.
			if (mod < 4) {
				out.append(c);
			}
		}
		return out.toString();
	}
}
| 650 |
346 |
{
"images": {
"icon-small": "https://downloads.mesosphere.com/assets/universe/000/eremetic-icon-small.png",
"icon-medium": "https://downloads.mesosphere.com/assets/universe/000/eremetic-icon-medium.png",
"icon-large": "https://downloads.mesosphere.com/assets/universe/000/eremetic-icon-large.png"
},
"assets": {
"container": {
"docker": {
"eremetic": "alde/eremetic:0.27.0"
}
}
}
}
| 184 |
28,056 |
<filename>src/test/java/com/alibaba/json/bvt/issue_1000/Issue1089.java<gh_stars>1000+
package com.alibaba.json.bvt.issue_1000;
import com.alibaba.fastjson.JSON;
import junit.framework.TestCase;
/**
 * Regression test for fastjson issue #1089: when the document contains both
 * "ab" and "a_b" keys, the value bound to the exact-match key "ab" (123)
 * must be used for the {@code ab} property rather than 456 from "a_b".
 *
 * Created by wenshao on 20/03/2017.
 */
public class Issue1089 extends TestCase {
    public void test_for_issue() throws Exception {
        String json = "{\"ab\":123,\"a_b\":456}";
        TestBean tb = JSON.parseObject(json, TestBean.class);
        assertEquals(123, tb.getAb());
    }
    /** Simple bean with a single int property {@code ab}. */
    public static class TestBean {
        private int ab;
        public int getAb() {
            return ab;
        }
        public void setAb(int ab) {
            this.ab = ab;
        }
    }
}
| 309 |
1,350 |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.monitor.query.implementation.logs;
import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Post;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.monitor.query.implementation.logs.models.QueryBody;
import com.azure.monitor.query.implementation.logs.models.QueryResults;
import com.azure.monitor.query.implementation.logs.models.BatchRequest;
import com.azure.monitor.query.implementation.logs.models.BatchResponse;
import com.azure.monitor.query.implementation.logs.models.ErrorResponseException;
import java.time.Duration;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in Queries. */
public final class QueriesImpl {
    /** The proxy service used to perform REST calls. */
    private final QueriesService service;

    /** The service client containing this operation class. */
    private final AzureLogAnalyticsImpl client;

    /**
     * Initializes an instance of QueriesImpl.
     *
     * @param client the instance of the service client containing this operation class.
     */
    QueriesImpl(AzureLogAnalyticsImpl client) {
        // Build the RestProxy-backed implementation of QueriesService over the
        // client's HTTP pipeline and serializer.
        this.service = RestProxy.create(QueriesService.class, client.getHttpPipeline(), client.getSerializerAdapter());
        this.client = client;
    }
    /**
     * The interface defining all the services for AzureLogAnalyticsQueries to be used by the proxy service to perform
     * REST calls.
     */
    @Host("{$host}")
    @ServiceInterface(name = "AzureLogAnalyticsQue")
    private interface QueriesService {
        // GET variant: query and timespan travel as query-string parameters.
        @Get("workspaces/{workspaceId}/query")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ErrorResponseException.class)
        Mono<Response<QueryResults>> get(
                @HostParam("$host") String host,
                @PathParam("workspaceId") String workspaceId,
                @QueryParam("query") String query,
                @QueryParam("timespan") Duration timespan,
                @HeaderParam("Accept") String accept,
                Context context);

        // POST variant: the query is carried in a JSON body; the Prefer header
        // controls server timeout, statistics and visualization options.
        @Post("workspaces/{workspaceId}/query")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ErrorResponseException.class)
        Mono<Response<QueryResults>> execute(
                @HostParam("$host") String host,
                @PathParam("workspaceId") String workspaceId,
                @HeaderParam("Prefer") String prefer,
                @BodyParam("application/json") QueryBody body,
                @HeaderParam("Accept") String accept,
                Context context);

        // Batch endpoint: several queries submitted in one request.
        @Post("$batch")
        @ExpectedResponses({200})
        @UnexpectedResponseExceptionType(ErrorResponseException.class)
        Mono<Response<BatchResponse>> batch(
                @HostParam("$host") String host,
                @BodyParam("application/json") BatchRequest body,
                @HeaderParam("Accept") String accept,
                Context context);
    }
/**
* Executes an Analytics query for data.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param query The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param timespan Optional. The timespan over which to query data. This is an ISO8601 time period value. This
* timespan is applied in addition to any that are specified in the query expression.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<QueryResults>> getWithResponseAsync(String workspaceId, String query, Duration timespan) {
        // Required-parameter validation is surfaced reactively (Mono.error)
        // rather than by throwing synchronously; the check order determines
        // which error the subscriber sees first.
        if (this.client.getHost() == null) {
            return Mono.error(
                    new IllegalArgumentException("Parameter this.client.getHost() is required and cannot be null."));
        }
        if (workspaceId == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceId is required and cannot be null."));
        }
        if (query == null) {
            return Mono.error(new IllegalArgumentException("Parameter query is required and cannot be null."));
        }
        final String accept = "application/json";
        // withContext propagates the subscriber's Reactor context into the call.
        return FluxUtil.withContext(
                context -> service.get(this.client.getHost(), workspaceId, query, timespan, accept, context));
    }
/**
* Executes an Analytics query for data.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param query The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param timespan Optional. The timespan over which to query data. This is an ISO8601 time period value. This
* timespan is applied in addition to any that are specified in the query expression.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public Mono<Response<QueryResults>> getWithResponseAsync(
            String workspaceId, String query, Duration timespan, Context context) {
        // Same validation as the context-free overload; the explicit Context
        // is forwarded to the service call instead of using FluxUtil.withContext.
        if (this.client.getHost() == null) {
            return Mono.error(
                    new IllegalArgumentException("Parameter this.client.getHost() is required and cannot be null."));
        }
        if (workspaceId == null) {
            return Mono.error(new IllegalArgumentException("Parameter workspaceId is required and cannot be null."));
        }
        if (query == null) {
            return Mono.error(new IllegalArgumentException("Parameter query is required and cannot be null."));
        }
        final String accept = "application/json";
        return service.get(this.client.getHost(), workspaceId, query, timespan, accept, context);
    }
/**
* Executes an Analytics query for data.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param query The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param timespan Optional. The timespan over which to query data. This is an ISO8601 time period value. This
* timespan is applied in addition to any that are specified in the query expression.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueryResults> getAsync(String workspaceId, String query, Duration timespan) {
return getWithResponseAsync(workspaceId, query, timespan)
.flatMap(
(Response<QueryResults> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Executes an Analytics query for data.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param query The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param timespan Optional. The timespan over which to query data. This is an ISO8601 time period value. This
* timespan is applied in addition to any that are specified in the query expression.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueryResults> getAsync(String workspaceId, String query, Duration timespan, Context context) {
return getWithResponseAsync(workspaceId, query, timespan, context)
.flatMap(
(Response<QueryResults> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Executes an Analytics query for data.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param query The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param timespan Optional. The timespan over which to query data. This is an ISO8601 time period value. This
* timespan is applied in addition to any that are specified in the query expression.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    public QueryResults get(String workspaceId, String query, Duration timespan) {
        // Synchronous convenience wrapper: blocks on the async variant.
        return getAsync(workspaceId, query, timespan).block();
    }
/**
* Executes an Analytics query for data.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param query The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param timespan Optional. The timespan over which to query data. This is an ISO8601 time period value. This
* timespan is applied in addition to any that are specified in the query expression.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<QueryResults> getWithResponse(
String workspaceId, String query, Duration timespan, Context context) {
return getWithResponseAsync(workspaceId, query, timespan, context).block();
}
/**
* Executes an Analytics query for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API) is an
* example for using POST with an Analytics query.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param body The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param prefer Optional. The prefer header to set server timeout, query statistics and visualization information.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueryResults>> executeWithResponseAsync(String workspaceId, QueryBody body, String prefer) {
if (this.client.getHost() == null) {
return Mono.error(
new IllegalArgumentException("Parameter this.client.getHost() is required and cannot be null."));
}
if (workspaceId == null) {
return Mono.error(new IllegalArgumentException("Parameter workspaceId is required and cannot be null."));
}
if (body == null) {
return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
} else {
body.validate();
}
final String accept = "application/json";
return FluxUtil.withContext(
context -> service.execute(this.client.getHost(), workspaceId, prefer, body, accept, context));
}
/**
* Executes an Analytics query for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API) is an
* example for using POST with an Analytics query.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param body The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param prefer Optional. The prefer header to set server timeout, query statistics and visualization information.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<QueryResults>> executeWithResponseAsync(
String workspaceId, QueryBody body, String prefer, Context context) {
if (this.client.getHost() == null) {
return Mono.error(
new IllegalArgumentException("Parameter this.client.getHost() is required and cannot be null."));
}
if (workspaceId == null) {
return Mono.error(new IllegalArgumentException("Parameter workspaceId is required and cannot be null."));
}
if (body == null) {
return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
} else {
body.validate();
}
final String accept = "application/json";
return service.execute(this.client.getHost(), workspaceId, prefer, body, accept, context);
}
/**
* Executes an Analytics query for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API) is an
* example for using POST with an Analytics query.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param body The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param prefer Optional. The prefer header to set server timeout, query statistics and visualization information.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueryResults> executeAsync(String workspaceId, QueryBody body, String prefer) {
return executeWithResponseAsync(workspaceId, body, prefer)
.flatMap(
(Response<QueryResults> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Executes an Analytics query for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API) is an
* example for using POST with an Analytics query.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param body The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param prefer Optional. The prefer header to set server timeout, query statistics and visualization information.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<QueryResults> executeAsync(String workspaceId, QueryBody body, String prefer, Context context) {
return executeWithResponseAsync(workspaceId, body, prefer, context)
.flatMap(
(Response<QueryResults> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Executes an Analytics query for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API) is an
* example for using POST with an Analytics query.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param body The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param prefer Optional. The prefer header to set server timeout, query statistics and visualization information.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public QueryResults execute(String workspaceId, QueryBody body, String prefer) {
return executeAsync(workspaceId, body, prefer).block();
}
/**
* Executes an Analytics query for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API) is an
* example for using POST with an Analytics query.
*
* @param workspaceId ID of the workspace. This is Workspace ID from the Properties blade in the Azure portal.
* @param body The Analytics query. Learn more about the [Analytics query
* syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/).
* @param prefer Optional. The prefer header to set server timeout, query statistics and visualization information.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return contains the tables, columns & rows resulting from a query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<QueryResults> executeWithResponse(
String workspaceId, QueryBody body, String prefer, Context context) {
return executeWithResponseAsync(workspaceId, body, prefer, context).block();
}
/**
* Executes a batch of Analytics queries for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API)
* is an example for using POST with an Analytics query.
*
* @param body The batch request body.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response to a batch query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<BatchResponse>> batchWithResponseAsync(BatchRequest body) {
if (this.client.getHost() == null) {
return Mono.error(
new IllegalArgumentException("Parameter this.client.getHost() is required and cannot be null."));
}
if (body == null) {
return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
} else {
body.validate();
}
final String accept = "application/json";
return FluxUtil.withContext(context -> service.batch(this.client.getHost(), body, accept, context));
}
/**
* Executes a batch of Analytics queries for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API)
* is an example for using POST with an Analytics query.
*
* @param body The batch request body.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response to a batch query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<Response<BatchResponse>> batchWithResponseAsync(BatchRequest body, Context context) {
if (this.client.getHost() == null) {
return Mono.error(
new IllegalArgumentException("Parameter this.client.getHost() is required and cannot be null."));
}
if (body == null) {
return Mono.error(new IllegalArgumentException("Parameter body is required and cannot be null."));
} else {
body.validate();
}
final String accept = "application/json";
return service.batch(this.client.getHost(), body, accept, context);
}
/**
* Executes a batch of Analytics queries for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API)
* is an example for using POST with an Analytics query.
*
* @param body The batch request body.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response to a batch query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<BatchResponse> batchAsync(BatchRequest body) {
return batchWithResponseAsync(body)
.flatMap(
(Response<BatchResponse> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Executes a batch of Analytics queries for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API)
* is an example for using POST with an Analytics query.
*
* @param body The batch request body.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response to a batch query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Mono<BatchResponse> batchAsync(BatchRequest body, Context context) {
return batchWithResponseAsync(body, context)
.flatMap(
(Response<BatchResponse> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Executes a batch of Analytics queries for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API)
* is an example for using POST with an Analytics query.
*
* @param body The batch request body.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response to a batch query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public BatchResponse batch(BatchRequest body) {
return batchAsync(body).block();
}
/**
* Executes a batch of Analytics queries for data. [Here](https://dev.loganalytics.io/documentation/Using-the-API)
* is an example for using POST with an Analytics query.
*
* @param body The batch request body.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ErrorResponseException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return response to a batch query.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<BatchResponse> batchWithResponse(BatchRequest body, Context context) {
return batchWithResponseAsync(body, context).block();
}
}
| 10,008 |
3,428 |
{"id":"01892","group":"easy-ham-1","checksum":{"type":"MD5","value":"701fbf72f73f9dacf3fd176e14660a04"},"text":"From <EMAIL> Wed Sep 25 10:23:58 2002\nReturn-Path: <<EMAIL>>\nDelivered-To: y<EMAIL>int.org\nReceived: from localhost (jalapeno [127.0.0.1])\n\tby jmason.org (Postfix) with ESMTP id 1431016F03\n\tfor <jm@localhost>; Wed, 25 Sep 2002 10:23:57 +0100 (IST)\nReceived: from jalapeno [127.0.0.1]\n\tby localhost with IMAP (fetchmail-5.9.0)\n\tfor jm@localhost (single-drop); Wed, 25 Sep 2002 10:23:57 +0100 (IST)\nReceived: from dogma.slashnull.org (localhost [127.0.0.1]) by\n dogma.slashnull.org (8.11.6/8.11.6) with ESMTP id g8P83hC18342 for\n <<EMAIL>>; Wed, 25 Sep 2002 09:03:43 +0100\nMessage-Id: <<EMAIL>>\nTo: yyyy<EMAIL>int.org\nFrom: boingboing <<EMAIL>>\nSubject: Horror writers against illiteracy\nDate: Wed, 25 Sep 2002 08:03:43 -0000\nContent-Type: text/plain; encoding=utf-8\n\nURL: http://boingboing.net/#85485985\nDate: Not supplied\n\nThe Horror Writers of America are hosting a charity auction on eBay to raise \nmoney for American literacy charities. \n\n Among the items up for auction: a rare softcover advance copy (bound \n galley) of Thomas Harris' The Silence of the Lambs issued by St. Martin's \n Press in 1988; the first U.S. hardcover edition of Clive Barker's The \n Damnation Game; and a bundle of limited-edition prints depicting scenes \n from Stephen King novels such as Carrie and The Shining. \n\nLink[1] Discuss[2]\n\n[1] http://www.wired.com/news/culture/0,1284,55319,00.html\n[2] http://www.quicktopic.com/boing/H/qWd2YRCqaFUqW\n\n\n"}
| 619 |
778 |
// | / |
// ' / __| _` | __| _ \ __|
// . \ | ( | | ( |\__ `
// _|\_\_| \__,_|\__|\___/ ____/
// Multi-Physics
//
// License: BSD License
// Kratos default license: kratos/license.txt
//
// Main authors: <NAME>
//
// System includes
// External includes
// Project includes
#include "includes/define_python.h"
#include "includes/process_info.h"
#include "python/add_process_info_to_python.h"
namespace Kratos
{
namespace Python
{
// Thin adapter exposing the previous solution-step info shared pointer to the bindings.
ProcessInfo::Pointer ProcessInfoGetPreviousSolutionStepInfo(ProcessInfo& rProcessInfo)
{
    ProcessInfo::Pointer p_previous_step_info = rProcessInfo.pGetPreviousSolutionStepInfo();
    return p_previous_step_info;
}
// Thin adapter exposing the previous time-step info shared pointer to the bindings.
ProcessInfo::Pointer ProcessInfoGetPreviousTimeStepInfo(ProcessInfo& rProcessInfo)
{
    ProcessInfo::Pointer p_previous_time_step_info = rProcessInfo.pGetPreviousTimeStepInfo();
    return p_previous_time_step_info;
}
//
// Registers the ProcessInfo type with the given Python module, exposing
// solution-step creation and step-history navigation helpers.
void AddProcessInfoToPython(pybind11::module& m)
{
    pybind11::class_<ProcessInfo, ProcessInfo::Pointer, DataValueContainer, Flags>(m, "ProcessInfo")
        .def(pybind11::init<>())
        .def("CreateSolutionStepInfo", &ProcessInfo::CreateSolutionStepInfo)
        .def("GetPreviousSolutionStepInfo", ProcessInfoGetPreviousSolutionStepInfo)
        .def("GetPreviousTimeStepInfo", ProcessInfoGetPreviousTimeStepInfo)
        .def("__str__", PrintObject<ProcessInfo>)
        ;
}
} // namespace Python.
} // Namespace Kratos
| 501 |
1,013 |
<filename>ccore/include/pyclustering/cluster/cluster_data.hpp<gh_stars>1000+
/*!
@authors <NAME> (<EMAIL>)
@date 2014-2020
@copyright BSD-3-Clause
*/
#pragma once
#include <vector>
#include <memory>
namespace pyclustering {
namespace clst {
/* Convenience aliases for clustering results; the stored values are indexes
   into the input data set (see cluster_data below). */
using noise = std::vector<size_t>;
using noise_ptr = std::shared_ptr<noise>;
using index_sequence = std::vector<std::size_t>;
using cluster = std::vector<std::size_t>;
using cluster_sequence = std::vector<cluster>;
using cluster_sequence_ptr = std::shared_ptr<cluster_sequence>;
/*!

@class    cluster_data cluster_data.hpp pyclustering/cluster/cluster_data.hpp

@brief    Represents result of cluster analysis.

*/
class cluster_data {
protected:
    cluster_sequence      m_clusters = { };   /**< Allocated clusters during clustering process. */

public:
    /*!
    
    @brief    Default constructor that creates empty clustering data.
    
    */
    cluster_data() = default;

    /*!
    
    @brief    Copy constructor that creates clustering data that is the same to specified.
    
    @param[in] p_other: another clustering data.
    
    */
    cluster_data(const cluster_data & p_other) = default;

    /*!
    
    @brief    Move constructor that creates clustering data from another by moving data.
    
    @param[in] p_other: another clustering data.
    
    */
    cluster_data(cluster_data && p_other) = default;

    /*!
    
    @brief    Default destructor that destroy clustering data.
    
    */
    virtual ~cluster_data() = default;

public:
    /*!
    
    @brief    Returns reference to clusters.
    
    @return   Mutable reference to the allocated cluster sequence.
    
    */
    cluster_sequence & clusters();

    /*!
    
    @brief    Returns constant reference to clusters.
    
    @return   Read-only reference to the allocated cluster sequence.
    
    */
    const cluster_sequence & clusters() const;

    /*!
    
    @brief    Returns amount of clusters.
    
    @return   Number of allocated clusters.
    
    */
    std::size_t size() const;

public:
    /*!
    
    @brief    Provides access to specified cluster.
    
    @param[in] p_index: index of specified cluster.
    
    @return   Mutable reference to the cluster at `p_index` (no bounds checking).
    
    */
    cluster & operator[](const size_t p_index);

    /*!
    
    @brief    Provides access to specified cluster.
    
    @param[in] p_index: index of specified cluster.
    
    @return   Read-only reference to the cluster at `p_index` (no bounds checking).
    
    */
    const cluster & operator[](const size_t p_index) const;

    /*!
    
    @brief    Compares clustering data.
    
    @param[in] p_other: another clustering data that is used for comparison.
    
    @return   Returns true if both objects have the same amount of clusters with the same elements.
    
    */
    bool operator==(const cluster_data & p_other) const;

    /*!
    
    @brief    Compares clustering data.
    
    @param[in] p_other: another clustering data that is used for comparison.
    
    @return   Returns true if both objects have are not the same.
    
    */
    bool operator!=(const cluster_data & p_other) const;
};
}
}
| 1,269 |
13,057 |
/*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito.internal.matchers.apachecommons;
import java.io.Serializable;
import org.mockito.ArgumentMatcher;
/**
 * Argument matcher that compares objects field-by-field via reflection
 * (Apache Commons style), optionally skipping named fields.
 */
public class ReflectionEquals implements ArgumentMatcher<Object>, Serializable {
    /** Expected value whose fields drive the reflective comparison. */
    private final Object wanted;
    /** Names of fields excluded from the comparison. */
    private final String[] excludeFields;

    public ReflectionEquals(Object wanted, String... excludeFields) {
        this.wanted = wanted;
        this.excludeFields = excludeFields;
    }

    @Override
    public boolean matches(Object actual) {
        // Delegate the field-by-field comparison to the bundled EqualsBuilder.
        return EqualsBuilder.reflectionEquals(wanted, actual, excludeFields);
    }

    @Override
    public String toString() {
        StringBuilder description = new StringBuilder("refEq(");
        description.append(wanted);
        return description.append(")").toString();
    }
}
| 261 |
13,585 |
<filename>mybatis-plus/src/test/java/com/baomidou/mybatisplus/test/rewrite/Entity.java
package com.baomidou.mybatisplus.test.rewrite;
import lombok.Data;
import java.io.Serializable;
/**
 * Simple serializable entity used by the SQL-rewrite test suite.
 *
 * <p>Lombok's {@code @Data} generates the getters, setters,
 * {@code equals}/{@code hashCode} and {@code toString} for the fields below.
 *
 * @author miemie
 * @since 2020-06-23
 */
@Data
public class Entity implements Serializable {
    // Fixed serialization id so serialized instances survive class evolution.
    private static final long serialVersionUID = 6962439201546719734L;
    // Primary key.
    private Long id;
    // Display name column.
    private String name;
}
| 147 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.