max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
331 | <reponame>fakegit/Subtitles-View
package org.fordes.subview.controller;
import de.felixroske.jfxsupport.FXMLController;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.GridPane;
import java.net.URL;
import java.util.ResourceBundle;
/**
 * Controller for the build &amp; export view: switches focus between the build
 * panel and the preview panel, and hosts the (not yet implemented) export
 * action handlers.
 *
 * @author fordes on 2020/10/6
 */
@FXMLController
public class BuildExportController extends BasicController implements Initializable {

    @FXML
    private GridPane buildPanel, previewPanel;

    @Override
    public void initialize(URL location, ResourceBundle resources) {
        // Nothing to initialize yet; panels are injected via FXML.
    }

    /* Build & export: bring the build panel to the front. */
    public void onBuildManual(ActionEvent actionEvent) {
        this.focus(buildPanel);
    }

    /* Effect preview: bring the preview panel to the front. */
    public void onBuildPreview(ActionEvent actionEvent) {
        this.focus(previewPanel);
    }

    /* Replace original file (not yet implemented). */
    public void onBuildReplaceSelected(MouseEvent mouseEvent) {
    }

    /* Start export (not yet implemented). */
    public void onStartBuild(ActionEvent actionEvent) {
    }

    /* Choose output path (not yet implemented). */
    public void onBuildChoosePath(ActionEvent actionEvent) {
    }
}
567 | package com.google.cloud.bigquery.utils.queryfixer.fixer;
import com.google.cloud.bigquery.utils.queryfixer.entity.FixResult;
import com.google.cloud.bigquery.utils.queryfixer.errors.NoMatchingSignatureError;
import com.google.common.collect.ImmutableSet;
import lombok.AllArgsConstructor;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* A class responsible for fixing {@link NoMatchingSignatureError}. Two scenarios could lead to this
* error. 1. Using Legacy Type Cast in Standard SQL, and 2. The data types of input arguments are
* not consistent with the function signature.
*
* <p>The fixer will first check the function name and number of arguments to see if this unmatched
* function belongs to legacy Type Cast. If it does, the fixer will convert it to SAFE_CAST or other
* special cast functions (e.g. TIMESTAMP_MICROS that casts from INT64 to TIMESTAMP). In the second
* scenario, the fixer will try to SAFE_CAST the input argument with unmatched data type to the one
* required by the signature.
*
* <p>Here are a few examples:
*
* <pre>
* String(123)
* </pre>
*
* is a legacy cast function and leads to an error "No matching signature for function STRING for
* argument types: INT64. Supported signature: STRING(TIMESTAMP, [STRING])" in standardSQL. The
* fixer will convert it to
*
* <pre>
* SAFE_CAST(123 AS STRING)
* </pre>
*
* For the second scenario, one example is
*
* <pre>
* TO_BASE64(123)
* </pre>
*
* where TO_BASE requires BYTES or STRING inputs, so it will be fixed as
*
* <pre>
* TO_BASE(SAFE_CAST(123 AS STRING))
* or
* TO_BASE(SAFE_CAST(123 AS BYTES))
* </pre>
*/
@AllArgsConstructor
public class NoMatchingSignatureFixer implements IFixer {

  // The names of legacy CAST functions that lead to a NoMatchingSignature
  // error when invoked from Standard SQL (scenario 1 in the class javadoc).
  private static final Set<String> castTypes =
      ImmutableSet.<String>builder()
          .add(TypeCast.STRING)
          .add(TypeCast.TIMESTAMP)
          .add(TypeCast.DATETIME)
          .add(TypeCast.DATE)
          .add(TypeCast.TIME)
          .build();

  // The full query text being fixed.
  private final String query;
  // The no-matching-signature error to repair.
  private final NoMatchingSignatureError err;

  /**
   * Produces fix candidates for the error. A single-argument call whose name
   * matches a legacy cast function is rewritten through its dedicated cast
   * template; otherwise one candidate template is built per expected signature
   * that the actual argument list can be cast to fit.
   *
   * @return the result of applying the generated template(s) via {@link FunctionFixer}
   */
  @Override
  public FixResult fix() {
    // Scenario 1: legacy type cast, e.g. STRING(123) -> SAFE_CAST(123 AS STRING).
    if (castTypes.contains(err.getFunctionName()) && err.getArgumentTypes().size() == 1) {
      String oldType = err.getArgumentTypes().get(0).getDataType();
      String template = TypeCast.getCastTemplate(err.getFunctionName(), oldType);
      FunctionFixer fixer = new FunctionFixer(query, err, template);
      return fixer.fix();
    }
    // Scenario 2: cast each mismatched argument to the type its signature expects.
    List<String> templates = new ArrayList<>();
    for (NoMatchingSignatureError.Signature signature : err.getExpectedSignatures()) {
      List<String> argumentTemplates = convert(err.getArgumentTypes(), signature);
      // A null list means the actual arguments cannot fit this signature's arity.
      if (argumentTemplates == null) {
        continue;
      }
      String functionTemplate = buildFunctionTemplate(err.getFunctionName(), argumentTemplates);
      templates.add(functionTemplate);
    }
    FunctionFixer fixer = new FunctionFixer(query, err, templates);
    return fixer.fix();
  }

  /**
   * Joins the per-argument templates into a full call template, e.g.
   * {@code FUNC({1}, SAFE_CAST({2} AS STRING))}.
   */
  private String buildFunctionTemplate(String functionName, List<String> argumentTemplates) {
    String arguments = String.join(", ", argumentTemplates);
    return String.format("%s(%s)", functionName, arguments);
  }

  /**
   * Checks whether the actual arguments fit an expected signature and, if so,
   * returns one template per argument: a plain placeholder when the data types
   * already match, or a cast template otherwise. Returns {@code null} when the
   * argument list cannot fit the signature.
   */
  private List<String> convert(
      List<NoMatchingSignatureError.ArgumentType> arguments,
      NoMatchingSignatureError.Signature signature) {
    List<String> argumentTemplates = new ArrayList<>();
    // A wildcard to match ANY type; bound to the first concrete type it meets.
    String any = null;
    // True while we are still consuming the signature's required arguments.
    boolean atRequired = true;
    int argsIndex = 0;
    // NOTE(review): if a signature declared no required arguments at all,
    // atRequired would stay true and the first lookup below would return null
    // -- confirm optional-only signatures cannot occur here.
    for (int i = 0; i < arguments.size(); i++) {
      NoMatchingSignatureError.ArgumentType argument = arguments.get(i);
      String sourceType = argument.getDataType();
      NoMatchingSignatureError.ArgumentType expectedArgument =
          getArgumentType(signature, atRequired, argsIndex);
      if (expectedArgument == null) {
        return null;
      }
      String expectedType = expectedArgument.getDataType();
      // ANY equals the data type that matches the first ANY.
      if (expectedType.equals(TypeCast.ANY)) {
        expectedType = any == null ? any = sourceType : any;
      }
      String argumentHolder;
      // Placeholder is a 1-based index.
      if (expectedType.equals(sourceType)) {
        argumentHolder = String.format("{%s}", i + 1);
      } else {
        argumentHolder = TypeCast.getCastTemplate(expectedType, sourceType, i + 1);
      }
      argumentTemplates.add(argumentHolder);
      argsIndex++;
      // After matching all the required arguments, start matching optional ones.
      if (atRequired && argsIndex == signature.getRequired().size()) {
        atRequired = false;
        argsIndex = 0;
      }
    }
    return argumentTemplates;
  }

  /**
   * Resolves the expected argument at {@code argsIndex}, reading from the
   * required list while {@code atRequired} is true and from the optional list
   * afterwards. The last optional argument extends indefinitely when it is
   * marked repeated. Returns {@code null} when the index is out of range.
   */
  private NoMatchingSignatureError.ArgumentType getArgumentType(
      NoMatchingSignatureError.Signature signature, boolean atRequired, int argsIndex) {
    if (atRequired) {
      if (argsIndex < signature.getRequired().size()) {
        return signature.getRequired().get(argsIndex);
      }
      return null;
    }
    // Try to get an optional argument type.
    if (argsIndex < signature.getOptional().size()) {
      return signature.getOptional().get(argsIndex);
    }
    // The index of optional arguments can be extended if the last argument is repeated.
    if (!signature.getOptional().isEmpty()) {
      int lastIndex = signature.getOptional().size() - 1;
      NoMatchingSignatureError.ArgumentType lastArgs = signature.getOptional().get(lastIndex);
      if (lastArgs.isRepeated()) {
        return lastArgs;
      }
    }
    return null;
  }
}
| 1,989 |
1,794 | <reponame>rvencu/lxml
"""
Simple HTTP request dumper for tests.
"""
import sys
from contextlib import contextmanager
try:
import urlparse
except ImportError:
# Python 3
import urllib.parse as urlparse
@contextmanager
def webserver(app, port=0, host=None):
    """Context manager entry point for the 'with' statement.

    Pass 0 as port number to dynamically allocate a free port.

    Usage:

    with webserver(wsgi_app_function, 8080) as host_url:
        do_ws_calls(host_url)
    """
    server = build_web_server(app, port, host or '127.0.0.1')
    host, port = server.socket.getsockname()

    import threading
    thread = threading.Thread(target=server.serve_forever,
                              kwargs={'poll_interval': 0.5})
    # Assign the attribute instead of calling the deprecated setDaemon()
    # (deprecated since Python 3.10, removed in 3.13).
    thread.daemon = True
    thread.start()
    try:
        yield 'http://%s:%s/' % (host, port)  # yield control to 'with' body
    finally:
        server.shutdown()
        server.server_close()
        thread.join(timeout=1)
try:
from SocketServer import ThreadingMixIn
except ImportError:
# Python 3
from socketserver import ThreadingMixIn
import wsgiref.simple_server as wsgiserver
class WebServer(ThreadingMixIn, wsgiserver.WSGIServer):
    """A web server that starts a new thread for each request.

    ThreadingMixIn must be listed before WSGIServer so that its
    process_request() override precedes TCPServer's in the MRO; with the
    original base order (WSGIServer first) the mixin was never reached and
    requests were served sequentially, contradicting this docstring.
    """
class _RequestHandler(wsgiserver.WSGIRequestHandler):
    """Request handler that keeps test output quiet."""

    def get_stderr(self):
        # Route WSGI error output to stdout so tests don't pollute stderr.
        return sys.stdout

    def log_message(self, format, *args):
        # message = "wsmock(%s) %s" % (self.address_string(), format % args)
        pass  # don't log messages
def build_web_server(app, port, host=None):
    """Create (but do not start) a threading WSGI server serving *app*.

    Binds to all interfaces when *host* is not given.
    """
    return wsgiserver.make_server(
        host or '', port, app,
        server_class=WebServer,
        handler_class=_RequestHandler)
class HTTPRequestCollector(object):
    """WSGI application that records requests and replies with fixed data.

    Each call appends ``(path, parsed_query_pairs)`` to ``self.requests``
    and answers with the configured status code, headers and body.
    """

    def __init__(self, response_data, response_code=200, headers=()):
        self.response_data = response_data
        self.response_code = response_code
        self.headers = [] if not headers else list(headers)
        self.requests = []

    def __call__(self, environ, start_response):
        path = environ.get('PATH_INFO')
        query_pairs = urlparse.parse_qsl(environ.get('QUERY_STRING'))
        self.requests.append((path, query_pairs))
        start_response('%s OK' % self.response_code, self.headers)
        return [self.response_data]
| 933 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
// MARKER(update_precomp.py): autogen include statement, do not remove
#include "precompiled_chart2.hxx"
#include "PieChartType.hxx"
#include "PropertyHelper.hxx"
#include "macros.hxx"
#include "PolarCoordinateSystem.hxx"
#include "AxisHelper.hxx"
#include "servicenames_charttypes.hxx"
#include "ContainerHelper.hxx"
#include "AxisIndexDefines.hxx"
#include "AxisHelper.hxx"
#include <com/sun/star/beans/PropertyAttribute.hpp>
#include <com/sun/star/chart2/AxisType.hpp>
using namespace ::com::sun::star;
using ::rtl::OUString;
using ::com::sun::star::beans::Property;
using ::com::sun::star::uno::Sequence;
using ::com::sun::star::uno::Reference;
using ::com::sun::star::uno::Any;
using ::osl::MutexGuard;
namespace
{
enum
{
PROP_PIECHARTTYPE_USE_RINGS,
PROP_PIECHARTTYPE_3DRELATIVEHEIGHT
};
// Appends the pie-chart-specific properties to rOutProperties:
//  - "UseRings" (bool): presumably selects the ring/donut variant -- the name
//    is the only evidence here; confirm against the renderer.
//  - "3DRelativeHeight" (sal_Int32): MAYBEVOID, so it has no forced value.
void lcl_AddPropertiesToVector(
    ::std::vector< Property > & rOutProperties )
{
    rOutProperties.push_back(
        Property( C2U( "UseRings" ),
                  PROP_PIECHARTTYPE_USE_RINGS,
                  ::getBooleanCppuType(),
                  beans::PropertyAttribute::BOUND
                  | beans::PropertyAttribute::MAYBEDEFAULT ));
    rOutProperties.push_back(
        Property( C2U("3DRelativeHeight"),
                  PROP_PIECHARTTYPE_3DRELATIVEHEIGHT,
                  ::getCppuType( reinterpret_cast< const sal_Int32 * >(0)),
                  beans::PropertyAttribute::MAYBEVOID ));
}
struct StaticPieChartTypeDefaults_Initializer
{
::chart::tPropertyValueMap* operator()()
{
static ::chart::tPropertyValueMap aStaticDefaults;
lcl_AddDefaultsToMap( aStaticDefaults );
return &aStaticDefaults;
}
private:
void lcl_AddDefaultsToMap( ::chart::tPropertyValueMap & rOutMap )
{
::chart::PropertyHelper::setPropertyValueDefault( rOutMap, PROP_PIECHARTTYPE_USE_RINGS, false );
::chart::PropertyHelper::setPropertyValueDefault< sal_Int32 >( rOutMap, PROP_PIECHARTTYPE_3DRELATIVEHEIGHT, 100 );
}
};
struct StaticPieChartTypeDefaults : public rtl::StaticAggregate< ::chart::tPropertyValueMap, StaticPieChartTypeDefaults_Initializer >
{
};
struct StaticPieChartTypeInfoHelper_Initializer
{
::cppu::OPropertyArrayHelper* operator()()
{
static ::cppu::OPropertyArrayHelper aPropHelper( lcl_GetPropertySequence() );
return &aPropHelper;
}
private:
Sequence< Property > lcl_GetPropertySequence()
{
::std::vector< ::com::sun::star::beans::Property > aProperties;
lcl_AddPropertiesToVector( aProperties );
::std::sort( aProperties.begin(), aProperties.end(),
::chart::PropertyNameLess() );
return ::chart::ContainerHelper::ContainerToSequence( aProperties );
}
};
struct StaticPieChartTypeInfoHelper : public rtl::StaticAggregate< ::cppu::OPropertyArrayHelper, StaticPieChartTypeInfoHelper_Initializer >
{
};
struct StaticPieChartTypeInfo_Initializer
{
uno::Reference< beans::XPropertySetInfo >* operator()()
{
static uno::Reference< beans::XPropertySetInfo > xPropertySetInfo(
::cppu::OPropertySetHelper::createPropertySetInfo(*StaticPieChartTypeInfoHelper::get() ) );
return &xPropertySetInfo;
}
};
struct StaticPieChartTypeInfo : public rtl::StaticAggregate< uno::Reference< beans::XPropertySetInfo >, StaticPieChartTypeInfo_Initializer >
{
};
} // anonymous namespace
namespace chart
{
// Constructs a pie chart type; bUseRings selects the ring variant. The
// property is only written when true because false is already the registered
// default (see lcl_AddDefaultsToMap), so no broadcast is needed otherwise.
PieChartType::PieChartType(
    const uno::Reference< uno::XComponentContext > & xContext,
    sal_Bool bUseRings /* = sal_False */) :
        ChartType( xContext )
{
    if( bUseRings )
        setFastPropertyValue_NoBroadcast( PROP_PIECHARTTYPE_USE_RINGS, uno::makeAny( bUseRings ));
}
PieChartType::PieChartType( const PieChartType & rOther ) :
ChartType( rOther )
{
}
PieChartType::~PieChartType()
{}
// ____ XCloneable ____
uno::Reference< util::XCloneable > SAL_CALL PieChartType::createClone()
throw (uno::RuntimeException)
{
return uno::Reference< util::XCloneable >( new PieChartType( *this ));
}
// ____ XChartType ____
::rtl::OUString SAL_CALL PieChartType::getChartType()
throw (uno::RuntimeException)
{
return CHART2_SERVICE_NAME_CHARTTYPE_PIE;
}
// Creates the polar coordinate system a pie chart renders in. Every axis gets
// a linear real-number scale with explicit scalings removed; the first
// dimension uses reversed orientation (presumably so slices run clockwise --
// confirm against the renderer), the others use mathematical orientation.
Reference< chart2::XCoordinateSystem > SAL_CALL
    PieChartType::createCoordinateSystem( ::sal_Int32 DimensionCount )
    throw (lang::IllegalArgumentException,
           uno::RuntimeException)
{
    Reference< chart2::XCoordinateSystem > xResult(
        new PolarCoordinateSystem(
            GetComponentContext(), DimensionCount, /* bSwapXAndYAxis */ sal_False ));

    for( sal_Int32 i=0; i<DimensionCount; ++i )
    {
        Reference< chart2::XAxis > xAxis( xResult->getAxisByDimension( i, MAIN_AXIS_INDEX ) );
        if( !xAxis.is() )
        {
            OSL_ENSURE(false,"a created coordinate system should have an axis for each dimension");
            continue;
        }

        // TODO: make axis invisible
        chart2::ScaleData aScaleData = xAxis->getScaleData();
        aScaleData.Scaling = AxisHelper::createLinearScaling();
        aScaleData.AxisType = chart2::AxisType::REALNUMBER;
        if( i == 0 )
            aScaleData.Orientation = chart2::AxisOrientation_REVERSE;
        else
            aScaleData.Orientation = chart2::AxisOrientation_MATHEMATICAL;
        // remove explicit scalings from all axes
        AxisHelper::removeExplicitScaling( aScaleData );
        xAxis->setScaleData( aScaleData );
    }

    return xResult;
}
// ____ OPropertySet ____
// Looks up the registered default for the given property handle; returns an
// empty Any when the handle has no registered default.
uno::Any PieChartType::GetDefaultValue( sal_Int32 nHandle ) const
    throw(beans::UnknownPropertyException)
{
    const tPropertyValueMap& rStaticDefaults = *StaticPieChartTypeDefaults::get();
    tPropertyValueMap::const_iterator aFound( rStaticDefaults.find( nHandle ) );
    if( aFound == rStaticDefaults.end() )
        return uno::Any();
    return (*aFound).second;
}
// ____ OPropertySet ____
::cppu::IPropertyArrayHelper & SAL_CALL PieChartType::getInfoHelper()
{
return *StaticPieChartTypeInfoHelper::get();
}
// ____ XPropertySet ____
uno::Reference< beans::XPropertySetInfo > SAL_CALL PieChartType::getPropertySetInfo()
throw (uno::RuntimeException)
{
return *StaticPieChartTypeInfo::get();
}
uno::Sequence< ::rtl::OUString > PieChartType::getSupportedServiceNames_Static()
{
uno::Sequence< ::rtl::OUString > aServices( 3 );
aServices[ 0 ] = CHART2_SERVICE_NAME_CHARTTYPE_PIE;
aServices[ 1 ] = C2U( "com.sun.star.chart2.ChartType" );
aServices[ 2 ] = C2U( "com.sun.star.beans.PropertySet" );
return aServices;
}
// implement XServiceInfo methods basing upon getSupportedServiceNames_Static
APPHELPER_XSERVICEINFO_IMPL( PieChartType,
C2U( "com.sun.star.comp.chart.PieChartType" ));
} // namespace chart
| 2,905 |
921 | <filename>src/main/java/com/vladsch/md/nav/psi/util/TextMapElementType.java
// Copyright (c) 2015-2020 <NAME> <<EMAIL>> Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.vladsch.md.nav.psi.util;
import com.intellij.openapi.ui.ComboBox;
import com.vladsch.md.nav.MdBundle;
import com.vladsch.md.nav.psi.api.MdTextMapElementTypeProvider;
import icons.MdIcons;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.Icon;
import javax.swing.JComboBox;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Set;
/**
 * Describes a mappable text element type for the link/text-map settings UI.
 * Instances are contributed by {@link MdTextMapElementTypeProvider} extensions
 * and cached by {@link #getValues()}.
 */
public final class TextMapElementType {
    /** Sentinel meaning "no mapping"; always appended last to the cached values. */
    final public static TextMapElementType NONE = new TextMapElementType("NONE", MdBundle.message("settings.link-map.element-type.none.display-name"), MdBundle.message("settings.link-map.element-type.none.banner"), true, true, true, MdIcons.Misc.LINK_MAP_NOT_USED);
    final public static TextMapElementType[] EMPTY_TYPES = new TextMapElementType[0];
    final public static TextMapElementType[] ELEMENT_TYPES = new TextMapElementType[] { NONE };

    private final @NotNull String name;        // stable identifier, used by getElementType()
    private final @NotNull String displayName; // user-visible name, used by findEnum()
    private final @NotNull String banner;
    private final boolean hasOption;
    private final boolean allowPrefix;
    private final boolean hasReverseMap;
    private final @Nullable Icon icon;

    public TextMapElementType(@NotNull String name, @NotNull String displayName, @NotNull String banner, boolean hasOption, boolean allowPrefix, final boolean hasReverseMap, @Nullable Icon icon) {
        this.name = name;
        this.displayName = displayName;
        this.banner = banner;
        this.hasOption = hasOption;
        this.allowPrefix = allowPrefix;
        this.hasReverseMap = hasReverseMap;
        this.icon = icon;
    }

    @NotNull
    public String getBanner() {
        return banner;
    }

    @NotNull
    public String getName() {
        return name;
    }

    @NotNull
    public String getDisplayName() {
        return displayName;
    }

    public boolean hasOption() {
        return hasOption;
    }

    public boolean isAllowPrefix() {
        return allowPrefix;
    }

    public boolean hasReverseMap() {
        return hasReverseMap;
    }

    @Nullable
    public Icon getIcon() {
        return icon;
    }

    /** Creates a combo box pre-filled with all element-type display names except {@code exclude}. */
    @NotNull
    public static JComboBox<String> createComboBox(@NotNull TextMapElementType... exclude) {
        JComboBox<String> comboBox = new ComboBox<>();
        fillComboBox(comboBox, exclude);
        return comboBox;
    }

    /** Replaces the combo box's items with all element-type display names except {@code exclude}. */
    public static void fillComboBox(@NotNull JComboBox<String> comboBox, @NotNull TextMapElementType... exclude) {
        Set<TextMapElementType> excluded = new HashSet<>(Arrays.asList(exclude));
        comboBox.removeAllItems();
        for (TextMapElementType item : getValues()) {
            if (!excluded.contains(item)) {
                String displayName = item.getDisplayName();
                comboBox.addItem(displayName);
            }
        }
    }

    // Lazily built cache of all contributed element types; guarded by the
    // class lock taken in getValues().
    private static TextMapElementType[] ourValues = null;

    /**
     * Returns all contributed element types, sorted by display name, with
     * {@link #NONE} appended last. Synchronized: the original unsynchronized
     * lazy initialization of the non-volatile {@code ourValues} field could
     * build the array twice or publish it unsafely when first called from
     * multiple threads. Callers must treat the returned array as read-only.
     */
    @NotNull
    public static synchronized TextMapElementType[] getValues() {
        if (ourValues == null) {
            ArrayList<TextMapElementType> elementTypes = new ArrayList<>();
            for (MdTextMapElementTypeProvider provider : MdTextMapElementTypeProvider.EXTENSIONS.getValue()) {
                TextMapElementType[] types = provider.getElementTypes();
                elementTypes.addAll(Arrays.asList(types));
            }
            elementTypes.sort(Comparator.comparing(TextMapElementType::getDisplayName));
            elementTypes.add(NONE);
            ourValues = elementTypes.toArray(EMPTY_TYPES);
        }
        return ourValues;
    }

    /** Finds a type by user-visible display name; falls back to {@link #NONE}. */
    @NotNull
    public static TextMapElementType findEnum(@Nullable String displayName) {
        if (displayName != null) {
            for (TextMapElementType type : getValues()) {
                if (type.getDisplayName().equals(displayName)) return type;
            }
        }
        return NONE;
    }

    /** Finds a type by its stable identifier; falls back to {@link #NONE}. */
    @NotNull
    public static TextMapElementType getElementType(@Nullable String name) {
        if (name != null) {
            for (TextMapElementType type : getValues()) {
                if (type.getName().equals(name)) return type;
            }
        }
        return NONE;
    }
}
| 1,767 |
1,444 | package mage.client.components.tray;
import java.awt.*;
import java.util.concurrent.TimeUnit;
import mage.client.MageFrame;
import org.apache.log4j.Logger;
import org.mage.plugins.card.utils.impl.ImageManagerImpl;
/**
* @author noxx
*/
/**
 * Singleton managing the client's system-tray icon: installs the icon with its
 * popup menu, flashes it to signal activity, and shows balloon messages.
 *
 * @author noxx
 */
public enum MageTray {
    instance;

    private static final Logger log = Logger.getLogger(MageTray.class);

    private Image mainImage;
    private Image flashedImage;
    private TrayIcon trayIcon;

    // Blink state machine: 0 = idle, 1 = blinking, 3 = stop requested.
    // Declared volatile so the blink thread's "state != 3" loop sees
    // stopBlink()'s write from another thread; the original non-volatile
    // field had no visibility guarantee and the loop could spin forever
    // on a stale cached value.
    private volatile int state = 0;

    /** Installs the tray icon and its popup menu; no-op when the platform has no tray support. */
    public void install() {
        if (!SystemTray.isSupported()) {
            log.warn("SystemTray is not supported");
            return;
        }
        try {
            mainImage = ImageManagerImpl.instance.getAppSmallImage();
            flashedImage = ImageManagerImpl.instance.getAppFlashedImage();
            trayIcon = new TrayIcon(mainImage);
            trayIcon.setImageAutoSize(true);
            // Activation (e.g. double-click): stop flashing and restore the main window.
            trayIcon.addActionListener(e -> {
                stopBlink();
                MageFrame frame = MageFrame.getInstance();
                frame.setVisible(true);
                frame.setState(Frame.NORMAL);
            });

            final SystemTray tray = SystemTray.getSystemTray();
            final PopupMenu popup = new PopupMenu();

            MenuItem imagesItem = new MenuItem("Download images");
            MenuItem iconsItem = new MenuItem("Download icons");
            MenuItem stopBlinkItem = new MenuItem("Stop blinking");
            MenuItem preferencesItem = new MenuItem("Preferences...");
            MenuItem aboutItem = new MenuItem("About Mage");
            MenuItem exitItem = new MenuItem("Exit");

            imagesItem.addActionListener(e -> MageFrame.getInstance().downloadImages());
            iconsItem.addActionListener(e -> MageFrame.getInstance().downloadAdditionalResources());
            stopBlinkItem.addActionListener(e -> stopBlink());
            preferencesItem.addActionListener(e -> MageFrame.getInstance().btnPreferencesActionPerformed(null));
            aboutItem.addActionListener(e -> MageFrame.getInstance().btnAboutActionPerformed(null));
            exitItem.addActionListener(e -> MageFrame.getInstance().exitApp());

            popup.add(imagesItem);
            popup.add(iconsItem);
            popup.add(stopBlinkItem);
            popup.add(preferencesItem);
            popup.addSeparator();
            popup.add(aboutItem);
            popup.addSeparator();
            popup.add(exitItem);

            trayIcon.setPopupMenu(popup);
            try {
                tray.add(trayIcon);
            } catch (AWTException e) {
                log.error("TrayIcon could not be added: ", e);
            }
        } catch (Exception e) {
            log.error(e);
        }
    }

    /**
     * Starts alternating the tray icon between the main and flashed image
     * every 600 ms until {@link #stopBlink()} is called. Safe to call
     * repeatedly; at most one blink thread runs at a time.
     */
    public synchronized void blink() {
        if (state == 0) {
            // NOTE: the method-level lock already guards this transition; the
            // class-level block is retained for any external code that
            // synchronizes on MageTray.class.
            synchronized (MageTray.class) {
                if (state == 0) {
                    state = 1;
                    new Thread(() -> {
                        try {
                            int i = 0;
                            while (state != 3) {
                                trayIcon.setImage(i == 0 ? mainImage : flashedImage);
                                TimeUnit.MILLISECONDS.sleep(600);
                                i = i == 0 ? 1 : 0;
                            }
                            trayIcon.setImage(mainImage);
                            state = 0;
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        }
                    }).start();
                }
            }
        }
    }

    /** Requests the blink thread to stop; no-op when not currently blinking. */
    public void stopBlink() {
        if (state == 1) {
            state = 3;
        }
    }

    /** Shows an informational balloon message if the tray icon is installed. */
    public void displayMessage(String message) {
        if (trayIcon != null) {
            trayIcon.displayMessage("Mage", message, TrayIcon.MessageType.INFO);
        }
    }
}
| 1,928 |
3,055 | /*
Fontname: -FreeType-Logisoso-Medium-R-Normal--38-380-72-72-P-46-ISO10646-1
Copyright: Created by <NAME> with FontForge 2.0 (http://fontforge.sf.net) - Brussels - 2009
Glyphs: 96/527
BBX Build Mode: 0
*/
const uint8_t u8g2_font_logisoso26_tr[2644] U8G2_FONT_SECTION("u8g2_font_logisoso26_tr") =
"`\0\4\4\5\6\4\6\6\31'\0\371\32\371\32\0\3[\6\303\12\67 \6\0@\60\6!\12C"
"[P\205\77\30\226\0\42\17\207h;\206\21&F\10\21\21&\4\0#;MS\60&\42\203H\20"
"\32Bh\10\31\42\203\210\14\42\62\210\4\211\7\17\222\20\31Dd\20\221A$\210<x\220\202\310 "
"\42\203H\20\32Bh\10\241!d\210\14\42\62\10\0$$-T.\256\241\63<\225f\11\231T\347"
"\214\222$Z\262Ni:\324\334)\22dJ\254Iup\350\254\0%\60NSP\226Q\212F\34!"
"\61\311\20\63c(\35Jt\354P\242C\211\216\35Jt(\321\261C\211\224\31r\202\310\210iFL"
"T\202\324\20\0&,mS\60\246r\247\322\224(\62j\310\250!CI\22\35J\260\334\261c#H"
"\215 b\204\204\21\22f\64R\243\306D%k\26%'\12\203X[\5\23\42B\0(\36Gk\60"
"\246Pd\306\220\31Cf\320\30j\6\315#\62\204&\42\64\210\14!BA\0)\33Gk\60\26\61"
"\204\10\15\42\64\21\31B\363\67\324\14\32Cf\14\231Qb\0*\31\213Y\71\246\201\203\204\14\11\61"
"\342\301\220CG\36$\31\22j\340 \0+\16J\331\22\246q\263y\360h\334\64\0,\10\243XO"
"\205$\21-\7jX\24\206\17.\7cXP\205\4/\35MS\60N\222#\251\34I\345H*G"
"R\71\222\344H*GR\71\222\312\221$\1\60\32MS\60\246bi\226\24I\205\314\234\375\277;v"
"\212D\221\22k\222\25\2\61\16Gk\60\246i\210<X\64\377\377\3\62!MS\60\246bi\226\224"
"P\205\314\234\271S$\302\21\244\222 \225\4\251$H%A\222\17\36$\63\35m\323/\206\17RR"
"H%A*I\232;X\224\350|\22\14Q\211%\252N\6\3\64$MS\60\256\221$\207\216\244r"
"\350H*\207\216\244d\320\230Ac\306\220\31Cf\314\203\7\351\206\316\15\0\65\37MS\60\206\17\230"
"\316\213BKX\30I\205\314\334\320\231\232;v\212D\221\22k\222\25\2\66!MS\60\246bi\226"
"\24I\205\314\350L\325,aQ&\325\71\273;v\212D\221\22k\222\25\2\67\36NS\60\206\337\215"
"\30Fb\30\211a$\211\322\222(\225DiI\224\322\241\264$\7\0\70&MS\60\246bi\226\24"
"Iu\316v\307F\220)\261&\315\222\22\252\220\231\263\356\330)\22EJ\254IV\10\0\71$MS"
"\60\246bi\226\24Iu\316\336\235\42Q\244\4\223E%\206Nj\356\330)\22EJ\254IV\10\0"
":\11\343\331Q\205\364\200\23;\12#\332P\205\364\200\223D<\25\350iR\276P\203\310PRf\224"
"\260Q\205JQ\65.\0=\15,YR\206\17\314C\366\340\201\1>\26\350iR\206p\243hUj"
"\230\250AD\212\220!\64*\34\0\77\33mS\60\246bi\226\224P\205\314\334\320\221\24\322%\311\241"
"\363=dC\247\2@L\367\323l\277\304-\337\31+U\262La\42\244\216L\224\4\215\32C$\312"
"\30\232\306\320\64\206\246\61\64\215\241i\14Mch\32C\323\30\232\306\320\64\206H\230@\363\240\220\23"
"RD\312\220\7E\36Py@\347\201<\10\351\226\15\0A(MS\60\256\241\4\353\335\261c#\206"
"\215\30\66b\24\11BD\6\215\31\64f\320\230Ac\210\354d\324\220Q\230\231\33B%MS\60\6"
"Tj\226\14*\61l\304\260\273\42\301b\311\222\61&F\221\30v\316\316N\241\61\261D\15*\0C"
"\32MS\60\246bi\226\224Pu\316\322\371\277;v\212D\221\22k\222\25\2D\26MS\60\206D"
"KX\214Bv\316\376\377\354\320\203\21K\22\1E\22NS\60\206\337\316o\25i\64v\376\366\301\3"
"\6F\20NS\60\206\337\316o\25i\64v\376\267\0G\34MS\60\246bi\226\224P\205\314\350|"
"\224;\273;v\212D\221\22L\26\225\30H\17MS\60\206q\366\337=\370\316\376w\3I\11C["
"P\205\177\60\0J\24MS\60\326\371\377O\315\231;E\242H\211\65\311\12\1K\64MS\60\206a"
"\247H\214\42\61\210\310 \42c\310\14!\64\204\320\10R\307\216\231\263\335\261c#H\15!\64\204\320"
"\30\62\203\210\14\42\62\212\304\260\21\303\10L\16NS\60\206\261\363\377\377\355\203\7\14M(MS\60"
"\206q\247P%J\243f\311\12\26\17\36\224(a\242\204\211\22F\206\30\31bF\210\231\60f\302\230"
"\263\357\6N\42MS\60\6b\227\241B\225(#\65jL\20\61A\304\310\20#$\214\220\60\243Q"
"\256\60\273\335\0O\32MS\60\246bi\226\24I\205\314\234\375\277;v\212D\221\22k\222\25\2P"
"\32MS\60\6Tj\226\214\61\61\12\331\71;;T\202\305\222DC\347\77\5Q\32MS\60\246b"
"i\226\24I\205\314\234\375\277;v\212D\221\22L\26\225\30R*MS\60\6Tj\226\214\61\61\354"
":;;Tb\211\232Dc\310\14\32\63h\314 \42\243\206\214\42\61\212\304\260\21\303\256\33S M"
"S\60\246bi\226\24I\205\314\234Q*+-Y\227D\315\231;E\242H\211\65\311\12\1T\16M"
"S\60\206\17R\15\235\377\377\257\0U\24MS\60\206q\366\377\277;v\212D\221\22k\222\25\2V"
".MS\60\206q\307N\221\30Eb\324\220QC\10\15!\64f\14\231\211\306\14\42\62\210\310\250\21"
"\244F\220\32\61\354:\333\25,I#\0W\63MS\60\6RxEb\24\211Q$F\221\30Eb"
"H\20\22C\202\220\30\42\202\304\24$F\34\31qd\337\250!Q\206D\31\42d\210\220\31C\2\0"
"X\65MS\60\6b\247H\214\32\62j\10\31\62c\310LD\202\20\11R#\206]W\260\334\261c"
"#F\221 D\202\320\230Ac\210\220!\62j\310\250\21\244\220\15Y'MS\60\206a\250H\214\32"
"\62\210\10\31\42d\6\15!\64d\24f\346,$Ir$\225#\251\34Ir$\225\0Z!MS"
"\60\206\17R\222\34Ir$\311\221\24\222$H\222 I\202$I\216$\71\222\344\203\7\11[\16H"
"k\60\206\7\254\346\377\177\365 \1\134\42MS\60\6\242C\211NJt(I\242CI\22\35J\222"
"\350P\242CI\22\35J\222\350P\222\4]\16Gk\60\206\7\206\346\377\177\364 \1^\15\253\330="
"\246q\245\316\220\240\206\0_\10Q@/\206\17\10`\12\246\360<\206\61d\6Ma(\217Rp&"
"\203\310\24\25)Cj\314\260\61\303\6\217%\245h\15\23bC\210\15\31\67\204\330\220\62'\234\264\71"
"#\0b\35mS\60\206\241\363\213BKX\230P\205\314\234\375\356\330)\25\17F,\31Q\10\0c"
"\33\215R\60\246bi\226\224P\205\314\234\321\371\356\330)\22EJ\254IV\10\0d\35mS\60\326"
"\371Q\211!+X\224P\205\314\234\375\356\330)\22EJ\60YTb\0e\33\215R\60\246bi\226"
"\24Iu\316\272\7\237NJ\222\324\220*\326$+\4\0f\30mS\60\266r\247\22\221 \64f\320"
"\320y\306\202\253\241\363\177\6\0g(m\323,\246\64JV\224P\205\314\234\271c\247HT\262&\321"
"\261\241CI\246Y\361@\324\71s\247H\260Xt\6\0h\24mS\60\206\241\363\213BKX\230P"
"\205\314\234\375\277\33i\21eS\60\205!\323\203\33\62\377\77)A\215\0j\24J\344,\276\331\303d"
"\334\374\377\337\31j\201\4\21\31\0k*mS\60\206\241\363\263\253H\14\42\62\210\310\30\62c\310\14"
"!\64d\324\10RX%RS\202\14%\204\206\214\42\61\354\30\1l\17gSP\205A\363\377\377\21"
"!\23f\10m\64\227Rp\207\21\245\12\255H\362\340\204\211\24\252J!#fn\234\271q\346\306\231"
"\33gn\234\271q\346\306\231\33gn\234\271q\346\306\231\33gn\234\271\11n\23\215R\60\206\21\205"
"\226\260\60\241\12\231\71\373\177\67\0o\32\215R\60\246bi\226\224P\205\314\234\375\356\330)\22EJ"
"\254IV\10\0p\36m\323,\206\21\205\226\260\60\241\12\231\71\373\335\261SJ\36\210X\62\242\320\320"
"\371\24\0q\35m\323,\246\22CV\260(\241\12\231\71\373\335\261S$\212\224`\262\250\304\320\371\1"
"r\21\212R\320\205\21\17\36\210\70sl\334\374\277\3s\37\214R\20\36ShT\24It\354 \311"
"c\250\20\222<f\354\20\11\42D\324 #\4\0t\23hKP\225Q\363\315\203$\243\346\377\212\220"
"\231B\4u\24\215R\60\206q\366\377\356\330)\22EJ\60YTb\0v+\215R\60\206q\346N"
"\221\30Eb\324\220QC\10\15!Cf\14\231\211\306\14\42\62\210\4\251\21\244F\14\33\61\354:s"
"\205\0wG\227Rp\207q\343\314\215;U\12U)\22\243J\15\31Uj\310(CC\310\234!B"
"\346\14\231)\306\14\32\63b\314\240\61#\210\14\42A#\22DF\220\32\61f\304\260\21cF\14;"
"s\354\314\61S\346J\25\2x*\215R\60\206q#F\221\30\65f\14\231\251F\220\32\61\316\134\311"
"\241#\13\226\33\61l\304\250\61\203\306\20\31\65d\324\271\1y/m\323,\16R#\10\21\31Dd"
"\20\221Ac\310\214!\63h\10\241!\204\206\214\42\61\212\304\260\313\314YX!IZ\211(\204\352 "
"\71\0z\27\215R\60\206\17R\22$X\220\302\202\24\26\244\260 \225\17\36${\34\311\343.\266Q"
"\204\312\24\42\65l\276\42D\25\251b\304\346g\244\210\225\42\66\0|\11#|.\206\377 \1}\33"
"\311\343.\206a\244\252\42\66\77\243\331\240B\243H\15\233\257\310\24*\64\14\0~\15\217\310\64\6\42"
"g\232\270)C\0\177\6\0@\60\6\0\0\0\4\377\377\0";
| 4,545 |
4,036 | <gh_stars>1000+
// Test fixture for a static-analysis query about redundant/contradictory
// instanceof checks and casts. The odd-looking checks below are intentional
// inputs; "NOT OK" marks expected findings -- do not "fix" them.
public class Test<T> {
    static class Super {}
    static class Sub1 extends Super {}
    static class Sub2 extends Super {}

    // modeled after results on Alfresco
    void foo(Super lhs, Super rhs) {
        if (lhs instanceof Sub1) ;
        else if (rhs instanceof Sub1)
            if ((lhs instanceof Sub1) || (lhs instanceof Sub2));
    }

    void bar(Super x) {
        if (x instanceof Super);
        else if (x instanceof Sub1);
    }

    // modeled after results on Apache Lucene
    void baz(Super x, Super y) {
        if (x instanceof Sub1);
        else if (x instanceof Sub1);
    }

    // NOT OK
    void w(Super x) {
        if (x instanceof Sub2 || x instanceof Super);
        else if (x instanceof Sub1);
    }

    // modeled after result on WildFly
    // NOTE(review): the negated instanceof means the cast branch only runs for
    // non-Test objects and would throw ClassCastException; intentional here as
    // query input.
    @Override
    public boolean equals(Object object) {
        if ((object != null) && !(object instanceof Test)) {
            Test<?> value = (Test<?>) object;
            return (this.hashCode() == value.hashCode()) && super.equals(object);
        }
        return super.equals(object);
    }

    // NOT OK
    Sub1 m(Super o) {
        if (!(o instanceof Sub1))
            return (Sub1)o;
        return null;
    }

    // OK: not a guaranteed failure
    Sub1 m2(Super o) {
        if (!(o instanceof Sub1));
        return (Sub1)o;
    }

    // OK: reassigned
    Sub1 m3(Super o) {
        if (!(o instanceof Sub1)) {
            o = new Sub1();
            return (Sub1)o;
        }
        return null;
    }
}
| 502 |
1,162 | package net.simonvt.schematic.compiler;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.FieldSpec;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.TypeSpec;
import javax.lang.model.element.Modifier;
public final class WriterUtils {

  private WriterUtils() {
    // Static utility holder; never instantiated.
  }

  /**
   * Adds a lazily-initialized singleton to {@code typeSpec}: a {@code private
   * static volatile} field named {@code instance} plus a static
   * {@code getInstance(...)} method implementing double-checked locking. The
   * generated method takes one parameter per entry of {@code params}, each
   * named after the lowerCamelCase form of the type's simple name, and
   * forwards them to the constructor of {@code className}.
   *
   * @param typeSpec builder of the class being generated
   * @param className type of the generated singleton (the class itself)
   * @param params constructor parameter types, in declaration order
   */
  public static void singleton(TypeSpec.Builder typeSpec, ClassName className,
      ClassName... params) {
    FieldSpec instance = FieldSpec.builder(className, "instance", Modifier.PRIVATE, Modifier.STATIC,
        Modifier.VOLATILE).build();
    typeSpec.addField(instance);

    // Comma-separated argument list forwarded to the constructor call.
    // (The original also computed an unused `size` local; removed.)
    StringBuilder paramsBuilder = new StringBuilder();
    boolean first = true;
    for (ClassName param : params) {
      if (!first) {
        paramsBuilder.append(", ");
      } else {
        first = false;
      }
      paramsBuilder.append(parameterName(param));
    }

    MethodSpec.Builder spec = MethodSpec.methodBuilder("getInstance")
        .addModifiers(Modifier.PUBLIC, Modifier.STATIC)
        .returns(className);

    for (ClassName param : params) {
      spec.addParameter(param, parameterName(param));
    }

    spec.beginControlFlow("if (instance == null)")
        .beginControlFlow("synchronized ($T.class)", className)
        .beginControlFlow("if (instance == null)")
        .addStatement("instance = new $T($L)", className, paramsBuilder.toString())
        .endControlFlow()
        .endControlFlow()
        .endControlFlow()
        .addStatement("return instance");

    typeSpec.addMethod(spec.build());
  }

  /** Derives a lowerCamelCase parameter name from the type's simple name. */
  private static String parameterName(ClassName param) {
    String simpleName = param.simpleName();
    return Character.toLowerCase(simpleName.charAt(0)) + simpleName.substring(1);
  }
}
| 675 |
611 | //
// ObjcService.h
// ZRouterTests
//
// Created by zuik on 2018/5/23.
// Copyright © 2018 zuik. All rights reserved.
//
#import <Foundation/Foundation.h>
// Marker protocol with no requirements (presumably a ZRouter test fixture —
// this header lives in ZRouterTests).
@protocol ObjcServiceInput
@end

// Sub-protocol inheriting ObjcServiceInput; conformance to it implies
// conformance to the parent protocol.
@protocol ObjcServiceSubInput <ObjcServiceInput>
@end

// Concrete service conforming to ObjcServiceSubInput (and, transitively,
// ObjcServiceInput).
@interface ObjcService : NSObject<ObjcServiceSubInput>
@end

// Subclass of ObjcService; inherits its protocol conformances.
@interface ObjcSubService : ObjcService
@end
| 132 |
# Raw actor record as returned by the Intel471 API (test fixture).
RAW_RESPONSE = {
    "lastUpdated": 1609661791534,  # epoch milliseconds
    "handles": [
        "testname"
    ],
    "links": {
        "forums": [
            {
                "name": "testforum",
                "actorHandle": "testname",
                "uid": "4671aeaf49c792689533b00664a5c3ef"
            }
        ],
        "forumTotalCount": 1,
        "instantMessageChannelTotalCount": 0,
        "forumPrivateMessageTotalCount": 0,
        "reportTotalCount": 0,
        "instantMessageTotalCount": 0,
        "instantMessageServerTotalCount": 0,
        "forumPostTotalCount": 3
    },
    "activeFrom": 1171802820000,   # epoch milliseconds
    "activeUntil": 1171802820000,  # epoch milliseconds
    "uid": "8d0b12d9a3fa8ed75afc38a42e491f9f"
}

# Client construction kwargs fixture: maps raw API fields to indicator fields
# (note the 'intel471'-prefixed targets, unlike FEED_DATA below).
CLIENT = {'source_name': 'JSON', 'feed_name_to_config': {
    'api_path': {'extractor': 'actors[*]', 'indicator_type': 'STIX Threat Actor',
                 'indicator': 'links_forums',
                 'mapping': {'handles': 'stixaliases', 'lastUpdated': 'updateddate',
                             'activeFrom': 'activefrom', 'activeUntil': 'activeuntil',
                             'links.forums.name': 'forum_name',
                             'links.forums.actorHandle': 'forum_handle',
                             'forumTotalCount': 'intel471forumtotalcount',
                             'forumPostTotalCount': 'intel471forumposttotalcount',
                             'reportTotalCount': 'intel471reporttotalcount',
                             'instantMessageTotalCount': 'intel471instantmessagetotalcount'},
                 'flat_json_with_prefix': True, 'custom_build_iterator': None,
                 'fetch_time': '10 minutes', 'handle_indicator_function': None}},
    'url': 'api_path', 'verify': False, 'auth': (
        'username',
        'password'), 'headers': None, 'cert': None, 'tlp_color': None}

# Standalone feed-config fixture; mostly mirrors CLIENT's 'api_path' entry but
# with different mapping targets (e.g. 'forumtotalcount' without the prefix).
FEED_DATA = {'extractor': 'actors[*]',
             'indicator_type': 'STIX Threat Actor',
             'indicator': 'links_forums',
             'mapping': {'handles': 'stixaliases', 'lastUpdated': 'updateddate', 'activeFrom': 'activefrom',
                         'activeUntil': 'activeuntil', 'links.forums.name': 'forum_name',
                         'links.forums.actorHandle': 'forum_handle', 'forumTotalCount': 'forumtotalcount',
                         'forumPostTotalCount': 'forumposttotalcount', 'reportTotalCount': 'intel471reporttotalcount',
                         'instantMessageTotalCount': 'instantmessagetotalcount'}, 'flat_json_with_prefix': True,
             'custom_build_iterator': None, 'fetch_time': '10 minutes', 'handle_indicator_function': None}
| 1,287 |
1,856 | #pragma once
// Packet.h is only needed for the debug-only helper declared at the bottom.
#ifdef PCPP_TESTS_DEBUG
#include "Packet.h"
#endif

#include <stdint.h>
#include <stdlib.h>

// Shared helpers for the PcapPlusPlus test suites.
namespace pcpp_tests
{
	// Size of the given file in bytes (implemented elsewhere; behavior for a
	// missing file is not visible from this header).
	int getFileLength(const char* filename);

	// Reads the whole file into a newly allocated buffer, reporting its size
	// through bufferLength. May return NULL on failure — callers (the macros
	// below) assert non-NULL.
	uint8_t* readFileIntoBuffer(const char* filename, int& bufferLength);

	// Diagnostic aid: prints where two buffers differ.
	void printBufferDifferences(const uint8_t* buffer1, size_t buffer1Len, const uint8_t* buffer2, size_t buffer2Len);

	// Declares bufferLength##num / buffer##num, fills them from `filename`,
	// and asserts the read succeeded.
	#define READ_FILE_INTO_BUFFER(num, filename) \
	int bufferLength##num = 0; \
	uint8_t* buffer##num = pcpp_tests::readFileIntoBuffer(filename, bufferLength##num); \
	PTF_ASSERT_NOT_NULL(buffer##num) \

	// As READ_FILE_INTO_BUFFER, additionally wrapping the bytes in a
	// rawPacket##num. Relies on a `time` variable at the expansion site; the
	// final `true` argument presumably transfers buffer ownership to the
	// RawPacket — confirm against the RawPacket constructor.
	#define READ_FILE_AND_CREATE_PACKET(num, filename) \
	READ_FILE_INTO_BUFFER(num, filename); \
	pcpp::RawPacket rawPacket##num((const uint8_t*)buffer##num, bufferLength##num, time, true)

	// Same as above, with an explicit link-layer type.
	#define READ_FILE_AND_CREATE_PACKET_LINKTYPE(num, filename, linktype) \
	READ_FILE_INTO_BUFFER(num, filename); \
	pcpp::RawPacket rawPacket##num((const uint8_t*)buffer##num, bufferLength##num, time, true, linktype)

#ifdef PCPP_TESTS_DEBUG
	// Debug-only: persist a packet to a pcap file for offline inspection.
	void savePacketToPcap(pcpp::Packet& packet, std::string fileName);
#endif
}
10,016 | <filename>zap/src/main/java/org/zaproxy/zap/extension/ascan/filters/ScanFilter.java
/*
* Zed Attack Proxy (ZAP) and its related class files.
*
* ZAP is an HTTP/HTTPS proxy for assessing web application security.
*
* Copyright 2019 The ZAP Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.zaproxy.zap.extension.ascan.filters;
import org.zaproxy.zap.model.StructuralNode;
/**
* ScanFilter is called for each message to be scanned to check if it's filtered or not. New Filters
* are added to the scan through {@link ExtensionActiveScan#startScan(String, Target, User,
* Object[]) }.
*
* @author KSASAN <EMAIL>
* @since 2.9.0
*/
public interface ScanFilter {

    /**
     * Tells whether the given node is excluded from the current active scan.
     *
     * @param node the structural node about to be scanned
     * @return the filter decision for the node
     */
    FilterResult isFiltered(StructuralNode node);
}
| 369 |
9,715 | <reponame>Debanitrkl/MLAlgorithms
# coding:utf-8
from mla.datasets.base import *
| 35 |
3,639 | <gh_stars>1000+
package top.tangyh.lamp.model.entity.base;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;
import lombok.ToString;
import lombok.experimental.Accessors;
import top.tangyh.basic.annotation.echo.Echo;
import top.tangyh.basic.base.entity.TreeEntity;
import top.tangyh.lamp.model.constant.EchoDictType;
import javax.validation.constraints.Size;
import static com.baomidou.mybatisplus.annotation.SqlCondition.EQUAL;
import static com.baomidou.mybatisplus.annotation.SqlCondition.LIKE;
import static top.tangyh.lamp.model.constant.EchoApi.DICTIONARY_ITEM_FEIGN_CLASS;
/**
* <p>
* 实体类
* 资源
* </p>
*
* @author zuihou
* @since 2021-09-15
*/
@Data
@NoArgsConstructor
@ToString(callSuper = true)
@EqualsAndHashCode(callSuper = true)
@Accessors(chain = true)
@TableName("c_menu")
@AllArgsConstructor
@Builder
public class SysMenu extends TreeEntity<SysMenu, Long> {

    private static final long serialVersionUID = 1L;

    /**
     * Description.
     */
    @ApiModelProperty(value = "描述")
    @Size(max = 200, message = "描述长度不能超过200")
    @TableField(value = "describe_", condition = LIKE)
    private String describe;

    /**
     * Resource type; [20-menu 60-data].
     */
    @ApiModelProperty(value = "类型")
    @Size(max = 2, message = "类型长度不能超过{max}")
    @TableField(value = "resource_type", condition = LIKE)
    @Echo(api = DICTIONARY_ITEM_FEIGN_CLASS, dictType = EchoDictType.RESOURCE_TYPE)
    private String resourceType;

    /**
     * General menu.
     * {@code true} means the menu is accessible to everyone without being
     * explicitly assigned.
     */
    @ApiModelProperty(value = "通用菜单")
    @TableField("is_general")
    private Boolean isGeneral;

    /**
     * Route path.
     */
    @ApiModelProperty(value = "路径")
    @Size(max = 255, message = "路径长度不能超过255")
    @TableField(value = "path", condition = LIKE)
    private String path;

    /**
     * Front-end component.
     */
    @ApiModelProperty(value = "组件")
    @Size(max = 255, message = "组件长度不能超过255")
    @TableField(value = "component", condition = LIKE)
    private String component;

    /**
     * State (enabled/disabled).
     */
    @ApiModelProperty(value = "状态")
    @TableField("state")
    private Boolean state;

    /**
     * Menu icon.
     */
    @ApiModelProperty(value = "菜单图标")
    @Size(max = 255, message = "菜单图标长度不能超过255")
    @TableField(value = "icon", condition = LIKE)
    private String icon;

    /**
     * Group.
     */
    @ApiModelProperty(value = "分组")
    @Size(max = 20, message = "分组长度不能超过20")
    @TableField(value = "group_", condition = LIKE)
    private String group;

    /**
     * Built-in (read-only) flag.
     */
    @ApiModelProperty(value = "内置")
    @TableField("readonly_")
    private Boolean readonly;

    /**
     * Data scope; [01-all 02-own unit and children 03-own unit
     * 04-own department 05-own department and children 06-personal 07-custom].
     */
    @ApiModelProperty(value = "数据范围")
    @TableField(value = "`data_scope`", condition = LIKE)
    @Size(max = 2, message = "数据范围长度不能超过{max}")
    private String dataScope;

    /**
     * Implementation class; fully-qualified class name of a custom
     * data-scope implementation.
     */
    @ApiModelProperty(value = "实现类")
    @TableField(value = "`custom_class`", condition = LIKE)
    @Size(max = 255, message = "实现类长度不能超过{max}")
    private String customClass;

    /**
     * Whether this is the default entry.
     */
    @ApiModelProperty(value = "是否默认")
    @TableField(value = "`is_def`")
    private Boolean isDef;

    /**
     * Tree depth of this node.
     */
    @ApiModelProperty(value = "树层级")
    @TableField(value = "tree_grade", condition = EQUAL)
    private Integer treeGrade;

    /**
     * Tree path; ancestor ids concatenated to encode the tree structure.
     */
    @ApiModelProperty(value = "树路径")
    @TableField(value = "tree_path", condition = LIKE)
    private String treePath;
}
| 1,938 |
1,133 | <gh_stars>1000+
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# United States Government Sponsorship acknowledged. This software is subject to
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
# (No [Export] License Required except when exporting to an embargoed country,
# end user, or in support of a prohibited end use). By downloading this software,
# the user agrees to comply with all applicable U.S. export laws and regulations.
# The user has the responsibility to obtain export licenses, or other export
# authority as may be required before exporting this software to any 'EAR99'
# embargoed foreign country or citizen of those countries.
#
# Author: <NAME>
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from __future__ import print_function
import logging
from iscesys.Component.Component import Component
from isceobj.Platform.Platform import Platform
from isceobj import Constants
from isceobj.Util.decorators import type_check, force, pickled, logged
# Radar parameters exposed through the ISCE Component framework. Each
# Component.Parameter records the attribute name, its public (XML) name,
# default, type, whether it is mandatory, and a short description.
PRF = Component.Parameter('PRF',
                          public_name='PRF',
                          default=None,
                          type=float,
                          mandatory=True,
                          doc='Pulse Repetition Frequency')

RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate',
                                          public_name='RANGE_SAMPLING_RATE',
                                          default=None,
                                          type=float,
                                          mandatory=True,
                                          doc='Range sampling rate')

CHIRP_SLOPE = Component.Parameter('chirpSlope',
                                  public_name='CHIRP_SLOPE',
                                  default=None,
                                  type=float,
                                  mandatory=True,
                                  doc='Chirp slope of range pulse in Hz / sec')

RADAR_WAVELENGTH = Component.Parameter('radarWavelength',
                                       public_name='RADAR_WAVELENGTH',
                                       default=None,
                                       type=float,
                                       mandatory=True,
                                       doc='Radar wavelength')

RADAR_FREQUENCY = Component.Parameter('radarFrequency',
                                      public_name='RADAR_FREQUENCY',
                                      default=None,
                                      type=float,
                                      mandatory=True,
                                      doc='Radar frequency in Hz')

INPHASE_BIAS = Component.Parameter('inPhaseValue',
                                   public_name='INPHASE_BIAS',
                                   default=None,
                                   type=float,
                                   mandatory=True,
                                   doc='Inphase channel bias')

QUADRATURE_BIAS = Component.Parameter('quadratureValue',
                                      public_name='QUADRATURE_BIAS',
                                      default=None,
                                      type=float,
                                      mandatory=True,
                                      doc='Quadrature channel bias')

CALTONE_LOCATION = Component.Parameter('caltoneLocation',
                                       public_name='CALTONE_LOCATION',
                                       default=None,
                                       type=float,
                                       mandatory=True,
                                       doc='Caltone location in Hz')

RANGE_FIRST_SAMPLE = Component.Parameter('rangeFirstSample',
                                         public_name='RANGE_FIRST_SAMPLE',
                                         default=None,
                                         type=float,
                                         mandatory=True,
                                         doc='Range to the first valid sample')

IQ_FLIP = Component.Parameter('IQFlip',
                              public_name='IQ_FLIP',
                              default=None,
                              type=str,
                              mandatory=True,
                              doc='If the I/Q channels have been flipped')

RANGE_PULSE_DURATION = Component.Parameter('rangePulseDuration',
                                           public_name='RANGE_PULSE_DURATION',
                                           default=None,
                                           type=float,
                                           mandatory=True,
                                           doc='Range pulse duration')

INCIDENCE_ANGLE = Component.Parameter('incidenceAngle',
                                      public_name='INCIDENCE_ANGLE',
                                      default=None,
                                      type=float,
                                      mandatory=True,
                                      doc='Incidence angle')

RANGE_PIXEL_SIZE = Component.Parameter('rangePixelSize',
                                       public_name='RANGE_PIXEL_SIZE',
                                       default=None,
                                       type=float,
                                       mandatory=True,
                                       doc='Range pixel size')

AZIMUTH_PIXEL_SIZE = Component.Parameter('azimuthPixelSize',
                                         public_name='AZIMUTH_PIXEL_SIZE',
                                         default=None,
                                         type=float,
                                         mandatory=True,
                                         doc='')

# NOTE(review): 'PULSE_LENGHT' misspells 'LENGTH'; the name is public (used in
# configs), so renaming it here would break existing references.
PULSE_LENGHT = Component.Parameter('pulseLength',
                                   public_name='PULSE_LENGHT',
                                   default=None,
                                   type=float,
                                   mandatory=True,
                                   doc='Pulse length')
##
# This class allows the creation of a Radar object. The parameters that need
# to be set are:
#\verbatim
#RANGE_FIRST_SAMPLE: range first sample. Mandatory.
#PRF: pulse repetition frequency. Mandatory.
#CALTONE_LOCATION: caltone location. Optional. Default 0.
#INPHASE_BIAS: in-phase channel bias. Mandatory.
#QUADRATURE_BIAS: quadrature channel bias. Mandatory.
#IQ_FLIP: IQ flip flag. Optional. Default 'n'.
#RANGE_SAMPLING_RATE: range sampling rate. Mandatory.
#\endverbatim
#Since the Radar class inherits Component.Component, the initialization methods described in the Component package can be used.
#Moreover, each parameter can be set with the corresponding accessor method setParameter() (see the class member methods).
@pickled
class Radar(Component):
    """Component describing a radar instrument (PRF, sampling rate, chirp,
    wavelength/frequency, biases, pixel sizes, ...).

    The configurable parameters are the module-level Component.Parameter
    objects collected in ``parameter_list``; each also has an explicit
    accessor pair below.
    """

    family = 'radar'
    logging_name = 'isce.isceobj.radar'

    parameter_list = (PRF,
                      RANGE_SAMPLING_RATE,
                      RANGE_FIRST_SAMPLE,
                      CHIRP_SLOPE,
                      RADAR_WAVELENGTH,
                      RADAR_FREQUENCY,
                      IQ_FLIP,
                      INPHASE_BIAS,
                      QUADRATURE_BIAS,
                      CALTONE_LOCATION,
                      RANGE_PULSE_DURATION,
                      INCIDENCE_ANGLE,
                      RANGE_PIXEL_SIZE,
                      AZIMUTH_PIXEL_SIZE,
                      PULSE_LENGHT)  # NOTE(review): public-name typo kept for compatibility

    def _facilities(self):
        # Sub-component (facility): the platform carrying this radar.
        self._platform = self.facility(
            '_platform',
            public_name='PLATFORM',
            module='isceobj.Platform.Platform',
            factory='createPlatform',
            args=(),
            mandatory=True,
            doc="Platform information")

    @logged
    def __init__(self, name=''):
        super(Radar, self).__init__(family=self.__class__.family, name=name)
        return None

    def __complex__(self):
        # Packs the I/Q channel biases into a single complex number (I + jQ).
        return self.inPhaseValue + (1j) * self.quadratureValue

    @force(float)
    def setRangeFirstSample(self, var):
        self.rangeFirstSample = var
        pass

    @force(float)
    def setPulseRepetitionFrequency(self, var):
        self.PRF = var

    def getPulseRepetitionFrequency(self):
        return self.PRF

    @force(float)
    def setCaltoneLocation(self, var):
        self.caltoneLocation = var

    @force(float)
    def setInPhaseValue(self, var):
        self.inPhaseValue = var
        return

    def getInPhaseValue(self):
        return self.inPhaseValue

    @force(float)
    def setQuadratureValue(self, var):
        self.quadratureValue = var

    def getQuadratureValue(self):
        return self.quadratureValue

    def setIQFlip(self, var):
        # Stored as a string flag (e.g. 'y'/'n' — see the module comment).
        self.IQFlip = str(var)

    @force(float)
    def setRangeSamplingRate(self, var):
        self.rangeSamplingRate = var

    def getRangeSamplingRate(self):
        return self.rangeSamplingRate

    @force(float)
    def setChirpSlope(self, var):
        self.chirpSlope = var

    def getChirpSlope(self):
        return self.chirpSlope

    @force(float)
    def setRangePulseDuration(self, var):
        self.rangePulseDuration = var

    def getRangePulseDuration(self):
        return self.rangePulseDuration

    @force(float)
    def setRadarFrequency(self, freq):
        # Keeps frequency and wavelength mutually consistent.
        # NOTE(review): lambda2nu/nu2lambda look swapped by name here and in
        # setRadarWavelength; if both are implemented as c/x the numeric result
        # is identical — confirm against isceobj.Constants.
        self.radarFrequency = freq
        self.radarWavelength = Constants.lambda2nu(self.radarFrequency)

    def getRadarFrequency(self):
        return self.radarFrequency

    @force(float)
    def setRadarWavelength(self, var):
        # Keeps wavelength and frequency mutually consistent (see note above).
        self.radarWavelength = var
        self.radarFrequency = Constants.nu2lambda(self.radarWavelength)
        return None

    def getRadarWavelength(self):
        return self.radarWavelength

    @force(float)
    def setIncidenceAngle(self, var):
        self.incidenceAngle = var

    def getIncidenceAngle(self):
        return self.incidenceAngle

    @type_check(Platform)
    def setPlatform(self, platform):
        self._platform = platform
        pass

    def getPlatform(self):
        return self._platform

    @force(float)
    def setRangePixelSize(self, size):
        self.rangePixelSize = size

    def getRangePixelSize(self):
        return self.rangePixelSize

    @force(float)
    def setAzimuthPixelSize(self, size):
        self.azimuthPixelSize = size

    def getAzimuthPixelSize(self):
        return self.azimuthPixelSize

    @force(float)
    def setPulseLength(self, rpl):
        self.pulseLength = rpl

    def getPulseLength(self):
        return self.pulseLength

    def setBeamNumber(self, num):
        # Not declared in parameter_list; plain attribute.
        self.beamNumber = num

    def getBeamNumber(self):
        return self.beamNumber

    platform = property(getPlatform , setPlatform )

    def __str__(self):
        # Human-readable dump of the main acquisition parameters.
        retstr = "Pulse Repetition Frequency: (%s)\n"
        retlst = (self.PRF,)
        retstr += "Range Sampling Rate: (%s)\n"
        retlst += (self.rangeSamplingRate,)
        retstr += "Radar Wavelength: (%s)\n"
        retlst += (self.radarWavelength,)
        retstr += "Chirp Slope: (%s)\n"
        retlst += (self.chirpSlope,)
        return retstr % retlst
def createRadar():
    """Factory returning a new, unconfigured Radar component."""
    return Radar()
| 3,883 |
575 | //
// PipelineDuplexLogic.h
// DJISdkDemo
//
// Copyright © 2020 DJI. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "PipelineStatistical.h"
#import <DJISDK/DJISDK.h>
NS_ASSUME_NONNULL_BEGIN

// Drives upload/download transfers over a DJI SDK pipeline and aggregates
// their statistics, progress flags and result strings for the demo UI.
@interface PipelineDuplexLogic : NSObject

// Running transfer statistics, one per direction.
@property (atomic) PipelineStatistical *uploadStatistical;
@property (atomic) PipelineStatistical *downloadStatistical;

// Latched per-direction outcome flags.
@property (nonatomic) BOOL isUploadTransmissionSuccessful;
@property (nonatomic) BOOL isDownloadTransmissionSuccessful;
// NOTE(review): "Transimssion" misspells "Transmission"; the names are part of
// the public interface and are kept as-is for source compatibility.
@property (nonatomic) BOOL isUploadTransimssionFailure;
@property (nonatomic) BOOL isDownloadTransimssionFailure;

// Cancellation requests and in-flight markers.
@property (atomic) BOOL stopDownload;
@property (atomic) BOOL isDownloading;
@property (atomic) BOOL stopUpload;
@property (atomic) BOOL isUploading;

// Display strings for the demo UI.
@property (nonatomic) NSString *downloadTitle;
@property (nonatomic) NSString *uploadTitle;
@property (nonatomic, nullable) NSString *downloadFinalResult;
@property (nonatomic, nullable) NSString *uploadFinalResult;

// Downloads `fileName` through `pipeline` into `localStoragePath`.
- (void)download:(NSString *)fileName
   localFilePath:(NSString *)localStoragePath
        pipeline:(DJIPipeline *)pipeline
 withFinishBlock:(void (^)())finishBlock
withFailureBlock:(void(^)(DJIPipeline *_Nullable pipeline, NSString *_Nullable error))failureBlock;

// Requests cancellation of an in-flight download.
- (void)stopDownload:(DJIPipeline *_Nullable)pipeline;

// Uploads the file at `filePath` in pieces of `pieceLength` bytes, paced by
// `frequency` (units not visible here — presumably sends per second).
- (void)uploadFile:(NSString *)filePath
          pipeline:(DJIPipeline *)pipeline
       pieceLength:(NSInteger)pieceLength
         frequency:(double)frequency
   withFinishBlock:(void (^)())finishBlock
  withFailureBlock:(void(^)(DJIPipeline *_Nullable pipeline, NSString *_Nullable error))failureBlock;

// Requests cancellation of an in-flight upload.
- (void)stopUpload:(DJIPipeline *_Nullable)pipeline;

@end

NS_ASSUME_NONNULL_END
| 575 |
336 | <reponame>tomwei7/LA104
// Predefined AT commands / characters selectable on the Send page.
// An empty string ends the current row of buttons in the on-screen layout.
static constexpr const char* mSendStrings[] = {
    "AT\r\n", "ATI\r\n", "ATD\r\n", "AT+CMGF=1\r\n", "",
    "A", "B", "C", "D", "0", "1", ""
};
// "Send" page: a grid of predefined commands that can be transmitted, plus a
// toggle that sends an incrementing "Hello <n>!" message once per second.
class CPageSend : public CPage
{
    bool mSendInterval{false};  // when set, OnTick() sends a counter message every second
    int mCurrentIndex{0};       // index into mSendStrings of the button under the cursor

public:
    CPageSend(CApplication& app) : CPage(app)
    {
    }

    virtual const char* Name() override
    {
        return "Send";
    }

    virtual void Draw(Layout::Render& r) override
    {
        using namespace Layout;
        GUI::Background(r.GetRectangle(), RGB565(404040), RGB565(404040));
// S(x, y) marks a widget as selected when the app cursor is at grid cell (x, y).
#define S(x, y) Select(mApp.mX == x && mApp.mY == y)
        r << Padding(4, 4, 4, 2);
        int x=0, y=0;
        for (int i=0; i<COUNT(mSendStrings); i++)
        {
            // An empty entry terminates the current row of buttons.
            if (strcmp(mSendStrings[i], "") == 0)
            {
                r << NewLine();
                x = 0;
                y++;
                continue;
            }
            r << S(x, y) << Button(mSendStrings[i]);
            // Remember which command the cursor is on, for OnKey().
            if (mApp.mX == x && mApp.mY == y)
                mCurrentIndex = i;
            x++;
        }
        r << S(0, 2) << Radio(mSendInterval, "Hello %d!");
#undef S
    }

    virtual void OnKey(int key) override
    {
        if (key == BIOS::KEY::Enter)
        {
            // Cell (0, 2) == 0x20 is the interval toggle; any other cell
            // sends the command currently under the cursor.
            switch (mApp.mY*16 + mApp.mX)
            {
                case 0x20: mSendInterval = !mSendInterval; break;
                default:
                    mApp.Send(mSendStrings[mCurrentIndex]);
            }
        }
        // Clamp the cursor to the widget count of each row.
        switch (mApp.mY)
        {
            case 0: mApp.mX = max(0, min(mApp.mX, 3)); break;
            case 1: mApp.mX = max(0, min(mApp.mX, 5)); break;
            case 2: mApp.mX = max(0, min(mApp.mX, 0)); break;
            case 3: mApp.mY = 2; break;  // no fourth row; snap back up
        }
    }

    virtual void OnTick() override
    {
        EVERY(1000)  // roughly once per second
        {
            if (mSendInterval)
            {
                static int counter = 0;
                char message[16];
                sprintf(message, "Hello %d!", counter++);
                mApp.Send(message);
            }
        }
    }
};
| 1,221 |
4,047 | <reponame>kira78/meson<gh_stars>1000+
#!/usr/bin/env python3
"""Fail with an error when the file named by the first CLI argument is missing."""
import os
import sys


def main():
    """Check that sys.argv[1] names an existing regular file.

    Raises:
        FileNotFoundError: if the path does not exist or is not a regular
            file. (A subclass of the bare ``Exception`` previously raised,
            so existing ``except Exception`` callers keep working.)
    """
    path = sys.argv[1]
    if not os.path.isfile(path):
        raise FileNotFoundError("Couldn't find {!r}".format(path))


if __name__ == "__main__":
    main()
| 81 |
4,372 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.sharding.route.engine.condition.generator;
import com.google.common.base.Preconditions;
import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.ExpressionSegment;
import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.LiteralExpressionSegment;
import org.apache.shardingsphere.sql.parser.sql.common.segment.dml.expr.simple.ParameterMarkerExpressionSegment;
import java.util.List;
import java.util.Optional;
/**
 * Holds the sharding condition value extracted from an expression segment.
 *
 * <p>Only literal expressions and bound parameter markers yield a value;
 * any other segment type results in an absent value.
 */
public final class ConditionValue {
    
    private final Comparable<?> value;
    
    public ConditionValue(final ExpressionSegment expressionSegment, final List<Object> parameters) {
        value = extract(expressionSegment, parameters);
    }
    
    /** Dispatches on the concrete segment type; null when no value applies. */
    private Comparable<?> extract(final ExpressionSegment segment, final List<Object> parameters) {
        if (segment instanceof LiteralExpressionSegment) {
            return asComparable(((LiteralExpressionSegment) segment).getLiterals());
        }
        if (segment instanceof ParameterMarkerExpressionSegment) {
            int markerIndex = ((ParameterMarkerExpressionSegment) segment).getParameterMarkerIndex();
            return markerIndex < parameters.size() ? asComparable(parameters.get(markerIndex)) : null;
        }
        return null;
    }
    
    /** Validates that the candidate is usable as a sharding value. */
    private Comparable<?> asComparable(final Object candidate) {
        Preconditions.checkArgument(candidate instanceof Comparable, "Sharding value must implements Comparable.");
        return (Comparable<?>) candidate;
    }
    
    /**
     * Get condition value.
     *
     * @return condition value, empty when no value could be extracted
     */
    public Optional<Comparable<?>> getValue() {
        return Optional.ofNullable(value);
    }
}
| 998 |
849 | """
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import abc
from .utils import CryptoError
class GSMA3A8Algo(metaclass=abc.ABCMeta):
    """Interface for GSM A3/A8 authentication algorithms.

    Implementations derive an authentication tuple from a subscriber's
    secret key (and, conceptually, a random challenge).
    """

    @abc.abstractmethod
    def generate_auth_tuple(self, key):
        """Produce an auth tuple for the given subscriber key.

        Args:
            key: secret key for a subscriberdb
        Returns:
            (rand, sres, cipher_key) auth tuple
        Raises:
            CryptoError on any error
        """
        raise NotImplementedError()


class UnsafePreComputedA3A8(GSMA3A8Algo):
    """Dummy A3/A8 implementation: the subscriber "key" *is* the auth tuple.

    No random generation or cryptographic operation takes place; the 28-byte
    key is simply sliced into its (rand, sres, cipher_key) components. Useful
    only as a stand-in for the real A3/A8 interfaces.
    """

    def generate_auth_tuple(self, key):
        """Slice a precomputed 28-byte auth tuple out of ``key``.

        Args:
            key: 28 bytes laid out as rand (16) + sres (4) + cipher_key (8)
        Returns:
            (rand, sres, cipher_key) tuple
        Raises:
            CryptoError: if the key is not exactly 28 bytes long
        """
        if len(key) != 28:
            raise CryptoError('Invalid auth vector: %s' % key)
        rand, sres, cipher_key = key[:16], key[16:20], key[20:]
        return (rand, sres, cipher_key)
| 699 |
1,830 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import configparser
from common import utilities
from common.utilities import ConfigInfo
import shutil
import os
from service.tars_service import TarsService
class NodeConfigGenerator:
    def __init__(self, config):
        # Overall deployment configuration (chain id, group config, node list).
        self.config = config
        # Template files the generated configs are derived from.
        self.genesis_tpl_config = ConfigInfo.genesis_config_tpl_path
        self.node_tpl_config = ConfigInfo.node_config_tpl_path
        # Canonical artifact names; ".tmp" variants are the locally generated
        # copies before being pushed to the Tars services.
        self.genesis_config_file = "config.genesis"
        self.genesis_tmp_config_file = 'config.genesis.tmp'
        self.ini_config_file = "config.ini"
        self.ini_config_tmp_file = "config.ini.tmp"
        self.node_pem_file = "node.pem"
        self.node_id_file = "node.nodeid"
        # Root of the local output tree.
        self.root_dir = "generated/"

    def get_all_service_info(self, node_config, service_name):
        """Return (remote_file_names, local_paths) — parallel lists covering
        the ini/genesis configs plus the node id and private key files."""
        config_file_list = [
            self.ini_config_file, self.genesis_config_file, self.node_id_file, self.node_pem_file]
        node_id_path = os.path.join(self.get_node_pem_path(
            node_config, service_name), self.node_id_file)
        node_pem_path = os.path.join(self.get_node_pem_path(
            node_config, service_name), self.node_pem_file)
        config_path_list = [self.get_node_ini_config_path(
            node_config, service_name), self.get_genesis_config_path(node_config, service_name), node_id_path, node_pem_path]
        return (config_file_list, config_path_list)

    def get_genesis_config_path(self, node_config, service_name):
        # Local path of the temporary genesis config for one service.
        return self.get_node_config_path(node_config, service_name, self.genesis_tmp_config_file)

    def get_node_ini_config_path(self, node_config, service_name):
        # Local path of the temporary ini config for one service.
        return self.get_node_config_path(node_config, service_name, self.ini_config_tmp_file)

    def get_node_config_path(self, node_config, service_name, file_name):
        # generated/<chain_id>/<group_id>/<deploy_ip>/<service_name>/<file_name>
        return os.path.join(self.root_dir, self.config.chain_id, self.config.group_config.group_id, node_config.deploy_ip, service_name, file_name)

    def get_node_config_dir(self, node_config, service_name):
        # generated/<chain_id>/<group_id>/<deploy_ip>/<service_name>
        return os.path.join(self.root_dir, self.config.chain_id, self.config.group_config.group_id, node_config.deploy_ip, service_name)
    def generate_genesis_config(self, nodeid_list):
        """Build the genesis config from its template.

        Consensus settings come from the group config; each node id in
        ``nodeid_list`` is registered as ``node.<i> = <nodeid>:1`` (weight 1).
        """
        config_content = configparser.ConfigParser()
        config_content.read(self.genesis_tpl_config)
        consensus_section = "consensus"
        config_content[consensus_section]["consensus_type"] = self.config.group_config.genesis_config.consensus_type
        config_content[consensus_section]["block_tx_count_limit"] = str(
            self.config.group_config.genesis_config.block_tx_count_limit)
        config_content[consensus_section]["leader_period"] = str(
            self.config.group_config.genesis_config.leader_period)
        i = 0
        for nodeid in nodeid_list:
            key = "node." + str(i)
            value = nodeid.strip() + ":1"
            config_content[consensus_section][key] = value
            i = i + 1
        tx_section = "tx"
        config_content[tx_section]["gas_limit"] = self.config.group_config.genesis_config.gas_limit
        return config_content

    def generate_node_config(self, node_config, node_name):
        """Build the ``config.ini`` content for one node service from the
        template, filling in chain, service and executor settings."""
        ini_config = configparser.ConfigParser()
        ini_config.read(self.node_tpl_config)
        chain_section = "chain"
        ini_config[chain_section]["sm_crypto"] = utilities.convert_bool_to_str(
            self.config.group_config.sm_crypto)
        ini_config[chain_section]["group_id"] = self.config.group_config.group_id
        ini_config[chain_section]["chain_id"] = self.config.group_config.chain_id
        service_section = "service"
        ini_config[service_section]["node_name"] = node_name
        # Dependent services are addressed as "<chain_id>.<service_name>".
        ini_config[service_section]["rpc"] = self.config.chain_id + \
            "." + node_config.rpc_service_name
        ini_config[service_section]["gateway"] = self.config.chain_id + \
            "." + node_config.gateway_service_name
        # Micro-service deployments additionally reference each sub-service.
        if node_config.microservice_node is True:
            node_service_config_item = node_config.node_service_config_info[node_name]
            for config_key in node_service_config_item.keys():
                ini_config[service_section][config_key] = self.config.chain_id + \
                    "." + node_service_config_item[config_key]
        executor_section = "executor"
        if self.config.group_config.vm_type == "evm":
            ini_config[executor_section]["is_wasm"] = utilities.convert_bool_to_str(
                False)
        if self.config.group_config.vm_type == "wasm":
            ini_config[executor_section]["is_wasm"] = utilities.convert_bool_to_str(
                True)
        ini_config[executor_section]["is_auth_check"] = utilities.convert_bool_to_str(
            self.config.group_config.auth_check)
        ini_config[executor_section]["auth_admin_account"] = self.config.group_config.init_auth_address
        return ini_config

    def generate_node_ini_config(self, node_config):
        """Write ``config.ini.tmp`` for every service of every node.

        Returns False (after logging) as soon as any target file already
        exists; True when all files were written.
        """
        for node_name in node_config.node_service_config_info.keys():
            ini_config = self.generate_node_config(node_config, node_name)
            service_list = node_config.nodes_service_name_list[node_name]
            for service in service_list:
                file_path = self.get_node_config_path(
                    node_config, service, self.ini_config_tmp_file)
                if os.path.exists(file_path):
                    utilities.log_error(
                        "* generate ini config for service %s failed, for the config file %s already exists!" % (service, file_path))
                    return False
                utilities.log_info(
                    "* generate ini config for service %s\n\tconfig path: %s" % (service, file_path))
                utilities.mkfiledir(file_path)
                with open(file_path, 'w') as configfile:
                    ini_config.write(configfile)
        return True
def write_config_to_path(self, config_content, config_file_path):
if os.path.exists(config_file_path):
utilities.log_error(
"write config to %s failed for already exists" % config_file_path)
return False
utilities.mkfiledir(config_file_path)
with open(config_file_path, 'w') as config_file_path:
config_file_path.write(config_content)
return True
def reload_node_config_for_expanded_node(self, node_config, ini_config_content, node_name):
"""
reload node_config after fetch iniConfig
"""
ini_config = configparser.ConfigParser()
ini_config.read_string(ini_config_content)
chain_section = "chain"
node_config.group_id = ini_config[chain_section]["group_id"]
self.config.group_id = node_config.group_id
node_config.chain_id = ini_config[chain_section]["chain_id"]
self.config.chain_id = node_config.chain_id
self.config.group_config.group_id = node_config.group_id
self.config.group_config.chain_id = node_config.chain_id
sm_crypto = ini_config[chain_section]["sm_crypto"]
self.config.group_config.sm_crypto = False
node_config.generate_service_name_list()
node_config.generate_service_config_info()
if sm_crypto == "true":
self.config.group_config.sm_crypto = True
# reset the node_name for the ini config
ini_config["service"]["node_name"] = node_name
return ini_config
def generate_expand_node_config(self, node_config):
tars_service = TarsService(self.config.tars_config.tars_url,
self.config.tars_config.tars_token, self.config.chain_id, "")
# fetch the ini config
(ret, ini_config_content) = tars_service.fetch_server_config_file(
self.ini_config_file, node_config.expanded_service)
if ret is False:
utilities.log_error(
"* expand node failed for fetch ini config from %s failed" % node_config.expanded_service)
return False
# fetch the genesis config
(ret, genesis_config_content) = tars_service.fetch_server_config_file(
self.genesis_config_file, node_config.expanded_service)
if ret is False:
utilities.log_error(
"* expand node failed for fetch genesis config from %s failed" % node_config.expanded_service)
return False
# load group_id and crypto_type from the config
# reload the config
utilities.log_info("* reload node config")
for node_name in node_config.node_service_config_info.keys():
updated_ini_config = self.reload_node_config_for_expanded_node(
node_config, ini_config_content, node_name)
# Note: obtain updated service_list after reload node_config
service_list = node_config.nodes_service_name_list[node_name]
for service in service_list:
ini_config_path = self.get_node_config_path(
node_config, service, self.ini_config_tmp_file)
utilities.log_info(
"* generate ini config, service: %s, path: %s" % (service, ini_config_path))
# if self.write_config_to_path(ini_config_content, ini_config_path) is False:
# return False
if os.path.exists(ini_config_path):
utilities.log_error(
"* generate ini config for %s failed for the config %s already exists." % (service, ini_config_path))
return False
utilities.mkfiledir(ini_config_path)
with open(ini_config_path, 'w') as configfile:
updated_ini_config.write(configfile)
utilities.log_info(
"* generate ini config for service: %s success" % service)
# generate genesis config
genesis_config_path = self.get_node_config_path(
node_config, service, self.genesis_tmp_config_file)
utilities.log_info(
"* generate genesis config, service: %s, path: %s" % (service, genesis_config_path))
if self.write_config_to_path(genesis_config_content, genesis_config_path) is False:
return False
utilities.log_info(
"* generate genesis config for service: %s success" % service)
return True
def generate_node_genesis_config(self, node_config, nodeid_list):
    """Build the genesis config for the given node ids and persist it.

    Content generation is delegated to generate_genesis_config and the
    per-service write to write_config; returns write_config's success flag.
    """
    config = self.generate_genesis_config(nodeid_list)
    return self.write_config(config, self.genesis_tmp_config_file, node_config)
def write_config(self, updated_config, file_name, node_config):
    """Write ``updated_config`` as ``file_name`` under every service directory
    of ``node_config``.

    Refuses to overwrite: if any target already exists the method logs an
    error and returns False (files written earlier in the loop are kept on
    disk). Returns True when every service directory was written.
    """
    for key in node_config.nodes_service_name_list.keys():
        service_list = node_config.nodes_service_name_list[key].keys()
        for service in service_list:
            # Resolve the per-service destination path for this config file.
            file_path = self.get_node_config_path(
                node_config, service, file_name)
            if os.path.exists(file_path):
                utilities.log_error(
                    "* generate genesis config for %s failed for the config %s already exists." % (service, file_path))
                return False
            utilities.log_info(
                "* generate genesis config for %s\n\t path: %s" % (service, file_path))
            # Create parent directories, then serialize through the config
            # object's own write() (configparser-style API).
            utilities.mkfiledir(file_path)
            with open(file_path, 'w') as configfile:
                updated_config.write(configfile)
    return True
def generate_all_nodes_pem(self):
    """Generate key material for every configured node.

    Returns the concatenated list of node ids produced by each
    generate_node_pem call, in configuration order.
    """
    nodeid_list = []
    for node_config in self.config.group_config.node_list:
        nodeid_list.extend(self.generate_node_pem(node_config))
    return nodeid_list
def generate_node_all_config(self, node_config, nodeid_list):
    """Generate the genesis config, then the ini config; fail fast."""
    genesis_ok = self.generate_node_genesis_config(node_config, nodeid_list)
    if genesis_ok is False:
        return False
    return self.generate_node_ini_config(node_config)
def get_node_pem_path(self, node_config, service_name):
    """Directory holding the pem/node-id files for one service instance:
    <root>/<chain_id>/<group_id>/<deploy_ip>/<service_name>."""
    parts = (
        self.root_dir,
        self.config.group_config.chain_id,
        self.config.group_config.group_id,
        node_config.deploy_ip,
        service_name,
    )
    return os.path.join(*parts)
def generate_node_pem(self, node_config):
    """
    generate private key for the node

    For each node entry: generates a fresh key pair into root_dir, reads the
    resulting node id back from disk, and copies the pem/node-id files into
    every service directory of that entry. Returns the list of node ids.
    NOTE(review): the key pair is written to the same pem_path on every loop
    iteration, so each entry overwrites the previous one after it was copied.
    """
    outputdir = self.root_dir
    pem_path = os.path.join(outputdir, self.node_pem_file)
    node_id_path = os.path.join(outputdir, self.node_id_file)
    nodeid_list = []
    for key in node_config.nodes_service_name_list.keys():
        # Honors the sm_crypto flag (national-standard crypto vs default).
        utilities.generate_private_key(
            self.config.group_config.sm_crypto, outputdir)
        with open(node_id_path, 'r', encoding='utf-8') as f:
            nodeid_list.append(f.read())
        single_node_service = (
            node_config.nodes_service_name_list[key]).keys()
        for service in single_node_service:
            dst_path = self.get_node_pem_path(node_config, service)
            utilities.log_info(
                "* generate pem file for %s\n\t- pem_path: %s\n\t- node_id_path: %s\n\t- sm_crypto: %d" % (service, dst_path, node_id_path, self.config.group_config.sm_crypto))
            # copy the generated file to all services path
            utilities.mkdir(dst_path)
            shutil.copy(pem_path, dst_path)
            shutil.copy(node_id_path, dst_path)
    return nodeid_list
| 6,285 |
325 | #include <wiz/utility/text.h>
#include <wiz/utility/path.h>
#include <wiz/utility/reader.h>
#include <wiz/utility/report.h>
#include <wiz/utility/import_manager.h>
#include <wiz/utility/source_location.h>
#include <wiz/utility/resource_manager.h>
namespace wiz {
namespace {
const char* const SourceExtension = ".wiz";
}
// Stores the collaborators used to resolve and open imported source files;
// `importDirs` lists extra search roots for non-relative imports.
ImportManager::ImportManager(StringPool* stringPool, ResourceManager* resourceManager, ArrayView<StringView> importDirs)
: stringPool(stringPool), resourceManager(resourceManager), importDirs(importDirs) {}
// Root path used to compute user-facing relative display paths.
StringView ImportManager::getStartPath() const {
    return startPath;
}
// Sets the root path against which display paths are relativized.
void ImportManager::setStartPath(StringView value) {
    startPath = value;
}
// Path of the file currently being parsed; base for relative imports.
StringView ImportManager::getCurrentPath() const {
    return currentPath;
}
// Updates the current file path (the base used by attemptRelativeImport).
void ImportManager::setCurrentPath(StringView value) {
    currentPath = value;
}
// Tries to import `attemptedPath` (an absolute/combined path, or a <...>
// pseudo-path for shell resources, which is used verbatim).
// Outputs on success:
//   displayPath   - user-facing path (relative to startPath when possible)
//   canonicalPath - normalized absolute path used to de-duplicate imports
//   reader        - open reader over the resource
// Returns AlreadyImported when the canonical path was imported before,
// JustImported on first successful open, Failed otherwise (outputs cleared).
ImportResult ImportManager::attemptAbsoluteImport(StringView originalPath, StringView attemptedPath, ImportOptions importOptions, StringView& displayPath, StringView& canonicalPath, std::unique_ptr<Reader>& reader) {
    static_cast<void>(originalPath);

    const auto appendExtension = (importOptions & ImportOptions::AppendExtension) != ImportOptions::None;
    const auto allowShellResources = (importOptions & ImportOptions::AllowShellResources) != ImportOptions::None;

    reader = nullptr;

    if (attemptedPath.startsWith("<"_sv) && attemptedPath.endsWith(">"_sv)) {
        displayPath = canonicalPath = attemptedPath;
    } else {
        // Append the ".wiz" source extension unless disabled or already present.
        canonicalPath = stringPool->intern(path::toNormalizedAbsolute(StringView(attemptedPath.toString() + (appendExtension && !attemptedPath.endsWith(StringView(SourceExtension)) ? SourceExtension : ""))));
        displayPath = StringView();

        if (startPath.getLength() != 0) {
            const auto relativeDisplayPath = path::toRelative(canonicalPath, startPath);
            displayPath = stringPool->intern(relativeDisplayPath);
        }
        // Fall back to the normalized attempted path when no start-relative
        // form could be produced.
        if (displayPath.getLength() == 0) {
            displayPath = stringPool->intern(path::toNormalized(StringView(attemptedPath.toString() + (appendExtension && !attemptedPath.endsWith(StringView(SourceExtension)) ? SourceExtension : ""))));
        }
    }

    if (alreadyImportedPaths.find(canonicalPath) != alreadyImportedPaths.end()) {
        // Already included, do nothing.
        return ImportResult::AlreadyImported;
    } else {
        reader = resourceManager->openReader(canonicalPath, allowShellResources);

        if (reader != nullptr && reader->isOpen()) {
            alreadyImportedPaths.insert(canonicalPath);
            return ImportResult::JustImported;
        } else {
            // Could not open: clear outputs so callers don't use stale paths.
            displayPath = StringView();
            canonicalPath = StringView();
            return ImportResult::Failed;
        }
    }
}
// Resolves `originalPath` against the directory of the file currently being
// parsed and forwards to attemptAbsoluteImport.
ImportResult ImportManager::attemptRelativeImport(StringView originalPath, ImportOptions importOptions, StringView& displayPath, StringView& canonicalPath, std::unique_ptr<Reader>& reader) {
    const auto combined = path::getDirectory(currentPath).toString() + "/" + originalPath.toString();
    return attemptAbsoluteImport(originalPath, StringView(combined), importOptions, displayPath, canonicalPath, reader);
}
// Resolves and opens an imported module by name.
// Explicitly relative paths ("./x", "../x") are resolved only against the
// current file's directory; any other path is first tried relative to the
// current file and then against each configured import directory.
ImportResult ImportManager::importModule(StringView originalPath, ImportOptions importOptions, StringView& displayPath, StringView& canonicalPath, std::unique_ptr<Reader>& reader) {
    if (originalPath.startsWith("./"_sv) || originalPath.startsWith("../"_sv)) {
        const auto result = attemptRelativeImport(originalPath, importOptions, displayPath, canonicalPath, reader);
        if (result != ImportResult::Failed) {
            return result;
        }
    } else {
        {
            const auto result = attemptRelativeImport(originalPath, importOptions, displayPath, canonicalPath, reader);
            if (result != ImportResult::Failed) {
                return result;
            }
        }

        for (const auto& dir : importDirs) {
            // findLastOf(...) >= length means "no path separator present";
            // only then is the dir passed through path::getDirectory.
            // NOTE(review): intent of this sanitization is not documented
            // here -- confirm against path::getDirectory's semantics.
            const auto sanitizedDir = dir.findLastOf("/\\"_sv) >= dir.getLength()
                ? path::getDirectory(dir)
                : dir;
            const auto result = attemptAbsoluteImport(originalPath, StringView(sanitizedDir.toString() + "/" + originalPath.toString()), importOptions, displayPath, canonicalPath, reader);
            if (result != ImportResult::Failed) {
                return result;
            }
        }
    }

    // Every candidate failed: clear the outputs.
    displayPath = StringView();
    canonicalPath = StringView();
    return ImportResult::Failed;
}
} | 1,903 |
#include <Arduino.h>
#include <arduino-timer.h>
#include <rpcWiFi.h>
#include <sfud.h>
#include <SPI.h>
#include <string.h>

#include "config.h"
#include "language_understanding.h"
#include "mic.h"
#include "speech_to_text.h"
#include "text_to_speech.h"
// Blocks until the WiFi stack reports an established connection, retrying
// the credentials from config.h every 500 ms.
void connectWiFi()
{
    for (; WiFi.status() != WL_CONNECTED; delay(500))
    {
        Serial.println("Connecting to WiFi..");
        WiFi.begin(SSID, PASSWORD);
    }

    Serial.println("Connected!");
}
// One-time board initialization: serial console, WiFi, the external SPI
// flash (SFUD), button input, microphone and the two speech-service clients.
void setup()
{
    Serial.begin(9600);

    while (!Serial)
        ; // Wait for Serial to be ready

    delay(1000);

    connectWiFi();

    // Block until the SPI flash is initialised, then enable fast quad-SPI
    // reads on the W25Q32 chip.
    while (!(sfud_init() == SFUD_SUCCESS))
        ;

    sfud_qspi_fast_read_enable(sfud_get_device(SFUD_W25Q32_DEVICE_INDEX), 2);

    // Button C triggers recording; INPUT_PULLUP means pressed reads LOW.
    pinMode(WIO_KEY_C, INPUT_PULLUP);

    mic.init();

    speechToText.init();
    textToSpeech.init();

    Serial.println("Ready.");
}
auto timer = timer_create_default();
// Logs the text to serial and speaks it through the text-to-speech service.
void say(String text)
{
    Serial.println(text);
    textToSpeech.convertTextToSpeech(text);
}
// arduino-timer callback: speaks the end-of-timer announcement.
// `announcement` is the opaque argument registered with timer.in() and must
// still be a valid C string when the timer fires. Returning false tells the
// timer library not to repeat this task.
bool timerExpired(void *announcement)
{
    say((char *)announcement);
    return false;
}
void processAudio()
{
String text = speechToText.convertSpeechToText();
Serial.println(text);
int total_seconds = languageUnderstanding.GetTimerDuration(text);
if (total_seconds == 0)
{
return;
}
int minutes = total_seconds / 60;
int seconds = total_seconds % 60;
String begin_message;
if (minutes > 0)
{
begin_message += minutes;
begin_message += " minute ";
}
if (seconds > 0)
{
begin_message += seconds;
begin_message += " second ";
}
begin_message += "timer started.";
String end_message("Times up on your ");
if (minutes > 0)
{
end_message += minutes;
end_message += " minute ";
}
if (seconds > 0)
{
end_message += seconds;
end_message += " second ";
}
end_message += "timer.";
say(begin_message);
timer.in(total_seconds * 1000, timerExpired, (void *)(end_message.c_str()));
}
// Main loop: button C (active low) starts a recording; once the recording
// buffer is ready it is processed, and pending timer tasks are serviced.
void loop()
{
    if (digitalRead(WIO_KEY_C) == LOW && !mic.isRecording())
    {
        Serial.println("Starting recording...");
        mic.startRecording();
    }

    if (!mic.isRecording() && mic.isRecordingReady())
    {
        Serial.println("Finished recording");
        processAudio();
        mic.reset();
    }

    // Drive the software timer so scheduled announcements can fire.
    timer.tick();
}
| 1,039 |
3,301 | package com.alibaba.alink.operator.common.aps;
import org.apache.flink.ml.api.misc.param.Params;
import com.alibaba.alink.operator.batch.BatchOperator;
/**
 * Storage abstraction for APS checkpoints: persists an intermediate
 * {@link BatchOperator} under a string identity and reads it back later.
 */
public abstract class ApsCheckpoint {

	/**
	 * Persists the given operator's output under {@code identity}.
	 *
	 * @param operator batch operator whose data should be stored
	 * @param identity key under which the checkpoint is stored
	 * @param mlEnvId  ML environment id the operator belongs to
	 * @param params   extra storage parameters
	 */
	public abstract void write(BatchOperator <?> operator, String identity, Long mlEnvId, Params params);

	/**
	 * Reads a previously written checkpoint back as a batch operator.
	 *
	 * @param identity key used at write time
	 * @param mlEnvId  ML environment id
	 * @param params   extra storage parameters
	 * @return the restored operator
	 */
	public abstract BatchOperator <?> read(String identity, Long mlEnvId, Params params);
}
| 124 |
511 | <gh_stars>100-1000
//******************************************************************
//
// Copyright 2015 Samsung Electronics All Rights Reserved.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
#include <memory>
#include <cstdlib>
#include <functional>
#include <map>
#include <utility>
#include <ctime>
#include "DataCache.h"
#include "ResponseStatement.h"
#include "RCSResourceAttributes.h"
#include "ExpiryTimer.h"
#include "ocrandom.h"
namespace OIC
{
namespace Service
{
namespace
{
    // Bridges the asynchronous observe callback to a DataCache that may
    // already have been destroyed: the weak_ptr is upgraded (and the
    // notification forwarded) only while the cache is still alive.
    void verifyObserveCB(
        const HeaderOptions &_hos, const ResponseStatement &_rep,
        int _result, unsigned int _seq, std::weak_ptr<DataCache> rpPtr)
    {
        std::shared_ptr<DataCache> ptr = rpPtr.lock();
        if (ptr)
        {
            ptr->onObserve(_hos, _rep, _result, _seq);
        }
    }

    // Binds verifyObserveCB to the given cache, producing the ObserveCB
    // signature the resource layer expects.
    ObserveCB verifiedObserveCB(std::weak_ptr<DataCache> rpPtr)
    {
        return std::bind(verifyObserveCB,
                         std::placeholders::_1, std::placeholders::_2,
                         std::placeholders::_3, std::placeholders::_4, rpPtr);
    }

    // Same lifetime guard for the one-shot GET callback.
    void verifyGetCB(
        const HeaderOptions &_hos, const ResponseStatement &_rep,
        int _result, std::weak_ptr<DataCache> rpPtr)
    {
        std::shared_ptr<DataCache> Ptr = rpPtr.lock();
        if (Ptr)
        {
            Ptr->onGet(_hos, _rep, _result);
        }
    }

    // Binds verifyGetCB to the given cache.
    GetCB verifiedGetCB(std::weak_ptr<DataCache> rpPtr)
    {
        return std::bind(verifyGetCB,
                         std::placeholders::_1, std::placeholders::_2,
                         std::placeholders::_3, rpPtr);
    }
}
// Starts in READY_YET / FREQUENCY state with no resource attached;
// initializeDataCache() must be called before the cache becomes active.
DataCache::DataCache()
{
    subscriberList = std::unique_ptr<SubscriberInfo>(new SubscriberInfo());

    sResource = nullptr;

    state = CACHE_STATE::READY_YET;
    mode = CACHE_MODE::FREQUENCY;

    networkTimeOutHandle = 0;
    pollingHandle = 0;
    lastSequenceNum = 0;
    isReady = false;
}
// Tears the cache down: drops all subscribers and, if a resource was
// attached and observed, cancels the observation.
DataCache::~DataCache()
{
    state = CACHE_STATE::DESTROYED;

    if (subscriberList != nullptr)
    {
        subscriberList->clear();
        // BUG FIX: the original called subscriberList.release(), which
        // abandons ownership and leaks the container; reset() deletes it.
        subscriberList.reset();
    }

    // BUG FIX: sResource stays nullptr until initializeDataCache() runs, so
    // guard before dereferencing it.
    if (sResource != nullptr && sResource->isObservable())
    {
        try
        {
            sResource->cancelObserve();
        }
        catch (...)
        {
            // ignore the exception because data cache was released.
        }
    }
}
// Attaches the primitive resource, wires the lifetime-guarded callbacks,
// issues an initial GET (plus an observe request when supported) and arms
// the network-expiry watchdog.
void DataCache::initializeDataCache(PrimitiveResourcePtr pResource)
{
    sResource = pResource;

    // Callbacks capture only a weak_ptr to this cache, so in-flight
    // responses cannot touch a destroyed instance (see helpers above).
    pObserveCB = verifiedObserveCB(std::weak_ptr<DataCache>(shared_from_this()));
    pGetCB = verifiedGetCB(std::weak_ptr<DataCache>(shared_from_this()));
    pTimerCB = (TimerCB)(std::bind(&DataCache::onTimeOut, this, std::placeholders::_1));
    pPollingCB = (TimerCB)(std::bind(&DataCache::onPollingOut, this, std::placeholders::_1));

    sResource->requestGet(pGetCB);
    if (sResource->isObservable())
    {
        sResource->requestObserve(pObserveCB);
    }
    networkTimeOutHandle = networkTimer.post(CACHE_DEFAULT_EXPIRED_MILLITIME, pTimerCB);
}
// Registers a subscriber callback with its reporting policy and returns the
// freshly generated id that identifies it for later removal.
CacheID DataCache::addSubscriber(CacheCB func, REPORT_FREQUENCY rf, long repeatTime)
{
    Report_Info info;
    info.rf = rf;
    info.repeatTime = repeatTime;
    info.timerID = 0;
    info.reportID = generateCacheID();

    {
        std::lock_guard<std::mutex> lock(m_mutex);
        if (subscriberList != nullptr)
        {
            subscriberList->emplace(info.reportID, std::make_pair(info, func));
        }
    }
    return info.reportID;
}
// Removes the subscriber with the given id; returns the id on success or 0
// when no such subscriber exists.
CacheID DataCache::deleteSubscriber(CacheID id)
{
    SubscriberInfoPair match = findSubscriber(id);
    if (match.first == 0)
    {
        return 0;
    }

    std::lock_guard<std::mutex> lock(m_mutex);
    subscriberList->erase(match.first);
    return match.first;
}
// Linear lookup of a subscriber by id under the list mutex.
// Returns a pair whose .first is 0 when the id is not present.
SubscriberInfoPair DataCache::findSubscriber(CacheID id)
{
    SubscriberInfoPair ret;

    std::lock_guard<std::mutex> lock(m_mutex);
    for (auto &i : *subscriberList)
    {
        if (i.first == id)
        {
            // Copy out the info/callback pair so the caller can use it
            // without holding the lock.
            ret = std::make_pair(i.first, std::make_pair((Report_Info)i.second.first,
                                                         (CacheCB)i.second.second));
            break;
        }
    }

    return ret;
}
// Resource this cache mirrors; null before initializeDataCache() runs.
const PrimitiveResourcePtr DataCache::getPrimitiveResource() const
{
    return sResource;
}
// Snapshot of the last received attribute set; an empty set is returned
// until the first successful response puts the cache into READY state.
const RCSResourceAttributes DataCache::getCachedData() const
{
    std::lock_guard<std::mutex> lock(att_mutex);
    if (state != CACHE_STATE::READY)
    {
        return RCSResourceAttributes();
    }
    return attributes;
}
// True once at least one response has populated the cache.
bool DataCache::isCachedData() const
{
    return isReady;
}
// Observe notification handler. Failed, empty or stale notifications
// (sequence number lower than the last seen) are dropped; otherwise the
// cache switches to OBSERVE mode, the expiry watchdog is re-armed and
// subscribers are notified.
void DataCache::onObserve(const HeaderOptions & /*_hos*/,
                          const ResponseStatement &_rep, int _result, unsigned int _seq)
{
    if (_result != OC_STACK_OK || _rep.getAttributes().empty() || lastSequenceNum > _seq)
    {
        return;
    }
    else
    {
        lastSequenceNum = _seq;
    }

    if (state != CACHE_STATE::READY)
    {
        state = CACHE_STATE::READY;
        isReady = true;
    }

    if (mode != CACHE_MODE::OBSERVE)
    {
        mode = CACHE_MODE::OBSERVE;
    }

    // Fresh data arrived: push the network-timeout deadline out again.
    networkTimer.cancel(networkTimeOutHandle);
    networkTimeOutHandle = networkTimer.post(CACHE_DEFAULT_EXPIRED_MILLITIME, pTimerCB);

    notifyObservers(_rep.getAttributes());
}
// GET response handler. On success marks the cache READY and, unless the
// resource is being observed, re-arms the expiry watchdog and schedules the
// next poll before notifying subscribers.
void DataCache::onGet(const HeaderOptions & /*_hos*/,
                      const ResponseStatement &_rep, int _result)
{
    if (_result != OC_STACK_OK || _rep.getAttributes().empty())
    {
        return;
    }

    if (state != CACHE_STATE::READY)
    {
        state = CACHE_STATE::READY;
        isReady = true;
    }

    if (mode != CACHE_MODE::OBSERVE)
    {
        networkTimer.cancel(networkTimeOutHandle);
        networkTimeOutHandle = networkTimer.post(
            CACHE_DEFAULT_EXPIRED_MILLITIME, pTimerCB);

        pollingHandle = pollingTimer.post(CACHE_DEFAULT_REPORT_MILLITIME, pPollingCB);
    }

    notifyObservers(_rep.getAttributes());
}
// Stores the new attribute set (no-op when unchanged) and invokes every
// UPTODATE subscriber callback with it.
void DataCache::notifyObservers(const RCSResourceAttributes Att)
{
    {
        std::lock_guard<std::mutex> lock(att_mutex);
        if (attributes == Att)
        {
            // Unchanged payload: subscribers are not re-notified.
            return;
        }
        attributes = Att;
    }

    // Callbacks are invoked while holding m_mutex; they must not call back
    // into methods that take the same lock.
    std::lock_guard<std::mutex> lock(m_mutex);
    for (auto &i : * subscriberList)
    {
        if (i.second.first.rf == REPORT_FREQUENCY::UPTODATE)
        {
            i.second.second(this->sResource, Att);
        }
    }
}
// Current lifecycle state (READY_YET / READY / UPDATING / LOST_SIGNAL / ...).
CACHE_STATE DataCache::getCacheState() const
{
    return state;
}
// Network watchdog expired. If we were observing, fall back to polling
// (cancel the observation, re-arm the watchdog, schedule a poll); if even
// polling went silent, declare the signal lost.
void DataCache::onTimeOut(unsigned int /*timerID*/)
{
    if (mode == CACHE_MODE::OBSERVE)
    {
        sResource->cancelObserve();
        mode = CACHE_MODE::FREQUENCY;

        networkTimer.cancel(networkTimeOutHandle);
        networkTimeOutHandle = networkTimer.post(
            CACHE_DEFAULT_EXPIRED_MILLITIME, pTimerCB);

        pollingHandle = pollingTimer.post(CACHE_DEFAULT_REPORT_MILLITIME, pPollingCB);
        return;
    }
    state = CACHE_STATE::LOST_SIGNAL;
}
// Polling timer fired: issue the next GET (the onGet handler re-schedules
// the following poll). Does nothing when no resource is attached.
void DataCache::onPollingOut(const unsigned int /*timerID*/)
{
    if (sResource == nullptr)
    {
        return;
    }

    mode = CACHE_MODE::FREQUENCY;
    sResource->requestGet(pGetCB);
}
// Draws random ids until one is non-zero and not already in use
// (0 is reserved as the "not found" sentinel).
CacheID DataCache::generateCacheID()
{
    CacheID candidate = 0;
    while (candidate == 0 || findSubscriber(candidate).first != 0)
    {
        candidate = OCGetRandom();
    }
    return candidate;
}
// Forces a refresh: marks the cache UPDATING and issues a GET.
void DataCache::requestGet()
{
    state = CACHE_STATE::UPDATING;
    if (sResource != nullptr)
    {
        sResource->requestGet(pGetCB);
    }
}
// True when nobody is subscribed (or the list was already torn down).
bool DataCache::isEmptySubscriber() const
{
    std::lock_guard<std::mutex> lock(m_mutex);
    return (subscriberList != nullptr) ? subscriberList->empty() : true;
}
} // namespace Service
} // namespace OIC
| 5,644 |
764 | <gh_stars>100-1000
{"symbol": "MAKI","address": "0x270D09cb4be817c98e84fEffdE03D5CD45e30a27","overview":{"en": ""},"email": "","website": "https://maki.finance","state": "NORMAL","links": {"blog": "","twitter": "https://twitter.com/maki_finance","telegram": "","github": ""}} | 109 |
1,439 | /*******************************************************************************
* Copyright 2018 <NAME> http://galenframework.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.galenframework.speclang2.pagespec;
import com.galenframework.parser.SyntaxException;
import com.galenframework.parser.IndentationStructureParser;
import com.galenframework.parser.StructNode;
import com.galenframework.parser.StringCharReader;
import com.galenframework.utils.GalenUtils;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/**
 * Processes {@code @import} statements in a page spec: each referenced file
 * is parsed (once per unique file id) and post-processed in a child handler.
 */
public class ImportProcessor implements StructNodeProcessor {
    protected final PageSpecHandler pageSpecHandler;

    public ImportProcessor(PageSpecHandler pageSpecHandler) {
        this.pageSpecHandler = pageSpecHandler;
    }

    /**
     * Imports the file named on the statement line itself (if any) and then
     * one file per child node.
     *
     * @return the nodes contributed by the imports (currently always empty,
     *         since imported specs are processed for their side effects)
     */
    @Override
    public List<StructNode> process(StringCharReader reader, StructNode statementNode) throws IOException {
        List<StructNode> importedNodes = new LinkedList<>();
        if (reader.hasMoreNormalSymbols()) {
            importedNodes.addAll(importPageSpec(reader.getTheRest().trim(), statementNode));
        }

        if (statementNode.getChildNodes() != null) {
            for (StructNode childNode : statementNode.getChildNodes()) {
                importedNodes.addAll(importPageSpec(childNode.getName(), childNode));
            }
        }
        return importedNodes;
    }

    /** Resolves {@code filePath} against the handler's context path and loads it. */
    protected List<StructNode> importPageSpec(String filePath, StructNode origin) throws IOException {
        String fullPath = filePath;
        if (pageSpecHandler.getContextPath() != null) {
            fullPath = pageSpecHandler.getContextPath() + "/" + filePath;
        }
        return loadPageSpec(origin, fullPath);
    }

    /**
     * Parses and processes the spec at {@code fullPath} unless its file id was
     * already imported (guards against duplicate and cyclic imports).
     *
     * @throws SyntaxException when the file cannot be found
     */
    protected List<StructNode> loadPageSpec(StructNode origin, String fullPath) throws IOException {
        String fileId = GalenUtils.calculateFileId(fullPath);
        if (!pageSpecHandler.getProcessedImports().contains(fileId)) {
            pageSpecHandler.getProcessedImports().add(fileId);
            // BUG FIX: the stream was previously never closed (resource leak);
            // try-with-resources closes it on every path. A null resource is
            // permitted by try-with-resources (close is simply skipped).
            try (InputStream stream = GalenUtils.findFileOrResourceAsStream(fullPath)) {
                if (stream == null) {
                    throw new SyntaxException(origin, "Cannot find file: " + fullPath);
                }

                List<StructNode> structs = new IndentationStructureParser().parse(stream, fullPath);

                PageSpecHandler childPageSpecHandler = new PageSpecHandler(pageSpecHandler, GalenUtils.getParentForFile(fullPath));

                List<StructNode> allProcessedChildNodes = new MacroProcessor(childPageSpecHandler).process(structs);
                new PostProcessor(childPageSpecHandler).process(allProcessedChildNodes);
            }
        }
        return Collections.emptyList();
    }
}
| 1,123 |
5,169 | <reponame>Gantios/Specs<gh_stars>1000+
{
"name": "ActionsDrawer",
"version": "0.1.1",
"summary": "A prettier alternative to action sheets.",
"swift_version": "4.2",
"description": "ActionsDrawer is built as a replacement to the standard action sheets. Action sheets provide little to no customization UI wise, while ActionsDrawer lets you display actions however you want (grouped, with images, etc.) in a scrollable area.",
"homepage": "https://github.com/Prismik/ActionsDrawer",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"Francis": "<EMAIL>"
},
"source": {
"git": "https://github.com/Prismik/ActionsDrawer.git",
"tag": "0.1.1"
},
"platforms": {
"ios": "8.0"
},
"source_files": "ActionsDrawer/Classes/**/*",
"frameworks": "UIKit",
"dependencies": {
"PinLayout": [
"~> 1.8"
]
}
}
| 339 |
5,651 | """
Experiments with infix operator dispatch
>>> kadd = KnowsAdd()
>>> kadd + 1
(<KnowsAdd object>, 1)
    >>> kadd * 1
    Traceback (most recent call last):
      ...
    TypeError: unsupported operand type(s) for *: 'KnowsAdd' and 'int'
"""
class KnowsAdd:
    """Demo class: supports ``+`` (echoing back the operand pair) while
    deliberately leaving ``*`` unimplemented."""

    def __add__(self, other):
        # Return both operands so infix dispatch can be observed.
        return (self, other)

    def __repr__(self):
        return f'<{type(self).__name__} object>'
| 125 |
5,169 | <filename>Specs/ATSDK/0.0.1-weex/ATSDK.podspec.json
{
"name": "ATSDK",
"version": "0.0.1-weex",
"summary": "No summary",
"description": "description",
"license": {
"type": "Copyright",
"text": " Alibaba-INC copyright\n"
},
"authors": {
"yinfeng": "<EMAIL>"
},
"source": {
"http": "http://download.taobaocdn.com/freedom/39794/compress/ATSDK.framework.zip"
},
"homepage": "http://github.com",
"vendored_frameworks": "ATSDK.framework",
"resources": "ATSDK.framework/Versions/A/Resources/*",
"exclude_files": "ATSDK.framework/Versions/A/Resources/Info.plist",
"platforms": {
"ios": "7.0"
},
"requires_arc": true,
"frameworks": [
"AVFoundation",
"MobileCoreServices",
"AudioToolbox"
]
}
| 327 |
749 | import os
import sys
import site
from distutils.sysconfig import get_python_lib
if __name__ == '__main__':
    # Usage: script.py <install-prefix>
    # Prints the site-packages directory relative to that prefix (typically
    # consumed by a build system to decide where to install a module).
    prefix = sys.argv[1]

    #ask distutils where to install the python module
    install_dir = get_python_lib(plat_specific=True, prefix=prefix)

    #use sites when the prefix is already recognized
    try:
        paths = [p for p in site.getsitepackages() if p.startswith(prefix)]
        if len(paths) == 1: install_dir = paths[0]
    except AttributeError: pass  # site.getsitepackages missing in some envs

    #strip the prefix to return a relative path
    print(os.path.relpath(install_dir, prefix))
| 204 |
364 | /**
* @file oglplus/buffer_binding.hpp
* @brief Buffer binding point indices
*
* @author <NAME>
*
* Copyright 2010-2019 <NAME>. Distributed under the Boost
* Software License, Version 1.0. (See accompanying file
* LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
*/
#pragma once
#ifndef OGLPLUS_BUFFER_BINDING_1107121519_HPP
#define OGLPLUS_BUFFER_BINDING_1107121519_HPP
#include <oglplus/limited_value.hpp>
namespace oglplus {

#if OGLPLUS_DOCUMENTATION_ONLY
/// Type for the uniform buffer binding point index
class UniformBufferBindingPoint : public LimitedCount {
public:
    /// Construction from a @c GLuint
    UniformBufferBindingPoint(GLuint count);
};
#elif GL_VERSION_3_1 || GL_ARB_uniform_buffer_object
OGLPLUS_DECLARE_LIMITED_COUNT_TYPE(
  UniformBufferBindingPoint, MAX_UNIFORM_BUFFER_BINDINGS)
#else
// Feature absent at compile time: fall back to a plain GLuint with no
// range validation against the GL implementation limit.
using UniformBufferBindingPoint = GLuint;
#endif

#if OGLPLUS_DOCUMENTATION_ONLY
/// Type for the transform feedback buffer binding point index
class TransformFeedbackBufferBindingPoint : public LimitedCount {
public:
    /// Construction from a @c GLuint
    TransformFeedbackBufferBindingPoint(GLuint count);
};
#elif GL_VERSION_4_0 || GL_ARB_transform_feedback3
OGLPLUS_DECLARE_LIMITED_COUNT_TYPE(
  TransformFeedbackBufferBindingPoint, MAX_TRANSFORM_FEEDBACK_BUFFERS)
#else
// Unchecked fallback when GL 4.0 / ARB_transform_feedback3 is unavailable.
using TransformFeedbackBufferBindingPoint = GLuint;
#endif

#if OGLPLUS_DOCUMENTATION_ONLY
/// Type for the atomic counter buffer binding point index
class AtomicCounterBufferBindingPoint : public LimitedCount {
public:
    /// Construction from a @c GLuint
    AtomicCounterBufferBindingPoint(GLuint count);
};
#elif GL_VERSION_4_2 || GL_ARB_shader_atomic_counters
OGLPLUS_DECLARE_LIMITED_COUNT_TYPE(
  AtomicCounterBufferBindingPoint, MAX_ATOMIC_COUNTER_BUFFER_BINDINGS)
#else
// Unchecked fallback when GL 4.2 / ARB_shader_atomic_counters is unavailable.
using AtomicCounterBufferBindingPoint = GLuint;
#endif

#if OGLPLUS_DOCUMENTATION_ONLY
/// Type for the shader storage buffer binding point index
class ShaderStorageBufferBindingPoint : public LimitedCount {
public:
    /// Construction from a @c GLuint
    ShaderStorageBufferBindingPoint(GLuint count);
};
#elif GL_VERSION_4_3 || GL_ARB_shader_storage_buffer_object
OGLPLUS_DECLARE_LIMITED_COUNT_TYPE(
  ShaderStorageBufferBindingPoint, MAX_SHADER_STORAGE_BUFFER_BINDINGS)
#else
// Unchecked fallback when GL 4.3 / ARB_shader_storage_buffer_object is
// unavailable.
using ShaderStorageBufferBindingPoint = GLuint;
#endif

} // namespace oglplus
#endif // include guard
| 821 |
778 | package org.aion.api.server.types;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertNull;
import static org.aion.base.Constants.NRG_CREATE_CONTRACT_DEFAULT;
import static org.aion.base.Constants.NRG_TRANSACTION_DEFAULT;
import java.math.BigInteger;
import org.aion.util.types.AddressUtils;
import org.json.JSONObject;
import org.junit.Test;
/**
 * Unit tests for {@link ArgTxCall} JSON parsing: verifies the default values
 * applied when optional fields are omitted, for contract creation (no "to"
 * address) and regular transactions, via both factory methods.
 */
public class ArgTxCallTest {

    // fromJSON with an empty object: contract creation, so the energy limit
    // defaults to NRG_CREATE_CONTRACT_DEFAULT.
    @Test
    public void testFromJsonContractCreateDefaults() {
        long nrgPrice = 10L;

        JSONObject tx = new JSONObject();
        ArgTxCall txCall = ArgTxCall.fromJSON(tx, nrgPrice);

        assertNull(txCall.getFrom());
        assertNull(txCall.getTo());
        assertEquals(0, txCall.getData().length);
        assertEquals(BigInteger.ZERO, txCall.getNonce());
        assertEquals(BigInteger.ZERO, txCall.getValue());
        assertEquals(NRG_CREATE_CONTRACT_DEFAULT, txCall.getNrg());
        assertEquals(nrgPrice, txCall.getNrgPrice());
    }

    // fromJSON with a "to" address: regular transaction, so the energy limit
    // defaults to NRG_TRANSACTION_DEFAULT.
    @Test
    public void testFromJsonTxDefaults() {
        long nrgPrice = 10L;
        String toAddr = "0xa076407088416d71467529d8312c24d7596f5d7db75a5c4129d2763df112b8a1";

        JSONObject tx = new JSONObject();
        tx.put("to", toAddr);
        ArgTxCall txCall = ArgTxCall.fromJSON(tx, nrgPrice);

        assertNull(txCall.getFrom());
        assertEquals(AddressUtils.wrapAddress(toAddr), txCall.getTo());
        assertEquals(0, txCall.getData().length);
        assertEquals(BigInteger.ZERO, txCall.getNonce());
        assertEquals(BigInteger.ZERO, txCall.getValue());
        assertEquals(NRG_TRANSACTION_DEFAULT, txCall.getNrg());
        assertEquals(nrgPrice, txCall.getNrgPrice());
    }

    // fromJSONforCall: read-only calls get an unlimited energy default
    // (Long.MAX_VALUE) regardless of the "to" field.
    @Test
    public void testTxCallfromJSONforCall() {
        long nrgPrice = 10L;
        String toAddr = "0xa076407088416d71467529d8312c24d7596f5d7db75a5c4129d2763df112b8a1";

        JSONObject tx = new JSONObject();
        tx.put("to", toAddr);
        ArgTxCall txCall = ArgTxCall.fromJSONforCall(tx, nrgPrice);

        assertNull(txCall.getFrom());
        assertEquals(AddressUtils.wrapAddress(toAddr), txCall.getTo());
        assertEquals(0, txCall.getData().length);
        assertEquals(BigInteger.ZERO, txCall.getNonce());
        assertEquals(BigInteger.ZERO, txCall.getValue());
        assertEquals(Long.MAX_VALUE, txCall.getNrg());
        assertEquals(nrgPrice, txCall.getNrgPrice());
    }

    // fromJSONforCall without "to": same unlimited energy default applies.
    @Test
    public void testTxCallContractCreatefromJSONforCall() {
        long nrgPrice = 10L;

        JSONObject tx = new JSONObject();
        ArgTxCall txCall = ArgTxCall.fromJSONforCall(tx, nrgPrice);

        assertNull(txCall.getFrom());
        assertNull(txCall.getTo());
        assertEquals(0, txCall.getData().length);
        assertEquals(BigInteger.ZERO, txCall.getNonce());
        assertEquals(BigInteger.ZERO, txCall.getValue());
        assertEquals(Long.MAX_VALUE, txCall.getNrg());
        assertEquals(nrgPrice, txCall.getNrgPrice());
    }
}
| 1,290 |
1,103 | /*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.objects.delegate;
import com.netflix.hollow.api.custom.HollowMapTypeAPI;
import com.netflix.hollow.api.objects.HollowMap;
import com.netflix.hollow.core.read.dataaccess.HollowMapTypeDataAccess;
import com.netflix.hollow.core.read.iterator.HollowMapEntryOrdinalIterator;
import com.netflix.hollow.core.schema.HollowMapSchema;
import java.util.Map;
/**
* This is the extension of the {@link HollowRecordDelegate} interface for MAP type records.
*
* @see HollowRecordDelegate
*/
public interface HollowMapDelegate<K, V> extends HollowRecordDelegate {

    /** Returns the number of entries in the map record at {@code ordinal}. */
    public int size(int ordinal);

    /** Looks up the value associated with {@code key} in the map at {@code ordinal}. */
    public V get(HollowMap<K, V> map, int ordinal, Object key);

    /** Whether the map at {@code ordinal} contains an entry for {@code key}. */
    public boolean containsKey(HollowMap<K, V> map, int ordinal, Object key);

    /** Whether the map at {@code ordinal} maps some key to {@code value}. */
    public boolean containsValue(HollowMap<K, V> map, int ordinal, Object value);

    /** Finds the key matching the given hash key fields, if any. */
    public K findKey(HollowMap<K, V> map, int ordinal, Object... hashKey);

    /** Finds the value whose key matches the given hash key fields, if any. */
    public V findValue(HollowMap<K, V> map, int ordinal, Object... hashKey);

    /** Finds the full entry whose key matches the given hash key fields, if any. */
    public Map.Entry<K, V> findEntry(HollowMap<K, V> map, int ordinal, Object... hashKey);

    /** Iterator over the entry ordinals of the map at {@code ordinal}. */
    public HollowMapEntryOrdinalIterator iterator(int ordinal);

    /** Schema describing this MAP type. */
    public HollowMapSchema getSchema();

    /** Low-level data access for this MAP type. */
    public HollowMapTypeDataAccess getTypeDataAccess();

    /** Type API for this MAP type. */
    public HollowMapTypeAPI getTypeAPI();
}
| 633 |
371 | <reponame>wuchunfu/EngineX<filename>jar-enginex-runner/src/main/java/com/baoying/enginex/executor/engine/service/EngineNodeService.java
package com.baoying.enginex.executor.engine.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.baoying.enginex.executor.engine.model.EngineNode;
import java.util.List;
public interface EngineNodeService extends IService<EngineNode> {

    /**
     * Fetches all nodes that belong to the given engine version.
     *
     * @param versionId id of the engine version
     * @return the nodes under that version
     */
    List<EngineNode> getEngineNodeListByVersionId(Long versionId);
}
| 227 |
14,668 | <filename>ash/quick_pair/feature_status_tracker/fast_pair_enabled_provider_unittest.cc
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/quick_pair/feature_status_tracker/fast_pair_enabled_provider.h"
#include <memory>
#include "ash/constants/ash_features.h"
#include "ash/quick_pair/feature_status_tracker/bluetooth_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/fake_bluetooth_adapter.h"
#include "ash/quick_pair/feature_status_tracker/fast_pair_pref_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/logged_in_user_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/mock_bluetooth_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/mock_fast_pair_pref_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/mock_google_api_key_availability_provider.h"
#include "ash/quick_pair/feature_status_tracker/mock_logged_in_user_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/mock_screen_state_enabled_provider.h"
#include "ash/quick_pair/feature_status_tracker/screen_state_enabled_provider.h"
#include "ash/test/ash_test_base.h"
#include "base/callback.h"
#include "base/memory/ptr_util.h"
#include "base/memory/scoped_refptr.h"
#include "base/test/mock_callback.h"
#include "base/test/scoped_feature_list.h"
#include "device/bluetooth/bluetooth_adapter_factory.h"
#include "testing/gtest/include/gtest/gtest-param-test.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace ash {
namespace quick_pair {
// Shared fixture: installs a FakeBluetoothAdapter as the global adapter so
// the real BluetoothEnabledProvider under test observes controllable state.
class FastPairEnabledProviderTest : public AshTestBase {
 public:
  void SetUp() override {
    AshTestBase::SetUp();
    adapter_ = base::MakeRefCounted<FakeBluetoothAdapter>();
    device::BluetoothAdapterFactory::SetAdapterForTesting(adapter_);
  }

 protected:
  scoped_refptr<FakeBluetoothAdapter> adapter_;
};
// Verifies the composite provider fires its callback with `true` when
// Bluetooth power turns on while all other sub-providers report enabled.
TEST_F(FastPairEnabledProviderTest, ProviderCallbackIsInvokedOnBTChanges) {
  base::test::ScopedFeatureList feature_list{features::kFastPair};
  base::MockCallback<base::RepeatingCallback<void(bool)>> callback;
  EXPECT_CALL(callback, Run(true));

  // All sub-providers other than Bluetooth are mocked to "enabled"; raw
  // pointers are kept for ON_CALL setup and ownership is transferred to the
  // composite provider via base::WrapUnique below.
  auto* fast_pair_pref_enabled_provider = new MockFastPairPrefEnabledProvider();
  ON_CALL(*fast_pair_pref_enabled_provider, is_enabled)
      .WillByDefault(testing::Return(true));

  auto* logged_in_user_enabled_provider = new MockLoggedInUserEnabledProvider();
  ON_CALL(*logged_in_user_enabled_provider, is_enabled)
      .WillByDefault(testing::Return(true));

  auto* screen_state_enabled_provider = new MockScreenStateEnabledProvider();
  ON_CALL(*screen_state_enabled_provider, is_enabled)
      .WillByDefault(testing::Return(true));

  auto* google_api_key_availability_provider =
      new MockGoogleApiKeyAvailabilityProvider();
  ON_CALL(*google_api_key_availability_provider, is_enabled)
      .WillByDefault(testing::Return(true));

  auto provider = std::make_unique<FastPairEnabledProvider>(
      std::make_unique<BluetoothEnabledProvider>(),
      base::WrapUnique(fast_pair_pref_enabled_provider),
      base::WrapUnique(logged_in_user_enabled_provider),
      base::WrapUnique(screen_state_enabled_provider),
      base::WrapUnique(google_api_key_availability_provider));

  provider->SetCallback(callback.Get());

  // Simulate the adapter powering on; this should propagate to the callback.
  adapter_->NotifyPoweredChanged(true);
}
// Represents: <is_flag_enabled, is_bt_enabled, is_pref_enabled,
//             is_user_logged_in, is_screen_state_on,
//             is_google_api_keys_available>
using TestParam = std::tuple<bool, bool, bool, bool, bool, bool>;

// Parameterized fixture: each TestParam combination independently toggles
// one of the six enablement inputs.
class FastPairEnabledProviderTestWithParams
    : public FastPairEnabledProviderTest,
      public testing::WithParamInterface<TestParam> {};
// The composite provider must report enabled iff ALL six inputs (feature
// flag plus five sub-providers) are enabled.
TEST_P(FastPairEnabledProviderTestWithParams, IsEnabledWhenExpected) {
  bool is_flag_enabled = std::get<0>(GetParam());
  bool is_bt_enabled = std::get<1>(GetParam());
  bool is_pref_enabled = std::get<2>(GetParam());
  bool is_user_logged_in = std::get<3>(GetParam());
  bool is_screen_state_on = std::get<4>(GetParam());
  bool is_google_api_keys_available = std::get<5>(GetParam());

  base::test::ScopedFeatureList feature_list;
  feature_list.InitWithFeatureState(features::kFastPair, is_flag_enabled);

  // Raw pointers are configured via ON_CALL, then ownership moves into the
  // FastPairEnabledProvider below.
  auto* bluetooth_enabled_provider = new MockBluetoothEnabledProvider();
  ON_CALL(*bluetooth_enabled_provider, is_enabled)
      .WillByDefault(testing::Return(is_bt_enabled));

  auto* fast_pair_pref_enabled_provider = new MockFastPairPrefEnabledProvider();
  ON_CALL(*fast_pair_pref_enabled_provider, is_enabled)
      .WillByDefault(testing::Return(is_pref_enabled));

  auto* logged_in_user_enabled_provider = new MockLoggedInUserEnabledProvider();
  ON_CALL(*logged_in_user_enabled_provider, is_enabled)
      .WillByDefault(testing::Return(is_user_logged_in));

  auto* screen_state_enabled_provider = new MockScreenStateEnabledProvider();
  ON_CALL(*screen_state_enabled_provider, is_enabled)
      .WillByDefault(testing::Return(is_screen_state_on));

  auto* google_api_key_availability_provider =
      new MockGoogleApiKeyAvailabilityProvider();
  ON_CALL(*google_api_key_availability_provider, is_enabled)
      .WillByDefault(testing::Return(is_google_api_keys_available));

  auto provider = std::make_unique<FastPairEnabledProvider>(
      std::unique_ptr<BluetoothEnabledProvider>(bluetooth_enabled_provider),
      std::unique_ptr<FastPairPrefEnabledProvider>(
          fast_pair_pref_enabled_provider),
      std::unique_ptr<LoggedInUserEnabledProvider>(
          logged_in_user_enabled_provider),
      std::unique_ptr<ScreenStateEnabledProvider>(
          screen_state_enabled_provider),
      base::WrapUnique(google_api_key_availability_provider));

  // Expected result: logical AND of all six inputs.
  bool all_are_enabled = is_flag_enabled && is_bt_enabled && is_pref_enabled &&
                         is_user_logged_in && is_screen_state_on &&
                         is_google_api_keys_available;

  EXPECT_EQ(provider->is_enabled(), all_are_enabled);
}
// Exercise every combination of the six boolean inputs (2^6 = 64 cases).
INSTANTIATE_TEST_SUITE_P(FastPairEnabledProviderTestWithParams,
                         FastPairEnabledProviderTestWithParams,
                         testing::Combine(testing::Bool(),
                                          testing::Bool(),
                                          testing::Bool(),
                                          testing::Bool(),
                                          testing::Bool(),
                                          testing::Bool()));
} // namespace quick_pair
} // namespace ash
| 2,514 |
1,210 | <gh_stars>1000+
// Copyright (c) 2011 <NAME>
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
#if !defined(BOOST_SPIRIT_REPOSITORY_QI_ADVANCE_JAN_23_2011_1203PM)
#define BOOST_SPIRIT_REPOSITORY_QI_ADVANCE_JAN_23_2011_1203PM
#include <boost/spirit/include/qi_parse.hpp>
///////////////////////////////////////////////////////////////////////////////
// definition the place holder
namespace boost { namespace spirit { namespace repository { namespace qi
{
    // Declares the `advance` terminal placeholder (and its lazy variant)
    // used to spell advance(n) in parser expressions.
    BOOST_SPIRIT_TERMINAL_EX(advance);
}}}}
///////////////////////////////////////////////////////////////////////////////
// implementation the enabler
namespace boost { namespace spirit
{
    // Enable advance(n) for any integral or enum argument type.
    template <typename A0>
    struct use_terminal<qi::domain
        , terminal_ex<repository::qi::tag::advance, fusion::vector1<A0> > >
      : mpl::or_<is_integral<A0>, is_enum<A0> >
    {};

    // Enable the lazy form advance(ref(n)) with one argument.
    template <>
    struct use_lazy_terminal<qi::domain, repository::qi::tag::advance, 1>
      : mpl::true_
    {};
}}
///////////////////////////////////////////////////////////////////////////////
// implementation of the parser
namespace boost { namespace spirit { namespace repository { namespace qi
{
    // Parser that consumes exactly `dist` input positions without producing
    // an attribute; fails if fewer than `dist` positions remain.
    template <typename Int>
    struct advance_parser
      : boost::spirit::qi::primitive_parser< advance_parser<Int> >
    {
        // Define the attribute type exposed by this parser component
        template <typename Context, typename Iterator>
        struct attribute
        {
            typedef boost::spirit::unused_type type;
        };

        advance_parser(Int dist)
          : dist(dist)
        {}

        // This function is called during the actual parsing process
        template <typename Iterator, typename Context
          , typename Skipper, typename Attribute>
        bool parse(Iterator& first, Iterator const& last
          , Context&, Skipper&, Attribute&) const
        {
            // This series of checks is designed to fail parsing on negative
            // values, without generating a "expression always evaluates true"
            // warning on unsigned types.
            if (dist == Int(0)) return true;
            if (dist < Int(1)) return false;

            // Dispatch on the iterator category so random-access iterators
            // get the O(1) overload below.
            typedef typename std::iterator_traits<Iterator>::iterator_category
                iterator_category;
            return advance(first, last, iterator_category());
        }

        // This function is called during error handling to create
        // a human readable string for the error context.
        template <typename Context>
        boost::spirit::info what(Context&) const
        {
            return boost::spirit::info("advance");
        }

    private:
        // this is the general implementation used by most iterator categories
        // (advances one step at a time, checking for end-of-input; only
        // commits to `first` after the full distance succeeds)
        template <typename Iterator, typename IteratorCategory>
        bool advance(Iterator& first, Iterator const& last
          , IteratorCategory) const
        {
            Int n = dist;
            Iterator i = first;
            while (n)
            {
                if (i == last) return false;
                ++i;
                --n;
            }
            first = i;
            return true;
        }

        // this is a specialization for random access iterators
        // (single jump plus one bounds check)
        template <typename Iterator>
        bool advance(Iterator& first, Iterator const& last
          , std::random_access_iterator_tag) const
        {
            Iterator const it = first + dist;
            if (it > last) return false;
            first = it;
            return true;
        }

        Int const dist;  // number of positions to skip; immutable
    };
}}}}
///////////////////////////////////////////////////////////////////////////////
// instantiation of the parser
namespace boost { namespace spirit { namespace qi
{
    // Builds an advance_parser from the advance(n) terminal expression,
    // extracting the distance from the terminal's argument pack.
    template <typename Modifiers, typename A0>
    struct make_primitive<
        terminal_ex<repository::qi::tag::advance, fusion::vector1<A0> >
      , Modifiers>
    {
        typedef repository::qi::advance_parser<A0> result_type;

        template <typename Terminal>
        result_type operator()(Terminal const& term, unused_type) const
        {
            return result_type(fusion::at_c<0>(term.args));
        }
    };
}}}
#endif
| 1,703 |
677 | <reponame>InfiniteSynthesis/lynx-native
#include "gl/canvas/lynx_shader.h"
namespace canvas {
// Returns the GLSL vertex shader source: forwards position, texture
// coordinate and per-vertex color straight through to the fragment stage.
// NOTE: the string is runtime data consumed by the GL compiler — do not
// reformat it.
const char* LxVertexShaderPos::GetShaderString() {
  return "\
      attribute vec4 a_position; \
      attribute vec2 a_texCoord; \
      attribute vec4 a_color; \
      varying vec2 v_texCoord; \
      varying vec4 v_color; \
      void main() { \
        v_color = a_color; \
        v_texCoord = a_texCoord; \
        gl_Position = a_position; \
      } \
      ";
}
// No per-program setup needed for this shader (attributes are bound
// elsewhere); intentionally empty.
void LxVertexShaderPos::Initialize(GLuint program) {
}
// Returns the GLSL fragment shader source: paints each fragment with the
// interpolated vertex color (v_texCoord is declared but unused here).
// NOTE: the string is runtime data consumed by the GL compiler — do not
// reformat it.
const char* LxFragmentShaderColor::GetShaderString() {
  return "\
      precision mediump float; \
      varying vec2 v_texCoord; \
      varying vec4 v_color; \
      void main() { \
        gl_FragColor = v_color; \
      } \
      ";
}
// No per-program setup needed for this shader; intentionally empty.
void LxFragmentShaderColor::Initialize(GLuint program) {
}
} | 351 |
338 | {
"Smartfren Andromax AD6B1H": {
"type": "mobile-phone",
"properties": {
"Device_Name": "C3",
"Device_Code_Name": "AD6B1H",
"Device_Maker": "Smartfren Andromax",
"Device_Pointing_Method": "touchscreen",
"Device_Brand_Name": "Harga"
},
"standard": false
}
}
| 146 |
1,039 | <reponame>thmclellan/PDF-Writer
/*
Source File : InputStringStream.h
Copyright 2016 <NAME> PDFWriter
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#pragma once
#include "IByteReaderWithPosition.h"
#include <string>
// Seekable byte reader backed by an in-memory std::string. The stream keeps
// iterators into the assigned string, so the string passed to Assign()/the
// constructor must outlive this object.
class InputStringStream : public IByteReaderWithPosition
{
public:
	InputStringStream();
	InputStringStream(const std::string& inString);
	~InputStringStream(void);

	// Rebinds the stream to inString and resets the read position to the start.
	void Assign(const std::string& inString);

	// IByteReaderWithPosition implementation
	virtual LongBufferSizeType Read(IOBasicTypes::Byte* inBuffer, LongBufferSizeType inBufferSize);
	virtual bool NotEnded();
	virtual void Skip(LongBufferSizeType inSkipSize);
	virtual void SetPosition(LongFilePositionType inOffsetFromStart);
	virtual void SetPositionFromEnd(LongFilePositionType inOffsetFromEnd);
	virtual LongFilePositionType GetCurrentPosition();

private:
	// Iterators delimiting the backing string and the current read cursor.
	std::string::const_iterator mStartPosition;
	std::string::const_iterator mEndPosition;
	std::string::const_iterator mCurrentPosition;
};
| 403 |
389 | from enum import Enum
class Color(str, Enum):
    """String-valued color enum.

    Subclassing ``str`` lets members compare equal to their plain string
    values (e.g. ``Color.green == "green"``) and serialize naturally.
    """
    green = "green"
    red = "red"
    yellow = "yellow"
| 43 |
348 | {"nom":"Saint-Palais","circ":"11ème circonscription","dpt":"Gironde","inscrits":398,"abs":220,"votants":178,"blancs":23,"nuls":1,"exp":154,"res":[{"nuance":"REM","nom":"<NAME>","voix":91},{"nuance":"FN","nom":"<NAME>","voix":63}]} | 91 |
995 | // Copyright 2004-present Facebook. All Rights Reserved.
#pragma once
#include <fizz/client/PskCache.h>
#include <fizz/protocol/Factory.h>
#include <fizz/protocol/OpenSSLFactory.h>
namespace fizz {
namespace client {

// Serializes a cached TLS 1.3 PSK into an opaque string — presumably for
// persistence in a PskCache (see PskCache.h); confirm the intended storage.
std::string serializePsk(const fizz::client::CachedPsk& psk);

// Reconstructs a CachedPsk previously produced by serializePsk(). The
// factory supplies the primitives needed to rebuild the cached state.
fizz::client::CachedPsk deserializePsk(
    const std::string& str,
    const fizz::Factory& factory);

} // namespace client
} // namespace fizz
| 152 |
17,702 | # ==============================================================================
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
import numpy as np
import os
import sys
import subprocess
import signal
import re
import pytest
from cntk.ops.tests.ops_test_utils import cntk_device
from cntk.cntk_py import DeviceKind_GPU
abs_path = os.path.dirname(os.path.abspath(__file__))
example_dir = os.path.join(abs_path, '..', '..', '..', '..', 'Examples', 'Text', 'LightRNN', 'LightRNN')
sys.path.append(abs_path)
sys.path.append(example_dir)
script_under_test = os.path.join(example_dir, 'train.py')
TIMEOUT_SECONDS = 300
TOLERANCE_ABSOLUTE = 1E-1
def run_command(**kwargs):
    """Build the mpiexec command line for the LightRNN training script.

    Each keyword argument becomes a ``-<name> <value>`` option pair appended
    after the fixed ``mpiexec -n 1 python <script>`` prefix.
    """
    args = ['mpiexec', '-n', '1', 'python', script_under_test]
    for option, setting in kwargs.items():
        args.extend(['-' + option, str(setting)])
    return args
def test_lightrnn(device_id):
    """End-to-end check of the LightRNN example: trains one epoch under
    mpiexec on GPU and compares the reported validation/test errors against
    known-good values within TOLERANCE_ABSOLUTE."""
    if cntk_device(device_id).type() != DeviceKind_GPU:
        pytest.skip('test only runs on GPU')
    # Reference errors after one epoch on the bundled test corpus.
    expected_valid_error = 7.251514
    expected_test_error = 7.305801
    command = run_command(datadir=os.path.join(example_dir, '..', 'test'),
                          outputdir=os.path.join(example_dir, '..', 'LightRNN'),
                          vocabdir=os.path.join(example_dir, '..', 'test'),
                          vocab_file=os.path.join(example_dir, '..', 'test', 'vocab.txt'),
                          alloc_file=os.path.join(example_dir, '..', 'test', 'word-0.location'),
                          vocabsize=1566,
                          optim='adam', lr=0.20,
                          embed=500, nhid=500, batchsize=20, layer=2,
                          epochs=1)
    p = subprocess.Popen(command, stdout=subprocess.PIPE)
    if sys.version_info[0] < 3:
        # Python 2's communicate() has no timeout parameter.
        out = p.communicate()[0]
    else:
        try:
            out = p.communicate(timeout=TIMEOUT_SECONDS)[0]  # in case we have a hang
        except subprocess.TimeoutExpired:
            # NOTE(review): CTRL_C_EVENT is Windows-only — confirm this test
            # is only expected to run on Windows, or use p.terminate().
            os.kill(p.pid, signal.CTRL_C_EVENT)
            raise RuntimeError('Timeout in mpiexec, possibly hang')
    str_out = out.decode(sys.getdefaultencoding())
    # Scrape the epoch-1 summary line printed by train.py.
    results = re.findall("Epoch 1 Done : Valid error = (.+), Test error = (.+)", str_out)
    results = results[0]
    assert len(results) == 2
    assert np.allclose([float(results[0]), float(results[1])], [expected_valid_error, expected_test_error], atol=TOLERANCE_ABSOLUTE)
| 1,095 |
3,495 | /*
*
* Copyright (c) 2020 Project CHIP Authors
* Copyright (c) 2018 Nest Labs, Inc.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @file Button.h
*
* Describes a Button tied to a GPIO that provides debouncing and polling APIs
*
**/
#pragma once
#include "driver/gpio.h"
#include "esp_log.h"
#include "esp_system.h"
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
// Debounced GPIO button. Poll() must be called periodically; all "duration"
// accessors convert internal FreeRTOS tick counts to milliseconds.
class Button
{
public:
    /**
     * @brief
     *  Initialize a button
     *
     * @param gpioNum           The GPIO pin this button should keep track of
     * @param debouncePeriod    The debouncing period in FreeRTOS ticks
     * @return esp_err_t
     */
    esp_err_t Init(gpio_num_t gpioNum, uint16_t debouncePeriod);
    /**
     * @brief
     *  Poll on the button and read its current state
     *
     * @return true     If a button event occurred
     * @return false    If no button event occurred
     */
    bool Poll();
    /**
     * @brief
     *  Returns the state of the button
     *
     * @return true     If the button is pressed
     * @return false    If the button is not pressed or released if poll() is true.
     */
    bool IsPressed();
    /**
     * @brief
     *  Get the time timestamp since the button entered its current state
     *
     * @return uint32_t The time in milliseconds since the app started
     */
    uint32_t GetStateStartTime();
    /**
     * @brief
     *  Get the duration in milliseconds since the button entered its current state
     *
     * @return uint32_t The time in milliseconds
     */
    uint32_t GetStateDuration();
    /**
     * @brief
     *  Get the duration in milliseconds the button spent in its previous state
     *
     * @return uint32_t The time in milliseconds
     */
    uint32_t GetPrevStateDuration();

private:
    // in ticks; last time the GPIO was sampled (debounce reference)
    uint32_t mLastReadTime;
    // in ticks; when the current state began
    uint32_t mStateStartTime;
    // in ticks; how long the previous state lasted
    uint32_t mPrevStateDur;
    gpio_num_t mGPIONum;
    // in ticks; samples closer together than this are ignored
    uint16_t mDebouncePeriod;
    // true when button is pressed
    bool mState;
    // state returned by the previous Poll(), used to detect edges
    bool mLastPolledState;
};
// Returns the debounced pressed/released state captured by Poll().
inline bool Button::IsPressed()
{
    return mState;
}
// Converts the tick timestamp of the current state's start to milliseconds.
inline uint32_t Button::GetStateStartTime()
{
    return mStateStartTime * portTICK_PERIOD_MS;
}
// Converts the previous state's duration from ticks to milliseconds.
inline uint32_t Button::GetPrevStateDuration()
{
    return mPrevStateDur * portTICK_PERIOD_MS;
}
| 1,084 |
571 | /**
* @file copyMakeBorder.cpp
* @brief mex interface for cv::copyMakeBorder
* @ingroup core
* @author Amro
* @date 2015
*/
#include "mexopencv.hpp"
using namespace std;
using namespace cv;
/**
* Main entry called from Matlab
* @param nlhs number of left-hand-side arguments
* @param plhs pointers to mxArrays in the left-hand-side
* @param nrhs number of right-hand-side arguments
* @param prhs pointers to mxArrays in the right-hand-side
*/
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
    // Check the number of arguments
    nargchk(nrhs>=2 && nlhs<=1);

    // Argument vector: (src, t, b, l, r) or (src, [t b l r])
    // The 4-element-vector form is detected by the numel of the 2nd arg;
    // the two forms imply different parities for the trailing key/value
    // option pairs, checked below.
    vector<MxArray> rhs(prhs, prhs+nrhs);
    bool vect_variant = (rhs[1].numel() == 4);
    nargchk(vect_variant ? ((nrhs%2)==0) : (nrhs>=5 && (nrhs%2)!=0));

    // Option processing
    int borderType = cv::BORDER_DEFAULT;
    bool isolated = false;  // TODO: only makes sense for ROI submatrices
    Scalar value;           // fill color for BORDER_CONSTANT
    for (int i=(vect_variant ? 2 : 5); i<nrhs; i+=2) {
        string key(rhs[i].toString());
        if (key == "BorderType")
            borderType = BorderType[rhs[i+1].toString()];
        else if (key == "Isolated")
            isolated = rhs[i+1].toBool();
        else if (key == "Value")
            value = rhs[i+1].toScalar();
        else
            mexErrMsgIdAndTxt("mexopencv:error",
                "Unrecognized option %s", key.c_str());
    }
    // BORDER_ISOLATED is a flag OR-ed into the border type.
    borderType |= (isolated ? cv::BORDER_ISOLATED : 0);

    // Process
    Mat src(rhs[0].toMat()), dst;
    int top, bottom, left, right;
    if (vect_variant) {
        // Margins given as a single [t b l r] vector.
        vector<int> v(rhs[1].toVector<int>());
        nargchk(v.size() == 4);
        top = v[0];
        bottom = v[1];
        left = v[2];
        right = v[3];
    }
    else {
        // Margins given as four scalar arguments.
        top = rhs[1].toInt();
        bottom = rhs[2].toInt();
        left = rhs[3].toInt();
        right = rhs[4].toInt();
    }
    copyMakeBorder(src, dst, top, bottom, left, right, borderType, value);
    plhs[0] = MxArray(dst);
}
| 957 |
package net.poc.model;

import java.util.Objects;
/**
 * Immutable value object representing a note with a title, a description and
 * an optional "has picture" flag.
 *
 * <p>Equality and hash code are based on {@code title} and {@code description}
 * only; {@code withPicture} is excluded, matching the original behavior —
 * two notes with the same text compare equal regardless of the picture flag.
 * NOTE(review): confirm this exclusion is intentional.
 */
public class Note {
    private final String title;
    private final String description;
    private final boolean withPicture;

    /** Returns whether this note carries a picture. */
    public boolean getWithPicture() {
        return withPicture;
    }

    /** Returns the note title. */
    public String getTitle() {
        return title;
    }

    /** Returns the note description. */
    public String getDescription() {
        return description;
    }

    /** Creates an empty note ({@code ""} title/description, no picture). */
    public Note() {
        this("", "", false);
    }

    /**
     * Creates a note.
     *
     * @param title the note title
     * @param description the note body
     * @param withPicture whether the note carries a picture
     */
    public Note(String title, String description, boolean withPicture) {
        this.title = title;
        this.description = description;
        this.withPicture = withPicture;
    }

    /** Creates a note without a picture. */
    public Note(String title, String description) {
        this(title, description, false);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Note note = (Note) o;
        // Objects.equals handles null fields without explicit juggling.
        return Objects.equals(title, note.title)
                && Objects.equals(description, note.description);
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): only title and description participate.
        return Objects.hash(title, description);
    }

    /** Fluent builder for {@link Note}; all fields default to empty/false. */
    public static class NoteBuilder {
        private String title = "";
        private String description = "";
        private boolean withPicture = false;

        public NoteBuilder() {
        }

        /** Sets the title. */
        public NoteBuilder called(String title) {
            this.title = title;
            return this;
        }

        /** Sets the description. */
        public NoteBuilder withDescription(String description) {
            this.description = description;
            return this;
        }

        /** Marks the note as carrying a picture. */
        public NoteBuilder withPicture() {
            this.withPicture = true;
            return this;
        }

        /** Builds the immutable note. */
        public Note build() {
            return new Note(title, description, withPicture);
        }
    }
}
| 838 |
628 | <gh_stars>100-1000
#include "basic_auth.h"
#include <utility>
#include "CivetServer.h"
#include "detail/base64.h"
namespace prometheus {
// Stores the credential-checking callback and the realm advertised in the
// WWW-Authenticate challenge.
BasicAuthHandler::BasicAuthHandler(AuthFunc callback, std::string realm)
    : callback_(std::move(callback)), realm_(std::move(realm)) {}
// Entry point called per request: delegates the actual credential check to
// AuthorizeInner() and, on failure, emits a 401 challenge before refusing.
bool BasicAuthHandler::authorize(CivetServer* server, mg_connection* conn) {
  const bool authorized = AuthorizeInner(server, conn);
  if (authorized) {
    return true;
  }
  WriteUnauthorizedResponse(conn);
  return false;
}
// Parses the "Authorization: Basic <base64>" header, splits the decoded
// "username:password" pair and hands it to the user-supplied callback.
// Returns false on any malformed or missing header.
bool BasicAuthHandler::AuthorizeInner(CivetServer*, mg_connection* conn) {
  const char* authHeader = mg_get_header(conn, "Authorization");

  if (authHeader == nullptr) {
    // No auth header was provided.
    return false;
  }
  std::string authHeaderStr = authHeader;

  // Basic auth header is expected to be of the form:
  // "Basic dXNlcm5hbWU6cGFzc3dvcmQ="
  const std::string prefix = "Basic ";
  if (authHeaderStr.compare(0, prefix.size(), prefix) != 0) {
    return false;
  }

  // Strip the "Basic " prefix leaving the base64 encoded auth string
  auto b64Auth = authHeaderStr.substr(prefix.size());

  std::string decoded;
  try {
    decoded = detail::base64_decode(b64Auth);
  } catch (...) {
    // Invalid base64 — treat as unauthorized rather than propagating.
    return false;
  }

  // decoded auth string is expected to be of the form:
  // "username:password"
  // colons may not appear in the username.
  auto splitPos = decoded.find(':');
  if (splitPos == std::string::npos) {
    return false;
  }

  auto username = decoded.substr(0, splitPos);
  auto password = decoded.substr(splitPos + 1);

  // TODO: bool does not permit a distinction between 401 Unauthorized
  // and 403 Forbidden. Authentication may succeed, but the user still
  // not be authorized to perform the request.
  return callback_(username, password);
}
// Writes a complete, body-less 401 response carrying the Basic challenge for
// this handler's realm.
void BasicAuthHandler::WriteUnauthorizedResponse(mg_connection* conn) {
  mg_printf(conn, "HTTP/1.1 401 Unauthorized\r\n");
  mg_printf(conn, "WWW-Authenticate: Basic realm=\"%s\"\r\n", realm_.c_str());
  mg_printf(conn, "Connection: close\r\n");
  mg_printf(conn, "Content-Length: 0\r\n");
  // end headers
  mg_printf(conn, "\r\n");
}
| 722 |
720 | <reponame>choco9966/ttach<filename>ttach/transforms.py<gh_stars>100-1000
from functools import partial
from typing import Optional, List, Union, Tuple
from . import functional as F
from .base import DualTransform, ImageOnlyTransform
class HorizontalFlip(DualTransform):
    """Flip images horizontally (left->right).

    Parameterized over ``apply`` in {False, True}; False is the identity.
    Masks and keypoints are de-augmented with the same (self-inverse) flip,
    labels are returned unchanged.
    """

    identity_param = False

    def __init__(self):
        super().__init__("apply", [False, True])

    def apply_aug_image(self, image, apply=False, **kwargs):
        # hflip is its own inverse, so the same op serves aug and de-aug.
        if apply:
            image = F.hflip(image)
        return image

    def apply_deaug_mask(self, mask, apply=False, **kwargs):
        if apply:
            mask = F.hflip(mask)
        return mask

    def apply_deaug_label(self, label, apply=False, **kwargs):
        # Flipping does not alter classification labels.
        return label

    def apply_deaug_keypoints(self, keypoints, apply=False, **kwargs):
        if apply:
            keypoints = F.keypoints_hflip(keypoints)
        return keypoints
class VerticalFlip(DualTransform):
    """Flip images vertically (up->down).

    Parameterized over ``apply`` in {False, True}; False is the identity.
    Masks and keypoints are de-augmented with the same (self-inverse) flip,
    labels are returned unchanged.
    """

    identity_param = False

    def __init__(self):
        super().__init__("apply", [False, True])

    def apply_aug_image(self, image, apply=False, **kwargs):
        # vflip is its own inverse, so the same op serves aug and de-aug.
        if apply:
            image = F.vflip(image)
        return image

    def apply_deaug_mask(self, mask, apply=False, **kwargs):
        if apply:
            mask = F.vflip(mask)
        return mask

    def apply_deaug_label(self, label, apply=False, **kwargs):
        return label

    def apply_deaug_keypoints(self, keypoints, apply=False, **kwargs):
        if apply:
            keypoints = F.keypoints_vflip(keypoints)
        return keypoints
class Rotate90(DualTransform):
    """Rotate images 0/90/180/270 degrees

    Args:
        angles (list): angles to rotate images
    """

    identity_param = 0

    def __init__(self, angles: List[int]):
        # Make sure the identity angle (0) is always available.
        if self.identity_param not in angles:
            angles = [self.identity_param] + list(angles)
        super().__init__("angle", angles)

    def apply_aug_image(self, image, angle=0, **kwargs):
        # Convert the angle to a count of 90-degree steps, mapping negative
        # angles into [0, 360) first.
        k = angle // 90 if angle >= 0 else (angle + 360) // 90
        return F.rot90(image, k)

    def apply_deaug_mask(self, mask, angle=0, **kwargs):
        # De-augmentation is rotation by the opposite angle.
        return self.apply_aug_image(mask, -angle)

    def apply_deaug_label(self, label, angle=0, **kwargs):
        return label

    def apply_deaug_keypoints(self, keypoints, angle=0, **kwargs):
        # Keypoints are de-augmented with the inverse rotation.
        angle *= -1
        k = angle // 90 if angle >= 0 else (angle + 360) // 90
        return F.keypoints_rot90(keypoints, k=k)
class Scale(DualTransform):
    """Scale images

    Args:
        scales (List[Union[int, float]]): scale factors for spatial image dimensions
        interpolation (str): one of "nearest"/"linear" (see more in torch.nn.interpolate)
        align_corners (bool): see more in torch.nn.interpolate
    """

    identity_param = 1

    def __init__(
        self,
        scales: List[Union[int, float]],
        interpolation: str = "nearest",
        align_corners: Optional[bool] = None,
    ):
        # Make sure the identity scale (1) is always available.
        if self.identity_param not in scales:
            scales = [self.identity_param] + list(scales)
        self.interpolation = interpolation
        self.align_corners = align_corners

        super().__init__("scale", scales)

    def apply_aug_image(self, image, scale=1, **kwargs):
        if scale != self.identity_param:
            image = F.scale(
                image,
                scale,
                interpolation=self.interpolation,
                align_corners=self.align_corners,
            )
        return image

    def apply_deaug_mask(self, mask, scale=1, **kwargs):
        # Undo the spatial scaling with the reciprocal factor.
        if scale != self.identity_param:
            mask = F.scale(
                mask,
                1 / scale,
                interpolation=self.interpolation,
                align_corners=self.align_corners,
            )
        return mask

    def apply_deaug_label(self, label, scale=1, **kwargs):
        return label

    def apply_deaug_keypoints(self, keypoints, scale=1, **kwargs):
        # Keypoints are assumed scale-invariant here (relative coordinates)
        # — NOTE(review): confirm against F.keypoints_* conventions.
        return keypoints
class Resize(DualTransform):
    """Resize images

    Args:
        sizes (List[Tuple[int, int]]): target spatial sizes
        original_size Tuple(int, int): optional, image original size for deaugmenting mask
        interpolation (str): one of "nearest"/"linear" (see more in torch.nn.interpolate)
        align_corners (bool): see more in torch.nn.interpolate
    """

    def __init__(
        self,
        sizes: List[Tuple[int, int]],
        original_size: Tuple[int, int] = None,
        interpolation: str = "nearest",
        align_corners: Optional[bool] = None,
    ):
        # The original size plays the role of the identity parameter; make
        # sure it is among the candidates when provided.
        if original_size is not None and original_size not in sizes:
            sizes = [original_size] + list(sizes)
        self.interpolation = interpolation
        self.align_corners = align_corners
        self.original_size = original_size

        super().__init__("size", sizes)

    def apply_aug_image(self, image, size, **kwargs):
        if size != self.original_size:
            image = F.resize(
                image,
                size,
                interpolation=self.interpolation,
                align_corners=self.align_corners,
            )
        return image

    def apply_deaug_mask(self, mask, size, **kwargs):
        # Without the original size there is no way to invert the resize.
        if self.original_size is None:
            raise ValueError(
                "Provide original image size to make mask backward transformation"
            )
        if size != self.original_size:
            mask = F.resize(
                mask,
                self.original_size,
                interpolation=self.interpolation,
                align_corners=self.align_corners,
            )
        return mask

    def apply_deaug_label(self, label, size=1, **kwargs):
        return label

    def apply_deaug_keypoints(self, keypoints, size=1, **kwargs):
        # Keypoints are assumed resize-invariant here (relative coordinates)
        # — NOTE(review): confirm against F.keypoints_* conventions.
        return keypoints
class Add(ImageOnlyTransform):
    """Add a constant value to every pixel of the image.

    Args:
        values (List[float]): values to add to each pixel
    """

    identity_param = 0

    def __init__(self, values: List[float]):
        params = list(values)
        # Guarantee the identity value (0) is among the parameters.
        if self.identity_param not in params:
            params.insert(0, self.identity_param)
        super().__init__("value", params)

    def apply_aug_image(self, image, value=0, **kwargs):
        # Skip the no-op case entirely.
        return image if value == self.identity_param else F.add(image, value)
class Multiply(ImageOnlyTransform):
    """Multiply every pixel of the image by a constant factor.

    Args:
        factors (List[float]): factor to multiply each pixel by
    """

    identity_param = 1

    def __init__(self, factors: List[float]):
        params = list(factors)
        # Guarantee the identity factor (1) is among the parameters.
        if self.identity_param not in params:
            params.insert(0, self.identity_param)
        super().__init__("factor", params)

    def apply_aug_image(self, image, factor=1, **kwargs):
        # Skip the no-op case entirely.
        return image if factor == self.identity_param else F.multiply(image, factor)
class FiveCrops(ImageOnlyTransform):
    """Makes 4 crops for each corner + center crop

    Args:
        crop_height (int): crop height in pixels
        crop_width (int): crop width in pixels
    """

    def __init__(self, crop_height, crop_width):
        # One partial per crop position; the crop function itself is the
        # transform parameter.
        crop_functions = (
            partial(F.crop_lt, crop_h=crop_height, crop_w=crop_width),
            partial(F.crop_lb, crop_h=crop_height, crop_w=crop_width),
            partial(F.crop_rb, crop_h=crop_height, crop_w=crop_width),
            partial(F.crop_rt, crop_h=crop_height, crop_w=crop_width),
            partial(F.center_crop, crop_h=crop_height, crop_w=crop_width),
        )
        super().__init__("crop_fn", crop_functions)

    def apply_aug_image(self, image, crop_fn=None, **kwargs):
        return crop_fn(image)

    def apply_deaug_mask(self, mask, **kwargs):
        # Cropping is not invertible, so masks cannot be de-augmented.
        raise ValueError("`FiveCrop` augmentation is not suitable for mask!")

    def apply_deaug_keypoints(self, keypoints, **kwargs):
        raise ValueError("`FiveCrop` augmentation is not suitable for keypoints!")
| 3,526 |
16,461 | <gh_stars>1000+
// Copyright 2018-present 650 Industries. All rights reserved.
#import <ABI43_0_0ExpoModulesCore/ABI43_0_0EXExportedModule.h>
#import <ABI43_0_0ExpoModulesCore/ABI43_0_0EXModuleRegistryConsumer.h>
#import <ABI43_0_0ExpoModulesCore/ABI43_0_0EXEventEmitter.h>
#import <ABI43_0_0ExpoModulesCore/ABI43_0_0EXEventEmitterService.h>
@import AuthenticationServices;
// Versioned (ABI43_0_0) Expo module exposing Sign in with Apple; consumes the
// module registry and emits events through the EXEventEmitter protocol.
@interface ABI43_0_0EXAppleAuthentication : ABI43_0_0EXExportedModule <ABI43_0_0EXModuleRegistryConsumer, ABI43_0_0EXEventEmitter>
@end
| 212 |
2,667 | <reponame>TaylorWenOne/viabtc_exchange_server<gh_stars>1000+
/*
* Description:
* History: <EMAIL>, 2016/04/17, create
*/
# include <stdlib.h>
# include <unistd.h>
# include <assert.h>
# include "nw_job.h"
# include "nw_sock.h"
/* Per-worker startup argument: the owning job plus the worker's private
 * state produced by job->type.on_init (may be NULL). Heap-allocated by
 * nw_job_create and freed by the worker itself. */
struct thread_arg {
    nw_job *job;
    void *privdata;
};
/* Worker-thread main loop: waits on the condition variable for requests,
 * runs the user callback outside the lock, then appends the finished entry
 * to the reply list and wakes the libev loop via the self-pipe. Returns the
 * worker's privdata so nw_job_release can pass it to on_release. */
static void *thread_routine(void *data)
{
    struct thread_arg *arg = data;
    nw_job *job = arg->job;
    void *privdata = arg->privdata;
    free(data);

    for (;;) {
        pthread_mutex_lock(&job->lock);
        /* Sleep until a request arrives or shutdown is signalled. */
        while ((job->request_count == 0) && (!job->shutdown)) {
            pthread_cond_wait(&job->notify, &job->lock);
        }
        if (job->shutdown) {
            /* Lock still held here; released by the unlock after the loop. */
            break;
        }
        assert(job->request_head != NULL);
        /* Pop the head of the request queue. */
        nw_job_entry *entry = job->request_head;
        job->request_head = entry->next;
        if (job->request_head) {
            job->request_head->prev = NULL;
        } else {
            job->request_tail = NULL;
        }
        job->request_count -= 1;
        pthread_mutex_unlock(&job->lock);

        /* Run the (possibly slow) user callback without holding the lock. */
        job->type.on_job(entry, privdata);

        pthread_mutex_lock(&job->lock);
        /* Append the finished entry to the reply queue. */
        if (job->reply_tail) {
            entry->prev = job->reply_tail;
            entry->next = NULL;
            job->reply_tail->next = entry;
            job->reply_tail = entry;
        } else {
            entry->prev = NULL;
            entry->next = NULL;
            job->reply_head = entry;
            job->reply_tail = entry;
        }
        job->reply_count += 1;
        /* Self-pipe trick: wake the event loop so on_can_read drains replies. */
        write(job->pipefd[1], " ", 1);
        pthread_mutex_unlock(&job->lock);
    }

    pthread_mutex_unlock(&job->lock);
    return privdata;
}
/* libev read callback on the self-pipe: drains the wakeup bytes, then moves
 * every queued reply to the caller via on_finish and frees the entry.
 * Runs on the event-loop thread. */
static void on_can_read(struct ev_loop *loop, ev_io *watcher, int events)
{
    /* The watcher is the first member of nw_job, so this cast is valid. */
    nw_job *job = (nw_job *)watcher;

    for (;;) {
        char c;
        int ret = read(job->pipefd[0], &c, 1);
        /* Non-blocking fd: a negative return (EAGAIN) means drained.
         * NOTE(review): ret == 0 (EOF) would loop forever; presumably the
         * write end is only closed after ev_io_stop — confirm. */
        if (ret < 0)
            break;
    }

    for (;;) {
        pthread_mutex_lock(&job->lock);
        if (job->reply_count == 0) {
            pthread_mutex_unlock(&job->lock);
            break;
        }
        /* Pop one reply under the lock... */
        nw_job_entry *entry = job->reply_head;
        job->reply_head = entry->next;
        if (job->reply_head) {
            job->reply_head->prev = NULL;
        } else {
            job->reply_tail = NULL;
        }
        job->reply_count -= 1;
        pthread_mutex_unlock(&job->lock);

        /* ...and deliver/cleanup outside the lock. */
        if (job->type.on_finish)
            job->type.on_finish(entry);
        if (job->type.on_cleanup)
            job->type.on_cleanup(entry);
        nw_cache_free(job->cache, entry);
    }
}
/* Releases the job's synchronization primitives, thread array and the job
 * struct itself.
 * NOTE(review): job->cache and the pipe fds are NOT released here — the
 * error paths in nw_job_create that call this after allocating them appear
 * to leak; confirm against the nw_cache API. */
static void nw_job_free(nw_job *job)
{
    pthread_mutex_destroy(&job->lock);
    pthread_cond_destroy(&job->notify);
    if (job->threads)
        free(job->threads);
    free(job);
}
/* Creates a job queue with `thread_count` worker threads, an entry cache and
 * a self-pipe registered on the default libev loop. Returns NULL on any
 * failure (partially-created state is torn down via nw_job_free /
 * nw_job_release). `type->on_job` is mandatory; `on_init` requires a
 * matching `on_release`. */
nw_job *nw_job_create(nw_job_type *type, int thread_count)
{
    if (!type->on_job)
        return NULL;
    if (type->on_init && !type->on_release)
        return NULL;

    nw_job *job = malloc(sizeof(nw_job));
    if (job == NULL)
        return NULL;
    memset(job, 0, sizeof(nw_job));
    nw_loop_init();
    job->type = *type;
    job->loop = nw_default_loop;
    if (pthread_mutex_init(&job->lock, NULL) != 0) {
        free(job);
        return NULL;
    }
    if (pthread_cond_init(&job->notify, NULL) != 0) {
        pthread_mutex_destroy(&job->lock);
        free(job);
        return NULL;
    }
    job->thread_count = thread_count;
    job->threads = calloc(job->thread_count, sizeof(pthread_t));
    if (job->threads == NULL) {
        nw_job_free(job);
        return NULL;
    }
    job->cache = nw_cache_create(sizeof(nw_job_entry));
    if (job->cache == NULL) {
        nw_job_free(job);
        return NULL;
    }
    /* Self-pipe used by workers to wake the event loop; both ends are
     * non-blocking so neither side can stall. */
    if (pipe(job->pipefd) != 0) {
        nw_job_free(job);
        return NULL;
    }
    nw_sock_set_nonblock(job->pipefd[0]);
    nw_sock_set_nonblock(job->pipefd[1]);
    ev_io_init(&job->ev, on_can_read, job->pipefd[0], EV_READ);
    ev_io_start(job->loop, &job->ev);

    /* Spawn the workers; each takes ownership of its thread_arg and of the
     * privdata produced by on_init. thread_start tracks how many actually
     * launched so nw_job_release only joins those. */
    for (int i = 0; i < job->thread_count; ++i) {
        struct thread_arg *arg = malloc(sizeof(struct thread_arg));
        if (arg == NULL) {
            nw_job_release(job);
            return NULL;
        }
        memset(arg, 0, sizeof(struct thread_arg));
        arg->job = job;
        if (job->type.on_init) {
            arg->privdata = job->type.on_init();
            if (arg->privdata == NULL) {
                nw_job_release(job);
                return NULL;
            }
        }
        if (pthread_create(&job->threads[i], NULL, thread_routine, arg) != 0) {
            nw_job_release(job);
            return NULL;
        }
        job->thread_start++;
    }

    return job;
}
/* Enqueues a request and wakes one worker. `id` and `request` are opaque to
 * the queue and handed back through the entry. Returns 0 on success, -1 if
 * an entry could not be allocated. Thread-safe. */
int nw_job_add(nw_job *job, uint32_t id, void *request)
{
    nw_job_entry *entry = nw_cache_alloc(job->cache);
    if (entry == NULL)
        return -1;
    memset(entry, 0, sizeof(nw_job_entry));
    entry->id = id;
    entry->request = request;

    pthread_mutex_lock(&job->lock);
    /* Append to the tail of the request queue. */
    if (job->request_tail) {
        entry->prev = job->request_tail;
        entry->next = NULL;
        job->request_tail->next = entry;
        job->request_tail = entry;
    } else {
        entry->prev = NULL;
        entry->next = NULL;
        job->request_head = entry;
        job->request_tail = entry;
    }
    job->request_count += 1;
    /* Wake exactly one idle worker. */
    pthread_cond_signal(&job->notify);
    pthread_mutex_unlock(&job->lock);

    return 0;
}
/* Shuts the queue down: signals all workers, joins the ones that were
 * started, hands each worker's privdata to on_release, detaches the pipe
 * watcher and frees the job. Safe to call at most once; a second concurrent
 * call returns early once `shutdown` is observed. */
void nw_job_release(nw_job *job)
{
    pthread_mutex_lock(&job->lock);
    if (job->shutdown) {
        pthread_mutex_unlock(&job->lock);
        return;
    }
    job->shutdown = true;
    /* Wake every worker so each can observe shutdown and exit. */
    pthread_cond_broadcast(&job->notify);
    pthread_mutex_unlock(&job->lock);

    for (int i = 0; i < job->thread_start; ++i) {
        void *privdata = NULL;
        if (pthread_join(job->threads[i], &privdata) != 0) {
            continue;
        }
        if (privdata != NULL && privdata != PTHREAD_CANCELED) {
            job->type.on_release(privdata);
        }
    }

    ev_io_stop(job->loop, &job->ev);
    close(job->pipefd[0]);
    close(job->pipefd[1]);
    nw_job_free(job);
}
| 3,073 |
427 | <reponame>Polidea/SiriusObfuscator<filename>SymbolExtractorAndRenamer/lldb/packages/Python/lldbsuite/test/lang/swift/get_value/TestSwiftGetValueAsUnsigned.py
# TestSwiftGetValueAsUnsigned.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""
Tests that the SBValue::GetValueAsUnsigned() API works for Swift types
"""
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.decorators as decorators
import lldbsuite.test.lldbutil as lldbutil
import unittest2
class SwiftGetValueAsUnsignedAPITest(TestBase):
    """Exercise SBValue.GetValueAsUnsigned() against several Swift types."""

    mydir = TestBase.compute_mydir(__file__)

    @decorators.swiftTest
    def test_get_value_as_unsigned_sbapi(self):
        """Tests that the SBValue::GetValueAsUnsigned() API works for Swift types"""
        self.build()
        self.getvalue_commands()

    def setUp(self):
        TestBase.setUp(self)

    def getvalue_commands(self):
        """Run to the breakpoint and check GetValueAsUnsigned() on locals."""
        self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
        lldbutil.run_break_set_by_source_regexp(
            self, r"break here", num_expected_locations=1)
        self.runCmd("run", RUN_SUCCEEDED)

        # The stop reason of the thread should be breakpoint.
        self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
                    substrs=['stopped',
                             'stop reason = breakpoint'])

        frame = self.frame()
        string = frame.FindVariable("aString")
        number = frame.FindVariable("aNumber")
        number.SetPreferSyntheticValue(True)
        classobject = frame.FindVariable("aClassObject")

        numberValue = number.GetValueAsUnsigned()
        self.assertTrue(
            numberValue == 123456,
            "Swift.Int does not have a valid value")

        # BUG FIX: the pointer checks previously compared the SBValue object
        # itself against 0 (always true, so the asserts were vacuous); extract
        # the numeric value with GetValueAsUnsigned() first, as the class
        # object check below already did.
        builtinPointerValue = string.GetChildMemberWithName(
            "str_value").GetChildMemberWithName("base").GetChildMemberWithName(
            "value").GetValueAsUnsigned()
        self.assertTrue(builtinPointerValue != 0,
                        "Builtin.RawPointer does not have a valid value")

        objectPointerValue = string.GetChildMemberWithName(
            "str_value").GetChildMemberWithName(
            "value").GetValueAsUnsigned()
        self.assertTrue(objectPointerValue != 0,
                        "Builtin.RawPointer does not have a valid value")

        classValue = classobject.GetValueAsUnsigned()
        self.assertTrue(
            classValue != 0,
            "Class types are aggregates with pointer values")
# Allow running this test file directly: initialize LLDB, arrange for it to
# be torn down at interpreter exit, then hand control to the test runner.
if __name__ == '__main__':
    import atexit
    lldb.SBDebugger.Initialize()
    # Terminate must run at exit to release debugger resources.
    atexit.register(lldb.SBDebugger.Terminate)
    unittest2.main()
| 1,186 |
344 | <reponame>PuzeLiu/mushroom-rl
from .muscle_simulation import NoExternalSimulation, MuscleSimulation
| 34 |
860 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.execution;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import org.apache.samza.application.descriptors.ApplicationDescriptorImpl;
import org.apache.samza.application.LegacyTaskApplication;
import org.apache.samza.application.StreamApplication;
import org.apache.samza.application.descriptors.StreamApplicationDescriptorImpl;
import org.apache.samza.application.TaskApplication;
import org.apache.samza.config.Config;
import org.apache.samza.config.JobConfig;
import org.apache.samza.config.MapConfig;
import org.apache.samza.system.descriptors.GenericInputDescriptor;
import org.apache.samza.system.descriptors.GenericOutputDescriptor;
import org.apache.samza.system.descriptors.GenericSystemDescriptor;
import org.apache.samza.operators.KV;
import org.apache.samza.operators.MessageStream;
import org.apache.samza.operators.OutputStream;
import org.apache.samza.operators.functions.JoinFunction;
import org.apache.samza.serializers.JsonSerdeV2;
import org.apache.samza.serializers.KVSerde;
import org.apache.samza.serializers.Serde;
import org.apache.samza.serializers.StringSerde;
import org.apache.samza.task.IdentityStreamTask;
import org.junit.Before;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
/**
* Unit test base class to set up commonly used test application and configuration.
*/
class ExecutionPlannerTestBase {
protected StreamApplicationDescriptorImpl mockStreamAppDesc;
protected Config mockConfig;
protected JobNode mockJobNode;
protected KVSerde<String, Object> defaultSerde;
protected GenericSystemDescriptor inputSystemDescriptor;
protected GenericSystemDescriptor outputSystemDescriptor;
protected GenericSystemDescriptor intermediateSystemDescriptor;
protected GenericInputDescriptor<KV<String, Object>> input1Descriptor;
protected GenericInputDescriptor<KV<String, Object>> input2Descriptor;
protected GenericInputDescriptor<KV<String, Object>> intermediateInputDescriptor;
protected GenericInputDescriptor<KV<String, Object>> broadcastInputDesriptor;
protected GenericOutputDescriptor<KV<String, Object>> outputDescriptor;
protected GenericOutputDescriptor<KV<String, Object>> intermediateOutputDescriptor;
@Before
public void setUp() {
defaultSerde = KVSerde.of(new StringSerde(), new JsonSerdeV2<>());
inputSystemDescriptor = new GenericSystemDescriptor("input-system", "mockSystemFactoryClassName");
outputSystemDescriptor = new GenericSystemDescriptor("output-system", "mockSystemFactoryClassName");
intermediateSystemDescriptor = new GenericSystemDescriptor("intermediate-system", "mockSystemFactoryClassName");
input1Descriptor = inputSystemDescriptor.getInputDescriptor("input1", defaultSerde);
input2Descriptor = inputSystemDescriptor.getInputDescriptor("input2", defaultSerde);
outputDescriptor = outputSystemDescriptor.getOutputDescriptor("output", defaultSerde);
intermediateInputDescriptor = intermediateSystemDescriptor.getInputDescriptor("jobName-jobId-partition_by-p1", defaultSerde)
.withPhysicalName("jobName-jobId-partition_by-p1");
intermediateOutputDescriptor = intermediateSystemDescriptor.getOutputDescriptor("jobName-jobId-partition_by-p1", defaultSerde)
.withPhysicalName("jobName-jobId-partition_by-p1");
broadcastInputDesriptor = intermediateSystemDescriptor.getInputDescriptor("jobName-jobId-broadcast-b1", defaultSerde)
.withPhysicalName("jobName-jobId-broadcast-b1");
Map<String, String> configs = new HashMap<>();
configs.put(JobConfig.JOB_NAME, "jobName");
configs.put(JobConfig.JOB_ID, "jobId");
configs.putAll(input1Descriptor.toConfig());
configs.putAll(input2Descriptor.toConfig());
configs.putAll(outputDescriptor.toConfig());
configs.putAll(inputSystemDescriptor.toConfig());
configs.putAll(outputSystemDescriptor.toConfig());
configs.putAll(intermediateSystemDescriptor.toConfig());
configs.put(JobConfig.JOB_DEFAULT_SYSTEM, intermediateSystemDescriptor.getSystemName());
mockConfig = spy(new MapConfig(configs));
mockStreamAppDesc = new StreamApplicationDescriptorImpl(getRepartitionJoinStreamApplication(), mockConfig);
}
String getJobNameAndId() {
return "jobName-jobId";
}
void configureJobNode(ApplicationDescriptorImpl mockStreamAppDesc) {
JobGraph jobGraph = new ExecutionPlanner(mockConfig, mock(StreamManager.class))
.createJobGraph(mockStreamAppDesc);
mockJobNode = spy(jobGraph.getJobNodes().get(0));
}
StreamApplication getRepartitionOnlyStreamApplication() {
return appDesc -> {
MessageStream<KV<String, Object>> input1 = appDesc.getInputStream(input1Descriptor);
input1.partitionBy(KV::getKey, KV::getValue, mock(KVSerde.class), "p1");
};
}
StreamApplication getRepartitionJoinStreamApplication() {
return appDesc -> {
MessageStream<KV<String, Object>> input1 = appDesc.getInputStream(input1Descriptor);
MessageStream<KV<String, Object>> input2 = appDesc.getInputStream(input2Descriptor);
OutputStream<KV<String, Object>> output = appDesc.getOutputStream(outputDescriptor);
JoinFunction<String, Object, Object, KV<String, Object>> mockJoinFn = mock(JoinFunction.class);
input1
.partitionBy(KV::getKey, KV::getValue, defaultSerde, "p1")
.map(kv -> kv.value)
.join(input2.map(kv -> kv.value), mockJoinFn,
new StringSerde(), new JsonSerdeV2<>(Object.class), new JsonSerdeV2<>(Object.class),
Duration.ofHours(1), "j1")
.sendTo(output);
};
}
TaskApplication getTaskApplication() {
return appDesc -> {
appDesc.withInputStream(input1Descriptor)
.withInputStream(input2Descriptor)
.withInputStream(intermediateInputDescriptor)
.withOutputStream(intermediateOutputDescriptor)
.withOutputStream(outputDescriptor)
.withTaskFactory(() -> new IdentityStreamTask());
};
}
TaskApplication getLegacyTaskApplication() {
return new LegacyTaskApplication(IdentityStreamTask.class.getName());
}
StreamApplication getBroadcastOnlyStreamApplication(Serde serde) {
return appDesc -> {
MessageStream<KV<String, Object>> input = appDesc.getInputStream(input1Descriptor);
input.broadcast(serde, "b1");
};
}
}
| 2,348 |
1,338 | // -*- C++ -*- compatibility header.
// This file is part of the GNU ANSI C++ Library.
#include <algorithm>
#include <deque>
#include <functional>
#include <iterator>
#include <list>
#include <map>
#include <memory>
#include <numeric>
#include <set>
#include <stack>
#include <utility>
#include <vector>
| 105 |
4,262 | <reponame>rikvb/camel
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder;
import java.util.Map;
import java.util.function.Consumer;
import java.util.function.Supplier;
import org.apache.camel.CamelContext;
import org.apache.camel.RouteTemplateContext;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.model.DefaultRouteTemplateContext;
import org.apache.camel.model.ModelCamelContext;
import org.apache.camel.model.RouteTemplateDefinition;
/**
 * Fluent builder for adding new routes from route templates.
 */
public final class TemplatedRouteBuilder {

    private final CamelContext camelContext;
    private final String routeTemplateId;
    private final RouteTemplateContext routeTemplateContext;
    private String routeId;
    private Consumer<RouteTemplateDefinition> handler;
    private Consumer<RouteTemplateContext> configurer;

    private TemplatedRouteBuilder(CamelContext camelContext, String routeTemplateId) {
        // Fail fast with a descriptive NPE instead of failing later inside add().
        this.camelContext = java.util.Objects.requireNonNull(camelContext, "camelContext");
        this.routeTemplateId = java.util.Objects.requireNonNull(routeTemplateId, "routeTemplateId");
        this.routeTemplateContext = new DefaultRouteTemplateContext(camelContext);
    }

    /**
     * Creates a new {@link TemplatedRouteBuilder} to specify input parameters, and others, for the route template.
     *
     * @param  camelContext    the camel context
     * @param  routeTemplateId the id of the route template
     * @return                 the builder
     */
    public static TemplatedRouteBuilder builder(CamelContext camelContext, String routeTemplateId) {
        return new TemplatedRouteBuilder(camelContext, routeTemplateId);
    }

    /**
     * Sets the id of the route. If no route id is configured, then Camel will auto assign a route id, which is returned
     * from the build method.
     *
     * @param routeId the route id
     */
    public TemplatedRouteBuilder routeId(String routeId) {
        this.routeId = routeId;
        return this;
    }

    /**
     * Adds a parameter the route template will use when creating the route.
     *
     * @param name  parameter name
     * @param value parameter value
     */
    public TemplatedRouteBuilder parameter(String name, Object value) {
        routeTemplateContext.setParameter(name, value);
        return this;
    }

    /**
     * Adds parameters the route template will use when creating the route.
     *
     * @param parameters the template parameters to add
     */
    public TemplatedRouteBuilder parameters(Map<String, Object> parameters) {
        parameters.forEach(routeTemplateContext::setParameter);
        return this;
    }

    /**
     * Binds the bean to the template local repository (takes precedence over global beans)
     *
     * @param id   the id of the bean
     * @param bean the bean
     */
    public TemplatedRouteBuilder bean(String id, Object bean) {
        routeTemplateContext.bind(id, bean);
        return this;
    }

    /**
     * Binds the bean to the template local repository (takes precedence over global beans)
     *
     * @param id   the id of the bean
     * @param type the type of the bean to associate the binding
     * @param bean the bean
     */
    public TemplatedRouteBuilder bean(String id, Class<?> type, Object bean) {
        routeTemplateContext.bind(id, type, bean);
        return this;
    }

    /**
     * Binds the bean (via a supplier) to the template local repository (takes precedence over global beans)
     *
     * @param id   the id of the bean
     * @param type the type of the bean to associate the binding
     * @param bean the bean
     */
    public TemplatedRouteBuilder bean(String id, Class<?> type, Supplier<Object> bean) {
        routeTemplateContext.bind(id, type, bean);
        return this;
    }

    /**
     * Sets a handler which gives access to the route template model that will be used for creating the route. This can
     * be used to do validation. Any changes to the model happens before the route is created and added, however these
     * changes affect future usage of the same template.
     *
     * @param handler the handler with callback to invoke with the given route template
     */
    public TemplatedRouteBuilder handler(Consumer<RouteTemplateDefinition> handler) {
        this.handler = handler;
        return this;
    }

    /**
     * Sets a configurer which allows to do configuration while the route template is being used to create a route. This
     * gives control over the creating process, such as binding local beans and doing other kind of customization.
     *
     * @param configurer the configurer with callback to invoke with the given route template context
     */
    public TemplatedRouteBuilder configure(Consumer<RouteTemplateContext> configurer) {
        this.configurer = configurer;
        return this;
    }

    /**
     * Adds the route to the {@link CamelContext} which is built from the configured route template.
     *
     * @return the route id of the route that was added.
     */
    public String add() {
        try {
            if (handler != null) {
                // Give the handler a chance to inspect/validate the template model first.
                RouteTemplateDefinition def
                        = camelContext.adapt(ModelCamelContext.class).getRouteTemplateDefinition(routeTemplateId);
                if (def == null) {
                    throw new IllegalArgumentException("Cannot find RouteTemplate with id " + routeTemplateId);
                }
                handler.accept(def);
            }
            // configurer is executed later controlled by the route template context
            if (configurer != null) {
                routeTemplateContext.setConfigurer(configurer);
            }
            return camelContext.addRouteFromTemplate(routeId, routeTemplateId, routeTemplateContext);
        } catch (Exception e) {
            throw RuntimeCamelException.wrapRuntimeException(e);
        }
    }
}
| 2,189 |
571 | /*
Cephes Math Library Release 2.3: December, 1988
Copyright 1984, 1987, 1988 by <NAME>
Direct inquiries to 30 Frost Street, Cambridge, MA 02140
*/
#pragma once
namespace cephes {
// Evaluates the polynomial coef[0]*x^N + coef[1]*x^(N-1) + ... + coef[N]
// (Cephes polevl: coefficients in order of decreasing power).
double polevl(double x, const double *coef, int N);
// Same as polevl, but the leading coefficient is implicitly 1.0 and is
// omitted from coef (Cephes p1evl).
double p1evl(double x, const double *coef, int N);
}
| 99 |
328 | // Copyright 2022 The BladeDISC Authors. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <ATen/core/dispatch/Dispatcher.h>
#include <ATen/core/ivalue.h>
#include <ATen/core/stack.h>
#include <functional>
namespace torch {
namespace lazy {

// Returns true if the given op must bypass the lazy tensor path and run
// eagerly. NOTE(review): the exact criteria live in the implementation —
// confirm there before relying on specifics.
bool force_eager_fallback(c10::Symbol op);

// Boxed fallback that executes the op eagerly instead of tracing it lazily.
void ltc_eager_fallback(
    const c10::OperatorHandle& op,
    torch::jit::Stack* stack);

// TorchScript-backend variant of the eager fallback; device_type selects the
// eager device the op is redirected to.
void ts_eager_fallback(
    const c10::OperatorHandle& op,
    torch::jit::Stack* stack,
    c10::DeviceType device_type);

// The TorchScript backend does not register itself with pytorch dispatcher
// until it is explicitly initialized. This function should only be called
// by the main Torchscript backend init function.
void register_ts_ltc_eager_fallback();

} // namespace lazy
} // namespace torch
| 390 |
1,716 | <gh_stars>1000+
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.jimfs;
import com.google.common.base.Ascii;
import com.google.common.base.Function;
import com.ibm.icu.lang.UCharacter;
import java.text.Normalizer;
import java.util.regex.Pattern;
/**
 * Normalizations that can be applied to names in paths: Unicode normalization forms and case
 * folding for case-insensitive paths. Configured via {@code Configuration.Builder} when creating a
 * Jimfs file system and applied automatically to paths in that file system.
 *
 * @author <NAME>
 */
public enum PathNormalization implements Function<String, String> {
  /** No normalization. */
  NONE(0) {
    @Override
    public String apply(String input) {
      return input;
    }
  },

  /** Unicode composed normalization (form {@linkplain java.text.Normalizer.Form#NFC NFC}). */
  NFC(Pattern.CANON_EQ) {
    @Override
    public String apply(String input) {
      return Normalizer.normalize(input, Normalizer.Form.NFC);
    }
  },

  /** Unicode decomposed normalization (form {@linkplain java.text.Normalizer.Form#NFD NFD}). */
  NFD(Pattern.CANON_EQ) {
    @Override
    public String apply(String input) {
      return Normalizer.normalize(input, Normalizer.Form.NFD);
    }
  },

  /*
   * Case-insensitive file systems generally fold case in a locale-independent way. NTFS, for
   * instance, writes a fixed case-mapping table ($UpCase) when the file system is initialized,
   * regardless of the user's or OS's locale — so Turkic i/I variants in filenames are NOT
   * handled specially even on a Turkish system.
   */

  /** Unicode case folding for case insensitive paths. Requires ICU4J on the classpath. */
  CASE_FOLD_UNICODE(Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE) {
    @Override
    public String apply(String input) {
      try {
        return UCharacter.foldCase(input, true);
      } catch (NoClassDefFoundError e) {
        // Re-raise with an actionable message while preserving the original cause.
        NoClassDefFoundError error =
            new NoClassDefFoundError(
                "PathNormalization.CASE_FOLD_UNICODE requires ICU4J. "
                    + "Did you forget to include it on your classpath?");
        error.initCause(e);
        throw error;
      }
    }
  },

  /** ASCII case folding for simple case insensitive paths. */
  CASE_FOLD_ASCII(Pattern.CASE_INSENSITIVE) {
    @Override
    public String apply(String input) {
      return Ascii.toLowerCase(input);
    }
  };

  // Regex flags that approximate this normalization when matching path names.
  private final int regexFlags;

  PathNormalization(int regexFlags) {
    this.regexFlags = regexFlags;
  }

  /** Applies this normalization to the given string, returning the normalized result. */
  @Override
  public abstract String apply(String string);

  /**
   * Returns the flags that should be used when creating a regex {@link Pattern} in order to
   * approximate this normalization.
   */
  public int patternFlags() {
    return regexFlags;
  }

  /**
   * Applies the given normalizations to the given string in order, returning the normalized result.
   */
  public static String normalize(String string, Iterable<PathNormalization> normalizations) {
    String normalized = string;
    for (PathNormalization step : normalizations) {
      normalized = step.apply(normalized);
    }
    return normalized;
  }

  /** Compiles a regex pattern using flags based on the given normalizations. */
  public static Pattern compilePattern(String regex, Iterable<PathNormalization> normalizations) {
    int combinedFlags = 0;
    for (PathNormalization step : normalizations) {
      combinedFlags |= step.patternFlags();
    }
    return Pattern.compile(regex, combinedFlags);
  }
}
| 1,411 |
2,206 | <reponame>YunLemon/speedment
/*
*
* Copyright (c) 2006-2020, Speedment, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); You may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.speedment.runtime.field;
import org.junit.jupiter.api.Test;
import java.util.List;
import java.util.Set;
import java.util.stream.Stream;
import static com.speedment.runtime.field.TestEntity.ID;
import static com.speedment.runtime.field.TestEntity.NAME;
import static com.speedment.runtime.field.predicate.Inclusion.*;
import static java.util.Comparator.*;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * Tests comparator- and predicate-building methods on comparable fields
 * ({@code NAME}, {@code ID} of {@link TestEntity}); each test mirrors the
 * field-based result against an equivalent plain-Java stream pipeline.
 *
 * @author pemi
 */
final class ComparableFieldTest extends BaseFieldTest {

    // NAME.comparatorNullFieldsFirst() sorts null names before non-null ones;
    // ties are broken by ID with the same null placement.
    @Test
    void testReferenceFieldComparatorNullFieldsFirst() {
        final List<TestEntity> result = entities.stream().sorted(NAME.comparatorNullFieldsFirst().thenComparing(ID.comparator())).collect(toList());
        final List<TestEntity> expected = entities.stream()
            .sorted(
                comparing(TestEntity::getName, nullsFirst(String::compareTo))
                    .thenComparing(comparing(TestEntity::getId, nullsFirst(Integer::compareTo)))
            )
            .collect(toList());

        assertEquals(expected, result);
    }

    // The plain comparator() places null fields last (mirrored with nullsLast).
    @Test
    void testReferenceFieldComparatorNullFieldsLast() {
        final List<TestEntity> result = entities.stream().sorted(NAME.comparator().thenComparing(ID.comparator())).collect(toList());
        final List<TestEntity> expected = entities.stream()
            .sorted(
                comparing(TestEntity::getName, nullsLast(String::compareTo))
                    .thenComparing(comparing(TestEntity::getId, nullsLast(Integer::compareTo)))
            )
            .collect(toList());

        assertEquals(expected, result);
    }

    @Test
    void testIsNull() {
        assertEquals(
            collect(e -> e.getName() == null).size(),
            collect(NAME.isNull()).size()
        );
    }

    @Test
    void testIsNotNull() {
        assertEquals(
            collect(e -> e.getName() != null).size(),
            collect(NAME.isNotNull()).size()
        );
    }

    @Test
    void testEqual() {
        assertEquals(
            collect(e -> "a".equals(e.getName())).size(),
            collect(NAME.equal("a")).size()
        );
    }

    // notEqual excludes null-valued fields as well as matches.
    @Test
    void testNotEqual() {
        assertEquals(
            collect(e -> e.getName() != null && !"a".equals(e.getName())).size(),
            collect(NAME.notEqual("a")).size()
        );
    }

    @Test
    void lessThan() {
        assertEquals(
            collect(e -> e.getName() != null && "f".compareTo(e.getName()) > 0),
            collect(NAME.lessThan("f"))
        );
    }

    @Test
    void lessOrEqual() {
        assertEquals(
            collect(e -> e.getName() != null && "f".compareTo(e.getName()) >= 0),
            collect(NAME.lessOrEqual("f"))
        );
    }

    @Test
    void greaterThan() {
        assertEquals(
            collect(e -> e.getName() != null && "f".compareTo(e.getName()) < 0),
            collect(NAME.greaterThan("f"))
        );
    }

    @Test
    void greaterOrEqual() {
        assertEquals(
            collect(e -> e.getName() != null && "f".compareTo(e.getName()) <= 0),
            collect(NAME.greaterOrEqual("f"))
        );
    }

    // Two-argument between() is start-inclusive / end-exclusive.
    @Test
    void between2Arg() {
        final List<TestEntity> expected = collect(e -> e.getId() != null && e.getId() >= 2 && e.getId() < 6);
        final List<TestEntity> result = collect(ID.between(2, 6));

        assertEquals(
            expected,
            result
        );

        // These tests appear to be errenous. If the start is inclusive and end is inclusive,
        // passing the same argument as both start and stop will return 0 hits. It is also not
        // allowed to set either start or stop to null.
        // assertEquals(0, collect(ID.between(2, null)).size());
        // assertEquals(0, collect(ID.between(null, 6)).size());

        assertEquals(0, collect(ID.between(6, 2)).size());
        assertEquals(0, collect(ID.between(2, 2)).size());
    }

    @Test
    void between3ArgInclIncl() {
        final List<TestEntity> expected = collect(e -> e.getId() != null && e.getId() >= 2 && e.getId() <= 6);
        final List<TestEntity> result = collect(ID.between(2, 6, START_INCLUSIVE_END_INCLUSIVE));

        assertEquals(
            expected,
            result
        );

        // Inverted range matches nothing; [2, 2] matches exactly the single value 2.
        assertEquals(0, collect(ID.between(6, 2, START_INCLUSIVE_END_INCLUSIVE)).size());
        assertEquals(1, collect(ID.between(2, 2, START_INCLUSIVE_END_INCLUSIVE)).size());
    }

    @Test
    void between3ArgExclIncl() {
        final List<TestEntity> expected = collect(e -> e.getId() != null && e.getId() > 2 && e.getId() <= 6);
        final List<TestEntity> result = collect(ID.between(2, 6, START_EXCLUSIVE_END_INCLUSIVE));

        assertEquals(
            expected,
            result
        );

        assertEquals(0, collect(ID.between(6, 2, START_EXCLUSIVE_END_INCLUSIVE)).size());
        assertEquals(0, collect(ID.between(2, 2, START_EXCLUSIVE_END_INCLUSIVE)).size());
    }

    @Test
    void between3ArgExclExcl() {
        final List<TestEntity> expected = collect(e -> e.getId() != null && e.getId() > 2 && e.getId() < 6);
        final List<TestEntity> result = collect(ID.between(2, 6, START_EXCLUSIVE_END_EXCLUSIVE));

        assertEquals(
            expected,
            result
        );

        assertEquals(0, collect(ID.between(6, 2, START_EXCLUSIVE_END_EXCLUSIVE)).size());
        assertEquals(0, collect(ID.between(2, 2, START_EXCLUSIVE_END_EXCLUSIVE)).size());
    }

    // in() accepts a Collection or varargs; empty and null-element inputs match nothing.
    @Test
    void in() {
        final Integer[] ints = {2, 3, 5, 7, 11, 13, 16};
        final Set<Integer> intSet = Stream.of(ints).collect(toSet());
        final List<TestEntity> expected = collect(e -> intSet.contains(e.getId()));

        assertEquals(expected, collect(ID.in(intSet)));
        assertEquals(expected, collect(ID.in(ints)));

        assertEquals(0, collect(ID.in()).size());
        assertEquals(1, collect(ID.in(1)).size());
        assertEquals(0, collect(ID.in((Integer) null)).size());
    }
}
| 2,914 |
728 | /*
* Copyright (c) 2015 <NAME> <<EMAIL>>
* All Rights Reserved.
*/
package me.zhanghai.android.patternlock.sample.util;
/**
 * Shared-preference keys and default values used by the sample app.
 */
public class PreferenceContract {

    /** Preference key for the selected theme. */
    public static final String KEY_THEME = "pref_key_theme";
    /** Default theme index, stored as a string preference. */
    public static final String DEFAULT_THEME = "0";

    /** Preference key for the SHA-1 hash of the saved pattern. */
    public static final String KEY_PATTERN_SHA1 = "pref_key_pattern_sha1";
    /** Default pattern hash; {@code null} means no pattern has been set. */
    public static final String DEFAULT_PATTERN_SHA1 = null;

    /** Preference key for whether the pattern is drawn visibly. */
    public static final String KEY_PATTERN_VISIBLE = "pref_key_pattern_visible";
    /** Patterns are visible by default. */
    public static final Boolean DEFAULT_PATTERN_VISIBLE = true;

    // Constants holder: prevent instantiation.
    private PreferenceContract() {}
}
| 187 |
1,726 | /// @ref gtc_vec1
/// @file glm/gtc/vec1.hpp
///
/// @see core (dependence)
///
/// @defgroup gtc_vec1 GLM_GTC_vec1
/// @ingroup gtc
///
/// Include <glm/gtc/vec1.hpp> to use the features of this extension.
///
/// Add vec1, ivec1, uvec1 and bvec1 types.
#pragma once
// Dependency:
#include "../ext/vec1.hpp"
#if GLM_MESSAGES == GLM_MESSAGES_ENABLED && !defined(GLM_EXT_INCLUDED)
# pragma message("GLM: GLM_GTC_vec1 extension included")
#endif
#include "vec1.inl"
| 208 |
595 | /******************************************************************************
* Copyright (C) 2018 - 2021 Xilinx, Inc. All rights reserved.
* SPDX-License-Identifier: MIT
******************************************************************************/
/*****************************************************************************/
/**
*
* @file xusb_freertos_class_audio.c
*
* This file contains the implementation of the audio specific class code
* for the example.
*
*<pre>
* MODIFICATION HISTORY:
*
* Ver Who Date Changes
* ----- ---- -------- -------------------------------------------------------
* 1.0 rb 26/03/18 First release
*
*</pre>
*
******************************************************************************/
/***************************** Include Files *********************************/
#include "FreeRTOS.h"
#include "task.h"
#include "xusb_freertos_ch9_audio.h"
#include "xusb_freertos_class_audio.h"
/************************** Constant Definitions *****************************/
/***************** Macros (Inline Functions) Definitions *********************/
/**************************** Type Definitions *******************************/
/************************** Function Prototypes ******************************/
/************************** Variable Definitions *****************************/
/****************************************************************************/
/**
 * This function is called by Chapter9 handler when class request is received
 * from Host.
 *
 * Decodes UAC2 (USB Audio Class 2.0) class-specific control requests
 * (CUR and RANGE) addressed to the clock source, clock selector and the
 * two feature units, replying on endpoint 0. Any unrecognized request
 * stalls the control endpoint.
 *
 * @param	InstancePtr is pointer to Usb_DevData instance.
 * @param	SetupData is the setup packet received from Host.
 *
 * @note	None.
 *
 *****************************************************************************/
void Usb_ClassReq(struct Usb_DevData *InstancePtr, SetupPacket *SetupData)
{
	u32 ReplyLen;
	u8 Error = 0;
	static u8 Reply[USB_REQ_REPLY_LEN] ALIGNMENT_CACHELINE;
	/* High byte of wIndex addresses the audio function entity (unit). */
	u8 UnitId = SetupData->wIndex >> 8;

	/* Check that the requested reply length is not bigger than our reply
	 * buffer. This should never happen...
	 */
	if (SetupData->wLength > USB_REQ_REPLY_LEN)
		return;

	switch (SetupData->bRequest) {
	case UAC2_CS_CUR:
		switch (UnitId) {
		case USB_CLK_SRC_ID:
			/* High byte of wValue is the control selector. */
			switch (SetupData->wValue >> 8) {
			case UAC2_CS_CONTROL_SAM_FREQ:
				if ((SetupData->bRequestType &
						USB_ENDPOINT_DIR_MASK) == 0) {
					/* Set Request */
					ReplyLen = SetupData->wLength;
					EpBufferRecv(InstancePtr->PrivateData,
							0, Reply, ReplyLen);
					EpBufferSend(InstancePtr->PrivateData,
							0, NULL, 0);
				} else {
					/* Get Request: 44100 Hz (0x0000AC44),
					 * little-endian 32-bit value. */
					ReplyLen = SetupData->wLength > 4 ? 4 :
							SetupData->wLength;
					Reply[0] = (u8)0x44;
					Reply[1] = (u8)0xAC;
					Reply[2] = (u8)0x00;
					Reply[3] = (u8)0x00;
					EpBufferSend(InstancePtr->PrivateData,
							0, Reply, ReplyLen);
				}
				break;
			case UAC2_CS_CONTROL_CLOCK_VALID:
				ReplyLen = SetupData->wLength > 4 ? 4 :
						SetupData->wLength;
				/* Internal clock always valid */
				Reply[0] = (u8)0x01;
				EpBufferSend(InstancePtr->PrivateData, 0,
						Reply, ReplyLen);
				break;
			default:
				/* Unknown Control Selector for Clock Unit */
				Error = 1;
				break;
			}
			break;
		case USB_CLK_SEL_ID:
			if ((SetupData->bRequestType &
					USB_ENDPOINT_DIR_MASK) == 0) {
				/* Set Request */
				ReplyLen = SetupData->wLength;
				EpBufferRecv(InstancePtr->PrivateData, 0,
						Reply, ReplyLen);
				EpBufferSend(InstancePtr->PrivateData, 0,
						NULL, 0);
			} else {
				/* Get Request: selector fixed to input 1. */
				ReplyLen = SetupData->wLength > 4 ? 4 :
						SetupData->wLength;
				Reply[0] = (u8)0x01;
				EpBufferSend(InstancePtr->PrivateData, 0,
						Reply, ReplyLen);
			}
			break;
		case OUT_FETR_UNT_ID:
		case IN_FETR_UNT_ID:
			switch (SetupData->wValue >> 8) {
			case UAC2_FU_VOLUME_CONTROL:
				/* Feature not available */
				if ((SetupData->bRequestType &
						USB_ENDPOINT_DIR_MASK) == 0) {
					/* Set Request */
					ReplyLen = SetupData->wLength;
					EpBufferRecv(InstancePtr->PrivateData,
							0, Reply, ReplyLen);
					EpBufferSend(InstancePtr->PrivateData,
							0, NULL, 0);
				} else {
					/* Get Request */
					ReplyLen = SetupData->wLength > 4 ? 4 :
							SetupData->wLength;
					Reply[0] = 0x00;
					Reply[1] = 0x00;
					EpBufferSend(InstancePtr->PrivateData,
							0, Reply, ReplyLen);
				}
				break;
			case UAC2_FU_MUTE_CONTROL:
				/* Feature not available */
				if ((SetupData->bRequestType &
						USB_ENDPOINT_DIR_MASK) == 0) {
					/* Set Request */
					ReplyLen = SetupData->wLength;
					EpBufferRecv(InstancePtr->PrivateData,
							0, Reply, ReplyLen);
					EpBufferSend(InstancePtr->PrivateData,
							0, NULL, 0);
				} else {
					/* Get Request */
					ReplyLen = SetupData->wLength > 4 ? 4 :
							SetupData->wLength;
					Reply[0] = 0x01;
					EpBufferSend(InstancePtr->PrivateData,
							0, Reply, ReplyLen);
				}
				break;
			default:
				/* Unknown Control Selector for Feature Unit */
				Error = 1;
				break;
			}
			break;
		default:
			/* Unknown unit ID */
			Error = 1;
			break;
		}
		break;
	case UAC2_CS_RANGE:
		switch (UnitId) {
		case USB_CLK_SRC_ID:
			switch (SetupData->wValue >> 8) {
			case UAC2_CS_CONTROL_SAM_FREQ:
				/* One sub-range: MIN=MAX=44100 Hz, RES=0. */
				ReplyLen = SetupData->wLength > 14 ? 14 :
						SetupData->wLength;
				Reply[0] = (u8)0x01;
				Reply[1] = (u8)0x00;
				Reply[2] = (u8)0x44;
				Reply[3] = (u8)0xAC;
				Reply[4] = (u8)0x00;
				Reply[5] = (u8)0x00;
				Reply[6] = (u8)0x44;
				Reply[7] = (u8)0xAC;
				Reply[8] = (u8)0x00;
				Reply[9] = (u8)0x00;
				Reply[10] = (u8)0x00;
				Reply[11] = (u8)0x00;
				Reply[12] = (u8)0x00;
				Reply[13] = (u8)0x00;
				EpBufferSend(InstancePtr->PrivateData, 0,
						Reply, ReplyLen);
				break;
			default:
				/* Unknown Clock Source Range Request */
				Error = 1;
				break;
			}
			break;
		case OUT_FETR_UNT_ID:
		case IN_FETR_UNT_ID:
			switch (SetupData->wValue >> 8) {
			case UAC2_FU_VOLUME_CONTROL:
				/* Feature not available */
				ReplyLen = SetupData->wLength > 14 ? 14 :
						SetupData->wLength;
				Reply[0] = (u8)0x01;
				Reply[1] = (u8)0x00;
				Reply[2] = (u8)0x00;
				Reply[3] = (u8)0x81;
				Reply[4] = (u8)0x00;
				Reply[5] = (u8)0x00;
				Reply[6] = (u8)0x00;
				Reply[7] = (u8)0x01;
				EpBufferSend(InstancePtr->PrivateData, 0,
						Reply, ReplyLen);
				break;
			default:
				/* Unknown Control Selector for Feature Unit */
				Error = 1;
				break;
			}
			break;
		default:
			/* Unknown unit ID */
			Error = 1;
			break;
		}
		break;
	default:
		/* Unknown request: flag the error and let the common error
		 * path below issue the stall. (BUG FIX: previously the stall
		 * was also issued here, so Ep0StallRestart() ran twice for
		 * unknown bRequest values.)
		 */
		Error = 1;
		break;
	}

	/* Set the send stall bit if there is an error */
	if (Error)
		Ep0StallRestart(InstancePtr->PrivateData);
}
/****************************************************************************/
/**
* This task implements audio record functionality
*
* @param pvParameters private parameters.
*
* @note None.
*
*****************************************************************************/
void prvRecordTask(void *pvParameters)
{
	u32 Size;
	u16 MaxPktSize = 1024;	/* packet size used when enabling the ISO endpoint */
	struct Usb_DevData *InstancePtr = pvParameters;
	/* Recover the audio device state stashed in the chapter-9 driver data. */
	USBCH9_DATA *ch9_ptr =
		(USBCH9_DATA *)Get_DrvData(InstancePtr->PrivateData);
	struct audio_dev *dev = (struct audio_dev *)(ch9_ptr->data_ptr);
	/* Start streaming from the beginning of the virtual disk image. */
	dev->index = 0;
	dev->residue = 0;
	dev->firstpkt = 1;
	/* Binary semaphore: given by the ISR/handler when the host is ready
	 * for the next isochronous IN packet. */
	dev->xSemaphoreRecord = xSemaphoreCreateBinary();
	SetEpInterval(InstancePtr->PrivateData, ISO_EP,
		      USB_EP_DIR_IN, AUDIO_INTERVAL);
	/* Endpoint enables - not needed for Control EP */
	EpEnable(InstancePtr->PrivateData, ISO_EP, USB_EP_DIR_IN,
		 MaxPktSize, USB_EP_TYPE_ISOCHRONOUS);
	/* NOTE(review): endpoint is enabled for direction IN but StreamOn is
	 * invoked with USB_EP_DIR_OUT here -- confirm this asymmetry is
	 * intentional for this controller. */
	StreamOn(InstancePtr->PrivateData, ISO_EP, USB_EP_DIR_OUT,
		 BufferPtrTemp);
	while (1) {
		/* Block until the next service interval is signalled. */
		xSemaphoreTake(dev->xSemaphoreRecord, portMAX_DELAY);
		Size = dev->packetsize;
		/* Accumulate the fractional remainder so that, on average,
		 * the exact sample rate is maintained: every time the residue
		 * covers a whole frame, send one extra frame. */
		dev->residue += dev->packetresidue;
		if ((dev->residue / dev->interval) >= dev->framesize) {
			Size += dev->framesize;
			dev->residue -= dev->framesize * dev->interval;
		}
		/* Buffer is completed, retransmitting the same file data */
		if ((dev->index + Size) > dev->disksize)
			dev->index = 0;
		if (EpBufferSend(InstancePtr->PrivateData, ISO_EP,
				 dev->virtualdisk + dev->index,
				 Size) == XST_SUCCESS) {
			dev->index += Size;
			/* On the very first packet, pre-advance by one more
			 * packet-worth so the pipeline stays one step ahead. */
			if (dev->firstpkt) {
				Size = dev->packetsize;
				dev->residue += dev->packetresidue;
				if ((dev->residue / dev->interval) >=
				    dev->framesize) {
					Size += dev->framesize;
					dev->residue -= dev->framesize *
							dev->interval;
				}
				if ((dev->index + Size) > dev->disksize)
					dev->index = 0;
				else
					dev->index += Size;
				dev->firstpkt = 0;
			}
		} else
			/* Send failed: re-arm the semaphore so the packet is
			 * retried on the next loop iteration. */
			xSemaphoreGive(dev->xSemaphoreRecord);
	}
}
/****************************************************************************/
/**
* This task implements audio playback functionality
*
* @param pvParameters private parameters.
*
* @note None.
*
*****************************************************************************/
void prvPlayBackTask(void *pvParameters)
{
	u16 MaxPktSize = 1024;	/* packet size used when enabling the ISO endpoint */
	struct Usb_DevData *InstancePtr = pvParameters;
	/* Recover the audio device state stashed in the chapter-9 driver data. */
	USBCH9_DATA *ch9_ptr =
		(USBCH9_DATA *)Get_DrvData(InstancePtr->PrivateData);
	struct audio_dev *dev = (struct audio_dev *)(ch9_ptr->data_ptr);
	/* Binary semaphore: given when an OUT packet has been received. */
	dev->xSemaphorePlay = xSemaphoreCreateBinary();
#if defined(PLATFORM_ZYNQ)
	/* On Zynq, kick the loop once so the first EpBufferRecv is posted. */
	xSemaphoreGive(dev->xSemaphorePlay);
#endif
	dev->index = 0;
	dev->residue = 0;
	dev->firstpkt = 1;
	dev->bytesRecv = 0;
	SetEpInterval(InstancePtr->PrivateData, ISO_EP,
		      USB_EP_DIR_OUT, AUDIO_INTERVAL);
	/* Endpoint enables - not needed for Control EP */
	EpEnable(InstancePtr->PrivateData, ISO_EP,
		 USB_EP_DIR_OUT, MaxPktSize,
		 USB_EP_TYPE_ISOCHRONOUS);
	/* NOTE(review): endpoint is enabled for direction OUT but StreamOn is
	 * invoked with USB_EP_DIR_IN here -- confirm this asymmetry is
	 * intentional for this controller. */
	StreamOn(InstancePtr->PrivateData, ISO_EP, USB_EP_DIR_IN,
		 BufferPtrTemp);
	while (1) {
		/* Block until a packet has arrived in BufferPtrTemp. */
		xSemaphoreTake(dev->xSemaphorePlay, portMAX_DELAY);
		/* Wrap to the start of the virtual disk when full. */
		if ((dev->index + dev->bytesRecv) > dev->disksize)
			dev->index = 0;
		memcpy(dev->virtualdisk + dev->index, BufferPtrTemp,
		       dev->bytesRecv);
		dev->index += dev->bytesRecv;
		/* Post the buffer for the next OUT transfer (fixed 1024 bytes). */
		EpBufferRecv(InstancePtr->PrivateData, ISO_EP,
			     BufferPtrTemp, 1024);
	}
}
| 4,104 |
1,333 | package org.xujin.moss.client.endpoint;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.xujin.moss.client.endpoint.dependency.analyzer.JarDependencies;
import org.xujin.moss.client.utils.Analyzer;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.endpoint.web.annotation.RestControllerEndpoint;
import org.springframework.core.env.Environment;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
@RestControllerEndpoint(id = "appinfo")
@Slf4j
public class AppInfoEndPoint {

    private static final Log logger = LogFactory.getLog(AppInfoEndPoint.class);

    @Autowired
    private Environment env;

    /** Snapshot of the application's dependency info, filled once at startup. */
    private Map<String, Object> cache = new ConcurrentHashMap<>();

    /**
     * Serves the cached application information over HTTP GET.
     *
     * @return the cached info map (may still be empty while analysis runs)
     */
    @RequestMapping(method = {RequestMethod.GET})
    public Object get() {
        return cache;
    }

    /** Returns {@code o.toString()}, or an empty string when {@code o} is null. */
    private String nullToEmpty(Object o) {
        return null == o ? "" : o.toString();
    }

    /**
     * Kicks off an asynchronous scan of the classpath POM metadata and caches
     * the application name, the Spring Boot / Spring Cloud versions, and the
     * list of spring-cloud artifacts actually in use.
     */
    @PostConstruct
    public void init() {
        CompletableFuture.runAsync(() -> {
            String appName = env.getProperty("spring.application.name", "");
            try {
                JarDependencies dependencies = Analyzer.getAllPomInfo();
                cache.put("appName", nullToEmpty(appName));
                cache.put("springBootVersion", nullToEmpty(dependencies.getSpringBootVersion()));
                cache.put("springCloudVersion", nullToEmpty(dependencies.getSpringCloudVersion()));
                List<HashMap<String, String>> springCloudArtifacts = new ArrayList<>();
                dependencies.getPomInfos().forEach(pom -> {
                    boolean isSpringCloud = pom.getGroupId().equals("org.springframework.cloud")
                            && pom.getArtifactId().startsWith("spring-cloud");
                    if (isSpringCloud) {
                        HashMap<String, String> artifactVersion = new HashMap<>();
                        artifactVersion.put(pom.getArtifactId(), pom.getVersion());
                        springCloudArtifacts.add(artifactVersion);
                    }
                });
                cache.put("using", springCloudArtifacts);
            } catch (Exception e) {
                // Analysis failure must not break application startup; log and continue.
                logger.error(e.getMessage(), e);
            }
        });
    }
}
| 1,085 |
496 | /*******************************************************************************
* Project: Nebula
* @file Channel.hpp
* @brief
* @author bwar
* @date: Mar 25, 2018
* @note
* Modify history:
******************************************************************************/
#ifndef SRC_CHANNEL_CHANNEL_HPP_
#define SRC_CHANNEL_CHANNEL_HPP_
namespace neb
{

/**
 * @brief Base class for communication channels.
 *
 * Intentionally empty: concrete channel types derive from this class, and the
 * virtual destructor allows them to be deleted through a Channel pointer.
 */
class Channel
{
public:
    Channel(){}
    virtual ~Channel(){}
};

}
#endif /* SRC_CHANNEL_CHANNEL_HPP_ */
| 159 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_INVALIDATION_IMPL_FCM_SYNC_NETWORK_CHANNEL_H_
#define COMPONENTS_INVALIDATION_IMPL_FCM_SYNC_NETWORK_CHANNEL_H_
#include <string>
#include "base/callback.h"
#include "base/observer_list.h"
#include "base/values.h"
#include "components/invalidation/impl/channels_states.h"
namespace invalidation {
// FCMSyncNetworkChannel implements common tasks needed from the network by
// client:
// - registering message callbacks
// - notifying on network problems
class FCMSyncNetworkChannel {
 public:
  // Interface for classes that want to be told when the channel's
  // connection state changes.
  class Observer {
   public:
    virtual void OnFCMChannelStateChanged(
        FcmChannelState invalidator_state) = 0;
  };
  // See SetMessageReceiver below.
  // payload - additional info specific to the invalidation
  // private_topic - the internal (to FCM) representation for the public topic
  // public_topic - the topic which was invalidated, e.g. in case of Chrome
  // Sync it'll be BOOKMARK or PASSWORD
  // version - version number of the invalidation
  using MessageCallback =
      base::RepeatingCallback<void(const std::string& payload,
                                   const std::string& private_topic,
                                   const std::string& public_topic,
                                   int64_t version)>;
  using TokenCallback = base::RepeatingCallback<void(const std::string& token)>;
  FCMSyncNetworkChannel();
  virtual ~FCMSyncNetworkChannel();
  // Starts/stops listening for incoming FCM messages. Implemented by the
  // concrete transport subclass.
  virtual void StartListening() = 0;
  virtual void StopListening() = 0;
  // Sets the receiver to which messages from the data center will be delivered.
  // The callback will be invoked whenever an invalidation message is received
  // from FCM. It is *not* guaranteed to be invoked exactly once or in-order
  // (with respect to the invalidation's version number).
  void SetMessageReceiver(MessageCallback incoming_receiver);
  // Sets the receiver to which FCM registration token will be delivered.
  // The callback will be invoked whenever a new InstanceID token becomes
  // available.
  void SetTokenReceiver(TokenCallback token_receiver);
  // Classes interested in network channel state changes should implement
  // FCMSyncNetworkChannel::Observer and register here.
  void AddObserver(Observer* observer);
  void RemoveObserver(Observer* observer);
  // Subclass should implement RequestDetailedStatus to provide debugging
  // information.
  virtual void RequestDetailedStatus(
      const base::RepeatingCallback<void(const base::DictionaryValue&)>&
          callback) = 0;
 protected:
  // Subclass should notify about connection state through
  // NotifyChannelStateChange. If communication doesn't work and it is possible
  // that invalidations from server will not reach this client then channel
  // should call this function with TRANSIENT_INVALIDATION_ERROR.
  void NotifyChannelStateChange(FcmChannelState invalidator_state);
  // Subclass should call DeliverIncomingMessage for message to reach
  // invalidations library.
  bool DeliverIncomingMessage(const std::string& payload,
                              const std::string& private_topic,
                              const std::string& public_topic,
                              int64_t version);
  // Subclass should call DeliverToken for token to reach registration
  // manager.
  bool DeliverToken(const std::string& token);
 private:
  // Callbacks into invalidation library
  MessageCallback incoming_receiver_;
  TokenCallback token_receiver_;
  // Diagnostic counter of delivered messages; presumably maintained by
  // DeliverIncomingMessage -- confirm in the .cc file.
  int received_messages_count_;
  // Last InstanceID token seen; presumably updated by DeliverToken.
  std::string token_;
  base::ObserverList<Observer>::Unchecked observers_;
};
} // namespace invalidation
#endif // COMPONENTS_INVALIDATION_IMPL_FCM_SYNC_NETWORK_CHANNEL_H_
| 1,243 |
348 | {"nom":"Villers-Chief","circ":"5ème circonscription","dpt":"Doubs","inscrits":103,"abs":50,"votants":53,"blancs":2,"nuls":0,"exp":51,"res":[{"nuance":"LR","nom":"Mme <NAME>","voix":43},{"nuance":"REM","nom":"Mme <NAME>","voix":8}]} | 93 |
921 | <reponame>vsch/idea-markdown
// Copyright (c) 2015-2020 <NAME> <<EMAIL>> Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.vladsch.md.nav.util.format;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.editor.Editor;
import com.vladsch.flexmark.ext.gfm.tasklist.TaskListExtension;
import com.vladsch.flexmark.ext.tables.TablesExtension;
import com.vladsch.flexmark.formatter.Formatter;
import com.vladsch.flexmark.util.data.MutableDataHolder;
import com.vladsch.flexmark.util.format.CharWidthProvider;
import com.vladsch.md.nav.actions.handlers.util.PsiEditContext;
import com.vladsch.md.nav.editor.api.MdFormatCustomizationProvider;
import com.vladsch.md.nav.language.DiscretionaryText;
import com.vladsch.md.nav.language.FormatWithSoftWrap;
import com.vladsch.md.nav.language.MdCodeStyleSettings;
import com.vladsch.md.nav.language.TableCaptionActionType;
import com.vladsch.md.nav.parser.PegdownOptionsAdapter;
import com.vladsch.md.nav.psi.util.MdTypes;
import com.vladsch.md.nav.settings.MdRenderingProfile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.regex.Pattern;
/**
 * Adapts the plugin's Markdown code-style settings and rendering profile into
 * flexmark-java {@link Formatter} / extension options for formatting.
 */
public class FlexmarkFormatOptionsAdapter {
    final @NotNull PsiEditContext myEditContext;
    final @NotNull MdRenderingProfile myRenderingProfile;
    final @NotNull MdCodeStyleSettings myStyleSettings;
    final @Nullable Editor myEditor;
    // Measures rendered character widths; NULL provider means "all equal width".
    @NotNull CharWidthProvider myCharWidthProvider = CharWidthProvider.NULL;

    /**
     * @param editContext edit context supplying profile, settings and editor
     * @param startOffset start of the region being formatted
     * @param endOffset   end of the region being formatted
     */
    public FlexmarkFormatOptionsAdapter(@NotNull PsiEditContext editContext, int startOffset, int endOffset) {
        this.myEditContext = editContext;
        this.myRenderingProfile = editContext.getRenderingProfile();
        this.myStyleSettings = editContext.getStyleSettings();
        this.myEditor = editContext.getEditor();
        setCharWidthProvider(startOffset, endOffset);
    }

    // Picks the last non-NULL provider offered by any format customization
    // extension, but only when actual-char-width measurement is enabled.
    private void setCharWidthProvider(int startOffset, int endOffset) {
        myCharWidthProvider = CharWidthProvider.NULL;
        if (myStyleSettings.USE_ACTUAL_CHAR_WIDTH) {
            for (MdFormatCustomizationProvider provider : MdFormatCustomizationProvider.EXTENSIONS.getValue()) {
                CharWidthProvider charWidthProvider = provider.createCharWidthProvider(myEditContext, startOffset, endOffset);
                if (charWidthProvider != null && charWidthProvider != CharWidthProvider.NULL) {
                    myCharWidthProvider = charWidthProvider;
                }
            }
        }
    }

    @NotNull
    public PsiEditContext getEditContext() {
        return myEditContext;
    }

    @NotNull
    public MdRenderingProfile getRenderingProfile() {
        return myRenderingProfile;
    }

    @NotNull
    public MdCodeStyleSettings getStyleSettings() {
        return myStyleSettings;
    }

    @Nullable
    public Editor getEditor() {
        return myEditor;
    }

    @NotNull
    public CharWidthProvider getCharWidthProvider() {
        return myCharWidthProvider;
    }

    /**
     * Same as {@link #getFormatOptions()} but with the table indent prefix set
     * (empty when {@code tableIndentPrefix} is null).
     */
    @NotNull
    public MutableDataHolder getTableFormatOptions(@Nullable CharSequence tableIndentPrefix) {
        return getFormatOptions()
                .set(TablesExtension.FORMAT_TABLE_INDENT_PREFIX, tableIndentPrefix == null ? "" : tableIndentPrefix.toString());
    }

    /**
     * Builds the full flexmark option set from the rendering profile and code
     * style settings, then lets format customization extensions adjust it.
     */
    @NotNull
    public MutableDataHolder getFormatOptions() {
        MutableDataHolder options = PegdownOptionsAdapter.flexmarkOptions(myRenderingProfile).toMutable();
        // Plugin Only Settings
        //   SMART_EDIT_TABLES
        //   SMART_EDIT_TABLE_SEPARATOR_LINE
        //   INDENT_SIZE
        //   TAB_SIZE
        //   USE_TAB_CHARACTER
        //   SMART_TABS
        //   WRAP_ON_TYPING
        //   SMART_EDIT_ATX_HEADER
        //   SMART_EDIT_SETEXT_HEADER
        //   SMART_ENTER_SETEXT_HEADER
        //   SMART_ENTER_ATX_HEADER
        //   TOC_FORMAT_ON_SAVE
        //   NEW_BULLET_LIST_ITEM_MARKER
        //   FORMAT_WITH_SOFT_WRAP
        // Passed indirectly
        //   USE_ACTUAL_CHAR_WIDTH
        FormatWithSoftWrap formatWithSoftWrap = myStyleSettings.FORMAT_WITH_SOFT_WRAP();
        boolean isUseSoftWraps = myEditor != null && myEditor.getSettings().isUseSoftWraps();
        boolean useInfiniteMargins = isUseSoftWraps && formatWithSoftWrap == FormatWithSoftWrap.INFINITE_MARGIN;
        ASTNode frontMatterNode = myEditContext.getFrontMatterNode();
        boolean hasFrontMatter = frontMatterNode != null && frontMatterNode.getElementType() == MdTypes.JEKYLL_FRONT_MATTER_OPEN;
        boolean hasFlexmarkFrontMatter = frontMatterNode != null && frontMatterNode.getElementType() == MdTypes.FLEXMARK_FRONT_MATTER_OPEN;
        // Flexmark front matter needs at least 2 blank lines preserved.
        int keepBlankLines = hasFlexmarkFrontMatter ? Math.max(2, myStyleSettings.KEEP_BLANK_LINES) : myStyleSettings.KEEP_BLANK_LINES;
        options
                .set(Formatter.FORMAT_CHAR_WIDTH_PROVIDER, myCharWidthProvider)
                // FEATURE: add separate setting for trailing and max blank lines, also separate flexmark spec example differences
                .set(Formatter.MAX_TRAILING_BLANK_LINES, keepBlankLines)
                .set(Formatter.MAX_BLANK_LINES, keepBlankLines)
                .set(Formatter.FORMATTER_ON_TAG, myStyleSettings.getContainer().FORMATTER_ON_TAG)
                .set(Formatter.FORMATTER_OFF_TAG, myStyleSettings.getContainer().FORMATTER_OFF_TAG)
                .set(Formatter.FORMATTER_TAGS_ENABLED, myStyleSettings.getContainer().FORMATTER_TAGS_ENABLED)
                .set(Formatter.FORMATTER_TAGS_ACCEPT_REGEXP, myStyleSettings.getContainer().FORMATTER_TAGS_ACCEPT_REGEXP)
                // add link markup
                .set(Formatter.LINK_MARKER_COMMENT_PATTERN, Pattern.compile("^\\s*@IGNORE PREVIOUS:.*$"))
                // PARA_WRAP_TEXT
                .set(Formatter.RIGHT_MARGIN, myStyleSettings.PARA_WRAP_TEXT ? (useInfiniteMargins ? 50000 : myRenderingProfile.getRightMargin()) : 0)
                .set(TablesExtension.FORMAT_TABLE_LEAD_TRAIL_PIPES, myStyleSettings.TABLE_LEAD_TRAIL_PIPES)
                .set(TablesExtension.FORMAT_TABLE_SPACE_AROUND_PIPES, myStyleSettings.TABLE_SPACE_AROUND_PIPE)
                .set(TablesExtension.FORMAT_TABLE_ADJUST_COLUMN_WIDTH, myStyleSettings.TABLE_ADJUST_COLUMN_WIDTH)
                .set(TablesExtension.FORMAT_TABLE_APPLY_COLUMN_ALIGNMENT, myStyleSettings.TABLE_APPLY_COLUMN_ALIGNMENT)
                .set(TablesExtension.FORMAT_TABLE_FILL_MISSING_COLUMNS, myStyleSettings.TABLE_FILL_MISSING_COLUMNS)
                .set(TablesExtension.FORMAT_TABLE_CAPTION, TableCaptionActionType.ADAPTER.get(myStyleSettings.TABLE_CAPTION).flexMarkEnum)
                .set(TablesExtension.FORMAT_TABLE_CAPTION_SPACES, DiscretionaryText.ADAPTER.get(myStyleSettings.TABLE_CAPTION_SPACES).flexMarkEnum)
                .set(TablesExtension.FORMAT_TABLE_LEFT_ALIGN_MARKER, DiscretionaryText.ADAPTER.get(myStyleSettings.TABLE_LEFT_ALIGN_MARKER).flexMarkEnum)
                .set(TablesExtension.FORMAT_TABLE_MIN_SEPARATOR_COLUMN_WIDTH, 3)
                .set(TablesExtension.FORMAT_TABLE_MIN_SEPARATOR_DASHES, 3)
                .set(TablesExtension.FORMAT_TABLE_TRIM_CELL_WHITESPACE, myStyleSettings.TABLE_TRIM_CELLS || myStyleSettings.TABLE_APPLY_COLUMN_ALIGNMENT)
                .set(Formatter.LIST_ADD_BLANK_LINE_BEFORE, myStyleSettings.LIST_ADD_BLANK_LINE_BEFORE)
                .set(Formatter.LIST_RENUMBER_ITEMS, myStyleSettings.LIST_RENUMBER_ITEMS)
                .set(Formatter.LIST_BULLET_MARKER, myStyleSettings.BULLET_LIST_ITEM_MARKER().flexMarkEnum)
                .set(Formatter.LIST_ALIGN_NUMERIC, myStyleSettings.LIST_ALIGN_NUMERIC().flexMarkEnum)
                .set(Formatter.LIST_RESET_FIRST_ITEM_NUMBER, myStyleSettings.LIST_RESET_FIRST_ITEM_NUMBER)
                .set(Formatter.LIST_SPACING, myStyleSettings.LIST_SPACING().flexMarkEnum)
                .set(Formatter.SETEXT_HEADING_EQUALIZE_MARKER, myStyleSettings.SETEXT_HEADER_EQUALIZE_MARKER)
                .set(Formatter.ATX_HEADING_TRAILING_MARKER, myStyleSettings.ATX_HEADER_TRAILING_MARKER().flexMarkEnum)
                .set(Formatter.SPACE_AFTER_ATX_MARKER, myStyleSettings.SPACE_AFTER_ATX_MARKER().flexMarkEnum)
                .set(Formatter.HEADING_STYLE, myStyleSettings.HEADING_PREFERENCE().flexMarkEnum)
                .set(Formatter.REFERENCE_PLACEMENT, myStyleSettings.REFERENCE_PLACEMENT().flexMarkEnum)
                .set(Formatter.REFERENCE_SORT, myStyleSettings.REFERENCE_SORT().flexMarkEnum)
                .set(Formatter.ESCAPE_SPECIAL_CHARS, myStyleSettings.ESCAPE_SPECIAL_CHARS_ON_WRAP)
                .set(Formatter.ESCAPE_NUMBERED_LEAD_IN, myStyleSettings.ESCAPE_NUMBERED_LEAD_IN_ON_WRAP)
                .set(Formatter.UNESCAPE_SPECIAL_CHARS, myStyleSettings.UNESCAPE_SPECIAL_CHARS_ON_WRAP)
                .set(Formatter.FENCED_CODE_MARKER_LENGTH, myStyleSettings.CODE_FENCE_MARKER_LENGTH)
                .set(Formatter.FENCED_CODE_MARKER_TYPE, myStyleSettings.CODE_FENCE_MARKER_TYPE().flexMarkEnum)
                .set(Formatter.FENCED_CODE_MATCH_CLOSING_MARKER, myStyleSettings.CODE_FENCE_MATCH_CLOSING_MARKER)
                .set(Formatter.FENCED_CODE_MINIMIZE_INDENT, myStyleSettings.CODE_FENCE_MINIMIZE_INDENT)
                .set(Formatter.FENCED_CODE_SPACE_BEFORE_INFO, myStyleSettings.CODE_FENCE_SPACE_BEFORE_INFO)
                .set(Formatter.INDENTED_CODE_MINIMIZE_INDENT, myStyleSettings.VERBATIM_MINIMIZE_INDENT)
                .set(Formatter.BLOCK_QUOTE_MARKERS, myStyleSettings.BLOCK_QUOTE_MARKERS().flexMarkEnum)
                .set(Formatter.KEEP_EXPLICIT_LINKS_AT_START, myStyleSettings.keepAtStartOfLine(MdTypes.EXPLICIT_LINK, hasFrontMatter))
                .set(Formatter.KEEP_IMAGE_LINKS_AT_START, myStyleSettings.keepAtStartOfLine(MdTypes.IMAGE, hasFrontMatter))
                // Task List Extension
                .set(TaskListExtension.FORMAT_LIST_ITEM_CASE, myStyleSettings.TASK_LIST_ITEM_CASE().flexMarkEnum)
                .set(TaskListExtension.FORMAT_LIST_ITEM_PLACEMENT, myStyleSettings.TASK_LIST_ITEM_PLACEMENT().flexMarkEnum)
                .set(Formatter.LISTS_ITEM_CONTENT_AFTER_SUFFIX, myStyleSettings.TASK_ITEM_CONTINUATION().isAlignToFirst()) // text indents after task item suffix
                ;
        // Allow customizations
        for (MdFormatCustomizationProvider provider : MdFormatCustomizationProvider.EXTENSIONS.getValue()) {
            provider.customizeFormatOptions(this, options);
        }
        return options;
    }
}
| 4,584 |
870 | <gh_stars>100-1000
from setuptools import setup, find_packages
# Package metadata and installation configuration for batchgenerators.
setup(name='batchgenerators',
      version='0.23',
      description='Data augmentation toolkit',
      url='https://github.com/MIC-DKFZ/batchgenerators',
      author='Division of Medical Image Computing, German Cancer Research Center AND Applied Computer Vision Lab, '
             'Helmholtz Imaging Platform',
      author_email='<EMAIL>',
      license='Apache License Version 2.0, January 2004',
      # Ship every package in the source tree except the test suite.
      packages=find_packages(exclude=["tests"]),
      install_requires=[
          "pillow>=7.1.2",
          "numpy>=1.10.2",
          "scipy",
          "scikit-image",
          "scikit-learn",
          "future",
          "unittest2",
          "threadpoolctl"
      ],
      keywords=['data augmentation', 'deep learning', 'image segmentation', 'image classification',
                'medical image analysis', 'medical image segmentation'],
      )
| 407 |
361 | <gh_stars>100-1000
def test_function():
    """Trivial probe used as an importable fixture; always succeeds."""
    return True
class test_class():
    """Minimal fixture class exposing a single probe method."""

    def __init__(self):
        # No state to set up.
        pass

    def test_me(self):
        # Fixed identification string used by callers to verify imports work.
        return "I'm a test"
| 75 |
5,903 | <reponame>zhouguangping/pentaho-kettle
/*!
* Copyright (C) 2017 by <NAME> : http://www.pentaho.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.pentaho.googledrive.vfs.test;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileSystemOptions;
import org.junit.Test;
import org.pentaho.googledrive.vfs.GoogleDriveFileProvider;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;
public class GoogleDriveFileProviderTest {

  private final String SCHEME = "googledrive";
  private static final String DISPLAY_NAME = "Google Drive";

  /**
   * Verifies the provider advertises the expected scheme and display name and
   * can create a file system from a mocked file name with default options.
   */
  @Test public void testFileProvider() throws Exception {
    GoogleDriveFileProvider fileProvider = new GoogleDriveFileProvider();
    // assertEquals reports expected vs. actual on failure, unlike
    // assertTrue(a.equals(b)) which only says "false".
    assertEquals( SCHEME, fileProvider.SCHEME );
    assertEquals( DISPLAY_NAME, fileProvider.DISPLAY_NAME );
    FileName fileName = mock( FileName.class );
    FileSystemOptions options = new FileSystemOptions();
    assertNotNull( fileProvider.doCreateFileSystem( fileName, options ) );
  }
}
| 478 |
1,582 | # -*- coding: utf-8 -*-
from urllib import urlencode
def redirect_to_login(request, next_path, login_url, redirect_field_name):
    """Redirect to the login page, preserving the originally requested path.

    :param request: request object providing a ``redirect(url)`` method
    :param next_path: path the user tried to reach before authentication
    :param login_url: URL of the login page (may already carry a query string)
    :param redirect_field_name: query parameter name carrying ``next_path``
    :return: whatever ``request.redirect`` returns
    """
    # Use '&' when the login URL already has a query string, so we do not
    # produce a malformed URL like '/login?lang=en?next=...'.
    separator = '&' if '?' in login_url else '?'
    url = login_url + separator + urlencode({redirect_field_name: next_path})
    return request.redirect(url)
| 88 |
310 | <gh_stars>100-1000
package org.minbox.framework.knowledge.library.common.entity;
import com.gitee.hengboy.mybatis.enhance.common.annotation.Column;
import com.gitee.hengboy.mybatis.enhance.common.annotation.Id;
import com.gitee.hengboy.mybatis.enhance.common.annotation.Table;
import com.gitee.hengboy.mybatis.enhance.common.enums.KeyGeneratorTypeEnum;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
import java.sql.Timestamp;
/**
* <p>本类由Code-Builder自动生成</p>
* <p>表名: kl_user_info - 用户基本信息 - 数据实体</p>
*
* @author Code-Builder
* @since 恒宇少年
* ===============================
* Created with Code-Builder.
* User:
* Date:Apr 9, 2019 1:27:51 PM
* 简书:http://www.jianshu.com/u/092df3f77bca
* 码云:https://gitee.com/hengboy
* GitHub:https://github.com/hengyuboy
* ================================
*/
@Data
@Table(name = "kl_user_info")
@ApiModel
public class UserInfoEntity implements Serializable {
    /**
     * UI_ID - primary key.
     * NOTE(review): the original comment said "auto-increment" but the key
     * generator is UUID -- confirm which is intended.
     */
    @Id(generatorType = KeyGeneratorTypeEnum.UUID)
    @Column(name="UI_ID")
    @ApiModelProperty("主键自增")
    private String uiId;
    /**
     * UI_NICK_NAME - user nickname.
     */
    @Column(name="UI_NICK_NAME")
    @ApiModelProperty("用户昵称")
    private String uiNickName;
    /**
     * UI_OPEN_ID - the user's WeChat openId.
     */
    @Column(name="UI_OPEN_ID")
    @ApiModelProperty("用户微信openId")
    private String uiOpenId;
    /**
     * UI_PASSWORD - user password.
     */
    @Column(name="UI_PASSWORD")
    @ApiModelProperty("用户密码")
    private String uiPassword;
    /**
     * UI_LAST_LOGIN_TIME - time of the user's last login.
     */
    @Column(name="UI_LAST_LOGIN_TIME")
    @ApiModelProperty("最后登录时间")
    private Timestamp uiLastLoginTime;
    /**
     * UI_STATUS - user status: O = normal, D = deleted.
     * Excluded from inserts so the database default applies.
     */
    @Column(name="UI_STATUS",insertable = false)
    @ApiModelProperty("用户状态,O:正常,D:已删除")
    private String uiStatus;
    /**
     * UI_IS_LOCK - lock flag: Y = locked, N = not locked.
     * Excluded from inserts so the database default applies.
     */
    @Column(name="UI_IS_LOCK",insertable = false)
    @ApiModelProperty("是否锁定,Y:锁定,N:未锁定")
    private String uiIsLock;
    /**
     * UI_CREATE_TIME - record creation time.
     * Excluded from inserts so the database default applies.
     */
    @Column(name="UI_CREATE_TIME",insertable = false)
    @ApiModelProperty("创建时间")
    private Timestamp uiCreateTime;
    /**
     * UI_MARK - free-form remarks.
     */
    @Column(name="UI_MARK")
    @ApiModelProperty("备注信息")
    private String uiMark;
}
| 1,290 |
763 | <filename>db/basedb.py
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<<EMAIL>>
# http://binux.me
# Created on 2012-08-30 17:43:49
import logging
import sqlite3
logger = logging.getLogger('qiandao.basedb')
def tostr(s):
    """Best-effort conversion of bytes-like values to ``str``.

    ``bytes`` and ``bytearray`` are decoded as UTF-8; if decoding fails the
    original object is returned unchanged. Any other value is returned as-is.
    """
    # The original had two identical branches for bytes and bytearray and a
    # bare ``except``; one isinstance check with a narrow except is equivalent.
    if isinstance(s, (bytes, bytearray)):
        try:
            return s.decode()
        except UnicodeDecodeError:
            return s
    return s
class BaseDB(object):
    '''
    Minimal base class wrapping common SQL operations (select/insert/replace/
    update/delete) with identifier escaping and parameter binding.

    Subclasses must provide a ``conn`` connection attribute and should
    override the ``dbcur`` property to return a cursor for their driver.
    '''
    # SQL parameter placeholder; "%s" suits MySQL-style drivers. sqlite3
    # subclasses would need "?" -- see the demo at the bottom of the file.
    placeholder = "%s"
    @staticmethod
    def escape(string):
        # Quote an identifier (table/column name) with backticks.
        return '`%s`' % string
    @property
    def dbcur(self):
        # NOTE(review): ``unread_result``/``get_rows``/``ping`` look specific
        # to mysql-connector; other drivers must override this property.
        if self.conn.unread_result:
            try:
                # Drain any pending result set so a new query can run.
                self.conn.get_rows()
            except:
                pass
        self.conn.ping(reconnect=True)
        return self.conn.cursor()
    def _execute(self, sql_query, values=[]):
        # Run ``sql_query`` with bound ``values`` and return the live cursor.
        # NOTE(review): mutable default argument; never mutated here, but an
        # immutable default would be safer.
        dbcur = self.dbcur
        dbcur.execute(sql_query, values)
        return dbcur
    def _select(self, tablename=None, what="*", where="", where_values=[], offset=0, limit=None):
        """Yield rows (as lists, bytes decoded to str) from a SELECT."""
        tablename = self.escape(tablename or self.__tablename__)
        if isinstance(what, list) or isinstance(what, tuple) or what is None:
            what = ','.join(self.escape(f) for f in what) if what else '*'
        sql_query = "SELECT %s FROM %s" % (what, tablename)
        if where: sql_query += " WHERE %s" % where
        # NOTE(review): "LIMIT offset, count" is MySQL/sqlite syntax.
        if limit: sql_query += " LIMIT %d, %d" % (offset, limit)
        logger.debug("<sql: %s>", sql_query)
        for row in self._execute(sql_query, where_values):
            yield [tostr(x) for x in row]
    def _select2dic(self, tablename=None, what="*", where="", where_values=[], offset=0, limit=None):
        """Like ``_select`` but returns a list of column-name -> value dicts."""
        tablename = self.escape(tablename or self.__tablename__)
        if isinstance(what, list) or isinstance(what, tuple) or what is None:
            what = ','.join(self.escape(f) for f in what) if what else '*'
        sql_query = "SELECT %s FROM %s" % (what, tablename)
        if where: sql_query += " WHERE %s" % where
        if limit: sql_query += " LIMIT %d, %d" % (offset, limit)
        logger.debug("<sql: %s>", sql_query)
        dbcur = self._execute(sql_query, where_values)
        # Column names come from the cursor description.
        fields = [f[0] for f in dbcur.description]
        rtv = []
        for row in dbcur:
            rtv.append(dict(zip(fields, [tostr(x) for x in row])))
        #yield dict(zip(fields, [tostr(x) for x in row]))
        return rtv
    def _replace(self, tablename=None, **values):
        """REPLACE INTO ``tablename`` with ``values``; returns lastrowid."""
        tablename = self.escape(tablename or self.__tablename__)
        if values:
            _keys = ", ".join(self.escape(k) for k in values.keys())
            _values = ", ".join([self.placeholder, ] * len(values))
            sql_query = "REPLACE INTO %s (%s) VALUES (%s)" % (tablename, _keys, _values)
        else:
            sql_query = "REPLACE INTO %s DEFAULT VALUES" % tablename
        logger.debug("<sql: %s>", sql_query)
        if values:
            dbcur = self._execute(sql_query, list(values.values()))
        else:
            dbcur = self._execute(sql_query)
        return dbcur.lastrowid
    def _insert(self, tablename=None, **values):
        """INSERT INTO ``tablename`` with ``values``; returns lastrowid."""
        tablename = self.escape(tablename or self.__tablename__)
        if values:
            _keys = ", ".join((self.escape(k) for k in values.keys()))
            _values = ", ".join([self.placeholder, ] * len(values))
            sql_query = "INSERT INTO %s (%s) VALUES (%s)" % (tablename, _keys, _values)
        else:
            sql_query = "INSERT INTO %s DEFAULT VALUES" % tablename
        logger.debug("<sql: %s>", sql_query)
        if values:
            dbcur = self._execute(sql_query, list(values.values()))
        else:
            dbcur = self._execute(sql_query)
        return dbcur.lastrowid
    def _update(self, tablename=None, where="1=0", where_values=[], **values):
        """UPDATE rows matching ``where`` with ``values``; returns the cursor."""
        tablename = self.escape(tablename or self.__tablename__)
        _key_values = ", ".join(["%s = %s" % (self.escape(k), self.placeholder) for k in values.keys()])
        sql_query = "UPDATE %s SET %s WHERE %s" % (tablename, _key_values, where)
        logger.debug("<sql: %s>", sql_query)
        return self._execute(sql_query, list(values.values())+list(where_values))
    def _delete(self, tablename=None, where="1=0", where_values=[]):
        """DELETE rows matching ``where``; default where deletes nothing."""
        tablename = self.escape(tablename or self.__tablename__)
        sql_query = "DELETE FROM %s" % tablename
        if where: sql_query += " WHERE %s" % where
        logger.debug("<sql: %s>", sql_query)
        return self._execute(sql_query, where_values)
if __name__ == "__main__":
    # Self-test: exercise BaseDB against an in-memory sqlite database.
    # NOTE(review): BaseDB.placeholder is "%s" while sqlite3 expects "?";
    # confirm the parameterized statements below actually run under sqlite3.
    class DB(BaseDB):
        __tablename__ = "test"
        def __init__(self):
            self.conn = sqlite3.connect(":memory:")
            cursor = self.conn.cursor()
            cursor.execute('''CREATE TABLE `%s` (id INTEGER PRIMARY KEY AUTOINCREMENT, name, age)'''
                           % self.__tablename__)
        @property
        def dbcur(self):
            # sqlite needs none of the mysql-specific keep-alive handling.
            return self.conn.cursor()
    db = DB()
    # Insert, then read the row back in both list and dict form.
    assert db._insert(db.__tablename__, name="binux", age=23) == 1
    assert db._select(db.__tablename__, "name, age").fetchone() == ("binux", 23)
    assert db._select2dic(db.__tablename__, "name, age")[0]["name"] == "binux"
    assert db._select2dic(db.__tablename__, "name, age")[0]["age"] == 23
    # REPLACE by primary key clears unspecified columns.
    db._replace(db.__tablename__, id=1, age=24)
    assert db._select(db.__tablename__, "name, age").fetchone() == (None, 24)
    db._update(db.__tablename__, "id = 1", age=16)
    assert db._select(db.__tablename__, "name, age").fetchone() == (None, 16)
    db._delete(db.__tablename__, "id = 1")
    assert db._select(db.__tablename__).fetchall() == []
| 2,780 |
8,027 | <reponame>Unknoob/buck
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.rules.macros;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.core.cell.TestCellBuilder;
import com.facebook.buck.core.cell.nameresolver.CellNameResolver;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.model.UnconfiguredTargetConfiguration;
import com.facebook.buck.core.path.ForwardRelativePath;
import com.facebook.buck.core.rules.ActionGraphBuilder;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.coercer.CoerceFailedException;
import com.facebook.buck.rules.coercer.DefaultTypeCoercerFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.reflect.TypeToken;
import java.nio.file.Path;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
public class OutputMacroExpanderTest {
@Rule public ExpectedException thrown = ExpectedException.none();
private ProjectFilesystem filesystem;
private ActionGraphBuilder graphBuilder;
private CellNameResolver cellNameResolver;
private StringWithMacrosConverter converter;
private ActionGraphBuilder setup(ProjectFilesystem projectFilesystem, BuildTarget buildTarget) {
cellNameResolver = TestCellBuilder.createCellRoots(projectFilesystem).getCellNameResolver();
graphBuilder = new TestActionGraphBuilder();
converter =
StringWithMacrosConverter.of(
buildTarget,
cellNameResolver,
graphBuilder,
ImmutableList.of(new OutputMacroExpander()));
return graphBuilder;
}
@Test
public void replaceOutputOfSupplementaryOutputWithRelativePath() throws Exception {
BuildTarget buildTarget = BuildTargetFactory.newInstance("//some:target");
filesystem = new FakeProjectFilesystem();
graphBuilder = setup(filesystem, buildTarget);
RuleWithSupplementaryOutput rule = new RuleWithSupplementaryOutput(buildTarget, filesystem);
graphBuilder.addToIndex(rule);
String originalCmd = "$(output one)";
String transformedString = coerceAndStringify(originalCmd, rule);
// Verify that the correct cmd was created.
Path absolutePath =
graphBuilder
.getSourcePathResolver()
.getRelativePath(rule.getSourcePathToSupplementaryOutput("one"));
String expectedCmd = absolutePath.toString();
assertEquals(expectedCmd, transformedString);
}
@Test
public void missingLocationArgumentThrows() throws Exception {
filesystem = FakeProjectFilesystem.createJavaOnlyFilesystem("/some_root");
cellNameResolver = TestCellBuilder.createCellRoots(filesystem).getCellNameResolver();
thrown.expect(CoerceFailedException.class);
thrown.expectMessage(
allOf(
containsString("The macro '$(output )' could not be expanded:"),
containsString("expected exactly one argument (found 1)")));
new DefaultTypeCoercerFactory()
.typeCoercerForType(TypeToken.of(StringWithMacros.class))
.coerceBoth(
cellNameResolver,
filesystem,
ForwardRelativePath.of(""),
UnconfiguredTargetConfiguration.INSTANCE,
UnconfiguredTargetConfiguration.INSTANCE,
"$(output )");
}
/**
 * Coerces the raw string into a {@link StringWithMacros} relative to the rule's
 * base path, then expands its macros via {@code converter} and renders the result.
 */
private String coerceAndStringify(String input, BuildRule rule) throws CoerceFailedException {
  StringWithMacros stringWithMacros =
      new DefaultTypeCoercerFactory()
          .typeCoercerForType(TypeToken.of(StringWithMacros.class))
          .coerceBoth(
              cellNameResolver,
              filesystem,
              rule.getBuildTarget().getCellRelativeBasePath().getPath(),
              UnconfiguredTargetConfiguration.INSTANCE,
              UnconfiguredTargetConfiguration.INSTANCE,
              input);
  Arg arg = converter.convert(stringWithMacros);
  return Arg.stringify(arg, graphBuilder.getSourcePathResolver());
}
}
| 1,654 |
1,630 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from functools import partial
import pytest
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
from colossalai.communication import (recv_backward, recv_forward, recv_obj_meta, send_backward,
send_backward_recv_forward, send_forward, send_forward_recv_backward,
send_obj_meta)
from colossalai.context.parallel_mode import ParallelMode
from colossalai.core import global_context as gpc
from colossalai.initialize import launch
from colossalai.logging import get_dist_logger
from colossalai.utils import free_port, get_current_device
from colossalai.testing import rerun_on_exception
# Tensor dimensions used by every check below.
BATCH_SIZE = 4
SEQ_LENGTH = 2
HIDDEN_SIZE = 16
# 4-stage pipeline parallelism, no tensor parallelism.
CONFIG = dict(parallel=dict(pipeline=dict(size=4), tensor=dict(size=1, mode=None)), seed=1024)
def check_equal(A, B):
    """Return True when tensors A and B are element-wise close (rtol=1e-5, atol=1e-3)."""
    is_close = torch.allclose(A, B, rtol=1e-5, atol=1e-3)
    return is_close
def check_forward(output_tensor, rank, logger):
    """Relay ``output_tensor`` down the pipeline and verify each hop.

    The first pipeline rank starts from its own copy; every later rank
    receives the tensor from the previous stage, compares it against the
    locally-known reference, and forwards it on (unless it is the last rank).
    """
    dist.barrier()
    if gpc.is_first_rank(ParallelMode.PIPELINE):
        tensor = output_tensor.clone()
    else:
        tensor = recv_forward(output_tensor.shape)
        logger.info('Rank {} received forward. Correct tensor: {}'.format(rank, check_equal(tensor, output_tensor)))
    if not gpc.is_last_rank(ParallelMode.PIPELINE):
        send_forward(tensor)
        logger.info('Rank {} sent forward.'.format(rank))
def check_backward(output_grad, rank, logger):
    """Relay ``output_grad`` up the pipeline (last rank -> first) and verify each hop."""
    dist.barrier()
    if gpc.is_last_rank(ParallelMode.PIPELINE):
        grad = output_grad.clone()
    else:
        grad = recv_backward(output_grad.shape)
        logger.info('Rank {} received backward. Correct grad: {}'.format(rank, check_equal(grad, output_grad)))
    if not gpc.is_first_rank(ParallelMode.PIPELINE):
        send_backward(grad)
        logger.info('Rank {} sent backward.'.format(rank))
def check_forward_backward(output_tensor, output_grad, rank, logger):
    """Exercise the fused send/recv primitives in both directions at once."""
    dist.barrier()
    if not gpc.is_first_rank(ParallelMode.PIPELINE):
        tensor = send_backward_recv_forward(output_grad, output_tensor.shape)
        logger.info('Rank {} sent backward received forward. Correct tensor: {}'.format(
            rank, check_equal(tensor, output_tensor)))
    if not gpc.is_last_rank(ParallelMode.PIPELINE):
        grad = send_forward_recv_backward(output_tensor, output_grad.shape)
        logger.info('Rank {} sent forward received backward. Correct grad: {}'.format(
            rank, check_equal(grad, output_grad)))
def check_comm(size, rank, prev_rank, next_rank, logger):
    """Run all P2P checks with identical data on every rank.

    The all_reduce calls make every rank hold the same random tensor/grad, so
    received values can be compared against the local reference copy.
    Note: ``size``, ``prev_rank`` and ``next_rank`` are unused here; they are
    kept for signature parity with the caller.
    """
    dtype = torch.float32
    device = get_current_device()
    tensor_shape = (BATCH_SIZE, SEQ_LENGTH, HIDDEN_SIZE)
    grad_shape = (BATCH_SIZE, SEQ_LENGTH, HIDDEN_SIZE)
    tensor = torch.randn(tensor_shape, dtype=dtype, device=device)
    dist.all_reduce(tensor)
    grad = torch.randn(grad_shape, dtype=dtype, device=device)
    dist.all_reduce(grad)
    check_forward(tensor, rank, logger)
    check_backward(grad, rank, logger)
    check_forward_backward(tensor, grad, rank, logger)
def run_check(rank, world_size, port):
    """Per-process entry point: launch the distributed env, run the checks, tear down.

    Args:
        rank: global rank of this process.
        world_size: total number of processes (pipeline stages).
        port: rendezvous port for the NCCL backend.
    """
    launch(config=CONFIG, rank=rank, world_size=world_size, host='localhost', port=port, backend='nccl')
    logger = get_dist_logger()
    rank = gpc.get_global_rank()
    prev_rank = gpc.get_prev_global_rank(ParallelMode.PIPELINE)
    next_rank = gpc.get_next_global_rank(ParallelMode.PIPELINE)
    logger.info('Rank {0}: prev rank {1}, next rank {2}'.format(rank, prev_rank, next_rank))
    # Fixed typo in the log message ('initialzied' -> 'initialized').
    logger.info('Distributed environment is initialized.')
    check_comm(world_size, rank, prev_rank, next_rank, logger)
    gpc.destroy()
    torch.cuda.empty_cache()
@pytest.mark.dist
@rerun_on_exception(exception_type=mp.ProcessRaisedException, pattern=".*Address already in use.*")
def test_p2p():
    """Spawn 4 processes (one per pipeline stage) and run the P2P communication checks."""
    world_size = 4
    run_func = partial(run_check, world_size=world_size, port=free_port())
    mp.spawn(run_func, nprocs=world_size)
if __name__ == '__main__':
test_p2p()
| 1,598 |
593 | <reponame>Awais75/Linq
/*=============================================================================
Copyright (c) 2012 <NAME>
take_while.h
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#ifndef LINQ_GUARD_EXTENSIONS_TAKE_WHILE_H
#define LINQ_GUARD_EXTENSIONS_TAKE_WHILE_H
#include <linq/extensions/extension.h>
#include <boost/range.hpp>
#include <boost/range/algorithm/find_if.hpp>
#include <linq/extensions/detail/placeholders.h>
#include <linq/extensions/detail/not_predicate.h>
namespace linq {
namespace detail {
// Functor implementing take_while: yields the longest prefix of the range for
// which the predicate holds.
struct take_while_t
{
    // Returns the sub-range [begin(r), first element failing p).
    template<class Range, class Predicate>
    auto operator()(Range && r, Predicate p) const
    LINQ_RETURNS(boost::make_iterator_range(boost::begin(r), boost::find_if(r, std::bind(not_predicate(), p, linq::_1))));
};
}
namespace {
range_extension<detail::take_while_t> take_while = {};
}
}
#endif
| 362 |
1,352 | // pbrt is Copyright(c) 1998-2020 <NAME>, <NAME>, and <NAME>.
// The pbrt source code is licensed under the Apache License, Version 2.0.
// SPDX: Apache-2.0
#include <pbrt/util/pstd.h>
#include <pbrt/util/check.h>
#include <pbrt/util/memory.h>
namespace pstd {
namespace pmr {
memory_resource::~memory_resource() {}
// memory_resource backed by the system aligned allocator; pstd's analogue of
// the resource behind std::pmr::new_delete_resource.
class NewDeleteResource : public memory_resource {
    void *do_allocate(size_t size, size_t alignment) {
#if defined(PBRT_HAVE__ALIGNED_MALLOC)
        return _aligned_malloc(size, alignment);
#elif defined(PBRT_HAVE_POSIX_MEMALIGN)
        void *ptr;
        // posix_memalign requires alignment to be a multiple of sizeof(void*);
        // fall back to plain malloc for smaller alignments.
        if (alignment < sizeof(void *))
            return malloc(size);
        if (posix_memalign(&ptr, alignment, size) != 0)
            ptr = nullptr;
        return ptr;
#else
        return memalign(alignment, size);
#endif
    }

    void do_deallocate(void *ptr, size_t bytes, size_t alignment) {
        // Deallocating nullptr is a no-op, mirroring free().
        if (!ptr)
            return;
#if defined(PBRT_HAVE__ALIGNED_MALLOC)
        _aligned_free(ptr);
#else
        free(ptr);
#endif
    }

    // Stateless: interchangeable only with itself.
    bool do_is_equal(const memory_resource &other) const noexcept {
        return this == &other;
    }
};
// Lazily-created singleton; intentionally never freed.
static NewDeleteResource *ndr;

// NOTE(review): this lazy initialization is not thread-safe -- two threads
// racing on the first call could both allocate. Confirm callers serialize
// startup before relying on this.
memory_resource *new_delete_resource() noexcept {
    if (!ndr)
        ndr = new NewDeleteResource;
    return ndr;
}
// Process-wide default resource used when no explicit memory_resource is given.
static memory_resource *defaultMemoryResource = new_delete_resource();

// Installs a new default resource and returns the previous one so the caller
// can restore it later.
memory_resource *set_default_resource(memory_resource *r) noexcept {
    memory_resource *orig = defaultMemoryResource;
    defaultMemoryResource = r;
    return orig;
}

memory_resource *get_default_resource() noexcept {
    return defaultMemoryResource;
}
// Bump-pointer allocation out of the current block; oversized requests go
// straight to the upstream resource. Not thread safe (see CHECK below).
void *monotonic_buffer_resource::do_allocate(size_t bytes, size_t align) {
#ifndef NDEBUG
    // Ensures that the monotonic_buffer_resource is used in the same
    // thread that originally created it. This is an attempt to catch race
    // conditions, since the class is, by design, not thread safe. Note
    // that this CHECK effectively assumes that these are being allocated
    // via something like ThreadLocal; there are perfectly reasonably ways
    // of allocating these in one thread and using them in another thread,
    // so this is tied to pbrt's current usage of them...
    CHECK(constructTID == std::this_thread::get_id());
#endif
    if (bytes > block_size)
        // We've got a big allocation; satisfy it from upstream and leave the
        // current block be, so that smaller allocations have a chance at
        // using up more of it.
        return upstream->allocate(bytes, align);

    // Round current_pos up to the next multiple of align.
    if ((current_pos % align) != 0)
        current_pos += align - (current_pos % align);
    DCHECK_EQ(0, current_pos % align);

    // Start a fresh block when the current one can't fit the request.
    if (!current || current_pos + bytes > current->size) {
        current = allocate_block(block_size);
        current_pos = 0;
    }
    void *ptr = (char *)current->ptr + current_pos;
    current_pos += bytes;
    return ptr;
}
} // namespace pmr
} // namespace pstd
| 1,083 |
831 | <reponame>qq1056779951/android
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.uibuilder.handlers.motion.property2.action;
import com.android.tools.idea.uibuilder.handlers.motion.editor.MotionSceneTag;
import com.android.tools.idea.uibuilder.handlers.motion.editor.adapters.MTag;
import com.android.tools.idea.uibuilder.handlers.motion.editor.ui.MotionEditorSelector;
import com.android.tools.idea.uibuilder.handlers.motion.property2.MotionLayoutAttributesModel;
import com.android.tools.idea.uibuilder.handlers.motion.property2.MotionSelection;
import com.android.tools.idea.uibuilder.property2.NelePropertyItem;
import com.android.tools.property.panel.api.InspectorLineModel;
import com.intellij.icons.AllIcons;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.Presentation;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Action that toggles (creates or removes) an optional sub tag under the
 * currently selected MotionScene tag (a Constraint or Transition).
 */
public class SubSectionControlAction extends AnAction {
  private final NelePropertyItem myProperty;
  private InspectorLineModel myLineModel;
  // Cache of the last selection state computed by check().
  private LookupResult myLookupResult;

  public SubSectionControlAction(@Nullable NelePropertyItem property) {
    myProperty = property;
    myLookupResult = new LookupResult();
  }

  /** Attaches the inspector line; it is enabled only when the sub tag exists. */
  public void setLineModel(@NotNull InspectorLineModel lineModel) {
    myLineModel = lineModel;
    myLineModel.setEnabled(check());
  }

  @Override
  public void update(@NotNull AnActionEvent event) {
    // Reflect whether the sub tag currently exists: checked icon when present.
    boolean isPresent = check();
    Presentation presentation = event.getPresentation();
    presentation.setDescription(getCommandName(isPresent));
    presentation.setIcon(isPresent ? AllIcons.Diff.GutterCheckBoxSelected : AllIcons.Diff.GutterCheckBox);
    if (myLineModel != null) {
      myLineModel.setEnabled(isPresent);
    }
  }

  @Override
  public void actionPerformed(@NotNull AnActionEvent event) {
    boolean isPresent = check();
    String commandName = getCommandName(isPresent);
    if (commandName == null) {
      // No valid sub tag name for the current selection; nothing to do.
      return;
    }
    if (isPresent) {
      // Sub tag exists: remove it.
      MTag.TagWriter tagWriter = myLookupResult.subTag.getTagWriter();
      tagWriter.deleteTag();
      tagWriter.commit(commandName);
    }
    else {
      // Sub tag missing: create it under the selected tag.
      MTag.TagWriter tagWriter = MotionLayoutAttributesModel.createSubTag(myLookupResult.selection,
                                                                          myLookupResult.tag,
                                                                          myLookupResult.subTagName);
      tagWriter.commit(commandName);
    }
  }

  /**
   * Validates the current selection, caches it in {@link #myLookupResult}, and
   * returns true when the sub tag already exists under the selected tag.
   */
  private boolean check() {
    if (myProperty == null) {
      return false;
    }
    MotionSelection selection = MotionLayoutAttributesModel.getMotionSelection(myProperty);
    String subTagName = MotionLayoutAttributesModel.getSubTag(myProperty);
    // Only Constraint and Transition selections support this sub section.
    if (selection == null || subTagName == null ||
        (selection.getType() != MotionEditorSelector.Type.CONSTRAINT &&
         selection.getType() != MotionEditorSelector.Type.TRANSITION)) {
      return false;
    }
    MotionSceneTag tag = selection.getMotionSceneTag();
    if (tag == null) {
      return false;
    }
    MotionSceneTag subTag = MotionLayoutAttributesModel.getSubTag(tag, subTagName);
    myLookupResult.selection = selection;
    myLookupResult.tag = tag;
    myLookupResult.subTagName = subTagName;
    myLookupResult.subTag = subTag;
    return subTag != null;
  }

  /** Undo/redo command name, or null when no sub tag name is cached. */
  @Nullable
  private String getCommandName(boolean isPresent) {
    String subTagName = myLookupResult.subTagName;
    if (subTagName == null) {
      return null;
    }
    if (!isPresent) {
      return String.format("Create %1$s tag", subTagName);
    }
    else {
      return String.format("Remove %1$s tag", subTagName);
    }
  }

  /** Snapshot of the selection state computed by {@link #check()}. */
  private static class LookupResult {
    MotionSelection selection;
    MotionSceneTag tag;
    String subTagName;
    MotionSceneTag subTag;
  }
}
| 1,590 |
5,422 | # Copyright 2020 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Image validation service."""
from __future__ import annotations
import base64
import imghdr
from core import feconf
from core import utils
from core.domain import html_validation_service
def validate_image_and_filename(raw_image, filename):
    """Validates the image data and its filename.

    Args:
        raw_image: str. The image content.
        filename: str. The filename for the image.

    Returns:
        str. The file format of the image.

    Raises:
        ValidationError. Image or filename supplied fails one of the
            validation checks.
    """
    hundred_kb_in_bytes = 100 * 1024
    if not raw_image:
        raise utils.ValidationError('No image supplied')
    # Accept both raw bytes and base64-encoded payloads; normalize to bytes.
    if utils.is_base64_encoded(raw_image):
        raw_image = base64.decodebytes(raw_image.encode('utf-8'))
    if len(raw_image) > hundred_kb_in_bytes:
        raise utils.ValidationError(
            'Image exceeds file size limit of 100 KB.')
    allowed_formats = ', '.join(
        list(feconf.ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS.keys()))
    # SVGs are XML documents; anything else is content-sniffed by imghdr.
    if html_validation_service.is_parsable_as_xml(raw_image):
        file_format = 'svg'
        invalid_tags, invalid_attrs = (
            html_validation_service.get_invalid_svg_tags_and_attrs(raw_image))
        if invalid_tags or invalid_attrs:
            invalid_tags_message = (
                'tags: %s' % invalid_tags if invalid_tags else '')
            invalid_attrs_message = (
                'attributes: %s' % invalid_attrs if invalid_attrs else '')
            raise utils.ValidationError(
                'Unsupported tags/attributes found in the SVG:\n%s\n%s' % (
                    invalid_tags_message, invalid_attrs_message))
        if not html_validation_service.does_svg_tag_contains_xmlns_attribute(
                raw_image):
            raise utils.ValidationError(
                'The svg tag does not contains the \'xmlns\' attribute.')
    else:
        # Verify that the data is recognized as an image.
        file_format = imghdr.what(None, h=raw_image)
        if file_format not in feconf.ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS:
            raise utils.ValidationError('Image not recognized')
    # Verify that the file type matches the supplied extension.
    if not filename:
        raise utils.ValidationError('No filename supplied')
    # A leading dot with nothing before it (e.g. '.png') is rejected.
    if filename.rfind('.') == 0:
        raise utils.ValidationError('Invalid filename')
    # Guard against path traversal in the filename.
    if '/' in filename or '..' in filename:
        raise utils.ValidationError(
            'Filenames should not include slashes (/) or consecutive '
            'dot characters.')
    if '.' not in filename:
        raise utils.ValidationError(
            'Image filename with no extension: it should have '
            'one of the following extensions: %s.' % allowed_formats)
    # The extension must be one registered for the detected file format.
    dot_index = filename.rfind('.')
    extension = filename[dot_index + 1:].lower()
    if (extension not in
            feconf.ACCEPTED_IMAGE_FORMATS_AND_EXTENSIONS[file_format]):
        raise utils.ValidationError(
            'Expected a filename ending in .%s, received %s' %
            (file_format, filename))
    return file_format
| 1,463 |
6,958 | <filename>source/backend/cpu/CPUQuantizationUtils.hpp<gh_stars>1000+
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// edited from tensorflow - quantization_utils.cc by MNN.
#ifndef QUANTIZATION_HPP
#define QUANTIZATION_HPP
#include <math.h>
#include <stdio.h>
#include <cmath>
#include <limits>
#include "TFQuantizeOp_generated.h"
namespace MNN {
// Computes the saturation radius, in quantized input units, implied by the
// fixed-point format (inputIntegerBits) and the pre-shift (inputLeftShift).
inline int CalculateInputRadius(int inputIntegerBits, int inputLeftShift) {
    const double maxInputRescaled =
        1.0 * ((1 << inputIntegerBits) - 1) * (1ll << (31 - inputIntegerBits)) / (1ll << inputLeftShift);
    // Truncate toward negative infinity so the result stays strictly in range.
    const int radius = static_cast<int>(std::floor(maxInputRescaled));
    return radius;
}
// Decomposes doubleMultiplier into a Q31 fixed-point multiplier and a
// power-of-two exponent such that
// doubleMultiplier == quantizedMultiplier * 2^(shift - 31).
inline void QuantizeMultiplier(double doubleMultiplier, int32_t* quantizedMultiplier, int* shift) {
    if (doubleMultiplier == 0.) {
        *quantizedMultiplier = 0;
        *shift = 0;
        return;
    }
    // frexp yields q in [0.5, 1); scale it up to 31 fractional bits.
    const double q = std::frexp(doubleMultiplier, shift);
    auto qFixed = static_cast<int64_t>(round(q * (1ll << 31)));
    MNN_ASSERT(qFixed <= (1ll << 31));
    // Rounding can push q to exactly 1.0 * 2^31; renormalize into int32 range.
    if (qFixed == (1ll << 31)) {
        qFixed /= 2;
        ++*shift;
    }
    MNN_ASSERT(qFixed <= std::numeric_limits<int32_t>::max());
    *quantizedMultiplier = static_cast<int32_t>(qFixed);
}
// As QuantizeMultiplier, but asserts the multiplier is > 1 so the resulting
// exponent is a (non-negative) left shift.
inline void QuantizeMultiplierGreaterThanOne(double doubleMultiplier, int32_t* quantizedMultiplier, int* leftShift) {
    MNN_ASSERT(doubleMultiplier > 1.);
    QuantizeMultiplier(doubleMultiplier, quantizedMultiplier, leftShift);
    MNN_ASSERT(*leftShift >= 0);
}
// Folds the softmax beta and the input scale into one fixed-point multiplier,
// clamped so it stays representable in Q31.
inline void PreprocessSoftmaxScaling(double beta, double inputScale, int inputIntegerBits, int32_t* quantizedMultiplier,
                                     int* leftShift) {
    const double inputBetaRealMultiplier =
        std::min(beta * inputScale * (1 << (31 - inputIntegerBits)), (1ll << 31) - 1.0);
    QuantizeMultiplierGreaterThanOne(inputBetaRealMultiplier, quantizedMultiplier, leftShift);
}
// Computes the [actMin, actMax] clamp interval, in uint8 quantized units, that
// implements the given fused activation for an output with the given zero
// point and scale.
inline void CalculateActivationRangeUint8(FusedActivation activation, int outputZeropoint, float inputScale,
                                          int32_t* actMin, int32_t* actMax) {
    const int32_t qmin = std::numeric_limits<uint8_t>::min();
    const int32_t qmax = std::numeric_limits<uint8_t>::max();
    const auto scale = inputScale;
    const auto zeroPoint = outputZeropoint;
    // Maps a real value onto the quantized grid.
    auto quantize = [scale, zeroPoint](float f) { return zeroPoint + static_cast<int32_t>(round(f / scale)); };
    if (activation == FusedActivation_kTfLiteActRelu) {
        *actMin = std::max(qmin, quantize(0.0));
        *actMax = qmax;
    } else if (activation == FusedActivation_kTfLiteActRelu6) {
        *actMin = std::max(qmin, quantize(0.0));
        *actMax = std::min(qmax, quantize(6.0));
    } else if (activation == FusedActivation_kTfLiteActRelu1) {
        *actMin = std::max(qmin, quantize(-1.0));
        *actMax = std::min(qmax, quantize(1.0));
    } else {
        // No activation: the full uint8 range is admissible.
        *actMin = qmin;
        *actMax = qmax;
    }
}
// As QuantizeMultiplier, but asserts the multiplier is in (0, 1) so the
// exponent is a right shift, returned as a non-negative shift count.
inline void QuantizeMultiplierSmallerThanOne(double doubleMultiplier, int32_t* quantizedMultiplier, int* rightShift) {
    MNN_ASSERT(doubleMultiplier < 1.);
    MNN_ASSERT(doubleMultiplier > 0.);
    int shift;
    QuantizeMultiplier(doubleMultiplier, quantizedMultiplier, &shift);
    MNN_ASSERT(shift <= 0);
    *rightShift = -shift;
}
// Width, in real-valued units, of one quantization step of type T over the
// range [rangeMin, rangeMax].
template <class T>
float FloatForOneQuantizedLevel(float rangeMin, float rangeMax) {
    const int64_t span = static_cast<int64_t>(std::numeric_limits<T>::max()) -
                         static_cast<int64_t>(std::numeric_limits<T>::min());
    return (rangeMax - rangeMin) / span;
}
// Given the quantization ranges of two multiplicands (types T1, T2), derives
// the widest real-valued range [*minC, *maxC] their product can occupy when
// stored as T3.
template <class T1, class T2, class T3>
void QuantizationRangeForMultiplication(float minA, float maxA, float minB, float maxB, float* minC, float* maxC) {
    const float aFloatForOneQuantLevel = FloatForOneQuantizedLevel<T1>(minA, maxA);
    const float bFloatForOneQuantLevel = FloatForOneQuantizedLevel<T2>(minB, maxB);
    const int64_t cHighest = static_cast<int64_t>(std::numeric_limits<T3>::max());
    const int64_t cLowest = static_cast<int64_t>(std::numeric_limits<T3>::min());
    // One output step equals the product of one step in each input grid.
    const float cFloatForOneQuantLevel = aFloatForOneQuantLevel * bFloatForOneQuantLevel;
    *minC = cFloatForOneQuantLevel * cLowest;
    *maxC = cFloatForOneQuantLevel * cHighest;
}
// Maps a real value onto the quantized grid of T over [rangeMin, rangeMax]
// WITHOUT clamping; the result may fall outside T's representable range.
template <class T>
int64_t FloatToQuantizedUnclamped(float input, float rangeMin, float rangeMax) {
    // NOTE(review): this cast goes through double before int64; exact for
    // 8/16/32-bit T but lossy if T were 64-bit -- confirm T is never 64-bit.
    const int64_t lowestQuantized = static_cast<double>(std::numeric_limits<T>::min());
    if (rangeMin == rangeMax) {
        return lowestQuantized;
    }
    const int numberOfBits = sizeof(T) * 8;
    const int64_t numberOfSteps = static_cast<int64_t>(1) << numberOfBits;
    // Stretch the range slightly so the top code maps exactly onto rangeMax.
    const double rangeAdjust = (numberOfSteps / (numberOfSteps - 1.0));
    const double range = ((rangeMax - rangeMin) * rangeAdjust);
    const double rangeScale = (numberOfSteps / range);
    int64_t quantized = (round(input * rangeScale) - round(rangeMin * rangeScale));
    quantized += lowestQuantized;
    return quantized;
}
// Faster variant with the scale folded into a single expression.
// NOTE(review): the template parameter T is unused and the arithmetic is
// hard-coded for a 32-bit target grid -- confirm this is only used for int32.
template <class T>
int64_t FloatToQuantizedUnclampedOpt(float input, float rangeMin, float rangeMax) {
    const double rangeScale = (((static_cast<int64_t>(1) << 32) - 1.0) / (rangeMax - rangeMin));
    int64_t quantized = (round(input * rangeScale) - round(rangeMin * rangeScale));
    quantized += -(static_cast<int64_t>(1) << 31);
    return quantized;
}
// Quantizes `input` into type T over [rangeMin, rangeMax], clamping the result
// into T's representable range.
template <class T>
T FloatToQuantized(float input, float rangeMin, float rangeMax) {
    if (std::is_same<T, float>::value) {
        // Specialization for float. This is used in reference implementation
        // for float which is useful to compare performance between float
        // and quantized type.
        return input;
    }
    int64_t quantized = FloatToQuantizedUnclamped<T>(input, rangeMin, rangeMax);
    const int64_t lowestQuantized = static_cast<int64_t>(std::numeric_limits<T>::min());
    const int64_t highestQuantized = static_cast<int64_t>(std::numeric_limits<T>::max());
    // Clamp into T's range before narrowing.
    quantized = std::max(quantized, lowestQuantized);
    quantized = std::min(quantized, highestQuantized);
    return static_cast<T>(static_cast<int32_t>(quantized));
}
// Inverse of FloatToQuantized: maps a T quantized code back to its real value
// over [rangeMin, rangeMax].
template <class T>
float QuantizedToFloat(T input, float rangeMin, float rangeMax) {
    if (std::is_same<T, float>::value) {
        // Specialization for float. This is used in reference implementation
        // for float which is useful to compare performance between float
        // and quantized type.
        return input;
    }
    // Degenerate range: every code denotes the same value.
    if (rangeMin == rangeMax) {
        return rangeMin;
    }
    const int numberOfBits = sizeof(T) * 8;
    const int64_t numberOfSteps = static_cast<int64_t>(1) << numberOfBits;
    // Same range-stretching as FloatToQuantizedUnclamped, inverted.
    const double rangeAdjust = (numberOfSteps / (numberOfSteps - 1.0));
    const double range = ((rangeMax - rangeMin) * rangeAdjust);
    const double rangeScale = (range / numberOfSteps);
    const int64_t lowestQuantized = static_cast<int64_t>(std::numeric_limits<T>::min());
    const double offsetInput = static_cast<double>(input) - lowestQuantized;
    // For compatibility with DEQUANTIZE_WITH_EIGEN, we should convert
    // rangeScale to a float, otherwise rangeMinRounded might be slightly
    // different.
    const double rangeMinRounded = round(rangeMin / static_cast<float>(rangeScale)) * static_cast<float>(rangeScale);
    const double result = rangeMinRounded + (offsetInput * rangeScale);
    return static_cast<float>(result);
}
// Faster dequantization variant.
// NOTE(review): numberOfBits is taken from int32_t, not from T -- confirm this
// is only instantiated for 32-bit inputs.
template <class T>
float QuantizedToFloatOpt(T input, float rangeMin, float rangeMax) {
    if (std::is_same<T, float>::value) {
        // Specialization for float. This is used in reference implementation
        // for float which is useful to compare performance between float
        // and quantized type.
        return input;
    }
    if (rangeMin == rangeMax) {
        return rangeMin;
    }
    const int numberOfBits = sizeof(int32_t) * 8;
    const int64_t numberOfSteps = static_cast<int64_t>(1) << numberOfBits;
    const int64_t lowestQuantized = static_cast<int64_t>(1) << (numberOfBits - 1);
    const double rangeScale = ((rangeMax - rangeMin) / (numberOfSteps - 1.0));
    const double result = rangeMin + ((input + lowestQuantized) * rangeScale);
    return static_cast<float>(result);
}
// Converts one quantized value from the (T1, [minInput, maxInput]) grid to the
// (T2, [minNew, maxNew]) grid by round-tripping through float.
template <class T1, class T2>
inline T2 RequantizeInNewRange(T1 input, float minInput, float maxInput, float minNew, float maxNew) {
    const float inputFloat = QuantizedToFloat<T1>(input, minInput, maxInput);
    T2 result = FloatToQuantized<T2>(inputFloat, minNew, maxNew);
    return result;
}
// Because converting 32-bit accumulated results down to eight bit is a common
// case, we have a specialized code path to handle it as efficiently as
// possible using only fixed-point math for the inner loop.
inline void RequantizeManyInNewRangeReference(const int32_t* input, int64_t count, float minInput, float maxInput,
                                              float minOutput, float maxOutput, uint8_t* output) {
    // Initially we calculate all the constants we need once, before we go into
    // the inner loop. If this is updated, also update the Eigen version.
    const int fpShift = 16;
    const float inputRange = maxInput - minInput;
    const float outputRange = maxOutput - minOutput;
    // Guard every division by outputRange against a degenerate (zero) range.
    const float recipOutputRange = outputRange == 0.0 ? 0.0 : (255.0 / outputRange);
    const float inputRezero = (minInput + maxInput) / 2.0;
    const int64_t rangeScaleFp =
        outputRange == 0.0 ? 0.0 : static_cast<int64_t>(255.0 * (1 << fpShift) * inputRange / outputRange);
    const int64_t inputOffsetFp = static_cast<int64_t>(inputRezero * recipOutputRange * (1 << fpShift));
    const int64_t outputOffsetFp =
        outputRange == 0.0 ? 0 : static_cast<int64_t>((1 << fpShift) * (minOutput * 255.0) / outputRange);
    const int64_t roundingDelta = 1 << (fpShift - 1);
    // Inside this loop we just do minimal adds, multiplies, and shifts, in a way
    // that could be easily adapted for a SIMD implementation. It should also be
    // possible to perform all the calculations in 32-bit rather than 64, but
    // that's not been implemented yet.
    // NOTE(review): `index` is size_t while `count` is int64_t; a negative
    // count would wrap. Callers appear to pass non-negative sizes -- confirm.
    for (size_t index = 0; index < count; ++index) {
        const int64_t inputValue = static_cast<int64_t>(input[index]);
        const int64_t fpValue = ((inputValue * rangeScaleFp) >> 32) + inputOffsetFp;
        const int64_t offsetIntermediate = fpValue - outputOffsetFp;
        const int64_t roundIntermediate = offsetIntermediate + roundingDelta;
        int64_t quantizedInt64 = roundIntermediate >> fpShift;
        // Clamp into the uint8 range before narrowing.
        quantizedInt64 = std::max(quantizedInt64, int64_t(0));
        quantizedInt64 = std::min(quantizedInt64, int64_t(255));
        output[index] = static_cast<uint8_t>(static_cast<int32_t>(quantizedInt64));
    }
}
// Another common case is converting eight bit inputs up to thirty two bits, so
// we have specialized fixed-point code to accelerate that. There is also a NEON
// version for ARM devices below.
inline void RequantizeManyInNewRange8To32BitReference(const uint8_t* input, int64_t count, float minInput,
                                                      float maxInput, float minOutput, float maxOutput,
                                                      int32_t* output) {
    // Derive an affine map output = code0 + input * mult from where input
    // codes 0 and 1 land in the output grid.
    const float code0Float = QuantizedToFloat<uint8_t>(0, minInput, maxInput);
    const float code1Float = QuantizedToFloat<uint8_t>(1, minInput, maxInput);
    const int64_t code0Int64 = FloatToQuantizedUnclamped<int32_t>(code0Float, minOutput, maxOutput);
    const int64_t code1Int64 = FloatToQuantizedUnclamped<int32_t>(code1Float, minOutput, maxOutput);
    const int32_t multInt32 = static_cast<int32_t>(code1Int64 - code0Int64);
    const int64_t lowestQuantized = static_cast<int64_t>(std::numeric_limits<int32_t>::min());
    const int64_t highestQuantized = static_cast<int64_t>(std::numeric_limits<int32_t>::max());
    for (int64_t i = 0; i < count; ++i) {
        const int64_t inputValue = static_cast<int64_t>(input[i]);
        int64_t outputValue = code0Int64 + (inputValue * multInt32);
        // Clamp into int32 range before narrowing.
        outputValue = std::max(outputValue, lowestQuantized);
        outputValue = std::min(outputValue, highestQuantized);
        output[i] = static_cast<int32_t>(outputValue);
    }
}
// Generic scalar fallback: requantizes `count` values one at a time through
// float. The specialization below provides a fixed-point fast path for
// uint8 -> int32.
template <class T1, class T2>
inline void RequantizeManyInNewRange(const T1* input, int64_t count, float minInput, float maxInput, float minOutput,
                                     float maxOutput, T2* output) {
    for (size_t index = 0; index < count; ++index) {
        const float inputFloat = QuantizedToFloat<T1>(input[index], minInput, maxInput);
        output[index] = FloatToQuantized<T2>(inputFloat, minOutput, maxOutput);
    }
}
// template <>
// inline void RequantizeManyInNewRange<int32_t, uint8_t>(
// const int32_t* input, int64_t count, float minInput, float
// maxInput, float minOutput, float maxOutput, uint8_t*
// output) {
// RequantizeManyInNewRangeReference(input, count, minInput, maxInput,
// minOutput, maxOutput, output);
// }
// uint8 -> int32 specialization dispatching to the fixed-point fast path.
template <>
inline void RequantizeManyInNewRange<uint8_t, int32_t>(const uint8_t* input, int64_t count, float minInput,
                                                       float maxInput, float minOutput, float maxOutput,
                                                       int32_t* output) {
    RequantizeManyInNewRange8To32BitReference(input, count, minInput, maxInput, minOutput, maxOutput, output);
}
// Scans an int32 tensor and writes the min/max values actually present,
// i.e. the dynamic range occupied by the quantized data.
inline void CalculateUsedRange(Tensor* input, int32_t* usedMinQuantized, int32_t* usedMaxQuantized) {
    // Element count is the product of all dimension extents.
    int inputDataSize = 1;
    for (int i = 0; i < input->buffer().dimensions; i++) {
        inputDataSize *= input->buffer().dim[i].extent;
    }
    int32_t* inputData = (int32_t*)input->buffer().host;
    // Seed with the first element, then widen over the rest.
    usedMinQuantized[0] = inputData[0];
    usedMaxQuantized[0] = inputData[0];
    for (int i = 0; i < inputDataSize; i++) {
        if (inputData[i] < usedMinQuantized[0]) {
            usedMinQuantized[0] = inputData[i];
        }
        if (inputData[i] > usedMaxQuantized[0]) {
            usedMaxQuantized[0] = inputData[i];
        }
    }
}
// Picks the output range for a quantized add. The range must be symmetrical
// around zero (so 0 + 0 = 0), wide enough to hold the larger argument range,
// and leave headroom against overflow; with eight-bit inputs and a 32-bit
// output, scaling the largest input magnitude by 2^17 leaves the bottom
// 32 - 17 = 15 bits for accumulated results.
inline void GetOutputMinAndMaxForQuantizedAdd(float inputMin, float inputMax, float smallerInputMin,
                                              float smallerInputMax, float* outputMin, float* outputMax) {
    const float largestMagnitude =
        std::max(std::max(inputMax, -inputMin), std::max(smallerInputMax, -smallerInputMin));
    *outputMax = largestMagnitude * (1 << 17);
    *outputMin = -(*outputMax);
}
} // namespace MNN
#endif /* CPUQuantizedBiasAdd_hpp */
| 6,595 |
3,348 | <filename>heron/api/src/java/org/apache/heron/api/topology/SpoutDeclarer.java
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.heron.api.topology;
import java.util.Map;
import org.apache.heron.api.generated.TopologyAPI;
import org.apache.heron.api.spout.IRichSpout;
/**
 * Declarer for a spout: captures the streams/fields the spout declares and
 * serializes the component into the protobuf topology.
 */
public class SpoutDeclarer extends BaseComponentDeclarer<SpoutDeclarer> {
  // Collects the stream schemas produced by the spout's declareOutputFields().
  private OutputFieldsGetter output;

  public SpoutDeclarer(String name, IRichSpout spout, Number taskParallelism) {
    super(name, spout, taskParallelism);
    output = new OutputFieldsGetter();
    spout.declareOutputFields(output);
  }

  @Override
  public SpoutDeclarer returnThis() {
    return this;
  }

  /**
   * Serializes this spout -- the component config plus one OutputStream per
   * declared stream id -- into the protobuf topology builder.
   */
  public void dump(TopologyAPI.Topology.Builder bldr) {
    TopologyAPI.Spout.Builder spoutBldr = TopologyAPI.Spout.newBuilder();
    TopologyAPI.Component.Builder compBldr = TopologyAPI.Component.newBuilder();
    super.dump(compBldr);
    spoutBldr.setComp(compBldr);
    Map<String, TopologyAPI.StreamSchema.Builder> outs = output.getFieldsDeclaration();
    for (Map.Entry<String, TopologyAPI.StreamSchema.Builder> entry : outs.entrySet()) {
      TopologyAPI.OutputStream.Builder obldr = TopologyAPI.OutputStream.newBuilder();
      TopologyAPI.StreamId.Builder sbldr = TopologyAPI.StreamId.newBuilder();
      sbldr.setId(entry.getKey());
      sbldr.setComponentName(getName());
      obldr.setStream(sbldr);
      obldr.setSchema(entry.getValue());
      spoutBldr.addOutputs(obldr);
    }
    bldr.addSpouts(spoutBldr);
  }
}
| 754 |
6,034 | <reponame>ssSlowDown/onemall<filename>trade-service-project/trade-service-app/src/main/java/cn/iocoder/mall/tradeservice/dal/mysql/dataobject/order/TradeOrderLogisticsDO.java
package cn.iocoder.mall.tradeservice.dal.mysql.dataobject.order;
import cn.iocoder.mall.mybatis.core.dataobject.DeletableDO;
import cn.iocoder.mall.tradeservice.enums.logistics.LogisticsDeliveryTypeEnum;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.extension.handlers.FastjsonTypeHandler;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;
import java.util.List;
/**
 * Logistics information of a trade order.
 *
 * Persisted in the {@code logistics_order} table.
 */
@TableName(value = "logistics_order")
@Data
@EqualsAndHashCode(callSuper = true)
@Accessors(chain = true)
public class TradeOrderLogisticsDO extends DeletableDO {

    /**
     * Logistics order number (primary key).
     */
    private Integer id;
    /**
     * Delivery type.
     *
     * Enum: {@link LogisticsDeliveryTypeEnum}
     */
    private Integer deliveryType;
    /**
     * Express (logistics) company id.
     *
     * Uses the DataDict data dictionary, key {@code EXPRESS}.
     */
    private Integer expressId;
    /**
     * Express company tracking number.
     */
    private String expressNo;
    /**
     * Trade order number.
     *
     * Foreign key: {@link TradeOrderDO#getId()}
     */
    private Integer tradeOrderId;
    /**
     * List of shipped items, serialized as JSON via {@link FastjsonTypeHandler}.
     */
    @TableField(typeHandler = FastjsonTypeHandler.class)
    private List<Item> items;

    /**
     * A single shipped item.
     */
    @Data
    public static class Item {

        /**
         * Product SKU id.
         */
        private Integer skuId;
        /**
         * Quantity shipped.
         */
        private Integer quantity;
    }
}
| 819 |
385 | <reponame>vector-ai/vectorhub<filename>tests/test_encode_chunk_documents.py<gh_stars>100-1000
import pytest
from vectorhub.encoders.text.tfhub import USE2Vec
# Encoder shared by the tests below; its __name__ feeds into the vector field
# name asserted later ("text_sample_chunkvector_").
enc = USE2Vec()
enc.__name__ = "sample"
@pytest.fixture
def chunk_docs():
    """Two chunk documents, each holding a list of {"text": ...} chunks."""
    def make_doc(*texts):
        return {"value": [{"text": t} for t in texts]}

    return [make_doc("hey", "weirdo"), make_doc("hello", "stranger")]
def assert_vectors_in_docs(docs):
    """Assert that every chunk document carries the encoder's vector field.

    Args:
        docs: chunk documents whose first chunk under "value" is expected to
            contain a "text_sample_chunkvector_" key after encoding.

    Raises:
        AssertionError: if any document is missing the vector field.
    """
    # NOTE(review): only the first chunk of each document is inspected; extend
    # the check if per-chunk coverage is ever required.
    for d in docs:
        # Fixed typo in the failure message ("misssing" -> "missing").
        assert "text_sample_chunkvector_" in d['value'][0], "missing vector"
def test_encode_documents_in_docs(chunk_docs):
    # Encode the "text" field of every chunk in each document's "value" list,
    # then verify the encoder attached the expected vector field.
    chunk_docs = enc.encode_chunk_documents(chunk_field="value", fields=["text"], documents=chunk_docs)
    assert_vectors_in_docs(chunk_docs)
| 474 |
2,504 | <reponame>dujianxin/Windows-universal-samples
#include "pch.h"
#include "MrcVideoEffectDefinition.h"
using namespace MrcEffectDefinitions;
// Initializes every mixed-reality-capture (MRC) video effect property to its
// declared default; callers override individual settings afterwards.
MrcVideoEffectDefinition::MrcVideoEffectDefinition()
{
    StreamType = DefaultStreamType;
    HologramCompositionEnabled = DefaultHologramCompositionEnabled;
    RecordingIndicatorEnabled = DefaultRecordingIndicatorEnabled;
    VideoStabilizationEnabled = DefaultVideoStabilizationEnabled;
    VideoStabilizationBufferLength = DefaultVideoStabilizationBufferLength;
    GlobalOpacityCoefficient = DefaultGlobalOpacityCoefficient;
    BlankOnProtectedContent = DefaultBlankOnProtectedContent;
    ShowHiddenMesh = DefaultShowHiddenMesh;
    OutputSize = DefaultOutputSize;
    PreferredHologramPerspective = DefaultPreferredHologramPerspective;
}
| 253 |
742 | <gh_stars>100-1000
/*********************************************************************
* Software License Agreement (BSD License)
*
 *  Copyright (c) 2012, <NAME>, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of the Willow Garage nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*********************************************************************/
#include "theora_image_transport/theora_subscriber.h"
#include <cv_bridge/cv_bridge.h>
#include <sensor_msgs/image_encodings.h>
#include <opencv2/imgproc/imgproc.hpp>
#include <boost/scoped_array.hpp>
#include <vector>
using namespace std;
namespace theora_image_transport {
// Constructs a subscriber with no decoder state; real initialization happens
// lazily in internalCallback() once Theora header packets arrive.
TheoraSubscriber::TheoraSubscriber()
  : pplevel_(0),
    received_header_(false),
    received_keyframe_(false),
    decoding_context_(NULL),
    setup_info_(NULL)
{
  th_info_init(&header_info_);
  th_comment_init(&header_comment_);
}

// Releases the libtheora decoder, setup and header resources.
TheoraSubscriber::~TheoraSubscriber()
{
  if (decoding_context_) th_decode_free(decoding_context_);
  th_setup_free(setup_info_); // setup_info_ may be NULL; th_setup_free accepts that
  th_info_clear(&header_info_);
  th_comment_clear(&header_comment_);
}
// Registers the subscription for the encoded packet stream and sets up a
// dynamic_reconfigure server controlling the post-processing level.
void TheoraSubscriber::subscribeImpl(ros::NodeHandle &nh, const std::string &base_topic, uint32_t queue_size,
                                     const Callback &callback, const ros::VoidPtr &tracked_object,
                                     const image_transport::TransportHints &transport_hints)
{
  // queue_size doesn't account for the 3 header packets, so we correct (with a little extra) here.
  queue_size += 4;
  typedef image_transport::SimpleSubscriberPlugin<theora_image_transport::Packet> Base;
  Base::subscribeImpl(nh, base_topic, queue_size, callback, tracked_object, transport_hints);

  // Set up reconfigure server for this topic
  reconfigure_server_ = boost::make_shared<ReconfigureServer>(this->nh());
  ReconfigureServer::CallbackType f = boost::bind(&TheoraSubscriber::configCb, this, _1, _2);
  reconfigure_server_->setCallback(f);
}
// dynamic_reconfigure callback: applies a new post-processing level. When a
// decoder already exists the level is validated/clamped against its maximum;
// otherwise it is stored and applied once the decoder is created.
void TheoraSubscriber::configCb(Config& config, uint32_t level)
{
  if (decoding_context_ && pplevel_ != config.post_processing_level) {
    pplevel_ = updatePostProcessingLevel(config.post_processing_level);
    config.post_processing_level = pplevel_; // In case more than PPLEVEL_MAX
  }
  else
    pplevel_ = config.post_processing_level;
}
// Clamps `level` to the decoder's supported maximum and applies it.
// Returns the level actually in effect (the previous value if applying failed).
int TheoraSubscriber::updatePostProcessingLevel(int level)
{
  int pplevel_max;
  int err = th_decode_ctl(decoding_context_, TH_DECCTL_GET_PPLEVEL_MAX, &pplevel_max, sizeof(int));
  if (err)
    ROS_WARN("Failed to get maximum post-processing level, error code %d", err);
  else if (level > pplevel_max) {
    ROS_WARN("Post-processing level %d is above the maximum, clamping to %d", level, pplevel_max);
    level = pplevel_max;
  }

  err = th_decode_ctl(decoding_context_, TH_DECCTL_SET_PPLEVEL, &level, sizeof(int));
  if (err) {
    ROS_ERROR("Failed to set post-processing level, error code %d", err);
    return pplevel_; // old value
  }
  return level;
}
// Converts a ROS Packet message into a libtheora ogg_packet.
// NOTE: allocates ogg.packet with new[]; the CALLER owns that buffer and is
// responsible for deleting it (see the scoped_array guard in internalCallback).
void TheoraSubscriber::msgToOggPacket(const theora_image_transport::Packet &msg, ogg_packet &ogg)
{
  ogg.bytes      = msg.data.size();
  ogg.b_o_s      = msg.b_o_s;
  ogg.e_o_s      = msg.e_o_s;
  ogg.granulepos = msg.granulepos;
  ogg.packetno   = msg.packetno;
  ogg.packet = new unsigned char[ogg.bytes];
  memcpy(ogg.packet, &msg.data[0], ogg.bytes);
}
// Decodes one Theora packet from the wire. Handles (re)initialization on
// stream start, header assembly, keyframe synchronization, and finally frame
// decoding plus YCbCr -> BGR conversion before invoking the user callback.
void TheoraSubscriber::internalCallback(const theora_image_transport::PacketConstPtr& message, const Callback& callback)
{
  /// @todo Break this function into pieces
  ogg_packet oggpacket;
  msgToOggPacket(*message, oggpacket);
  boost::scoped_array<unsigned char> packet_guard(oggpacket.packet); // Make sure packet memory gets deleted

  // Beginning of logical stream flag means we're getting new headers
  if (oggpacket.b_o_s == 1) {
    // Clear all state, everything we knew is wrong
    received_header_ = false;
    received_keyframe_ = false;
    if (decoding_context_) {
      th_decode_free(decoding_context_);
      decoding_context_ = NULL;
    }
    th_setup_free(setup_info_);
    setup_info_ = NULL;
    th_info_clear(&header_info_);
    th_info_init(&header_info_);
    th_comment_clear(&header_comment_);
    th_comment_init(&header_comment_);
    latest_image_.reset();
  }

  // Decode header packets until we get the first video packet
  if (received_header_ == false) {
    int rval = th_decode_headerin(&header_info_, &header_comment_, &setup_info_, &oggpacket);
    switch (rval) {
      case 0:
        // We've received the full header; this is the first video packet.
        decoding_context_ = th_decode_alloc(&header_info_, setup_info_);
        if (!decoding_context_) {
          ROS_ERROR("[theora] Decoding parameters were invalid");
          return;
        }
        received_header_ = true;
        pplevel_ = updatePostProcessingLevel(pplevel_);
        break; // Continue on the video decoding
      case TH_EFAULT:
        ROS_WARN("[theora] EFAULT when processing header packet");
        return;
      case TH_EBADHEADER:
        ROS_WARN("[theora] Bad header packet");
        return;
      case TH_EVERSION:
        ROS_WARN("[theora] Header packet not decodable with this version of libtheora");
        return;
      case TH_ENOTFORMAT:
        ROS_WARN("[theora] Packet was not a Theora header");
        return;
      default:
        // If rval > 0, we successfully received a header packet.
        if (rval < 0)
          ROS_WARN("[theora] Error code %d when processing header packet", rval);
        return;
    }
  }

  // Wait for a keyframe if we haven't received one yet - delta frames are useless to us in that case
  received_keyframe_ = received_keyframe_ || (th_packet_iskeyframe(&oggpacket) == 1);
  if (!received_keyframe_)
    return;

  // We have a video packet we can handle, let's decode it
  int rval = th_decode_packetin(decoding_context_, &oggpacket, NULL);
  switch (rval) {
    case 0:
      break; // Yay, we got a frame. Carry on below.
    case TH_DUPFRAME:
      // Video data hasn't changed, so we update the timestamp and reuse the last received frame.
      ROS_DEBUG("[theora] Got a duplicate frame");
      if (latest_image_) {
        latest_image_->header = message->header;
        callback(latest_image_);
      }
      return;
    case TH_EFAULT:
      ROS_WARN("[theora] EFAULT processing video packet");
      return;
    case TH_EBADPACKET:
      ROS_WARN("[theora] Packet does not contain encoded video data");
      return;
    case TH_EIMPL:
      ROS_WARN("[theora] The video data uses bitstream features not supported by this version of libtheora");
      return;
    default:
      ROS_WARN("[theora] Error code %d when decoding video packet", rval);
      return;
  }

  // We have a new decoded frame available
  th_ycbcr_buffer ycbcr_buffer;
  th_decode_ycbcr_out(decoding_context_, ycbcr_buffer);

  // Wrap YCbCr channel data into OpenCV format
  th_img_plane &y_plane = ycbcr_buffer[0], &cb_plane = ycbcr_buffer[1], &cr_plane = ycbcr_buffer[2];
  cv::Mat y(y_plane.height, y_plane.width, CV_8UC1, y_plane.data, y_plane.stride);
  cv::Mat cb_sub(cb_plane.height, cb_plane.width, CV_8UC1, cb_plane.data, cb_plane.stride);
  cv::Mat cr_sub(cr_plane.height, cr_plane.width, CV_8UC1, cr_plane.data, cr_plane.stride);

  // Upsample chroma channels
  // NOTE(review): pyrUp doubles each dimension, which assumes half-resolution
  // chroma planes (4:2:0) - confirm against header_info_.pixel_fmt.
  cv::Mat cb, cr;
  cv::pyrUp(cb_sub, cb);
  cv::pyrUp(cr_sub, cr);

  // Merge into interleaved image. Note OpenCV uses YCrCb, so we swap the chroma channels.
  cv::Mat ycrcb, channels[] = {y, cr, cb};
  cv::merge(channels, 3, ycrcb);

  // Convert to BGR color
  cv::Mat bgr, bgr_padded;
  cv::cvtColor(ycrcb, bgr_padded, CV_YCrCb2BGR);

  // Pull out original (non-padded) image region
  bgr = bgr_padded(cv::Rect(header_info_.pic_x, header_info_.pic_y,
                            header_info_.pic_width, header_info_.pic_height));

  latest_image_ = cv_bridge::CvImage(message->header, sensor_msgs::image_encodings::BGR8, bgr).toImageMsg();
  /// @todo Handle RGB8 or MONO8 efficiently
  callback(latest_image_);
}
} //namespace theora_image_transport
| 3,567 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-5jh3-w6q5-9mr3",
"modified": "2022-01-22T00:02:15Z",
"published": "2022-01-15T00:01:53Z",
"aliases": [
"CVE-2022-22056"
],
"details": "The Le-yan dental management system contains a hard-coded credentials vulnerability in the web page source code, which allows an unauthenticated remote attacker to acquire administrator’s privilege and control the system or disrupt service.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2022-22056"
},
{
"type": "WEB",
"url": "https://www.twcert.org.tw/tw/cp-132-5510-45d71-1.html"
}
],
"database_specific": {
"cwe_ids": [
"CWE-798"
],
"severity": "CRITICAL",
"github_reviewed": false
}
} | 364 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.core.startup;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.netbeans.junit.NbTestCase;
import org.netbeans.junit.RandomlyFails;
/**
 * Checks the behaviour of NetBeans logging support: messages emitted through
 * {@code StartLog} (directly or via the {@code org.netbeans.log.startup}
 * logger) must appear both in the in-memory log stream and in the on-disk
 * {@code var/log/messages.log} file.
 */
public class TopLoggingStartLogTest extends NbTestCase {
    private ByteArrayOutputStream w;
    private Handler handler;
    private Logger logger;

    static {
        // Must be set before TopLogging is initialized so start events are printed.
        System.setProperty("org.netbeans.log.startup", "print"); // NOI18N
    }

    public TopLoggingStartLogTest(String testName) {
        super(testName);
    }

    @Override
    protected void setUp() throws Exception {
        clearWorkDir();
        System.setProperty("netbeans.user", getWorkDirPath());

        // initialize logging
        TopLogging.initialize();

        w = new ByteArrayOutputStream() {
            @Override
            public void write(byte[] b, int off, int len) {
                super.write(b, off, len);
            }

            @Override
            public void write(byte[] b) throws IOException {
                super.write(b);
            }

            @Override
            public void write(int b) {
                super.write(b);
            }

            @Override
            public String toString() {
                // Flush pending log records before exposing the buffer content.
                TopLogging.flush(false);
                handler.flush();

                String retValue;
                retValue = super.toString();
                return retValue;
            }
        };

        handler = TopLogging.createStreamHandler(new PrintStream(getStream()));
        logger = Logger.getLogger("");
        Handler[] old = logger.getHandlers();
        // do not remove default handlers from CLIOptions.initialize():
        // for (int i = 0; i < old.length; i++) {
        //     logger.removeHandler(old[i]);
        // }
        logger.addHandler(handler);

        w.reset();
    }

    protected ByteArrayOutputStream getStream() {
        return w;
    }

    @RandomlyFails // NB-Core-Build #1659
    public void testProgress() throws Exception {
        StartLog.logProgress("First visible message");
        assertLoggedEverywhere("@[0-9]+.*First visible message");
    }

    public void testStartEnd() throws Exception {
        StartLog.logStart("run");
        StartLog.logEnd("run");
        assertLoggedEverywhere("@[0-9]+.*run.*started");
        assertLoggedEverywhere("@[0-9]+.*run.*finished");
    }

    public void testStartEndToLogger() throws Exception {
        Logger LOG = Logger.getLogger("org.netbeans.log.startup");
        LOG.log(Level.FINE, "start", "run");
        LOG.log(Level.FINE, "end", "run");
        assertLoggedEverywhere("@[0-9]+.*run.*started");
        assertLoggedEverywhere("@[0-9]+.*run.*finished");
    }

    /**
     * Asserts that {@code regex} can be found both in the in-memory log
     * stream and in the messages.log file on disk.
     * (Extracted: this block was previously duplicated six times.)
     *
     * @param regex pattern expected in both log destinations
     * @throws IOException if the on-disk log cannot be read
     */
    private void assertLoggedEverywhere(String regex) throws IOException {
        Pattern p = Pattern.compile(regex);
        Matcher m = p.matcher(getStream().toString());
        if (!m.find()) {
            fail("msg shall be logged: " + getStream().toString());
        }

        String disk = readLog(true);
        Matcher d = p.matcher(disk);
        if (!d.find()) {
            fail("msg shall be logged to file: " + disk);
        }
    }

    /**
     * Reads the whole on-disk log file.
     *
     * @param doFlush whether to flush pending records before reading
     * @return the content of {@code var/log/messages.log}
     * @throws IOException if the file cannot be read
     */
    private String readLog(boolean doFlush) throws IOException {
        if (doFlush) {
            TopLogging.flush(false);
        }

        File log = new File(new File(new File(getWorkDir(), "var"), "log"), "messages.log");
        assertTrue("Log file exists: " + log, log.canRead());

        byte[] arr = new byte[(int) log.length()];
        // try-with-resources: previously the stream leaked if an assertion failed.
        try (FileInputStream is = new FileInputStream(log)) {
            int r = is.read(arr);
            assertEquals("all read", arr.length, r);
        }
        return new String(arr);
    }
}
| 2,802 |
310 | {
"name": "MLB at Bat (iOS)",
"description": "An iPhone baseball app for viewing scores and streaming matches.",
"url": "http://m.mlb.com/apps/atbat"
} | 53 |
348 | <filename>ichnaea/scripts/load_cell_data.py
#!/usr/bin/env python
"""
Import from public cell data into a local dev environment.
Download from https://location.services.mozilla.com/downloads
This has been tested with a differential cell export (~400kB compressed).
A full cell export (~370,000kB) contains unexpected data that will
require code changes to handle gracefully, and may require adjusting
the resources of the development environment.
"""
import argparse
import logging
import os
import os.path
import sys
from ichnaea.conf import settings
from ichnaea.db import db_worker_session
from ichnaea.log import configure_logging
from ichnaea.data.public import read_stations_from_csv
from ichnaea.taskapp.config import init_worker
from ichnaea.util import gzip_open
LOGGER = logging.getLogger(__name__)
def get_eager_celery_app():
    """Returns an eagerly configured celery app.

    Eager mode runs tasks synchronously in-process, so this script works
    without a separate celery worker.
    """
    # We have to import and fix celery settings before importing the celery_app
    # module since that has a side effect of creating the celery app
    from ichnaea.taskapp import settings as celery_settings

    celery_settings.task_always_eager = True
    celery_settings.task_eager_propagates = True

    from ichnaea.taskapp.app import celery_app

    return celery_app
def main(argv, _db=None):
    """Parse arguments and import a public cell export into the local dev DB.

    Returns a process exit code: 0 on success, 1 on usage/environment errors.
    """
    parser = argparse.ArgumentParser(
        prog=argv[0],
        description=(
            "Import from public cell data into a local dev environment. "
            "See https://location.services.mozilla.com/downloads"
        ),
    )
    parser.add_argument("filename", help="Path to the csv.gz import file.")

    args = parser.parse_args(argv[1:])

    # Guard: refuse to run outside a local development environment.
    if not settings("local_dev_env"):
        print("This script can only be run in a local dev environment.")
        print("Set LOCAL_DEV_ENV=True in your environment.")
        return 1

    filename = os.path.abspath(os.path.expanduser(args.filename))
    if not os.path.isfile(filename):
        print("File %s not found." % filename)
        return 1

    configure_logging()

    # Eager celery app executes the import tasks synchronously in this process.
    celery_app = get_eager_celery_app()
    init_worker(celery_app)
    cellarea_queue = celery_app.data_queues["update_cellarea"]

    # commit=False: presumably read_stations_from_csv commits per batch itself
    # - TODO confirm against ichnaea.data.public.
    with db_worker_session(celery_app.db, commit=False) as session:
        with gzip_open(filename, "r") as file_handle:
            read_stations_from_csv(
                session, file_handle, celery_app.redis_client, cellarea_queue
            )

    return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))
| 898 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2014 The Crashpad Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "util/synchronization/semaphore.h"
#include <errno.h>
#include <math.h>
#include <time.h>
#include <chrono>
#include "base/check_op.h"
#include "base/logging.h"
#include "base/posix/eintr_wrapper.h"
#include "util/misc/time.h"
namespace crashpad {
#if defined(OS_ANDROID)
// Android implementation: a counting semaphore built on std::mutex and
// std::condition_variable.
Semaphore::Semaphore(int value) : cv_(), mutex_(), value_(value) {}

Semaphore::~Semaphore() = default;

// Blocks until the count is positive, then decrements it.
void Semaphore::Wait() {
  std::unique_lock<std::mutex> lock(mutex_);
  cv_.wait(lock, [this] { return this->value_ > 0; });
  --value_;
}

// Waits up to `seconds` (infinity means wait forever). Returns true if the
// semaphore was acquired, false on timeout.
bool Semaphore::TimedWait(double seconds) {
  DCHECK_GE(seconds, 0.0);

  if (isinf(seconds)) {
    Wait();
    return true;
  }

  std::unique_lock<std::mutex> lock(mutex_);
  if (!cv_.wait_for(lock, std::chrono::duration<double>(seconds), [this] {
        return this->value_ > 0;
      })) {
    return false;
  }
  --value_;
  return true;
}

// Increments the count and wakes one waiter.
void Semaphore::Signal() {
  std::lock_guard<std::mutex> lock(mutex_);
  ++value_;
  cv_.notify_one();
}
#elif !defined(OS_APPLE)
// Generic POSIX implementation: a thin wrapper over sem_t.
Semaphore::Semaphore(int value) {
  PCHECK(sem_init(&semaphore_, 0, value) == 0) << "sem_init";
}

Semaphore::~Semaphore() {
  PCHECK(sem_destroy(&semaphore_) == 0) << "sem_destroy";
}

void Semaphore::Wait() {
  PCHECK(HANDLE_EINTR(sem_wait(&semaphore_)) == 0) << "sem_wait";
}

// Waits up to `seconds` (infinity means wait forever). Returns true if the
// semaphore was acquired, false on timeout.
bool Semaphore::TimedWait(double seconds) {
  DCHECK_GE(seconds, 0.0);

  if (isinf(seconds)) {
    Wait();
    return true;
  }

  timespec current_time;
  if (clock_gettime(CLOCK_REALTIME, &current_time) != 0) {
    PLOG(ERROR) << "clock_gettime";
    return false;
  }
  // sem_timedwait() takes an absolute CLOCK_REALTIME deadline, so convert the
  // relative timeout into one.
  timespec timeout;
  timeout.tv_sec = seconds;
  timeout.tv_nsec = (seconds - trunc(seconds)) * 1E9;
  AddTimespec(current_time, timeout, &timeout);
  int rv = HANDLE_EINTR(sem_timedwait(&semaphore_, &timeout));
  PCHECK(rv == 0 || errno == ETIMEDOUT) << "sem_timedwait";
  return rv == 0;
}

void Semaphore::Signal() {
  PCHECK(sem_post(&semaphore_) == 0) << "sem_post";
}
#endif // OS_ANDROID
} // namespace crashpad
| 1,007 |
988 | <filename>src/filter_tree/filter_tree_utils.h
/*
* Copyright 2018-2021 Redis Labs Ltd. and Contributors
*
* This file is available under the Redis Labs Source Available License Agreement
*/
#pragma once
#include "filter_tree.h"
/* True when `filter` is an IN(...) predicate node.
 * NOTE(review): semantics inferred from the name - confirm in filter_tree_utils.c. */
bool isInFilter(const FT_FilterNode *filter);

/* Extracts the origin point, radius and point variable name from a distance
 * filter; presumably returns false when the node does not have the expected
 * shape - TODO confirm against the implementation. */
bool extractOriginAndRadius(const FT_FilterNode *filter, SIValue *origin,
							SIValue *radius, char **point);

/* True when `filter` is a distance-based predicate.
 * NOTE(review): semantics inferred from the name - confirm in filter_tree_utils.c. */
bool isDistanceFilter(FT_FilterNode *filter);
| 138 |
547 | <gh_stars>100-1000
from math import atan2
import numpy as np
import numba as nb
from functools import partial
import dask.array as da
from numba import cuda
import xarray as xr
from xrspatial.utils import ngjit
from xrspatial.utils import has_cuda
from xrspatial.utils import cuda_args
from xrspatial.utils import is_cupy_backed
from typing import Optional
# 3rd-party
try:
import cupy
except ImportError:
class cupy(object):
ndarray = False
RADIAN = 180 / np.pi
@ngjit
def _cpu(data):
    # Numba-compiled CPU kernel: per-cell aspect from weighted central
    # differences over a 3x3 neighborhood (the ArcGIS aspect formula; see the
    # reference in aspect()'s docstring). Border cells are left NaN because
    # their neighborhood is incomplete.
    out = np.zeros_like(data, dtype=np.float64)
    out[:] = np.nan
    rows, cols = data.shape
    for y in range(1, rows-1):
        for x in range(1, cols-1):
            # 3x3 window around (y, x):
            #   a b c
            #   d . f
            #   g h i   (the center value itself is not used)
            a = data[y-1, x-1]
            b = data[y-1, x]
            c = data[y-1, x+1]
            d = data[y, x-1]
            f = data[y, x+1]
            g = data[y+1, x-1]
            h = data[y+1, x]
            i = data[y+1, x+1]

            dz_dx = ((c + 2 * f + i) - (a + 2 * d + g)) / 8
            dz_dy = ((g + 2 * h + i) - (a + 2 * b + c)) / 8

            if dz_dx == 0 and dz_dy == 0:
                # flat surface, slope = 0, thus invalid aspect
                out[y, x] = -1.
            else:
                aspect = np.arctan2(dz_dy, -dz_dx) * RADIAN
                # convert to compass direction values (0-360 degrees)
                if aspect < 0:
                    out[y, x] = 90.0 - aspect
                elif aspect > 90.0:
                    out[y, x] = 360.0 - aspect + 90.0
                else:
                    out[y, x] = 90.0 - aspect

    return out
@cuda.jit(device=True)
def _gpu(arr):
    # CUDA device function: aspect of the center cell of a 3x3 window `arr`.
    # Mirrors _cpu(), but with explicitly sized numba literals.
    a = arr[0, 0]
    b = arr[0, 1]
    c = arr[0, 2]
    d = arr[1, 0]
    f = arr[1, 2]
    g = arr[2, 0]
    h = arr[2, 1]
    i = arr[2, 2]

    two = nb.int32(2.)  # reducing size to int8 causes wrong results
    eight = nb.int32(8.)  # reducing size to int8 causes wrong results
    ninety = nb.float32(90.)

    dz_dx = ((c + two * f + i) - (a + two * d + g)) / eight
    dz_dy = ((g + two * h + i) - (a + two * b + c)) / eight

    if dz_dx == 0 and dz_dy == 0:
        # flat surface, slope = 0, thus invalid aspect
        aspect = nb.float32(-1.)  # TODO: return null instead
    else:
        # 57.29578 is degrees-per-radian (RADIAN) as a float32 literal.
        aspect = atan2(dz_dy, -dz_dx) * nb.float32(57.29578)
        # convert to compass direction values (0-360 degrees)
        if aspect < nb.float32(0.):
            aspect = ninety - aspect
        elif aspect > ninety:
            aspect = nb.float32(360.0) - aspect + ninety
        else:
            aspect = ninety - aspect

    if aspect > nb.float32(359.999):  # lame float equality check...
        return nb.float32(0.)
    else:
        return aspect
@cuda.jit
def _run_gpu(arr, out):
    # CUDA kernel: one thread per cell; border cells are skipped and keep
    # whatever value `out` already holds (NaN, set by the caller).
    i, j = cuda.grid(2)
    di = 1
    dj = 1
    if (i-di >= 0 and
            i+di < out.shape[0] and
            j-dj >= 0 and
            j+dj < out.shape[1]):
        out[i, j] = _gpu(arr[i-di:i+di+1, j-dj:j+dj+1])
def _run_cupy(data: cupy.ndarray) -> cupy.ndarray:
    # Launch the CUDA kernel over the whole array. The output is pre-filled
    # with NaN so cells skipped by the kernel (the border) stay NaN.
    griddim, blockdim = cuda_args(data.shape)
    out = cupy.empty(data.shape, dtype='f4')
    out[:] = cupy.nan

    _run_gpu[griddim, blockdim](data, out)
    return out
def _run_dask_cupy(data: da.Array) -> da.Array:
    # NOTE(review): cupy-backed dask is currently unsupported; the raise below
    # makes everything after it unreachable. The map_overlap code is kept as
    # the intended implementation for when the upstream dask bug is fixed.
    msg = 'Upstream bug in dask prevents cupy backed arrays'
    raise NotImplementedError(msg)

    # add any func args
    # TODO: probably needs cellsize args
    _func = partial(_run_cupy)

    out = data.map_overlap(_func,
                           depth=(1, 1),
                           boundary=cupy.nan,
                           dtype=cupy.float32,
                           meta=cupy.array(()))
    return out
def _run_numpy(data: np.ndarray) -> np.ndarray:
    """Compute aspect of a plain NumPy array via the numba CPU kernel."""
    return _cpu(data)
def _run_dask_numpy(data: da.Array) -> da.Array:
    # Apply the CPU kernel per chunk with a 1-cell halo so 3x3 windows can
    # cross chunk boundaries; NaN padding feeds the outermost border.
    _func = partial(_cpu)

    out = data.map_overlap(_func,
                           depth=(1, 1),
                           boundary=np.nan,
                           meta=np.array(()))
    return out
def aspect(agg: xr.DataArray,
           name: Optional[str] = 'aspect') -> xr.DataArray:
    """
    Calculates the aspect value of an elevation aggregate.
    Calculates, for all cells in the array, the downward slope direction
    of each cell based on the elevation of its neighbors in a 3x3 grid.
    The value is measured clockwise in degrees with 0 and 360 at due
    north. Flat areas are given a value of -1. Values along the edges
    are not calculated.
    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, NumPy-backed Dask, or Cupy-backed Dask array
        of elevation values.
    name : str, default='aspect'
        Name of output DataArray.
    Returns
    -------
    aspect_agg : xarray.DataArray of the same type as `agg`
        2D aggregate array of calculated aspect values.
        All other input attributes are preserved.
    References
    ----------
        - arcgis: http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/how-aspect-works.htm#ESRI_SECTION1_4198691F8852475A9F4BC71246579FAA # noqa
    Examples
    --------
    .. plot::
       :include-source:
        import matplotlib.pyplot as plt
        import numpy as np
        import xarray as xr
        from xrspatial import generate_terrain, aspect
        # Generate Example Terrain
        W = 800
        H = 600
        template_terrain = xr.DataArray(np.zeros((H, W)))
        x_range=(-20e6, 20e6)
        y_range=(-20e6, 20e6)
        terrain_agg = generate_terrain(
            template_terrain, x_range=x_range, y_range=y_range, seed=1, zfactor=1000
        )
        # Edit Attributes
        terrain_agg = terrain_agg.assign_attrs(
            {
                'Description': 'Example Terrain',
                'units': 'km',
                'Max Elevation': '4000',
            }
        )
        terrain_agg = terrain_agg.rename({'x': 'lon', 'y': 'lat'})
        terrain_agg = terrain_agg.rename('Elevation')
        # Create Aspect Aggregate Array
        aspect_agg = aspect(agg = terrain_agg, name = 'Aspect')
        # Edit Attributes
        aspect_agg = aspect_agg.assign_attrs(
            {
                'Description': 'Example Aspect',
                'units': 'deg',
            }
        )
        # Plot Terrain
        terrain_agg.plot(cmap = 'terrain', aspect = 2, size = 4)
        plt.title("Terrain")
        plt.ylabel("latitude")
        plt.xlabel("longitude")
        # Plot Aspect
        aspect_agg.plot(aspect = 2, size = 4)
        plt.title("Aspect")
        plt.ylabel("latitude")
        plt.xlabel("longitude")
    .. sourcecode:: python
        >>> print(terrain_agg[200:203, 200:202])
        <xarray.DataArray 'Elevation' (lat: 3, lon: 2)>
        array([[707.57051795, 704.3194383 ],
               [706.36271613, 705.4514285 ],
               [699.46372883, 703.7514251 ]])
        Coordinates:
          * lon      (lon) float64 -9.975e+06 -9.925e+06
          * lat      (lat) float64 -6.633e+06 -6.567e+06 -6.5e+06
        Attributes:
            res:            (50000.0, 66666.66666666667)
            Description:    Example Terrain
            units:          km
            Max Elevation:  4000
    .. sourcecode:: python
        >>> print(aspect_agg[200:203, 200:202])
        <xarray.DataArray 'Aspect' (lat: 3, lon: 2)>
        array([[155.07530658, 146.26526699],
               [194.81685088, 136.55836607],
               [203.14170549, 187.97760934]])
        Coordinates:
          * lon      (lon) float64 -9.975e+06 -9.925e+06
          * lat      (lat) float64 -6.633e+06 -6.567e+06 -6.5e+06
        Attributes:
            res:            (50000.0, 66666.66666666667)
            Description:    Example Aspect
            units:          deg
            Max Elevation:  4000
    """
    # Dispatch on the backing array type of the DataArray.
    # numpy case
    if isinstance(agg.data, np.ndarray):
        out = _run_numpy(agg.data)
    # cupy case
    elif has_cuda() and isinstance(agg.data, cupy.ndarray):
        out = _run_cupy(agg.data)
    # dask + cupy case
    elif has_cuda() and isinstance(agg.data, da.Array) and is_cupy_backed(agg):
        out = _run_dask_cupy(agg.data)
    # dask + numpy case
    elif isinstance(agg.data, da.Array):
        out = _run_dask_numpy(agg.data)
    else:
        raise TypeError('Unsupported Array Type: {}'.format(type(agg.data)))

    # Re-wrap so the input's coords/dims/attrs are preserved on the result.
    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
| 4,361 |
692 | <filename>SingularityService/src/main/java/com/hubspot/singularity/smtp/SingularityMailRecordCleaner.java
package com.hubspot.singularity.smtp;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.singularity.config.SMTPConfiguration;
import com.hubspot.singularity.data.MetadataManager;
import com.hubspot.singularity.scheduler.SingularityLeaderOnlyPoller;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class SingularityMailRecordCleaner extends SingularityLeaderOnlyPoller {
private static final Logger LOG = LoggerFactory.getLogger(
SingularityMailRecordCleaner.class
);
private final MetadataManager metadataManager;
private final Optional<SMTPConfiguration> smtpConfiguration;
@Inject
SingularityMailRecordCleaner(
Optional<SMTPConfiguration> smtpConfiguration,
MetadataManager metadataManager
) {
super(
smtpConfiguration.isPresent()
? Math.max(
smtpConfiguration.get().getRateLimitCooldownMillis(),
smtpConfiguration.get().getRateLimitPeriodMillis()
)
: 0,
TimeUnit.MILLISECONDS
);
this.metadataManager = metadataManager;
this.smtpConfiguration = smtpConfiguration;
}
@Override
public void runActionOnPoll() {
final long start = System.currentTimeMillis();
final long rateLimitExpiresAfter = Math.max(
smtpConfiguration.get().getRateLimitCooldownMillis(),
smtpConfiguration.get().getRateLimitPeriodMillis()
);
LOG.debug("Cleaning stale mail records");
int numCleaned = 0;
int numSeen = 0;
for (String requestId : metadataManager.getRequestsWithMailRecords()) {
for (String emailType : metadataManager.getEmailTypesWithMailRecords(requestId)) {
for (String mailRecordTimestamp : metadataManager.getMailRecords(
requestId,
emailType
)) {
numSeen++;
if (start - Long.parseLong(mailRecordTimestamp) > rateLimitExpiresAfter) {
metadataManager.deleteMailRecord(requestId, emailType, mailRecordTimestamp);
numCleaned++;
}
}
}
}
LOG.debug(
"Cleaned {} of {} mail record timestamps in {}",
numCleaned,
numSeen,
JavaUtils.duration(start)
);
}
}
| 879 |
1,223 | package carpet.mixins;
import carpet.fakes.SpawnHelperInnerInterface;
import carpet.utils.SpawnReporter;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import net.minecraft.entity.SpawnGroup;
import net.minecraft.util.math.GravityField;
import net.minecraft.world.SpawnHelper;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
/**
 * Mixin into {@code SpawnHelper.Info} that scales the per-category mob caps
 * and exposes shadowed internals through {@link SpawnHelperInnerInterface}.
 */
@Mixin(SpawnHelper.Info.class)
public class SpawnHelperInnerMixin implements SpawnHelperInnerInterface
{
    // Shadowed vanilla fields: read-only access to SpawnHelper.Info internals.
    @Shadow @Final private int spawningChunkCount;
    @Shadow @Final private GravityField densityField;
    @Shadow @Final private Object2IntOpenHashMap<SpawnGroup> groupToCount;
    /**
     * Injected at the head of {@code isBelowCap} (cancellable): replaces the
     * vanilla cap check with one based on an exponentially scaled mob cap.
     */
    @Inject(method = "isBelowCap", at = @At("HEAD"), cancellable = true)
    private void changeMobCaps(SpawnGroup entityCategory, CallbackInfoReturnable<Boolean> cir)
    {
        // Scale the vanilla capacity by 2^(mobcap_exponent/4).
        // NOTE(review): if mobcap_exponent is an integer type, /4 truncates here — confirm intended.
        int newCap = (int) ((double)entityCategory.getCapacity()*(Math.pow(2.0,(SpawnReporter.mobcap_exponent/4))));
        // Pro-rate the scaled cap by the number of eligible spawning chunks.
        int i = newCap * spawningChunkCount / SpawnReporter.MAGIC_NUMBER;
        cir.setReturnValue(groupToCount.getInt(entityCategory) < i);
    }
    @Override
    public GravityField getPotentialCalculator()
    {
        return densityField;
    }
    @Override
    public int cmGetChunkCount() {
        return spawningChunkCount;
    }
}
| 531 |
"""Hub for communication with 1-Wire server or mount_dir."""
from __future__ import annotations
import logging
import os
from typing import TYPE_CHECKING
from pi1wire import Pi1Wire
from pyownet import protocol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_IDENTIFIERS,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_NAME,
ATTR_VIA_DEVICE,
CONF_HOST,
CONF_PORT,
CONF_TYPE,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.entity import DeviceInfo
from .const import (
CONF_MOUNT_DIR,
CONF_TYPE_OWSERVER,
CONF_TYPE_SYSBUS,
DEVICE_SUPPORT_OWSERVER,
DEVICE_SUPPORT_SYSBUS,
DOMAIN,
MANUFACTURER_EDS,
MANUFACTURER_HOBBYBOARDS,
MANUFACTURER_MAXIM,
)
from .model import (
OWDeviceDescription,
OWDirectDeviceDescription,
OWServerDeviceDescription,
)
# Couplers expose extra bus branches; each branch is scanned recursively
# during owserver discovery (see OneWireHub._discover_devices_owserver).
DEVICE_COUPLERS = {
    # Family : [branches]
    "1F": ["aux", "main"]
}
# Family codes made by vendors other than Maxim; any other family defaults
# to MANUFACTURER_MAXIM when building device info.
DEVICE_MANUFACTURER = {
    "7E": MANUFACTURER_EDS,
    "EF": MANUFACTURER_HOBBYBOARDS,
}
_LOGGER = logging.getLogger(__name__)
def _is_known_owserver_device(device_family: str, device_type: str) -> bool:
    """Check if device family/type is known to the library."""
    # EDS ("7E") and HobbyBoards ("EF") report a generic family code, so
    # support is looked up per device type rather than per family.
    vendor_specific_families = {"7E", "EF"}
    if device_family not in vendor_specific_families:
        return device_family in DEVICE_SUPPORT_OWSERVER
    return device_type in DEVICE_SUPPORT_OWSERVER[device_family]
class OneWireHub:
    """Hub to communicate with SysBus or OWServer."""
    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize."""
        self.hass = hass
        # Connection type: CONF_TYPE_SYSBUS or CONF_TYPE_OWSERVER; set by initialize().
        self.type: str | None = None
        # Direct-bus proxy; set by check_mount_dir() when using SysBus.
        self.pi1proxy: Pi1Wire | None = None
        # owserver network proxy; set by connect() when using OWServer.
        self.owproxy: protocol._Proxy | None = None
        # Cached discovery result; populated once by discover_devices().
        self.devices: list[OWDeviceDescription] | None = None
    async def connect(self, host: str, port: int) -> None:
        """Connect to the owserver host.

        Raises CannotConnect if the server is unreachable.
        """
        try:
            # protocol.proxy performs blocking network I/O -> run in executor.
            self.owproxy = await self.hass.async_add_executor_job(
                protocol.proxy, host, port
            )
        except protocol.ConnError as exc:
            raise CannotConnect from exc
    async def check_mount_dir(self, mount_dir: str) -> None:
        """Test that the mount_dir is a valid path.

        Raises InvalidPath if mount_dir is not an existing directory.
        """
        if not await self.hass.async_add_executor_job(os.path.isdir, mount_dir):
            raise InvalidPath
        self.pi1proxy = Pi1Wire(mount_dir)
    async def initialize(self, config_entry: ConfigEntry) -> None:
        """Initialize a config entry."""
        self.type = config_entry.data[CONF_TYPE]
        if self.type == CONF_TYPE_SYSBUS:
            mount_dir = config_entry.data[CONF_MOUNT_DIR]
            _LOGGER.debug("Initializing using SysBus %s", mount_dir)
            await self.check_mount_dir(mount_dir)
        elif self.type == CONF_TYPE_OWSERVER:
            host = config_entry.data[CONF_HOST]
            port = config_entry.data[CONF_PORT]
            _LOGGER.debug("Initializing using OWServer %s:%s", host, port)
            await self.connect(host, port)
        await self.discover_devices()
        if TYPE_CHECKING:
            assert self.devices
        # Register discovered devices on Hub
        device_registry = dr.async_get(self.hass)
        for device in self.devices:
            device_info: DeviceInfo = device.device_info
            device_registry.async_get_or_create(
                config_entry_id=config_entry.entry_id,
                identifiers=device_info[ATTR_IDENTIFIERS],
                manufacturer=device_info[ATTR_MANUFACTURER],
                model=device_info[ATTR_MODEL],
                name=device_info[ATTR_NAME],
                # Branch devices behind a coupler link to their parent here.
                via_device=device_info.get(ATTR_VIA_DEVICE),
            )
    async def discover_devices(self) -> None:
        """Discover all devices."""
        # Discovery runs only once; subsequent calls reuse the cached list.
        if self.devices is None:
            if self.type == CONF_TYPE_SYSBUS:
                self.devices = await self.hass.async_add_executor_job(
                    self._discover_devices_sysbus
                )
            if self.type == CONF_TYPE_OWSERVER:
                self.devices = await self.hass.async_add_executor_job(
                    self._discover_devices_owserver
                )
    def _discover_devices_sysbus(self) -> list[OWDeviceDescription]:
        """Discover all sysbus devices."""
        devices: list[OWDeviceDescription] = []
        assert self.pi1proxy
        all_sensors = self.pi1proxy.find_all_sensors()
        if not all_sensors:
            _LOGGER.error(
                "No onewire sensor found. Check if dtoverlay=w1-gpio "
                "is in your /boot/config.txt. "
                "Check the mount_dir parameter if it's defined"
            )
        for interface in all_sensors:
            # mac_address is "<family><serial>": first two chars are the family code.
            device_family = interface.mac_address[:2]
            device_id = f"{device_family}-{interface.mac_address[2:]}"
            if device_family not in DEVICE_SUPPORT_SYSBUS:
                _LOGGER.warning(
                    "Ignoring unknown device family (%s) found for device %s",
                    device_family,
                    device_id,
                )
                continue
            device_info: DeviceInfo = {
                ATTR_IDENTIFIERS: {(DOMAIN, device_id)},
                # Families not listed in DEVICE_MANUFACTURER default to Maxim.
                ATTR_MANUFACTURER: DEVICE_MANUFACTURER.get(
                    device_family, MANUFACTURER_MAXIM
                ),
                ATTR_MODEL: device_family,
                ATTR_NAME: device_id,
            }
            device = OWDirectDeviceDescription(
                device_info=device_info,
                interface=interface,
            )
            devices.append(device)
        return devices
    def _discover_devices_owserver(
        self, path: str = "/", parent_id: str | None = None
    ) -> list[OWDeviceDescription]:
        """Discover all owserver devices."""
        devices: list[OWDeviceDescription] = []
        assert self.owproxy
        for device_path in self.owproxy.dir(path):
            # device_path appears to end with "/" (note the bare f-string
            # concatenation with "family" below) — id is the last component.
            device_id = os.path.split(os.path.split(device_path)[0])[1]
            device_family = self.owproxy.read(f"{device_path}family").decode()
            _LOGGER.debug("read `%sfamily`: %s", device_path, device_family)
            device_type = self._get_device_type_owserver(device_path)
            if not _is_known_owserver_device(device_family, device_type):
                _LOGGER.warning(
                    "Ignoring unknown device family/type (%s/%s) found for device %s",
                    device_family,
                    device_type,
                    device_id,
                )
                continue
            device_info: DeviceInfo = {
                ATTR_IDENTIFIERS: {(DOMAIN, device_id)},
                ATTR_MANUFACTURER: DEVICE_MANUFACTURER.get(
                    device_family, MANUFACTURER_MAXIM
                ),
                ATTR_MODEL: device_type,
                ATTR_NAME: device_id,
            }
            if parent_id:
                # Device hangs off a coupler branch; record the parent link.
                device_info[ATTR_VIA_DEVICE] = (DOMAIN, parent_id)
            device = OWServerDeviceDescription(
                device_info=device_info,
                id=device_id,
                family=device_family,
                path=device_path,
                type=device_type,
            )
            devices.append(device)
            # Couplers expose extra branches; scan each branch recursively.
            if device_branches := DEVICE_COUPLERS.get(device_family):
                for branch in device_branches:
                    devices += self._discover_devices_owserver(
                        f"{device_path}{branch}", device_id
                    )
        return devices
    def _get_device_type_owserver(self, device_path: str) -> str:
        """Get device model."""
        if TYPE_CHECKING:
            assert self.owproxy
        device_type = self.owproxy.read(f"{device_path}type").decode()
        _LOGGER.debug("read `%stype`: %s", device_path, device_type)
        if device_type == "EDS":
            # EDS devices report a generic type; the real model lives in the
            # separate "device_type" node.
            device_type = self.owproxy.read(f"{device_path}device_type").decode()
            _LOGGER.debug("read `%sdevice_type`: %s", device_path, device_type)
        if TYPE_CHECKING:
            assert isinstance(device_type, str)
        return device_type
class CannotConnect(HomeAssistantError):
    """Error to indicate we cannot connect (raised by OneWireHub.connect)."""
class InvalidPath(HomeAssistantError):
    """Error to indicate the path is invalid (raised by check_mount_dir)."""
| 4,057 |
// File: clang/test/Analysis/html_diagnostics/relevant_lines/synthesized_body.cpp
// Faking std::call_once implementation.
namespace std {
typedef struct once_flag_s {
  int _M_once = 0;
} once_flag;
template <class Callable, class... Args>
void call_once(once_flag &o, Callable&& func, Args&&... args); // declaration only; body is modeled/synthesized by the analyzer
} // namespace std
int deref(int *x) {
  return *x; // null dereference is reported here when x == nullptr
}
void call_deref_once() {
  static std::once_flag once;
  int *p = nullptr; // null pointer forwarded to deref() through call_once
  std::call_once(once, &deref, p);
}
// RUN: rm -rf %t.output
// RUN: %clang_analyze_cc1 -std=c++11 -analyze -analyzer-checker=core -analyzer-output html -o %t.output %s
// RUN: cat %t.output/* | FileCheck %s --match-full-lines
// CHECK: var relevant_lines = {"1": {"3": 1, "8": 1, "11": 1, "12": 1, "15": 1, "16": 1, "17": 1, "18": 1}};
| 313 |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/page_load_metrics/browser/observers/ad_metrics/aggregate_frame_data.h"
#include "components/page_load_metrics/browser/observers/ad_metrics/frame_data_utils.h"
#include "components/page_load_metrics/common/page_load_metrics.mojom.h"
namespace page_load_metrics {
// Out-of-line defaulted constructor/destructor for AggregateFrameData.
AggregateFrameData::AggregateFrameData() = default;
AggregateFrameData::~AggregateFrameData() = default;
// Folds one CPU-time sample into the page-level aggregate and peak trackers.
// |update_time| is when the sample was taken, |update| is the CPU time used,
// and |is_ad| indicates whether it was consumed by an ad frame.
void AggregateFrameData::UpdateCpuUsage(base::TimeTicks update_time,
                                        base::TimeDelta update,
                                        bool is_ad) {
  // Update the overall usage for all of the relevant buckets.
  cpu_usage_ += update;
  // Update the peak usage.
  total_peak_cpu_.UpdatePeakWindowedPercent(update, update_time);
  // Non-ad peak is tracked separately; ad samples only affect the total peak.
  if (!is_ad)
    non_ad_peak_cpu_.UpdatePeakWindowedPercent(update, update_time);
}
// Records a resource load against the page-wide totals and, when the load
// occurred in the main frame, against the main-frame totals as well.
void AggregateFrameData::ProcessResourceLoadInFrame(
    const mojom::ResourceDataUpdatePtr& resource,
    bool is_main_frame) {
  resource_data_.ProcessResourceLoad(resource);
  if (is_main_frame)
    main_frame_resource_data_.ProcessResourceLoad(resource);
}
// Re-attributes |unaccounted_ad_bytes| of |mime_type| traffic as ad bytes in
// the page-wide bucket, mirroring into the main-frame bucket when applicable.
void AggregateFrameData::AdjustAdBytes(int64_t unaccounted_ad_bytes,
                                       ResourceMimeType mime_type,
                                       bool is_main_frame) {
  resource_data_.AdjustAdBytes(unaccounted_ad_bytes, mime_type);
  if (is_main_frame)
    main_frame_resource_data_.AdjustAdBytes(unaccounted_ad_bytes, mime_type);
}
} // namespace page_load_metrics
| 659 |
1,444 | <gh_stars>1000+
package mage.abilities.effects.common.combat;
import mage.abilities.Ability;
import mage.abilities.effects.RestrictionEffect;
import mage.constants.Duration;
import mage.filter.common.FilterCreaturePermanent;
import mage.game.Game;
import mage.game.permanent.Permanent;
/**
 * Restriction effect preventing the source permanent from being blocked by
 * creatures matching a given filter, for a given duration.
 *
 * @author LevelX2
 */
public class CantBeBlockedByAllSourceEffect extends RestrictionEffect {

    private final FilterCreaturePermanent filterBlockedBy;

    public CantBeBlockedByAllSourceEffect(FilterCreaturePermanent filterBlockedBy, Duration duration) {
        super(duration);
        this.filterBlockedBy = filterBlockedBy;
        String blockerDescription = filterBlockedBy.getMessage();
        // "except by ..." filter messages already carry their own preposition.
        String connector = blockerDescription.startsWith("except by") ? "" : "by ";
        staticText = "{this}" + " can't be blocked " + connector + blockerDescription;
    }

    public CantBeBlockedByAllSourceEffect(final CantBeBlockedByAllSourceEffect effect) {
        super(effect);
        this.filterBlockedBy = effect.filterBlockedBy;
    }

    @Override
    public boolean applies(Permanent permanent, Ability source, Game game) {
        // Restrict only the permanent that this effect's source still represents.
        return permanent.equals(source.getSourcePermanentIfItStillExists(game));
    }

    @Override
    public boolean canBeBlocked(Permanent attacker, Permanent blocker, Ability source, Game game, boolean canUseChooseDialogs) {
        // Blocking is forbidden exactly when the would-be blocker matches the filter.
        boolean blockerMatchesFilter = filterBlockedBy.match(blocker, source.getSourceId(), source.getControllerId(), game);
        return !blockerMatchesFilter;
    }

    @Override
    public CantBeBlockedByAllSourceEffect copy() {
        return new CantBeBlockedByAllSourceEffect(this);
    }
}
| 545 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.