max_stars_count (int64, 301 to 224k) | text (string, lengths 6 to 1.05M) | token_count (int64, 3 to 727k) |
---|---|---|
14,668 | <filename>components/flags_ui/feature_entry_macros.h
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_FLAGS_UI_FEATURE_ENTRY_MACROS_H_
#define COMPONENTS_FLAGS_UI_FEATURE_ENTRY_MACROS_H_
// Macros to simplify specifying the type of FeatureEntry. Please refer to
// the comments on FeatureEntry::Type in feature_entry.h, which explain the
// different entry types and when they should be used.
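//
// Illustrative use only (hypothetical flag and switch names, not taken from
// this header): a flag entry in the embedder's flag list, e.g. about_flags.cc,
// pairs its metadata with one of these macros:
//   {"my-flag", "My Flag", "Enables my flag.", kOsAll,
//    SINGLE_VALUE_TYPE(switches::kMyFlag)},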
#define SINGLE_VALUE_TYPE_AND_VALUE(command_line_switch, switch_value) \
flags_ui::FeatureEntry::SINGLE_VALUE, { \
.switches = { command_line_switch, switch_value, nullptr, nullptr } \
}
#define SINGLE_VALUE_TYPE(command_line_switch) \
SINGLE_VALUE_TYPE_AND_VALUE(command_line_switch, "")
#define ORIGIN_LIST_VALUE_TYPE(command_line_switch, switch_value) \
flags_ui::FeatureEntry::ORIGIN_LIST_VALUE, { \
.switches = { command_line_switch, switch_value, nullptr, nullptr } \
}
#define SINGLE_DISABLE_VALUE_TYPE_AND_VALUE(command_line_switch, switch_value) \
flags_ui::FeatureEntry::SINGLE_DISABLE_VALUE, { \
.switches = { command_line_switch, switch_value, nullptr, nullptr } \
}
#define SINGLE_DISABLE_VALUE_TYPE(command_line_switch) \
SINGLE_DISABLE_VALUE_TYPE_AND_VALUE(command_line_switch, "")
#define ENABLE_DISABLE_VALUE_TYPE_AND_VALUE(enable_switch, enable_value, \
disable_switch, disable_value) \
flags_ui::FeatureEntry::ENABLE_DISABLE_VALUE, { \
.switches = { enable_switch, enable_value, disable_switch, disable_value } \
}
#define ENABLE_DISABLE_VALUE_TYPE(enable_switch, disable_switch) \
ENABLE_DISABLE_VALUE_TYPE_AND_VALUE(enable_switch, "", disable_switch, "")
#define MULTI_VALUE_TYPE(choices_list) \
flags_ui::FeatureEntry::MULTI_VALUE, { .choices = choices_list }
#define FEATURE_VALUE_TYPE(feature_entry) \
flags_ui::FeatureEntry::FEATURE_VALUE, { \
.feature = { &feature_entry, {}, nullptr } \
}
#define FEATURE_WITH_PARAMS_VALUE_TYPE(feature_entry, feature_variations, \
feature_trial) \
flags_ui::FeatureEntry::FEATURE_WITH_PARAMS_VALUE, { \
.feature = { &feature_entry, feature_variations, feature_trial } \
}
#endif // COMPONENTS_FLAGS_UI_FEATURE_ENTRY_MACROS_H_
| 1,075 |
643 | <filename>ReactSkia/components/RSkComponentImage.cpp
#include "include/core/SkPaint.h"
#include "include/core/SkClipOp.h"
#include "include/core/SkImageFilter.h"
#include "include/effects/SkImageFilters.h"
#include "react/renderer/components/image/ImageEventEmitter.h"
#include "ReactSkia/components/RSkComponentImage.h"
#include "ReactSkia/views/common/RSkDrawUtils.h"
#include "ReactSkia/views/common/RSkImageUtils.h"
#include "ReactSkia/views/common/RSkImageCacheManager.h"
#include "ReactSkia/utils/RnsLog.h"
#include "ReactSkia/utils/RnsUtils.h"
#include "ReactSkia/views/common/RSkConversion.h"
namespace facebook {
namespace react {
using namespace RSkDrawUtils;
using namespace RSkImageUtils;
using namespace RSkImageCacheManager;
namespace {
sk_sp<SkImage> getLocalImage(ImageSource source) {
if ( !source.uri.empty() && !(source.uri.substr(0, 14) == "file://assets/")) {
return nullptr;
}
std::string path = "./" + source.uri.substr(7);
RNS_PROFILE_START(getImageData)
sk_sp<SkImage> imageData=getImageData(path.c_str());
RNS_PROFILE_END(path.c_str(),getImageData)
if(!imageData) {
RNS_LOG_ERROR("Draw Image Failed :" << path);
}
#ifdef RNS_IMAGE_CACHE_USAGE_DEBUG
printCacheUsage();
#endif // RNS_IMAGE_CACHE_USAGE_DEBUG
return imageData;
}
}//namespace
RSkComponentImage::RSkComponentImage(const ShadowView &shadowView)
: RSkComponent(shadowView) {}
void RSkComponentImage::OnPaint(
SkCanvas *canvas) {
auto component = getComponentData();
auto const &imageProps =
*std::static_pointer_cast<ImageProps const>(component.props);
sk_sp<SkImage> imageData = nullptr;
if (!imageProps.sources.empty() && imageProps.sources[0].type == ImageSource::Type::Local ) {
imageData=getLocalImage(imageProps.sources[0]);
}
auto imageEventEmitter = std::static_pointer_cast<ImageEventEmitter const>(component.eventEmitter);
if(imageData) {
/* Emitting Load completed Event*/
imageEventEmitter->onLoad();
Rect frame = component.layoutMetrics.frame;
SkRect frameRect = SkRect::MakeXYWH(frame.origin.x, frame.origin.y, frame.size.width, frame.size.height);
auto const &imageBorderMetrics=imageProps.resolveBorderMetrics(component.layoutMetrics);
SkRect targetRect = computeTargetRect({imageData->width(),imageData->height()},frameRect,imageProps.resizeMode);
SkPaint paint;
/* TO DO: Handle filter quality based on build time configuration. Setting Low Filter Quality as a default for now*/
paint.setFilterQuality(DEFAULT_IMAGE_FILTER_QUALITY);
if(imageProps.resizeMode == ImageResizeMode::Repeat){
sk_sp<SkImageFilter> imageFilter(SkImageFilters::Tile(targetRect,frameRect ,nullptr));
paint.setImageFilter(std::move(imageFilter));
}
/* Draw order 1. Background 2. Image 3. Border*/
drawBackground(canvas,frame,imageBorderMetrics,imageProps.backgroundColor,imageProps.opacity);
canvas->save();
if(( frameRect.width() < targetRect.width()) || ( frameRect.height() < targetRect.height())) {
canvas->clipRect(frameRect,SkClipOp::kIntersect);
}
canvas->drawImageRect(imageData,targetRect,&paint);
canvas->restore();
drawBorder(canvas,frame,imageBorderMetrics,imageProps.backgroundColor,imageProps.opacity);
} else {
/* Emitting Image Load failed Event*/
imageEventEmitter->onError();
}
}
RnsShell::LayerInvalidateMask RSkComponentImage::updateComponentProps(const ShadowView &newShadowView,bool forceUpdate) {
auto const &newimageProps = *std::static_pointer_cast<ImageProps const>(newShadowView.props);
auto component = getComponentData();
auto const &oldimageProps = *std::static_pointer_cast<ImageProps const>(component.props);
RnsShell::LayerInvalidateMask updateMask=RnsShell::LayerInvalidateNone;
if((forceUpdate) || (oldimageProps.resizeMode != newimageProps.resizeMode)) {
imageProps.resizeMode = newimageProps.resizeMode;
updateMask =static_cast<RnsShell::LayerInvalidateMask>(updateMask | RnsShell::LayerInvalidateAll);
}
if((forceUpdate) || (oldimageProps.tintColor != newimageProps.tintColor )) {
RNS_LOG_NOT_IMPL;
imageProps.tintColor = RSkColorFromSharedColor(newimageProps.tintColor,SK_ColorTRANSPARENT);
}
return updateMask;
}
} // namespace react
} // namespace facebook
| 1,525 |
2,151 | <filename>components/ukm/content/debug_page/debug_page.h
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_UKM_DEBUG_PAGE_REQUEST_JOB_H_
#define COMPONENTS_UKM_DEBUG_PAGE_REQUEST_JOB_H_
#include <string>
#include "base/callback.h"
#include "base/macros.h"
#include "content/public/browser/url_data_source.h"
namespace ukm {
class UkmService;
namespace debug {
// Implements the chrome://ukm page for debugging UKM state.
class DebugPage : public content::URLDataSource {
public:
typedef base::RepeatingCallback<UkmService*()> ServiceGetter;
explicit DebugPage(ServiceGetter service_getter);
// content::URLDataSource:
std::string GetSource() const override;
std::string GetMimeType(const std::string& path) const override;
void StartDataRequest(
const std::string& path,
const content::ResourceRequestInfo::WebContentsGetter& wc_getter,
const content::URLDataSource::GotDataCallback& callback) override;
bool AllowCaching() const override;
private:
~DebugPage() override;
ServiceGetter service_getter_;
DISALLOW_IMPLICIT_CONSTRUCTORS(DebugPage);
};
} // namespace debug
} // namespace ukm
#endif // COMPONENTS_UKM_DEBUG_PAGE_REQUEST_JOB_H_
| 441 |
8,772 | <gh_stars>1000+
package org.apereo.cas.shell.commands;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import static org.junit.jupiter.api.Assertions.*;
/**
* This is {@link ExitCommandTests}.
* This should be run last using a high order number
* to allow for all other tests to pass. Successful execution
* of this test class would terminate the runtime.
* @author <NAME>
* @since 6.4.0
*/
@EnableAutoConfiguration
@Tag("SHELL")
@Order(Order.DEFAULT + 1)
public class ExitCommandTests extends BaseCasShellCommandTests {
@Test
public void verifyOperation() {
shell.evaluate(() -> "quit");
fail("Shell should have quit but did not");
}
}
| 268 |
3,631 | <filename>drools-scenario-simulation/drools-scenario-simulation-backend/src/test/java/org/drools/scenariosimulation/backend/expression/DMNFeelExpressionEvaluatorTest.java
/*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.scenariosimulation.backend.expression;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import com.fasterxml.jackson.databind.node.TextNode;
import org.junit.Test;
import org.kie.dmn.api.feel.runtime.events.FEELEvent;
import org.kie.dmn.api.feel.runtime.events.FEELEvent.Severity;
import org.kie.dmn.feel.FEEL;
import org.kie.dmn.feel.runtime.events.FEELEventBase;
import org.kie.dmn.feel.runtime.events.SyntaxErrorEvent;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.entry;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
public class DMNFeelExpressionEvaluatorTest {
DMNFeelExpressionEvaluator expressionEvaluator = new DMNFeelExpressionEvaluator(this.getClass().getClassLoader());
@Test
public void evaluateUnaryExpression() {
assertTrue(expressionEvaluator.evaluateUnaryExpression("not( true )", false, boolean.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression(">2, >5", BigDecimal.valueOf(6), BigDecimal.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression("abs(-1)", BigDecimal.valueOf(1), BigDecimal.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression("abs(-1)", BigDecimal.valueOf(-1), BigDecimal.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression("max(1, ?) > 1", BigDecimal.valueOf(2), BigDecimal.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression("max(1, ?) < 1", BigDecimal.valueOf(2), BigDecimal.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression("? = 2", BigDecimal.valueOf(2), BigDecimal.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression("? > 2", BigDecimal.valueOf(2), BigDecimal.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression("? + 1 > ?", BigDecimal.valueOf(2), BigDecimal.class).isSuccessful());
Map<String, BigDecimal> contextValue = Collections.singletonMap("key_a", BigDecimal.valueOf(1));
assertTrue(expressionEvaluator.evaluateUnaryExpression("{key_a : 1}", contextValue, Map.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression("{key_a : 2}", contextValue, Map.class).isSuccessful());
List<BigDecimal> contextListValue = Collections.singletonList(BigDecimal.valueOf(23));
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode("23").toString(), contextListValue, List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode("2").toString(), contextListValue, List.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode("? = [23]").toString(), contextListValue, List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode("? = [2]").toString(), contextListValue, List.class).isSuccessful());
List<BigDecimal> contextListValue2 = Arrays.asList(BigDecimal.valueOf(23), BigDecimal.valueOf(32));
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode(" ? = [23, 32]").toString(), contextListValue2, List.class).isSuccessful());
assertFalse("Collection unary expression needs to start with ?",
expressionEvaluator.evaluateUnaryExpression(new TextNode("[23, 32]").toString(),
contextListValue2,
List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode(" ? = [23, 32, 123]").toString(), contextListValue2, List.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode(" ?[1] = 23").toString(), contextListValue2, List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode(" ?[1] = 32").toString(), contextListValue2, List.class).isSuccessful());
Map<String, Object> firstMap = new HashMap<>();
firstMap.put("Price", new BigDecimal(2000));
firstMap.put("Name", "PC");
Map<String, Object> secondMap = new HashMap<>();
secondMap.put("Price", new BigDecimal(3300));
secondMap.put("Name", "CAR");
String firstParameter = "{Price: 2000,Name:\"PC\"}";
String secondParameter = "{Price:3300, Name:\"CAR\"}";
List<Map<String, Object>> context = Arrays.asList(firstMap, secondMap);
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode("?=[" + firstParameter + ", " + secondParameter + "]").toString(), context, List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode("?=[{Price: 2001,Name:\"PC\"}, {Price:3301,Name:\"CAR\"}]").toString(), context, List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode("?=[{Price: 2000, Name:\"PCA\"}, {Price:3300,Name:\"CARE\"}]").toString(), context, List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode("?=[{Pric: 2000, Name:\"PC\"}, {Price:3300,Names:\"CARE\"}]").toString(), context, List.class).isSuccessful());
/* Different order: Failure */
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode("?=[" + secondParameter + ", " + firstParameter + "]").toString(), context, List.class).isSuccessful());
/* IN operator */
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode(firstParameter + " in ?").toString(), context, List.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode(secondParameter + " in ?").toString(), context, List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode("{Price: 2001,Name:\"PC\"} in ?").toString(), context, List.class).isSuccessful());
assertFalse(expressionEvaluator.evaluateUnaryExpression(new TextNode("{Price: 3300,Name:\"CARE\"} in ?").toString(), context, List.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode("(" + firstParameter + " in ?) and ("+ secondParameter +" in ?)").toString(), context, List.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression(new TextNode("(" + secondParameter + " in ?) and ("+ firstParameter +" in ?)").toString(), context, List.class).isSuccessful());
assertThatThrownBy(() -> expressionEvaluator.evaluateUnaryExpression("variable", null, null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Error during evaluation:");
assertThatThrownBy(() -> expressionEvaluator.evaluateUnaryExpression("! true", null, null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Syntax error:");
assertThatThrownBy(() -> expressionEvaluator.evaluateUnaryExpression("? > 2", null, BigDecimal.class))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
@SuppressWarnings("unchecked")
public void evaluateLiteralExpression() {
assertEquals(BigDecimal.valueOf(5), expressionEvaluator.evaluateLiteralExpression("2 + 3", BigDecimal.class.getCanonicalName(), null));
Map<String, Object> parsedValue = (Map<String, Object>) expressionEvaluator.evaluateLiteralExpression("{key_a : 1}", Map.class.getCanonicalName(), Collections.emptyList());
assertTrue(parsedValue.containsKey("key_a"));
assertEquals(parsedValue.get("key_a"), BigDecimal.valueOf(1));
List<BigDecimal> parsedValueListExpression = (List<BigDecimal>) expressionEvaluator.evaluateLiteralExpression(new TextNode("[10, 12]").toString(), List.class.getCanonicalName(), Collections.emptyList());
assertEquals(2, parsedValueListExpression.size());
assertEquals(BigDecimal.valueOf(10), parsedValueListExpression.get(0));
assertEquals(BigDecimal.valueOf(12), parsedValueListExpression.get(1));
assertThatThrownBy(() -> expressionEvaluator
.evaluateLiteralExpression("SPEED", String.class.getCanonicalName(), null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Error during evaluation:");
assertThatThrownBy(() -> expressionEvaluator
.evaluateLiteralExpression("\"SPEED", String.class.getCanonicalName(), null))
.isInstanceOf(IllegalArgumentException.class)
.hasMessageStartingWith("Syntax error:");
}
@SuppressWarnings("unchecked")
@Test
public void expressionTest() {
String listJsonString = "[{\"name\": \"\\\"John\\\"\"}, " +
"{\"name\": \"\\\"John\\\"\", \"names\" : [{\"value\": \"\\\"Anna\\\"\"}, {\"value\": \"\\\"Mario\\\"\"}]}]";
List<Map<String, Object>> parsedValue = (List<Map<String, Object>>) expressionEvaluator.convertResult(listJsonString, List.class.getCanonicalName(),
Collections.singletonList(Map.class.getCanonicalName()));
assertEquals(2, parsedValue.size());
assertEquals(2, ((List<Object>) parsedValue.get(1).get("names")).size());
assertTrue(((List<Object>) parsedValue.get(1).get("names")).contains("Anna"));
String mapJsonString = "{\"first\": {\"name\": \"\\\"John\\\"\"}}";
Map<String, Map<String, Object>> parsedMap = (Map<String, Map<String, Object>>) expressionEvaluator
.convertResult(mapJsonString, Map.class.getCanonicalName(),
Arrays.asList(String.class.getCanonicalName(), Object.class.getCanonicalName()));
assertEquals(1, parsedMap.size());
assertEquals("John", parsedMap.get("first").get("name"));
mapJsonString = "{\"first\": {\"siblings\": [{\"name\" : \"\\\"John\\\"\"}]}}";
parsedMap = (Map<String, Map<String, Object>>) expressionEvaluator
.convertResult(mapJsonString, Map.class.getCanonicalName(),
Arrays.asList(String.class.getCanonicalName(), Object.class.getCanonicalName()));
assertEquals(1, parsedMap.size());
assertEquals("John", ((List<Map<String, Object>>) parsedMap.get("first").get("siblings")).get(0).get("name"));
mapJsonString = "{\"first\": {\"phones\": {\"number\" : \"1\"}}}";
parsedMap = (Map<String, Map<String, Object>>) expressionEvaluator
.convertResult(mapJsonString, Map.class.getCanonicalName(),
Arrays.asList(String.class.getCanonicalName(), Object.class.getCanonicalName()));
assertEquals(1, parsedMap.size());
assertEquals(BigDecimal.valueOf(1), ((Map<String, Object>) parsedMap.get("first").get("phones")).get("number"));
}
@Test
public void fromObjectToExpressionTest() {
assertEquals("\"Test\"", expressionEvaluator.fromObjectToExpression("Test"));
assertEquals("false", expressionEvaluator.fromObjectToExpression(false));
assertEquals("1", expressionEvaluator.fromObjectToExpression(BigDecimal.valueOf(1)));
assertEquals("date( \"2019-05-13\" )", expressionEvaluator.fromObjectToExpression(LocalDate.of(2019, 5, 13)));
assertEquals("null", expressionEvaluator.fromObjectToExpression(null));
}
@Test
public void listenerTest() {
FEELEvent syntaxErrorEvent = new SyntaxErrorEvent(Severity.ERROR, "test", null, 0, 0, null);
FEELEvent genericError = new FEELEventBase(Severity.ERROR, "error", null);
FEELEvent notError = new FEELEventBase(Severity.INFO, "info", null);
AtomicReference<FEELEvent> error = new AtomicReference<>();
FEEL feel = expressionEvaluator.newFeelEvaluator(error);
// Only a single error of type syntax
applyEvents(Collections.singletonList(syntaxErrorEvent), feel);
assertEquals(syntaxErrorEvent, error.get());
error.set(null);
// Syntax error as second
applyEvents(Arrays.asList(genericError, syntaxErrorEvent), feel);
assertEquals(syntaxErrorEvent, error.get());
error.set(null);
// Syntax error as first
applyEvents(Arrays.asList(syntaxErrorEvent, genericError), feel);
assertEquals(syntaxErrorEvent, error.get());
error.set(null);
// Not error
applyEvents(Collections.singletonList(notError), feel);
assertNull(error.get());
}
private void applyEvents(List<FEELEvent> events, FEEL feel) {
for (FEELEvent event : events) {
feel.getListeners().forEach(listener -> listener.onEvent(event));
}
}
@Test
public void expressionListTest() {
String expressionCollectionJsonString = new TextNode("[ 1, 10 ]").toString();
List<BigDecimal> result = (List<BigDecimal>) expressionEvaluator.convertResult(expressionCollectionJsonString, List.class.getCanonicalName(), Collections.EMPTY_LIST);
assertEquals(2, result.size());
assertEquals(BigDecimal.ONE, result.get(0));
assertEquals(BigDecimal.TEN, result.get(1));
}
@Test
public void expressionObjectListTest() {
String expressionCollectionJsonString = new TextNode("[{age:10},{name:\"John\"}]").toString();
List<Map<String, Object>> result =
(List<Map<String, Object>>) expressionEvaluator.convertResult(expressionCollectionJsonString,
List.class.getCanonicalName(),
Collections.EMPTY_LIST);
assertEquals(2, result.size());
assertThat(result.get(0)).containsOnly(entry("age", BigDecimal.TEN));
assertThat(result.get(1)).containsOnly(entry("name", "John"));
}
@Test(expected = IllegalArgumentException.class)
public void expressionListTest_Wrong() {
String expressionCollectionJsonString = new TextNode("[ 1 : 234").toString();
expressionEvaluator.convertResult(expressionCollectionJsonString, List.class.getCanonicalName(), Collections.EMPTY_LIST);
}
@Test
public void expressionMapTest() {
String expressionCollectionJsonString = new TextNode("{ x : 5, y : 3 }").toString();
Map<String, BigDecimal> result = (Map<String, BigDecimal>) expressionEvaluator.convertResult(expressionCollectionJsonString, Map.class.getCanonicalName(), Collections.EMPTY_LIST);
assertEquals(2, result.size());
assertEquals(BigDecimal.valueOf(5), result.get("x"));
assertEquals(BigDecimal.valueOf(3), result.get("y"));
}
@Test(expected = IllegalArgumentException.class)
public void expressionMapTest_Wrong() {
String expressionCollectionJsonString = new TextNode(": 5 y : 3 }").toString();
expressionEvaluator.convertResult(expressionCollectionJsonString, Map.class.getCanonicalName(), Collections.EMPTY_LIST);
}
@Test
public void expressionListVerifyResultTest() {
String expressionCollectionJsonString = new TextNode("10").toString();
List<BigDecimal> contextValue = Collections.singletonList(BigDecimal.valueOf(10));
assertTrue(expressionEvaluator.verifyResult(expressionCollectionJsonString, contextValue, List.class).isSuccessful());
}
@Test
public void expressionMapVerifyResultTest() {
String expressionCollectionJsonString = new TextNode("{key_a : 1}").toString();
Map<String, BigDecimal> contextValue = Collections.singletonMap("key_a", BigDecimal.valueOf(1));
assertTrue(expressionEvaluator.verifyResult(expressionCollectionJsonString, contextValue, Map.class).isSuccessful());
}
@Test
public void isStructuredInput() {
assertTrue(expressionEvaluator.isStructuredInput(List.class.getCanonicalName()));
assertTrue(expressionEvaluator.isStructuredInput(ArrayList.class.getCanonicalName()));
assertTrue(expressionEvaluator.isStructuredInput(LinkedList.class.getCanonicalName()));
assertFalse(expressionEvaluator.isStructuredInput(Map.class.getCanonicalName()));
assertFalse(expressionEvaluator.isStructuredInput(HashMap.class.getCanonicalName()));
assertFalse(expressionEvaluator.isStructuredInput(LinkedHashMap.class.getCanonicalName()));
assertFalse(expressionEvaluator.isStructuredInput(Set.class.getCanonicalName()));
assertFalse(expressionEvaluator.isStructuredInput(Integer.class.getCanonicalName()));
assertFalse(expressionEvaluator.isStructuredInput(String.class.getCanonicalName()));
}
@Test
public void testUnaryTestUsingKieExtendedProfile() {
// DROOLS-6337 today() and now() functions not evaluated correctly in Test Scenarios
ZonedDateTime now = (ZonedDateTime) expressionEvaluator.evaluateLiteralExpression("now()", ZonedDateTime.class.getCanonicalName(), Collections.emptyList());
LocalDate today = (LocalDate) expressionEvaluator.evaluateLiteralExpression("today()", LocalDate.class.getCanonicalName(), Collections.emptyList());
assertNotNull(now);
assertNotNull(today);
assertTrue(expressionEvaluator.evaluateUnaryExpression("now() > ?", now.minusDays(1), ZonedDateTime.class).isSuccessful());
assertTrue(expressionEvaluator.evaluateUnaryExpression("today() > ?", today.minusDays(1), LocalDate.class).isSuccessful());
}
} | 7,211 |
348 | {"nom":"Saint-Langis-lès-Mortagne","circ":"2ème circonscription","dpt":"Orne","inscrits":706,"abs":352,"votants":354,"blancs":16,"nuls":9,"exp":329,"res":[{"nuance":"REM","nom":"<NAME>","voix":180},{"nuance":"LR","nom":"<NAME>","voix":149}]} | 98 |
1,041 | package io.ebeaninternal.api;
import io.ebean.Transaction;
import io.ebeaninternal.server.core.OrmQueryRequest;
/**
* Request for loading Associated One Beans.
*/
public abstract class LoadRequest {
protected final OrmQueryRequest<?> parentRequest;
protected final Transaction transaction;
protected final boolean lazy;
LoadRequest(OrmQueryRequest<?> parentRequest, boolean lazy) {
this.parentRequest = parentRequest;
this.transaction = parentRequest == null ? null : parentRequest.transaction();
this.lazy = lazy;
}
/**
* Return the associated bean type for this load request.
*/
public abstract Class<?> beanType();
/**
* Return true if this is a lazy load and false if it is a secondary query.
*/
public boolean isLazy() {
return lazy;
}
/**
* Return the transaction to use if this is a secondary query.
* <p>
* Lazy loading queries run in their own transaction.
* </p>
*/
public Transaction transaction() {
return transaction;
}
/**
* Return true if the parent query is a findIterate() type query.
* So one of - findIterate(), findEach(), findEachWhile() or findVisit().
*/
public boolean isParentFindIterate() {
return parentRequest != null && parentRequest.query().getType() == SpiQuery.Type.ITERATE;
}
}
| 391 |
3,459 | <filename>Cores/FCEU/FCEU/drivers/win/directories.h
#ifndef WIN_DIRECTORIES_H
#define WIN_DIRECTORIES_H
void ConfigDirectories();
#endif
| 62 |
338 | <filename>mcpipy/snake.py<gh_stars>100-1000
from mine import *
from board2d import Board2D
from time import sleep
from random import randint
import input
width = 30
height = 20
mc = Minecraft()
board = Board2D(mc, width, height)
board.fill(block.AIR)
vx = 1
vy = 0
x = width // 2
y = height // 2
length = 3
tail = []
def newGold():
while True:
x = randint(0, width-1)
y = randint(0, height-1)
if board.getBlock(x,y) == block.AIR:
board.setBlock(x,y, block.GOLD_BLOCK)
return
newGold()
while True:
if x < 0 or x >= width or y < 0 or y >= height or (x,y) in tail:
board.setBlock(x, y, block.WOOL_RED)
board.draw()
mc.postToChat("Game over")
break
tail.append((x,y))
if len(tail) > length:
board.setBlock(tail[0], block.AIR)
del tail[0]
if board.getBlock(x,y) == block.GOLD_BLOCK:
length += 1
newGold()
board.setBlock(x, y, block.BRICK_BLOCK)
board.draw()
sleep(0.05+1.5/length)
if input.wasPressedSinceLast(input.UP):
vy = 1
vx = 0
elif input.wasPressedSinceLast(input.DOWN):
vy = -1
vx = 0
elif input.wasPressedSinceLast(input.LEFT):
vy = 0
vx = -1
elif input.wasPressedSinceLast(input.RIGHT):
vy = 0
vx = 1
x += vx
y += vy
| 742 |
1,144 | package org.compiere.model;
/*
* #%L
* de.metas.adempiere.adempiere.base
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
import java.sql.ResultSet;
import java.util.List;
import java.util.Properties;
import de.metas.util.Check;
/**
* Window Tab Customization
* @author <NAME>, <EMAIL>
* <li>BF [ 2726889 ] Finish User Window (AD_UserDef*) functionality
*/
public class MUserDefTab extends X_AD_UserDef_Tab
{
/**
*
*/
private static final long serialVersionUID = -1211944378938157092L;
public MUserDefTab(Properties ctx, int AD_UserDef_Tab_ID, String trxName)
{
super(ctx, AD_UserDef_Tab_ID, trxName);
}
public MUserDefTab(Properties ctx, ResultSet rs, String trxName)
{
super(ctx, rs, trxName);
}
private MUserDefField[] getFields(boolean reload)
{
if (!reload && m_fields != null)
{
return m_fields;
}
final String whereClause = MUserDefField.COLUMNNAME_AD_UserDef_Tab_ID+"=?";
final List<MUserDefField> list = new Query(getCtx(), MUserDefField.Table_Name, whereClause, get_TrxName())
.setParameters(get_ID())
.setOnlyActiveRecords(true)
.setOrderBy(MUserDefField.COLUMNNAME_AD_Field_ID)
.list(MUserDefField.class);
//
m_fields = list.toArray(new MUserDefField[list.size()]);
return m_fields;
}
private MUserDefField[] m_fields = null;
public MUserDefField getField(int AD_Field_ID)
{
if (AD_Field_ID <= 0)
{
return null;
}
for (MUserDefField field : getFields(false))
{
if (AD_Field_ID == field.getAD_Field_ID())
{
return field;
}
}
return null;
}
public void apply(GridTabVO vo)
{
final String name = getName();
if (!Check.isEmpty(name) && name.length() > 1)
vo.setName(name);
if (!Check.isEmpty(getDescription()))
vo.setDescription(getDescription());
if (!Check.isEmpty(getHelp()))
vo.setHelp(getHelp());
//
vo.IsSingleRow = this.isSingleRow();
vo.setReadOnly(this.isReadOnly());
// vo.IsDeleteable
// vo.IsHighVolume
// vo.IsInsertRecord
}
}
| 1,018 |
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef IOS_CHROME_COMMON_CREDENTIAL_PROVIDER_ARCHIVABLE_CREDENTIAL_UTIL_H_
#define IOS_CHROME_COMMON_CREDENTIAL_PROVIDER_ARCHIVABLE_CREDENTIAL_UTIL_H_
#import <Foundation/Foundation.h>
// Constructs a record identifier for the given data. This should be as close
// as possible to |RecordIdentifierForPasswordForm|, as this is what is used
// to detect if a credential should be updated instead of created.
NSString* RecordIdentifierForData(NSURL* url, NSString* username);
#endif // IOS_CHROME_COMMON_CREDENTIAL_PROVIDER_ARCHIVABLE_CREDENTIAL_UTIL_H_
| 234 |
1,103 | <gh_stars>1000+
/*
* Copyright 2016-2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.hollow.api.producer.listener;
import com.netflix.hollow.api.producer.HollowProducer;
import com.netflix.hollow.api.producer.Status;
import com.netflix.hollow.api.producer.validation.ValidationStatusListener;
import java.time.Duration;
/**
* A listener of cycle events associated with the producer cycle stage.
* <p>
* A cycle listener instance may be registered when building a {@link HollowProducer producer}
* (see {@link HollowProducer.Builder#withListener(HollowProducerEventListener)}} or by
* registering on the producer itself
* (see {@link HollowProducer#addListener(HollowProducerEventListener)}.
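* <p>
* A registration sketch (illustrative only; {@code MyCycleListener} is an
* assumed implementation and the publisher argument is elided):
* <pre>{@code
* HollowProducer producer = HollowProducer.withPublisher(publisher)
*     .withListener(new MyCycleListener())
*     .build();
* }</pre>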
*/
public interface CycleListener extends HollowProducerEventListener {
/**
* The reasons for a cycle skip event
*/
enum CycleSkipReason {
/**
* The cycle is skipped because the producer is not a primary producer.
*/
NOT_PRIMARY_PRODUCER
}
/**
* A receiver of a cycle skip event. Called when a cycle is skipped.
* <p>
* If this event occurs then no further cycle events (or any events associated with sub-stages) will occur and
* the cycle stage is complete.
*
* @param reason the reason the cycle is skipped
*/
// See HollowProducerListenerV2
// Can this be merged in to onCycleComplete with status?
void onCycleSkip(CycleSkipReason reason);
/**
* A receiver of a new delta chain event. Called when the next state produced will begin a new delta chain.
* Occurs before the {@link #onCycleStart(long) cycle start} event.
* <p>
* This will be called prior to the next state being produced if
* {@link HollowProducer#restore(long, com.netflix.hollow.api.consumer.HollowConsumer.BlobRetriever)}
* hasn't been called or the restore failed.
*
* @param version the version of the state that will become the first of a new delta chain
*/
// This is called just before onCycleStart, can the two be merged with additional arguments?
void onNewDeltaChain(long version);
/**
* A receiver of a cycle start event. Called when the {@code HollowProducer} has begun a new cycle.
*
* @param version the version produced by the {@code HollowProducer} for new cycle about to start.
*/
void onCycleStart(long version);
/**
* A receiver of a cycle complete event. Called after the {@code HollowProducer} has completed a cycle
* with success or failure. Occurs after the {@link #onCycleStart(long) cycle start} event.
* <p>
* If the cycle is successful then the {@code status} reports
* {@link Status.StatusType#SUCCESS success}. Success indicates that a new state has been as been
* {@link PopulateListener populated},
* {@link PublishListener published},
* {@link IntegrityCheckListener integrity checked},
* {@link ValidationStatusListener validated}, and
* {@link AnnouncementListener announced}.
* Alternatively success may also indicate that population resulted in no changes and therefore there is no new
* state to publish and announce. If so a {@link PublishListener#onNoDeltaAvailable(long) no delta available}
* event of the {@link PublishListener publisher} stage is be emitted after which the cycle complete event is
* emitted.
* <p>
* If the cycle failed then the {@code status} reports {@link Status.StatusType#FAIL failure}.
*
* @param status the status of this cycle.
* @param readState the read state, null if no read state available
* @param version the version
* @param elapsed duration of the cycle
*/
void onCycleComplete(Status status, HollowProducer.ReadState readState, long version, Duration elapsed);
}
| 1,380 |
351 | package com.promegu.xloggerexample;
import com.promegu.xlog.base.XLog;
/**
* Created by guyacong on 2015/7/12.
*/
@XLog
public class BaseCalculator {
public String getName() {
return "BaseCalculator";
}
public int calculate(int i, int j) {
return i + j;
}
}
| 125 |
986 | <gh_stars>100-1000
import sys
import pyarrow as pa
import pyarrow.parquet as pq
import pytest
from pandas.util import testing as tm
import ibis
from ibis.backends.base.file import FileDatabase
from ibis.backends.parquet import ParquetTable
pytestmark = pytest.mark.skipif(
sys.platform == 'win32', reason='See ibis issue #1698'
)
@pytest.fixture
def transformed(parquet):
closes = parquet.pq.close
opens = parquet.pq.open
t = opens.inner_join(closes, ['time', 'ticker'])
t = t[opens, closes.close]
t = t.mutate(avg=(t.open + t.close) / 2)
t = t[['time', 'ticker', 'avg']]
return t
def test_creation(parquet):
# we have existing files in our dir
d = parquet.client.root
assert len(list(d.iterdir())) == 1
pqd = d / 'pq'
assert len(list(pqd.iterdir())) == 2
assert len(pq.read_table(str(pqd / 'open.parquet'))) == 50
assert len(pq.read_table(str(pqd / 'close.parquet'))) == 50
def test_client(tmpdir, file_backends_data):
# construct with a path to a file
d = tmpdir / 'pq'
d.mkdir()
for k, v in file_backends_data.items():
f = d / f"{k}.parquet"
table = pa.Table.from_pandas(v)
pq.write_table(table, str(f))
c = ibis.parquet.connect(tmpdir)
assert c.list_databases() == ['.', 'pq']
assert c.database().pq.list_tables() == ['close', 'open']
def test_navigation(parquet):
# directory navigation
assert isinstance(parquet, FileDatabase)
result = dir(parquet)
assert result == ['.', 'pq']
d = parquet.pq
assert isinstance(d, FileDatabase)
result = dir(d)
assert result == ['.', 'close', 'open']
result = d.list_tables()
assert result == ['close', 'open']
opens = d.open
assert isinstance(opens.op(), ParquetTable)
closes = d.close
assert isinstance(closes.op(), ParquetTable)
def test_read(parquet, file_backends_data):
closes = parquet.pq.close
assert str(closes) is not None
result = closes.execute()
expected = file_backends_data['close']
tm.assert_frame_equal(result, expected)
result = closes.execute()
tm.assert_frame_equal(result, expected)
def test_write(transformed, tmpdir):
t = transformed
expected = t.execute()
tpath = tmpdir / 'new_dir'
tpath.mkdir()
path = tpath / 'foo.parquet'
assert not path.exists()
t = transformed[['time', 'ticker', 'avg']]
c = ibis.parquet.connect(tpath)
c.insert('foo.parquet', t)
t.execute()
assert path.exists()
# readback
c = ibis.parquet.connect(str(tpath)).database()
result = c.list_databases()
assert result == ['.']
result = c.foo.execute()
tm.assert_frame_equal(result, expected)
path = tpath / 'foo.parquet'
assert path.exists()
| 1,131 |
971 | <filename>dl-biz/src/main/java/com/ucar/datalink/biz/service/impl/TaskStatusServiceImpl.java
package com.ucar.datalink.biz.service.impl;
import com.alibaba.fastjson.JSON;
import com.google.common.collect.Sets;
import com.ucar.datalink.common.errors.TaskConflictException;
import com.ucar.datalink.domain.task.TaskStatus;
import com.ucar.datalink.biz.service.TaskStatusService;
import com.ucar.datalink.common.zookeeper.DLinkZkPathDef;
import com.ucar.datalink.common.zookeeper.DLinkZkUtils;
import org.I0Itec.zkclient.DataUpdater;
import org.I0Itec.zkclient.exception.ZkNoNodeException;
import org.I0Itec.zkclient.exception.ZkNodeExistsException;
import org.apache.zookeeper.CreateMode;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
/**
* Task status management service class, based on Zookeeper.
* <p>
* Created by lubiao on 2016/12/6.
*/
@Service
public class TaskStatusServiceImpl implements TaskStatusService {
@Override
public void addStatus(TaskStatus status) throws TaskConflictException {
DLinkZkUtils zkUtils = DLinkZkUtils.get();
String statusPath = DLinkZkPathDef.getTaskStatusNode(status.getId());
byte[] bytes = JSON.toJSONBytes(status);
try {
zkUtils.zkClient().createPersistent(DLinkZkPathDef.getTaskNode(status.getId()), true);
zkUtils.zkClient().create(statusPath, bytes, CreateMode.EPHEMERAL);
} catch (ZkNodeExistsException e) {
byte[] data = zkUtils.zkClient().readData(statusPath, true);
if (data != null) {
TaskStatus otherTaskStatus = JSON.parseObject(data, TaskStatus.class);
throw new TaskConflictException(status.getId(), status.getWorkerId(), otherTaskStatus.getWorkerId(),
status.getExecutionId(), otherTaskStatus.getExecutionId());
} else {
addStatus(status);
}
}
}
@Override
public void updateStatus(TaskStatus status) {
DLinkZkUtils zkUtils = DLinkZkUtils.get();
String statusPath = DLinkZkPathDef.getTaskStatusNode(status.getId());
byte[] bytes = JSON.toJSONBytes(status);
zkUtils.zkClient().updateDataSerialized(statusPath, new DataUpdater<byte[]>() {
@Override
public byte[] update(byte[] currentData) {
return bytes;
}
});
}
@Override
public void removeStatus(String taskId) {
DLinkZkUtils zkUtils = DLinkZkUtils.get();
String statusPath = DLinkZkPathDef.getTaskStatusNode(taskId);
zkUtils.zkClient().delete(statusPath);
}
@Override
public Collection<TaskStatus> getAll() {
DLinkZkUtils zkUtils = DLinkZkUtils.get();
List<TaskStatus> result = new ArrayList<>();
for (String taskId : tasks()) {
byte[] bytes = zkUtils.zkClient().readData(DLinkZkPathDef.getTaskStatusNode(taskId), true);
if (bytes != null) {
result.add(JSON.parseObject(bytes, TaskStatus.class));
}
}
return result;
}
@Override
public TaskStatus getStatus(String taskId) {
DLinkZkUtils zkUtils = DLinkZkUtils.get();
byte[] bytes = zkUtils.zkClient().readData(DLinkZkPathDef.getTaskStatusNode(taskId), true);
if (bytes != null) {
return JSON.parseObject(bytes, TaskStatus.class);
} else {
return null;
}
}
@Override
public Set<String> tasks() {
DLinkZkUtils zkUtils = DLinkZkUtils.get();
try {
List<String> list = zkUtils.zkClient().getChildren(DLinkZkPathDef.TaskRoot);
return list == null ? Sets.newHashSet() : list.stream().collect(Collectors.toSet());
} catch (ZkNoNodeException e) {
return Sets.newHashSet();
}
}
}
| 1,732 |
892 | <filename>advisories/unreviewed/2022/05/GHSA-w8vh-crmm-4pp5/GHSA-w8vh-crmm-4pp5.json
{
"schema_version": "1.2.0",
"id": "GHSA-w8vh-crmm-4pp5",
"modified": "2022-05-01T18:12:48Z",
"published": "2022-05-01T18:12:48Z",
"aliases": [
"CVE-2007-3347"
],
"details": "The D-Link DPH-540/DPH-541 phone accepts SIP INVITE messages that are not from the Call Server's IP address, which allows remote attackers to engage in arbitrary SIP communication with the phone, as demonstrated by communication with forged caller ID.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2007-3347"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/35063"
},
{
"type": "WEB",
"url": "http://secunia.com/advisories/25803"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/24560"
},
{
"type": "WEB",
"url": "http://www.sipera.com/index.php?action=resources,threat_advisory&tid=219&"
},
{
"type": "WEB",
"url": "http://www.vupen.com/english/advisories/2007/2320"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 609 |
1,517 | import subprocess
import sys
import re
# Note: no output will be printed until the entire test suite has finished
p = subprocess.Popen(sys.argv[1], shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
stdoutResult, stderrResult = p.communicate()
successRegex = re.compile(r'OK \(\d+ tests\)')
print(stdoutResult)
print(stderrResult)
if successRegex.search(stderrResult + stdoutResult):
sys.exit(0)
else:
sys.exit(1)
| 165 |
337 | /*
* filesystem.c -- filesystem functions
*
* Copyright (c) 2013, 2014 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#include "filesystem.h"
/*
* SDL.getBasePath()
*
* Returns:
* The base directory where the application runs or nil on failure
* The error message
*/
static int
l_getBasePath(lua_State *L)
{
char *str = SDL_GetBasePath();
if (str == NULL)
return commonPushSDLError(L, 1);
lua_pushstring(L, str);
SDL_free(str);
return 1;
}
/*
* SDL.getPrefPath(organization, application)
*
* Arguments:
* organization the organization name
* application the application name
*
* Returns:
* The directory or nil on failure
* The error message
*/
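/*
* Usage sketch from Lua (illustrative only; "MyOrg" and "MyApp" are made-up
* names):
*
*   local path, err = SDL.getPrefPath("MyOrg", "MyApp")
*   if not path then
*       print("getPrefPath failed: " .. err)
*   end
*/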
static int
l_getPrefPath(lua_State *L)
{
const char *organization = luaL_checkstring(L, 1);
const char *application = luaL_checkstring(L, 2);
char *str = SDL_GetPrefPath(organization, application);
if (str == NULL)
return commonPushSDLError(L, 1);
lua_pushstring(L, str);
SDL_free(str);
return 1;
}
const luaL_Reg FilesystemFunctions[] = {
{ "getBasePath", l_getBasePath },
{ "getPrefPath", l_getPrefPath },
{ NULL, NULL }
};
| 605 |
2,231 | <filename>engine/gamesys/src/gamesys/gamesys_private.h<gh_stars>1000+
// Copyright 2020 The Defold Foundation
// Licensed under the Defold License version 1.0 (the "License"); you may not use
// this file except in compliance with the License.
//
// You may obtain a copy of the License, together with FAQs at
// https://www.defold.com/license
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#ifndef DM_GAMESYS_PRIVER_H
#define DM_GAMESYS_PRIVER_H
#include <dlib/message.h>
#include <render/render.h>
#include <gameobject/gameobject.h>
namespace dmScript
{
struct LuaCallbackInfo;
}
namespace dmGameSystem
{
#define EXT_CONSTANTS(prefix, ext)\
static const char* prefix##_EXT = ext;\
static const dmhash_t prefix##_EXT_HASH = dmHashString64(ext);
EXT_CONSTANTS(COLLECTION_FACTORY, "collectionfactoryc")
EXT_CONSTANTS(COLLISION_OBJECT, "collisionobjectc")
EXT_CONSTANTS(FACTORY, "factoryc")
EXT_CONSTANTS(FONT, "fontc")
EXT_CONSTANTS(MATERIAL, "materialc")
EXT_CONSTANTS(BUFFER, "bufferc")
EXT_CONSTANTS(MODEL, "modelc")
EXT_CONSTANTS(TEXTURE, "texturec")
EXT_CONSTANTS(TEXTURE_SET, "texturesetc")
EXT_CONSTANTS(TILE_MAP, "tilemapc")
#undef EXT_CONSTANTS
static const dmhash_t PROP_FONT = dmHashString64("font");
static const dmhash_t PROP_IMAGE = dmHashString64("image");
static const dmhash_t PROP_MATERIAL = dmHashString64("material");
static const dmhash_t PROP_TEXTURE[dmRender::RenderObject::MAX_TEXTURE_COUNT] = {
dmHashString64("texture0"),
dmHashString64("texture1"),
dmHashString64("texture2"),
dmHashString64("texture3"),
dmHashString64("texture4"),
dmHashString64("texture5"),
dmHashString64("texture6"),
dmHashString64("texture7")
};
static const dmhash_t PROP_TILE_SOURCE = dmHashString64("tile_source");
struct EmitterStateChangedScriptData
{
EmitterStateChangedScriptData()
{
memset(this, 0, sizeof(*this));
}
dmhash_t m_ComponentId;
dmScript::LuaCallbackInfo* m_CallbackInfo;
};
// A wrapper for dmScript::CheckGoInstance
dmGameObject::HInstance CheckGoInstance(lua_State* L);
/**
* Log message error. The function will send a formatted printf-style string to dmLogError
* and append message sender/receiver information on the following format:
* Message <MESSAGE-ID> sent from <SENDER> to <RECEIVER>. For format-string should be a complete sentence including
* a terminating period character but without trailing space.
* @param message message
* @param format printf-style format string
*/
void LogMessageError(dmMessage::Message* message, const char* format, ...);
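// Illustrative call only (hypothetical format string; per the note above it is
// a complete sentence ending in a period, with no trailing space):
//   LogMessageError(message, "The component could not be found.");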
}
#endif // DM_GAMESYS_PRIVER_H
| 1,131 |
32,544 | package com.baeldung.java9.methodhandles;
public class Book {
String id;
String title;
public Book(String id, String title) {
this.id = id;
this.title = title;
}
@SuppressWarnings("unused")
private String formatBook() {
return id + " > " + title;
}
}
| 131 |
1,338 | /*
* Copyright 2005, Haiku, Inc. All Rights Reserved.
* Distributed under the terms of the MIT License.
*/
#ifndef KERNEL_ARCH_THREAD_TYPES_H
#define KERNEL_ARCH_THREAD_TYPES_H
#include <arch_thread_types.h>
#endif /* KERNEL_ARCH_THREAD_TYPES_H */
| 100 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Mésigny","circ":"1ère circonscription","dpt":"Haute-Savoie","inscrits":617,"abs":384,"votants":233,"blancs":15,"nuls":3,"exp":215,"res":[{"nuance":"REM","nom":"<NAME>","voix":119},{"nuance":"LR","nom":"<NAME>","voix":96}]} | 111 |
5,168 | /**
* \file dnn/atlas-stub/src/libatlas-wrap.cpp
* MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
*
* Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*/
#pragma GCC visibility push(default)
#include <cstdio>
#define LOGE(fmt, v...) fprintf(stderr, "err: " fmt "\n", ##v)
#include "acl/acl.h"
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#if defined(_WIN32)
#include <windows.h>
#define RTLD_LAZY 0
static void* dlopen(const char* file, int) {
return static_cast<void*>(LoadLibraryA(file));
}
static void* dlerror() {
const char* errmsg = "dlerror not aviable in windows";
return const_cast<char*>(errmsg);
}
static void* dlsym(void* handle, const char* name) {
FARPROC symbol = GetProcAddress((HMODULE)handle, name);
return reinterpret_cast<void*>(symbol);
}
#else
#include <dlfcn.h>
#include <unistd.h>
#endif
static void log_failed_load(int func_idx);
namespace {
template <typename T>
T on_init_failed(int func_idx);
template <>
float on_init_failed(int func_idx) {
log_failed_load(func_idx);
return 0.f;
}
template <>
aclFloat16 on_init_failed(int func_idx) {
log_failed_load(func_idx);
return 0;
}
template <>
aclDataBuffer* on_init_failed(int func_idx) {
log_failed_load(func_idx);
return nullptr;
}
template <>
aclError on_init_failed(int func_idx) {
log_failed_load(func_idx);
return ACL_ERROR_INTERNAL_ERROR;
}
template <>
void* on_init_failed(int func_idx) {
log_failed_load(func_idx);
return nullptr;
}
template <>
uint32_t on_init_failed(int func_idx) {
log_failed_load(func_idx);
return 0;
}
template <>
size_t on_init_failed(int func_idx) {
log_failed_load(func_idx);
return 0;
}
template <>
void on_init_failed(int func_idx) {
log_failed_load(func_idx);
}
template <>
int64_t on_init_failed(int func_idx) {
log_failed_load(func_idx);
return 0;
}
template <>
const char* on_init_failed(int func_idx) {
log_failed_load(func_idx);
return "load lib failed";
}
template <>
aclopAttr* on_init_failed(int func_idx) {
log_failed_load(func_idx);
return nullptr;
}
template <>
aclmdlDesc* on_init_failed(int func_idx) {
log_failed_load(func_idx);
return nullptr;
}
template <>
aclmdlDataset* on_init_failed(int func_idx) {
log_failed_load(func_idx);
return nullptr;
}
template <>
aclFormat on_init_failed(int func_idx) {
log_failed_load(func_idx);
return ACL_FORMAT_UNDEFINED;
}
template <>
aclTensorDesc* on_init_failed(int func_idx) {
log_failed_load(func_idx);
return nullptr;
}
template <>
aclDataType on_init_failed(int func_idx) {
log_failed_load(func_idx);
return ACL_DT_UNDEFINED;
}
template <>
aclmdlAIPP* on_init_failed(int func_idx) {
log_failed_load(func_idx);
return nullptr;
}
} // namespace
#include "./libatlas-wrap.h"
static const char* default_so_paths[] = {
"/usr/local/Ascend/acllib/lib64/libascendcl.so",
"libascendcl.so",
};
static void* get_library_handle() {
void* handle = nullptr;
for (size_t i = 0; i < (sizeof(default_so_paths) / sizeof(char*)); i++) {
handle = dlopen(default_so_paths[i], RTLD_LAZY);
if (handle) {
break;
}
}
if (!handle) {
LOGE("Failed to load atlas library");
return nullptr;
}
return handle;
}
static void log_failed_load(int func_idx) {
LOGE("failed to load atlas func: %s", g_func_name[func_idx]);
}
static void* resolve_library_func(void* handle, const char* func) {
if (!handle) {
LOGE("handle should not be nullptr!");
return nullptr;
}
auto ret = dlsym(handle, func);
if (!ret) {
LOGE("failed to load atlas func: %s", func);
}
return ret;
}
| 1,681 |
1,412 | #include <stdbool.h>
#include <stdint.h>
#include <stddef.h>
#pragma once
struct Blob;
// Can pass as the maxFrames argument to lovrSoundCreateFromCallback
#define LOVR_SOUND_ENDLESS 0xFFFFFFFF
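// Sketch of a callback-backed sound (illustrative only; myRead and memo are
// assumed to be defined elsewhere):
//   Sound* s = lovrSoundCreateFromCallback(myRead, memo, NULL, SAMPLE_F32,
//     48000, CHANNEL_STEREO, LOVR_SOUND_ENDLESS);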
typedef enum {
SAMPLE_F32,
SAMPLE_I16
} SampleFormat;
typedef enum {
CHANNEL_MONO,
CHANNEL_STEREO,
CHANNEL_AMBISONIC
} ChannelLayout;
typedef struct Sound Sound;
typedef uint32_t (SoundCallback)(Sound* sound, uint32_t offset, uint32_t count, void* data);
typedef void (SoundDestroyCallback)(Sound* sound);
Sound* lovrSoundCreateRaw(uint32_t frames, SampleFormat format, ChannelLayout channels, uint32_t sampleRate, struct Blob* data);
Sound* lovrSoundCreateStream(uint32_t frames, SampleFormat format, ChannelLayout channels, uint32_t sampleRate);
Sound* lovrSoundCreateFromFile(struct Blob* blob, bool decode);
Sound* lovrSoundCreateFromCallback(SoundCallback read, void *callbackMemo, SoundDestroyCallback callbackDataDestroy, SampleFormat format, uint32_t sampleRate, ChannelLayout channels, uint32_t maxFrames);
void lovrSoundDestroy(void* ref);
struct Blob* lovrSoundGetBlob(Sound* sound);
SampleFormat lovrSoundGetFormat(Sound* sound);
ChannelLayout lovrSoundGetChannelLayout(Sound* sound);
uint32_t lovrSoundGetChannelCount(Sound* sound);
uint32_t lovrSoundGetSampleRate(Sound* sound);
uint32_t lovrSoundGetFrameCount(Sound* sound);
size_t lovrSoundGetStride(Sound* sound);
bool lovrSoundIsCompressed(Sound* sound);
bool lovrSoundIsStream(Sound* sound);
uint32_t lovrSoundRead(Sound* sound, uint32_t offset, uint32_t count, void* data);
uint32_t lovrSoundWrite(Sound* sound, uint32_t offset, uint32_t count, const void* data);
uint32_t lovrSoundCopy(Sound* src, Sound* dst, uint32_t frames, uint32_t srcOffset, uint32_t dstOffset);
void *lovrSoundGetCallbackMemo(Sound *sound);
| 589 |
1,647 | #ifndef PYTHONIC_NUMPY_RANDOM_NEGATIVE_BINOMIAL_HPP
#define PYTHONIC_NUMPY_RANDOM_NEGATIVE_BINOMIAL_HPP
#include "pythonic/include/numpy/random/negative_binomial.hpp"
#include "pythonic/include/numpy/random/generator.hpp"
#include "pythonic/types/ndarray.hpp"
#include "pythonic/types/NoneType.hpp"
#include "pythonic/types/tuple.hpp"
#include "pythonic/utils/functor.hpp"
#include <random>
#include <algorithm>
PYTHONIC_NS_BEGIN
namespace numpy
{
namespace random
{
template <class pS>
types::ndarray<double, pS> negative_binomial(double n, double p,
pS const &shape)
{
types::ndarray<double, pS> result{shape, types::none_type()};
std::gamma_distribution<double> distribution_gamma{n, (1 - p) / p};
std::generate(result.fbegin(), result.fend(), [&]() {
return std::poisson_distribution<long>{
(distribution_gamma(details::generator))}(details::generator);
});
return result;
}
auto negative_binomial(double n, double p, long size)
-> decltype(negative_binomial(n, p, types::array<long, 1>{{size}}))
{
return negative_binomial(n, p, types::array<long, 1>{{size}});
}
double negative_binomial(double n, double p, types::none_type d)
{
std::gamma_distribution<double> distribution_gamma{n, (1 - p) / p};
return std::poisson_distribution<long>{
(distribution_gamma(details::generator))}(details::generator);
}
}
}
PYTHONIC_NS_END
#endif
| 672 |
419 | package com.swagger.entity;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
@ApiModel
public class DefaultPojo {
@ApiModelProperty(value = "ID",example = "1000")
private int id;
@ApiModelProperty(value = "地址",example = "上海")
private String address;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
}
| 232 |
901 | <reponame>dandycheung/Serial<filename>serialization/src/test/java/com/twitter/serial/serializer/CoreSerializersTests.java
/*
* Copyright 2017 Twitter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.serial.serializer;
import com.twitter.serial.SerializationTestUtils;
import com.twitter.serial.stream.bytebuffer.ByteBufferSerial;
import com.twitter.serial.stream.Serial;
import com.twitter.serial.util.SerializableClass;
import com.twitter.serial.util.SerializationException;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.math.BigDecimal;
import static org.assertj.core.api.Java6Assertions.assertThat;
public class CoreSerializersTests {
private Serial mSerial;
@Before
public void setUp() {
mSerial = new ByteBufferSerial();
}
@Test
public void testBaseSerializers() throws Exception {
SerializationTestUtils.checkSerialization(true, CoreSerializers.BOOLEAN);
SerializationTestUtils.checkSerialization(false, CoreSerializers.BOOLEAN);
SerializationTestUtils.checkSerialization(123, CoreSerializers.INTEGER);
SerializationTestUtils.checkSerialization('c', CoreSerializers.CHARACTER);
SerializationTestUtils.checkSerialization((short) 2, CoreSerializers.SHORT);
SerializationTestUtils.checkSerialization(45678L, CoreSerializers.LONG);
SerializationTestUtils.checkSerialization(12.34, CoreSerializers.DOUBLE);
SerializationTestUtils.checkSerialization(5.678F, CoreSerializers.FLOAT);
SerializationTestUtils.checkSerialization(
new int[] { -50, 0, 1, 1337, Integer.MAX_VALUE, Integer.MIN_VALUE },
CoreSerializers.INT_ARRAY);
SerializationTestUtils.checkSerialization(
new long[] { -50L, 0L, 1L, 1337L, Long.MAX_VALUE, Long.MIN_VALUE },
CoreSerializers.LONG_ARRAY);
SerializationTestUtils.checkSerialization(
new float[] { -50.0f, 0.0f, Float.MAX_VALUE, Float.MIN_VALUE },
CoreSerializers.FLOAT_ARRAY);
SerializationTestUtils.checkSerialization(
new double[] { -50.0d, 0.0d, Double.MAX_VALUE, Double.MIN_VALUE },
CoreSerializers.DOUBLE_ARRAY);
SerializationTestUtils.checkSerialization("hello", CoreSerializers.STRING);
SerializationTestUtils.checkSerialization(new BigDecimal(0.23), CoreSerializers.BIG_DECIMAL);
SerializationTestUtils.checkSerialization(new BigDecimal("1.1232190473829758495647358647354354123124325233"),
CoreSerializers.BIG_DECIMAL);
SerializationTestUtils.checkSerialization(new BigDecimal(102392914738294732.1323432432F),
CoreSerializers.BIG_DECIMAL);
SerializationTestUtils.checkSerialization(new BigDecimal("123456789.1234567890"),
CoreSerializers.BIG_DECIMAL);
}
@Test
public void testSimpleObjectSerializer() throws Exception {
SerializationTestUtils.checkSerialization("hello", CoreSerializers.SIMPLE_OBJECT);
SerializationTestUtils.checkSerialization(123, CoreSerializers.SIMPLE_OBJECT);
SerializationTestUtils.checkSerialization(45678L, CoreSerializers.SIMPLE_OBJECT);
SerializationTestUtils.checkSerialization(12.34, CoreSerializers.SIMPLE_OBJECT);
SerializationTestUtils.checkSerialization(5.678F, CoreSerializers.SIMPLE_OBJECT);
SerializationTestUtils.checkSerialization("hello", CoreSerializers.SIMPLE_OBJECT);
}
@Test
public void testSerializeEnum() throws Exception {
SerializationTestUtils.checkSerializationComparingFieldByField(SerializationTestUtils.TestType.DEFAULT,
CoreSerializers.getEnumSerializer(SerializationTestUtils.TestType.class));
SerializationTestUtils.checkSerializationComparingFieldByField(SerializationTestUtils.TestType.FIRST,
CoreSerializers.getEnumSerializer(SerializationTestUtils.TestType.class));
SerializationTestUtils.checkSerializationComparingFieldByField(SerializationTestUtils.TestType.SECOND,
CoreSerializers.getEnumSerializer(SerializationTestUtils.TestType.class));
}
@Test
public void testReadNullWithDefaultSerializer() throws Exception {
final SerializationTestUtils.TestObject testObject = new SerializationTestUtils.TestObject("testName", 1);
SerializationTestUtils.checkSerializationComparingFieldByField(
testObject, SerializationTestUtils.TestObject.SERIALIZER);
}
@Test
public void testBaseClassSerializer() throws Exception {
final Serializer<SerializationTestUtils.BaseTestObject> baseSerializer =
CoreSerializers.getBaseClassSerializer(
SerializableClass.create(SerializationTestUtils.TestObject.class,
new SerializationTestUtils.TestObject.TestObjectSerializer()),
SerializableClass.create(SerializationTestUtils.TestObject2.class,
SerializationTestUtils.TestObject2.SERIALIZER)
);
final SerializationTestUtils.TestObject testObject = new SerializationTestUtils.TestObject("test name", 1);
final SerializationTestUtils.TestObject2 testObject2 = new SerializationTestUtils.TestObject2(true);
SerializationTestUtils.checkSerializationComparingFieldByField(testObject, baseSerializer);
SerializationTestUtils.checkSerializationComparingFieldByField(testObject2, baseSerializer);
}
@Test
public void testBaseClassSerializerWithDummyClass() throws Exception {
final Serializer<SerializationTestUtils.BaseTestObject> baseSerializer =
CoreSerializers.getBaseClassSerializer(
SerializableClass.<SerializationTestUtils.BaseTestObject>getDummy(),
SerializableClass.create(SerializationTestUtils.TestObject2.class,
SerializationTestUtils.TestObject2.SERIALIZER)
);
final SerializationTestUtils.TestObject2 testObject2 = new SerializationTestUtils.TestObject2(true);
SerializationTestUtils.checkSerializationComparingFieldByField(testObject2, baseSerializer);
}
@Test(expected = SerializationException.class)
public void testBaseClassSerializerThrowsExceptionWhenNoSerializerForSubclass() throws Exception {
final Serializer<SerializationTestUtils.BaseTestObject> baseSerializer =
CoreSerializers.getBaseClassSerializer(
SerializableClass.<SerializationTestUtils.BaseTestObject>getDummy(),
SerializableClass.create(SerializationTestUtils.TestObject2.class,
SerializationTestUtils.TestObject2.SERIALIZER)
);
final SerializationTestUtils.TestObject testObject = new SerializationTestUtils.TestObject("test name", 1);
final byte[] testObjectByteArray = mSerial.toByteArray(testObject, baseSerializer);
assertThat(mSerial.fromByteArray(testObjectByteArray, baseSerializer)).isNull();
}
@Test
public void testSerializableSerializer() throws IOException, ClassNotFoundException {
final Serializer<Integer> integerSerializer = CoreSerializers.getSerializableSerializer();
SerializationTestUtils.checkSerialization(null, integerSerializer);
SerializationTestUtils.checkSerialization(5, integerSerializer);
final Serializer<String> stringSerializer = CoreSerializers.getSerializableSerializer();
SerializationTestUtils.checkSerialization(null, stringSerializer);
SerializationTestUtils.checkSerialization("", stringSerializer);
SerializationTestUtils.checkSerialization("com/twitter/test", stringSerializer);
}
}
| 3,091 |
861 | <filename>src/test/java/com/univocity/parsers/examples/samples/CsvSearchExample.java
/*******************************************************************************
* Copyright 2016 Univocity Software Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.univocity.parsers.examples.samples;
import com.univocity.parsers.common.*;
import com.univocity.parsers.common.processor.*;
import com.univocity.parsers.csv.*;
import java.io.*;
import java.util.*;
/**
* Example to demonstrate how to implement a basic search over a CSV file.
*
* @author Univocity Software Pty Ltd - <a href="mailto:<EMAIL>"><EMAIL></a>
*/
public class CsvSearchExample {
// Let's create our own RowProcessor to analyze the rows
static class CsvSearch extends RowListProcessor {
private final String stringToMatch;
private final String columnToMatch;
private int indexToMatch = -1;
public CsvSearch(String columnToMatch, String stringToMatch){
this.columnToMatch = columnToMatch;
this.stringToMatch = stringToMatch.toLowerCase();
}
public CsvSearch(int columnToMatch, String stringToMatch){
            this(null, stringToMatch);
indexToMatch = columnToMatch;
}
@Override
public void rowProcessed(String[] row, ParsingContext context) {
if(indexToMatch == -1) {
indexToMatch = context.indexOf(columnToMatch);
}
String value = row[indexToMatch];
if(value != null && value.toLowerCase().contains(stringToMatch)) {
super.rowProcessed(row, context);
}
// else skip the row.
}
}
public static void main(String... args) {
// let's measure the time roughly
long start = System.currentTimeMillis();
CsvParserSettings settings = new CsvParserSettings();
settings.setHeaderExtractionEnabled(true);
CsvSearch search = new CsvSearch("City", "Paris"); //searching for cities with "paris" in the name
//We instruct the parser to send all rows parsed to your custom RowProcessor.
settings.setProcessor(search);
//Finally, we create a parser
CsvParser parser = new CsvParser(settings);
//And parse! All rows are sent to your custom RowProcessor (CsvSearch)
//I'm using a 150MB CSV file with 1.3 million rows.
parser.parse(new File("/Users/jbax/dev/data/worldcitiespop.txt"), "ISO-8859-1");
List<String[]> results = search.getRows();
//Nothing else to do. The parser closes the input and does everything for you safely. Let's just get the results:
System.out.println("Rows matched: " + results.size());
System.out.println("Time taken: " + (System.currentTimeMillis() - start) + " ms");
System.out.println("Matched rows");
for(String[] row : results){
System.out.println(Arrays.toString(row));
}
}
}
| 997 |
879 | package org.zstack.header.configuration;
import org.zstack.header.message.APIEvent;
/**
*/
public class APIGenerateSqlIndexEvent extends APIEvent {
public APIGenerateSqlIndexEvent(String apiId) {
super(apiId);
}
public APIGenerateSqlIndexEvent() {
super(null);
}
public static APIGenerateSqlIndexEvent __example__() {
APIGenerateSqlIndexEvent event = new APIGenerateSqlIndexEvent();
return event;
}
}
| 185 |
1,470 | <reponame>ngoducthinh97/flutter_inappwebview<filename>android/src/main/java/com/pichillilorenzo/flutter_inappwebview/types/URLCredential.java<gh_stars>1000+
package com.pichillilorenzo.flutter_inappwebview.types;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.util.HashMap;
import java.util.Map;
public class URLCredential {
@Nullable
private Long id;
@Nullable
private String username;
@Nullable
private String password;
@Nullable
private Long protectionSpaceId;
public URLCredential(@Nullable String username, @Nullable String password) {
this.username = username;
this.password = password;
}
public URLCredential (@Nullable Long id, @NonNull String username, @NonNull String password, @Nullable Long protectionSpaceId) {
this.id = id;
this.username = username;
this.password = password;
this.protectionSpaceId = protectionSpaceId;
}
public Map<String, Object> toMap() {
Map<String, Object> urlCredentialMap = new HashMap<>();
urlCredentialMap.put("username", username);
urlCredentialMap.put("password", password);
return urlCredentialMap;
}
@Nullable
public Long getId() {
return id;
}
public void setId(@Nullable Long id) {
this.id = id;
}
@Nullable
public String getUsername() {
return username;
}
public void setUsername(@Nullable String username) {
this.username = username;
}
@Nullable
public String getPassword() {
return password;
}
public void setPassword(@Nullable String password) {
this.password = password;
}
@Nullable
public Long getProtectionSpaceId() {
return protectionSpaceId;
}
public void setProtectionSpaceId(@Nullable Long protectionSpaceId) {
this.protectionSpaceId = protectionSpaceId;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
URLCredential that = (URLCredential) o;
if (username != null ? !username.equals(that.username) : that.username != null) return false;
return password != null ? password.equals(that.password) : that.password == null;
}
@Override
public int hashCode() {
int result = username != null ? username.hashCode() : 0;
result = 31 * result + (password != null ? password.hashCode() : 0);
return result;
}
@Override
public String toString() {
return "URLCredential{" +
"username='" + username + '\'' +
", password='" + password + '\'' +
'}';
}
}
| 881 |
2,144 | <gh_stars>1000+
from pgmpy.models import BayesianNetwork, DynamicBayesianNetwork
from pgmpy.factors.discrete import DiscreteFactor
class ApproxInference(object):
def __init__(self, model):
"""
Initializes the Approximate Inference class.
Parameters
----------
model: Instance of pgmpy.models.BayesianNetwork or pgmpy.models.DynamicBayesianNetwork
Examples
--------
>>> from pgmpy.utils import get_example_model
>>> model = get_example_model('alarm')
>>> infer = ApproxInference(model)
"""
if not isinstance(model, (BayesianNetwork, DynamicBayesianNetwork)):
raise ValueError(
f"model should either be a Bayesian Network or Dynamic Bayesian Network. Got {type(model)}."
)
model.check_model()
self.model = model
@staticmethod
def _get_factor_from_df(df):
"""
        Takes a groupby dataframe and converts it into a pgmpy.factors.discrete.DiscreteFactor object.
"""
variables = list(df.index.names)
state_names = {var: list(df.index.unique(var)) for var in variables}
cardinality = [len(state_names[var]) for var in variables]
return DiscreteFactor(
variables=variables,
cardinality=cardinality,
values=df.values,
state_names=state_names,
)
def get_distribution(self, samples, variables, joint=True):
"""
Computes distribution of `variables` from given data `samples`.
Parameters
----------
samples: pandas.DataFrame
A dataframe of samples generated from the model.
variables: list (array-like)
A list of variables whose distribution needs to be computed.
joint: boolean
If joint=True, computes the joint distribution over `variables`.
Else, returns a dict with marginal distribution of each variable in
`variables`.
"""
        if joint:
return self._get_factor_from_df(
samples.groupby(variables).size() / samples.shape[0]
)
else:
return {
var: self._get_factor_from_df(
samples.groupby([var]).size() / samples.shape[0]
)
for var in variables
}
def query(
self,
variables,
n_samples=int(1e4),
evidence=None,
virtual_evidence=None,
joint=True,
show_progress=True,
):
"""
Method for doing approximate inference based on sampling in Bayesian
Networks and Dynamic Bayesian Networks.
Parameters
----------
variables: list
List of variables for which the probability distribution needs to be calculated.
n_samples: int
The number of samples to generate for computing the distributions. Higher `n_samples`
results in more accurate results at the cost of more computation time.
evidence: dict (default: None)
The observed values. A dict key, value pair of the form {var: state_name}.
virtual_evidence: list (default: None)
A list of pgmpy.factors.discrete.TabularCPD representing the virtual/soft
evidence.
show_progress: boolean (default: True)
If True, shows a progress bar when generating samples.
Returns
-------
        Probability distribution: An instance of pgmpy.factors.discrete.DiscreteFactor, or a dict of DiscreteFactors if joint=False
Examples
--------
>>> from pgmpy.utils import get_example_model
>>> from pgmpy.inference import ApproxInference
>>> model = get_example_model("alarm")
>>> infer = ApproxInference(model)
>>> infer.query(variables=["HISTORY"])
<DiscreteFactor representing phi(HISTORY:2) at 0x7f92d9f5b910>
>>> infer.query(variables=["HISTORY", "CVP"], joint=True)
<DiscreteFactor representing phi(HISTORY:2, CVP:3) at 0x7f92d9f77610>
>>> infer.query(variables=["HISTORY", "CVP"], joint=False)
{'HISTORY': <DiscreteFactor representing phi(HISTORY:2) at 0x7f92dc61eb50>,
'CVP': <DiscreteFactor representing phi(CVP:3) at 0x7f92d915ec40>}
"""
# Step 1: Generate samples for the query
samples = self.model.simulate(
n_samples=n_samples,
evidence=evidence,
virtual_evidence=virtual_evidence,
show_progress=show_progress,
)
# Step 2: Compute the distributions and return it.
return self.get_distribution(samples, variables=variables, joint=joint)
| 2,020 |
504 | <filename>src/main/java/com/structurizr/dsl/DeploymentGroupParser.java
package com.structurizr.dsl;
final class DeploymentGroupParser extends AbstractParser {
private static final String GRAMMAR = "deploymentGroup <name>";
private static final int DEPLOYMENT_GROUP_NAME_INDEX = 1;
String parse(Tokens tokens) {
// deploymentGroup <name>
if (tokens.hasMoreThan(DEPLOYMENT_GROUP_NAME_INDEX)) {
throw new RuntimeException("Too many tokens, expected: " + GRAMMAR);
} else if (tokens.size() != DEPLOYMENT_GROUP_NAME_INDEX + 1) {
throw new RuntimeException("Expected: " + GRAMMAR);
} else {
return tokens.get(DEPLOYMENT_GROUP_NAME_INDEX);
}
}
} | 295 |
7,956 | <filename>spyder/api/shellconnect/main_widget.py
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
Main widget to use in plugins that show content that comes from the IPython
console, such as the Variable Explorer or Plots.
"""
# Third party imports
from qtpy.QtWidgets import QStackedWidget, QVBoxLayout
# Local imports
from spyder.api.translations import get_translation
from spyder.api.widgets.main_widget import PluginMainWidget
# Localization
_ = get_translation('spyder')
class ShellConnectMainWidget(PluginMainWidget):
"""
Main widget to use in a plugin that shows console-specific content.
Notes
-----
* This is composed of a QStackedWidget to stack widgets associated to each
shell widget in the console and only show one of them at a time.
* The current widget in the stack will display the content associated to
the console with focus.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Widgets
self._stack = QStackedWidget(self)
self._shellwidgets = {}
# Layout
layout = QVBoxLayout()
layout.setSpacing(0)
layout.setContentsMargins(0, 0, 0, 0)
layout.addWidget(self._stack)
self.setLayout(layout)
def update_style(self):
self._stack.setStyleSheet("QStackedWidget {padding: 0px; border: 0px}")
    # ---- Stack accessors
# ------------------------------------------------------------------------
def count(self):
"""
Return the number of widgets in the stack.
Returns
-------
int
The number of widgets in the stack.
"""
return self._stack.count()
def current_widget(self):
"""
        Return the current widget in the stack.
Returns
-------
QWidget
The current widget.
"""
return self._stack.currentWidget()
def get_focus_widget(self):
return self.current_widget()
# ---- Public API
# ------------------------------------------------------------------------
def add_shellwidget(self, shellwidget):
"""
Create a new widget in the stack and associate it to
shellwidget.
"""
shellwidget_id = id(shellwidget)
if shellwidget_id not in self._shellwidgets:
widget = self.create_new_widget(shellwidget)
self._stack.addWidget(widget)
self._shellwidgets[shellwidget_id] = widget
self.set_shellwidget(shellwidget)
self.update_actions()
def remove_shellwidget(self, shellwidget):
"""Remove widget associated to shellwidget."""
shellwidget_id = id(shellwidget)
if shellwidget_id in self._shellwidgets:
widget = self._shellwidgets.pop(shellwidget_id)
self._stack.removeWidget(widget)
self.close_widget(widget)
def set_shellwidget(self, shellwidget):
"""
Set widget associated with shellwidget as the current widget.
"""
shellwidget_id = id(shellwidget)
old_widget = self.current_widget()
if shellwidget_id in self._shellwidgets:
widget = self._shellwidgets[shellwidget_id]
self.switch_widget(widget, old_widget)
self._stack.setCurrentWidget(widget)
def create_new_widget(self, shellwidget):
"""Create a widget to communicate with shellwidget."""
raise NotImplementedError
def close_widget(self, widget):
"""Close the widget."""
raise NotImplementedError
def switch_widget(self, widget, old_widget):
"""Switch the current widget."""
raise NotImplementedError
def refresh(self):
"""Refresh widgets."""
if self.count():
widget = self.current_widget()
widget.refresh()
def update_actions(self):
"""Update the actions."""
widget = self.current_widget()
for __, action in self.get_actions().items():
if action:
# IMPORTANT: Since we are defining the main actions in here
# and the context is WidgetWithChildrenShortcut we need to
# assign the same actions to the children widgets in order
# for shortcuts to work
if widget:
widget_actions = widget.actions()
if action not in widget_actions:
widget.addAction(action)
| 1,824 |
783 | void stage_cu()
{
// Writing instruction memory
Xil_Out32(0x50001004, 0);
Xil_Out32(0x50001000, 0x36100003);
Xil_Out32(0x50001004, 4);
Xil_Out32(0x50001000, 0xBF810000);
// Writing SGPRs for wavefront 1
Xil_Out32(0x50002004, 0);
Xil_Out32(0x50002008, 0x2);
Xil_Out32(0x5000200C, 0x21);
Xil_Out32(0x50002010, 0x2B);
Xil_Out32(0x50002014, 0x21);
Xil_Out32(0x50002000, 1);
Xil_Out32(0x50002004, 16);
Xil_Out32(0x50002008, 0x2B);
Xil_Out32(0x5000200C, 0x1F);
Xil_Out32(0x50002010, 0x2C);
Xil_Out32(0x50002014, 0x22);
Xil_Out32(0x50002000, 1);
Xil_Out32(0x50002004, 32);
Xil_Out32(0x50002008, 0x23);
Xil_Out32(0x5000200C, 0x26);
Xil_Out32(0x50002010, 0x0);
Xil_Out32(0x50002014, 0x0);
Xil_Out32(0x50002000, 1);
}
| 424 |
1,259 | <filename>sample-code/examples/java/junit/src/test/java/com/saucelabs/appium/page_object/widgets/html/simple/HtmlMovie.java
package com.saucelabs.appium.page_object.widgets.html.simple;
import com.saucelabs.appium.page_object.widgets.Movie;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.FindBy;
public class HtmlMovie extends Movie {
@FindBy(className = "movieTitle")
private WebElement title;
@FindBy(className = "tMeterScore")
private WebElement score;
@FindBy(className = "poster_container")
private WebElement poster;
@FindBy(xpath = ".//*[@class=\"movie_info\"]/a/h3")
private WebElement linkToMovie;
protected HtmlMovie(WebElement element) {
super(element);
}
@Override
public String title() {
return title.getText();
}
@Override
public String score() {
return score.getText();
}
@Override
public Object getPoster() {
return poster.getSize();
}
@Override
public void goToReview() {
linkToMovie.click();
}
}
| 421 |
5,169 | {
"name": "MethodLoader.swift",
"module_name": "MethodLoader",
"version": "0.0.1",
"summary": "swift initialize method substitute plan.",
"swift_versions": "5.0",
"description": "Apply `MethodLoaderProtocol` for a object to instead of initalize method in swift.",
"homepage": "https://github.com/zevwings/MethodLoader",
"license": "Aphach",
"authors": {
"zevwings": "<EMAIL>"
},
"platforms": {
"ios": "9.0"
},
"source": {
"git": "<EMAIL>:zevwings/MethodLoader.git",
"tag": "0.0.1"
},
"source_files": [
"MethodLoader/MethodLoader.h",
"MethodLoader/**/*.swift"
],
"requires_arc": true,
"swift_version": "5.0"
}
| 267 |
3,562 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "util/tdigest.h"
#include <gtest/gtest.h>
#include <random>
#include "test_util/test_util.h"
namespace doris {
class TDigestTest : public ::testing::Test {
protected:
// You can remove any or all of the following functions if its body
// is empty.
TDigestTest() {
// You can do set-up work for each test here.
}
virtual ~TDigestTest() {
// You can do clean-up work that doesn't throw exceptions here.
}
// If the constructor and destructor are not enough for setting up
// and cleaning up each test, you can define the following methods:
virtual void SetUp() {
// Code here will be called immediately after the constructor (right
// before each test).
}
virtual void TearDown() {
// Code here will be called immediately after each test (right
// before the destructor).
}
static void SetUpTestCase() {
static bool initialized = false;
if (!initialized) {
FLAGS_logtostderr = true;
google::InstallFailureSignalHandler();
google::InitGoogleLogging("testing::TDigestTest");
initialized = true;
}
}
// Objects declared here can be used by all tests in the test case for Foo.
};
static double quantile(const double q, const std::vector<double>& values) {
double q1;
if (values.size() == 0) {
q1 = NAN;
} else if (q == 1 || values.size() == 1) {
q1 = values[values.size() - 1];
} else {
auto index = q * values.size();
if (index < 0.5) {
q1 = values[0];
} else if (values.size() - index < 0.5) {
q1 = values[values.size() - 1];
} else {
index -= 0.5;
const int intIndex = static_cast<int>(index);
q1 = values[intIndex + 1] * (index - intIndex) +
values[intIndex] * (intIndex + 1 - index);
}
}
return q1;
}
TEST_F(TDigestTest, CrashAfterMerge) {
TDigest digest(1000);
std::uniform_real_distribution<> reals(0.0, 1.0);
std::random_device gen;
for (int i = 0; i < LOOP_LESS_OR_MORE(100, 100000); i++) {
digest.add(reals(gen));
}
digest.compress();
TDigest digest2(1000);
digest2.merge(&digest);
digest2.quantile(0.5);
}
TEST_F(TDigestTest, EmptyDigest) {
TDigest digest(100);
EXPECT_EQ(0, digest.processed().size());
}
TEST_F(TDigestTest, SingleValue) {
TDigest digest(100);
std::random_device gen;
std::uniform_real_distribution<> dist(0, 1000);
const auto value = dist(gen);
digest.add(value);
std::uniform_real_distribution<> dist2(0, 1.0);
const double q = dist2(gen);
EXPECT_NEAR(value, digest.quantile(0.0), 0.001f);
EXPECT_NEAR(value, digest.quantile(q), 0.001f);
EXPECT_NEAR(value, digest.quantile(1.0), 0.001f);
}
TEST_F(TDigestTest, FewValues) {
// When there are few values in the tree, quantiles should be exact
TDigest digest(1000);
std::random_device gen;
std::uniform_real_distribution<> reals(0.0, 100.0);
std::uniform_int_distribution<> dist(0, 10);
std::uniform_int_distribution<> bools(0, 1);
std::uniform_real_distribution<> qvalue(0.0, 1.0);
const auto length = 10; //dist(gen);
std::vector<double> values;
values.reserve(length);
for (int i = 0; i < length; ++i) {
auto const value = (i == 0 || bools(gen)) ? reals(gen) : values[i - 1];
digest.add(value);
values.push_back(value);
}
std::sort(values.begin(), values.end());
digest.compress();
EXPECT_EQ(digest.processed().size(), values.size());
std::vector<double> testValues{0.0, 1.0e-10, qvalue(gen), 0.5, 1.0 - 1e-10, 1.0};
for (auto q : testValues) {
double q1 = quantile(q, values);
auto q2 = digest.quantile(q);
if (std::isnan(q1)) {
EXPECT_TRUE(std::isnan(q2));
} else {
EXPECT_NEAR(q1, q2, 0.03) << "q = " << q;
}
}
}
TEST_F(TDigestTest, MoreThan2BValues) {
TDigest digest(1000);
std::random_device gen;
std::uniform_real_distribution<> reals(0.0, 1.0);
for (int i = 0; i < 1000; ++i) {
const double next = reals(gen);
digest.add(next);
}
for (int i = 0; i < 10; ++i) {
const double next = reals(gen);
const auto count = 1L << 28;
digest.add(next, count);
}
EXPECT_EQ(static_cast<long>(1000 + float(10L * (1 << 28))), digest.totalWeight());
EXPECT_GT(digest.totalWeight(), std::numeric_limits<int32_t>::max());
std::vector<double> quantiles{0, 0.1, 0.5, 0.9, 1, reals(gen)};
std::sort(quantiles.begin(), quantiles.end());
auto prev = std::numeric_limits<double>::min();
for (double q : quantiles) {
const double v = digest.quantile(q);
EXPECT_GE(v, prev) << "q = " << q;
prev = v;
}
}
TEST_F(TDigestTest, MergeTest) {
TDigest digest1(1000);
TDigest digest2(1000);
digest2.add(std::vector<const TDigest*>{&digest1});
}
TEST_F(TDigestTest, TestSorted) {
TDigest digest(1000);
std::uniform_real_distribution<> reals(0.0, 1.0);
std::uniform_int_distribution<> ints(0, 10);
std::random_device gen;
for (int i = 0; i < 10000; ++i) {
digest.add(reals(gen), 1 + ints(gen));
}
digest.compress();
Centroid previous(0, 0);
for (auto centroid : digest.processed()) {
if (previous.weight() != 0) {
CHECK_LE(previous.mean(), centroid.mean());
}
previous = centroid;
}
}
TEST_F(TDigestTest, ExtremeQuantiles) {
TDigest digest(1000);
// t-digest shouldn't merge extreme nodes, but let's still test how it would
// answer to extreme quantiles in that case ('extreme' in the sense that the
// quantile is either before the first node or after the last one)
digest.add(10, 3);
digest.add(20, 1);
digest.add(40, 5);
    // this group tree is roughly equivalent to the following sorted array:
    // [ ?, 10, ?, 20, ?, ?, 40, ?, ? ]
    // and we expect it to compute approximate missing values:
    // [ 5, 10, 15, 20, 30, 35, 40, 45, 50]
std::vector<double> values{5.0, 10.0, 15.0, 20.0, 30.0, 35.0, 40.0, 45.0, 50.0};
std::vector<double> quantiles{1.5 / 9.0, 3.5 / 9.0, 6.5 / 9.0};
for (auto q : quantiles) {
EXPECT_NEAR(quantile(q, values), digest.quantile(q), 0.01) << "q = " << q;
}
}
TEST_F(TDigestTest, Montonicity) {
TDigest digest(1000);
std::uniform_real_distribution<> reals(0.0, 1.0);
std::random_device gen;
for (int i = 0; i < LOOP_LESS_OR_MORE(10, 100000); i++) {
digest.add(reals(gen));
}
double lastQuantile = -1;
double lastX = -1;
for (double z = 0; z <= 1; z += LOOP_LESS_OR_MORE(0.1, 1e-5)) {
double x = digest.quantile(z);
EXPECT_GE(x, lastX);
lastX = x;
double q = digest.cdf(z);
EXPECT_GE(q, lastQuantile);
lastQuantile = q;
}
}
} // namespace doris
int main(int argc, char** argv) {
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
| 3,362 |
5,169 | {
"name": "YHBSDK",
"version": "3.0.1",
"summary": "YHBSDK SDK",
"description": "YHBSDK 用于提行方APP直接引用",
"homepage": "https://github.com/chenzikui/YHBSDK",
"license": "MIT",
"authors": {
"chenzikui": "<EMAIL>"
},
"platforms": {
"ios": "9.0"
},
"source": {
"git": "https://github.com/chenzikui/YHBSDK.git",
"tag": "3.0.1"
},
"vendored_frameworks": "YHBSDK/YHBSDK.framework",
"resources": "YHBSDK/YHBSDKBundle.bundle",
"frameworks": [
"Foundation",
"UIKit"
]
}
| 262 |
1,350 | <filename>sdk/apimanagement/azure-resourcemanager-apimanagement/src/main/java/com/azure/resourcemanager/apimanagement/models/GatewayHostnameConfigurations.java
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.apimanagement.models;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
/** Resource collection API of GatewayHostnameConfigurations. */
public interface GatewayHostnameConfigurations {
/**
* Lists the collection of hostname configurations for the specified gateway.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return paged Gateway hostname configuration list representation.
*/
PagedIterable<GatewayHostnameConfigurationContract> listByService(
String resourceGroupName, String serviceName, String gatewayId);
/**
* Lists the collection of hostname configurations for the specified gateway.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @param filter | Field | Usage | Supported operators | Supported functions
* |</br>|-------------|-------------|-------------|-------------|</br>| name | filter | ge, le, eq,
* ne, gt, lt | substringof, contains, startswith, endswith |</br>| hostname | filter | ge, le, eq, ne,
* gt, lt | substringof, contains, startswith, endswith |</br>.
* @param top Number of records to return.
* @param skip Number of records to skip.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return paged Gateway hostname configuration list representation.
*/
PagedIterable<GatewayHostnameConfigurationContract> listByService(
String resourceGroupName,
String serviceName,
String gatewayId,
String filter,
Integer top,
Integer skip,
Context context);
/**
* Checks that hostname configuration entity specified by identifier exists for specified Gateway entity.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @param hcId Gateway hostname configuration identifier. Must be unique in the scope of parent Gateway entity.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
void getEntityTag(String resourceGroupName, String serviceName, String gatewayId, String hcId);
/**
* Checks that hostname configuration entity specified by identifier exists for specified Gateway entity.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @param hcId Gateway hostname configuration identifier. Must be unique in the scope of parent Gateway entity.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response.
*/
GatewayHostnameConfigurationsGetEntityTagResponse getEntityTagWithResponse(
String resourceGroupName, String serviceName, String gatewayId, String hcId, Context context);
/**
* Get details of a hostname configuration.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @param hcId Gateway hostname configuration identifier. Must be unique in the scope of parent Gateway entity.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return details of a hostname configuration.
*/
GatewayHostnameConfigurationContract get(
String resourceGroupName, String serviceName, String gatewayId, String hcId);
/**
* Get details of a hostname configuration.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @param hcId Gateway hostname configuration identifier. Must be unique in the scope of parent Gateway entity.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return details of a hostname configuration.
*/
Response<GatewayHostnameConfigurationContract> getWithResponse(
String resourceGroupName, String serviceName, String gatewayId, String hcId, Context context);
/**
* Deletes the specified hostname configuration from the specified Gateway.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @param hcId Gateway hostname configuration identifier. Must be unique in the scope of parent Gateway entity.
* @param ifMatch ETag of the Entity. ETag should match the current entity state from the header response of the GET
* request or it should be * for unconditional update.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
void delete(String resourceGroupName, String serviceName, String gatewayId, String hcId, String ifMatch);
/**
* Deletes the specified hostname configuration from the specified Gateway.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the API Management service.
* @param gatewayId Gateway entity identifier. Must be unique in the current API Management service instance. Must
* not have value 'managed'.
* @param hcId Gateway hostname configuration identifier. Must be unique in the scope of parent Gateway entity.
* @param ifMatch ETag of the Entity. ETag should match the current entity state from the header response of the GET
* request or it should be * for unconditional update.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response.
*/
Response<Void> deleteWithResponse(
String resourceGroupName, String serviceName, String gatewayId, String hcId, String ifMatch, Context context);
/**
* Get details of a hostname configuration.
*
* @param id the resource ID.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return details of a hostname configuration.
*/
GatewayHostnameConfigurationContract getById(String id);
/**
* Get details of a hostname configuration.
*
* @param id the resource ID.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return details of a hostname configuration.
*/
Response<GatewayHostnameConfigurationContract> getByIdWithResponse(String id, Context context);
/**
* Deletes the specified hostname configuration from the specified Gateway.
*
* @param id the resource ID.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
void deleteById(String id);
/**
* Deletes the specified hostname configuration from the specified Gateway.
*
* @param id the resource ID.
* @param ifMatch ETag of the Entity. ETag should match the current entity state from the header response of the GET
* request or it should be * for unconditional update.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the response.
*/
Response<Void> deleteByIdWithResponse(String id, String ifMatch, Context context);
/**
* Begins definition for a new GatewayHostnameConfigurationContract resource.
*
* @param name resource name.
* @return the first stage of the new GatewayHostnameConfigurationContract definition.
*/
GatewayHostnameConfigurationContract.DefinitionStages.Blank define(String name);
}
| 3,448 |
12,278 | {
"name": "interface",
"version": "0.0.1",
"author": "arangodb",
"description": "revalidating all possible ways to foxx from outside in.",
"main": "index.js"
}
| 64 |
507 | //
// Copyright 2018 Pixar
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#ifndef PXRUSDMAYA_FUNCTORPRIMREADER_H
#define PXRUSDMAYA_FUNCTORPRIMREADER_H
#include <mayaUsd/base/api.h>
#include <mayaUsd/fileio/primReader.h>
#include <mayaUsd/fileio/primReaderArgs.h>
#include <mayaUsd/fileio/primReaderContext.h>
#include <mayaUsd/fileio/primReaderRegistry.h>
#include <pxr/pxr.h>
#include <functional>
PXR_NAMESPACE_OPEN_SCOPE
/// \class UsdMaya_FunctorPrimReader
/// \brief This class is scaffolding to hold bare prim reader functions.
///
/// It is used by the PXRUSDMAYA_DEFINE_READER macro.
class UsdMaya_FunctorPrimReader final : public UsdMayaPrimReader
{
public:
UsdMaya_FunctorPrimReader(const UsdMayaPrimReaderArgs&, UsdMayaPrimReaderRegistry::ReaderFn);
bool Read(UsdMayaPrimReaderContext& context) override;
static UsdMayaPrimReaderSharedPtr
Create(const UsdMayaPrimReaderArgs&, UsdMayaPrimReaderRegistry::ReaderFn readerFn);
static UsdMayaPrimReaderRegistry::ReaderFactoryFn
CreateFactory(UsdMayaPrimReaderRegistry::ReaderFn readerFn);
private:
UsdMayaPrimReaderRegistry::ReaderFn _readerFn;
};
PXR_NAMESPACE_CLOSE_SCOPE
#endif
| 586 |
575 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/app_list/search/arc/arc_app_shortcuts_search_provider.h"
#include <memory>
#include <string>
#include <utility>
#include "ash/public/cpp/app_list/app_list_features.h"
#include "base/files/scoped_temp_dir.h"
#include "base/macros.h"
#include "base/strings/strcat.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "base/test/scoped_feature_list.h"
#include "base/test/task_environment.h"
#include "chrome/browser/ash/arc/icon_decode_request.h"
#include "chrome/browser/ui/app_list/app_list_test_util.h"
#include "chrome/browser/ui/app_list/arc/arc_app_list_prefs.h"
#include "chrome/browser/ui/app_list/arc/arc_app_test.h"
#include "chrome/browser/ui/app_list/search/chrome_search_result.h"
#include "chrome/browser/ui/app_list/test/test_app_list_controller_delegate.h"
#include "components/arc/mojom/compatibility_mode.mojom.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace app_list {
namespace {
constexpr char kFakeAppPackageName[] = "FakeAppPackageName";
} // namespace
class ArcAppShortcutsSearchProviderTest
: public AppListTestBase,
public ::testing::WithParamInterface<bool> {
protected:
ArcAppShortcutsSearchProviderTest() = default;
~ArcAppShortcutsSearchProviderTest() override = default;
// AppListTestBase:
void SetUp() override {
AppListTestBase::SetUp();
arc_test_.SetUp(profile());
controller_ = std::make_unique<test::TestAppListControllerDelegate>();
}
void TearDown() override {
controller_.reset();
arc_test_.TearDown();
AppListTestBase::TearDown();
}
arc::mojom::AppInfo CreateAppInfo(const std::string& name,
const std::string& activity,
const std::string& package_name) {
arc::mojom::AppInfo appinfo;
appinfo.name = name;
appinfo.package_name = package_name;
appinfo.activity = activity;
return appinfo;
}
std::string AddArcAppAndShortcut(const arc::mojom::AppInfo& app_info,
bool launchable) {
ArcAppListPrefs* const prefs = arc_test_.arc_app_list_prefs();
// Adding app to the prefs, and check that the app is accessible by id.
prefs->AddAppAndShortcut(
app_info.name, app_info.package_name, app_info.activity,
std::string() /* intent_uri */, std::string() /* icon_resource_id */,
false /* sticky */, true /* notifications_enabled */,
true /* app_ready */, false /* suspended */, false /* shortcut */,
launchable);
const std::string app_id =
ArcAppListPrefs::GetAppId(app_info.package_name, app_info.activity);
EXPECT_TRUE(prefs->GetApp(app_id));
return app_id;
}
base::ScopedTempDir temp_dir_;
base::test::ScopedFeatureList scoped_feature_list_;
std::unique_ptr<test::TestAppListControllerDelegate> controller_;
ArcAppTest arc_test_;
private:
DISALLOW_COPY_AND_ASSIGN(ArcAppShortcutsSearchProviderTest);
};
TEST_P(ArcAppShortcutsSearchProviderTest, Basic) {
const bool launchable = GetParam();
const std::string app_id = AddArcAppAndShortcut(
CreateAppInfo("FakeName", "FakeActivity", kFakeAppPackageName),
launchable);
const size_t kMaxResults = launchable ? 4 : 0;
constexpr char kQuery[] = "shortlabel";
auto provider = std::make_unique<ArcAppShortcutsSearchProvider>(
kMaxResults, profile(), controller_.get());
EXPECT_TRUE(provider->results().empty());
arc::IconDecodeRequest::DisableSafeDecodingForTesting();
provider->Start(base::UTF8ToUTF16(kQuery));
const auto& results = provider->results();
EXPECT_EQ(kMaxResults, results.size());
// Verify search results.
for (size_t i = 0; i < results.size(); ++i) {
EXPECT_EQ(base::StringPrintf("ShortLabel %zu", i),
base::UTF16ToUTF8(results[i]->title()));
EXPECT_EQ(ash::SearchResultDisplayType::kTile, results[i]->display_type());
}
}
INSTANTIATE_TEST_SUITE_P(All,
ArcAppShortcutsSearchProviderTest,
testing::Bool());
} // namespace app_list
| 1,644 |
396 | package me.everything.providers.android.media;
import android.net.Uri;
import android.provider.BaseColumns;
import android.provider.MediaStore;
import me.everything.providers.core.Entity;
import me.everything.providers.core.FieldMapping;
import me.everything.providers.core.IgnoreMapping;
/**
* Created by sromku
*/
public class Video extends Entity {
@IgnoreMapping
public static Uri uriExternal = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
@IgnoreMapping
public static Uri uriInternal = MediaStore.Video.Media.INTERNAL_CONTENT_URI;
@FieldMapping(columnName = BaseColumns._ID, physicalType = FieldMapping.PhysicalType.Long)
public long id;
@FieldMapping(columnName = MediaStore.MediaColumns.DATA, physicalType = FieldMapping.PhysicalType.Blob)
public byte[] data;
@FieldMapping(columnName = MediaStore.MediaColumns.SIZE, physicalType = FieldMapping.PhysicalType.Int)
public int size;
@FieldMapping(columnName = MediaStore.MediaColumns.DISPLAY_NAME, physicalType = FieldMapping.PhysicalType.String)
public String displayName;
@FieldMapping(columnName = MediaStore.MediaColumns.TITLE, physicalType = FieldMapping.PhysicalType.String)
public String title;
@FieldMapping(columnName = MediaStore.MediaColumns.DATE_ADDED, physicalType = FieldMapping.PhysicalType.Long)
public long dateAdded;
@FieldMapping(columnName = MediaStore.MediaColumns.DATE_MODIFIED, physicalType = FieldMapping.PhysicalType.Long)
public long dateModified;
@FieldMapping(columnName = MediaStore.MediaColumns.MIME_TYPE, physicalType = FieldMapping.PhysicalType.String)
public String mimeType;
@FieldMapping(columnName = MediaStore.MediaColumns.WIDTH, physicalType = FieldMapping.PhysicalType.Int)
public int width;
@FieldMapping(columnName = MediaStore.MediaColumns.HEIGHT, physicalType = FieldMapping.PhysicalType.Int)
public int height;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.DURATION, physicalType = FieldMapping.PhysicalType.Int)
public int duration;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.ARTIST, physicalType = FieldMapping.PhysicalType.String)
public String artist;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.ALBUM, physicalType = FieldMapping.PhysicalType.String)
public String album;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.RESOLUTION, physicalType = FieldMapping.PhysicalType.String)
public String resolution;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.DESCRIPTION, physicalType = FieldMapping.PhysicalType.String)
public String description;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.IS_PRIVATE, physicalType = FieldMapping.PhysicalType.Int, logicalType = FieldMapping.LogicalType.Boolean)
public boolean isPrivate;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.TAGS, physicalType = FieldMapping.PhysicalType.String)
public String tags;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.CATEGORY, physicalType = FieldMapping.PhysicalType.String)
public String category;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.LANGUAGE, physicalType = FieldMapping.PhysicalType.String)
public String language;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.LATITUDE, physicalType = FieldMapping.PhysicalType.Double)
public double latitude;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.LONGITUDE, physicalType = FieldMapping.PhysicalType.Double)
public double longitude;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.DATE_TAKEN, physicalType = FieldMapping.PhysicalType.Int, logicalType = FieldMapping.LogicalType.Long)
public long dateTaken;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.MINI_THUMB_MAGIC, physicalType = FieldMapping.PhysicalType.Int)
    public int miniThumbMagic;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.BUCKET_ID, physicalType = FieldMapping.PhysicalType.String)
public String bucketId;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.BUCKET_DISPLAY_NAME, physicalType = FieldMapping.PhysicalType.String)
public String bucketDisplayName;
@FieldMapping(columnName = MediaStore.Video.VideoColumns.BOOKMARK, physicalType = FieldMapping.PhysicalType.Int)
public int bookmark;
}
| 1,393 |
5,156 | /* -*- Mode: C; tab-width: 8; c-basic-offset: 2; indent-tabs-mode: nil; -*- */
#include "util.h"
void callback(uint64_t env, char *name, __attribute__((unused)) map_properties_t* props) {
const char search[] = "librrpreload.so";
if (strlen(name) > strlen(search)) {
if (sizeof(void*) == 4 &&
strcmp(name + strlen(name) - strlen(search), search) == 0)
{
int* rr_is_32bit = (int*)(uintptr_t)env;
*rr_is_32bit = 1;
}
}
}
static void skip_if_rr_32_bit_under_kernel_64_bit(void) {
FILE* maps_file = fopen("/proc/self/maps", "r");
int rr_is_32bit = 0;
iterate_maps((uintptr_t)&rr_is_32bit, callback, maps_file);
struct utsname buf;
if (uname(&buf) != 0)
return;
if (rr_is_32bit && // we are inside a 32bit rr process
strcmp(buf.machine, "x86_64") == 0) // running at a 64bit kernel
{
atomic_puts("NOTE: Skipping 32-bit test because of 32-bit rr with 64-bit kernel.");
atomic_puts("EXIT-SUCCESS");
exit(0);
}
}
int main(void) {
skip_if_rr_32_bit_under_kernel_64_bit();
/* Fork-and-exec 'echo'.
The exec may fail if 'bash' is 64-bit and rr doesn't support
64-bit processes. That's fine; the test should still pass. We're
testing that rr doesn't abort.
*/
FILE* f = popen("echo -n", "r");
while (1) {
int ch = fgetc(f);
if (ch < 0) {
break;
}
putchar(ch);
}
atomic_puts("EXIT-SUCCESS");
return 0;
}
| 627 |
387 | <reponame>jtragtenberg/M5StickC
#ifndef __AXP192_H__
#define __AXP192_H__
#include <Arduino.h>
#include <Wire.h>
#define SLEEP_MSEC(us) (((uint64_t)us) * 1000L)
#define SLEEP_SEC(us) (((uint64_t)us) * 1000000L)
#define SLEEP_MIN(us) (((uint64_t)us) * 60L * 1000000L)
#define SLEEP_HR(us) (((uint64_t)us) * 60L * 60L * 1000000L)
#define ADC_RATE_025HZ (0b00 << 6)
#define ADC_RATE_050HZ (0b01 << 6)
#define ADC_RATE_100HZ (0b10 << 6)
#define ADC_RATE_200HZ (0b11 << 6)
#define CURRENT_100MA (0b0000)
#define CURRENT_190MA (0b0001)
#define CURRENT_280MA (0b0010)
#define CURRENT_360MA (0b0011)
#define CURRENT_450MA (0b0100)
#define CURRENT_550MA (0b0101)
#define CURRENT_630MA (0b0110)
#define CURRENT_700MA (0b0111)
#define VOLTAGE_4100MV (0b00 << 5)
#define VOLTAGE_4150MV (0b01 << 5)
#define VOLTAGE_4200MV (0b10 << 5)
#define VOLTAGE_4360MV (0b11 << 5)
#define VOLTAGE_OFF_2600MV (0b000)
#define VOLTAGE_OFF_2700MV (0b001)
#define VOLTAGE_OFF_2800MV (0b010)
#define VOLTAGE_OFF_2900MV (0b011)
#define VOLTAGE_OFF_3000MV (0b100)
#define VOLTAGE_OFF_3100MV (0b101)
#define VOLTAGE_OFF_3200MV (0b110)
#define VOLTAGE_OFF_3300MV (0b111)
class AXP192 {
public:
AXP192();
/**
* LDO2: Display backlight
* LDO3: Display Control
* RTC: Always ON, Switch RTC charging.
* DCDC1: Main rail. When not set the controller shuts down.
* DCDC3: Use unknown
* LDO0: MIC
*/
void begin(bool disableLDO2 = false, bool disableLDO3 = false,
bool disableRTC = false, bool disableDCDC1 = false,
bool disableDCDC3 = false, bool disableLDO0 = false);
void ScreenBreath(uint8_t brightness);
void ScreenSwitch(bool state);
bool GetBatState();
uint8_t GetInputPowerStatus();
uint8_t GetBatteryChargingStatus();
void DisableAllIRQ(void);
void ClearAllIRQ(void);
void EnablePressIRQ(bool short_press, bool long_press);
void GetPressIRQ(bool *short_press, bool *long_press);
void ClearPressIRQ(bool short_press, bool long_press);
void EnableCoulombcounter(void);
void DisableCoulombcounter(void);
void StopCoulombcounter(void);
void ClearCoulombcounter(void);
uint32_t GetCoulombchargeData(void); // Raw Data for Charge
uint32_t GetCoulombdischargeData(void); // Raw Data for Discharge
float GetCoulombData(void); // total in - total out and calc
uint16_t GetVbatData(void) __attribute__((deprecated));
uint16_t GetIchargeData(void) __attribute__((deprecated));
uint16_t GetIdischargeData(void) __attribute__((deprecated));
uint16_t GetTempData(void) __attribute__((deprecated));
uint32_t GetPowerbatData(void) __attribute__((deprecated));
uint16_t GetVinData(void) __attribute__((deprecated));
uint16_t GetIinData(void) __attribute__((deprecated));
uint16_t GetVusbinData(void) __attribute__((deprecated));
uint16_t GetIusbinData(void) __attribute__((deprecated));
uint16_t GetVapsData(void) __attribute__((deprecated));
uint8_t GetBtnPress(void);
// -- sleep
void SetSleep(void);
void DeepSleep(uint64_t time_in_us = 0);
void LightSleep(uint64_t time_in_us = 0);
uint8_t GetWarningLeve(void) __attribute__((deprecated));
public:
void SetChargeVoltage(uint8_t);
void SetChargeCurrent(uint8_t);
void SetVOff(uint8_t voltage);
float GetBatVoltage();
float GetBatCurrent();
float GetVinVoltage();
float GetVinCurrent();
float GetVBusVoltage();
float GetVBusCurrent();
float GetTempInAXP192();
float GetBatPower();
float GetBatChargeCurrent();
float GetAPSVoltage();
float GetBatCoulombInput();
float GetBatCoulombOut();
uint8_t GetWarningLevel(void);
void SetCoulombClear()
__attribute__((deprecated)); // use ClearCoulombcounter instead
void SetLDO2(bool State); // Can turn LCD Backlight OFF for power saving
void SetLDO3(bool State);
void SetGPIO0(bool State);
void SetAdcState(bool State);
void SetAdcRate(uint8_t rate);
// -- Power Off
void PowerOff();
// Power Maintained Storage
void Read6BytesStorage(uint8_t *bufPtr);
void Write6BytesStorage(uint8_t *bufPtr);
private:
void Write1Byte(uint8_t Addr, uint8_t Data);
uint8_t Read8bit(uint8_t Addr);
uint16_t Read12Bit(uint8_t Addr);
uint16_t Read13Bit(uint8_t Addr);
uint16_t Read16bit(uint8_t Addr);
uint32_t Read24bit(uint8_t Addr);
uint32_t Read32bit(uint8_t Addr);
void ReadBuff(uint8_t Addr, uint8_t Size, uint8_t *Buff);
};
#endif
| 1,917 |
8,054 | #include "floatingwidget.h"
#include <QMenu>
using namespace vnotex;
FloatingWidget::FloatingWidget(QWidget *p_parent)
: QWidget(p_parent)
{
}
void FloatingWidget::showEvent(QShowEvent *p_event)
{
QWidget::showEvent(p_event);
// May fix potential input method issue.
activateWindow();
setFocus();
}
void FloatingWidget::finish()
{
if (m_menu) {
m_menu->hide();
}
}
void FloatingWidget::setMenu(QMenu *p_menu)
{
m_menu = p_menu;
}
| 195 |
2,151 | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_ANDROID_PREFERENCES_PREFERENCES_LAUNCHER_H_
#define CHROME_BROWSER_ANDROID_PREFERENCES_PREFERENCES_LAUNCHER_H_
#include "base/macros.h"
namespace content {
class WebContents;
}
namespace chrome {
namespace android {
class PreferencesLauncher {
public:
// Opens the autofill settings page.
static void ShowAutofillSettings(content::WebContents* web_contents);
// Opens the password settings page.
static void ShowPasswordSettings();
private:
PreferencesLauncher() {}
~PreferencesLauncher() {}
DISALLOW_COPY_AND_ASSIGN(PreferencesLauncher);
};
} // namespace android
} // namespace chrome
#endif // CHROME_BROWSER_ANDROID_PREFERENCES_PREFERENCES_LAUNCHER_H_
| 287 |
734 | package com.cheikh.lazywaimai.module.library;
import android.content.Context;
import java.io.File;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
import com.cheikh.lazywaimai.context.AppCookie;
import com.cheikh.lazywaimai.module.qualifiers.ApplicationContext;
import com.cheikh.lazywaimai.module.qualifiers.CacheDirectory;
import com.cheikh.lazywaimai.module.qualifiers.ShareDirectory;
import com.cheikh.lazywaimai.network.RestApiClient;
import com.cheikh.lazywaimai.util.Constants;
@Module(
library = true,
includes = ContextProvider.class
)
public class NetworkProvider {
@Provides @Singleton
public RestApiClient provideRestApiClient(@CacheDirectory File cacheLocation, @ApplicationContext Context context) {
RestApiClient restApiClient = new RestApiClient(context, cacheLocation);
if (AppCookie.isLoggin()) {
restApiClient.setToken(AppCookie.getAccessToken());
}
return restApiClient;
}
@Provides @Singleton @CacheDirectory
public File provideHttpCacheLocation(@ApplicationContext Context context) {
return context.getCacheDir();
}
@Provides @Singleton @ShareDirectory
public File provideShareLocation(@ApplicationContext Context context) {
return new File(context.getFilesDir(), Constants.Persistence.SHARE_FILE);
}
}
| 471 |
4,756 | // Copyright 2019 The MACE Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MACE_UTILS_THREAD_POOL_H_
#define MACE_UTILS_THREAD_POOL_H_
#include <functional>
#include <condition_variable> // NOLINT(build/c++11)
#include <mutex> // NOLINT(build/c++11)
#include <thread> // NOLINT(build/c++11)
#include <vector>
#include <atomic>
#include "mace/public/mace.h"
#include "mace/port/port.h"
#include "mace/utils/count_down_latch.h"
namespace mace {
namespace utils {
MaceStatus GetCPUCoresToUse(const std::vector<float> &cpu_max_freqs,
const CPUAffinityPolicy policy,
int *thread_count_hint,
std::vector<size_t> *cores);
class ThreadPool {
public:
ThreadPool(const int thread_count,
const CPUAffinityPolicy affinity_policy);
~ThreadPool();
void Init();
void Run(const std::function<void(const int64_t)> &func,
const int64_t iterations);
void Compute1D(const std::function<void(int64_t /* start */,
int64_t /* end */,
int64_t /* step */)> &func,
int64_t start,
int64_t end,
int64_t step,
int64_t tile_size = 0,
int cost_per_item = -1);
void Compute2D(const std::function<void(int64_t /* start */,
int64_t /* end */,
int64_t /* step */,
int64_t /* start */,
int64_t /* end */,
int64_t /* step */)> &func,
int64_t start0,
int64_t end0,
int64_t step0,
int64_t start1,
int64_t end1,
int64_t step1,
int64_t tile_size0 = 0,
int64_t tile_size1 = 0,
int cost_per_item = -1);
void Compute3D(const std::function<void(int64_t /* start */,
int64_t /* end */,
int64_t /* step */,
int64_t /* start */,
int64_t /* end */,
int64_t /* step */,
int64_t /* start */,
int64_t /* end */,
int64_t /* step */)> &func,
int64_t start0,
int64_t end0,
int64_t step0,
int64_t start1,
int64_t end1,
int64_t step1,
int64_t start2,
int64_t end2,
int64_t step2,
int64_t tile_size0 = 0,
int64_t tile_size1 = 0,
int64_t tile_size2 = 0,
int cost_per_item = -1);
private:
void Destroy();
void ThreadLoop(size_t tid);
void ThreadRun(size_t tid);
std::atomic<int> event_;
CountDownLatch count_down_latch_;
std::mutex event_mutex_;
std::condition_variable event_cond_;
std::mutex run_mutex_;
struct ThreadInfo {
std::atomic<int64_t> range_start;
std::atomic<int64_t> range_end;
std::atomic<int64_t> range_len;
uintptr_t func;
std::vector<size_t> cpu_cores;
};
std::vector<ThreadInfo> thread_infos_;
std::vector<std::thread> threads_;
std::vector<float> cpu_max_freqs_;
int64_t default_tile_count_;
};
} // namespace utils
} // namespace mace
#endif // MACE_UTILS_THREAD_POOL_H_
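// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor addition, not part of the original
// header). It shows the intended Compute1D pattern: the pool partitions the
// [start, end) range and invokes the functor once per tile, so the loop body
// below runs in parallel across worker threads. The affinity enum value in
// the setup comment is an assumption taken from mace/public/mace.h.
// ---------------------------------------------------------------------------
#include <vector>

inline void ScaleInPlace(std::vector<float> *data, float factor,
                         mace::utils::ThreadPool *pool) {
  pool->Compute1D(
      [data, factor](int64_t start, int64_t end, int64_t step) {
        for (int64_t i = start; i < end; i += step) {
          (*data)[i] *= factor;
        }
      },
      0, static_cast<int64_t>(data->size()), 1 /* step */);
}

// Typical setup (assumed):
//   mace::utils::ThreadPool pool(4, mace::AFFINITY_NONE);
//   pool.Init();
//   ScaleInPlace(&values, 0.5f, &pool);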
| 2,339 |
375 | <reponame>wulinyun/lumify
package io.lumify.palantir.service;
import io.lumify.palantir.DataToSequenceFile;
import io.lumify.palantir.model.PtOntologyType;
import org.apache.hadoop.io.SequenceFile;
import java.io.IOException;
public abstract class OntologyTypeExporterBase<T extends PtOntologyType> extends ExporterBase<T> {
protected OntologyTypeExporterBase(Class<T> ptClass) {
super(ptClass);
}
@Override
protected void processRow(Exporter.ExporterSource exporterSource, T row, SequenceFile.Writer outputFile) throws IOException {
super.processRow(exporterSource, row, outputFile);
writeOntologyXmlFile(exporterSource, row.getUri(), row.getConfig());
}
}
| 256 |
2,671 | def f():
for i in 1,2,3,4,5:
if i == 3: break
yield i
print list(f())
| 52 |
432 | #ifndef _E5_IMC_REG_H_
#define _E5_IMC_REG_H_
#ifndef _SYS_BITOPS_H_
#include <sys/bitops.h>
#endif
/*
* E5 v2/v3 supports 4 channels, each channels could have 3 DIMMs.
 * However, each channel can only support 8 ranks, i.e. 3 quad-
 * rank DIMMs can _not_ be installed.
*
* E5 v2 only has IMC0, which has 4 channels (channel 0~3).
*
* E5 v3 has two configuration:
* - IMC0, which has 4 channels (channel 0~3).
* - IMC0, which has 2 channels (channel 0~1) and IMC1, which has
* 2 channels (channel 2~3).
*/
#define PCI_E5_IMC_VID_ID 0x8086
#define PCI_E5_IMC_CHN_MAX 4 /* max chans/sock */
#define PCI_E5_IMC_CHN_DIMM_MAX 3 /* max dimms/chan */
#define PCI_E5_IMC_ERROR_RANK_MAX 8
/*
* UBOX0
*/
/* E5 v2 */
#define PCISLOT_E5V2_UBOX0 11
#define PCIFUNC_E5V2_UBOX0 0
#define PCI_E5V2_UBOX0_DID_ID 0x0e1e
/* E5 v3 */
#define PCISLOT_E5V3_UBOX0 16
#define PCIFUNC_E5V3_UBOX0 5
#define PCI_E5V3_UBOX0_DID_ID 0x2f1e
/* UBOX0 regs */
#define PCI_E5_UBOX0_CPUNODEID 0x40
#define PCI_E5_UBOX0_CPUNODEID_LCLNODEID __BITS(0, 2) /* local socket */
/*
* IMC main (aka CPGC)
*/
/* E5 v2 */
#define PCISLOT_E5V2_IMC0_CPGC 15
#define PCIFUNC_E5V2_IMC0_CPGC 0
#define PCI_E5V2_IMC0_CPGC_DID_ID 0x0ea8
/* E5 v3 */
#define PCISLOT_E5V3_IMC0_CPGC 19
#define PCIFUNC_E5V3_IMC0_CPGC 0
#define PCI_E5V3_IMC0_CPGC_DID_ID 0x2fa8
#define PCISLOT_E5V3_IMC1_CPGC 22
#define PCIFUNC_E5V3_IMC1_CPGC 0
#define PCI_E5V3_IMC1_CPGC_DID_ID 0x2f68
/* CPGC regs */
#define PCI_E5_IMC_CPGC_MCMTR 0x7c
#define PCI_E5V2_IMC_CPGC_MCMTR_CHN_DISABLE(c) __BIT(16 + (c))
#define PCI_E5V3_IMC_CPGC_MCMTR_CHN_DISABLE(c) __BIT(18 + (c))
#define PCI_E5V3_IMC_CPGC_MCMTR_DDR4 __BIT(14)
#define PCI_E5_IMC_CPGC_MCMTR_IMC_MODE __BITS(12, 13)
#define PCI_E5_IMC_CPGC_MCMTR_IMC_MODE_DDR3 0 /* v3 native DDR */
#define PCI_E5_IMC_CPGC_MCMTR_ECC_EN __BIT(2)
/*
* Channel Target Address Decoder, per-channel
*/
/* E5 v2 */
#define PCISLOT_E5V2_IMC0_CTAD 15
#define PCIFUNC_E5V2_IMC0_CTAD(c) (2 + (c))
#define PCI_E5V2_IMC0_CTAD_DID_ID(c) (0x0eaa + (c))
/* E5 v3 */
#define PCISLOT_E5V3_IMC0_CTAD 19
#define PCIFUNC_E5V3_IMC0_CTAD(c) (2 + (c))
#define PCI_E5V3_IMC0_CTAD_DID_ID(c) (0x2faa + (c))
#define PCISLOT_E5V3_IMC1_CTAD 22
#define PCIFUNC_E5V3_IMC1_CTAD(c) (2 + (c))
#define PCI_E5V3_IMC1_CTAD_DID_ID(c) (0x2f6a + (c))
/* CTAD regs */
#define PCI_E5_IMC_CTAD_DIMMMTR(dimm) (0x80 + ((dimm) * 4))
#define PCI_E5V3_IMC_CTAD_DIMMMTR_DDR4 __BIT(20)
#define PCI_E5_IMC_CTAD_DIMMMTR_RANK_DISABLE(r) __BIT(16 + (r))
#define PCI_E5_IMC_CTAD_DIMMMTR_RANK_DISABLE_ALL __BITS(16, 19)
#define PCI_E5_IMC_CTAD_DIMMMTR_DIMM_POP __BIT(14)
#define PCI_E5_IMC_CTAD_DIMMMTR_RANK_CNT __BITS(12, 13)
#define PCI_E5_IMC_CTAD_DIMMMTR_RANK_CNT_SR 0
#define PCI_E5_IMC_CTAD_DIMMMTR_RANK_CNT_DR 1
#define PCI_E5_IMC_CTAD_DIMMMTR_RANK_CNT_QR 2
#define PCI_E5V3_IMC_CTAD_DIMMMTR_RANK_CNT_8R 3
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_WIDTH __BITS(7, 8)
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_WIDTH_4 0
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_WIDTH_8 1
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_WIDTH_16 2
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_WIDTH_RSVD 3
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_DNSTY __BITS(5, 6)
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_DNSTY_1G 0 /* v3 reserved */
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_DNSTY_2G 1
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_DNSTY_4G 2
#define PCI_E5_IMC_CTAD_DIMMMTR_DDR3_DNSTY_8G 3
/*
* ERROR, per-channel
*/
/* E5 v2 */
#define PCISLOT_E5V2_IMC0_ERROR_CHN0 16
#define PCIFUNC_E5V2_IMC0_ERROR_CHN0 2
#define PCI_E5V2_IMC0_ERROR_CHN0_DID_ID 0x0eb2
#define PCISLOT_E5V2_IMC0_ERROR_CHN1 16
#define PCIFUNC_E5V2_IMC0_ERROR_CHN1 3
#define PCI_E5V2_IMC0_ERROR_CHN1_DID_ID 0x0eb3
#define PCISLOT_E5V2_IMC0_ERROR_CHN2 16
#define PCIFUNC_E5V2_IMC0_ERROR_CHN2 6
#define PCI_E5V2_IMC0_ERROR_CHN2_DID_ID 0x0eb6
#define PCISLOT_E5V2_IMC0_ERROR_CHN3 16
#define PCIFUNC_E5V2_IMC0_ERROR_CHN3 7
#define PCI_E5V2_IMC0_ERROR_CHN3_DID_ID 0x0eb7
/* E5 v3 */
#define PCISLOT_E5V3_IMC0_ERROR_CHN0 20
#define PCIFUNC_E5V3_IMC0_ERROR_CHN0 2
#define PCI_E5V3_IMC0_ERROR_CHN0_DID_ID 0x2fb2
#define PCISLOT_E5V3_IMC0_ERROR_CHN1 20
#define PCIFUNC_E5V3_IMC0_ERROR_CHN1 3
#define PCI_E5V3_IMC0_ERROR_CHN1_DID_ID 0x2fb3
#define PCISLOT_E5V3_IMC0_ERROR_CHN2 21
#define PCIFUNC_E5V3_IMC0_ERROR_CHN2 2
#define PCI_E5V3_IMC0_ERROR_CHN2_DID_ID 0x2fb6
#define PCISLOT_E5V3_IMC0_ERROR_CHN3 21
#define PCIFUNC_E5V3_IMC0_ERROR_CHN3 3
#define PCI_E5V3_IMC0_ERROR_CHN3_DID_ID 0x2fb7
#define PCISLOT_E5V3_IMC1_ERROR_CHN0 23
#define PCIFUNC_E5V3_IMC1_ERROR_CHN0 2
#define PCI_E5V3_IMC1_ERROR_CHN0_DID_ID 0x2fd6
#define PCISLOT_E5V3_IMC1_ERROR_CHN1 23
#define PCIFUNC_E5V3_IMC1_ERROR_CHN1 3
#define PCI_E5V3_IMC1_ERROR_CHN1_DID_ID 0x2fd7
/* ERROR regs */
#define PCI_E5_IMC_ERROR_COR_ERR_CNT(i) (0x104 + ((i) * 4))
#define PCI_E5_IMC_ERROR_COR_ERR_CNT_HI_OVFL __BIT(31)
#define PCI_E5_IMC_ERROR_COR_ERR_CNT_HI __BITS(16, 30)
#define PCI_E5_IMC_ERROR_COR_ERR_CNT_LO_OVFL __BIT(15)
#define PCI_E5_IMC_ERROR_COR_ERR_CNT_LO __BITS(0, 14)
#define PCI_E5_IMC_ERROR_COR_ERR_TH(i) (0x11c + ((i) * 4))
#define PCI_E5_IMC_ERROR_COR_ERR_TH_HI __BITS(16, 30)
#define PCI_E5_IMC_ERROR_COR_ERR_TH_LO __BITS(0, 14)
#define PCI_E5_IMC_ERROR_COR_ERR_STAT 0x134
#define PCI_E5_IMC_ERROR_COR_ERR_STAT_RANKS __BITS(0, 7)
/*
* Thermal, per-channel
*/
/* E5 v2 */
#define PCISLOT_E5V2_IMC0_THERMAL_CHN0 16
#define PCIFUNC_E5V2_IMC0_THERMAL_CHN0 0
#define PCI_E5V2_IMC0_THERMAL_CHN0_DID_ID 0x0eb0
#define PCISLOT_E5V2_IMC0_THERMAL_CHN1 16
#define PCIFUNC_E5V2_IMC0_THERMAL_CHN1 1
#define PCI_E5V2_IMC0_THERMAL_CHN1_DID_ID 0x0eb1
#define PCISLOT_E5V2_IMC0_THERMAL_CHN2 16
#define PCIFUNC_E5V2_IMC0_THERMAL_CHN2 4
#define PCI_E5V2_IMC0_THERMAL_CHN2_DID_ID 0x0eb4
#define PCISLOT_E5V2_IMC0_THERMAL_CHN3 16
#define PCIFUNC_E5V2_IMC0_THERMAL_CHN3 5
#define PCI_E5V2_IMC0_THERMAL_CHN3_DID_ID 0x0eb5
/* E5 v3 */
#define PCISLOT_E5V3_IMC0_THERMAL_CHN0 20
#define PCIFUNC_E5V3_IMC0_THERMAL_CHN0 0
#define PCI_E5V3_IMC0_THERMAL_CHN0_DID_ID 0x2fb0
#define PCISLOT_E5V3_IMC0_THERMAL_CHN1 20
#define PCIFUNC_E5V3_IMC0_THERMAL_CHN1 1
#define PCI_E5V3_IMC0_THERMAL_CHN1_DID_ID 0x2fb1
#define PCISLOT_E5V3_IMC0_THERMAL_CHN2 21
#define PCIFUNC_E5V3_IMC0_THERMAL_CHN2 0
#define PCI_E5V3_IMC0_THERMAL_CHN2_DID_ID 0x2fb4
#define PCISLOT_E5V3_IMC0_THERMAL_CHN3 21
#define PCIFUNC_E5V3_IMC0_THERMAL_CHN3 1
#define PCI_E5V3_IMC0_THERMAL_CHN3_DID_ID 0x2fb5
#define PCISLOT_E5V3_IMC1_THERMAL_CHN0 23
#define PCIFUNC_E5V3_IMC1_THERMAL_CHN0 0
#define PCI_E5V3_IMC1_THERMAL_CHN0_DID_ID 0x2fd0
#define PCISLOT_E5V3_IMC1_THERMAL_CHN1 23
#define PCIFUNC_E5V3_IMC1_THERMAL_CHN1 1
#define PCI_E5V3_IMC1_THERMAL_CHN1_DID_ID 0x2fd1
/* Thermal regs */
#define PCI_E5_IMC_THERMAL_CHN_TEMP_CFG 0x108
#define PCI_E5_IMC_THERMAL_CHN_TEMP_CFG_OLTT_EN __BIT(31)
#define PCI_E5_IMC_THERMAL_CHN_TEMP_CFG_CLTT __BIT(29)
#define PCI_E5_IMC_THERMAL_DIMM_TEMP_TH(dimm) (0x120 + ((dimm) * 4))
#define PCI_E5_IMC_THERMAL_DIMM_TEMP_TH_TEMPHI __BITS(16, 23)
#define PCI_E5_IMC_THERMAL_DIMM_TEMP_TH_TEMPMID __BITS(8, 15)
#define PCI_E5_IMC_THERMAL_DIMM_TEMP_TH_TEMPLO __BITS(0, 7)
#define PCI_E5_IMC_THERMAL_DIMM_TEMP_TH_TEMPMIN 32 /* [MIN, MAX) */
#define PCI_E5_IMC_THERMAL_DIMM_TEMP_TH_TEMPMAX 128
#define PCI_E5_IMC_THERMAL_DIMM_TEMP_TH_DISABLE 255
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT(dimm) (0x150 + ((dimm) * 4))
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT_TEMPHI __BIT(28)
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT_TEMPMID __BIT(27)
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT_TEMPLO __BIT(26)
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT_TEMPOEMLO __BIT(25)
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT_TEMPOEMHI __BIT(24)
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT_TEMP __BITS(0, 7)
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT_TEMPMIN 0 /* [MIN, MAX) */
#define PCI_E5_IMC_THERMAL_DIMMTEMPSTAT_TEMPMAX 127
#endif /* !_E5_IMC_REG_H_ */
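/*
 * Illustrative decode sketch (editor addition, not part of the original
 * header).  It shows how the DIMMMTR field macros above are meant to be
 * combined with the __SHIFTOUT() helper from <sys/bitops.h>; the helper
 * name follows the NetBSD/DragonFly bitops convention, and the register
 * value itself would come from a PCI config-space read in a real driver.
 */
#ifdef _E5_IMC_REG_EXAMPLE
static __inline int
e5_imc_dimm_ranks(uint32_t dimmmtr)
{
	if ((dimmmtr & PCI_E5_IMC_CTAD_DIMMMTR_DIMM_POP) == 0)
		return 0;		/* slot not populated */
	switch (__SHIFTOUT(dimmmtr, PCI_E5_IMC_CTAD_DIMMMTR_RANK_CNT)) {
	case PCI_E5_IMC_CTAD_DIMMMTR_RANK_CNT_SR:
		return 1;
	case PCI_E5_IMC_CTAD_DIMMMTR_RANK_CNT_DR:
		return 2;
	case PCI_E5_IMC_CTAD_DIMMMTR_RANK_CNT_QR:
		return 4;
	default:			/* 8R encoding, E5 v3 only */
		return 8;
	}
}
#endif	/* _E5_IMC_REG_EXAMPLE */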
| 4,617 |
1,738 | <reponame>brianherrera/lumberyard
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
#pragma once
#include <AzFramework/Physics/WorldBodyBus.h>
#include <PhysX/ColliderComponentBus.h>
namespace Blast
{
AZ_PUSH_DISABLE_WARNING(4996, "-Wdeprecated-declarations")
class ShapesProvider
: public PhysX::ColliderComponentRequestBus::Handler
, public PhysX::BodyConfigurationComponentBus::Handler
{
public:
ShapesProvider(AZ::EntityId entityId, Physics::RigidBodyConfiguration configuration);
~ShapesProvider();
void AddShape(AZStd::shared_ptr<Physics::Shape> shape);
// This class is not supposed to provide shape configurations, only shapes themselves.
Physics::ShapeConfigurationList GetShapeConfigurations() override;
AZStd::vector<AZStd::shared_ptr<Physics::Shape>> GetShapes() override;
Physics::RigidBodyConfiguration GetRigidBodyConfiguration() override;
Physics::WorldBodyConfiguration GetWorldBodyConfiguration() override;
private:
AZStd::vector<AZStd::shared_ptr<Physics::Shape>> m_shapes;
AZ::EntityId m_entityId;
Physics::RigidBodyConfiguration m_configuration;
};
AZ_POP_DISABLE_WARNING
} // namespace Blast
| 562 |
488 | // Copyright 2005,2006,2007 <NAME>, <NAME>
// $Id: spec.h,v 1.4 2008-08-23 13:47:04 gergo Exp $
#ifndef H_SPEC
#define H_SPEC
#include <string>
#include "GrammarIr.h"
enum types { T_ID, T_STR, T_NUM };
struct isn
{
enum types type;
unsigned long line, col;
const char *id;
char *str;
unsigned long num;
};
std::string splice(std::string s);
std::string macroname(std::string rulename, unsigned long ruleno);
GGrammar *synparse(void);
#endif
| 190 |
1,299 | /*
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.reactivex.netty.channel;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import rx.Subscriber;
import rx.functions.Action0;
import rx.subscriptions.Subscriptions;
/**
* A bridge to connect a {@link Subscriber} to a {@link ChannelFuture} so that when the {@code subscriber} is
* unsubscribed, the listener will get removed from the {@code future}. Failure to do so for futures that are long
* living, eg: {@link Channel#closeFuture()} will lead to a memory leak where the attached listener will be in the
* listener queue of the future till the channel closes.
*
* In order to bridge the future and subscriber, {@link #bridge(ChannelFuture, Subscriber)} must be called.
*/
public abstract class SubscriberToChannelFutureBridge implements ChannelFutureListener {
@Override
public final void operationComplete(ChannelFuture future) throws Exception {
if (future.isSuccess()) {
doOnSuccess(future);
} else {
doOnFailure(future, future.cause());
}
}
protected abstract void doOnSuccess(ChannelFuture future);
protected abstract void doOnFailure(ChannelFuture future, Throwable cause);
/**
* Bridges the passed subscriber and future, which means the following:
*
* <ul>
<li>Add this listener to the passed future.</li>
<li>Add a callback to the subscriber, such that on unsubscribe this listener is removed from the future.</li>
</ul>
*
* @param future Future to bridge.
* @param subscriber Subscriber to connect to the future.
*/
public void bridge(final ChannelFuture future, Subscriber<?> subscriber) {
future.addListener(this);
subscriber.add(Subscriptions.create(new Action0() {
@Override
public void call() {
future.removeListener(SubscriberToChannelFutureBridge.this);
}
}));
}
}
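// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor addition, not part of the original file).
// A concrete bridge that completes a subscriber when a channel's close future
// finishes; the class name and the wiring snippet below are made up for the
// example.
// ---------------------------------------------------------------------------
class CloseFutureBridge extends SubscriberToChannelFutureBridge {

    private final Subscriber<? super Void> subscriber;

    CloseFutureBridge(Subscriber<? super Void> subscriber) {
        this.subscriber = subscriber;
    }

    @Override
    protected void doOnSuccess(ChannelFuture future) {
        subscriber.onCompleted();
    }

    @Override
    protected void doOnFailure(ChannelFuture future, Throwable cause) {
        subscriber.onError(cause);
    }
}

// Wiring (assumed call site): the bridge removes itself from the close future
// on unsubscribe, which is exactly the leak the class javadoc warns about.
//
//   new CloseFutureBridge(subscriber).bridge(channel.closeFuture(), subscriber);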
| 816 |
17,703 | #include <memory>
#include "envoy/network/filter.h"
#include "envoy/network/listener.h"
#include "source/common/network/listen_socket_impl.h"
#include "source/common/network/socket_option_factory.h"
#include "source/common/network/udp_packet_writer_handler_impl.h"
#include "source/server/active_udp_listener.h"
#include "test/mocks/network/mocks.h"
#include "test/test_common/environment.h"
#include "test/test_common/network_utility.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
using testing::_;
using testing::Invoke;
using testing::NiceMock;
using testing::Return;
using testing::ReturnRef;
namespace Envoy {
namespace Server {
namespace {
class MockUdpConnectionHandler : public Network::UdpConnectionHandler,
public Network::MockConnectionHandler {
public:
MOCK_METHOD(Event::Dispatcher&, dispatcher, ());
MOCK_METHOD(Network::UdpListenerCallbacksOptRef, getUdpListenerCallbacks,
(uint64_t listener_tag));
};
class ActiveUdpListenerTest : public testing::TestWithParam<Network::Address::IpVersion>,
protected Logger::Loggable<Logger::Id::main> {
public:
ActiveUdpListenerTest()
: version_(GetParam()), local_address_(Network::Test::getCanonicalLoopbackAddress(version_)) {
}
void SetUp() override {
ON_CALL(conn_handler_, dispatcher()).WillByDefault(ReturnRef(dispatcher_));
EXPECT_CALL(conn_handler_, statPrefix()).WillRepeatedly(ReturnRef(listener_stat_prefix_));
listen_socket_ =
std::make_shared<Network::UdpListenSocket>(local_address_, nullptr, /*bind*/ true);
listen_socket_->addOptions(Network::SocketOptionFactory::buildIpPacketInfoOptions());
listen_socket_->addOptions(Network::SocketOptionFactory::buildRxQueueOverFlowOptions());
ASSERT_TRUE(Network::Socket::applyOptions(listen_socket_->options(), *listen_socket_,
envoy::config::core::v3::SocketOption::STATE_BOUND));
ON_CALL(socket_factory_, getListenSocket(_)).WillByDefault(Return(listen_socket_));
EXPECT_CALL(listener_config_, listenSocketFactory()).WillRepeatedly(ReturnRef(socket_factory_));
// Use UdpGsoBatchWriter to perform non-batched writes for the purpose of this test, if it is
// supported.
EXPECT_CALL(listener_config_, udpListenerConfig())
.WillRepeatedly(Return(Network::UdpListenerConfigOptRef(udp_listener_config_)));
EXPECT_CALL(listener_config_, listenerScope()).WillRepeatedly(ReturnRef(scope_));
EXPECT_CALL(listener_config_, filterChainFactory());
ON_CALL(udp_listener_config_, packetWriterFactory())
.WillByDefault(ReturnRef(udp_packet_writer_factory_));
ON_CALL(udp_packet_writer_factory_, createUdpPacketWriter(_, _))
.WillByDefault(Invoke(
[&](Network::IoHandle& io_handle, Stats::Scope& scope) -> Network::UdpPacketWriterPtr {
#if UDP_GSO_BATCH_WRITER_COMPILETIME_SUPPORT
return std::make_unique<Quic::UdpGsoBatchWriter>(io_handle, scope);
#else
UNREFERENCED_PARAMETER(scope);
return std::make_unique<Network::UdpDefaultWriter>(io_handle);
#endif
}));
EXPECT_CALL(cb_.udp_listener_, onDestroy());
}
void setup() {
active_listener_ =
std::make_unique<ActiveRawUdpListener>(0, 1, conn_handler_, dispatcher_, listener_config_);
}
std::string listener_stat_prefix_{"listener_stat_prefix"};
NiceMock<Event::MockDispatcher> dispatcher_{"test"};
NiceMock<MockUdpConnectionHandler> conn_handler_;
Network::Address::IpVersion version_;
Network::Address::InstanceConstSharedPtr local_address_;
Network::SocketSharedPtr listen_socket_;
NiceMock<Network::MockListenSocketFactory> socket_factory_;
Stats::IsolatedStoreImpl scope_;
NiceMock<Network::MockUdpListenerConfig> udp_listener_config_;
NiceMock<Network::MockUdpPacketWriterFactory> udp_packet_writer_factory_;
Network::MockListenerConfig listener_config_;
std::unique_ptr<ActiveRawUdpListener> active_listener_;
NiceMock<Network::MockUdpReadFilterCallbacks> cb_;
};
INSTANTIATE_TEST_SUITE_P(ActiveUdpListenerTests, ActiveUdpListenerTest,
testing::ValuesIn(TestEnvironment::getIpVersionsForTest()),
TestUtility::ipTestParamsToString);
TEST_P(ActiveUdpListenerTest, MultipleFiltersOnData) {
setup();
auto* test_filter = new NiceMock<Network::MockUdpListenerReadFilter>(cb_);
EXPECT_CALL(*test_filter, onData(_))
.WillOnce(Invoke([](Network::UdpRecvData&) -> Network::FilterStatus {
return Network::FilterStatus::Continue;
}));
auto* test_filter2 = new NiceMock<Network::MockUdpListenerReadFilter>(cb_);
EXPECT_CALL(*test_filter2, onData(_))
.WillOnce(Invoke([](Network::UdpRecvData&) -> Network::FilterStatus {
return Network::FilterStatus::StopIteration;
}));
active_listener_->addReadFilter(Network::UdpListenerReadFilterPtr{test_filter});
active_listener_->addReadFilter(Network::UdpListenerReadFilterPtr{test_filter2});
Network::UdpRecvData data;
active_listener_->onDataWorker(std::move(data));
}
TEST_P(ActiveUdpListenerTest, MultipleFiltersOnDataStopIteration) {
setup();
auto* test_filter = new NiceMock<Network::MockUdpListenerReadFilter>(cb_);
EXPECT_CALL(*test_filter, onData(_))
.WillOnce(Invoke([](Network::UdpRecvData&) -> Network::FilterStatus {
return Network::FilterStatus::StopIteration;
}));
auto* test_filter2 = new NiceMock<Network::MockUdpListenerReadFilter>(cb_);
EXPECT_CALL(*test_filter2, onData(_)).Times(0);
active_listener_->addReadFilter(Network::UdpListenerReadFilterPtr{test_filter});
active_listener_->addReadFilter(Network::UdpListenerReadFilterPtr{test_filter2});
Network::UdpRecvData data;
active_listener_->onDataWorker(std::move(data));
}
TEST_P(ActiveUdpListenerTest, MultipleFiltersOnReceiveError) {
setup();
auto* test_filter = new NiceMock<Network::MockUdpListenerReadFilter>(cb_);
EXPECT_CALL(*test_filter, onReceiveError(_))
.WillOnce(Invoke([](Api::IoError::IoErrorCode) -> Network::FilterStatus {
return Network::FilterStatus::Continue;
}));
auto* test_filter2 = new NiceMock<Network::MockUdpListenerReadFilter>(cb_);
EXPECT_CALL(*test_filter2, onReceiveError(_))
.WillOnce(Invoke([](Api::IoError::IoErrorCode) -> Network::FilterStatus {
return Network::FilterStatus::StopIteration;
}));
active_listener_->addReadFilter(Network::UdpListenerReadFilterPtr{test_filter});
active_listener_->addReadFilter(Network::UdpListenerReadFilterPtr{test_filter2});
Network::UdpRecvData data;
active_listener_->onReceiveError(Api::IoError::IoErrorCode::UnknownError);
}
TEST_P(ActiveUdpListenerTest, MultipleFiltersOnReceiveErrorStopIteration) {
setup();
auto* test_filter = new NiceMock<Network::MockUdpListenerReadFilter>(cb_);
EXPECT_CALL(*test_filter, onReceiveError(_))
.WillOnce(Invoke([](Api::IoError::IoErrorCode) -> Network::FilterStatus {
return Network::FilterStatus::StopIteration;
}));
auto* test_filter2 = new NiceMock<Network::MockUdpListenerReadFilter>(cb_);
EXPECT_CALL(*test_filter2, onReceiveError(_)).Times(0);
active_listener_->addReadFilter(Network::UdpListenerReadFilterPtr{test_filter});
active_listener_->addReadFilter(Network::UdpListenerReadFilterPtr{test_filter2});
Network::UdpRecvData data;
active_listener_->onReceiveError(Api::IoError::IoErrorCode::UnknownError);
}
} // namespace
} // namespace Server
} // namespace Envoy
| 2,825 |
462 | ##
# Copyright (c) 2005-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from twisted.internet.defer import inlineCallbacks
from calendarserver.tools.resources import migrateResources
from twistedcaldav.test.util import StoreTestCase
from txdav.who.test.support import InMemoryDirectoryService
from twext.who.directory import DirectoryRecord
from txdav.who.idirectory import RecordType as CalRecordType
from txdav.who.directory import CalendarDirectoryRecordMixin
class TestRecord(DirectoryRecord, CalendarDirectoryRecordMixin):
pass
class MigrateResourcesTest(StoreTestCase):
@inlineCallbacks
def setUp(self):
yield super(MigrateResourcesTest, self).setUp()
self.store = self.storeUnderTest()
self.sourceService = InMemoryDirectoryService(None)
fieldName = self.sourceService.fieldName
records = (
TestRecord(
self.sourceService,
{
fieldName.uid: u"location1",
fieldName.shortNames: (u"loc1",),
fieldName.recordType: CalRecordType.location,
}
),
TestRecord(
self.sourceService,
{
fieldName.uid: u"location2",
fieldName.shortNames: (u"loc2",),
fieldName.recordType: CalRecordType.location,
}
),
TestRecord(
self.sourceService,
{
fieldName.uid: u"resource1",
fieldName.shortNames: (u"res1",),
fieldName.recordType: CalRecordType.resource,
}
),
)
yield self.sourceService.updateRecords(records, create=True)
@inlineCallbacks
def test_migrateResources(self):
# Record location1 has not been migrated
record = yield self.directory.recordWithUID(u"location1")
self.assertEquals(record, None)
# Migrate location1, location2, and resource1
yield migrateResources(self.sourceService, self.directory)
record = yield self.directory.recordWithUID(u"location1")
self.assertEquals(record.uid, u"location1")
self.assertEquals(record.shortNames[0], u"loc1")
record = yield self.directory.recordWithUID(u"location2")
self.assertEquals(record.uid, u"location2")
self.assertEquals(record.shortNames[0], u"loc2")
record = yield self.directory.recordWithUID(u"resource1")
self.assertEquals(record.uid, u"resource1")
self.assertEquals(record.shortNames[0], u"res1")
# Add a new location to the sourceService, and modify an existing
# location
fieldName = self.sourceService.fieldName
newRecords = (
TestRecord(
self.sourceService,
{
fieldName.uid: u"location1",
fieldName.shortNames: (u"newloc1",),
fieldName.recordType: CalRecordType.location,
}
),
TestRecord(
self.sourceService,
{
fieldName.uid: u"location3",
fieldName.shortNames: (u"loc3",),
fieldName.recordType: CalRecordType.location,
}
),
)
yield self.sourceService.updateRecords(newRecords, create=True)
yield migrateResources(self.sourceService, self.directory)
# Ensure an existing record does not get migrated again; verified by
# seeing if shortNames changed, which they should not:
record = yield self.directory.recordWithUID(u"location1")
self.assertEquals(record.uid, u"location1")
self.assertEquals(record.shortNames[0], u"loc1")
# Ensure new record does get migrated
record = yield self.directory.recordWithUID(u"location3")
self.assertEquals(record.uid, u"location3")
self.assertEquals(record.shortNames[0], u"loc3")
| 1,979 |
1,779 | //
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#ifndef ZETASQL_PARSER_PARSE_TREE_H_
#define ZETASQL_PARSER_PARSE_TREE_H_
// The complete set of headers comprises 3 parts:
// - ast_node.h is the header for ASTNode, the parent
// - parse_tree_generated.h contains classes generated by gen_parse_tree.py
// - additional miscellany defined in this file.
//
#include <stack>
#include "zetasql/parser/ast_node.h"
#include "zetasql/parser/parse_tree_generated.h"
#include "zetasql/parser/visit_result.h"
namespace zetasql {
// This is a fake ASTNode implementation that exists only for tests,
// which may need to pass an ASTNode* to some methods.
class FakeASTNode final : public ASTNode {
public:
static constexpr ASTNodeKind kConcreteNodeKind = AST_FAKE;
FakeASTNode() : ASTNode(kConcreteNodeKind) {}
void Accept(ParseTreeVisitor* visitor, void* data) const override {
ZETASQL_LOG(FATAL) << "FakeASTNode does not support Accept";
}
absl::StatusOr<VisitResult> Accept(
NonRecursiveParseTreeVisitor* visitor) const override {
ZETASQL_LOG(FATAL) << "FakeASTNode does not support Accept";
}
void InitFields() final {
{
FieldLoader fl(this); // Triggers check that there were no children.
}
set_start_location(ParseLocationPoint::FromByteOffset("fake_filename", 7));
set_end_location(ParseLocationPoint::FromByteOffset("fake_filename", 10));
}
};
inline IdString ASTAlias::GetAsIdString() const {
return identifier()->GetAsIdString();
}
namespace parse_tree_internal {
// Concrete types (the 'leaves' of the hierarchy) must be constructible.
template <typename T>
using EnableIfConcrete =
typename std::enable_if<std::is_constructible<T>::value, int>::type;
// Non Concrete types (internal nodes) of the hierarchy must _not_ be
// constructible.
template <typename T>
using EnableIfNotConcrete =
typename std::enable_if<!std::is_constructible<T>::value, int>::type;
// GetAsOrNull implementation optimized for concrete types. We assume that all
// concrete types define:
// static constexpr Type kConcreteNodeKind;
//
// This allows us to avoid invoking dynamic_cast.
template <typename T, typename MaybeConstRoot, EnableIfConcrete<T> = 0>
inline T* GetAsOrNullImpl(MaybeConstRoot* n) {
if (n->node_kind() == T::kConcreteNodeKind) {
return static_cast<T*>(n);
} else {
return nullptr;
}
}
// GetAsOrNull implemented simply with dynamic_cast. This is used for
// intermediate nodes (such as ASTExpression).
template <typename T, typename MaybeConstRoot, EnableIfNotConcrete<T> = 0>
inline T* GetAsOrNullImpl(MaybeConstRoot* r) {
// Note, if this proves too slow, it could be implemented with ancestor enums
// sets.
return dynamic_cast<T*>(r);
}
} // namespace parse_tree_internal
template <typename NodeType>
inline const NodeType* ASTNode::GetAsOrNull() const {
static_assert(std::is_base_of<ASTNode, NodeType>::value,
"NodeType must be a member of the ASTNode class hierarchy");
return parse_tree_internal::GetAsOrNullImpl<const NodeType, const ASTNode>(
this);
}
template <typename NodeType>
inline NodeType* ASTNode::GetAsOrNull() {
static_assert(std::is_base_of<ASTNode, NodeType>::value,
"NodeType must be a member of the ASTNode class hierarchy");
return parse_tree_internal::GetAsOrNullImpl<NodeType, ASTNode>(this);
}
} // namespace zetasql
#endif // ZETASQL_PARSER_PARSE_TREE_H_
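// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor addition, not part of the original
// header). GetAsOrNull<T>() is the checked downcast documented above: for a
// concrete node class it only compares node kinds, while an abstract class
// such as ASTExpression falls back to dynamic_cast. ASTIntLiteral is used
// purely as a stand-in for "some concrete node type".
// ---------------------------------------------------------------------------
inline bool IsIntLiteral(const zetasql::ASTNode* node) {
  // Concrete type: dispatched on kConcreteNodeKind, no dynamic_cast involved.
  return node->GetAsOrNull<zetasql::ASTIntLiteral>() != nullptr;
}

inline bool IsExpression(const zetasql::ASTNode* node) {
  // Abstract type: resolved with dynamic_cast.
  return node->GetAsOrNull<zetasql::ASTExpression>() != nullptr;
}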
| 1,304 |
1,171 | /**
* Copyright 2014 Facebook
* @author <NAME> (<EMAIL>)
*/
#include "src/util/AsyncCopier.h"
#include "src/util/Misc.h"
#include <folly/Format.h>
#include <glog/logging.h>
namespace facebook { namespace cuda {
uint8_t* allocPageLocked(size_t size) {
void* ptr;
checkCudaError(cudaHostAlloc(&ptr, size, cudaHostAllocPortable),
"cudaHostAlloc");
return static_cast<uint8_t*>(ptr);
}
void AsyncCopier::Deallocator::operator()(uint8_t* ptr) const {
if (ptr) {
cudaFreeHost(ptr);
}
}
AsyncCopier::Event::Event(int d)
: device(d),
refCount(0) {
event.emplace();
checkCudaError(
cudaEventCreateWithFlags(
get_pointer(event), cudaEventDisableTiming | cudaEventBlockingSync),
"cudaEventCreateWithFlags");
}
AsyncCopier::AsyncCopier(size_t bufferSize)
: bufferSize_(bufferSize),
buffer_(allocPageLocked(bufferSize)) {
int deviceCount;
checkCudaError(cudaGetDeviceCount(&deviceCount), "cudaGetDeviceCount");
events_.resize(deviceCount);
freeEvents_.resize(deviceCount);
}
bool AsyncCopier::pollEvent(Event* event) {
auto result = cudaEventQuery(*event->event);
switch (result) {
case cudaSuccess:
VLOG(2) << "Poll event " << *event->event << ": ready";
return true;
case cudaErrorNotReady:
VLOG(2) << "Poll event " << *event->event << ": not ready";
return false;
default:
throwCudaError(result, "cudaEventQuery");
}
}
void AsyncCopier::waitEvent(Event* event) {
VLOG(2) << "Wait for event " << *event->event;
checkCudaError(cudaEventSynchronize(*event->event), "cudaEventSynchronize");
}
auto AsyncCopier::getEventLocked() -> Event* {
int device;
checkCudaError(cudaGetDevice(&device), "cudaGetDevice");
auto& events = events_[device];
auto& freeEvents = freeEvents_[device];
Event* ev;
if (!freeEvents.empty()) {
ev = freeEvents.back();
freeEvents.pop_back();
VLOG(2) << "Get free event " << *ev->event;
} else {
events.emplace_back(device);
ev = &events.back();
VLOG(2) << "Allocate new event " << *ev->event;
}
++ev->refCount;
return ev;
}
void AsyncCopier::releaseEventLocked(Event* ev) {
if (--ev->refCount <= 0) {
DCHECK_EQ(ev->refCount, 0);
VLOG(2) << "Release event " << *ev->event;
freeEvents_[ev->device].push_back(ev);
}
}
// Return the unallocated ranges; at most two: one at the end of the
// buffer and one at the beginning.
auto AsyncCopier::getRangesLocked() const -> RangeVec {
RangeVec ranges;
if (allocated_.empty()) {
ranges.emplace_back(0, bufferSize_);
} else {
auto& first = allocated_.front();
auto& last = allocated_.back();
auto start = first.start;
auto end = last.start + last.length;
if (start < end) {
if (end < bufferSize_) {
ranges.emplace_back(end, bufferSize_ - end);
}
if (start > 0) {
ranges.emplace_back(0, start);
}
} else if (start > end) {
ranges.emplace_back(end, start - end);
}
}
DCHECK(ranges.size() <= 2);
return ranges;
}
void AsyncCopier::copyHtoD(void* dest, const void* src, size_t size) {
VLOG(1) << "copyHtoD " << size;
auto pdest = static_cast<uint8_t*>(dest);
auto psrc = static_cast<const uint8_t*>(src);
unsigned int flags;
auto err = cudaHostGetFlags(&flags, const_cast<void*>(src));
if (err == cudaSuccess) {
// Page-locked using cudaHostAlloc / cudaHostRegister, copy directly.
checkCudaError(cudaMemcpyAsync(dest, src, size, cudaMemcpyHostToDevice),
"cudaMemcpyAsync");
return;
} else if (err != cudaErrorInvalidValue) {
checkCudaError(err, "invalid return code from cudaMemcpyAsync");
}
cudaGetLastError(); // reset last error
// This is dicey -- what if another kernel has completed with an error?
// But there's nothing else we can do, as any cuda function may return an
// error from a previous kernel launch.
if (size > bufferSize_) {
// Copy synchronously.
checkCudaError(cudaMemcpy(dest, src, size, cudaMemcpyHostToDevice),
"cudaMemcpy");
return;
}
Event* eventToWait = nullptr;
auto copyRange = [this, &size, &pdest, &psrc] (AllocatedBlock& range) {
size_t n = std::min(size, range.length);
range.length = n;
VLOG(1) << "Copy " << range.start << " + " << n;
auto bufPtr = buffer_.get() + range.start;
memcpy(bufPtr, psrc, n);
checkCudaError(cudaMemcpyAsync(pdest, bufPtr, n, cudaMemcpyHostToDevice),
"cudaMemcpyAsync");
pdest += n;
psrc += n;
size -= n;
checkCudaError(cudaEventRecord(*range.event->event), "cudaEventRecord");
allocated_.push_back(range);
};
for (;;) {
{
std::lock_guard<std::mutex> lock(mutex_);
if (eventToWait) {
releaseEventLocked(eventToWait);
eventToWait = nullptr;
}
// Always reap
while (!allocated_.empty() && pollEvent(allocated_.front().event)) {
releaseEventLocked(allocated_.front().event);
allocated_.pop_front();
}
auto ranges = getRangesLocked();
if (!ranges.empty()) {
auto ev = getEventLocked();
for (auto it = ranges.begin(); size != 0 && it != ranges.end(); ++it) {
auto& range = *it;
++ev->refCount;
range.event = ev;
copyRange(range);
}
releaseEventLocked(ev);
if (size == 0) {
break;
}
}
// Sigh, we have to wait.
eventToWait = allocated_.front().event;
++eventToWait->refCount;
}
DCHECK(eventToWait);
VLOG(1) << "Waiting, remaining " << size;
waitEvent(eventToWait);
}
VLOG(1) << "End copyHtoD";
DCHECK(!eventToWait);
}
}} // namespaces
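// ---------------------------------------------------------------------------
// Illustrative usage sketch (editor addition, not part of the original file).
// It shows the intent of AsyncCopier: pageable host memory is staged through
// the internal page-locked ring buffer so copyHtoD can issue cudaMemcpyAsync
// and only block when the buffer is exhausted. The buffer size and the bare
// error handling are assumptions for the example.
// ---------------------------------------------------------------------------
#include <vector>

void exampleAsyncCopy() {
  facebook::cuda::AsyncCopier copier(8 << 20);  // 8 MiB staging buffer

  std::vector<float> host(1 << 20, 1.0f);       // ordinary pageable memory
  float* device = nullptr;
  if (cudaMalloc(&device, host.size() * sizeof(float)) != cudaSuccess) {
    return;
  }

  // Chunks the copy through the pinned ring buffer; waits on a previously
  // recorded event only when no free space is left.
  copier.copyHtoD(device, host.data(), host.size() * sizeof(float));

  cudaDeviceSynchronize();                      // drain the async copies
  cudaFree(device);
}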
| 2,341 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-2fch-jvg5-crf6",
"modified": "2021-08-30T14:56:20Z",
"published": "2019-03-25T16:17:53Z",
"aliases": [
"CVE-2019-6690"
],
"summary": "Moderate severity vulnerability that affects python-gnupg",
"details": "python-gnupg 0.4.3 allows context-dependent attackers to trick gnupg to decrypt other ciphertext than intended. To perform the attack, the passphrase to gnupg must be controlled by the adversary and the ciphertext should be trusted. Related to a \"CWE-20: Improper Input Validation\" issue affecting the affect functionality component.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N"
}
],
"affected": [
{
"package": {
"ecosystem": "PyPI",
"name": "python-gnupg"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "0"
},
{
"fixed": "0.4.4"
}
]
}
]
}
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2019-6690"
},
{
"type": "WEB",
"url": "https://blog.hackeriet.no/cve-2019-6690-python-gnupg-vulnerability/"
},
{
"type": "ADVISORY",
"url": "https://github.com/advisories/GHSA-2fch-jvg5-crf6"
},
{
"type": "WEB",
"url": "https://lists.debian.org/debian-lts-announce/2019/02/msg00021.html"
},
{
"type": "WEB",
"url": "https://lists.debian.org/debian-lts-announce/2021/12/msg00027.html"
},
{
"type": "WEB",
"url": "https://lists.fedoraproject.org/archives/list/package-announce@<EMAIL>/message/3WMV6XNPPL3VB3RQRFFOBCJ3AGWC4K47/"
},
{
"type": "WEB",
"url": "https://lists.fedoraproject.org/archives/list/[email protected]/message/W6KYZMN2PWXY4ENZVJUVTGFBVYEVY7II/"
},
{
"type": "WEB",
"url": "https://lists.fedoraproject.org/archives/list/package-announce@<EMAIL>.fedoraproject.org/message/X4VFRUG56542LTYK4444TPJBGR57MT25/"
},
{
"type": "WEB",
"url": "https://pypi.org/project/python-gnupg/#history"
},
{
"type": "WEB",
"url": "https://seclists.org/bugtraq/2019/Jan/41"
},
{
"type": "WEB",
"url": "https://usn.ubuntu.com/3964-1/"
},
{
"type": "WEB",
"url": "http://lists.opensuse.org/opensuse-security-announce/2019-02/msg00008.html"
},
{
"type": "WEB",
"url": "http://lists.opensuse.org/opensuse-security-announce/2019-02/msg00058.html"
},
{
"type": "WEB",
"url": "http://packetstormsecurity.com/files/151341/Python-GnuPG-0.4.3-Improper-Input-Validation.html"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/106756"
}
],
"database_specific": {
"cwe_ids": [
"CWE-20"
],
"severity": "HIGH",
"github_reviewed": true
}
} | 1,548 |
462 | from mamba import description, before, context, it
from expects import expect, be_false, raise_error, contain
from doublex_expects import have_been_called_with
from doublex import Spy
from spec.object_mother import *
from mamba import reporter, runnable
from mamba.example_group import PendingExampleGroup
with description(PendingExampleGroup) as self:
with before.each:
self.example_group = a_pending_example_group()
self.reporter = Spy(reporter.Reporter)
self.example = a_pending_example()
with context('when run'):
with before.each:
self.example_group.append(self.example)
self.example_group.execute(self.reporter, runnable.ExecutionContext())
with it('not runs its children'):
expect(self.example_group.examples[0].was_run).to(be_false)
with it('notifies that an example group is pending'):
expect(self.reporter.example_group_pending).to(have_been_called_with(self.example_group))
with it('notifies that children examples are pending'):
expect(self.reporter.example_pending).to(have_been_called_with(self.example))
with context('when adding a new examples as children'):
with it('raises a type error if is not a pending example'):
expect(lambda: self.example_group.append(an_example)).to(raise_error(TypeError))
with it('appends pending example'):
pending_example = a_pending_example()
self.example_group.append(pending_example)
expect(self.example_group.examples).to(contain(pending_example))
with context('when adding groups as children'):
with it('raises a type error if is not a pending example group'):
expect(lambda: self.example_group.append(an_example_group())).to(raise_error(TypeError))
with it('appends a pending example group'):
pending_example_group = a_pending_example_group()
self.example_group.append(pending_example_group)
expect(self.example_group.examples).to(contain(pending_example_group))
| 811 |
2,242 | <gh_stars>1000+
{
"please_wait": "Lütfen bekleyin...",
"validation_messages": {
"accept": "Resim dosyası GIF, JPG veya PNG formatında olmalıdır.",
"creditcard": "Lütfen geçerli bir kredi kartı numarası giriniz.",
"date": "Lütfen geçerli bir tarih giriniz.",
"dateISO": "Lütfen geçerli bir tarih giriniz (ISO).",
"digits": "Lütfen sadece rakam giriniz.",
"email": "Lütfen geçerli bir e-posta adresi giriniz.",
"equalTo": "Lütfen aynı değeri tekrar giriniz.",
"max": "Lütfen küçük eşit {0} olacak bir değer giriniz.",
"maxlength": "Lütfen {0} karakterden fazla giriş yapmayınız.",
"min": "Lütfen büyük eşit {0} olan bir değer giriniz.",
"minlength": "Lütfen en az {0} karakter giriniz.",
"number": "Lütfen geçerli bir sayı giriniz.",
"range": "Lütfen {0} ile {1} arasında bir değer giriniz.",
"rangelength": "Lütfen {0} ile {1} karakter uzunluğunda bir değer giriniz.",
"remote": "Lütfen bu alanı düzeltiniz.",
"required": "Bu alan zorunludur.",
"url": "Lütfen geçerli bir URL girin.",
"address_validator": "Bu yer bulunamamıştır.",
"money": "Geçerli bir parasal değer girmeniz gerekiyor.",
"night_selected": "En az bir gece seçmelisiniz",
"availability_range": "Seçilen aralık, mevcut olmayan tarihleri içeriyor",
"min_bound": "Lütfen {0} en yüksek değerinden daha küçük bir değer giriniz.",
"max_bound": "Lütfen {0} en düşük değerinden daha yüksek bir değer giriniz.",
"number_no_decimals": "Lütfen bir tam sayı giriniz.",
"number_decimals": "Lütfen ondalıkları ayırmak için nokta (.) veya virgül (,) kullandığınız geçerli bir sayı giriniz."
}
} | 798 |
355 | #!/usr/bin/env python3
"""
This housekeeping script reads a GFF3 file and writes a new one, excluding any features
which aren't related to another feature.
The initial use-case here was a GFF file with gene features that had no mRNA children.
Warning: Because feature parentage can be anywhere in the file, it can be difficult to
do some things like this without holding the whole file into memory. Instead, this makes
two passes over the file and just stores line indexes to keep, along with a few IDs to
track parentage. A bit more processing time, much less memory needed.
Author: <NAME>
"""
import argparse
from biocode import gff
def main():
parser = argparse.ArgumentParser( description='Removes orphaned features in a GFF3 file')
## output file to be written
parser.add_argument('-i', '--input', type=str, required=True, help='Path to the input GFF3 file' )
parser.add_argument('-o', '--output', type=str, required=True, help='Output GFF3 file to write' )
#parser.add_argument('-t', '--type', type=str, required=False, help='Type of features to remove' )
args = parser.parse_args()
# going to try saving memory by tracking line numbers instead of storing all of it
# true means keep the line, false means to omit it
# doing tracking this way since it's technically legal for a feature to have no identifier at all.
lines = list()
parents = dict()
current_line_num = -1
infile = open(args.input)
for line in infile:
current_line_num += 1
if line.startswith('#'):
lines.append(True)
continue
line = line.rstrip()
cols = line.split("\t")
if len(cols) != 9:
lines.append(True)
continue
id = gff.column_9_value(cols[8], 'ID')
parent = gff.column_9_value(cols[8], 'Parent')
if parent is None:
# this might be overwritten later
lines.append(False)
if id is not None:
            if id not in parents:
                parents[id] = False
else:
lines.append(True)
parents[parent] = True
infile.seek(0)
current_line_num = -1
outfh = open(args.output, 'wt')
for line in infile:
current_line_num += 1
if lines[current_line_num] == True:
outfh.write(line)
else:
line = line.rstrip()
cols = line.split("\t")
if len(cols) == 9:
id = gff.column_9_value(cols[8], 'ID')
if id is not None and id in parents and parents[id] == True:
outfh.write("{0}\n".format(line))
else:
print("WARN: removing this line: {0}".format(line))
if __name__ == '__main__':
main()
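# ---------------------------------------------------------------------------
# Illustrative invocation (editor addition, not part of the original script).
# The file names are made up; any GFF3 file with orphaned features works:
#
#   python3 remove_orphaned_features.py -i annotation.gff3 -o annotation.cleaned.gff3
#
# Rows that neither carry a Parent attribute nor are referenced as a Parent by
# another row are dropped, and each dropped row is reported via the WARN line.
# ---------------------------------------------------------------------------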
| 1,250 |
461 | <gh_stars>100-1000
#include "cpu/exec.h"
make_EHelper(ld) {
rtl_lm(&s0, &id_src->addr, decinfo.width);
rtl_sr(id_dest->reg, &s0, 4);
switch (decinfo.width) {
case 4: print_asm_template2(lw); break;
case 2: print_asm_template2(lhu); break;
case 1: print_asm_template2(lbu); break;
default: assert(0);
}
}
make_EHelper(st) {
rtl_sm(&id_src->addr, &id_dest->val, decinfo.width);
switch (decinfo.width) {
case 4: print_asm_template2(sw); break;
case 2: print_asm_template2(sh); break;
case 1: print_asm_template2(sb); break;
default: assert(0);
}
}
| 267 |
3,156 | from test.integration.base import DBTIntegrationTest, use_profile
import os
import re
import yaml
import pytest
class TestDebug(DBTIntegrationTest):
@property
def schema(self):
return 'dbt_debug_049'
@staticmethod
def dir(value):
return os.path.normpath(value)
@property
def models(self):
return self.dir('models')
def postgres_profile(self):
profile = super(TestDebug, self).postgres_profile()
profile['test']['outputs'].update({
'nopass': {
'type': 'postgres',
'threads': 4,
'host': self.database_host,
'port': 5432,
'user': 'root',
# 'pass': 'password',
'dbname': 'dbt',
'schema': self.unique_schema()
},
'wronguser': {
'type': 'postgres',
'threads': 4,
'host': self.database_host,
'port': 5432,
'user': 'notmyuser',
'pass': '<PASSWORD>',
'dbname': 'dbt',
'schema': self.unique_schema()
},
'none_target': None
})
return profile
@pytest.fixture(autouse=True)
def capsys(self, capsys):
self.capsys = capsys
def assertGotValue(self, linepat, result):
found = False
output = self.capsys.readouterr().out
for line in output.split('\n'):
if linepat.match(line):
found = True
self.assertIn(result, line, 'result "{}" not found in "{}" line'.format(result, linepat))
self.assertTrue(found, 'linepat {} not found in stdout: {}'.format(linepat, output))
@use_profile('postgres')
def test_postgres_ok(self):
self.run_dbt(['debug'])
self.assertNotIn('ERROR', self.capsys.readouterr().out)
@use_profile('postgres')
def test_postgres_nopass(self):
self.run_dbt(['debug', '--target', 'nopass'], expect_pass=False)
self.assertGotValue(re.compile(r'\s+profiles\.yml file'), 'ERROR invalid')
@use_profile('postgres')
def test_postgres_wronguser(self):
self.run_dbt(['debug', '--target', 'wronguser'], expect_pass=False)
self.assertGotValue(re.compile(r'\s+Connection test'), 'ERROR')
@use_profile('postgres')
def test_postgres_empty_target(self):
self.run_dbt(['debug', '--target', 'none_target'], expect_pass=False)
self.assertGotValue(re.compile(r"\s+output 'none_target'"), 'misconfigured')
class TestDebugProfileVariable(TestDebug):
@property
def project_config(self):
return {
'config-version': 2,
'profile': '{{ "te" ~ "st" }}'
}
class TestDebugInvalidProject(DBTIntegrationTest):
@property
def schema(self):
return 'dbt_debug_049'
@staticmethod
def dir(value):
return os.path.normpath(value)
@property
def models(self):
return self.dir('models')
@pytest.fixture(autouse=True)
def capsys(self, capsys):
self.capsys = capsys
@use_profile('postgres')
def test_postgres_empty_project(self):
with open('dbt_project.yml', 'w') as f:
pass
self.run_dbt(['debug', '--profile', 'test'], expect_pass=False)
splitout = self.capsys.readouterr().out.split('\n')
for line in splitout:
if line.strip().startswith('dbt_project.yml file'):
self.assertIn('ERROR invalid', line)
elif line.strip().startswith('profiles.yml file'):
self.assertNotIn('ERROR invalid', line)
@use_profile('postgres')
def test_postgres_badproject(self):
# load a special project that is an error
self.use_default_project(overrides={
'invalid-key': 'not a valid key so this is bad project',
})
self.run_dbt(['debug', '--profile', 'test'], expect_pass=False)
splitout = self.capsys.readouterr().out.split('\n')
for line in splitout:
if line.strip().startswith('dbt_project.yml file'):
self.assertIn('ERROR invalid', line)
elif line.strip().startswith('profiles.yml file'):
self.assertNotIn('ERROR invalid', line)
@use_profile('postgres')
def test_postgres_not_found_project_dir(self):
self.run_dbt(['debug', '--project-dir', 'nopass'], expect_pass=False)
splitout = self.capsys.readouterr().out.split('\n')
for line in splitout:
if line.strip().startswith('dbt_project.yml file'):
self.assertIn('ERROR not found', line)
elif line.strip().startswith('profiles.yml file'):
self.assertNotIn('ERROR invalid', line)
@use_profile('postgres')
def test_postgres_invalid_project_outside_current_dir(self):
# create a dbt_project.yml
project_config = {
'invalid-key': 'not a valid key in this project'
}
os.makedirs('custom', exist_ok=True)
with open("custom/dbt_project.yml", 'w') as f:
yaml.safe_dump(project_config, f, default_flow_style=True)
self.run_dbt(['debug', '--project-dir', 'custom'], expect_pass=False)
splitout = self.capsys.readouterr().out.split('\n')
for line in splitout:
if line.strip().startswith('dbt_project.yml file'):
self.assertIn('ERROR invalid', line)
| 2,588 |
1,564 | <filename>core/src/test/java/org/modelmapper/bugs/GH550.java
package org.modelmapper.bugs;
import static org.testng.Assert.assertEquals;
import java.util.HashMap;
import org.modelmapper.AbstractTest;
import org.testng.annotations.Test;
@Test
public class GH550 extends AbstractTest {
public void shouldSupportHashMapSubClass() {
SourceHashMap attributes = new SourceHashMap();
attributes.put("foo", "bar");
Destination destination = modelMapper.map(
new Source(attributes), Destination.class);
assertEquals(destination.attributes.get("foo"), "bar");
}
private static class Source {
SourceHashMap attributes;
public Source(SourceHashMap attributes) {
this.attributes = attributes;
}
public SourceHashMap getAttributes() {
return attributes;
}
}
private static class SourceHashMap extends HashMap<String, Object> {
}
private static class Destination {
DestinationHashMap attributes;
public void setAttributes(DestinationHashMap attributes) {
this.attributes = attributes;
}
}
private static class DestinationHashMap extends HashMap<String, Object> {
}
}
| 362 |
1,179 | <gh_stars>1000+
// SPDX-License-Identifier: BSD-2-Clause
/* LibTomCrypt, modular cryptographic library -- <NAME>
*
* LibTomCrypt is a library that provides various cryptographic
* algorithms in a highly modular and flexible manner.
*
* The library is free for all purposes without any express
* guarantee it works.
*/
#include "tomcrypt_private.h"
/**
@file crypt_cipher_descriptor.c
Stores the cipher descriptor table, <NAME>is
*/
const struct ltc_cipher_descriptor *cipher_descriptor[TAB_SIZE];
LTC_MUTEX_GLOBAL(ltc_cipher_mutex)
/* ref: $Format:%D$ */
/* git commit: $Format:%H$ */
/* commit time: $Format:%ai$ */
| 220 |
4,036 | package remove_type_mismatch;
import java.util.Collection;
public class A {
void test1(Collection<StringBuffer> c, String s, StringBuffer b) {
c.remove(s);
c.remove(b);
}
void test2(Collection<? extends CharSequence> c, A a, String b) {
c.remove(a);
c.remove(b);
}
}
interface RunnableList extends Runnable, java.util.List {}
class TestB {
Collection<? extends Runnable> coll1 = null;
Collection<? extends java.util.List> coll2 = null;
Collection<RunnableList> coll3;
{
coll3.remove("");
}
}
class MyIntList extends java.util.LinkedList<Integer> {
public boolean remove(Object o) { return super.remove(o); }
}
class TestC {
MyIntList mil;
{
mil.remove("");
}
}
class MyOtherIntList<T> extends java.util.LinkedList<Integer> {
public boolean remove(Object o) { return super.remove(o); }
}
class TestD {
MyOtherIntList<Runnable> moil;
{
moil.remove("");
}
} | 338 |
14,668 | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef IOS_CHROME_BROWSER_UI_FULLSCREEN_FULLSCREEN_WEB_STATE_OBSERVER_H_
#define IOS_CHROME_BROWSER_UI_FULLSCREEN_FULLSCREEN_WEB_STATE_OBSERVER_H_
#include "ios/web/public/web_state_observer.h"
#include "url/gurl.h"
class FullscreenController;
class FullscreenMediator;
class FullscreenModel;
@class FullscreenWebViewProxyObserver;
// A WebStateObserver that updates a FullscreenModel for navigation events.
class FullscreenWebStateObserver : public web::WebStateObserver {
public:
// Constructor for an observer that updates |controller| and |model|.
FullscreenWebStateObserver(FullscreenController* controller,
FullscreenModel* model,
FullscreenMediator* mediator);
~FullscreenWebStateObserver() override;
// Tells the observer to start observing |web_state|.
void SetWebState(web::WebState* web_state);
private:
// WebStateObserver:
void WasShown(web::WebState* web_state) override;
void DidFinishNavigation(web::WebState* web_state,
web::NavigationContext* navigation_context) override;
void DidStartLoading(web::WebState* web_state) override;
void WebStateDestroyed(web::WebState* web_state) override;
// The WebState being observed.
web::WebState* web_state_ = nullptr;
// The FullscreenController passed on construction.
FullscreenController* controller_;
// The model passed on construction.
FullscreenModel* model_;
// The mediator passed on construction.
FullscreenMediator* mediator_ = nullptr;
// Observer for |web_state_|'s scroll view proxy.
__strong FullscreenWebViewProxyObserver* web_view_proxy_observer_;
// The URL received in the NavigationContext of the last finished navigation.
GURL last_navigation_url_;
};
#endif // IOS_CHROME_BROWSER_UI_FULLSCREEN_FULLSCREEN_WEB_STATE_OBSERVER_H_
| 690 |
314 | <filename>Multiplex/IDEHeaders/IDEHeaders/DVTKit/DVTWebViewTextAttachmentCell.h
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "CDStructures.h"
@class WebView;
@interface DVTWebViewTextAttachmentCell : NSViewTextAttachmentCell
{
WebView *_webView;
}
@property(retain) WebView *webView; // @synthesize webView=_webView;
- (id)viewWithFrame:(struct CGRect)arg1 forView:(id)arg2 characterIndex:(unsigned long long)arg3 layoutManager:(id)arg4;
- (struct CGSize)cellSize;
@end
| 216 |
9,516 | <gh_stars>1000+
/*!
* Copyright (c) 2018 by Contributors
* \file scheduler/scheduler.cc
* \brief DGL Scheduler implementation
*/
#include <dgl/scheduler.h>
#include <unordered_map>
#include <unordered_set>
#include <vector>
namespace dgl {
namespace sched {
template <class IdType>
std::vector<IdArray> DegreeBucketing(const IdArray& msg_ids, const IdArray& vids,
const IdArray& recv_ids) {
auto n_msgs = msg_ids->shape[0];
const IdType* vid_data = static_cast<IdType*>(vids->data);
const IdType* msg_id_data = static_cast<IdType*>(msg_ids->data);
const IdType* recv_id_data = static_cast<IdType*>(recv_ids->data);
// in edge: dst->msgs
std::unordered_map<IdType, std::vector<IdType>> in_edges;
for (IdType i = 0; i < n_msgs; ++i) {
in_edges[vid_data[i]].push_back(msg_id_data[i]);
}
// bkt: deg->dsts
std::unordered_map<IdType, std::vector<IdType>> bkt;
for (const auto& it : in_edges) {
bkt[it.second.size()].push_back(it.first);
}
std::unordered_set<IdType> zero_deg_nodes;
for (IdType i = 0; i < recv_ids->shape[0]; ++i) {
if (in_edges.find(recv_id_data[i]) == in_edges.end()) {
zero_deg_nodes.insert(recv_id_data[i]);
}
}
auto n_zero_deg = zero_deg_nodes.size();
// calc output size
IdType n_deg = bkt.size();
IdType n_dst = in_edges.size();
IdType n_mid_sec = bkt.size(); // zero deg won't affect message size
if (n_zero_deg > 0) {
n_deg += 1;
n_dst += n_zero_deg;
}
// initialize output
IdArray degs = IdArray::Empty({n_deg}, vids->dtype, vids->ctx);
IdArray nids = IdArray::Empty({n_dst}, vids->dtype, vids->ctx);
IdArray nid_section = IdArray::Empty({n_deg}, vids->dtype, vids->ctx);
IdArray mids = IdArray::Empty({n_msgs}, vids->dtype, vids->ctx);
IdArray mid_section = IdArray::Empty({n_mid_sec}, vids->dtype, vids->ctx);
IdType* deg_ptr = static_cast<IdType*>(degs->data);
IdType* nid_ptr = static_cast<IdType*>(nids->data);
IdType* nsec_ptr = static_cast<IdType*>(nid_section->data);
IdType* mid_ptr = static_cast<IdType*>(mids->data);
IdType* msec_ptr = static_cast<IdType*>(mid_section->data);
// fill in bucketing ordering
for (const auto& it : bkt) { // for each bucket
const IdType deg = it.first;
const IdType bucket_size = it.second.size();
*deg_ptr++ = deg;
*nsec_ptr++ = bucket_size;
*msec_ptr++ = deg * bucket_size;
for (const auto dst : it.second) { // for each dst in this bucket
*nid_ptr++ = dst;
for (const auto mid : in_edges[dst]) { // for each in edge of dst
*mid_ptr++ = mid;
}
}
}
if (n_zero_deg > 0) {
*deg_ptr = 0;
*nsec_ptr = n_zero_deg;
for (const auto dst : zero_deg_nodes) {
*nid_ptr++ = dst;
}
}
std::vector<IdArray> ret;
ret.push_back(std::move(degs));
ret.push_back(std::move(nids));
ret.push_back(std::move(nid_section));
ret.push_back(std::move(mids));
ret.push_back(std::move(mid_section));
return ret;
}
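// A small worked example of the bucketing above, with hypothetical inputs
// (not taken from the DGL test suite): msg_ids = [0, 1, 2], vids = [5, 5, 7],
// recv_ids = [5, 7, 9].  Then in_edges = {5: [0, 1], 7: [2]},
// bkt = {2: [5], 1: [7]}, and node 9 has zero in-degree.  One possible output
// (bucket order depends on unordered_map iteration) is
//   degs        = [2, 1, 0]
//   nids        = [5, 7, 9]
//   nid_section = [1, 1, 1]
//   mids        = [0, 1, 2]
//   mid_section = [2, 1]
// i.e. node 5 receives messages 0 and 1, node 7 receives message 2, and the
// zero-degree node 9 forms its own trailing bucket with no messages.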
template std::vector<IdArray> DegreeBucketing<int32_t>(const IdArray& msg_ids,
const IdArray& vids,
const IdArray& recv_ids);
template std::vector<IdArray> DegreeBucketing<int64_t>(const IdArray& msg_ids,
const IdArray& vids,
const IdArray& recv_ids);
template <class IdType>
std::vector<IdArray> GroupEdgeByNodeDegree(const IdArray& uids,
const IdArray& vids,
const IdArray& eids) {
auto n_edge = eids->shape[0];
const IdType* eid_data = static_cast<IdType*>(eids->data);
const IdType* uid_data = static_cast<IdType*>(uids->data);
const IdType* vid_data = static_cast<IdType*>(vids->data);
// node2edge: group_by nodes uid -> (eid, the other end vid)
std::unordered_map<IdType, std::vector<std::pair<IdType, IdType>>> node2edge;
for (IdType i = 0; i < n_edge; ++i) {
node2edge[uid_data[i]].emplace_back(eid_data[i], vid_data[i]);
}
// bkt: deg -> group_by node uid
std::unordered_map<IdType, std::vector<IdType>> bkt;
for (const auto& it : node2edge) {
bkt[it.second.size()].push_back(it.first);
}
  // number of unique degrees
IdType n_deg = bkt.size();
// initialize output
IdArray degs = IdArray::Empty({n_deg}, eids->dtype, eids->ctx);
IdArray new_uids = IdArray::Empty({n_edge}, uids->dtype, uids->ctx);
IdArray new_vids = IdArray::Empty({n_edge}, vids->dtype, vids->ctx);
IdArray new_eids = IdArray::Empty({n_edge}, eids->dtype, eids->ctx);
IdArray sections = IdArray::Empty({n_deg}, eids->dtype, eids->ctx);
IdType* deg_ptr = static_cast<IdType*>(degs->data);
IdType* uid_ptr = static_cast<IdType*>(new_uids->data);
IdType* vid_ptr = static_cast<IdType*>(new_vids->data);
IdType* eid_ptr = static_cast<IdType*>(new_eids->data);
IdType* sec_ptr = static_cast<IdType*>(sections->data);
// fill in bucketing ordering
for (const auto& it : bkt) { // for each bucket
// degree of this bucket
const IdType deg = it.first;
// number of edges in this bucket
const IdType bucket_size = it.second.size();
*deg_ptr++ = deg;
*sec_ptr++ = deg * bucket_size;
for (const auto u : it.second) { // for uid in this bucket
for (const auto& pair : node2edge[u]) { // for each edge of uid
*uid_ptr++ = u;
*vid_ptr++ = pair.second;
*eid_ptr++ = pair.first;
}
}
}
std::vector<IdArray> ret;
ret.push_back(std::move(degs));
ret.push_back(std::move(new_uids));
ret.push_back(std::move(new_vids));
ret.push_back(std::move(new_eids));
ret.push_back(std::move(sections));
return ret;
}
template std::vector<IdArray> GroupEdgeByNodeDegree<int32_t>(
const IdArray& uids, const IdArray& vids, const IdArray& eids);
template std::vector<IdArray> GroupEdgeByNodeDegree<int64_t>(
const IdArray& uids, const IdArray& vids, const IdArray& eids);
} // namespace sched
} // namespace dgl
| 2,885 |
2,151 | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/arc/voice_interaction/arc_voice_interaction_framework_service.h"
#include <memory>
#include <string>
#include <utility>
#include "ash/shell.h"
#include "ash/test/ash_test_base.h"
#include "base/bind.h"
#include "base/files/scoped_temp_dir.h"
#include "chrome/browser/chromeos/arc/arc_session_manager.h"
#include "chrome/browser/chromeos/arc/voice_interaction/fake_voice_interaction_controller.h"
#include "chrome/browser/chromeos/arc/voice_interaction/highlighter_controller_client.h"
#include "chrome/browser/chromeos/arc/voice_interaction/voice_interaction_controller_client.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/test/base/test_browser_window.h"
#include "chrome/test/base/test_browser_window_aura.h"
#include "chrome/test/base/testing_profile.h"
#include "chromeos/dbus/dbus_thread_manager.h"
#include "chromeos/dbus/fake_cras_audio_client.h"
#include "components/arc/arc_bridge_service.h"
#include "components/arc/arc_util.h"
#include "components/arc/test/connection_holder_util.h"
#include "components/arc/test/fake_arc_session.h"
#include "components/arc/test/fake_voice_interaction_framework_instance.h"
#include "components/prefs/pref_service.h"
#include "components/session_manager/core/session_manager.h"
#include "services/service_manager/public/cpp/service.h"
#include "services/service_manager/public/cpp/test/test_connector_factory.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/compositor/layer.h"
#include "ui/compositor/layer_tree_owner.h"
namespace arc {
namespace {
class TestHighlighterController : public ash::mojom::HighlighterController,
public service_manager::Service {
public:
TestHighlighterController() : binding_(this) {}
~TestHighlighterController() override = default;
void CallHandleSelection(const gfx::Rect& rect) {
client_->HandleSelection(rect);
}
void CallHandleEnabledStateChange(bool enabled) {
is_enabled_ = enabled;
client_->HandleEnabledStateChange(enabled);
}
bool client_attached() const { return static_cast<bool>(client_); }
// ash::mojom::HighlighterController:
void SetClient(ash::mojom::HighlighterControllerClientPtr client) override {
DCHECK(!client_);
client_ = std::move(client);
// Okay to use base::Unretained(this), as |client_| will be destroyed before
// |this|.
client_.set_connection_error_handler(
base::BindOnce(&TestHighlighterController::OnClientConnectionLost,
base::Unretained(this)));
}
void ExitHighlighterMode() override {
// simulate exiting current session.
CallHandleEnabledStateChange(false);
}
void FlushMojo() { client_.FlushForTesting(); }
bool is_enabled() { return is_enabled_; }
// service_manager::Service:
void OnBindInterface(const service_manager::BindSourceInfo& source_info,
const std::string& interface_name,
mojo::ScopedMessagePipeHandle interface_pipe) override {
DCHECK(interface_name == ash::mojom::HighlighterController::Name_);
binding_.Bind(
ash::mojom::HighlighterControllerRequest(std::move(interface_pipe)));
}
private:
void OnClientConnectionLost() {
client_.reset();
binding_.Close();
}
mojo::Binding<ash::mojom::HighlighterController> binding_;
ash::mojom::HighlighterControllerClientPtr client_;
bool is_enabled_ = false;
DISALLOW_COPY_AND_ASSIGN(TestHighlighterController);
};
std::unique_ptr<TestBrowserWindow> CreateTestBrowserWindow(
aura::Window* parent) {
auto window =
std::make_unique<aura::Window>(nullptr, aura::client::WINDOW_TYPE_NORMAL);
window->Init(ui::LAYER_TEXTURED);
window->SetBounds(gfx::Rect(0, 0, 200, 200));
parent->AddChild(window.get());
return std::make_unique<TestBrowserWindowAura>(std::move(window));
}
ui::Layer* FindLayer(ui::Layer* root, ui::Layer* target) {
if (root == target)
return target;
for (auto* child : root->children()) {
auto* result = FindLayer(child, target);
if (result)
return result;
}
return nullptr;
}
} // namespace
class ArcVoiceInteractionFrameworkServiceTest : public ash::AshTestBase {
public:
ArcVoiceInteractionFrameworkServiceTest() = default;
void SetUp() override {
AshTestBase::SetUp();
// Setup test profile.
ASSERT_TRUE(temp_dir_.CreateUniqueTempDir());
TestingProfile::Builder profile_builder;
profile_builder.SetProfileName("<EMAIL>");
profile_builder.SetPath(temp_dir_.GetPath().AppendASCII("TestArcProfile"));
profile_ = profile_builder.Build();
// Setup dependencies for voice interaction framework service.
session_manager_ = std::make_unique<session_manager::SessionManager>();
arc_session_manager_ = std::make_unique<ArcSessionManager>(
std::make_unique<ArcSessionRunner>(base::Bind(FakeArcSession::Create)));
arc_bridge_service_ = std::make_unique<ArcBridgeService>();
auto highlighter_controller_ptr =
std::make_unique<TestHighlighterController>();
highlighter_controller_ = highlighter_controller_ptr.get();
voice_interaction_controller_ =
std::make_unique<FakeVoiceInteractionController>();
voice_interaction_controller_client_ =
std::make_unique<VoiceInteractionControllerClient>();
connector_factory_ =
service_manager::TestConnectorFactory::CreateForUniqueService(
std::move(highlighter_controller_ptr));
connector_ = connector_factory_->CreateConnector();
framework_service_ = std::make_unique<ArcVoiceInteractionFrameworkService>(
profile_.get(), arc_bridge_service_.get());
framework_service_->GetHighlighterClientForTesting()
->SetConnectorForTesting(connector_.get());
voice_interaction_controller_client()->SetControllerForTesting(
voice_interaction_controller_->CreateInterfacePtrAndBind());
framework_instance_ =
std::make_unique<FakeVoiceInteractionFrameworkInstance>();
arc_bridge_service_->voice_interaction_framework()->SetInstance(
framework_instance_.get());
WaitForInstanceReady(arc_bridge_service_->voice_interaction_framework());
// Flushing is required for the AttachClient call to get through to the
    // highlighter controller.
FlushHighlighterControllerMojo();
framework_service()->SetVoiceInteractionSetupCompleted();
// Flushing is required for the notify mojo call to get through to the voice
// interaction controller.
FlushVoiceInteractionControllerMojo();
}
void TearDown() override {
arc_bridge_service_->voice_interaction_framework()->CloseInstance(
framework_instance_.get());
voice_interaction_controller_.reset();
voice_interaction_controller_client_.reset();
framework_instance_.reset();
framework_service_.reset();
arc_bridge_service_.reset();
arc_session_manager_.reset();
session_manager_.reset();
profile_.reset();
AshTestBase::TearDown();
}
protected:
ArcBridgeService* arc_bridge_service() const {
return arc_bridge_service_.get();
}
ArcVoiceInteractionFrameworkService* framework_service() const {
return framework_service_.get();
}
FakeVoiceInteractionFrameworkInstance* framework_instance() const {
return framework_instance_.get();
}
TestHighlighterController* highlighter_controller() const {
return highlighter_controller_;
}
FakeVoiceInteractionController* voice_interaction_controller() {
return voice_interaction_controller_.get();
}
VoiceInteractionControllerClient* voice_interaction_controller_client() {
return voice_interaction_controller_client_.get();
}
void FlushHighlighterControllerMojo() {
framework_service_->GetHighlighterClientForTesting()->FlushMojoForTesting();
}
void FlushVoiceInteractionControllerMojo() {
voice_interaction_controller_client()->FlushMojoForTesting();
}
TestingProfile* profile() const { return profile_.get(); }
private:
std::unique_ptr<TestingProfile> profile_;
base::ScopedTempDir temp_dir_;
std::unique_ptr<session_manager::SessionManager> session_manager_;
std::unique_ptr<ArcBridgeService> arc_bridge_service_;
std::unique_ptr<ArcSessionManager> arc_session_manager_;
std::unique_ptr<service_manager::TestConnectorFactory> connector_factory_;
std::unique_ptr<service_manager::Connector> connector_;
// |highlighter_controller_| is valid until |connector_factory_| is deleted.
TestHighlighterController* highlighter_controller_;
std::unique_ptr<FakeVoiceInteractionController> voice_interaction_controller_;
std::unique_ptr<ArcVoiceInteractionFrameworkService> framework_service_;
std::unique_ptr<FakeVoiceInteractionFrameworkInstance> framework_instance_;
std::unique_ptr<VoiceInteractionControllerClient>
voice_interaction_controller_client_;
DISALLOW_COPY_AND_ASSIGN(ArcVoiceInteractionFrameworkServiceTest);
};
TEST_F(ArcVoiceInteractionFrameworkServiceTest, StartSetupWizard) {
framework_service()->StartVoiceInteractionSetupWizard();
// The signal to start setup wizard should be sent.
EXPECT_EQ(1u, framework_instance()->setup_wizard_count());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest, ShowSettings) {
framework_service()->ShowVoiceInteractionSettings();
// The signal to show voice interaction settings should be sent.
EXPECT_EQ(1u, framework_instance()->show_settings_count());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest, StartSession) {
framework_service()->StartSessionFromUserInteraction(gfx::Rect());
// A notification should be sent if the container is not ready yet.
FlushVoiceInteractionControllerMojo();
EXPECT_EQ(ash::mojom::VoiceInteractionState::NOT_READY,
voice_interaction_controller()->voice_interaction_state());
// The signal to start voice interaction session should be sent.
EXPECT_EQ(1u, framework_instance()->start_session_count());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest, StartSessionWithoutFlag) {
// Remove the voice interaction enabled flag.
framework_service()->SetVoiceInteractionEnabled(false,
base::BindOnce([](bool) {}));
framework_service()->StartSessionFromUserInteraction(gfx::Rect());
// The signal should not be sent when voice interaction disabled.
EXPECT_EQ(0u, framework_instance()->start_session_count());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest, StartSessionWithoutInstance) {
// Reset the framework instance.
arc_bridge_service()->voice_interaction_framework()->CloseInstance(
framework_instance());
framework_service()->StartSessionFromUserInteraction(gfx::Rect());
// A notification should be sent if the container is not ready yet.
FlushVoiceInteractionControllerMojo();
EXPECT_EQ(ash::mojom::VoiceInteractionState::NOT_READY,
voice_interaction_controller()->voice_interaction_state());
// The signal should not be sent when framework instance not ready.
EXPECT_EQ(0u, framework_instance()->start_session_count());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest, ToggleSession) {
framework_service()->ToggleSessionFromUserInteraction();
// A notification should be sent if the container is not ready yet.
FlushVoiceInteractionControllerMojo();
EXPECT_EQ(ash::mojom::VoiceInteractionState::NOT_READY,
voice_interaction_controller()->voice_interaction_state());
// The signal to toggle voice interaction session should be sent.
EXPECT_EQ(1u, framework_instance()->toggle_session_count());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest, HotwordTriggered) {
auto* audio_client = static_cast<chromeos::FakeCrasAudioClient*>(
chromeos::DBusThreadManager::Get()->GetCrasAudioClient());
audio_client->NotifyHotwordTriggeredForTesting(0, 0);
EXPECT_TRUE(framework_service()->ValidateTimeSinceUserInteraction());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest, HighlighterControllerClient) {
EXPECT_TRUE(highlighter_controller()->client_attached());
// Enabled state should propagate to the framework instance.
highlighter_controller()->CallHandleEnabledStateChange(true);
highlighter_controller()->FlushMojo();
EXPECT_EQ(1u, framework_instance()->set_metalayer_visibility_count());
EXPECT_TRUE(framework_instance()->metalayer_visible());
// Disabled state should propagate to the framework instance.
framework_instance()->ResetCounters();
highlighter_controller()->CallHandleEnabledStateChange(false);
highlighter_controller()->FlushMojo();
EXPECT_EQ(1u, framework_instance()->set_metalayer_visibility_count());
EXPECT_FALSE(framework_instance()->metalayer_visible());
// Enable the state again.
framework_instance()->ResetCounters();
highlighter_controller()->CallHandleEnabledStateChange(true);
highlighter_controller()->FlushMojo();
EXPECT_EQ(1u, framework_instance()->set_metalayer_visibility_count());
EXPECT_TRUE(framework_instance()->metalayer_visible());
// Simulate a valid selection.
framework_instance()->ResetCounters();
const gfx::Rect selection(100, 200, 300, 400);
highlighter_controller()->CallHandleSelection(selection);
highlighter_controller()->CallHandleEnabledStateChange(false);
highlighter_controller()->FlushMojo();
// Neither the selected region nor the state update should reach the
// framework instance yet.
EXPECT_EQ(0u, framework_instance()->start_session_for_region_count());
EXPECT_EQ(0u, framework_instance()->set_metalayer_visibility_count());
EXPECT_TRUE(framework_instance()->metalayer_visible());
framework_service()
->GetHighlighterClientForTesting()
->SimulateSelectionTimeoutForTesting();
// After a timeout, the selected region should reach the framework instance.
EXPECT_EQ(1u, framework_instance()->start_session_for_region_count());
EXPECT_EQ(selection.ToString(),
framework_instance()->selected_region().ToString());
// However, the state update should not be explicitly sent to the framework
// instance, since the state change is implied with a valid selection.
EXPECT_EQ(0u, framework_instance()->set_metalayer_visibility_count());
// Clear the framework instance to simulate the container crash.
// The client should become detached.
arc_bridge_service()->voice_interaction_framework()->CloseInstance(
framework_instance());
FlushHighlighterControllerMojo();
EXPECT_FALSE(highlighter_controller()->client_attached());
// Set the framework instance again to simulate the container restart.
// The client should become attached again.
arc_bridge_service()->voice_interaction_framework()->SetInstance(
framework_instance());
WaitForInstanceReady(arc_bridge_service()->voice_interaction_framework());
FlushHighlighterControllerMojo();
EXPECT_TRUE(highlighter_controller()->client_attached());
// State update should reach the client normally.
framework_instance()->ResetCounters();
highlighter_controller()->CallHandleEnabledStateChange(true);
highlighter_controller()->FlushMojo();
EXPECT_EQ(1u, framework_instance()->set_metalayer_visibility_count());
EXPECT_TRUE(framework_instance()->metalayer_visible());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest,
ExitVoiceInteractionAlsoExitHighlighter) {
highlighter_controller()->CallHandleEnabledStateChange(true);
framework_service()->ToggleSessionFromUserInteraction();
framework_instance()->FlushMojoForTesting();
FlushHighlighterControllerMojo();
EXPECT_EQ(ash::mojom::VoiceInteractionState::RUNNING,
framework_service()->GetStateForTesting());
framework_service()->ToggleSessionFromUserInteraction();
framework_instance()->FlushMojoForTesting();
FlushHighlighterControllerMojo();
EXPECT_EQ(ash::mojom::VoiceInteractionState::STOPPED,
framework_service()->GetStateForTesting());
EXPECT_FALSE(highlighter_controller()->is_enabled());
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest,
VoiceInteractionControllerClient) {
FakeVoiceInteractionController* controller = voice_interaction_controller();
VoiceInteractionControllerClient* controller_client =
voice_interaction_controller_client();
// The voice interaction flags should be set after the initial setup.
EXPECT_EQ(controller->voice_interaction_state(),
ash::mojom::VoiceInteractionState::STOPPED);
// Send the signal to set the voice interaction state.
controller_client->NotifyStatusChanged(
ash::mojom::VoiceInteractionState::RUNNING);
FlushVoiceInteractionControllerMojo();
EXPECT_EQ(controller->voice_interaction_state(),
ash::mojom::VoiceInteractionState::RUNNING);
}
TEST_F(ArcVoiceInteractionFrameworkServiceTest,
CapturingScreenshotBlocksIncognitoWindows) {
auto browser_window =
CreateTestBrowserWindow(ash::Shell::GetPrimaryRootWindow());
Browser::CreateParams params(profile(), true);
params.type = Browser::TYPE_TABBED;
params.window = browser_window.get();
Browser browser(params);
browser_window->GetNativeWindow()->Show();
profile()->ForceIncognito(true);
// Layer::RecreateLayer() will replace the window's layer with the newly
// created layer. Thus, we need to save the |old_layer| for comparison.
auto* old_layer = browser_window->GetNativeWindow()->layer();
auto layer_owner = framework_service()->CreateLayerTreeForSnapshotForTesting(
ash::Shell::GetPrimaryRootWindow());
EXPECT_FALSE(FindLayer(layer_owner->root(), old_layer));
profile()->ForceIncognito(false);
old_layer = browser_window->GetNativeWindow()->layer();
layer_owner = framework_service()->CreateLayerTreeForSnapshotForTesting(
ash::Shell::GetPrimaryRootWindow());
EXPECT_TRUE(FindLayer(layer_owner->root(), old_layer));
ash::Shell::GetPrimaryRootWindow()->RemoveChild(
browser_window->GetNativeWindow());
}
} // namespace arc
| 5,750 |
4,256 | <filename>tests/sidetrail/working/stubs/s2n_hash.h
/*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <stdint.h>
#include <openssl/sha.h>
#include <openssl/md5.h>
#include "crypto/s2n_evp.h"
#define S2N_MAX_DIGEST_LEN SHA512_DIGEST_LENGTH
typedef enum {
S2N_HASH_NONE,
S2N_HASH_MD5,
S2N_HASH_SHA1,
S2N_HASH_SHA224,
S2N_HASH_SHA256,
S2N_HASH_SHA384,
S2N_HASH_SHA512,
S2N_HASH_MD5_SHA1,
/* Don't add any hash algorithms below S2N_HASH_SENTINEL */
S2N_HASH_SENTINEL
} s2n_hash_algorithm;
struct s2n_hash_state {
s2n_hash_algorithm alg;
int currently_in_hash_block;
};
/* SHA1
* These fields were determined from the SHA specification, augmented by
* analyzing SHA implementations.
* PER_BLOCK_COST is the cost of a compression round. Pessimistically assume
* it is 1000 cycles/block, which is worse than real implementations (larger
* numbers here make lucky13 leakages look worse), and hence large is safer.
 * PER_BYTE_COST is the cost of copying one byte that is already in cache,
* to a location already in cache.
*/
enum {
PER_BLOCK_COST = 1000,
PER_BYTE_COST = 1,
BLOCK_SIZE = 64,
LENGTH_FIELD_SIZE = 8,
DIGEST_SIZE = 20
};
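/* One plausible reading of how these constants combine, for illustration
 * only (the exact accounting lives in the SideTrail models, not here):
 * hashing a 100-byte record under SHA1's padding (1 mandatory pad byte plus
 * the 8-byte length field) touches ceil((100 + 1 + 8) / 64) = 2 compression
 * blocks, so the model would charge roughly 2 * PER_BLOCK_COST +
 * 100 * PER_BYTE_COST = 2100 cycles; inflating PER_BLOCK_COST only makes
 * padding-dependent timing differences look larger, which is the safe
 * direction for Lucky13-style analysis. */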
#define MAX_SIZE 1024
enum {
SUCCESS = 0,
FAILURE = -1
};
extern int s2n_hash_digest_size(s2n_hash_algorithm alg, uint8_t *out);
extern int s2n_hash_new(struct s2n_hash_state *state);
S2N_RESULT s2n_hash_state_validate(struct s2n_hash_state *state);
extern int s2n_hash_init(struct s2n_hash_state *state, s2n_hash_algorithm alg);
extern int s2n_hash_update(struct s2n_hash_state *state, const void *data, uint32_t size);
extern int s2n_hash_digest(struct s2n_hash_state *state, void *out, uint32_t size);
extern int s2n_hash_copy(struct s2n_hash_state *to, struct s2n_hash_state *from);
extern int s2n_hash_reset(struct s2n_hash_state *state);
extern int s2n_hash_free(struct s2n_hash_state *state);
extern int s2n_hash_get_currently_in_hash_total(struct s2n_hash_state *state, uint64_t *out);
| 971 |
9,472 | <reponame>kzh3ka/japronto<gh_stars>1000+
import asyncio
from japronto import Application
# This is a synchronous handler.
def synchronous(request):
return request.Response(text='I am synchronous!')
# This is an asynchronous handler. It spends most of the time in the event loop.
# It wakes up every second to print and finally returns after 3 seconds.
# This lets other handlers execute in the same process while
# from the point of view of the client it took 3 seconds to complete.
async def asynchronous(request):
for i in range(1, 4):
await asyncio.sleep(1)
print(i, 'seconds elapsed')
return request.Response(text='3 seconds elapsed')
app = Application()
r = app.router
r.add_route('/sync', synchronous)
r.add_route('/async', asynchronous)
app.run()
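# A quick way to exercise both handlers once the server is up (this assumes
# japronto's default bind of 0.0.0.0:8080; adjust if you pass a different
# port to app.run()):
#
#   curl http://localhost:8080/sync    # returns immediately
#   curl http://localhost:8080/async   # returns after ~3 seconds
#
# Firing several /async requests at once shows them interleaving in the same
# event loop instead of blocking one another.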
| 248 |
1,350 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.hybridcompute.fluent;
import com.azure.core.http.HttpPipeline;
import java.time.Duration;
/** The interface for HybridComputeManagementClient class. */
public interface HybridComputeManagementClient {
/**
* Gets The ID of the target subscription.
*
* @return the subscriptionId value.
*/
String getSubscriptionId();
/**
* Gets server parameter.
*
* @return the endpoint value.
*/
String getEndpoint();
/**
* Gets Api Version.
*
* @return the apiVersion value.
*/
String getApiVersion();
/**
* Gets The HTTP pipeline to send requests through.
*
* @return the httpPipeline value.
*/
HttpPipeline getHttpPipeline();
/**
* Gets The default poll interval for long-running operation.
*
* @return the defaultPollInterval value.
*/
Duration getDefaultPollInterval();
/**
* Gets the MachinesClient object to access its operations.
*
* @return the MachinesClient object.
*/
MachinesClient getMachines();
/**
* Gets the MachineExtensionsClient object to access its operations.
*
* @return the MachineExtensionsClient object.
*/
MachineExtensionsClient getMachineExtensions();
/**
* Gets the OperationsClient object to access its operations.
*
* @return the OperationsClient object.
*/
OperationsClient getOperations();
/**
* Gets the PrivateLinkScopesClient object to access its operations.
*
* @return the PrivateLinkScopesClient object.
*/
PrivateLinkScopesClient getPrivateLinkScopes();
/**
* Gets the PrivateLinkResourcesClient object to access its operations.
*
* @return the PrivateLinkResourcesClient object.
*/
PrivateLinkResourcesClient getPrivateLinkResources();
/**
* Gets the PrivateEndpointConnectionsClient object to access its operations.
*
* @return the PrivateEndpointConnectionsClient object.
*/
PrivateEndpointConnectionsClient getPrivateEndpointConnections();
}
| 755 |
1,738 | <filename>dev/Code/Tools/CryCommonTools/ZipDir/ZipDirFindRW.h
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.
// Description : Declaration of the class that can be used to search for the entries
// in a zip dir cache
#ifndef CRYINCLUDE_CRYCOMMONTOOLS_ZIPDIR_ZIPDIRFINDRW_H
#define CRYINCLUDE_CRYCOMMONTOOLS_ZIPDIR_ZIPDIRFINDRW_H
#pragma once
namespace ZipDir
{
// create this structure and loop:
// FindData fd (pZip);
// for (fd.FindFirst("*.cgf"); fd.GetFileEntry(); fd.FindNext())
// {} // inside the loop, use GetFileEntry() and GetFileName() to get the file entry and name records
class FindDataRW
{
public:
FindDataRW (FileEntryTree* pRoot)
: m_pRoot (pRoot)
, m_pDirHeader (NULL)
{
}
// returns the directory to which the current object belongs
FileEntryTree* GetParentDir() {return m_pDirHeader; }
protected:
// initializes everything until the point where the file must be searched for
// after this call returns successfully (with true returned), the m_szWildcard
// contains the file name/wildcard and m_pDirHeader contains the directory where
// the file (s) are to be found
bool PreFind (const char* szWildcard);
    // matches the file wildcard in the m_szWildcard to the given file/dir name
// this takes into account the fact that xxx. is the alias name for xxx
bool MatchWildcard(const char* szName);
// the directory inside which the current object (file or directory) is being searched
FileEntryTree* m_pDirHeader;
FileEntryTree* m_pRoot; // the root of the zip file in which to search
// the actual wildcard being used in the current scan - the file name wildcard only!
char m_szWildcard[_MAX_PATH];
};
class FindFileRW
: public FindDataRW
{
public:
FindFileRW (FileEntryTree* pRoot)
: FindDataRW(pRoot)
{
}
// if bExactFile is passed, only the file is searched, and besides with the exact name as passed (no wildcards)
bool FindFirst (const char* szWildcard);
FileEntry* FindExact (const char* szPath);
// goes on to the next file entry
bool FindNext ();
FileEntry* GetFileEntry();
const char* GetFileName ();
protected:
bool SkipNonMatchingFiles();
FileEntryTree::FileMap::iterator m_itFile; // the current file iterator inside the parent directory
};
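    // A minimal usage sketch for the RW variants declared here, mirroring the
    // FindData loop shown at the top of this file (pRoot is assumed to be the
    // FileEntryTree of an already-open archive):
    //
    //   FindFileRW ff(pRoot);
    //   for (ff.FindFirst("*.cgf"); ff.GetFileEntry(); ff.FindNext())
    //   {
    //       // ff.GetFileName() / ff.GetFileEntry() describe the current match
    //   }
    //
    // FindDirRW below follows the same pattern via GetDirEntry()/GetDirName().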
class FindDirRW
: public FindDataRW
{
public:
FindDirRW (FileEntryTree* pRoot)
: FindDataRW(pRoot)
{
}
// if bExactFile is passed, only the file is searched, and besides with the exact name as passed (no wildcards)
bool FindFirst (const char* szWildcard);
FileEntryTree* FindExact (const char* szPath);
// goes on to the next file entry
bool FindNext ();
FileEntryTree* GetDirEntry();
const char* GetDirName ();
protected:
bool SkipNonMatchingDirs();
FileEntryTree::SubdirMap::iterator m_itDir; // the current dir index inside the parent directory
};
}
#endif // CRYINCLUDE_CRYCOMMONTOOLS_ZIPDIR_ZIPDIRFINDRW_H
| 1,434 |
839 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.jca.cxf;
import java.net.URL;
import java.util.Properties;
import java.util.ResourceBundle;
import java.util.logging.Logger;
import javax.resource.ResourceException;
import javax.resource.spi.ConnectionManager;
import javax.resource.spi.ConnectionRequestInfo;
import javax.resource.spi.ManagedConnection;
import javax.security.auth.Subject;
import org.apache.cxf.Bus;
import org.apache.cxf.common.i18n.BundleUtils;
import org.apache.cxf.common.i18n.Message;
import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.jca.core.resourceadapter.AbstractManagedConnectionFactoryImpl;
import org.apache.cxf.jca.core.resourceadapter.AbstractManagedConnectionImpl;
import org.apache.cxf.jca.core.resourceadapter.ResourceAdapterInternalException;
public class ManagedConnectionFactoryImpl
extends AbstractManagedConnectionFactoryImpl
implements CXFManagedConnectionFactory {
private static final long serialVersionUID = -891927761318109853L;
private static final Logger LOG = LogUtils.getL7dLogger(ManagedConnectionFactoryImpl.class);
private static final ResourceBundle BUNDLE = BundleUtils.getBundle(ManagedConnectionFactoryImpl.class);
protected JCABusFactory jcaBusFactory;
public ManagedConnectionFactoryImpl() {
super();
LOG.info("ManagedConnectionFactoryImpl constructed without props by appserver...");
}
public ManagedConnectionFactoryImpl(Properties props) {
super(props);
LOG.info("ManagedConnectionFactoryImpl constructed with props by appserver. props = " + props);
}
public void setLogLevel(String logLevel) {
setProperty(LOG_LEVEL, logLevel);
}
public void setEJBServicePropertiesURL(String name) {
setProperty(EJB_SERVICE_PROPERTIES_URL, name);
}
public void setMonitorEJBServiceProperties(Boolean monitor) {
setProperty(MONITOR_EJB_SERVICE_PROPERTIES, monitor.toString());
}
public void setEJBServicePropertiesPollInterval(Integer pollInterval) {
setProperty(MONITOR_POLL_INTERVAL, pollInterval.toString());
}
public String getLogLevel() {
return getPluginProps().getProperty(LOG_LEVEL);
}
public String getEJBServicePropertiesURL() {
return getPluginProps().getProperty(EJB_SERVICE_PROPERTIES_URL);
}
public Boolean getMonitorEJBServiceProperties() {
return Boolean.valueOf(getPluginProps().getProperty(MONITOR_EJB_SERVICE_PROPERTIES));
}
public Integer getEJBServicePropertiesPollInterval() {
return Integer.valueOf(getPluginProps().getProperty(MONITOR_POLL_INTERVAL,
DEFAULT_MONITOR_POLL_INTERVAL));
}
public URL getEJBServicePropertiesURLInstance() throws ResourceException {
return getPropsURL(getEJBServicePropertiesURL());
}
public String getEJBServantBaseURL() throws ResourceException {
return getPluginProps().getProperty(EJB_SERVANT_BASE_URL);
}
public void setEJBServantBaseURL(String url) throws ResourceException {
setProperty(EJB_SERVANT_BASE_URL, url);
}
// compliance: WL9 checks
// need to ensure multiple instances with same config properties are equal
// multiple instances with same config do not make sense to me
protected void validateReference(AbstractManagedConnectionImpl conn, javax.security.auth.Subject subj) {
}
public Object createConnectionFactory() throws ResourceException {
throw new ResourceAdapterInternalException(
new Message("NON_MANAGED_CONNECTION_IS_NOT_SUPPORTED", BUNDLE).toString());
}
public Object createConnectionFactory(ConnectionManager connMgr) throws ResourceException {
LOG.info("connManager=" + connMgr);
if (connMgr == null) {
throw new ResourceAdapterInternalException(
new Message("NON_MANAGED_CONNECTION_IS_NOT_SUPPORTED", BUNDLE).toString());
}
init(connMgr.getClass().getClassLoader());
LOG.fine("Setting AppServer classloader in jcaBusFactory. " + connMgr.getClass().getClassLoader());
return new ConnectionFactoryImpl(this, connMgr);
}
public ManagedConnection createManagedConnection(Subject subject, ConnectionRequestInfo connReqInfo)
throws ResourceException {
LOG.info("create connection, subject=" + subject + " connReqInfo=" + connReqInfo);
init(Thread.currentThread().getContextClassLoader());
return new ManagedConnectionImpl(this, connReqInfo, subject);
}
public void close() throws javax.resource.spi.ResourceAdapterInternalException {
}
protected synchronized void init(ClassLoader appserverClassLoader) throws ResourceException {
if (jcaBusFactory == null) {
jcaBusFactory = new JCABusFactory(this);
jcaBusFactory.create(appserverClassLoader, getBootstrapContext());
}
}
public Bus getBus() {
return (jcaBusFactory != null) ? jcaBusFactory.getBus() : null;
}
protected Object getBootstrapContext() {
return null;
}
}
| 2,052 |
502 | <filename>wlplayer/app/src/main/java/com/ywl5320/player/base/BaseBean.java<gh_stars>100-1000
package com.ywl5320.player.base;
import com.ywl5320.player.util.BeanUtil;
import java.io.Serializable;
/**
* Created by ywl5320 on 2017/9/4.
*/
public class BaseBean implements Serializable{
public static final long serialVersionUID = -316172390920775219L;
@Override
public String toString() {
return BeanUtil.bean2string(this);
}
}
| 178 |
460 | /*
* Copyright 2018 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.internal;
import java.util.function.Consumer;
import org.junit.Assert;
import org.junit.Test;
/**
* Basic tests for the {@link FileSystemLayout} class.
*/
public class FileSystemLayoutTest {
/**
* Verify the default behavior of the FileSystemLayout w.r.t current build system.
*/
@Test
public void verifyDefaultBehavior() {
withProperty(null, layout -> {
// By default, we should be getting the MavenFileSystemLayout
Assert.assertEquals("Unexpected file system layout. Did the default layout change?",
MavenFileSystemLayout.class, layout.getClass());
});
}
/**
* Verify that we can specify a custom "valid" FileSystemLayout.
*/
@Test
public void verifySystemProperty() {
withProperty("org.wildfly.swarm.internal.GradleFileSystemLayout", layout -> {
Assert.assertEquals("Unexpected file system layout.", GradleFileSystemLayout.class, layout.getClass());
});
}
/**
* Verify that we get the default layout when using an invalid system property.
*/
@Test
public void verifyInvalidSystemProperty() {
withProperty(" ", layout -> {
// By default, we should be getting the MavenFileSystemLayout
Assert.assertEquals("Unexpected file system layout. Did the default layout change?",
MavenFileSystemLayout.class, layout.getClass());
});
}
/**
* Verify that we can pass in a custom layout via the system properties.
*/
@Test
public void verifyCustomLayout() {
withProperty("org.wildfly.swarm.internal.CustomFileSystemLayout", layout -> {
Assert.assertEquals("Unexpected file system layout.", CustomFileSystemLayout.class, layout.getClass());
});
}
/**
* Verify that an instantiation exception in the custom layout is propagated all through.
*/
@Test(expected = IllegalArgumentException.class)
public void verifyCustomLayoutFailure() {
withProperty("org.wildfly.swarm.internal.CustomFileSystemLayout$CustomInvalidFileSystemLayout", layout -> {
Assert.fail("An exception is expected at this point.");
});
}
/**
* Convenience method that sets & clears out the property for the FileSystemLayout. This method is synchronized so that
     * we don't run into race conditions if the tests are run in parallel.
*
* @param propertyValue the value of the implementation class.
* @param consumer a simple function that needs to be invoked on the FileSystemLayout that was generated.
*/
private synchronized void withProperty(String propertyValue, Consumer<FileSystemLayout> consumer) {
try {
if (propertyValue != null) {
System.setProperty(FileSystemLayout.CUSTOM_LAYOUT_CLASS, propertyValue);
}
FileSystemLayout layout = FileSystemLayout.create();
consumer.accept(layout);
} finally {
System.clearProperty(FileSystemLayout.CUSTOM_LAYOUT_CLASS);
}
}
}
| 1,316 |
577 | <filename>fluentlenium-core/src/main/java/org/fluentlenium/core/inject/Parent.java
package org.fluentlenium.core.inject;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
 * Mark a field whose class declaration is annotated with one of Selenium's {@code Find...} annotations,
* with this annotation to inject the parent container. Parent container in this case means parent in the HTML DOM.
*
 * It doesn't matter if the parent is a direct parent or any element higher up in the DOM relative to the current element.
* What element the parent represents depends on the {@code Find...} annotation applied on the parent component or page class.
*
* <h3>Examples</h3>
* For an HTML snippet like
* <pre>
* <div id="homepage">
* <div class="component">
* <div class="sub-component">
* ...
* </div>
* </div>
* </div>
* </pre>
* there can be a custom component for the {@code div.component} element:
* <pre>
* @FindBy(className = "component")
* public class Component extends FluentWebElement {
* @Parent
* public Homepage homepage;
* ...
* }
* </pre>
* for which a sub-component can include it as a parent component:
* <pre>
* @FindBy(className = "sub-component")
* public class SubComponent extends FluentWebElement {
* @Parent
* public Component parent;
* ...
* }
* </pre>
* Similarly a parent can be created for {@code Component} in the form of a page:
* <pre>
* @FindBy(id = "homepage")
* public class Homepage extends FluentPage {
* ...
* }
* </pre>
* This structure can be achieved with any page-component/component-page relationship using custom
* {@link org.fluentlenium.core.FluentPage} and {@link org.fluentlenium.core.domain.FluentWebElement} implementations.
*/
@Target(FIELD)
@Retention(RUNTIME)
public @interface Parent {
}
| 695 |
14,668 | /* Copyright (c) 2013 The Chromium Authors. All rights reserved.
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file. */
/* XRay -- a simple profiler for Native Client */
#include <assert.h>
#include <errno.h>
#include <stdarg.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <unistd.h>
#include "xray/xray_priv.h"
#if defined(XRAY)
/* GTSC - Get Time Stamp Counter */
#if defined(__amd64__) && !defined(XRAY_NO_RDTSC)
XRAY_INLINE uint64_t RDTSC64();
uint64_t RDTSC64() {
uint64_t a, d;
__asm__ __volatile__("rdtsc" : "=a" (a), "=d" (d));
return ((uint64_t)a) | (((uint64_t)d) << 32);
}
#define GTSC(_x) _x = RDTSC64()
#elif defined(__i386__) && !defined(XRAY_NO_RDTSC)
#define GTSC(_x) __asm__ __volatile__ ("rdtsc" : "=A" (_x));
#else
XRAY_INLINE uint64_t GTOD();
uint64_t GTOD() {
struct timeval tv;
gettimeofday(&tv, NULL);
return (uint64_t)tv.tv_sec * 1000000 + (uint64_t)tv.tv_usec;
}
#define GTSC(_x) _x = GTOD();
#endif
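/* A small sketch of how GTSC is used below (do_work is just a placeholder):
 *
 *   uint64_t t0, t1;
 *   GTSC(t0);
 *   do_work();
 *   GTSC(t1);
 *   uint64_t elapsed = t1 - t0;
 *
 * The units are raw TSC ticks or microseconds depending on which branch of
 * the #if above was compiled in. */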
/* Use a TLS variable for cheap thread uid. */
__thread struct XRayTraceCapture* g_xray_capture = NULL;
__thread int g_xray_thread_id_placeholder = 0;
struct XRayTraceStackEntry {
uint32_t depth_addr;
uint64_t tsc;
uint32_t dest;
uint32_t annotation_index;
};
struct XRayTraceFrameEntry {
/* Indices into global tracebuffer */
int start;
int end;
uint64_t start_tsc;
uint64_t end_tsc;
uint64_t total_ticks;
int annotation_count;
bool valid;
#ifndef XRAY_DISABLE_BROWSER_INTEGRATION
struct XRayTimestampPair start_time;
struct XRayTimestampPair end_time;
#endif
};
struct XRayTraceFrame {
struct XRayTraceFrameEntry* entry;
int head;
int tail;
int count;
};
struct XRayTraceCapture {
/* Common variables share cache line */
bool recording;
uint32_t stack_depth;
uint32_t max_stack_depth;
int buffer_index;
int buffer_size;
int disabled;
int annotation_count;
struct XRaySymbolTable* symbols;
bool initialized;
uint32_t annotation_filter;
uint32_t guard0;
struct XRayTraceStackEntry stack[XRAY_TRACE_STACK_SIZE] XRAY_ALIGN64;
uint32_t guard1;
uint32_t guard2;
char annotation[XRAY_ANNOTATION_STACK_SIZE] XRAY_ALIGN64;
uint32_t guard3;
struct XRayTraceBufferEntry* buffer;
struct XRayTraceFrame frame;
#ifndef XRAY_DISABLE_BROWSER_INTEGRATION
int32_t thread_id;
#endif
} XRAY_ALIGN64;
#ifdef __cplusplus
extern "C" {
#endif
#if defined(__pnacl__)
XRAY_NO_INSTRUMENT void __pnacl_profile_func_enter(const char* fname);
XRAY_NO_INSTRUMENT void __pnacl_profile_func_exit(const char* fname);
#else
XRAY_NO_INSTRUMENT void __cyg_profile_func_enter(void* this_fn,
void* call_site);
XRAY_NO_INSTRUMENT void __cyg_profile_func_exit(void* this_fn,
void* call_site);
#endif
XRAY_INLINE int XRayTraceDecrementIndexInline(
struct XRayTraceCapture* capture, int index);
XRAY_INLINE int XRayTraceIncrementIndexInline(
struct XRayTraceCapture* capture, int index);
XRAY_NO_INSTRUMENT void __xray_profile_append_annotation(
struct XRayTraceCapture* capture,
struct XRayTraceStackEntry* se,
struct XRayTraceBufferEntry* be);
#ifdef __cplusplus
}
#endif
/* Asserts that the guard values haven't changed. */
void XRayCheckGuards(struct XRayTraceCapture* capture) {
assert(capture->guard0 == XRAY_GUARD_VALUE_0x12345678);
assert(capture->guard1 == XRAY_GUARD_VALUE_0x12345678);
assert(capture->guard2 == XRAY_GUARD_VALUE_0x87654321);
assert(capture->guard3 == XRAY_GUARD_VALUE_0x12345678);
}
/* Decrements the trace index, wrapping around if needed. */
int XRayTraceDecrementIndexInline(
struct XRayTraceCapture* capture, int index) {
--index;
if (index < 0)
index = capture->buffer_size - 1;
return index;
}
/* Increments the trace index, wrapping around if needed. */
int XRayTraceIncrementIndexInline(
struct XRayTraceCapture* capture, int index) {
++index;
if (index >= capture->buffer_size)
index = 0;
return index;
}
/* Returns true if the trace entry is an annotation string. */
bool XRayTraceIsAnnotation(
struct XRayTraceCapture* capture, int index) {
struct XRayTraceBufferEntry* be = &capture->buffer[index];
char* dst = (char*)be;
return 0 == *dst;
}
int XRayTraceIncrementIndex(struct XRayTraceCapture* capture, int index) {
return XRayTraceIncrementIndexInline(capture, index);
}
int XRayTraceDecrementIndex(struct XRayTraceCapture* capture, int index) {
return XRayTraceDecrementIndexInline(capture, index);
}
/* The entry in the tracebuffer at index is an annotation string. */
/* Calculate the next index value representing the next trace entry. */
int XRayTraceSkipAnnotation(struct XRayTraceCapture* capture, int index) {
/* Annotations are strings embedded in the trace buffer. */
/* An annotation string can span multiple trace entries. */
/* Skip over the string by looking for zero termination. */
assert(capture);
assert(XRayTraceIsAnnotation(capture, index));
bool done = false;
int start_index = 1;
int i;
while (!done) {
char* str = (char*) &capture->buffer[index];
const int num = sizeof(capture->buffer[index]);
for (i = start_index; i < num; ++i) {
if (0 == str[i]) {
done = true;
break;
}
}
index = XRayTraceIncrementIndexInline(capture, index);
start_index = 0;
}
return index;
}
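/* To make the scan above concrete: if sizeof(struct XRayTraceBufferEntry)
 * were 16 bytes (illustrative only; the real layout is in xray_priv.h), a
 * 20-character annotation would span two consecutive entries. Byte 0 of the
 * first entry holds the 0 marker that identifies an annotation, bytes 1..15
 * hold the first 15 characters, and the remaining characters plus the
 * terminating 0 spill into the following entry, so the scan keeps walking
 * entries until it finds that terminator. */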
struct XRayTraceBufferEntry* XRayTraceGetEntry(
struct XRayTraceCapture* capture, int index) {
return &capture->buffer[index];
}
/* Starting at index, return the index into the trace buffer */
/* for the next trace entry. Index can wrap (ringbuffer) */
int XRayTraceNextEntry(struct XRayTraceCapture* capture, int index) {
if (XRayTraceIsAnnotation(capture, index))
index = XRayTraceSkipAnnotation(capture, index);
else
index = XRayTraceIncrementIndexInline(capture, index);
return index;
}
int XRayFrameGetTraceStartIndex(struct XRayTraceCapture* capture, int frame) {
assert(capture);
assert(capture->initialized);
assert(!capture->recording);
return capture->frame.entry[frame].start;
}
int XRayFrameGetTraceEndIndex(struct XRayTraceCapture* capture, int frame) {
assert(capture);
assert(capture->initialized);
assert(!capture->recording);
return capture->frame.entry[frame].end;
}
/* Not very accurate, annotation strings will also be counted as "entries" */
int XRayFrameGetTraceCount(
struct XRayTraceCapture* capture, int frame) {
assert(true == capture->initialized);
assert(frame >= 0);
assert(frame < capture->frame.count);
assert(!capture->recording);
int start = capture->frame.entry[frame].start;
int end = capture->frame.entry[frame].end;
int num;
if (start < end)
num = end - start;
else
num = capture->buffer_size - (start - end);
return num;
}
/* Append a string to trace buffer. */
void XRayTraceAppendString(struct XRayTraceCapture* capture, char* src) {
int index = capture->buffer_index;
bool done = false;
int start_index = 1;
int s = 0;
int i;
char* dst = (char*)&capture->buffer[index];
const int num = sizeof(capture->buffer[index]);
dst[0] = 0;
while (!done) {
for (i = start_index; i < num; ++i) {
dst[i] = src[s];
if (0 == src[s]) {
dst[i] = 0;
done = true;
break;
}
++s;
}
index = XRayTraceIncrementIndexInline(capture, index);
dst = (char*)&capture->buffer[index];
start_index = 0;
}
capture->buffer_index = index;
}
/* Copies annotation from trace buffer to output string. */
int XRayTraceCopyToString(
struct XRayTraceCapture* capture, int index, char* dst) {
assert(XRayTraceIsAnnotation(capture, index));
bool done = false;
int i;
int d = 0;
int start_index = 1;
while (!done) {
char* src = (char*) &capture->buffer[index];
const int num = sizeof(capture->buffer[index]);
for (i = start_index; i < num; ++i) {
dst[d] = src[i];
if (0 == src[i]) {
done = true;
break;
}
++d;
}
index = XRayTraceIncrementIndexInline(capture, index);
start_index = 0;
}
return index;
}
/* Generic memory malloc for XRay */
/* validates pointer returned by malloc */
/* memsets memory block to zero */
void* XRayMalloc(size_t t) {
void* data;
data = calloc(1, t);
if (NULL == data) {
printf("XRay: malloc(%d) failed, panic shutdown!\n", t);
exit(-1);
}
return data;
}
/* Generic memory free for XRay */
void XRayFree(void* data) {
assert(NULL != data);
free(data);
}
/* Main profile capture function that is called at the start */
/* of every instrumented function. This function is implicitly */
/* called when code is compiled with the -finstrument-functions option */
#if defined(__pnacl__)
void __pnacl_profile_func_enter(const char* this_fn) {
#else
void __cyg_profile_func_enter(void* this_fn, void* call_site) {
#endif
struct XRayTraceCapture* capture = g_xray_capture;
if (capture && capture->recording) {
uint32_t depth = capture->stack_depth;
if (depth < capture->max_stack_depth) {
struct XRayTraceStackEntry* se = &capture->stack[depth];
uint32_t addr = (uint32_t)(uintptr_t)this_fn;
se->depth_addr = XRAY_PACK_DEPTH_ADDR(depth, addr);
se->dest = capture->buffer_index;
se->annotation_index = 0;
GTSC(se->tsc);
capture->buffer_index =
XRayTraceIncrementIndexInline(capture, capture->buffer_index);
}
++capture->stack_depth;
}
}
/* Main profile capture function that is called at the exit of */
/* every instrumented function. This function is implicitly called */
/* when the code is compiled with the -finstrument-functions option */
#if defined(__pnacl__)
void __pnacl_profile_func_exit(const char* this_fn) {
#else
void __cyg_profile_func_exit(void* this_fn, void* call_site) {
#endif
struct XRayTraceCapture* capture = g_xray_capture;
if (capture && capture->recording) {
--capture->stack_depth;
if (capture->stack_depth < capture->max_stack_depth) {
uint32_t depth = capture->stack_depth;
struct XRayTraceStackEntry* se = &capture->stack[depth];
uint32_t buffer_index = se->dest;
uint64_t tsc;
struct XRayTraceBufferEntry* be = &capture->buffer[buffer_index];
GTSC(tsc);
be->depth_addr = se->depth_addr;
be->start_tick = se->tsc;
be->end_tick = tsc;
be->annotation_index = 0;
if (0 != se->annotation_index)
__xray_profile_append_annotation(capture, se, be);
}
}
}
#ifndef XRAY_DISABLE_BROWSER_INTEGRATION
void XRayGetTSC(uint64_t* tsc) {
GTSC(*tsc);
}
int32_t XRayGetSavedThreadID(struct XRayTraceCapture* capture) {
return capture->thread_id;
}
struct XRayTimestampPair XRayFrameGetStartTimestampPair(
struct XRayTraceCapture* capture, int frame) {
return capture->frame.entry[frame].start_time;
}
struct XRayTimestampPair XRayFrameGetEndTimestampPair(
struct XRayTraceCapture* capture, int frame) {
return capture->frame.entry[frame].end_time;
}
#endif
/* Special case appending annotation string to trace buffer */
/* this function should only ever be called from __cyg_profile_func_exit() */
void __xray_profile_append_annotation(struct XRayTraceCapture* capture,
struct XRayTraceStackEntry* se,
struct XRayTraceBufferEntry* be) {
struct XRayTraceStackEntry* parent = se - 1;
int start = parent->annotation_index;
be->annotation_index = capture->buffer_index;
char* str = &capture->annotation[start];
XRayTraceAppendString(capture, str);
*str = 0;
++capture->annotation_count;
}
/* Annotates the trace buffer. no filtering. */
void __XRayAnnotate(const char* fmt, ...) {
va_list args;
struct XRayTraceCapture* capture = g_xray_capture;
/* Only annotate functions recorded in the trace buffer. */
if (capture && capture->initialized) {
if (0 == capture->disabled) {
if (capture->recording) {
char buffer[1024];
int r;
va_start(args, fmt);
r = vsnprintf(buffer, sizeof(buffer), fmt, args);
va_end(args);
{
/* Get current string ptr */
int depth = capture->stack_depth - 1;
struct XRayTraceStackEntry* se = &capture->stack[depth];
if (0 == se->annotation_index) {
struct XRayTraceStackEntry* parent = se - 1;
se->annotation_index = parent->annotation_index;
}
char* dst = &capture->annotation[se->annotation_index];
strcpy(dst, buffer);
int len = strlen(dst);
se->annotation_index += len;
}
}
}
}
}
/* Annotates the trace buffer with user strings. Can be filtered. */
void __XRayAnnotateFiltered(const uint32_t filter, const char* fmt, ...) {
va_list args;
struct XRayTraceCapture* capture = g_xray_capture;
if (capture && capture->initialized) {
if (0 != (filter & capture->annotation_filter)) {
if (0 == capture->disabled) {
if (capture->recording) {
char buffer[XRAY_TRACE_ANNOTATION_LENGTH];
int r;
va_start(args, fmt);
r = vsnprintf(buffer, sizeof(buffer), fmt, args);
va_end(args);
{
/* get current string ptr */
int depth = capture->stack_depth - 1;
struct XRayTraceStackEntry* se = &capture->stack[depth];
if (0 == se->annotation_index) {
struct XRayTraceStackEntry* parent = se - 1;
se->annotation_index = parent->annotation_index;
}
char* dst = &capture->annotation[se->annotation_index];
strcpy(dst, buffer);
int len = strlen(dst);
se->annotation_index += len;
}
}
}
}
}
}
/* Allows user to specify annotation filter value, a 32 bit mask. */
void XRaySetAnnotationFilter(struct XRayTraceCapture* capture,
uint32_t filter) {
capture->annotation_filter = filter;
}
/* Reset xray profiler. */
void XRayReset(struct XRayTraceCapture* capture) {
assert(capture);
assert(capture->initialized);
assert(!capture->recording);
capture->buffer_index = 0;
capture->stack_depth = 0;
capture->disabled = 0;
capture->frame.head = 0;
capture->frame.tail = 0;
memset(capture->frame.entry, 0,
sizeof(capture->frame.entry[0]) * capture->frame.count);
memset(&capture->stack, 0,
sizeof(capture->stack[0]) * XRAY_TRACE_STACK_SIZE);
XRayCheckGuards(capture);
}
/* Change the maximum stack depth captures are made. */
void XRaySetMaxStackDepth(struct XRayTraceCapture* capture, int stack_depth) {
assert(capture);
assert(capture->initialized);
assert(!capture->recording);
if (stack_depth < 1)
stack_depth = 1;
if (stack_depth >= XRAY_TRACE_STACK_SIZE)
stack_depth = (XRAY_TRACE_STACK_SIZE - 1);
capture->max_stack_depth = stack_depth;
}
int XRayFrameGetCount(struct XRayTraceCapture* capture) {
return capture->frame.count;
}
int XRayFrameGetTail(struct XRayTraceCapture* capture) {
return capture->frame.tail;
}
int XRayFrameGetHead(struct XRayTraceCapture* capture) {
return capture->frame.head;
}
int XRayFrameGetPrev(struct XRayTraceCapture* capture, int i) {
i = i - 1;
if (i < 0)
i = capture->frame.count - 1;
return i;
}
int XRayFrameGetNext(struct XRayTraceCapture* capture, int i) {
i = i + 1;
if (i >= capture->frame.count)
i = 0;
return i;
}
bool XRayFrameIsValid(struct XRayTraceCapture* capture, int i) {
return capture->frame.entry[i].valid;
}
uint64_t XRayFrameGetTotalTicks(struct XRayTraceCapture* capture, int i) {
return capture->frame.entry[i].total_ticks;
}
int XRayFrameGetAnnotationCount(struct XRayTraceCapture* capture, int i) {
return capture->frame.entry[i].annotation_count;
}
void XRayFrameMakeLabel(struct XRayTraceCapture* capture,
int counter,
char* label) {
snprintf(label, XRAY_MAX_LABEL, "@@@frame%d@@@", counter);
}
/* Scans the ring buffer going backwards to find last valid complete frame. */
/* Will mark whether frames are valid or invalid during the traversal. */
int XRayFrameFindTail(struct XRayTraceCapture* capture) {
int head = capture->frame.head;
int index = XRayFrameGetPrev(capture, head);
int total_capture = 0;
int last_valid_frame = index;
/* Check for no captures */
if (capture->frame.head == capture->frame.tail)
return capture->frame.head;
/* Go back and invalidate all captures that have been stomped. */
while (index != head) {
bool valid = capture->frame.entry[index].valid;
if (valid) {
total_capture += XRayFrameGetTraceCount(capture, index) + 1;
if (total_capture < capture->buffer_size) {
last_valid_frame = index;
capture->frame.entry[index].valid = true;
} else {
capture->frame.entry[index].valid = false;
}
}
index = XRayFrameGetPrev(capture, index);
}
return last_valid_frame;
}
/* Starts a new frame and enables capturing, and must be paired with */
/* XRayEndFrame() Trace capturing only occurs on the thread which called */
/* XRayBeginFrame() and each instance of capture can only trace one thread */
/* at a time. */
void XRayStartFrame(struct XRayTraceCapture* capture) {
int i;
assert(NULL == g_xray_capture);
assert(capture->initialized);
assert(!capture->recording);
i = capture->frame.head;
XRayCheckGuards(capture);
/* Add a trace entry marker so we can detect wrap around stomping */
struct XRayTraceBufferEntry* be = &capture->buffer[capture->buffer_index];
be->depth_addr = XRAY_FRAME_MARKER;
capture->buffer_index =
XRayTraceIncrementIndex(capture, capture->buffer_index);
/* Set start of the frame we're about to trace */
capture->frame.entry[i].start = capture->buffer_index;
capture->disabled = 0;
capture->stack_depth = 1;
/* The trace stack[0] is reserved */
memset(&capture->stack[0], 0, sizeof(capture->stack[0]));
/* Annotation index 0 is reserved to indicate no annotation */
capture->stack[0].annotation_index = 1;
capture->annotation[0] = 0;
capture->annotation[1] = 0;
capture->annotation_count = 0;
capture->recording = true;
GTSC(capture->frame.entry[i].start_tsc);
g_xray_capture = capture;
#ifndef XRAY_DISABLE_BROWSER_INTEGRATION
capture->frame.entry[i].start_time = XRayGenerateTimestampsNow();
#endif
}
/* Ends a frame and disables capturing. Advances to the next frame. */
/* Must be paired with XRayStartFrame(), and called from the same thread. */
void XRayEndFrame(struct XRayTraceCapture* capture) {
int i;
assert(capture);
assert(capture->initialized);
assert(capture->recording);
assert(g_xray_capture == capture);
assert(0 == capture->disabled);
assert(1 == capture->stack_depth);
i = capture->frame.head;
GTSC(capture->frame.entry[i].end_tsc);
capture->frame.entry[i].total_ticks =
capture->frame.entry[i].end_tsc - capture->frame.entry[i].start_tsc;
  capture->recording = false;
capture->frame.entry[i].end = capture->buffer_index;
capture->frame.entry[i].valid = true;
capture->frame.entry[i].annotation_count = capture->annotation_count;
capture->frame.head = XRayFrameGetNext(capture, capture->frame.head);
/* If the table is filled, bump the tail. */
if (capture->frame.head == capture->frame.tail)
capture->frame.tail = XRayFrameGetNext(capture, capture->frame.tail);
capture->frame.tail = XRayFrameFindTail(capture);
/* Check that we didn't stomp over trace entry marker. */
int marker = XRayTraceDecrementIndex(capture, capture->frame.entry[i].start);
struct XRayTraceBufferEntry* be = &capture->buffer[marker];
if (be->depth_addr != XRAY_FRAME_MARKER) {
fprintf(stderr,
"XRay: XRayStopFrame() detects insufficient trace buffer size!\n");
XRayReset(capture);
} else {
/* Replace marker with an empty annotation string. */
be->depth_addr = XRAY_NULL_ANNOTATION;
XRayCheckGuards(capture);
}
g_xray_capture = NULL;
#ifndef XRAY_DISABLE_BROWSER_INTEGRATION
capture->frame.entry[i].end_time = XRayGenerateTimestampsNow();
#endif
}
/* Get the last frame captured. Do not call while capturing. */
/* (i.e. call outside of an XRayStartFrame() / XRayEndFrame() pair) */
int XRayGetLastFrame(struct XRayTraceCapture* capture) {
assert(capture);
assert(capture->initialized);
assert(!capture->recording);
assert(0 == capture->disabled);
assert(1 == capture->stack_depth);
int last_frame = XRayFrameGetPrev(capture, capture->frame.head);
return last_frame;
}
/* Disables capturing until a paired XRayEnableCapture() is called */
/* This call can be nested, but must be paired with an enable */
/* (If you need to just exclude a specific function and not its */
/* children, the XRAY_NO_INSTRUMENT modifier might be better) */
/* Must be called from the same thread as XRayStartFrame() / XRayEndFrame(). */
void XRayDisableCapture(struct XRayTraceCapture* capture) {
assert(capture);
assert(capture == g_xray_capture);
assert(capture->initialized);
++capture->disabled;
capture->recording = false;
}
/* Re-enables capture. Must be paired with XRayDisableCapture() */
void XRayEnableCapture(struct XRayTraceCapture* capture) {
assert(capture);
assert(capture == g_xray_capture);
assert(capture->initialized);
assert(0 < capture->disabled);
--capture->disabled;
if (0 == capture->disabled) {
capture->recording = true;
}
}
struct XRaySymbolTable* XRayGetSymbolTable(struct XRayTraceCapture* capture) {
return capture->symbols;
}
/* Initialize XRay */
struct XRayTraceCapture* XRayInit(int stack_depth,
int buffer_size,
int frame_count,
const char* mapfilename) {
struct XRayTraceCapture* capture;
capture = (struct XRayTraceCapture*)XRayMalloc(
sizeof(struct XRayTraceCapture));
int adj_frame_count = frame_count + 1;
size_t buffer_size_in_bytes =
sizeof(capture->buffer[0]) * buffer_size;
size_t frame_size_in_bytes =
sizeof(capture->frame.entry[0]) * adj_frame_count;
capture->buffer =
(struct XRayTraceBufferEntry *)XRayMalloc(buffer_size_in_bytes);
capture->frame.entry =
(struct XRayTraceFrameEntry *)XRayMalloc(frame_size_in_bytes);
capture->buffer_size = buffer_size;
capture->frame.count = adj_frame_count;
capture->frame.head = 0;
capture->frame.tail = 0;
capture->disabled = 0;
capture->annotation_filter = 0xFFFFFFFF;
capture->guard0 = XRAY_GUARD_VALUE_0x12345678;
capture->guard1 = XRAY_GUARD_VALUE_0x12345678;
capture->guard2 = XRAY_GUARD_VALUE_0x87654321;
capture->guard3 = XRAY_GUARD_VALUE_0x12345678;
capture->initialized = true;
capture->recording = false;
XRaySetMaxStackDepth(capture, stack_depth);
XRayReset(capture);
/* Mapfile is optional; we don't need it for captures, only for reports. */
capture->symbols =
XRaySymbolTableCreate(XRAY_DEFAULT_SYMBOL_TABLE_SIZE);
if (NULL != mapfilename)
XRaySymbolTableParseMapfile(capture->symbols, mapfilename);
#ifndef XRAY_DISABLE_BROWSER_INTEGRATION
/* Use the address of a thread local variable as a fake thread id. */
capture->thread_id = (int32_t)(&g_xray_thread_id_placeholder);
#endif
return capture;
}
/* Shut down and free memory used by XRay. */
void XRayShutdown(struct XRayTraceCapture* capture) {
assert(capture);
assert(capture->initialized);
assert(!capture->recording);
XRayCheckGuards(capture);
if (NULL != capture->symbols) {
XRaySymbolTableFree(capture->symbols);
}
XRayFree(capture->frame.entry);
XRayFree(capture->buffer);
capture->initialized = false;
XRayFree(capture);
}
#endif /* XRAY */
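/*
 * Editor's note: the sketch below is not part of the original source. It is a
 * minimal, hypothetical usage example that calls only the functions defined
 * above, and it assumes the library was built with XRAY defined. The stack
 * depth, buffer size and frame count are arbitrary placeholder values.
 */
#if defined(XRAY_USAGE_EXAMPLE)  /* illustration only; not compiled by default */
static void xray_example_capture_one_frame(void) {
  /* 16-deep call stack, 1M trace entries, room for 60 frames, no mapfile. */
  struct XRayTraceCapture* capture = XRayInit(16, 1024 * 1024, 60, NULL);
  XRaySetAnnotationFilter(capture, 0xFFFFFFFF);
  XRayStartFrame(capture);
  /* ...instrumented code to be profiled runs here... */
  XRayEndFrame(capture);
  int last_frame = XRayGetLastFrame(capture);
  (void)last_frame;  /* e.g. hand off to a report or save routine */
  XRayShutdown(capture);
}
#endif  /* XRAY_USAGE_EXAMPLE */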
| 9,061 |
34,359 | // PCG Random Number Generation for C++
//
// Copyright 2014-2019 <NAME> <<EMAIL>>,
// and the PCG Project contributors.
//
// SPDX-License-Identifier: (Apache-2.0 OR MIT)
//
// Licensed under the Apache License, Version 2.0 (provided in
// LICENSE-APACHE.txt and at http://www.apache.org/licenses/LICENSE-2.0)
// or under the MIT license (provided in LICENSE-MIT.txt and at
// http://opensource.org/licenses/MIT), at your option. This file may not
// be copied, modified, or distributed except according to those terms.
//
// Distributed on an "AS IS" BASIS, WITHOUT WARRANTY OF ANY KIND, either
// express or implied. See your chosen license for details.
//
// For additional information about the PCG random number generation scheme,
// visit http://www.pcg-random.org/.
//
// -----------------------------------------------------------------------------
//
// <NAME> <<EMAIL>>:
// The following contents are an extract of pcg_engines::oneseq_dxsm_64_32
// reduced down to the bare essentials, while retaining base functionality.
#include <cstdint>
namespace pcg_engines {
class oneseq_dxsm_64_32 {
using xtype = uint32_t;
using itype = uint64_t;
itype state_;
static constexpr uint64_t multiplier() {
return 6364136223846793005ULL;
}
static constexpr uint64_t increment() {
return 1442695040888963407ULL;
}
static itype bump(itype state) {
return state * multiplier() + increment();
}
itype base_generate0() {
itype old_state = state_;
state_ = bump(state_);
return old_state;
}
public:
explicit oneseq_dxsm_64_32(itype state = 0xcafef00dd15ea5e5ULL) : state_(bump(state + increment())) {
}
// Returns a value in the interval [0, UINT32_MAX].
xtype operator()() {
constexpr auto xtypebits = uint8_t(sizeof(xtype) * 8);
constexpr auto itypebits = uint8_t(sizeof(itype) * 8);
auto internal = base_generate0();
auto hi = xtype(internal >> (itypebits - xtypebits));
auto lo = xtype(internal);
lo |= 1;
hi ^= hi >> (xtypebits / 2);
hi *= xtype(multiplier());
hi ^= hi >> (3 * (xtypebits / 4));
hi *= lo;
return hi;
}
// Returns a value in the interval [0, upper_bound).
xtype operator()(xtype upper_bound) {
        // Rejection threshold: 2^32 mod upper_bound, so the accepted range is a multiple of upper_bound.
        uint32_t threshold = (UINT32_MAX + uint32_t(1) - upper_bound) % upper_bound;
for (;;) {
auto r = operator()();
if (r >= threshold)
return r % upper_bound;
}
}
};
}
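// Editor's note: the function below is not part of the upstream extract; it is
// a small usage sketch that relies only on the class defined above. The seed
// is an arbitrary example value.
inline uint32_t pcg_example_roll_die() {
    pcg_engines::oneseq_dxsm_64_32 rng{0x853c49e6748fea9bULL};  // any 64-bit seed
    return rng(6) + 1;  // rng(6) draws uniformly from [0, 6), so the result lies in [1, 6]
}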
| 1,277 |
500 | <reponame>spmallette/blueprints<filename>blueprints-core/src/main/java/com/tinkerpop/blueprints/util/wrappers/batch/cache/StringIDVertexCache.java
package com.tinkerpop.blueprints.util.wrappers.batch.cache;
import com.tinkerpop.blueprints.Graph;
import com.tinkerpop.blueprints.Vertex;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* (c) <NAME> (<EMAIL>)
*/
public class StringIDVertexCache implements VertexCache {
private static final int INITIAL_CAPACITY = 1000;
private static final int INITIAL_TX_CAPACITY = 100;
private final Map<String, Object> map;
private final Set<String> mapKeysInCurrentTx;
private final StringCompression compression;
public StringIDVertexCache(final StringCompression compression) {
if (compression == null) throw new IllegalArgumentException("Compression expected.");
this.compression = compression;
map = new HashMap<String, Object>(INITIAL_CAPACITY);
mapKeysInCurrentTx = new HashSet<String>(INITIAL_TX_CAPACITY);
}
public StringIDVertexCache() {
this(StringCompression.NO_COMPRESSION);
}
@Override
public Object getEntry(Object externalId) {
String id = compression.compress(externalId.toString());
return map.get(id);
}
@Override
public void set(Vertex vertex, Object externalId) {
        setId(vertex, externalId);
}
@Override
public void setId(Object vertexId, Object externalId) {
String id = compression.compress(externalId.toString());
map.put(id, vertexId);
mapKeysInCurrentTx.add(id);
}
@Override
public boolean contains(Object externalId) {
return map.containsKey(compression.compress(externalId.toString()));
}
@Override
public void newTransaction() {
for (String id : mapKeysInCurrentTx) {
Object o = map.get(id);
assert null != o;
if (o instanceof Vertex) {
Vertex v = (Vertex)o;
map.put(id, v.getId());
}
}
mapKeysInCurrentTx.clear();
}
} | 839 |
594 | <reponame>Chengyu-Cui/USTC_CG
#pragma once
#include <Engine/Scene/Component.h>
namespace Ubpa {
class Material;
class CmptMaterial final : public Component {
public:
CmptMaterial(Ptr<SObj> sobj, Ptr<Material> material)
: Component(sobj), material(material) { }
public:
static const Ptr<CmptMaterial> New(Ptr<SObj> sobj, Ptr<Material> material) {
return Ubpa::New<CmptMaterial>(sobj, material);
}
protected:
virtual ~CmptMaterial() = default;
public:
Ptr<Material> material;
};
}
| 197 |
1,247 | package org.andengine.util.adt;
/**
* (c) 2010 <NAME>
* (c) 2011 Zynga Inc.
*
* @author <NAME>
* @since 17:59:55 - 14.07.2011
*/
public interface DataConstants {
// ===========================================================
// Constants
// ===========================================================
public static final int BYTES_PER_BYTE = 1;
public static final int BYTES_PER_SHORT = Short.SIZE / Byte.SIZE;
public static final int BYTES_PER_INT = Integer.SIZE / Byte.SIZE;
public static final int BYTES_PER_LONG = Long.SIZE / Byte.SIZE;
public static final int BYTES_PER_FLOAT = Float.SIZE / Byte.SIZE;
public static final int BYTES_PER_DOUBLE = Double.SIZE / Byte.SIZE;
public static final int BYTES_PER_KILOBYTE = 1024;
public static final int BYTES_PER_MEGABYTE = 1024 * DataConstants.BYTES_PER_KILOBYTE;
public static final int BYTES_PER_GIGABYTE = 1024 * DataConstants.BYTES_PER_MEGABYTE;
public static final int BYTE_TO_KILOBYTE_SHIFT = 10;
public static final int BYTE_TO_MEGABYTE_SHIFT = 20;
public static final int BYTE_TO_GIGABYTE_SHIFT = 30;
public static final short UNSIGNED_BYTE_MAX_VALUE = (short)Byte.MAX_VALUE - (short)Byte.MIN_VALUE;
public static final int UNSIGNED_SHORT_MAX_VALUE = (int)Short.MAX_VALUE - (int)Short.MIN_VALUE;
public static final long UNSIGNED_INT_MAX_VALUE = (long)Integer.MAX_VALUE - (long)Integer.MIN_VALUE;
public static final int BITS_PER_BYTE = 8;
public static final int BITS_PER_SHORT = BYTES_PER_SHORT * BITS_PER_BYTE;
public static final int BITS_PER_INT = BYTES_PER_INT * BITS_PER_BYTE;
public static final int BITS_PER_LONG = BYTES_PER_LONG * BITS_PER_BYTE;
public static final int BITS_PER_FLOAT = BYTES_PER_FLOAT * BITS_PER_BYTE;
public static final int BITS_PER_DOUBLE = BYTES_PER_DOUBLE * BITS_PER_BYTE;
// ===========================================================
// Methods
// ===========================================================
}
| 676 |
1,350 | <gh_stars>1000+
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.cognitiveservices.language.luis.authoring.models;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* Explicit (exception) list item.
*/
public class ExplicitListItem {
/**
* The explicit list item ID.
*/
@JsonProperty(value = "id")
private Long id;
/**
* The explicit list item value.
*/
@JsonProperty(value = "explicitListItem")
private String explicitListItem;
/**
* Get the id value.
*
* @return the id value
*/
public Long id() {
return this.id;
}
/**
* Set the id value.
*
* @param id the id value to set
* @return the ExplicitListItem object itself.
*/
public ExplicitListItem withId(Long id) {
this.id = id;
return this;
}
/**
* Get the explicitListItem value.
*
* @return the explicitListItem value
*/
public String explicitListItem() {
return this.explicitListItem;
}
/**
* Set the explicitListItem value.
*
* @param explicitListItem the explicitListItem value to set
* @return the ExplicitListItem object itself.
*/
public ExplicitListItem withExplicitListItem(String explicitListItem) {
this.explicitListItem = explicitListItem;
return this;
}
}
| 589 |
1,042 | /*
* Copyright (c) scott.cgi All Rights Reserved.
*
* This source code belongs to project Mojoc, which is a pure C Game Engine hosted on GitHub.
* The Mojoc Game Engine is licensed under the MIT License, and will continue to be iterated with coding passion.
*
* License : https://github.com/scottcgi/Mojoc/blob/master/LICENSE
* GitHub : https://github.com/scottcgi/Mojoc
* CodeStyle: https://github.com/scottcgi/Mojoc/blob/master/Docs/CodeStyle.md
*
* Since : 2018-12-6
* Update : 2019-1-28
* Author : scott.cgi
*/
#ifndef DEVICE_INFO_H
#define DEVICE_INFO_H
#include <stdbool.h>
/**
 * Get system info from the native platform.
*/
struct ASystemInfo
{
/**
* The outLanguageCode will be filled with an array of two characters.
*/
void (*GetLanguageCode)(char outLanguageCode[2]);
};
extern struct ASystemInfo ASystemInfo[1];
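/**
 * Editor's note: usage sketch, not part of the original header. It assumes the
 * Mojoc platform layer has installed the platform-specific implementation of
 * GetLanguageCode before it is called:
 *
 *     char code[2];
 *     ASystemInfo->GetLanguageCode(code); // fills e.g. {'e', 'n'}; not NUL-terminated
 */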
#endif
| 302 |
303 | <reponame>davidmoten/rxjava-extras
package com.github.davidmoten.rx.testing;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import rx.Observable;
import rx.functions.Func1;
public class TestingHelperConcatTest extends TestCase {
public static TestSuite suite() {
return TestingHelper.function(CONCAT)
// test empty
.name("testConcatWithEmptyReturnsThree").fromEmpty().expect(1, 2, 3)
// test error
.name("testConcatErrorReturnsError").fromError().expectError()
// test error after some emission
.name("testConcatErrorAfterTwoEmissionsReturnsError").fromErrorAfter(5, 6)
.expectError()
// test non-empty count
.name("testConcatWithTwoReturnsFive").from(5, 6).expect(5, 6, 1, 2, 3)
// test single input
.name("testConcatWithOneReturnsFour").from(5).expect(5, 1, 2, 3)
// get test suites
.testSuite(TestingHelperConcatTest.class);
}
public void testDummy() {
// just here to fool eclipse
}
private static final Func1<Observable<Integer>, Observable<Integer>> CONCAT = new Func1<Observable<Integer>, Observable<Integer>>() {
@Override
public Observable<Integer> call(Observable<Integer> o) {
return o.concatWith(Observable.just(1, 2, 3));
}
};
}
| 648 |
763 | <reponame>zabrewer/batfish
package org.batfish.datamodel.matchers;
import org.batfish.datamodel.Flow;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.IpProtocol;
import org.hamcrest.FeatureMatcher;
import org.hamcrest.Matcher;
public final class FlowMatchersImpl {
private FlowMatchersImpl() {}
public static final class HasDstIp extends FeatureMatcher<Flow, Ip> {
HasDstIp(Matcher<? super Ip> subMatcher) {
super(subMatcher, "A Flow with dstIp:", "dstIp");
}
@Override
protected Ip featureValueOf(Flow flow) {
return flow.getDstIp();
}
}
public static final class HasDstPort extends FeatureMatcher<Flow, Integer> {
HasDstPort(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with dstPort:", "dstPort");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getDstPort();
}
}
public static final class HasIcmpCode extends FeatureMatcher<Flow, Integer> {
HasIcmpCode(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with icmpCode:", "icmpCode");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getIcmpCode();
}
}
public static final class HasIcmpType extends FeatureMatcher<Flow, Integer> {
HasIcmpType(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with icmpType:", "icmpType");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getIcmpType();
}
}
public static final class HasIngressInterface extends FeatureMatcher<Flow, String> {
HasIngressInterface(Matcher<? super String> subMatcher) {
super(subMatcher, "A Flow with ingressInterface:", "ingressInterface");
}
@Override
protected String featureValueOf(Flow flow) {
return flow.getIngressInterface();
}
}
public static final class HasIngressNode extends FeatureMatcher<Flow, String> {
HasIngressNode(Matcher<? super String> subMatcher) {
super(subMatcher, "A Flow with ingressNode:", "ingressNode");
}
@Override
protected String featureValueOf(Flow flow) {
return flow.getIngressNode();
}
}
public static final class HasIngressVrf extends FeatureMatcher<Flow, String> {
HasIngressVrf(Matcher<? super String> subMatcher) {
super(subMatcher, "A Flow with ingressVrf:", "ingressVrf");
}
@Override
protected String featureValueOf(Flow flow) {
return flow.getIngressVrf();
}
}
public static final class HasIpProtocol extends FeatureMatcher<Flow, IpProtocol> {
HasIpProtocol(Matcher<? super IpProtocol> subMatcher) {
super(subMatcher, "A Flow with ipProtocol:", "ipProtocol");
}
@Override
protected IpProtocol featureValueOf(Flow flow) {
return flow.getIpProtocol();
}
}
public static final class HasSrcIp extends FeatureMatcher<Flow, Ip> {
HasSrcIp(Matcher<? super Ip> subMatcher) {
super(subMatcher, "A Flow with srcIp:", "srcIp");
}
@Override
protected Ip featureValueOf(Flow flow) {
return flow.getSrcIp();
}
}
public static final class HasSrcPort extends FeatureMatcher<Flow, Integer> {
HasSrcPort(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with srcPort:", "srcPort");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getSrcPort();
}
}
public static final class HasTcpFlagsAck extends FeatureMatcher<Flow, Integer> {
HasTcpFlagsAck(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with tcpFlagsAck:", "tcpFlagsAck");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getTcpFlagsAck();
}
}
public static final class HasTcpFlagsCwr extends FeatureMatcher<Flow, Integer> {
HasTcpFlagsCwr(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with tcpFlagsCwr:", "tcpFlagsCwr");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getTcpFlagsCwr();
}
}
public static final class HasTcpFlagsEce extends FeatureMatcher<Flow, Integer> {
HasTcpFlagsEce(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with tcpFlagsEce:", "tcpFlagsEce");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getTcpFlagsEce();
}
}
public static final class HasTcpFlagsFin extends FeatureMatcher<Flow, Integer> {
HasTcpFlagsFin(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with tcpFlagsFin:", "tcpFlagsFin");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getTcpFlagsFin();
}
}
public static final class HasTcpFlagsPsh extends FeatureMatcher<Flow, Integer> {
HasTcpFlagsPsh(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with tcpFlagsPsh:", "tcpFlagsPsh");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getTcpFlagsPsh();
}
}
public static final class HasTcpFlagsRst extends FeatureMatcher<Flow, Integer> {
HasTcpFlagsRst(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with tcpFlagsRst:", "tcpFlagsRst");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getTcpFlagsRst();
}
}
public static final class HasTcpFlagsUrg extends FeatureMatcher<Flow, Integer> {
HasTcpFlagsUrg(Matcher<? super Integer> subMatcher) {
super(subMatcher, "A Flow with tcpFlagsUrg:", "tcpFlagsUrg");
}
@Override
protected Integer featureValueOf(Flow flow) {
return flow.getTcpFlagsUrg();
}
}
}
| 2,125 |
841 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.services.ejb.remote.api;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.apache.commons.io.input.ClassLoaderObjectInputStream;
public abstract class AbstractRemoteObject {
private transient ClassLoader classLoader = this.getClass().getClassLoader();
public void setClassLoader(ClassLoader classLoader) {
this.classLoader = classLoader;
}
protected Object deserialize(byte[] bytes) {
Object result = null;
ObjectInputStream in = null;
try {
in = new ClassLoaderObjectInputStream(classLoader, new ByteArrayInputStream(bytes));
result = in.readObject();
} catch (Exception e) {
throw new RuntimeException("Unable to deserialize stream ", e);
} finally {
if (in != null) {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return result;
}
protected byte[] serialize(Object input) {
try {
ByteArrayOutputStream bout = new ByteArrayOutputStream();
ObjectOutputStream oout = new ObjectOutputStream(bout);
oout.writeObject(input);
return bout.toByteArray();
} catch (Exception e) {
throw new RuntimeException("Unable to serialize object " + input, e);
}
}
}
| 742 |
4,054 | <gh_stars>1000+
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.opengl;
import org.lwjgl.system.*;
/**
* Native bindings to the <a target="_blank" href="https://www.khronos.org/registry/OpenGL/extensions/ARB/ARB_texture_storage.txt">ARB_texture_storage</a> extension.
*
* <p>The texture image specification commands in OpenGL allow each level to be separately specified with different sizes, formats, types and so on, and only
* imposes consistency checks at draw time. This adds overhead for implementations.</p>
*
* <p>This extension provides a mechanism for specifying the entire structure of a texture in a single call, allowing certain consistency checks and memory
* allocations to be done up front. Once specified, the format and dimensions of the image array become immutable, to simplify completeness checks in the
* implementation.</p>
*
* <p>When using this extension, it is no longer possible to supply texture data using TexImage*. Instead, data can be uploaded using TexSubImage*, or
* produced by other means (such as render-to-texture, mipmap generation, or rendering to a sibling EGLImage).</p>
*
* <p>This extension has complicated interactions with other extensions. The goal of most of these interactions is to ensure that a texture is always mipmap
* complete (and cube complete for cubemap textures).</p>
*
* <p>Requires {@link GL12 OpenGL 1.2}. Promoted to core in {@link GL42 OpenGL 4.2}.</p>
*/
public class ARBTextureStorage {
static { GL.initialize(); }
/** Accepted by the {@code value} parameter of GetTexParameter{if}v. */
public static final int GL_TEXTURE_IMMUTABLE_FORMAT = 0x912F;
protected ARBTextureStorage() {
throw new UnsupportedOperationException();
}
// --- [ glTexStorage1D ] ---
/**
* Simultaneously specifies storage for all levels of a one-dimensional texture.
*
* @param target the target of the operation. One of:<br><table><tr><td>{@link GL11#GL_TEXTURE_1D TEXTURE_1D}</td><td>{@link GL11#GL_PROXY_TEXTURE_1D PROXY_TEXTURE_1D}</td></tr></table>
* @param levels the number of texture levels
* @param internalformat the sized internal format to be used to store texture image data
* @param width the width of the texture, in texels
*/
public static void glTexStorage1D(@NativeType("GLenum") int target, @NativeType("GLsizei") int levels, @NativeType("GLenum") int internalformat, @NativeType("GLsizei") int width) {
GL42C.glTexStorage1D(target, levels, internalformat, width);
}
// --- [ glTexStorage2D ] ---
/**
* Simultaneously specifies storage for all levels of a two-dimensional or one-dimensional array texture.
*
* @param target the target of the operation. One of:<br><table><tr><td>{@link GL11#GL_TEXTURE_2D TEXTURE_2D}</td><td>{@link GL30#GL_TEXTURE_1D_ARRAY TEXTURE_1D_ARRAY}</td><td>{@link GL31#GL_TEXTURE_RECTANGLE TEXTURE_RECTANGLE}</td><td>{@link GL13#GL_TEXTURE_CUBE_MAP TEXTURE_CUBE_MAP}</td></tr><tr><td>{@link GL11#GL_PROXY_TEXTURE_2D PROXY_TEXTURE_2D}</td><td>{@link GL30#GL_PROXY_TEXTURE_1D_ARRAY PROXY_TEXTURE_1D_ARRAY}</td><td>{@link GL31#GL_PROXY_TEXTURE_RECTANGLE PROXY_TEXTURE_RECTANGLE}</td><td>{@link GL13#GL_PROXY_TEXTURE_CUBE_MAP PROXY_TEXTURE_CUBE_MAP}</td></tr></table>
* @param levels the number of texture levels
* @param internalformat the sized internal format to be used to store texture image data
* @param width the width of the texture, in texels
* @param height the height of the texture, in texels
*/
public static void glTexStorage2D(@NativeType("GLenum") int target, @NativeType("GLsizei") int levels, @NativeType("GLenum") int internalformat, @NativeType("GLsizei") int width, @NativeType("GLsizei") int height) {
GL42C.glTexStorage2D(target, levels, internalformat, width, height);
}
// --- [ glTexStorage3D ] ---
/**
* Simultaneously specifies storage for all levels of a three-dimensional, two-dimensional array or cube-map array texture.
*
* @param target the target of the operation. One of:<br><table><tr><td>{@link GL12#GL_TEXTURE_3D TEXTURE_3D}</td><td>{@link GL30#GL_TEXTURE_2D_ARRAY TEXTURE_2D_ARRAY}</td><td>{@link GL40#GL_TEXTURE_CUBE_MAP_ARRAY TEXTURE_CUBE_MAP_ARRAY}</td><td>{@link GL12#GL_PROXY_TEXTURE_3D PROXY_TEXTURE_3D}</td></tr><tr><td>{@link GL30#GL_PROXY_TEXTURE_2D_ARRAY PROXY_TEXTURE_2D_ARRAY}</td><td>{@link GL40#GL_PROXY_TEXTURE_CUBE_MAP_ARRAY PROXY_TEXTURE_CUBE_MAP_ARRAY}</td></tr></table>
* @param levels the number of texture levels
* @param internalformat the sized internal format to be used to store texture image data
* @param width the width of the texture, in texels
* @param height the height of the texture, in texels
* @param depth the depth of the texture, in texels
*/
public static void glTexStorage3D(@NativeType("GLenum") int target, @NativeType("GLsizei") int levels, @NativeType("GLenum") int internalformat, @NativeType("GLsizei") int width, @NativeType("GLsizei") int height, @NativeType("GLsizei") int depth) {
GL42C.glTexStorage3D(target, levels, internalformat, width, height, depth);
}
// --- [ glTextureStorage1DEXT ] ---
/**
* DSA version of {@link #glTexStorage1D TexStorage1D}.
*
* @param texture the texture object to update
* @param target the target of the operation. One of:<br><table><tr><td>{@link GL11#GL_TEXTURE_1D TEXTURE_1D}</td><td>{@link GL11#GL_PROXY_TEXTURE_1D PROXY_TEXTURE_1D}</td></tr></table>
* @param levels the number of texture levels
* @param internalformat the sized internal format to be used to store texture image data
* @param width the width of the texture, in texels
*/
public static native void glTextureStorage1DEXT(@NativeType("GLuint") int texture, @NativeType("GLenum") int target, @NativeType("GLsizei") int levels, @NativeType("GLenum") int internalformat, @NativeType("GLsizei") int width);
// --- [ glTextureStorage2DEXT ] ---
/**
* DSA version of {@link #glTexStorage2D TexStorage2D}.
*
* @param texture the texture object to update
* @param target the target of the operation. One of:<br><table><tr><td>{@link GL11#GL_TEXTURE_2D TEXTURE_2D}</td><td>{@link GL30#GL_TEXTURE_1D_ARRAY TEXTURE_1D_ARRAY}</td><td>{@link GL31#GL_TEXTURE_RECTANGLE TEXTURE_RECTANGLE}</td><td>{@link GL13#GL_TEXTURE_CUBE_MAP TEXTURE_CUBE_MAP}</td></tr><tr><td>{@link GL11#GL_PROXY_TEXTURE_2D PROXY_TEXTURE_2D}</td><td>{@link GL30#GL_PROXY_TEXTURE_1D_ARRAY PROXY_TEXTURE_1D_ARRAY}</td><td>{@link GL31#GL_PROXY_TEXTURE_RECTANGLE PROXY_TEXTURE_RECTANGLE}</td><td>{@link GL13#GL_PROXY_TEXTURE_CUBE_MAP PROXY_TEXTURE_CUBE_MAP}</td></tr></table>
* @param levels the number of texture levels
* @param internalformat the sized internal format to be used to store texture image data
* @param width the width of the texture, in texels
* @param height the height of the texture, in texels
*/
public static native void glTextureStorage2DEXT(@NativeType("GLuint") int texture, @NativeType("GLenum") int target, @NativeType("GLsizei") int levels, @NativeType("GLenum") int internalformat, @NativeType("GLsizei") int width, @NativeType("GLsizei") int height);
// --- [ glTextureStorage3DEXT ] ---
/**
* DSA version of {@link #glTexStorage3D TexStorage3D}.
*
* @param texture the texture object to update
* @param target the target of the operation. One of:<br><table><tr><td>{@link GL12#GL_TEXTURE_3D TEXTURE_3D}</td><td>{@link GL30#GL_TEXTURE_2D_ARRAY TEXTURE_2D_ARRAY}</td><td>{@link GL40#GL_TEXTURE_CUBE_MAP_ARRAY TEXTURE_CUBE_MAP_ARRAY}</td><td>{@link GL12#GL_PROXY_TEXTURE_3D PROXY_TEXTURE_3D}</td></tr><tr><td>{@link GL30#GL_PROXY_TEXTURE_2D_ARRAY PROXY_TEXTURE_2D_ARRAY}</td><td>{@link GL40#GL_PROXY_TEXTURE_CUBE_MAP_ARRAY PROXY_TEXTURE_CUBE_MAP_ARRAY}</td></tr></table>
* @param levels the number of texture levels
* @param internalformat the sized internal format to be used to store texture image data
* @param width the width of the texture, in texels
* @param height the height of the texture, in texels
* @param depth the depth of the texture, in texels
*/
public static native void glTextureStorage3DEXT(@NativeType("GLuint") int texture, @NativeType("GLenum") int target, @NativeType("GLsizei") int levels, @NativeType("GLenum") int internalformat, @NativeType("GLsizei") int width, @NativeType("GLsizei") int height, @NativeType("GLsizei") int depth);
} | 3,231 |
872 | <reponame>phoemark2o20/Malware-Analysis-Training
# IDA Sync Server Constants
# Copyright (C) 2005 <NAME> <<EMAIL>>
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
class server_constants:
# IDA Sync constants
JUMPTO = 0x01
NAME = 0x02
REG_COMMENT = 0x04
REP_COMMENT = 0x08
STACK_NAME = 0x10
# connection record constants
SOCK = 0
MODULE = 1
PROJECT = 2
USERNAME = 3
TIMESTAMP = 4 | 409 |
851 | /*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <string>
#include "rtc_base/experiments/field_trial_parser.h"
#include "rtc_base/experiments/field_trial_units.h"
#include "rtc_base/gunit.h"
namespace webrtc {
namespace {
struct DummyExperiment {
FieldTrialParameter<DataRate> target_rate =
FieldTrialParameter<DataRate>("t", DataRate::kbps(100));
FieldTrialParameter<TimeDelta> period =
FieldTrialParameter<TimeDelta>("p", TimeDelta::ms(100));
FieldTrialOptional<DataSize> max_buffer =
FieldTrialOptional<DataSize>("b", absl::nullopt);
explicit DummyExperiment(std::string field_trial) {
ParseFieldTrial({&target_rate, &max_buffer, &period}, field_trial);
}
};
} // namespace
TEST(FieldTrialParserUnitsTest, FallsBackToDefaults) {
DummyExperiment exp("");
EXPECT_EQ(exp.target_rate.Get(), DataRate::kbps(100));
EXPECT_FALSE(exp.max_buffer.GetOptional().has_value());
EXPECT_EQ(exp.period.Get(), TimeDelta::ms(100));
}
TEST(FieldTrialParserUnitsTest, ParsesUnitParameters) {
DummyExperiment exp("t:300kbps,b:5bytes,p:300ms");
EXPECT_EQ(exp.target_rate.Get(), DataRate::kbps(300));
EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::bytes(5));
EXPECT_EQ(exp.period.Get(), TimeDelta::ms(300));
}
TEST(FieldTrialParserUnitsTest, ParsesDefaultUnitParameters) {
DummyExperiment exp("t:300,b:5,p:300");
EXPECT_EQ(exp.target_rate.Get(), DataRate::kbps(300));
EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::bytes(5));
EXPECT_EQ(exp.period.Get(), TimeDelta::ms(300));
}
TEST(FieldTrialParserUnitsTest, ParsesInfinityParameter) {
DummyExperiment exp("t:inf,p:inf");
EXPECT_EQ(exp.target_rate.Get(), DataRate::Infinity());
EXPECT_EQ(exp.period.Get(), TimeDelta::PlusInfinity());
}
TEST(FieldTrialParserUnitsTest, ParsesOtherUnitParameters) {
DummyExperiment exp("t:300bps,p:0.3 seconds,b:8 bytes");
EXPECT_EQ(exp.target_rate.Get(), DataRate::bps(300));
EXPECT_EQ(*exp.max_buffer.GetOptional(), DataSize::bytes(8));
EXPECT_EQ(exp.period.Get(), TimeDelta::ms(300));
}
} // namespace webrtc
| 872 |
5,316 | <reponame>pgliaskovitis/aerosolve
package com.airbnb.aerosolve.core.transforms;
import com.airbnb.aerosolve.core.FeatureVector;
import com.typesafe.config.Config;
import java.io.Serializable;
import java.util.stream.Stream;
/**
* Created by hector_yee on 8/25/14.
* Base class for feature transforms.
*/
public interface Transform extends Serializable {
/**
* Configure the transform from the supplied config and key. <p> This is where initialization
* should take place. Ideally we want this to be a constructor instead or use a builder pattern.
*/
void configure(Config config, String key);
/**
* Apply this transform to a single feature vector.
*/
void doTransform(FeatureVector featureVector);
/**
* Applies this transform to a series of featureVector.
*
* @implNote this function can be overridden if the transform can be applied much more efficiency
* in (small) batches If such implementation exists, one would typically override the single
* feature vector implementation with the following instead:
* <pre> <code>
* @Override
* public void doTransform(FeatureVector featureVector) {
* doTransform(Stream.of(featureVector));
* }
* </code> </pre>
*/
default void doTransform(Iterable<FeatureVector> featureVectors) {
featureVectors.forEach(this::doTransform);
}
}
| 398 |
562 | <reponame>mostafaelhoushi/CompilerGym
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Unit tests for //compiler_gym/util:capture_output."""
import sys
from compiler_gym.util.capture_output import capture_output
from tests.test_main import main
def test_capture_print_statements():
with capture_output() as out:
print("Hello")
print("World!", file=sys.stderr)
assert out.stdout == "Hello\n"
assert out.stderr == "World!\n"
def test_nested_capture():
with capture_output() as outer:
with capture_output() as inner:
print("Hello")
print("World!")
assert inner.stdout == "Hello\n"
assert outer.stdout == "World!\n"
if __name__ == "__main__":
main()
| 317 |
5,754 | <gh_stars>1000+
import sys
import pytest
import shutil
from pathlib import Path
from cookiecutter import main
CCDS_ROOT = Path(__file__).parents[1].resolve()
args = {
'project_name': 'DrivenData',
'author_name': 'DrivenData',
'open_source_license': 'BSD-3-Clause',
'python_interpreter': 'python'
}
def system_check(basename):
platform = sys.platform
if 'linux' in platform:
basename = basename.lower()
return basename
@pytest.fixture(scope='class', params=[{}, args])
def default_baked_project(tmpdir_factory, request):
temp = tmpdir_factory.mktemp('data-project')
out_dir = Path(temp).resolve()
pytest.param = request.param
main.cookiecutter(
str(CCDS_ROOT),
no_input=True,
extra_context=pytest.param,
output_dir=out_dir
)
pn = pytest.param.get('project_name') or 'project_name'
# project name gets converted to lower case on Linux but not Mac
pn = system_check(pn)
proj = out_dir / pn
request.cls.path = proj
yield
# cleanup after
shutil.rmtree(out_dir) | 468 |
10,433 | #ifndef skynet_hashid_h
#define skynet_hashid_h
#include "skynet_malloc.h"	/* assumed skynet allocator header providing skynet_malloc()/skynet_free() used below */
#include <assert.h>
#include <stdlib.h>
#include <string.h>
struct hashid_node {
int id;
struct hashid_node *next;
};
struct hashid {
int hashmod;
int cap;
int count;
struct hashid_node *id;
struct hashid_node **hash;
};
static void
hashid_init(struct hashid *hi, int max) {
int i;
int hashcap;
hashcap = 16;
while (hashcap < max) {
hashcap *= 2;
}
hi->hashmod = hashcap - 1;
hi->cap = max;
hi->count = 0;
hi->id = skynet_malloc(max * sizeof(struct hashid_node));
for (i=0;i<max;i++) {
hi->id[i].id = -1;
hi->id[i].next = NULL;
}
hi->hash = skynet_malloc(hashcap * sizeof(struct hashid_node *));
memset(hi->hash, 0, hashcap * sizeof(struct hashid_node *));
}
static void
hashid_clear(struct hashid *hi) {
skynet_free(hi->id);
skynet_free(hi->hash);
hi->id = NULL;
hi->hash = NULL;
hi->hashmod = 1;
hi->cap = 0;
hi->count = 0;
}
static int
hashid_lookup(struct hashid *hi, int id) {
int h = id & hi->hashmod;
struct hashid_node * c = hi->hash[h];
while(c) {
if (c->id == id)
return c - hi->id;
c = c->next;
}
return -1;
}
static int
hashid_remove(struct hashid *hi, int id) {
int h = id & hi->hashmod;
struct hashid_node * c = hi->hash[h];
if (c == NULL)
return -1;
if (c->id == id) {
hi->hash[h] = c->next;
goto _clear;
}
while(c->next) {
if (c->next->id == id) {
struct hashid_node * temp = c->next;
c->next = temp->next;
c = temp;
goto _clear;
}
c = c->next;
}
return -1;
_clear:
c->id = -1;
c->next = NULL;
--hi->count;
return c - hi->id;
}
static int
hashid_insert(struct hashid * hi, int id) {
struct hashid_node *c = NULL;
int i;
for (i=0;i<hi->cap;i++) {
int index = (i+id) % hi->cap;
if (hi->id[index].id == -1) {
c = &hi->id[index];
break;
}
}
assert(c);
++hi->count;
c->id = id;
assert(c->next == NULL);
int h = id & hi->hashmod;
if (hi->hash[h]) {
c->next = hi->hash[h];
}
hi->hash[h] = c;
return c - hi->id;
}
static inline int
hashid_full(struct hashid *hi) {
return hi->count == hi->cap;
}
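/*
 * Editor's note: usage sketch, not part of the original header. hashid keeps a
 * fixed-capacity slot array indexed by a chained hash table; the calls below
 * use only the functions defined above, and skynet_malloc/skynet_free must be
 * provided by the build (they come from skynet's allocator).
 */
static inline void
hashid_example(void) {
	struct hashid h;
	hashid_init(&h, 1024);                 /* capacity 1024 ids, 1024 hash buckets */
	int slot = hashid_insert(&h, 42);      /* returns the slot index assigned to id 42 */
	assert(slot == hashid_lookup(&h, 42)); /* lookup finds the same slot */
	assert(!hashid_full(&h));
	hashid_remove(&h, 42);                 /* slot released; lookup now returns -1 */
	hashid_clear(&h);                      /* frees the id and hash arrays */
}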
#endif
| 977 |
530 | package org.carlspring.strongbox.yaml.configuration.repository;
import org.carlspring.strongbox.yaml.repository.RepositoryConfiguration;
public interface NugetRepositoryConfiguration extends RepositoryConfiguration
{
String getFeedVersion();
Integer getRemoteFeedPageSize();
} | 81 |
513 | <reponame>HarrisJT/plibsys
/*
* The MIT License
*
* Copyright (C) 2017 <NAME> <<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* 'Software'), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include "perror.h"
#include "pfile.h"
#include "plibraryloader.h"
#include "pmem.h"
#include "pstring.h"
#define INCL_DOSMODULEMGR
#define INCL_DOSERRORS
#include <os2.h>
typedef HMODULE plibrary_handle;
struct PLibraryLoader_ {
plibrary_handle handle;
APIRET last_error;
};
static void pp_library_loader_clean_handle (plibrary_handle handle);
static void
pp_library_loader_clean_handle (plibrary_handle handle)
{
APIRET ulrc;
while ((ulrc = DosFreeModule (handle)) == ERROR_INTERRUPT)
;
if (P_UNLIKELY (ulrc != NO_ERROR))
P_ERROR ("PLibraryLoader::pp_library_loader_clean_handle: DosFreeModule() failed");
}
P_LIB_API PLibraryLoader *
p_library_loader_new (const pchar *path)
{
PLibraryLoader *loader = NULL;
plibrary_handle handle = NULLHANDLE;
UCHAR load_err[256];
APIRET ulrc;
if (!p_file_is_exists (path))
return NULL;
while ((ulrc = DosLoadModule ((PSZ) load_err,
sizeof (load_err),
(PSZ) path,
(PHMODULE) &handle)) == ERROR_INTERRUPT)
;
if (P_UNLIKELY (ulrc != NO_ERROR)) {
P_ERROR ("PLibraryLoader::p_library_loader_new: DosLoadModule() failed");
return NULL;
}
if (P_UNLIKELY ((loader = p_malloc0 (sizeof (PLibraryLoader))) == NULL)) {
P_ERROR ("PLibraryLoader::p_library_loader_new: failed to allocate memory");
pp_library_loader_clean_handle (handle);
return NULL;
}
loader->handle = handle;
loader->last_error = NO_ERROR;
return loader;
}
P_LIB_API PFuncAddr
p_library_loader_get_symbol (PLibraryLoader *loader, const pchar *sym)
{
PFN func_addr = NULL;
APIRET ulrc;
if (P_UNLIKELY (loader == NULL || sym == NULL || loader->handle == NULL))
return NULL;
if (P_UNLIKELY ((ulrc = DosQueryProcAddr (loader->handle, 0, (PSZ) sym, &func_addr)) != NO_ERROR)) {
P_ERROR ("PLibraryLoader::p_library_loader_get_symbol: DosQueryProcAddr() failed");
loader->last_error = ulrc;
return NULL;
}
loader->last_error = NO_ERROR;
return (PFuncAddr) func_addr;
}
P_LIB_API void
p_library_loader_free (PLibraryLoader *loader)
{
if (P_UNLIKELY (loader == NULL))
return;
pp_library_loader_clean_handle (loader->handle);
p_free (loader);
}
P_LIB_API pchar *
p_library_loader_get_last_error (PLibraryLoader *loader)
{
if (loader == NULL)
return NULL;
switch (loader->last_error) {
case NO_ERROR:
return NULL;
case ERROR_INVALID_HANDLE:
return p_strdup ("Invalid resource handler");
case ERROR_INVALID_NAME:
return p_strdup ("Invalid procedure name");
default:
return p_strdup ("Unknown error");
}
}
P_LIB_API pboolean
p_library_loader_is_ref_counted (void)
{
return TRUE;
}
void
p_library_loader_init (void)
{
}
void
p_library_loader_shutdown (void)
{
}
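/*
 * Editor's note: the function below is not part of the original source; it is
 * a usage sketch built only on the public API implemented above. The module
 * path and symbol name are hypothetical placeholders.
 */
static void
pp_library_loader_example (void)
{
	PLibraryLoader	*loader;
	PFuncAddr	func;
	loader = p_library_loader_new ("example.dll");
	if (loader == NULL)
		return;
	func = p_library_loader_get_symbol (loader, "example_symbol");
	if (func == NULL) {
		pchar *error = p_library_loader_get_last_error (loader);
		/* ...log the error... */
		p_free (error);	/* the error string is owned by the caller */
	}
	p_library_loader_free (loader);
}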
| 1,396 |
5,169 | <reponame>Gantios/Specs
{
"name": "WrappedValue",
"version": "0.1.0",
"summary": "Forget the struggle of decoding Strings!",
"swift_version": "4.2",
"description": "Is your RESTful API returning Strings instead of Doubles? Are you tired of writing all that custom decoding? Let WrappedValue do it for you!",
"homepage": "https://github.com/cmtrounce/WrappedValue",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"cmtrounce": "<EMAIL>"
},
"source": {
"git": "https://github.com/cmtrounce/WrappedValue.git",
"tag": "0.1.0"
},
"platforms": {
"ios": "8.0"
},
"source_files": "WrappedValue/Classes/**/*"
}
| 267 |