// Source: iabernikhin/VK-GL-CTS
#ifndef _VKTMULTIVIEWRENDERPASSUTIL_HPP
#define _VKTMULTIVIEWRENDERPASSUTIL_HPP
/*------------------------------------------------------------------------
* Vulkan Conformance Tests
* ------------------------
*
* Copyright (c) 2018 The Khronos Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*//*!
* \file
* \brief RenderPass utils
*//*--------------------------------------------------------------------*/
#include "tcuDefs.hpp"
#include "vkRef.hpp"
#include "vkDefs.hpp"
#include "vkTypeUtil.hpp"
namespace vkt
{
namespace MultiView
{
using namespace vk;
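// Thin constructor helpers for the multiview render pass tests: the *1 types wrap
// the original render pass structures and entry points, while the *2 types wrap
// the corresponding *2 structures used with the renderpass2 code path.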
class AttachmentDescription1 : public vk::VkAttachmentDescription
{
public:
AttachmentDescription1 (const void* pNext,
VkAttachmentDescriptionFlags flags,
VkFormat format,
VkSampleCountFlagBits samples,
VkAttachmentLoadOp loadOp,
VkAttachmentStoreOp storeOp,
VkAttachmentLoadOp stencilLoadOp,
VkAttachmentStoreOp stencilStoreOp,
VkImageLayout initialLayout,
VkImageLayout finalLayout);
};
class AttachmentDescription2 : public vk::VkAttachmentDescription2
{
public:
AttachmentDescription2 (const void* pNext,
VkAttachmentDescriptionFlags flags,
VkFormat format,
VkSampleCountFlagBits samples,
VkAttachmentLoadOp loadOp,
VkAttachmentStoreOp storeOp,
VkAttachmentLoadOp stencilLoadOp,
VkAttachmentStoreOp stencilStoreOp,
VkImageLayout initialLayout,
VkImageLayout finalLayout);
};
class AttachmentReference1 : public vk::VkAttachmentReference
{
public:
AttachmentReference1 (const void* pNext,
deUint32 attachment,
VkImageLayout layout,
VkImageAspectFlags aspectMask);
};
class AttachmentReference2 : public vk::VkAttachmentReference2
{
public:
AttachmentReference2 (const void* pNext,
deUint32 attachment,
VkImageLayout layout,
VkImageAspectFlags aspectMask);
};
class SubpassDescription1 : public vk::VkSubpassDescription
{
public:
SubpassDescription1 (const void* pNext,
VkSubpassDescriptionFlags flags,
VkPipelineBindPoint pipelineBindPoint,
deUint32 viewMask,
deUint32 inputAttachmentCount,
const VkAttachmentReference* pInputAttachments,
deUint32 colorAttachmentCount,
const VkAttachmentReference* pColorAttachments,
const VkAttachmentReference* pResolveAttachments,
const VkAttachmentReference* pDepthStencilAttachment,
deUint32 preserveAttachmentCount,
const deUint32* pPreserveAttachments);
};
class SubpassDescription2 : public vk::VkSubpassDescription2
{
public:
SubpassDescription2 (const void* pNext,
VkSubpassDescriptionFlags flags,
VkPipelineBindPoint pipelineBindPoint,
deUint32 viewMask,
deUint32 inputAttachmentCount,
const VkAttachmentReference2* pInputAttachments,
deUint32 colorAttachmentCount,
const VkAttachmentReference2* pColorAttachments,
const VkAttachmentReference2* pResolveAttachments,
const VkAttachmentReference2* pDepthStencilAttachment,
deUint32 preserveAttachmentCount,
const deUint32* pPreserveAttachments);
};
class SubpassDependency1 : public vk::VkSubpassDependency
{
public:
SubpassDependency1 (const void* pNext,
deUint32 srcSubpass,
deUint32 dstSubpass,
VkPipelineStageFlags srcStageMask,
VkPipelineStageFlags dstStageMask,
VkAccessFlags srcAccessMask,
VkAccessFlags dstAccessMask,
VkDependencyFlags dependencyFlags,
deInt32 viewOffset);
};
class SubpassDependency2 : public vk::VkSubpassDependency2
{
public:
SubpassDependency2 (const void* pNext,
deUint32 srcSubpass,
deUint32 dstSubpass,
VkPipelineStageFlags srcStageMask,
VkPipelineStageFlags dstStageMask,
VkAccessFlags srcAccessMask,
VkAccessFlags dstAccessMask,
VkDependencyFlags dependencyFlags,
deInt32 viewOffset);
};
class RenderPassCreateInfo1 : public VkRenderPassCreateInfo
{
public:
RenderPassCreateInfo1 (const void* pNext,
VkRenderPassCreateFlags flags,
deUint32 attachmentCount,
const VkAttachmentDescription* pAttachments,
deUint32 subpassCount,
const VkSubpassDescription* pSubpasses,
deUint32 dependencyCount,
const VkSubpassDependency* pDependencies,
deUint32 correlatedViewMaskCount,
const deUint32* pCorrelatedViewMasks);
Move<VkRenderPass> createRenderPass (const DeviceInterface& vk,
VkDevice device) const;
};
class RenderPassCreateInfo2 : public VkRenderPassCreateInfo2
{
public:
RenderPassCreateInfo2 (const void* pNext,
VkRenderPassCreateFlags flags,
deUint32 attachmentCount,
const VkAttachmentDescription2* pAttachments,
deUint32 subpassCount,
const VkSubpassDescription2* pSubpasses,
deUint32 dependencyCount,
const VkSubpassDependency2* pDependencies,
deUint32 correlatedViewMaskCount,
const deUint32* pCorrelatedViewMasks);
Move<VkRenderPass> createRenderPass (const DeviceInterface& vk,
VkDevice device) const;
};
class SubpassBeginInfo1
{
public:
SubpassBeginInfo1 (const void* pNext,
VkSubpassContents contents);
VkSubpassContents contents;
};
class SubpassBeginInfo2 : public VkSubpassBeginInfo
{
public:
SubpassBeginInfo2 (const void* pNext,
VkSubpassContents contents);
};
class SubpassEndInfo1
{
public:
SubpassEndInfo1 (const void* pNext);
};
class SubpassEndInfo2 : public VkSubpassEndInfo
{
public:
SubpassEndInfo2 (const void* pNext);
};
class RenderpassSubpass1
{
public:
typedef SubpassBeginInfo1 SubpassBeginInfo;
typedef SubpassEndInfo1 SubpassEndInfo;
static void cmdBeginRenderPass (const DeviceInterface& vk,
VkCommandBuffer cmdBuffer,
const VkRenderPassBeginInfo* pRenderPassBegin,
const SubpassBeginInfo* pSubpassBeginInfo);
static void cmdNextSubpass (const DeviceInterface& vk,
VkCommandBuffer cmdBuffer,
const SubpassBeginInfo* pSubpassBeginInfo,
const SubpassEndInfo* pSubpassEndInfo);
static void cmdEndRenderPass (const DeviceInterface& vk,
VkCommandBuffer cmdBuffer,
const SubpassEndInfo* pSubpassEndInfo);
};
class RenderpassSubpass2
{
public:
typedef SubpassBeginInfo2 SubpassBeginInfo;
typedef SubpassEndInfo2 SubpassEndInfo;
static void cmdBeginRenderPass (const DeviceInterface& vk,
VkCommandBuffer cmdBuffer,
const VkRenderPassBeginInfo* pRenderPassBegin,
const SubpassBeginInfo* pSubpassBeginInfo);
static void cmdNextSubpass (const DeviceInterface& vk,
VkCommandBuffer cmdBuffer,
const SubpassBeginInfo* pSubpassBeginInfo,
const SubpassEndInfo* pSubpassEndInfo);
static void cmdEndRenderPass (const DeviceInterface& vk,
VkCommandBuffer cmdBuffer,
const SubpassEndInfo* pSubpassEndInfo);
};
} // MultiView
} // vkt
#endif // _VKTMULTIVIEWRENDERPASSUTIL_HPP
// File: src/integrationTest/java/com/pubnub/api/integration/objects/uuid/UUIDMetadataIT.java
package com.pubnub.api.integration.objects.uuid;
import com.pubnub.api.PubNubException;
import com.pubnub.api.integration.objects.ObjectsApiBaseIT;
import com.pubnub.api.models.consumer.objects_api.uuid.PNGetAllUUIDMetadataResult;
import com.pubnub.api.models.consumer.objects_api.uuid.PNGetUUIDMetadataResult;
import com.pubnub.api.models.consumer.objects_api.uuid.PNRemoveUUIDMetadataResult;
import com.pubnub.api.models.consumer.objects_api.uuid.PNSetUUIDMetadataResult;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.http.HttpStatus;
import org.junit.After;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasProperty;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.isEmptyOrNullString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
public class UUIDMetadataIT extends ObjectsApiBaseIT {
private static final Logger LOG = LoggerFactory.getLogger(UUIDMetadataIT.class);
private static final int NUMBER_OF_RANDOM_TEST_UUIDS = 10;
private static final int FETCH_LIMIT = 3;
private final List<String> randomTestUUIDs = randomTestUUIDs();
private final String randomTestUUID = randomTestUUIDs.get(0);
private final List<PNSetUUIDMetadataResult> createdUUIDMetadataList = new ArrayList<>();
private final String randomName = randomName();
private final String randomEmail = randomEmail();
private final String randomProfileUrl = randomProfileUrl();
private final String randomExternalId = randomExternalId();
@Test
public void setUUIDHappyPath() throws PubNubException {
//given
//when
final PNSetUUIDMetadataResult setUUIDMetadataResult = pubNubUnderTest.setUUIDMetadata()
.uuid(randomTestUUID)
.name(randomName)
.email(randomEmail)
.profileUrl(randomProfileUrl)
.externalId(randomExternalId)
.custom(customUUIDObject())
.includeCustom(true)
.sync();
//then
assertNotNull(setUUIDMetadataResult);
assertEquals(HttpStatus.SC_OK, setUUIDMetadataResult.getStatus());
createdUUIDMetadataList.add(setUUIDMetadataResult);
assertEquals(randomTestUUID, setUUIDMetadataResult.getData().getId());
assertEquals(randomName, setUUIDMetadataResult.getData().getName());
assertEquals(randomEmail, setUUIDMetadataResult.getData().getEmail());
assertEquals(randomProfileUrl, setUUIDMetadataResult.getData().getProfileUrl());
assertEquals(randomExternalId, setUUIDMetadataResult.getData().getExternalId());
assertNotNull(setUUIDMetadataResult.getData().getCustom());
}
@Test
public void getUUIDHappyPath() throws PubNubException {
//given
final PNSetUUIDMetadataResult setUUIDMetadataResult = pubNubUnderTest.setUUIDMetadata()
.uuid(randomTestUUID)
.name(randomName)
.email(randomEmail)
.profileUrl(randomProfileUrl)
.externalId(randomExternalId)
.custom(customUUIDObject())
.includeCustom(true)
.sync();
createdUUIDMetadataList.add(setUUIDMetadataResult);
//when
final PNGetUUIDMetadataResult getUUIDMetadataResult = pubNubUnderTest.getUUIDMetadata()
.uuid(randomTestUUID)
.includeCustom(true)
.sync();
//then
assertNotNull(getUUIDMetadataResult);
assertEquals(HttpStatus.SC_OK, getUUIDMetadataResult.getStatus());
assertEquals(randomTestUUID, getUUIDMetadataResult.getData().getId());
assertEquals(setUUIDMetadataResult.getData().getName(), getUUIDMetadataResult.getData().getName());
assertEquals(setUUIDMetadataResult.getData().getEmail(), getUUIDMetadataResult.getData().getEmail());
assertEquals(setUUIDMetadataResult.getData().getProfileUrl(), getUUIDMetadataResult.getData().getProfileUrl());
assertEquals(setUUIDMetadataResult.getData().getExternalId(), getUUIDMetadataResult.getData().getExternalId());
assertNotNull(getUUIDMetadataResult.getData().getCustom());
}
@Test
public void getAllUUIDHappyPath() throws PubNubException {
//given
for (String testUUID: randomTestUUIDs) {
final PNSetUUIDMetadataResult setUUIDMetadataResult = pubNubUnderTest.setUUIDMetadata()
.uuid(testUUID)
.name(randomName)
.email(randomEmail)
.profileUrl(randomProfileUrl)
.externalId(randomExternalId)
.custom(customUUIDObject())
.includeCustom(true)
.sync();
createdUUIDMetadataList.add(setUUIDMetadataResult);
}
//when
final PNGetAllUUIDMetadataResult getAllUUIDMetadataResult = pubNubUnderTest.getAllUUIDMetadata()
.includeCustom(true)
.includeTotalCount(true)
.limit(FETCH_LIMIT)
.sync();
//then
assertThat(getAllUUIDMetadataResult, allOf(
notNullValue(),
hasProperty("status", equalTo(HttpStatus.SC_OK)),
hasProperty("data", allOf(
not(empty()),
hasSize(FETCH_LIMIT))),
hasProperty("totalCount", greaterThanOrEqualTo(NUMBER_OF_RANDOM_TEST_UUIDS)),
hasProperty("next", not(isEmptyOrNullString())),
hasProperty("prev", isEmptyOrNullString())));
}
@Test
public void removeUUIDHappyPath() throws PubNubException {
//given
final PNSetUUIDMetadataResult setUUIDMetadataResult = pubNubUnderTest.setUUIDMetadata()
.uuid(randomTestUUID)
.name(randomName)
.email(randomEmail)
.profileUrl(randomProfileUrl)
.externalId(randomExternalId)
.custom(customUUIDObject())
.includeCustom(true)
.sync();
createdUUIDMetadataList.add(setUUIDMetadataResult);
//when
final PNRemoveUUIDMetadataResult removeUUIDMetadataResult = pubNubUnderTest.removeUUIDMetadata()
.uuid(randomTestUUID)
.sync();
//then
assertNotNull(removeUUIDMetadataResult);
assertEquals(HttpStatus.SC_OK, removeUUIDMetadataResult.getStatus());
}
@After
public void cleanUp() {
createdUUIDMetadataList.forEach(pnSetUUIDMetadataResult -> {
try {
pubNubUnderTest.removeUUIDMetadata()
.uuid(pnSetUUIDMetadataResult.getData().getId())
.sync();
} catch (Exception e) {
LOG.warn("Could not cleanup {}", pnSetUUIDMetadataResult, e);
}
});
}
private Map<String, Object> customUUIDObject() {
return new HashMap<String, Object>() {
{
putIfAbsent("uuid_param1", "val1");
putIfAbsent("uuid_param2", "val2");
}
};
}
private String randomExternalId() {
return UUID.randomUUID().toString();
}
private String randomEmail() {
return RandomStringUtils.randomAlphabetic(6) + "@example.com";
}
private String randomName() {
return RandomStringUtils.randomAlphabetic(5, 10) + " " + RandomStringUtils.randomAlphabetic(5, 10);
}
private String randomProfileUrl() {
return "http://" + RandomStringUtils.randomAlphabetic(5, 15) + ".com";
}
private static List<String> randomTestUUIDs() {
final List<String> uuids = new ArrayList<>();
for (int i = 0; i < NUMBER_OF_RANDOM_TEST_UUIDS; i++) {
uuids.add(UUID.randomUUID().toString());
}
return uuids;
}
}
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/global_media_controls/media_toolbar_button_controller.h"
#include "chrome/browser/ui/global_media_controls/media_toolbar_button_controller_delegate.h"
#include "components/global_media_controls/public/media_item_manager.h"
MediaToolbarButtonController::MediaToolbarButtonController(
MediaToolbarButtonControllerDelegate* delegate,
global_media_controls::MediaItemManager* item_manager)
: delegate_(delegate), item_manager_(item_manager) {
DCHECK(delegate_);
item_manager_->AddObserver(this);
UpdateToolbarButtonState();
}
MediaToolbarButtonController::~MediaToolbarButtonController() {
item_manager_->RemoveObserver(this);
}
void MediaToolbarButtonController::OnItemListChanged() {
UpdateToolbarButtonState();
}
void MediaToolbarButtonController::OnMediaDialogOpened() {
UpdateToolbarButtonState();
}
void MediaToolbarButtonController::OnMediaDialogClosed() {
UpdateToolbarButtonState();
delegate_->MaybeShowStopCastingPromo();
}
void MediaToolbarButtonController::ShowToolbarButton() {
if (delegate_display_state_ != DisplayState::kShown) {
delegate_->Enable();
delegate_->Show();
delegate_display_state_ = DisplayState::kShown;
}
}
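// Chooses between the shown, hidden and disabled states: the button is shown
// while there are active items or an open dialog, hidden once no frozen items
// remain, and disabled while only frozen items are left and no dialog is open.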
void MediaToolbarButtonController::UpdateToolbarButtonState() {
if (item_manager_->HasActiveItems() || item_manager_->HasOpenDialog()) {
ShowToolbarButton();
return;
}
if (!item_manager_->HasFrozenItems()) {
if (delegate_display_state_ != DisplayState::kHidden)
delegate_->Hide();
delegate_display_state_ = DisplayState::kHidden;
return;
}
if (!item_manager_->HasOpenDialog()) {
if (delegate_display_state_ != DisplayState::kDisabled)
delegate_->Disable();
delegate_display_state_ = DisplayState::kDisabled;
}
}
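/* State for a triggered ADSR (attack/decay/sustain/release) envelope generator:
   the current output value, target and stage, the per-stage rates, and the
   user-facing atk/dec/sus/rel parameters. */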
typedef struct {
SPFLOAT value;
SPFLOAT target;
SPFLOAT rate;
int state;
SPFLOAT attackRate;
SPFLOAT decayRate;
SPFLOAT sustainLevel;
SPFLOAT releaseRate;
SPFLOAT atk;
SPFLOAT rel;
SPFLOAT sus;
SPFLOAT dec;
int mode;
} sp_tadsr;
int sp_tadsr_create(sp_tadsr **p);
int sp_tadsr_destroy(sp_tadsr **p);
int sp_tadsr_init(sp_data *sp, sp_tadsr *p);
int sp_tadsr_compute(sp_data *sp, sp_tadsr *p, SPFLOAT *trig, SPFLOAT *out);
// Source: InfiniteSynthesis/lynx-native
#ifndef LYNX_GL_CANVAS_LYNX_TEXTURE_H_
#define LYNX_GL_CANVAS_LYNX_TEXTURE_H_
#include <EGL/egl.h>
#include <GLES2/gl2.h>
namespace canvas {
class LxTexture {
public:
void Bind() { glBindTexture(GL_TEXTURE_2D, id_); }
void UnBind() { glBindTexture(GL_TEXTURE_2D, 0); }
GLuint id() { return id_; }
void set_id(GLuint texture_id) { id_ = texture_id; }
unsigned int width() { return width_; }
void set_width(unsigned int width) { width_ = width; }
unsigned int height() { return height_; }
void set_height(unsigned int height) { height_ = height; }
GLenum format() { return format_; }
void set_format(GLenum format) { format_ = format; }
static LxTexture* Create(unsigned int width,
unsigned int height,
GLenum format,
void* data = nullptr) {
GLint max_texture_size;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &max_texture_size);
if (width > max_texture_size || height > max_texture_size) {
return nullptr;
}
GLint binding_texture = 0;
glGetIntegerv(GL_TEXTURE_BINDING_2D, &binding_texture);
LxTexture* texture = new LxTexture;
GLuint id = 0;
glGenTextures(1, &id);
glBindTexture(GL_TEXTURE_2D, id);
glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format,
GL_UNSIGNED_BYTE, data);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_2D, binding_texture);
texture->set_id(id);
texture->set_width(width);
texture->set_height(height);
texture->set_format(format);
return texture;
}
private:
GLuint id_;
unsigned int width_;
unsigned int height_;
GLenum format_;
};
} // namespace canvas
#endif
package ca.uhn.fhir.tinder.model;
public class AnyChild extends Child {
@Override
public String getReferenceType() {
return "IDatatype";
}
@Override
public String getAnnotationType() {
return getReferenceType();
}
@Override
public boolean isSingleChildInstantiable() {
return false;
}
}
from torch2trt.torch2trt import *
def is_private(method):
method = method.split('.')[-1] # remove prefix
return method[0] == '_' and method[1] != '_'
def is_function_type(method):
fntype = eval(method + '.__class__.__name__')
return fntype == 'function' or fntype == 'builtin_function_or_method' or fntype == 'method_descriptor'
def get_methods(namespace):
methods = []
for method in dir(eval(namespace)):
full_method = namespace + '.' + method
if not is_private(full_method) and is_function_type(full_method):
methods.append(full_method)
return methods
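# Collect every public function exposed by torch, torch.Tensor and
# torch.nn.functional so a fallback converter can warn about calls that
# torch2trt does not support.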
TORCH_METHODS = []
TORCH_METHODS += get_methods('torch')
TORCH_METHODS += get_methods('torch.Tensor')
TORCH_METHODS += get_methods('torch.nn.functional')
for method in TORCH_METHODS:
@tensorrt_converter(method, is_real=False)
def warn_method(ctx):
print('Warning: Encountered known unsupported method %s' % ctx.method_str)
@tensorrt_converter('torch.Tensor.dim', is_real=False)
@tensorrt_converter('torch.Tensor.size', is_real=False)
def dont_warn(ctx):
pass
// File: stagemonitor-web-servlet/src/test/java/org/stagemonitor/web/servlet/jaxrs/JaxRsRequestNameDeterminerTransformerTest.java
package org.stagemonitor.web.servlet.jaxrs;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.stagemonitor.configuration.ConfigurationOption;
import org.stagemonitor.configuration.ConfigurationRegistry;
import org.stagemonitor.core.CorePlugin;
import org.stagemonitor.core.MeasurementSession;
import org.stagemonitor.core.Stagemonitor;
import org.stagemonitor.core.metrics.metrics2.Metric2Filter;
import org.stagemonitor.core.metrics.metrics2.Metric2Registry;
import org.stagemonitor.tracing.GlobalTracerTestHelper;
import org.stagemonitor.tracing.MonitoredMethodRequest;
import org.stagemonitor.tracing.MonitoredRequest;
import org.stagemonitor.tracing.RequestMonitor;
import org.stagemonitor.tracing.SpanCapturingReporter;
import org.stagemonitor.tracing.SpanContextInformation;
import org.stagemonitor.tracing.TracingPlugin;
import org.stagemonitor.tracing.reporter.ReportingSpanEventListener;
import org.stagemonitor.tracing.sampling.SamplePriorityDeterminingSpanEventListener;
import org.stagemonitor.tracing.wrapper.SpanWrappingTracer;
import org.stagemonitor.web.servlet.ServletPlugin;
import java.util.ArrayList;
import java.util.Collections;
import java.util.regex.Pattern;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import io.opentracing.mock.MockTracer;
import io.opentracing.util.GlobalTracer;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.stagemonitor.tracing.BusinessTransactionNamingStrategy.METHOD_NAME_SPLIT_CAMEL_CASE;
public class JaxRsRequestNameDeterminerTransformerTest {
private TestResource resource = new TestResource();
private SpanCapturingReporter spanCapturingReporter;
@BeforeClass
@AfterClass
public static void reset() {
Stagemonitor.reset();
}
private ConfigurationRegistry configuration = mock(ConfigurationRegistry.class);
private TracingPlugin tracingPlugin = mock(TracingPlugin.class);
private ServletPlugin servletPlugin = mock(ServletPlugin.class);
private CorePlugin corePlugin = mock(CorePlugin.class);
private RequestMonitor requestMonitor;
private Metric2Registry registry = new Metric2Registry();
@Before
public void before() throws Exception {
Stagemonitor.reset(new MeasurementSession("JaxRsRequestNameDeterminerTransformerTest", "testHost", "testInstance"));
registry.removeMatching(Metric2Filter.ALL);
when(configuration.getConfig(TracingPlugin.class)).thenReturn(tracingPlugin);
when(configuration.getConfig(ServletPlugin.class)).thenReturn(servletPlugin);
when(configuration.getConfig(CorePlugin.class)).thenReturn(corePlugin);
when(corePlugin.isStagemonitorActive()).thenReturn(true);
when(corePlugin.getThreadPoolQueueCapacityLimit()).thenReturn(1000);
when(corePlugin.getApplicationName()).thenReturn("JaxRsRequestNameDeterminerTransformerTest");
when(corePlugin.getInstanceName()).thenReturn("test");
when(tracingPlugin.getDefaultRateLimitSpansPerMinute()).thenReturn(1000000d);
when(tracingPlugin.getDefaultRateLimitSpansPerMinuteOption()).thenReturn(mock(ConfigurationOption.class));
when(tracingPlugin.getDefaultRateLimitSpansPerMinuteOption()).thenReturn(mock(ConfigurationOption.class));
when(tracingPlugin.getDefaultRateLimitSpansPercentOption()).thenReturn(mock(ConfigurationOption.class));
when(tracingPlugin.getRateLimitSpansPerMinutePercentPerTypeOption()).thenReturn(mock(ConfigurationOption.class));
when(tracingPlugin.getDefaultRateLimitSpansPercent()).thenReturn(1.0);
when(tracingPlugin.getRateLimitSpansPerMinutePercentPerType()).thenReturn(Collections.emptyMap());
when(tracingPlugin.getProfilerRateLimitPerMinuteOption()).thenReturn(mock(ConfigurationOption.class));
when(tracingPlugin.getBusinessTransactionNamingStrategy()).thenReturn(METHOD_NAME_SPLIT_CAMEL_CASE);
when(tracingPlugin.isSampled(any())).thenReturn(true);
when(servletPlugin.getGroupUrls()).thenReturn(Collections.singletonMap(Pattern.compile("(.*).js$"), "*.js"));
requestMonitor = new RequestMonitor(configuration, registry);
when(tracingPlugin.getRequestMonitor()).thenReturn(requestMonitor);
final ReportingSpanEventListener reportingSpanEventListener = new ReportingSpanEventListener(configuration);
spanCapturingReporter = new SpanCapturingReporter();
reportingSpanEventListener.addReporter(spanCapturingReporter);
final SpanWrappingTracer tracer = TracingPlugin.createSpanWrappingTracer(new MockTracer(),
configuration, registry, new ArrayList<>(),
new SamplePriorityDeterminingSpanEventListener(configuration), reportingSpanEventListener);
GlobalTracerTestHelper.resetGlobalTracer();
GlobalTracer.register(tracer);
when(tracingPlugin.getTracer()).thenReturn(tracer);
}
@After
public void after() {
GlobalTracerTestHelper.resetGlobalTracer();
}
@Test
public void testSetNameForRestCalls() throws Exception {
final MonitoredRequest request = new MonitoredMethodRequest(configuration, "override me", () -> resource.getTestString());
requestMonitor.monitor(request);
final SpanContextInformation info = spanCapturingReporter.get();
assertNotNull(info);
assertEquals("Get Test String", info.getOperationName());
}
@Path("/")
public class TestResource {
@GET
public String getTestString() {
return "test";
}
}
}
/**
* Copyright 2019 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ch.qos.logback.core.net;
import java.net.ConnectException;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import ch.qos.logback.core.net.SocketConnector.ExceptionHandler;
import ch.qos.logback.core.net.server.ServerSocketUtil;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Unit tests for {@link DefaultSocketConnector}.
*
* @author <NAME>
*/
@Ignore
public class DefaultSocketConnectorTest {
private static final int DELAY = 1000;
private static final int SHORT_DELAY = 10;
private static final int RETRY_DELAY = 10;
private MockExceptionHandler exceptionHandler = new MockExceptionHandler();
private ServerSocket serverSocket;
private DefaultSocketConnector connector;
ExecutorService executor = Executors.newSingleThreadExecutor();
@Before
public void setUp() throws Exception {
serverSocket = ServerSocketUtil.createServerSocket();
connector = new DefaultSocketConnector(serverSocket.getInetAddress(),
serverSocket.getLocalPort(), 0, RETRY_DELAY);
connector.setExceptionHandler(exceptionHandler);
}
@After
public void tearDown() throws Exception {
if (serverSocket != null) {
serverSocket.close();
}
}
@Test
public void testConnect() throws Exception {
Future<Socket> connectorTask = executor.submit(connector);
Socket socket = connectorTask.get(2 * DELAY, TimeUnit.MILLISECONDS);
assertNotNull(socket);
connectorTask.cancel(true);
assertTrue(connectorTask.isDone());
socket.close();
}
@Test
public void testConnectionFails() throws Exception {
serverSocket.close();
Future<Socket> connectorTask = executor.submit(connector);
// this connection attempt will always timeout
try {
connectorTask.get(SHORT_DELAY, TimeUnit.MILLISECONDS);
fail();
} catch(TimeoutException e) {
}
Exception lastException = exceptionHandler.awaitConnectionFailed(DELAY);
assertTrue(lastException instanceof ConnectException);
assertFalse(connectorTask.isDone());
connectorTask.cancel(true);
//thread.join(4 * DELAY);
assertTrue(connectorTask.isCancelled());
}
@Test(timeout = 5000)
public void testConnectEventually() throws Exception {
serverSocket.close();
Future<Socket> connectorTask = executor.submit(connector);
// this connection attempt will always timeout
try {
connectorTask.get(SHORT_DELAY, TimeUnit.MILLISECONDS);
fail();
} catch(TimeoutException e) {
}
// on Ceki's machine (Windows 7) this always takes 1second regardless of the value of DELAY
Exception lastException = exceptionHandler.awaitConnectionFailed(DELAY);
assertNotNull(lastException);
assertTrue(lastException instanceof ConnectException);
// now rebind to the same local address
SocketAddress address = serverSocket.getLocalSocketAddress();
serverSocket = new ServerSocket();
serverSocket.setReuseAddress(true);
serverSocket.bind(address);
// now we should be able to connect
Socket socket = connectorTask.get(2 * DELAY, TimeUnit.MILLISECONDS);
assertNotNull(socket);
assertFalse(connectorTask.isCancelled());
socket.close();
}
private static class MockExceptionHandler implements ExceptionHandler {
private final Lock lock = new ReentrantLock();
private final Condition failedCondition = lock.newCondition();
private volatile Exception lastException;
public void connectionFailed(SocketConnector connector, Exception ex) {
lastException = ex;
}
public Exception awaitConnectionFailed(long delay)
throws InterruptedException {
lock.lock();
try {
long increment = 10;
while (lastException == null && delay > 0) {
boolean success = failedCondition.await(increment, TimeUnit.MILLISECONDS);
delay -= increment;
if(success) break;
}
return lastException;
}
finally {
lock.unlock();
}
}
}
}
#ifdef ENABLE_EXPERIMENT_TIERED
#ifndef __MONO_MINI_TIERED_H__
#define __MONO_MINI_TIERED_H__
#define TIERED_PATCH_KIND_INTERP 0
#define TIERED_PATCH_KIND_JIT 1
#define TIERED_PATCH_KIND_NUM 2
typedef struct {
int hotness;
gboolean promoted;
} MiniTieredCounter;
typedef struct {
gint64 methods_promoted;
} MiniTieredStats;
typedef struct {
MonoMethod *target_method;
int tier_level;
} MiniTieredPatchPointContext;
typedef gboolean (*CallsitePatcher)(MiniTieredPatchPointContext *context, gpointer patchsite);
void
mini_tiered_init (void);
void
mini_tiered_inc (MonoMethod *method, MiniTieredCounter *tcnt, int level);
void
mini_tiered_record_callsite (gpointer callsite, MonoMethod *target_method, int level);
void
mini_tiered_register_callsite_patcher (CallsitePatcher func, int level);
#endif /* __MONO_MINI_TIERED_H__ */
#endif /* ENABLE_EXPERIMENT_TIERED */
// Source: BenjaFriend/EngineBay, file FlingEngine/Gameplay/inc/World.inl
#pragma once
#include "World.h"
#include "Components/Transform.h"
#include "MeshRenderer.h"
#include "Lighting/DirectionalLight.hpp"
#include "Lighting/PointLight.hpp"
// Definition of what world components we want to serialize to the disk when
// saving and loading a scene
#define WORLD_COMPONENTS Fling::Transform, MeshRenderer, DirectionalLight, PointLight
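// Illustrative usage sketch (the component type and path below are hypothetical):
// additional component types beyond WORLD_COMPONENTS can be forwarded through the
// variadic ARGS parameter of the functions defined below, e.g.
//   world.OutputLevelFile<MyCustomComponent>("Levels/Level0.json");
//   world.LoadLevelFile<MyCustomComponent>("Levels/Level0.json");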
namespace Fling
{
template<class ...ARGS>
bool World::OutputLevelFile(const std::string& t_LevelToLoad)
{
std::string FullPath = FlingPaths::EngineAssetsDir() + "/" + t_LevelToLoad;
std::ofstream OutStream(FullPath);
if(!OutStream.is_open())
{
F_LOG_ERROR("Failed to open out stream to level {}", FullPath);
return false;
}
F_LOG_TRACE("Outputting Level file to {}", FullPath);
cereal::JSONOutputArchive archive(OutStream);
// Write out a copy of what is in this registry
m_Registry.snapshot()
.entities(archive)
.component<WORLD_COMPONENTS, ARGS...>(archive);
return true;
}
template<class ...ARGS>
bool World::LoadLevelFile(const std::string& t_LevelToLoad)
{
std::string FullPath = FlingPaths::EngineAssetsDir() + "/" + t_LevelToLoad;
F_LOG_TRACE("Load Scene file to: {}", FullPath);
// Create a cereal input stream
std::ifstream InputStream(FullPath);
if(!InputStream.is_open())
{
F_LOG_ERROR("Failed to open input stream from file: {}", FullPath);
return false;
}
F_LOG_TRACE("Loading Level file from {}", FullPath);
cereal::JSONInputArchive archive(InputStream);
// This type of loader requires the registry to be cleared first
m_Registry.reset();
// Load into the registry based on the serialization from the stream
m_Registry.loader()
.entities(archive)
.component<WORLD_COMPONENTS, ARGS...>(archive);
return true;
}
}  // namespace Fling
// Source: rychagova/egeria, file open-metadata-implementation/admin-services/admin-services-api/src/main/java/org/odpi/openmetadata/adminservices/configuration/registration/package-info.java
/* SPDX-License-Identifier: Apache 2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
/**
* The registration package provides the definitions and interfaces to describe each of the Open Metadata
* and Governance services that support the different subsystems within the OMAG Server.
*/
package org.odpi.openmetadata.adminservices.configuration.registration;
File: docs/data/leg-t2/053/05301016.json
{"nom":"Bais","circ":"1ère circonscription","dpt":"Mayenne","inscrits":883,"abs":460,"votants":423,"blancs":30,"nuls":10,"exp":383,"res":[{"nuance":"SOC","nom":"<NAME>","voix":240},{"nuance":"REM","nom":"Mme <NAME>","voix":143}]} | 110 |
# Source: b2220333/convnet-drawer
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from convnet_drawer import Model, Conv2D, MaxPooling2D, Flatten, Dense, config
from matplotlib_util import save_model_to_file
from matplotlib import pyplot as plt
plt.xkcd()
def main():
config.text_size = 16
model = Model(input_shape=(227, 227, 3))
model.add(Conv2D(96, (11, 11), (4, 4)))
model.add(MaxPooling2D((3, 3), strides=(2, 2)))
model.add(Conv2D(256, (5, 5), padding="same"))
model.add(MaxPooling2D((3, 3), strides=(2, 2)))
model.add(Conv2D(384, (3, 3), padding="same"))
model.add(Conv2D(384, (3, 3), padding="same"))
model.add(Conv2D(256, (3, 3), padding="same"))
model.add(MaxPooling2D((3, 3), strides=(2, 2)))
model.add(Flatten())
model.add(Dense(4096))
model.add(Dense(4096))
model.add(Dense(1000))
save_model_to_file(model, os.path.splitext(os.path.basename(__file__))[0] + ".pdf")
if __name__ == '__main__':
main()
// File: java/foam/android/view/EditIntBridge.java
package foam.android.view;
import android.content.Context;
import android.util.AttributeSet;
import android.widget.EditText;
import foam.core.Value;
/**
* View bridge that binds an {@link EditText} to a FOAM {@link Value<Integer>}.
*/
public class EditIntBridge extends AbstractTextFieldBridge<EditText, Integer> {
public EditIntBridge(Context context) {
super(context);
}
public EditIntBridge(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
protected EditText makeInnerView(Context context, AttributeSet attrs) {
return new EditText(context, attrs);
}
@Override
protected Integer convertStringToValue(String s) {
return Integer.parseInt(s);
}
@Override
protected String convertValueToString(Integer s) {
return s.toString();
}
}
# Source: monocilindro/qgis-earthengine-examples
# GitHub URL: https://github.com/giswqs/qgis-earthengine-examples/tree/master/ImageCollection/select_image_by_index.py
import ee
from ee_plugin import Map
collection = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA')\
.filter(ee.Filter.eq('WRS_PATH', 44))\
.filter(ee.Filter.eq('WRS_ROW', 34))\
.filterDate('2014-01-01', '2015-01-01')
image = ee.Image(collection.toList(collection.size()).get(0)) # select by index from 0 to size-1
print(image.get('system:id').getInfo())
Map.setCenter(-122.3578, 37.7726, 12)
Map.addLayer(image, {"bands": ['B4', 'B3', 'B2'], "max": 0.3}, 'median')
from .base import login_manager, handle_bad_request, handle_unauthorized_access, handle_access_forbidden, handle_page_not_found, handle_internal_server_error
from .index import index_bp
from .user import user_bp
from .dashboard import dashboard_bp
from .domain import domain_bp
from .admin import admin_bp
from .api import api_bp
def init_app(app):
login_manager.init_app(app)
app.register_blueprint(index_bp)
app.register_blueprint(user_bp)
app.register_blueprint(dashboard_bp)
app.register_blueprint(domain_bp)
app.register_blueprint(admin_bp)
app.register_blueprint(api_bp)
app.register_error_handler(400, handle_bad_request)
app.register_error_handler(401, handle_unauthorized_access)
app.register_error_handler(403, handle_access_forbidden)
app.register_error_handler(404, handle_page_not_found)
app.register_error_handler(500, handle_internal_server_error)
/* Copyright 2018 University Corporation for Atmospheric
Research/Unidata. See COPYRIGHT file for more info. */
/**
@file
The functions in this file define, inquire about, and rename
dimensions.
*/
#include "ncdispatch.h"
/**
@defgroup dimensions Dimensions
Dimensions are used to define the shape of data in netCDF.
Dimensions for a netCDF dataset are defined when it is created,
while the netCDF dataset is in define mode. Additional dimensions
may be added later by reentering define mode. A netCDF dimension
has a name and a length. In a netCDF classic or 64-bit offset file,
at most one dimension can have the unlimited length, which means
variables using this dimension can grow along this dimension. In a
netCDF-4 file multiple unlimited dimensions are supported.
There is a suggested limit (1024) to the number of dimensions that
can be defined in a single netCDF dataset. The limit is the value
of the predefined macro ::NC_MAX_DIMS. The purpose of the limit is
to make writing generic applications simpler. They need only
provide an array of ::NC_MAX_DIMS dimensions to handle any netCDF
dataset. The implementation of the netCDF library does not enforce
this advisory maximum, so it is possible to use more dimensions, if
necessary, but netCDF utilities that assume the advisory maximums
may not be able to handle the resulting netCDF datasets.
::NC_MAX_VAR_DIMS, which must not exceed ::NC_MAX_DIMS, is the
maximum number of dimensions that can be used to specify the shape
of a single variable. It is also intended to simplify writing
generic applications.
Ordinarily, the name and length of a dimension are fixed when the
dimension is first defined. The name may be changed later, but the
length of a dimension (other than the unlimited dimension) cannot
be changed without copying all the data to a new netCDF dataset
with a redefined dimension length.
Dimension lengths in the C interface are type size_t rather than
type int to make it possible to access all the data in a netCDF
dataset on a platform that only supports a 16-bit int data type,
for example MSDOS. If dimension lengths were type int instead, it
would not be possible to access data from variables with a
dimension length greater than a 16-bit int can accommodate.
A netCDF dimension in an open netCDF dataset is referred to by a
small integer called a dimension ID. In the C interface, dimension
IDs are 0, 1, 2, ..., in the order in which the dimensions were
defined.
Operations supported on dimensions are:
- Create a dimension, given its name and length.
- Get a dimension ID from its name.
- Get a dimension's name and length from its ID.
- Rename a dimension.
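A compact sketch combining these operations, assuming a new dataset foo.nc
and an error handler handle_error():
@code
#include <netcdf.h>
...
int status, ncid, latid, timeid;
size_t latlength;
...
status = nc_create("foo.nc", NC_NOCLOBBER, &ncid);
if (status != NC_NOERR) handle_error(status);
status = nc_def_dim(ncid, "lat", 18L, &latid);
if (status != NC_NOERR) handle_error(status);
status = nc_def_dim(ncid, "time", NC_UNLIMITED, &timeid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dimid(ncid, "lat", &latid);
if (status != NC_NOERR) handle_error(status);
status = nc_rename_dim(ncid, latid, "latitude");
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dimlen(ncid, latid, &latlength);
if (status != NC_NOERR) handle_error(status);
status = nc_close(ncid);
if (status != NC_NOERR) handle_error(status);
@endcode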
*/
/** @{ */
/**
Define a new dimension. The function nc_def_dim() adds a new
dimension to an open netCDF dataset in define mode. It returns (as an
argument) a dimension ID, given the netCDF ID, the dimension name, and
the dimension length. At most one unlimited length dimension, called
the record dimension, may be defined for each classic or 64-bit offset
netCDF dataset. NetCDF-4 datasets may have multiple unlimited
dimensions.
@param ncid NetCDF or group ID, from a previous call to nc_open(),
nc_create(), nc_def_grp(), or associated inquiry functions such as
nc_inq_ncid().
@param name Name of the dimension to be created.
@param len Length of the dimension to be created. Use NC_UNLIMITED for
unlimited dimensions.
@param idp Pointer where dimension ID will be stored.
@return ::NC_NOERR No error.
@return ::NC_EBADID Not a valid ID.
@return ::NC_EMAXNAME Name is too long.
@return ::NC_EBADNAME Name breaks netCDF name rules.
@return ::NC_EINVAL Invalid input.
@return ::NC_ENOTINDEFINE Not in define mode.
@return ::NC_EDIMSIZE Invalid dimension size.
@return ::NC_EUNLIMIT NC_UNLIMITED size already in use
@return ::NC_EMAXDIMS NC_MAX_DIMS exceeded [not enforced after 4.5.0]
@return ::NC_ENAMEINUSE String match to name in use
@return ::NC_ENOMEM Memory allocation (malloc) failure
@return ::NC_EPERM Write to read only
@section nc_def_dim_example Example
Here is an example using nc_def_dim() to create a dimension named lat of
length 18 and a unlimited dimension named rec in a new netCDF dataset
named foo.nc:
@code
#include <netcdf.h>
...
int status, ncid, latid, recid;
...
status = nc_create("foo.nc", NC_NOCLOBBER, &ncid);
if (status != NC_NOERR) handle_error(status);
...
status = nc_def_dim(ncid, "lat", 18L, &latid);
if (status != NC_NOERR) handle_error(status);
status = nc_def_dim(ncid, "rec", NC_UNLIMITED, &recid);
if (status != NC_NOERR) handle_error(status);
@endcode
@author <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
*/
int
nc_def_dim(int ncid, const char *name, size_t len, int *idp)
{
NC* ncp;
int stat = NC_check_id(ncid, &ncp);
if(stat != NC_NOERR) return stat;
TRACE(nc_def_dim);
return ncp->dispatch->def_dim(ncid, name, len, idp);
}
/**
Find the ID of a dimension from the name.
The function nc_inq_dimid returns (as an argument) the ID of a
netCDF dimension, given the name of the dimension. If ndims is the
number of dimensions defined for a netCDF dataset, each dimension
has an ID between 0 and ndims-1.
@param ncid NetCDF or group ID, from a previous call to nc_open(),
nc_create(), nc_def_grp(), or associated inquiry functions such as
nc_inq_ncid().
@param name Name of the dimension.
@param idp Pointer where dimension ID will be stored.
@return ::NC_NOERR No error.
@return ::NC_EBADID Not a valid ID.
@return ::NC_EBADDIM Invalid dimension ID.
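@section nc_inq_dimid_example Example
A minimal sketch, assuming an existing dataset foo.nc with a dimension named
lat and an error handler handle_error():
@code
#include <netcdf.h>
...
int status, ncid, latid;
...
status = nc_open("foo.nc", NC_NOWRITE, &ncid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dimid(ncid, "lat", &latid);
if (status != NC_NOERR) handle_error(status);
@endcode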
@author <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
*/
int
nc_inq_dimid(int ncid, const char *name, int *idp)
{
NC* ncp;
int stat = NC_check_id(ncid, &ncp);
if(stat != NC_NOERR) return stat;
TRACE(nc_inq_dimid);
return ncp->dispatch->inq_dimid(ncid,name,idp);
}
/**
Find the name and length of a dimension.
The length for the unlimited dimension, if any, is the number of
records written so far.
@param ncid NetCDF or group ID, from a previous call to nc_open(),
nc_create(), nc_def_grp(), or associated inquiry functions such as
nc_inq_ncid().
@param dimid Dimension ID, from a previous call to nc_inq_dimid() or
nc_def_dim().
@param name Returned dimension name. The caller must allocate space
for the returned name. The maximum possible length, in characters, of
a dimension name is given by the predefined constant
::NC_MAX_NAME. (This doesn't include the null terminator, so declare
your array to be size NC_MAX_NAME+1). The returned character array
will be null-terminated.
@param lenp Pointer to location for returned length of dimension. For
the unlimited dimension, this is the number of records written so far.
@return ::NC_NOERR No error.
@return ::NC_EBADID Not a valid ID.
@return ::NC_EBADDIM Invalid dimension ID or name.
@section nc_inq_dim_example Example
Here is an example using nc_inq_dim() to determine the length of a
dimension named lat, and the name and current maximum length of the
unlimited dimension for an existing netCDF dataset named foo.nc:
@code
#include <netcdf.h>
...
int status, ncid, latid, recid;
size_t latlength, recs;
char recname[NC_MAX_NAME+1];
...
status = nc_open("foo.nc", NC_NOWRITE, &ncid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_unlimdim(ncid, &recid);
if (status != NC_NOERR) handle_error(status);
...
status = nc_inq_dimid(ncid, "lat", &latid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dimlen(ncid, latid, &latlength);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dim(ncid, recid, recname, &recs);
if (status != NC_NOERR) handle_error(status);
@endcode
@author <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
*/
int
nc_inq_dim(int ncid, int dimid, char *name, size_t *lenp)
{
NC* ncp;
int stat = NC_check_id(ncid, &ncp);
if(stat != NC_NOERR) return stat;
TRACE(nc_inq_dim);
return ncp->dispatch->inq_dim(ncid,dimid,name,lenp);
}
/**
Rename a dimension.
This function renames an existing dimension in a netCDF dataset
open for writing. You cannot rename a dimension to have the same
name as another dimension.
For netCDF classic and 64-bit offset files, if the new name is
longer than the old name, which has been flushed to disk, the
netCDF dataset must be in define mode.
For netCDF-4 files the length of the name is not checked against
the length of the old name, even for classic model files. This is
due to the difficulty of exactly reproducing classic library
behavior in this case.
@param ncid NetCDF or group ID, from a previous call to nc_open(),
nc_create(), nc_def_grp(), or associated inquiry functions such as
nc_inq_ncid().
@param dimid Dimension ID, from a previous call to nc_inq_dimid()
or nc_def_dim().
@param name New name for dimension. Must be a null-terminated
string with length less than ::NC_MAX_NAME.
@return ::NC_NOERR No error.
@return ::NC_EBADID Not a valid ID.
@return ::NC_EBADDIM Invalid dimension ID or name.
@return ::NC_ENAMEINUSE String match to name in use
@return ::NC_ENOMEM Memory allocation (malloc) failure
@return ::NC_EPERM Write to read only
@return ::NC_ENOTINDEFINE Not in define mode and new name is longer
than old.
@section nc_rename_dim_example Example
Here is an example using nc_rename_dim to rename the dimension lat
to latitude in an existing netCDF dataset named foo.nc:
@code
#include <netcdf.h>
...
int status, ncid, latid;
...
status = nc_open("foo.nc", NC_WRITE, &ncid);
if (status != NC_NOERR) handle_error(status);
...
status = nc_redef(ncid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dimid(ncid, "lat", &latid);
if (status != NC_NOERR) handle_error(status);
status = nc_rename_dim(ncid, latid, "latitude");
if (status != NC_NOERR) handle_error(status);
status = nc_enddef(ncid);
if (status != NC_NOERR) handle_error(status);
@endcode
@author <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
*/
int
nc_rename_dim(int ncid, int dimid, const char *name)
{
NC* ncp;
int stat = NC_check_id(ncid, &ncp);
if(stat != NC_NOERR) return stat;
TRACE(nc_rename_dim);
return ncp->dispatch->rename_dim(ncid,dimid,name);
}
/**
Find the number of dimensions.
In a classic model netCDF file, this function returns the number of
defined dimensions. In a netCDF-4/HDF5 file, this function returns
the number of dimensions available in the group specified by ncid,
which may be less than the total number of dimensions in a file. In
a netCDF-4/HDF5 file, dimensions are in all sub-groups,
sub-sub-groups, etc.
@param ncid NetCDF or group ID, from a previous call to nc_open(),
nc_create(), nc_def_grp(), or associated inquiry functions such as
nc_inq_ncid().
@param ndimsp Pointer where number of dimensions will be
written. Ignored if NULL.
@return ::NC_NOERR No error.
@return ::NC_EBADID Not a valid ID.
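@section nc_inq_ndims_example Example
A minimal sketch, assuming an existing dataset foo.nc and an error handler
handle_error():
@code
#include <netcdf.h>
...
int status, ncid, ndims;
...
status = nc_open("foo.nc", NC_NOWRITE, &ncid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_ndims(ncid, &ndims);
if (status != NC_NOERR) handle_error(status);
@endcode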
@author <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
*/
int
nc_inq_ndims(int ncid, int *ndimsp)
{
NC* ncp;
int stat = NC_check_id(ncid, &ncp);
if(stat != NC_NOERR) return stat;
if(ndimsp == NULL) return NC_NOERR;
TRACE(nc_inq_ndims);
return ncp->dispatch->inq(ncid,ndimsp,NULL,NULL,NULL);
}
/**
Find the ID of the unlimited dimension.
This function finds the ID of the unlimited dimension. For
netCDF-4/HDF5 files (which may have more than one unlimited
dimension), the ID of the first unlimited dimesnion is
returned. For these files, nc_inq_unlimdims() will return all the
unlimited dimension IDs.
@param ncid NetCDF or group ID, from a previous call to nc_open(),
nc_create(), nc_def_grp(), or associated inquiry functions such as
nc_inq_ncid().
@param unlimdimidp Pointer where unlimited dimension ID will be
stored. If there is no unlimited dimension, -1 will be stored
here. Ignored if NULL.
@return ::NC_NOERR No error.
@return ::NC_EBADID Not a valid ID.
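@section nc_inq_unlimdim_example Example
A minimal sketch, assuming an existing dataset foo.nc and an error handler
handle_error(); unlimdimid is set to -1 when no unlimited dimension exists:
@code
#include <netcdf.h>
...
int status, ncid, unlimdimid;
...
status = nc_open("foo.nc", NC_NOWRITE, &ncid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_unlimdim(ncid, &unlimdimid);
if (status != NC_NOERR) handle_error(status);
@endcode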
@author <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
*/
int
nc_inq_unlimdim(int ncid, int *unlimdimidp)
{
NC* ncp;
int stat = NC_check_id(ncid, &ncp);
if(stat != NC_NOERR) return stat;
TRACE(nc_inq_unlimdim);
return ncp->dispatch->inq_unlimdim(ncid,unlimdimidp);
}
/**
Find out the name of a dimension.
@param ncid NetCDF or group ID, from a previous call to nc_open(),
nc_create(), nc_def_grp(), or associated inquiry functions such as
nc_inq_ncid().
@param dimid Dimension ID, from a previous call to nc_inq_dimid()
or nc_def_dim().
@param name Returned dimension name. The caller must allocate space
for the returned name. The maximum possible length, in characters,
of a dimension name is given by the predefined constant
::NC_MAX_NAME. (This doesn't include the null terminator, so
declare your array to be size NC_MAX_NAME+1). The returned
character array will be null-terminated. Ignored if NULL.
@return ::NC_NOERR No error.
@return ::NC_EBADID Not a valid ID.
@return ::NC_EBADDIM Invalid dimension ID or name.
@section nc_inq_dim_example2 Example
Here is an example using nc_inq_dim() to determine the length of a
dimension named lat, and the name and current maximum length of the
unlimited dimension for an existing netCDF dataset named foo.nc:
@code
#include <netcdf.h>
...
int status, ncid, latid, recid;
size_t latlength, recs;
char recname[NC_MAX_NAME+1];
...
status = nc_open("foo.nc", NC_NOWRITE, &ncid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_unlimdim(ncid, &recid);
if (status != NC_NOERR) handle_error(status);
...
status = nc_inq_dimid(ncid, "lat", &latid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dimlen(ncid, latid, &latlength);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dim(ncid, recid, recname, &recs);
if (status != NC_NOERR) handle_error(status);
@endcode
@author <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
*/
int
nc_inq_dimname(int ncid, int dimid, char *name)
{
NC* ncp;
int stat = NC_check_id(ncid, &ncp);
if(stat != NC_NOERR) return stat;
if(name == NULL) return NC_NOERR;
TRACE(nc_inq_dimname);
return ncp->dispatch->inq_dim(ncid,dimid,name,NULL);
}
/**
Find the length of a dimension.
The length for the unlimited dimension, if any, is the number of
records written so far.
@param ncid NetCDF or group ID, from a previous call to nc_open(),
nc_create(), nc_def_grp(), or associated inquiry functions such as
nc_inq_ncid().
@param dimid Dimension ID, from a previous call to nc_inq_dimid()
or nc_def_dim().
@param lenp Pointer where the length will be stored.
@return ::NC_NOERR No error.
@return ::NC_EBADID Not a valid ID.
@return ::NC_EBADDIM Invalid dimension ID or name.
@section nc_inq_dim_example3 Example
Here is an example using nc_inq_dim() to determine the length of a
dimension named lat, and the name and current maximum length of the
unlimited dimension for an existing netCDF dataset named foo.nc:
@code
#include <netcdf.h>
...
int status, ncid, latid, recid;
size_t latlength, recs;
char recname[NC_MAX_NAME+1];
...
status = nc_open("foo.nc", NC_NOWRITE, &ncid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_unlimdim(ncid, &recid);
if (status != NC_NOERR) handle_error(status);
...
status = nc_inq_dimid(ncid, "lat", &latid);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dimlen(ncid, latid, &latlength);
if (status != NC_NOERR) handle_error(status);
status = nc_inq_dim(ncid, recid, recname, &recs);
if (status != NC_NOERR) handle_error(status);
@endcode
@author <NAME>, <NAME>, <NAME>, <NAME>, <NAME>
*/
int
nc_inq_dimlen(int ncid, int dimid, size_t *lenp)
{
NC* ncp;
int stat = NC_check_id(ncid, &ncp);
if(stat != NC_NOERR) return stat;
if(lenp == NULL) return NC_NOERR;
TRACE(nc_inq_dimlen);
return ncp->dispatch->inq_dim(ncid,dimid,NULL,lenp);
}
/** @} */
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that the Copy() Action symlink soft-copy support works.
"""
import os
import TestSCons
import SCons.Defaults
SCons.Defaults.DefaultEnvironment( tools = [] )
test = TestSCons.TestSCons()
if not test.platform_has_symlink():
test.skip_test('No os.symlink() method, no symlinks to test.\n')
filelinkToCopy = 'filelinkToCopy'
fileToLink = 'file.in'
fileContents = 'stuff n things\n'
dirToLink = 'dir'
dirlinkToCopy = 'dirlinkToCopy'
treeToLink = 'tree'
treelinkToCopy = 'treelinkToCopy'
badToLink = 'None' # do not write this item
badlinkToCopy = 'badlinkToCopy'
relToLink = os.path.join( treeToLink, fileToLink )
rellinkToCopy = 'relLinkToCopy'
test.symlink( fileToLink, filelinkToCopy )
test.symlink( dirToLink, dirlinkToCopy )
test.symlink( treeToLink, treelinkToCopy )
test.symlink( badToLink, badlinkToCopy )
test.symlink( relToLink, rellinkToCopy )
test.write( fileToLink, fileContents )
test.subdir( dirToLink )
test.subdir( treeToLink )
test.write( relToLink, fileContents )
sconstructPath = 'SConstruct'
sconscriptPath = os.path.join( treeToLink, 'SConscript' )
test.write( sconstructPath,
"""\
import SCons.Defaults
SCons.Defaults.DefaultEnvironment( tools = [] )
Execute( Copy( 'F1', '%(filelinkToCopy)s', False ) )
Execute( Copy( 'L1', '%(filelinkToCopy)s' ) )
Execute( Copy( 'L2', '%(filelinkToCopy)s', True ) )
Execute( Copy( 'D1', '%(dirlinkToCopy)s', False ) )
Execute( Copy( 'L3', '%(dirlinkToCopy)s' ) )
Execute( Copy( 'L4', '%(dirlinkToCopy)s', True ) )
Execute( Copy( 'T1', '%(treelinkToCopy)s', False ) )
Execute( Copy( 'L5', '%(treelinkToCopy)s' ) )
Execute( Copy( 'L6', '%(treelinkToCopy)s', True ) )
Execute( Copy( 'Fails', '%(badlinkToCopy)s', False ) )
Execute( Copy( 'L7', '%(badlinkToCopy)s' ) )
Execute( Copy( 'L8', '%(badlinkToCopy)s', True ) )
SConscript( '%(sconscriptPath)s' )
"""
% locals()
)
relLinkCopyPath = os.path.join( '..', rellinkToCopy )
test.write( sconscriptPath,
"""\
Execute( Copy( 'F2', '%(relLinkCopyPath)s', False ) )
Execute( Copy( 'L9', '%(relLinkCopyPath)s' ) )
Execute( Copy( 'L10', '%(relLinkCopyPath)s', True ) )
"""
% locals()
)
test.must_exist( sconstructPath )
test.must_exist( sconscriptPath )
test.must_exist( fileToLink )
test.must_exist( filelinkToCopy )
test.must_exist( dirlinkToCopy )
test.must_exist( treelinkToCopy )
test.must_not_exist( badToLink )
test.must_exist( badlinkToCopy )
test.must_exist( rellinkToCopy )
expectStdout = test.wrap_stdout(
read_str =
'''\
Copy("F1", "%(filelinkToCopy)s")
Copy("L1", "%(filelinkToCopy)s")
Copy("L2", "%(filelinkToCopy)s")
Copy("D1", "%(dirlinkToCopy)s")
Copy("L3", "%(dirlinkToCopy)s")
Copy("L4", "%(dirlinkToCopy)s")
Copy("T1", "%(treelinkToCopy)s")
Copy("L5", "%(treelinkToCopy)s")
Copy("L6", "%(treelinkToCopy)s")
Copy("Fails", "%(badlinkToCopy)s")
Copy("L7", "%(badlinkToCopy)s")
Copy("L8", "%(badlinkToCopy)s")
Copy("F2", "%(relLinkCopyPath)s")
Copy("L9", "%(relLinkCopyPath)s")
Copy("L10", "%(relLinkCopyPath)s")
''' % locals(),
build_str =
'''\
scons: `.' is up to date.
'''
)
expectStderr = \
'''\
scons: *** %s: No such file or directory
''' % os.path.join( os.getcwd(), badToLink )
test.run( stdout = expectStdout, stderr = expectStderr, status = None )
F2 = os.path.join( treeToLink, 'F2' )
L9 = os.path.join( treeToLink, 'L9' )
L10 = os.path.join( treeToLink, 'L10' )
test.must_exist('D1')
test.must_exist('F1')
test.must_exist( F2 )
test.must_exist('L2')
test.must_exist('L3')
test.must_exist('L4')
test.must_exist('L5')
test.must_exist('L6')
test.must_exist('L7')
test.must_exist('L8')
test.must_exist( L9 )
test.must_exist( L10 )
test.must_exist('T1')
test.must_not_exist( 'Fails' )
test.must_match( fileToLink, fileContents )
test.must_match( 'F1', fileContents )
test.must_match( F2 , fileContents )
test.must_match( 'L1', fileContents )
test.must_match( 'L2', fileContents )
test.must_match( os.path.join( treeToLink, fileToLink ), fileContents )
test.fail_test( condition=os.path.islink('D1') )
test.fail_test( condition=os.path.islink('F1') )
test.fail_test( condition=os.path.islink( F2 ) )
test.fail_test( condition=os.path.islink('T1') )
test.fail_test( condition=(not os.path.isdir('D1')) )
test.fail_test( condition=(not os.path.isfile('F1')) )
test.fail_test( condition=(not os.path.isdir('T1')) )
test.fail_test( condition=(not os.path.islink('L1')) )
test.fail_test( condition=(not os.path.islink('L2')) )
test.fail_test( condition=(not os.path.islink('L3')) )
test.fail_test( condition=(not os.path.islink('L4')) )
test.fail_test( condition=(not os.path.islink('L5')) )
test.fail_test( condition=(not os.path.islink('L6')) )
test.fail_test( condition=(not os.path.islink('L7')) )
test.fail_test( condition=(not os.path.islink('L8')) )
test.fail_test( condition=(not os.path.islink( L9 )) )
test.fail_test( condition=(not os.path.islink( L10 )) )
test.fail_test( condition=(os.path.exists('L7')) )
test.fail_test( condition=(os.path.exists('L8')) )
test.fail_test( condition=(os.path.exists( L9 )) )
test.fail_test( condition=(os.path.exists( L10 )) )
test.fail_test( condition=(os.readlink(filelinkToCopy) != os.readlink('L1')) )
test.fail_test( condition=(os.readlink(filelinkToCopy) != os.readlink('L2')) )
test.fail_test( condition=(os.readlink(dirlinkToCopy) != os.readlink('L3')) )
test.fail_test( condition=(os.readlink(dirlinkToCopy) != os.readlink('L4')) )
test.fail_test( condition=(os.readlink(treelinkToCopy) != os.readlink('L5')) )
test.fail_test( condition=(os.readlink(treelinkToCopy) != os.readlink('L6')) )
test.fail_test( condition=(os.readlink(badlinkToCopy) != os.readlink('L7')) )
test.fail_test( condition=(os.readlink(badlinkToCopy) != os.readlink('L8')) )
test.fail_test( condition=(os.readlink(rellinkToCopy) != os.readlink( L9 )) )
test.fail_test( condition=(os.readlink(rellinkToCopy) != os.readlink( L10 )) )
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| 2,748 |
647 | <reponame>shixiongjing/DeepRobust
import numpy as np
# ---------------------attack config------------------------#
attack_params = {
"FGSM_MNIST": {
'epsilon': 0.2,
'order': np.inf,
'clip_max': None,
'clip_min': None
},
"PGD_CIFAR10": {
'epsilon': 0.1,
'clip_max': 1.0,
'clip_min': 0.0,
'print_process': True
},
"LBFGS_MNIST": {
'epsilon': 1e-4,
'maxiter': 20,
'clip_max': 1,
'clip_min': 0,
'class_num': 10
},
"CW_MNIST": {
'confidence': 1e-4,
'clip_max': 1,
'clip_min': 0,
'max_iterations': 1000,
'initial_const': 1e-2,
'binary_search_steps': 5,
'learning_rate': 5e-3,
'abort_early': True,
}
}
#-----------defense(Adversarial training) config------------#
defense_params = {
"PGDtraining_MNIST":{
'save_dir': "./defense_model",
'save_model': True,
'save_name' : "mnist_pgdtraining_0.3.pt",
'epsilon' : 0.3,
'epoch_num' : 80,
'lr' : 0.01
},
"FGSMtraining_MNIST":{
'save_dir': "./defense_model",
'save_model': True,
'save_name' : "mnist_fgsmtraining_0.2.pt",
'epsilon' : 0.2,
'epoch_num' : 50,
'lr_train' : 0.001
},
"FAST_MNIST":{
'save_dir': "./defense_model",
'save_model': True,
'save_name' : "fast_mnist_0.3.pt",
'epsilon' : 0.3,
'epoch_num' : 50,
'lr_train' : 0.001
}
}
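# Illustrative sketch (not part of the original config): these dictionaries are
# meant to be unpacked as keyword arguments for the matching attack or defense.
# The import path and generate() signature below are assumptions about the
# DeepRobust API and may need adjusting for the installed version.
def _example_pgd_attack(model, images, labels):
    from deeprobust.image.attack.pgd import PGD  # assumed module path
    attack = PGD(model, device="cuda")            # assumed constructor signature
    return attack.generate(images, labels, **attack_params["PGD_CIFAR10"])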
| 771 |
392 | <reponame>Da-Krause/settlers-remake
/*******************************************************************************
* Copyright (c) 2016 - 2018
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package jsettlers.main.swing.lookandfeel.ui;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import javax.swing.BorderFactory;
import javax.swing.JComponent;
import javax.swing.plaf.basic.BasicLabelUI;
import jsettlers.main.swing.lookandfeel.ui.img.UiImageLoader;
/**
* Label UI, with stone background
*
* @author <NAME>
*
*/
public class StoneBackgroundLabel extends BasicLabelUI {
/**
* Foreground color
*/
private final Color foregroundColor;
/**
* Background Image
*/
private final BufferedImage backgroundImage = UiImageLoader.get("ui_static_info_bg/ui_static-info-bg.png");
/**
* border scale factor
*/
private static final float BORDER_FACTOR = 0.3f;
/**
* Border images for the label frame
*/
private final BufferedImage[] BORDER = {
UiImageLoader.get("ui_static_info_bg/ui_static-info-corner-upper-left.png"),
UiImageLoader.get("ui_static_info_bg/ui_static-info_border-top.png"),
UiImageLoader.get("ui_static_info_bg/ui_static-info-corner-upper-right.png"),
UiImageLoader.get("ui_static_info_bg/ui_static-info_border-right.png"),
UiImageLoader.get("ui_static_info_bg/ui_static-info-corner-bottom-right.png"),
UiImageLoader.get("ui_static_info_bg/ui_static-info_border-bottom.png"),
UiImageLoader.get("ui_static_info_bg/ui_static-info-corner-bottom_left.png"),
UiImageLoader.get("ui_static_info_bg/ui_static-info_border-left.png")
};
/**
* Fixed padding; the border image is too big
*/
private int paddingTop = (int) (BORDER[1/* top */].getHeight() * BORDER_FACTOR);
/**
* Fixed padding; the border image is too big
*/
private int paddingBottom = (int) (BORDER[5/* bottom */].getHeight() * BORDER_FACTOR);
/**
* Constructor
*
* @param foregroundColor
* Foreground color of the Label
*/
public StoneBackgroundLabel(Color foregroundColor) {
this.foregroundColor = foregroundColor;
}
@Override
public void installUI(JComponent c) {
super.installUI(c);
c.setForeground(foregroundColor);
c.setFont(UIDefaults.FONT);
c.setBorder(BorderFactory.createEmptyBorder(paddingTop, (int) (BORDER[7/* left */].getWidth() * BORDER_FACTOR),
paddingBottom, (int) (BORDER[3/* right */].getWidth() * BORDER_FACTOR)));
c.setOpaque(false);
}
@Override
public Dimension getPreferredSize(JComponent c) {
Dimension size = super.getPreferredSize(c);
size.width += (BORDER[3/* right */].getWidth() + BORDER[7/* left */].getWidth()) * BORDER_FACTOR;
// size.height += paddingTop + paddingBottom;
return size;
}
@Override
public void paint(Graphics g, JComponent c) {
// Repeat the graphic as often as needed; it is designed so that its start tiles seamlessly onto its end
for (int i = 0; i < c.getWidth(); i += backgroundImage.getWidth()) {
g.drawImage(backgroundImage, i, 0, c);
}
BorderHelper.drawBorder(g, c, BORDER, BORDER_FACTOR);
super.paint(g, c);
}
}
| 1,384 |
22,688 | /******************************************************************************
* Copyright 2019 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "modules/canbus/vehicle/ge3/protocol/scu_3_303.h"
#include "gtest/gtest.h"
namespace apollo {
namespace canbus {
namespace ge3 {
class Scu3303Test : public ::testing::Test {
public:
virtual void SetUp() {}
};
TEST_F(Scu3303Test, reset) {
Scu3303 scu3303;
int32_t length = 8;
ChassisDetail chassis_detail;
uint8_t bytes[8] = {0x41, 0x42, 0x43, 0x61, 0x62, 0x30, 0x31, 0x32};
scu3303.Parse(bytes, length, &chassis_detail);
EXPECT_DOUBLE_EQ(chassis_detail.ge3().scu_3_303().vin08(), 'A'); // 65
EXPECT_DOUBLE_EQ(chassis_detail.ge3().scu_3_303().vin09(), 'B'); // 66
EXPECT_DOUBLE_EQ(chassis_detail.ge3().scu_3_303().vin10(), 67); // 'C'
EXPECT_DOUBLE_EQ(chassis_detail.ge3().scu_3_303().vin11(), 97); // 'a'
EXPECT_DOUBLE_EQ(chassis_detail.ge3().scu_3_303().vin12(), 'b'); // 98
EXPECT_DOUBLE_EQ(chassis_detail.ge3().scu_3_303().vin13(), 48); // '0'
EXPECT_DOUBLE_EQ(chassis_detail.ge3().scu_3_303().vin14(), '1'); // 49
EXPECT_DOUBLE_EQ(chassis_detail.ge3().scu_3_303().vin15(), '2'); // 50
}
} // namespace ge3
} // namespace canbus
} // namespace apollo
| 677 |
3,372 | <reponame>rbalamohan/aws-sdk-java
/*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.codeguruprofiler.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Notification medium for users to get alerted for events that occur in application profile. We support SNS topic as a
* notification channel.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/codeguruprofiler-2019-07-18/Channel" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class Channel implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* List of publishers for different type of events that may be detected in an application from the profile. Anomaly
* detection is the only event publisher in Profiler.
* </p>
*/
private java.util.List<String> eventPublishers;
/**
* <p>
* Unique identifier for each <code>Channel</code> in the notification configuration of a Profiling Group. A random
* UUID for channelId is used when adding a channel to the notification configuration if not specified in the
* request.
* </p>
*/
private String id;
/**
* <p>
* Unique arn of the resource to be used for notifications. We support a valid SNS topic arn as a channel uri.
* </p>
*/
private String uri;
/**
* <p>
* List of publishers for different type of events that may be detected in an application from the profile. Anomaly
* detection is the only event publisher in Profiler.
* </p>
*
* @return List of publishers for different type of events that may be detected in an application from the profile.
* Anomaly detection is the only event publisher in Profiler.
* @see EventPublisher
*/
public java.util.List<String> getEventPublishers() {
return eventPublishers;
}
/**
* <p>
* List of publishers for different type of events that may be detected in an application from the profile. Anomaly
* detection is the only event publisher in Profiler.
* </p>
*
* @param eventPublishers
* List of publishers for different type of events that may be detected in an application from the profile.
* Anomaly detection is the only event publisher in Profiler.
* @see EventPublisher
*/
public void setEventPublishers(java.util.Collection<String> eventPublishers) {
if (eventPublishers == null) {
this.eventPublishers = null;
return;
}
this.eventPublishers = new java.util.ArrayList<String>(eventPublishers);
}
/**
* <p>
* List of publishers for different type of events that may be detected in an application from the profile. Anomaly
* detection is the only event publisher in Profiler.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setEventPublishers(java.util.Collection)} or {@link #withEventPublishers(java.util.Collection)} if you
* want to override the existing values.
* </p>
*
* @param eventPublishers
* List of publishers for different type of events that may be detected in an application from the profile.
* Anomaly detection is the only event publisher in Profiler.
* @return Returns a reference to this object so that method calls can be chained together.
* @see EventPublisher
*/
public Channel withEventPublishers(String... eventPublishers) {
if (this.eventPublishers == null) {
setEventPublishers(new java.util.ArrayList<String>(eventPublishers.length));
}
for (String ele : eventPublishers) {
this.eventPublishers.add(ele);
}
return this;
}
/**
* <p>
* List of publishers for different type of events that may be detected in an application from the profile. Anomaly
* detection is the only event publisher in Profiler.
* </p>
*
* @param eventPublishers
* List of publishers for different type of events that may be detected in an application from the profile.
* Anomaly detection is the only event publisher in Profiler.
* @return Returns a reference to this object so that method calls can be chained together.
* @see EventPublisher
*/
public Channel withEventPublishers(java.util.Collection<String> eventPublishers) {
setEventPublishers(eventPublishers);
return this;
}
/**
* <p>
* List of publishers for different type of events that may be detected in an application from the profile. Anomaly
* detection is the only event publisher in Profiler.
* </p>
*
* @param eventPublishers
* List of publishers for different type of events that may be detected in an application from the profile.
* Anomaly detection is the only event publisher in Profiler.
* @return Returns a reference to this object so that method calls can be chained together.
* @see EventPublisher
*/
public Channel withEventPublishers(EventPublisher... eventPublishers) {
java.util.ArrayList<String> eventPublishersCopy = new java.util.ArrayList<String>(eventPublishers.length);
for (EventPublisher value : eventPublishers) {
eventPublishersCopy.add(value.toString());
}
if (getEventPublishers() == null) {
setEventPublishers(eventPublishersCopy);
} else {
getEventPublishers().addAll(eventPublishersCopy);
}
return this;
}
/**
* <p>
* Unique identifier for each <code>Channel</code> in the notification configuration of a Profiling Group. A random
* UUID for channelId is used when adding a channel to the notification configuration if not specified in the
* request.
* </p>
*
* @param id
* Unique identifier for each <code>Channel</code> in the notification configuration of a Profiling Group. A
* random UUID for channelId is used when adding a channel to the notification configuration if not specified
* in the request.
*/
public void setId(String id) {
this.id = id;
}
/**
* <p>
* Unique identifier for each <code>Channel</code> in the notification configuration of a Profiling Group. A random
* UUID for channelId is used when adding a channel to the notification configuration if not specified in the
* request.
* </p>
*
* @return Unique identifier for each <code>Channel</code> in the notification configuration of a Profiling Group. A
* random UUID for channelId is used when adding a channel to the notification configuration if not
* specified in the request.
*/
public String getId() {
return this.id;
}
/**
* <p>
* Unique identifier for each <code>Channel</code> in the notification configuration of a Profiling Group. A random
* UUID for channelId is used when adding a channel to the notification configuration if not specified in the
* request.
* </p>
*
* @param id
* Unique identifier for each <code>Channel</code> in the notification configuration of a Profiling Group. A
* random UUID for channelId is used when adding a channel to the notification configuration if not specified
* in the request.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Channel withId(String id) {
setId(id);
return this;
}
/**
* <p>
* Unique arn of the resource to be used for notifications. We support a valid SNS topic arn as a channel uri.
* </p>
*
* @param uri
* Unique arn of the resource to be used for notifications. We support a valid SNS topic arn as a channel
* uri.
*/
public void setUri(String uri) {
this.uri = uri;
}
/**
* <p>
* Unique arn of the resource to be used for notifications. We support a valid SNS topic arn as a channel uri.
* </p>
*
* @return Unique arn of the resource to be used for notifications. We support a valid SNS topic arn as a channel
* uri.
*/
public String getUri() {
return this.uri;
}
/**
* <p>
* Unique arn of the resource to be used for notifications. We support a valid SNS topic arn as a channel uri.
* </p>
*
* @param uri
* Unique arn of the resource to be used for notifications. We support a valid SNS topic arn as a channel
* uri.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public Channel withUri(String uri) {
setUri(uri);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getEventPublishers() != null)
sb.append("EventPublishers: ").append(getEventPublishers()).append(",");
if (getId() != null)
sb.append("Id: ").append(getId()).append(",");
if (getUri() != null)
sb.append("Uri: ").append(getUri());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof Channel == false)
return false;
Channel other = (Channel) obj;
if (other.getEventPublishers() == null ^ this.getEventPublishers() == null)
return false;
if (other.getEventPublishers() != null && other.getEventPublishers().equals(this.getEventPublishers()) == false)
return false;
if (other.getId() == null ^ this.getId() == null)
return false;
if (other.getId() != null && other.getId().equals(this.getId()) == false)
return false;
if (other.getUri() == null ^ this.getUri() == null)
return false;
if (other.getUri() != null && other.getUri().equals(this.getUri()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getEventPublishers() == null) ? 0 : getEventPublishers().hashCode());
hashCode = prime * hashCode + ((getId() == null) ? 0 : getId().hashCode());
hashCode = prime * hashCode + ((getUri() == null) ? 0 : getUri().hashCode());
return hashCode;
}
@Override
public Channel clone() {
try {
return (Channel) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.codeguruprofiler.model.transform.ChannelMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
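/*
 * Illustrative usage sketch (not part of the generated source): assembling a
 * Channel for a profiling group's notification configuration. The
 * EventPublisher constant name and the SNS topic ARN below are assumptions
 * shown only for context.
 *
 *   Channel channel = new Channel()
 *           .withId(java.util.UUID.randomUUID().toString())
 *           .withUri("arn:aws:sns:us-east-1:123456789012:example-topic")
 *           .withEventPublishers(EventPublisher.AnomalyDetection);
 */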
| 4,476 |
338 | package com.tvd12.ezyfoxserver.testing.service;
import org.testng.annotations.Test;
public class EzyResponseSerializerTest {
@Test
public void test() {
}
}
| 68 |
14,668 | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/public/cpp/shelf_config.h"
#include "ash/shelf/shelf.h"
#include "ash/shelf/shelf_view.h"
#include "ash/shelf/shelf_view_test_api.h"
#include "ash/shell.h"
#include "base/command_line.h"
#include "base/run_loop.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_list.h"
#include "chrome/browser/ui/browser_window.h"
#include "chrome/test/base/in_process_browser_test.h"
#include "chrome/test/base/interactive_test_utils.h"
#include "components/keep_alive_registry/keep_alive_types.h"
#include "components/keep_alive_registry/scoped_keep_alive.h"
#include "content/public/test/browser_test.h"
#include "ui/compositor/scoped_animation_duration_scale_mode.h"
#include "ui/events/test/event_generator.h"
#include "ui/views/view.h"
#include "ui/views/view_model.h"
#include "ui/wm/core/coordinate_conversion.h"
namespace {
// Get the bounds of the browser shortcut item in screen space.
gfx::Rect GetChromeIconBoundsInScreen(aura::Window* root) {
ash::ShelfView* shelf_view =
ash::Shelf::ForWindow(root)->GetShelfViewForTesting();
const views::ViewModel* view_model = shelf_view->view_model_for_test();
EXPECT_EQ(1, view_model->view_size());
gfx::Rect bounds = view_model->view_at(0)->GetBoundsInScreen();
return bounds;
}
// Ensure animations progress to give the shelf button a non-empty size.
void EnsureShelfInitialization() {
aura::Window* root = ash::Shell::GetPrimaryRootWindow();
ash::ShelfView* shelf_view =
ash::Shelf::ForWindow(root)->GetShelfViewForTesting();
ash::ShelfViewTestAPI(shelf_view).RunMessageLoopUntilAnimationsDone();
ASSERT_GT(GetChromeIconBoundsInScreen(root).height(), 0);
}
// Launch a new browser window by left-clicking the browser shortcut item.
void OpenBrowserUsingShelfOnRootWindow(aura::Window* root) {
ui::test::EventGenerator generator(root);
gfx::Point center = GetChromeIconBoundsInScreen(root).CenterPoint();
generator.MoveMouseTo(center);
generator.ClickLeftButton();
}
class WindowSizerTest : public InProcessBrowserTest {
public:
WindowSizerTest() = default;
WindowSizerTest(const WindowSizerTest&) = delete;
WindowSizerTest& operator=(const WindowSizerTest&) = delete;
~WindowSizerTest() override = default;
void SetUpCommandLine(base::CommandLine* command_line) override {
// Make screens sufficiently wide to host 2 browsers side by side.
command_line->AppendSwitchASCII("ash-host-window-bounds",
"800x600,801+0-800x600");
}
ui::ScopedAnimationDurationScaleMode zero_duration_{
ui::ScopedAnimationDurationScaleMode::ZERO_DURATION};
};
// TODO(crbug.com/1038342): Test is flaky on sanitizers.
#if defined(ADDRESS_SANITIZER) || defined(MEMORY_SANITIZER)
#define MAYBE_OpenBrowserUsingShelfItem DISABLED_OpenBrowserUsingShelfItem
#else
#define MAYBE_OpenBrowserUsingShelfItem OpenBrowserUsingShelfItem
#endif
IN_PROC_BROWSER_TEST_F(WindowSizerTest, MAYBE_OpenBrowserUsingShelfItem) {
// Don't shutdown when closing the last browser window.
ScopedKeepAlive test_keep_alive(KeepAliveOrigin::BROWSER_PROCESS_CHROMEOS,
KeepAliveRestartOption::DISABLED);
aura::Window::Windows root_windows = ash::Shell::GetAllRootWindows();
BrowserList* browser_list = BrowserList::GetInstance();
EnsureShelfInitialization();
EXPECT_EQ(1u, browser_list->size());
// Close the browser window so that clicking the icon creates a new window.
CloseBrowserSynchronously(browser_list->get(0));
EXPECT_EQ(0u, browser_list->size());
EXPECT_EQ(root_windows[0], ash::Shell::GetRootWindowForNewWindows());
OpenBrowserUsingShelfOnRootWindow(root_windows[1]);
// A new browser window should be opened on the 2nd display.
display::Screen* screen = display::Screen::GetScreen();
std::pair<display::Display, display::Display> displays =
ui_test_utils::GetDisplays(screen);
EXPECT_EQ(1u, browser_list->size());
EXPECT_EQ(displays.second.id(),
screen
->GetDisplayNearestWindow(
browser_list->get(0)->window()->GetNativeWindow())
.id());
EXPECT_EQ(root_windows[1], ash::Shell::GetRootWindowForNewWindows());
// Close the browser window so that clicking the icon creates a new window.
CloseBrowserSynchronously(browser_list->get(0));
EXPECT_EQ(0u, browser_list->size());
OpenBrowserUsingShelfOnRootWindow(root_windows[0]);
// A new browser window should be opened on the 1st display.
EXPECT_EQ(1u, browser_list->size());
EXPECT_EQ(displays.first.id(),
screen
->GetDisplayNearestWindow(
browser_list->get(0)->window()->GetNativeWindow())
.id());
EXPECT_EQ(root_windows[0], ash::Shell::GetRootWindowForNewWindows());
}
} // namespace
| 1,794 |
1,091 | #!/usr/bin/python
from onosnet import run
from regionabc import RegionABC
run( RegionABC() )
| 31 |
5,957 | <filename>sample/src/main/java/com/yarolegovich/discretescrollview/sample/shop/Shop.java
package com.yarolegovich.discretescrollview.sample.shop;
import android.content.Context;
import android.content.SharedPreferences;
import com.yarolegovich.discretescrollview.sample.App;
import com.yarolegovich.discretescrollview.sample.R;
import java.util.Arrays;
import java.util.List;
/**
* Created by yarolegovich on 07.03.2017.
*/
public class Shop {
private static final String STORAGE = "shop";
public static Shop get() {
return new Shop();
}
private SharedPreferences storage;
private Shop() {
storage = App.getInstance().getSharedPreferences(STORAGE, Context.MODE_PRIVATE);
}
public List<Item> getData() {
return Arrays.asList(
new Item(1, "Everyday Candle", "$12.00 USD", R.drawable.shop1),
new Item(2, "Small Porcelain Bowl", "$50.00 USD", R.drawable.shop2),
new Item(3, "Favourite Board", "$265.00 USD", R.drawable.shop3),
new Item(4, "Earthenware Bowl", "$18.00 USD", R.drawable.shop4),
new Item(5, "Porcelain Dessert Plate", "$36.00 USD", R.drawable.shop5),
new Item(6, "Detailed Rolling Pin", "$145.00 USD", R.drawable.shop6));
}
public boolean isRated(int itemId) {
return storage.getBoolean(String.valueOf(itemId), false);
}
public void setRated(int itemId, boolean isRated) {
storage.edit().putBoolean(String.valueOf(itemId), isRated).apply();
}
}
| 631 |
587 | /*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.auraframework.impl.expression.parser;
import static org.mockito.Mockito.mock;
import java.util.Collection;
import java.util.List;
import org.auraframework.impl.expression.AuraExpressionBuilder;
import org.auraframework.impl.expression.ExpressionFunctions;
import org.auraframework.service.ContextService;
import org.auraframework.service.DefinitionService;
import org.auraframework.throwable.quickfix.InvalidExpressionException;
import org.auraframework.util.test.annotation.UnAdaptableTest;
import org.auraframework.util.test.util.UnitTestCase;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.mockito.Mockito;
import com.google.common.collect.Lists;
/**
* Shotgun tests for special chars. Tokens shouldn't contain these chars. Remaining special chars have their own tests
* in ExpressionParserTest.
*/
@UnAdaptableTest
@RunWith(Parameterized.class)
public class ExpressionParserSpecialCharactersTest extends UnitTestCase {
private static final String validChars = "oO0_";
private static final char[] otherChars = "`=[]',~@#^&{}|:\"天".toCharArray();
private static final String[] errorMsgStartsWith = { "unexpected token: '`'",
"expecting '=', found 'o'",
"unexpected token: a left square bracket",
"unexpected token: a right square bracket",
"expecting ''', found '<EOF>'",
"unexpected token: a comma",
"unexpected token: '~'",
"unexpected token: '@'",
"unexpected token: '#'",
"unexpected token: '^'",
"expecting '&', found 'o'",
"unclosed brace",
"unexpected token: '}'",
"expecting '|', found 'o'",
"unexpected token: a colon",
"unexpected token: '\"'",
"unexpected token: '天'",
};
private static final String[] errorMsgEndsWith = { "unexpected token: '`'",
"expecting '=', found '<EOF>'",
"unexpected end of expression",
"unexpected token: a right square bracket at column 5 of expression: oO0_]",
"expecting ''', found '<EOF>'",
"unexpected token: ','",
"unexpected token: '~'",
"unexpected token: '@'",
"unexpected token: '#'",
"unexpected token: '^'",
"expecting '&', found '<EOF>'",
"unexpected token: '{'",
"unexpected token: '}'",
"expecting '|', found '<EOF>'",
"unexpected token: ':'",
"unexpected token: '\"'",
"unexpected token: '天'",
};
private static final String[] errorMsgContains = { "unexpected token: '`'",
"expecting '=', found 'o'",
"expecting a positive integer, found 'oO0_'",
"unexpected token: a right square bracket at column 5 of expression: oO0_]oO0_",
"expecting ''', found '<EOF>'",
"unexpected token: ','",
"unexpected token: '~'",
"unexpected token: '@'",
"unexpected token: '#'",
"unexpected token: '^'",
"expecting '&', found 'o'",
"unexpected token: '{'",
"unexpected token: '}'",
"expecting '|', found 'o'",
"unexpected token: ':' ",
"unexpected token: '\"'",
"unexpected token: '天'",
};
private String expression;
private String msgStartsWith;
public ExpressionParserSpecialCharactersTest(String name, String expression, String errorMsg) {
super();
this.expression = expression;
this.msgStartsWith = errorMsg;
}
@Parameters(name = "{0}")
public static Collection<Object> generateTestParameters() {
List<Object> parameters = Lists.newLinkedList();
for (int i = 0; i < otherChars.length; i++) {
char c = otherChars[i];
String hex = String.format("%#x", (int) c);
parameters.add(new Object[]{"TokenStartsWith" + hex + "ThrowsQuickFixException", c + validChars,
errorMsgStartsWith[i]});
parameters.add(new Object[]{"TokenEndsWith" + hex + "ThrowsQuickFixException", validChars + c,
errorMsgEndsWith[i]});
parameters.add(new Object[]{"TokenContains" + hex + "ThrowsQuickFixException",
validChars + c + validChars, errorMsgContains[i]});
}
return parameters;
}
@Test
public void test() throws Exception {
final ContextService contextService = mock(ContextService.class);
final DefinitionService definitionService = mock(DefinitionService.class);
try {
new AuraExpressionBuilder(new ExpressionFunctions(contextService, definitionService)).buildExpression(expression, null);
fail("No exception thrown for <" + expression + ">. Expected InvalidExpressionException");
} catch (InvalidExpressionException e) {
assertTrue("Unexpected error message trying to parse <" + expression + ">. Expected to start with: "
+ msgStartsWith + ". But got: " + e.getMessage(), e.getMessage().startsWith(msgStartsWith));
}
Mockito.verifyZeroInteractions(contextService, definitionService);
}
}
| 2,515 |
473 | <reponame>pingjuiliao/cb-multios
/*
Author: <NAME> <<EMAIL>>
Copyright (c) 2015 Cromulence LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
extern "C"
{
#include "cgc_stdint.h"
#include "cgc_stdio.h"
#include "cgc_stdlib.h"
#include "cgc_string.h"
}
#include "cgc_dma.h"
#include "cgc_mmu.h"
CDMA::CDMA( )
{
for ( uint32_t i = 0; i < 256; i++ )
m_pPeripherals[i] = NULL;
}
CDMA::~CDMA( )
{
}
void CDMA::InitDMA( void )
{
for ( uint32_t i = 0; i < MAX_DMA_WORKERS; i++ )
m_dmaWorkers[i].ClearWorker();
m_workerLast = 0;
m_workerCount = 0;
}
bool CDMA::AddPeripheral( uint8_t deviceID, CPeripheral *pPeripheral )
{
if ( !pPeripheral )
return (false);
if ( m_pPeripherals[deviceID] )
return (false);
m_pPeripherals[deviceID] = pPeripheral;
return (true);
}
bool CDMA::InitReadWorker( uint8_t deviceID, uint16_t address, uint16_t length )
{
uint32_t workerCur = m_workerCount++;
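// Note: the unpatched check below uses '>' rather than '>=', so workerCur can
// reach MAX_DMA_WORKERS and StartWorker() then writes one element past the end
// of m_dmaWorkers (assuming the array holds MAX_DMA_WORKERS entries).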
#ifdef PATCHED_1
if ( workerCur >= MAX_DMA_WORKERS )
#else
if ( workerCur > MAX_DMA_WORKERS )
#endif
return (false);
m_dmaWorkers[workerCur].StartWorker( DMA_WORKER_READ, deviceID, address, length );
return (true);
}
bool CDMA::InitWriteWorker( uint8_t deviceID, uint16_t address, uint16_t length )
{
uint32_t workerCur = m_workerCount++;
#ifdef PATCHED_1
if ( workerCur >= MAX_DMA_WORKERS )
#else
if ( workerCur > MAX_DMA_WORKERS )
#endif
return (false);
m_dmaWorkers[workerCur].StartWorker( DMA_WORKER_WRITE, deviceID, address, length );
return (true);
}
void CDMA::ServiceDMA( CMMU *pMMU )
{
if ( !pMMU )
return;
uint8_t dmaTransferCount = 0;
for ( dmaTransferCount = 0; dmaTransferCount < m_workerCount; dmaTransferCount++ )
{
// Only allow a maximum number of transfers
if ( dmaTransferCount >= MAX_DMA_TRANSFER_COUNT )
break;
// Round robin each DMA worker
uint8_t workerCur = m_workerLast;
// Find a worker to service
bool bWorkersAvailable = false;
uint32_t tryCount = 0;
for ( tryCount = 0; tryCount < MAX_DMA_WORKERS; tryCount++ )
{
if ( workerCur >= MAX_DMA_WORKERS )
workerCur = 0;
if ( m_dmaWorkers[workerCur].IsWorkerAvailable() )
{
bWorkersAvailable = true;
break;
}
workerCur++;
}
if ( bWorkersAvailable )
{
uint8_t deviceID = m_dmaWorkers[workerCur].GetDeviceID();
uint16_t address = m_dmaWorkers[workerCur].GetAddress();
uint16_t length = m_dmaWorkers[workerCur].GetLength();
uint16_t position = m_dmaWorkers[workerCur].GetPosition();
eDMAWorkerAction actionType = m_dmaWorkers[workerCur].GetType();
uint32_t dmaAmount = (length - position);
if ( dmaAmount > 4 )
dmaAmount = 4;
if ( dmaAmount == 0 )
{
// Remove worker
m_workerCount--;
// End transfer
m_dmaWorkers[workerCur].ClearWorker();
}
else if ( m_pPeripherals[deviceID] )
{
uint8_t readValue[4];
if ( actionType == DMA_WORKER_READ )
{
if ( m_pPeripherals[deviceID]->Read( readValue, dmaAmount ) )
pMMU->WriteDMA( address+position, readValue, dmaAmount );
}
else
{
if ( pMMU->ReadDMA( address+position, readValue, dmaAmount ) )
m_pPeripherals[deviceID]->Write( readValue, dmaAmount );
}
if ( position+dmaAmount >= length )
{
// Remove worker
m_workerCount--;
// End transfer
m_dmaWorkers[workerCur].ClearWorker();
}
else
m_dmaWorkers[workerCur].AdvancePosition( dmaAmount );
}
else
{
m_dmaWorkers[workerCur].ClearWorker();
m_workerCount--;
}
// Advance to next worker
m_workerLast = workerCur++;
}
else
{
m_workerLast = 0;
break;
}
}
}
| 1,838 |
2,151 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/extensions/extension_message_bubble_bridge.h"
#include <utility>
#include "chrome/browser/extensions/extension_message_bubble_controller.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/grit/generated_resources.h"
#include "components/vector_icons/vector_icons.h"
#include "extensions/browser/extension_registry.h"
#include "ui/base/l10n/l10n_util.h"
ExtensionMessageBubbleBridge::ExtensionMessageBubbleBridge(
std::unique_ptr<extensions::ExtensionMessageBubbleController> controller)
: controller_(std::move(controller)) {}
ExtensionMessageBubbleBridge::~ExtensionMessageBubbleBridge() {}
bool ExtensionMessageBubbleBridge::ShouldShow() {
return controller_->ShouldShow();
}
bool ExtensionMessageBubbleBridge::ShouldCloseOnDeactivate() {
return controller_->CloseOnDeactivate();
}
bool ExtensionMessageBubbleBridge::IsPolicyIndicationNeeded(
const extensions::Extension* extension) {
return controller_->delegate()->SupportsPolicyIndicator() &&
extensions::Manifest::IsPolicyLocation(extension->location());
}
base::string16 ExtensionMessageBubbleBridge::GetHeadingText() {
return controller_->delegate()->GetTitle();
}
base::string16 ExtensionMessageBubbleBridge::GetBodyText(
bool anchored_to_action) {
return controller_->delegate()->GetMessageBody(
anchored_to_action, controller_->GetExtensionIdList().size());
}
base::string16 ExtensionMessageBubbleBridge::GetItemListText() {
return controller_->GetExtensionListForDisplay();
}
base::string16 ExtensionMessageBubbleBridge::GetActionButtonText() {
const extensions::ExtensionIdList& list = controller_->GetExtensionIdList();
DCHECK(!list.empty());
// Normally, the extension is enabled, but this might not be the case (such as
// for the SuspiciousExtensionBubbleDelegate, which warns the user about
// disabled extensions).
const extensions::Extension* extension =
extensions::ExtensionRegistry::Get(controller_->profile())
->GetExtensionById(list[0],
extensions::ExtensionRegistry::EVERYTHING);
DCHECK(extension);
// An empty string is returned so that we don't display the button prompting
// to remove policy-installed extensions.
if (IsPolicyIndicationNeeded(extension))
return base::string16();
return controller_->delegate()->GetActionButtonLabel();
}
base::string16 ExtensionMessageBubbleBridge::GetDismissButtonText() {
return controller_->delegate()->GetDismissButtonLabel();
}
ui::DialogButton ExtensionMessageBubbleBridge::GetDefaultDialogButton() {
// TODO(estade): we should set a default where appropriate. See
// http://crbug.com/751279
return ui::DIALOG_BUTTON_NONE;
}
std::string ExtensionMessageBubbleBridge::GetAnchorActionId() {
return controller_->GetExtensionIdList().size() == 1u
? controller_->GetExtensionIdList()[0]
: std::string();
}
void ExtensionMessageBubbleBridge::OnBubbleShown(
const base::Closure& close_bubble_callback) {
controller_->OnShown(close_bubble_callback);
}
void ExtensionMessageBubbleBridge::OnBubbleClosed(CloseAction action) {
switch (action) {
case CLOSE_DISMISS_USER_ACTION:
case CLOSE_DISMISS_DEACTIVATION: {
bool close_by_deactivate = action == CLOSE_DISMISS_DEACTIVATION;
controller_->OnBubbleDismiss(close_by_deactivate);
break;
}
case CLOSE_EXECUTE:
controller_->OnBubbleAction();
break;
case CLOSE_LEARN_MORE:
controller_->OnLinkClicked();
break;
}
}
std::unique_ptr<ToolbarActionsBarBubbleDelegate::ExtraViewInfo>
ExtensionMessageBubbleBridge::GetExtraViewInfo() {
const extensions::ExtensionIdList& list = controller_->GetExtensionIdList();
int include_mask = controller_->delegate()->ShouldLimitToEnabledExtensions() ?
extensions::ExtensionRegistry::ENABLED :
extensions::ExtensionRegistry::EVERYTHING;
const extensions::Extension* extension =
extensions::ExtensionRegistry::Get(controller_->profile())
->GetExtensionById(list[0], include_mask);
DCHECK(extension);
std::unique_ptr<ExtraViewInfo> extra_view_info =
std::make_unique<ExtraViewInfo>();
if (IsPolicyIndicationNeeded(extension)) {
DCHECK_EQ(1u, list.size());
extra_view_info->resource = &vector_icons::kBusinessIcon;
extra_view_info->text =
l10n_util::GetStringUTF16(IDS_EXTENSIONS_INSTALLED_BY_ADMIN);
extra_view_info->is_learn_more = false;
} else {
extra_view_info->text = controller_->delegate()->GetLearnMoreLabel();
extra_view_info->is_learn_more = true;
}
return extra_view_info;
}
| 1,622 |
778 | <gh_stars>100-1000
// | / |
// ' / __| _` | __| _ \ __|
// . \ | ( | | ( |\__ `
// _|\_\_| \__,_|\__|\___/ ____/
// Multi-Physics
//
// License: BSD License
// Kratos default license: kratos/license.txt
//
// Main authors: Author <NAME> and <NAME>
//
//#define GRADPN_FORM
// System includes
// External includes
// Project includes
#include "includes/define.h"
#include "custom_elements/fluid_2dGLS_expl.h"
#include "utilities/math_utils.h"
#include "ULF_application.h"
#include "utilities/geometry_utilities.h"
namespace Kratos
{
//THIS IS A COMPRESSIBLE FLUID ELEMENT, WITH GLS STABILIZATION, RUNGE-KUTTA Momentum Time integration, FRACTIONAL STEP
//************************************************************************************
//************************************************************************************
Fluid2DGLS_expl::Fluid2DGLS_expl(IndexType NewId, GeometryType::Pointer pGeometry)
: Element(NewId, pGeometry)
{
//DO NOT ADD DOFS HERE!!!
}
//************************************************************************************
//************************************************************************************
Fluid2DGLS_expl::Fluid2DGLS_expl(IndexType NewId, GeometryType::Pointer pGeometry, PropertiesType::Pointer pProperties)
: Element(NewId, pGeometry, pProperties)
{
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::CalculateLumpedMass()
{
//note that for the compressible case, rho will also be a variable
const double rho0 = GetGeometry()[0].FastGetSolutionStepValue(DENSITY);
const double rho1 = GetGeometry()[1].FastGetSolutionStepValue(DENSITY);
const double rho2 = GetGeometry()[2].FastGetSolutionStepValue(DENSITY);
//double Area;
//GeometryUtils::CalculateGeometryData(GetGeometry(),msDN_DX,msN,Area);
double Area = GeometryUtils::CalculateVolume2D(GetGeometry());
double lumped_mass_fac = Area * 0.33333333333333333;
//filling in the diagonal of the lumped mass matrix, (later I can change it to vector...)
GetGeometry()[0].FastGetSolutionStepValue(NODAL_MASS)+=lumped_mass_fac*rho0;
GetGeometry()[1].FastGetSolutionStepValue(NODAL_MASS)+=lumped_mass_fac*rho1;
GetGeometry()[2].FastGetSolutionStepValue(NODAL_MASS)+=lumped_mass_fac*rho2;
}
//************************************************************************************
//************************************************************************************
Element::Pointer Fluid2DGLS_expl::Create(IndexType NewId, NodesArrayType const& ThisNodes, PropertiesType::Pointer pProperties) const
{
KRATOS_TRY
return Element::Pointer(new Fluid2DGLS_expl(NewId, GetGeometry().Create(ThisNodes), pProperties));
KRATOS_CATCH("");
}
Fluid2DGLS_expl::~Fluid2DGLS_expl()
{
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::CalculateRightHandSide(VectorType& rRightHandSideVector, ProcessInfo& rCurrentProcessInfo)
{
KRATOS_THROW_ERROR(std::logic_error, "method not implemented" , "");
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::InitializeSolutionStep(ProcessInfo& CurrentProcessInfo)
{
//KRATOS_WATCH("Empty function for this element")
//KRATOS_THROW_ERROR(std::logic_error, "method not implemented" , "");
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::CalculateGalerkinMomentumResidual(VectorType& GalerkinRHS)
{
KRATOS_TRY
///////////////////////NECESSARY LOCALS///////////////////////////////////////////
boost::numeric::ublas::bounded_matrix<double,3,3> msWorkMatrix = ZeroMatrix(3,3);
boost::numeric::ublas::bounded_matrix<double,3,2> msDN_DX = ZeroMatrix(3,2);
array_1d<double,3> msN = ZeroVector(3); //dimension = number of nodes
boost::numeric::ublas::bounded_matrix<double,6,2> msShapeFunc = ZeroMatrix(6,2);
boost::numeric::ublas::bounded_matrix<double,2,6> msConvOp = ZeroMatrix(2,6);
boost::numeric::ublas::bounded_matrix<double,6,6> msAuxMat = ZeroMatrix(6,6);
array_1d<double,6> msAuxVec = ZeroVector(6); //dimension = number of nodes
array_1d<double,2> ms_adv_vel = ZeroVector(2); //dimension coincides with space dimension
array_1d<double,2> ms_vel_gauss = ZeroVector(2); //dimension coincides with space dimension
///////////////////////////////////////////////////////////////////////////////////
//first we compute the force term and pressure gradient terms:
//getting data for the given geometry
double Area;
GeometryUtils::CalculateGeometryData(GetGeometry(),msDN_DX,msN,Area);
//if (Area<0.0000000001) KRATOS_THROW_ERROR(std::logic_error, "method not implemented" , "");
//getting the velocity on the nodes and other necessary variabless
const array_1d<double,3> vel0 = GetGeometry()[0].FastGetSolutionStepValue(VELOCITY);
double p_n0 = GetGeometry()[0].FastGetSolutionStepValue(PRESSURE,1);
const double nu0 = GetGeometry()[0].FastGetSolutionStepValue(VISCOSITY);
const double rho0 = GetGeometry()[0].FastGetSolutionStepValue(DENSITY);
const array_1d<double,3> vel1 = GetGeometry()[1].FastGetSolutionStepValue(VELOCITY);
double p_n1 = GetGeometry()[1].FastGetSolutionStepValue(PRESSURE,1);//
const double nu1 = GetGeometry()[1].FastGetSolutionStepValue(VISCOSITY);
const double rho1 = GetGeometry()[1].FastGetSolutionStepValue(DENSITY);
const array_1d<double,3>& vel2 = GetGeometry()[2].FastGetSolutionStepValue(VELOCITY);
double p_n2 = GetGeometry()[2].FastGetSolutionStepValue(PRESSURE,1);//
const double nu2 = GetGeometry()[2].FastGetSolutionStepValue(VISCOSITY);
const double rho2 = GetGeometry()[2].FastGetSolutionStepValue(DENSITY);
//====================================================================
//calculating viscosity and density
double nu = 0.333333333333333333333333*(nu0 + nu1 + nu2 );
double density = 0.3333333333333333333333*(rho0 + rho1 + rho2 );
//VISCOUS CONTRIBUTION
// += Laplacian * nu; --> ONE GAUSS POINT
//msWorkMatrix is used now to store the element laplacian 3x3
/*
noalias(msWorkMatrix) = Area*density*nu * prod(msDN_DX,trans(msDN_DX));
//x comp
GalerkinRHS[0]=-1.0*(msWorkMatrix(0,0)*vel0[0]+msWorkMatrix(0,1)*vel1[0]+msWorkMatrix(0,2)*vel2[0]);
//y comp
GalerkinRHS[1]=-1.0*(msWorkMatrix(0,0)*vel0[1]+msWorkMatrix(0,1)*vel1[1]+msWorkMatrix(0,2)*vel2[1]);
//x comp
GalerkinRHS[2]=-1.0*(msWorkMatrix(1,0)*vel0[0]+msWorkMatrix(1,1)*vel1[0]+msWorkMatrix(1,2)*vel2[0]);
//y comp
GalerkinRHS[3]=-1.0*(msWorkMatrix(1,0)*vel0[1]+msWorkMatrix(1,1)*vel1[1]+msWorkMatrix(1,2)*vel2[1]);
//x comp
GalerkinRHS[4]=-1.0*(msWorkMatrix(2,0)*vel0[0]+msWorkMatrix(2,1)*vel1[0]+msWorkMatrix(2,2)*vel2[0]);
//y comp
GalerkinRHS[5]=-1.0*(msWorkMatrix(2,0)*vel0[1]+msWorkMatrix(2,1)*vel1[1]+msWorkMatrix(2,2)*vel2[1]);
*/
/// ANOTHER FORM OF VISCOUS MATRIX CONTRIBUTION
////////////////////////////////////////
boost::numeric::ublas::bounded_matrix<double,3,6> msB;
boost::numeric::ublas::bounded_matrix<double,3,3> ms_constitutive_matrix;
boost::numeric::ublas::bounded_matrix<double,3,6> ms_temp;
boost::numeric::ublas::bounded_matrix<double,6,6> DampingMatrix;
unsigned int NumberOfNodes = GetGeometry().size();
unsigned int dim = GetGeometry().WorkingSpaceDimension();
//VISCOUS CONTRIBUTION TO THE STIFFNESS MATRIX
//filling matrix B
for (unsigned int i=0; i<NumberOfNodes; i++)
{
unsigned int index = dim*i;
msB(0,index+0)=msDN_DX(i,0);
msB(0,index+1)= 0.0;
msB(1,index+0)=0.0;
msB(1,index+1)= msDN_DX(i,1);
msB(2,index+0)= msDN_DX(i,1);
msB(2,index+1)= msDN_DX(i,0);
}
//constitutive tensor
ms_constitutive_matrix(0,0) = (4.0/3.0)*nu*density;
ms_constitutive_matrix(0,1) = -2.0/3.0*nu*density;
ms_constitutive_matrix(0,2) = 0.0;
ms_constitutive_matrix(1,0) = -2.0/3.0*nu*density;
ms_constitutive_matrix(1,1) = 4.0/3.0*nu*density;
ms_constitutive_matrix(1,2) = 0.0;
ms_constitutive_matrix(2,0) = 0.0;
ms_constitutive_matrix(2,1) = 0.0;
ms_constitutive_matrix(2,2) = nu*density;
//calculating viscous contributions
ms_temp = prod( ms_constitutive_matrix , msB);
noalias(DampingMatrix) = prod( trans(msB) , ms_temp);
DampingMatrix *= Area;
msAuxVec[0]=vel0[0];
msAuxVec[1]=vel0[1];
msAuxVec[2]=vel1[0];
msAuxVec[3]=vel1[1];
msAuxVec[4]=vel2[0];
msAuxVec[5]=vel2[1];
noalias(GalerkinRHS)=-prod(DampingMatrix, msAuxVec);
//and now we add the pressure gradient and the force term
//external forces (component)
const array_1d<double,3> body_force = 0.333333333333333*(GetGeometry()[0].FastGetSolutionStepValue(BODY_FORCE)+
GetGeometry()[1].FastGetSolutionStepValue(BODY_FORCE) +
GetGeometry()[2].FastGetSolutionStepValue(BODY_FORCE));
unsigned int number_of_nodes=3;
for(unsigned int i = 0; i<number_of_nodes; i++)
{
//f=A*N_I*b, N_I=0.33333333 for 1 Gauss point
GalerkinRHS[i*2] += body_force[0]* density * Area * 0.3333333333333;
GalerkinRHS[i*2+1] += body_force[1] * density * Area * 0.3333333333333;
}
//Now we shall add the Gp term(integrated by parts)
double p_avg = msN[0]* p_n0 + msN[1] * p_n1 + msN[2] * p_n2;
p_avg *= Area;
//p_avg *= 0.0;
GalerkinRHS[0] += msDN_DX(0, 0) * p_avg;
GalerkinRHS[1] += msDN_DX(0, 1) * p_avg;
GalerkinRHS[2] += msDN_DX(1, 0) * p_avg;
GalerkinRHS[3] += msDN_DX(1, 1) * p_avg;
GalerkinRHS[4] += msDN_DX(2, 0) * p_avg;
GalerkinRHS[5] += msDN_DX(2, 1) * p_avg;
array_1d<double,3>& rhs0 = GetGeometry()[0].FastGetSolutionStepValue(VELOCITY_OLD_OLD);
rhs0[0] += GalerkinRHS[0];
rhs0[1] += GalerkinRHS[1];
rhs0[2] = 0.0;
array_1d<double,3>& rhs1 = GetGeometry()[1].FastGetSolutionStepValue(VELOCITY_OLD_OLD);
rhs1[0] += GalerkinRHS[2];
rhs1[1] += GalerkinRHS[3];
rhs1[2] = 0.0;
array_1d<double,3>& rhs2 = GetGeometry()[2].FastGetSolutionStepValue(VELOCITY_OLD_OLD);
rhs2[0] += GalerkinRHS[4];
rhs2[1] += GalerkinRHS[5];
rhs2[2] = 0.0;
KRATOS_CATCH("")
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::CalculateRHSVector(VectorType& Galerkin_RHS, double& dt)
{
KRATOS_TRY
KRATOS_CATCH("")
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::CalculateLocalSystem(MatrixType& rLeftHandSideMatrix, VectorType& rRightHandSideVector, ProcessInfo& rCurrentProcessInfo)
{
KRATOS_TRY
///////////////////////NECESSARY LOCALS///////////////////////////////////////////
boost::numeric::ublas::bounded_matrix<double,3,3> msWorkMatrix = ZeroMatrix(3,3);
boost::numeric::ublas::bounded_matrix<double,3,2> msDN_DX = ZeroMatrix(3,2);
array_1d<double,3> msN = ZeroVector(3); //dimension = number of nodes
boost::numeric::ublas::bounded_matrix<double,6,2> msShapeFunc = ZeroMatrix(6,2);
boost::numeric::ublas::bounded_matrix<double,2,6> msConvOp = ZeroMatrix(2,6);
boost::numeric::ublas::bounded_matrix<double,6,6> msAuxMat = ZeroMatrix(6,6);
array_1d<double,6> msAuxVec = ZeroVector(6); //dimension = number of nodes
array_1d<double,2> ms_adv_vel = ZeroVector(2); //dimension coincides with space dimension
array_1d<double,2> ms_vel_gauss = ZeroVector(2); //dimension coincides with space dimension
array_1d<double,3> ms_temp_vec_np = ZeroVector(3); //dimension = number of nodes
array_1d<double,3> ms_aux0 = ZeroVector(3); //dimension = number of nodes
array_1d<double,3> ms_aux1 = ZeroVector(3); //dimension = number of nodes
///////////////////////////////////////////////////////////////////////////////////
if(rRightHandSideVector.size() != 3)
{
rLeftHandSideMatrix.resize(3,3,false);
rRightHandSideVector.resize(3,false);
}
double dt = rCurrentProcessInfo[DELTA_TIME];
//fractional velocity, calculated in the first fractional step, is saved inside the "VELOCITY" variable
//so u_n is VELOCITY,1 and u_n-1 is VELOCITY,2
const array_1d<double,3>& fv0 = GetGeometry()[0].FastGetSolutionStepValue(VELOCITY);
const array_1d<double,3>& fv0_old = GetGeometry()[0].FastGetSolutionStepValue(VELOCITY,1);
const double nu0 = GetGeometry()[0].FastGetSolutionStepValue(VISCOSITY);
const double rho0 = GetGeometry()[0].FastGetSolutionStepValue(DENSITY);
double p0 = GetGeometry()[0].FastGetSolutionStepValue(PRESSURE);
double p0_old = GetGeometry()[0].FastGetSolutionStepValue(PRESSURE,1);
const array_1d<double,3>& ff0 = GetGeometry()[0].FastGetSolutionStepValue(BODY_FORCE);
const array_1d<double,3>& fv1 = GetGeometry()[1].FastGetSolutionStepValue(VELOCITY);
const array_1d<double,3>& fv1_old = GetGeometry()[1].FastGetSolutionStepValue(VELOCITY,1);
const double nu1 = GetGeometry()[1].FastGetSolutionStepValue(VISCOSITY);
const double rho1 = GetGeometry()[1].FastGetSolutionStepValue(DENSITY);
double p1 = GetGeometry()[1].FastGetSolutionStepValue(PRESSURE);
double p1_old = GetGeometry()[1].FastGetSolutionStepValue(PRESSURE,1);
const array_1d<double,3>& ff1 = GetGeometry()[1].FastGetSolutionStepValue(BODY_FORCE);
const array_1d<double,3>& fv2 = GetGeometry()[2].FastGetSolutionStepValue(VELOCITY);
const array_1d<double,3>& fv2_old = GetGeometry()[2].FastGetSolutionStepValue(VELOCITY,1);
const double nu2 = GetGeometry()[2].FastGetSolutionStepValue(VISCOSITY);
const double rho2 = GetGeometry()[2].FastGetSolutionStepValue(DENSITY);
double p2 = GetGeometry()[2].FastGetSolutionStepValue(PRESSURE);
double p2_old = GetGeometry()[2].FastGetSolutionStepValue(PRESSURE,1);
//old iteration can be used if we want to iterate between 1st and 2nd fractional steps
const array_1d<double,3>& ff2 = GetGeometry()[2].FastGetSolutionStepValue(BODY_FORCE);
double one_sixth = 0.166666666666667;
//in msAuxVec we store the velocity, (not the frac step vel, but u_n, the one that enters the stabilization)
msAuxVec[0]=fv0[0];
msAuxVec[1]=fv0[1];
msAuxVec[2]=fv1[0];
msAuxVec[3]=fv1[1];
msAuxVec[4]=fv2[0];
msAuxVec[5]=fv2[1];
//getting data for the given geometry
double Area;
GeometryUtils::CalculateGeometryData(GetGeometry(),msDN_DX,msN,Area);
//calculating average density and viscosity
double nu = 0.33333333333333*(nu0 + nu1 + nu2 );
double density = 0.33333333333333*(rho0 + rho1 + rho2 );
//ms_vel_gauss[i] = msN[0]*(fv0[i]) + msN[1]*(fv1[i]) + msN[2]*(fv2[i]);
//but with one integration N=0.333333333
ms_vel_gauss[0] = 0.33333333333333*(fv0[0]+fv1[0]+fv2[0]);
ms_vel_gauss[1] = 0.33333333333333*(fv0[1]+fv1[1]+fv2[1]);
//calculating parameter tau (saved internally to each element)
double h = sqrt(2.00*Area);
double norm_u = ms_vel_gauss[0]*ms_vel_gauss[0] + ms_vel_gauss[1]*ms_vel_gauss[1];
norm_u = sqrt(norm_u);
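//tau = 1/(4*nu/h^2 + 1/dt): intrinsic-time stabilization parameter (note that norm_u is computed above but does not enter this expression)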
double tau = 1.00 / ( 1.0 * 4.00*nu/(h*h) + 1.0/dt);
//AND NOW WE ADD THE RESPECTIVE CONTRIBUTIONS TO THE RHS AND LHS of THE SECOND FRAC STEP
//we use Backward Euler for this step, therefore the stabilization contribution to the RHS is +=Tau1*(gradQ, residual)
// and to the LHS is +=Tau1*(gradQ, gradP)
//laplacian term L = Dt * gradN * trans(gradN);
//stabilization term Spp = tau * gradN * trans(gradN);
//WATCH OUT for DIVISION with RHO - check if it changes or not in case of Momentum being the primary Variable
//
// msWorkMatrix stores the element laplacian
//
noalias(msWorkMatrix)=prod(msDN_DX,trans(msDN_DX));
noalias(rLeftHandSideMatrix) = (one_sixth*dt + tau) * Area*msWorkMatrix;
//////////////////////////////////////////////////////////
//////////// AND NOW RHS //////////////////
//////////////////////////////////////////////////////////
//Dirichlet contribution (that is: LHS*p_new)
ms_temp_vec_np[0] = p0;
ms_temp_vec_np[1] = p1;
ms_temp_vec_np[2] = p2;
//LHS is already multiplied by AREA
noalias(rRightHandSideVector) = -prod(rLeftHandSideMatrix,ms_temp_vec_np);
//NOW RHS-=dt L p_old
//changing the meaning of temp_vec_np
ms_temp_vec_np[0] = p0_old;
ms_temp_vec_np[1] = p1_old;
ms_temp_vec_np[2] = p2_old;
noalias(rRightHandSideVector) += one_sixth*Area*dt* (prod(msWorkMatrix,ms_temp_vec_np)) ;
//here we have the Du_tila term
double Gaux;
Gaux = msDN_DX(0,0)*fv0[0] + msDN_DX(0,1)*fv0[1];
Gaux += msDN_DX(1,0)*fv1[0] + msDN_DX(1,1)*fv1[1];
Gaux += msDN_DX(2,0)*fv2[0] + msDN_DX(2,1)*fv2[1];
//RHS+=-Dv
rRightHandSideVector[0] -= density*Area*Gaux * msN[0];
rRightHandSideVector[1] -= density*Area*Gaux * msN[1];
rRightHandSideVector[2] -= density*Area*Gaux * msN[2];
//RHS = +tau*nablaN*f, we reuse aux
//ms_aux0 stores ff_gauss;
ms_aux0=0.33333333333333333*(ff0+ff1+ff2);
//ms_aux1 - is the product of: (nabla q, f)
ms_aux1[0]=msDN_DX(0,0)*ms_aux0[0]+msDN_DX(0,1)*ms_aux0[1];
ms_aux1[1]=msDN_DX(1,0)*ms_aux0[0]+msDN_DX(1,1)*ms_aux0[1];
ms_aux1[2]=msDN_DX(2,0)*ms_aux0[0]+msDN_DX(2,1)*ms_aux0[1];
rRightHandSideVector += tau*density*Area*ms_aux1;
//RHS += -tau*nablaN*du_gausspoint/dt
//we reuse ms_vel_gauss to store the accelerations( (u_n - u_n-1)/dt)
ms_vel_gauss[0]=0.33333333333*(fv0[0]+fv1[0]+fv2[0]-fv0_old[0]-fv1_old[0]-fv2_old[0])/dt;
ms_vel_gauss[1]=0.33333333333*(fv0[1]+fv1[1]+fv2[1]-fv0_old[1]-fv1_old[1]-fv2_old[1])/dt;
//and now we reuse ms_aux1
ms_aux1=prod(msDN_DX,ms_vel_gauss);
noalias(rRightHandSideVector) -= tau*density*Area*ms_aux1;
KRATOS_CATCH("")
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::FinalFractionalStep(const ProcessInfo& rCurrentProcessInfo)
{
KRATOS_TRY
KRATOS_THROW_ERROR(std::logic_error, "METHOD NOT IMPLEMENTED inside the element. The final fractional step is done within the low_mach strategy." , "");
KRATOS_CATCH("")
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::Calculate(const Variable<double>& rVariable, double& Output, const ProcessInfo& rCurrentProcessInfo)
{
//if the Var is NODAL_MASS, we calculate the lumped mass
if(rVariable == NODAL_MASS)
{
CalculateLumpedMass();
}
else
KRATOS_THROW_ERROR(std::logic_error, "You are doing something wrong in FCT Calculate... of NODAL_MASS with wrong parameters.. " , "");
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::Calculate(const Variable<array_1d<double,3> >& rVariable, array_1d<double,3>& Output, const ProcessInfo& rCurrentProcessInfo)
{
if(rVariable == VELOCITY && Output[0]==1.0)
//we use "Output" as a switch between 1st Frac Step and last Frac Step(Correction Step)
{
//here the residual will be temporarily written
Vector TmpRhs(6);
// first we write the Galerkin contributions to the momentum residual
CalculateGalerkinMomentumResidual(TmpRhs);
//and now the stabilization terms added
double dt = rCurrentProcessInfo[DELTA_TIME];
CalculateRHSVector(TmpRhs, dt);
}
else if(rVariable == VELOCITY && Output[0]==2.0)
{
FinalFractionalStep(rCurrentProcessInfo);
}
else
{
KRATOS_THROW_ERROR(std::logic_error, "You are doing something wrong in your fractional step.... " , "");
}
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::EquationIdVector(EquationIdVectorType& rResult, ProcessInfo& CurrentProcessInfo)
{
KRATOS_TRY
unsigned int number_of_nodes = GetGeometry().PointsNumber();
if(rResult.size() != number_of_nodes)
rResult.resize(number_of_nodes,false);
for (unsigned int i=0; i<number_of_nodes; i++)
{
rResult[i] = GetGeometry()[i].GetDof(PRESSURE).EquationId();
}
KRATOS_CATCH("")
}
//************************************************************************************
//************************************************************************************
void Fluid2DGLS_expl::GetDofList(DofsVectorType& ElementalDofList,ProcessInfo& CurrentProcessInfo)
{
KRATOS_TRY
unsigned int number_of_nodes = GetGeometry().PointsNumber();
if(ElementalDofList.size() != number_of_nodes)
ElementalDofList.resize(number_of_nodes);
for (unsigned int i=0; i<number_of_nodes; i++)
{
ElementalDofList[i] = GetGeometry()[i].pGetDof(PRESSURE);
}
KRATOS_CATCH("");
}
} // Namespace Kratos
| 8,693 |
4,054 | <filename>ann_benchmark/src/tests/ann_benchmark/test_angular.py
# Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
import pytest
import sys
import os
import math
sys.path.insert(0, os.path.abspath("../../vespa/ann_benchmark"))
from vespa_ann_benchmark import DistanceMetric, HnswIndexParams, HnswIndex
class Fixture:
def __init__(self, normalize):
metric = DistanceMetric.InnerProduct if normalize else DistanceMetric.Angular
self.index = HnswIndex(2, HnswIndexParams(16, 200, metric, False), normalize)
self.index.set_vector(0, [1, 0])
self.index.set_vector(1, [10, 10])
def find(self, k, value):
return self.index.find_top_k(k, value, k + 200)
def run_test(self):
top = self.find(10, [1, 1])
assert [top[0][0], top[1][0]] == [0, 1]
# Allow some rounding errors
epsilon = 5e-8
assert abs((1 - top[0][1]) - math.sqrt(0.5)) < epsilon
assert abs((1 - top[1][1]) - 1) < epsilon
top2 = self.find(10, [0, 2])
# Result is not sorted by distance
assert [top2[0][0], top2[1][0]] == [0, 1]
assert abs((1 - top2[0][1]) - 0) < epsilon
assert abs((1 - top2[1][1]) - math.sqrt(0.5)) < epsilon
assert 1 == self.find(1, [1, 1])[0][0]
assert 0 == self.find(1, [1, -1])[0][0]
def test_find_angular():
f = Fixture(False)
f.run_test()
def test_find_angular_normalized():
f = Fixture(True)
f.run_test()
| 679 |
1,143 | <reponame>DotModus/pinball
# Copyright 2015, Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Validation tests for pinball_util tool."""
import collections
import mock
import unittest
from pinball.tools.pinball_util import Cat
from pinball.tools.pinball_util import Ls
from pinball.tools.pinball_util import Rm
from pinball.tools.pinball_util import Update
from pinball.master.thrift_lib.ttypes import GroupRequest
from pinball.master.thrift_lib.ttypes import GroupResponse
from pinball.master.thrift_lib.ttypes import ModifyRequest
from pinball.master.thrift_lib.ttypes import ModifyResponse
from pinball.master.thrift_lib.ttypes import Query
from pinball.master.thrift_lib.ttypes import QueryRequest
from pinball.master.thrift_lib.ttypes import QueryResponse
from pinball.master.thrift_lib.ttypes import Token
__author__ = '<NAME>'
__copyright__ = 'Copyright 2015, Pinterest, Inc.'
__credits__ = [__author__]
__license__ = 'Apache'
__version__ = '2.0'
class CatTestCase(unittest.TestCase):
def test_empty(self):
Options = collections.namedtuple('args', 'recursive command_args')
options = Options(recursive=False, command_args=['/some_path'])
command = Cat()
command.prepare(options)
client = mock.Mock()
response = QueryResponse()
client.query.return_value = response
output = command.execute(client, None)
query = Query(namePrefix='/some_path')
request = QueryRequest(queries=[query])
client.query.assert_called_once_with(request)
self.assertEqual('total 0\n', output)
def test_recursive(self):
Options = collections.namedtuple('args', 'recursive command_args')
options = Options(recursive=True, command_args=['/some_path'])
command = Cat()
command.prepare(options)
client = mock.Mock()
token = Token(version=10,
name='/some_path/some_token',
owner='some_owner',
expirationTime=10,
data='some_data')
query_response = QueryResponse(tokens=[[token]])
client.query.return_value = query_response
output = command.execute(client, None)
query = Query(namePrefix='/some_path')
query_request = QueryRequest(queries=[query])
client.query.assert_called_once_with(query_request)
self.assertEqual('total 1\nToken(version=10, owner=some_owner, '
'expirationTime=1970-01-01 00:00:10 UTC, '
'priority=0.000000, name=/some_path/some_token, '
'data=some_data)\n',
output)
class LsTestCase(unittest.TestCase):
def test_empty(self):
Options = collections.namedtuple('args', 'recursive command_args')
options = Options(recursive=False, command_args=['/some_path'])
command = Ls()
command.prepare(options)
client = mock.Mock()
response = GroupResponse()
client.group.return_value = response
output = command.execute(client, None)
request = GroupRequest(namePrefix='/some_path', groupSuffix='/')
client.group.assert_called_once_with(request)
self.assertEqual('total 0\n', output)
def test_recursive(self):
Options = collections.namedtuple('args', 'recursive command_args')
options = Options(recursive=True, command_args='/')
command = Ls()
command.prepare(options)
client = mock.Mock()
# Respond 10, and that should come in the output of the executed
# command.
response = GroupResponse(counts={'/some_path': 10})
client.group.return_value = response
output = command.execute(client, None)
self.assertEqual('total 1\n/some_path [10 token(s)]\n', output)
class RmTestCase(unittest.TestCase):
def test_empty(self):
Options = collections.namedtuple('args',
'recursive force command_args')
options = Options(recursive=False, force=True,
command_args=['/some_path'])
command = Rm()
command.prepare(options)
client = mock.Mock()
response = QueryResponse()
client.query.return_value = response
output = command.execute(client, None)
query = Query(namePrefix='/some_path')
request = QueryRequest(queries=[query])
client.query.assert_called_once_with(request)
self.assertEqual('no tokens found\nremoved 0 token(s)\n', output)
def test_recursive(self):
Options = collections.namedtuple('args',
'recursive force command_args')
options = Options(recursive=True, force=True,
command_args=['/some_path'])
command = Rm()
command.prepare(options)
client = mock.Mock()
token = Token(version=10,
name='/some_path/some_token',
owner='some_owner',
expirationTime=10,
data='some_data')
query_response = QueryResponse(tokens=[[token]])
client.query.return_value = query_response
modify_response = ModifyResponse()
client.modify.return_value = modify_response
output = command.execute(client, None)
query = Query(namePrefix='/some_path')
query_request = QueryRequest(queries=[query])
client.query.assert_called_once_with(query_request)
modify_request = ModifyRequest(deletes=[token])
client.modify.assert_called_once_with(modify_request)
self.assertEqual('removed 1 token(s)\n', output)
class UpdateTestCase(unittest.TestCase):
def test_insert(self):
Options = collections.namedtuple('args', 'name version owner '
'expiration_time priority data '
'command_args')
options = Options(name='/some_path/some_token',
version=None,
owner=None,
expiration_time=None,
priority=0,
data=None,
command_args=None)
command = Update()
command.prepare(options)
client = mock.Mock()
output_token = Token(version=10,
name='/some_path/some_token',
owner='some_owner',
expirationTime=10,
data='some_data')
response = ModifyResponse(updates=[output_token])
client.modify.return_value = response
output = command.execute(client, None)
input_token = Token(name='/some_path/some_token')
request = ModifyRequest(updates=[input_token])
client.modify.assert_called_once_with(request)
self.assertEqual('inserted %s\nupdated 1 token\n' % str(output_token),
output)
def test_update(self):
Options = collections.namedtuple('args', 'name version owner '
'expiration_time priority data '
'command_args')
options = Options(name='/some_path/some_token',
version=10,
owner='some_other_owner',
expiration_time=100,
priority=10,
data='some_other_data',
command_args=None)
command = Update()
command.prepare(options)
client = mock.Mock()
output_token = Token(version=11,
name='/some_path/some_token',
owner='some_other_owner',
expirationTime=100,
priority=10,
data='some_other_data')
response = ModifyResponse(updates=[output_token])
client.modify.return_value = response
output = command.execute(client, None)
input_token = Token(version=10,
name='/some_path/some_token',
owner='some_other_owner',
expirationTime=100,
priority=10,
data='some_other_data')
request = ModifyRequest(updates=[input_token])
client.modify.assert_called_once_with(request)
self.assertEqual('updated %s\nupdated 1 token\n' % str(output_token),
output)
| 4,253 |
1,125 | // Copyright (c) Facebook, Inc. and its affiliates.
// All rights reserved.
//
// Copyright 2019 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.
#include <gtest/gtest.h>
#include <xnnpack/common.h>
#include <xnnpack/isa-checks.h>
#include <xnnpack/rmax.h>
#include "rmax-microkernel-tester.h"
#if XNN_ARCH_ARM || XNN_ARCH_ARM64
TEST(U8RMAX__NEON, n_lt_16) {
TEST_REQUIRES_ARM_NEON;
for (size_t n = 1; n < 16; n++) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__neon);
}
}
TEST(U8RMAX__NEON, n_eq_16) {
TEST_REQUIRES_ARM_NEON;
RMaxMicrokernelTester()
.n(16)
.Test(xnn_u8_rmax_ukernel__neon);
}
TEST(U8RMAX__NEON, n_div_16) {
TEST_REQUIRES_ARM_NEON;
for (size_t n = 16; n < 128; n += 16) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__neon);
}
}
TEST(U8RMAX__NEON, n_gt_16) {
TEST_REQUIRES_ARM_NEON;
for (size_t n = 17; n < 32; n++) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__neon);
}
}
#endif // XNN_ARCH_ARM || XNN_ARCH_ARM64
#if XNN_ARCH_X86 || XNN_ARCH_X86_64
TEST(U8RMAX__SSE2, n_lt_16) {
TEST_REQUIRES_X86_SSE2;
for (size_t n = 1; n < 16; n++) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__sse2);
}
}
TEST(U8RMAX__SSE2, n_eq_16) {
TEST_REQUIRES_X86_SSE2;
RMaxMicrokernelTester()
.n(16)
.Test(xnn_u8_rmax_ukernel__sse2);
}
TEST(U8RMAX__SSE2, n_div_16) {
TEST_REQUIRES_X86_SSE2;
for (size_t n = 16; n < 128; n += 16) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__sse2);
}
}
TEST(U8RMAX__SSE2, n_gt_16) {
TEST_REQUIRES_X86_SSE2;
for (size_t n = 17; n < 32; n++) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__sse2);
}
}
#endif // XNN_ARCH_X86 || XNN_ARCH_X86_64
TEST(U8RMAX__SCALAR, n_lt_2) {
for (size_t n = 1; n < 2; n++) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__scalar);
}
}
TEST(U8RMAX__SCALAR, n_eq_2) {
RMaxMicrokernelTester()
.n(2)
.Test(xnn_u8_rmax_ukernel__scalar);
}
TEST(U8RMAX__SCALAR, n_div_2) {
for (size_t n = 2; n < 16; n += 2) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__scalar);
}
}
TEST(U8RMAX__SCALAR, n_gt_2) {
for (size_t n = 3; n < 4; n++) {
RMaxMicrokernelTester()
.n(n)
.Test(xnn_u8_rmax_ukernel__scalar);
}
}
| 1,427 |
1,794 | <reponame>chauvu/AmpliGraph
# Copyright 2019-2021 The AmpliGraph Authors. All Rights Reserved.
#
# This file is Licensed under the Apache License, Version 2.0.
# A copy of the Licence is available in LICENCE, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
from .EmbeddingModel import EmbeddingModel
from .TransE import TransE
from .DistMult import DistMult
from .ComplEx import ComplEx
from .HolE import HolE
from .RandomBaseline import RandomBaseline
from .ConvKB import ConvKB
from .ConvE import ConvE
__all__ = ['EmbeddingModel', 'TransE', 'DistMult', 'ComplEx', 'HolE', 'ConvKB', 'ConvE', 'RandomBaseline']
| 209 |
1,144 | /*
* Copyright (C) 2010 <NAME> <<EMAIL>>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*
*/
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
#include <unistd.h> /* for unlink() */
#include <libgen.h>
#include <getopt.h> /* for getopt() */
#include <stdarg.h>
#include <errno.h>
#include <sys/stat.h>
#include "cyg_crc.h"
#if (__BYTE_ORDER == __BIG_ENDIAN)
# define HOST_TO_BE32(x) (x)
# define BE32_TO_HOST(x) (x)
# define HOST_TO_LE32(x) bswap_32(x)
# define LE32_TO_HOST(x) bswap_32(x)
#else
# define HOST_TO_BE32(x) bswap_32(x)
# define BE32_TO_HOST(x) bswap_32(x)
# define HOST_TO_LE32(x) (x)
# define LE32_TO_HOST(x) (x)
#endif
#define MAGIC_FIRMWARE 0x6d726966 /* 'firm' */
#define MAGIC_KERNEL 0x676d694b /* 'Kimg' */
#define MAGIC_ROOTFS 0x676d6952 /* 'Rimg' */
struct file_info {
char *file_name; /* name of the file */
uint32_t file_size; /* length of the file */
};
struct fw_header {
uint32_t magic;
uint32_t length;
uint32_t unk1;
uint32_t unk2;
} __attribute__ ((packed));
struct fw_tail {
uint32_t hw_id;
uint32_t crc;
} __attribute__ ((packed));
struct board_info {
char *id;
uint32_t hw_id;
uint32_t kernel_len;
uint32_t rootfs_len;
};
/*
* Globals
*/
static char *ofname;
static char *progname;
static char *board_id;
static struct board_info *board;
static struct file_info kernel_info;
static struct file_info rootfs_info;
static struct board_info boards[] = {
{
.id = "ZCN-1523H-2-8",
.hw_id = 0x66661523,
.kernel_len = 0x170000,
.rootfs_len = 0x610000,
}, {
.id = "ZCN-1523H-5-16",
.hw_id = 0x6615235A,
.kernel_len = 0x170000,
.rootfs_len = 0x610000,
}, {
/* terminating entry */
}
};
/*
* Message macros
*/
#define ERR(fmt, ...) do { \
fflush(0); \
fprintf(stderr, "[%s] *** error: " fmt "\n", \
progname, ## __VA_ARGS__ ); \
} while (0)
#define ERRS(fmt, ...) do { \
int save = errno; \
fflush(0); \
fprintf(stderr, "[%s] *** error: " fmt ": %s\n", \
progname, ## __VA_ARGS__, strerror(save)); \
} while (0)
#define DBG(fmt, ...) do { \
fprintf(stderr, "[%s] " fmt "\n", progname, ## __VA_ARGS__ ); \
} while (0)
static struct board_info *find_board(char *id)
{
struct board_info *ret;
struct board_info *board;
ret = NULL;
for (board = boards; board->id != NULL; board++){
if (strcasecmp(id, board->id) == 0) {
ret = board;
break;
}
};
return ret;
}
static void usage(int status)
{
FILE *stream = (status != EXIT_SUCCESS) ? stderr : stdout;
struct board_info *board;
fprintf(stream, "Usage: %s [OPTIONS...]\n", progname);
fprintf(stream,
"\n"
"Options:\n"
" -B <board> create image for the board specified with <board>\n"
" -k <file> read kernel image from the file <file>\n"
" -r <file> read rootfs image from the file <file>\n"
" -o <file> write output to the file <file>\n"
" -h show this screen\n"
);
exit(status);
}
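/*
 * Example invocation (illustrative only - the program name and image file names
 * below are assumptions, not taken from this source):
 *
 *   ./mkzcfw -B ZCN-1523H-2-8 -k kernel.bin -r rootfs.squashfs -o firmware.bin
 *
 * The kernel and rootfs images must fit within the per-board kernel_len and
 * rootfs_len limits enforced in check_options().
 */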
static int get_file_stat(struct file_info *fdata)
{
struct stat st;
int res;
if (fdata->file_name == NULL)
return 0;
res = stat(fdata->file_name, &st);
if (res){
ERRS("stat failed on %s", fdata->file_name);
return res;
}
fdata->file_size = st.st_size;
return 0;
}
static int read_to_buf(struct file_info *fdata, char *buf)
{
FILE *f;
int ret = EXIT_FAILURE;
f = fopen(fdata->file_name, "r");
if (f == NULL) {
ERRS("could not open \"%s\" for reading", fdata->file_name);
goto out;
}
errno = 0;
fread(buf, fdata->file_size, 1, f);
if (errno != 0) {
ERRS("unable to read from file \"%s\"", fdata->file_name);
goto out_close;
}
ret = EXIT_SUCCESS;
out_close:
fclose(f);
out:
return ret;
}
static int check_options(void)
{
int ret;
if (board_id == NULL) {
ERR("no board specified");
return -1;
}
board = find_board(board_id);
if (board == NULL) {
ERR("unknown/unsupported board id \"%s\"", board_id);
return -1;
}
if (kernel_info.file_name == NULL) {
ERR("no kernel image specified");
return -1;
}
ret = get_file_stat(&kernel_info);
if (ret)
return ret;
if (kernel_info.file_size > board->kernel_len) {
ERR("kernel image is too big");
return -1;
}
if (rootfs_info.file_name == NULL) {
ERR("no rootfs image specified");
return -1;
}
ret = get_file_stat(&rootfs_info);
if (ret)
return ret;
if (rootfs_info.file_size > board->rootfs_len) {
ERR("rootfs image is too big");
return -1;
}
if (ofname == NULL) {
ERR("no output file specified");
return -1;
}
return 0;
}
static int write_fw(char *data, int len)
{
FILE *f;
int ret = EXIT_FAILURE;
f = fopen(ofname, "w");
if (f == NULL) {
ERRS("could not open \"%s\" for writing", ofname);
goto out;
}
errno = 0;
fwrite(data, len, 1, f);
if (errno) {
ERRS("unable to write output file");
goto out_flush;
}
DBG("firmware file \"%s\" completed", ofname);
ret = EXIT_SUCCESS;
out_flush:
fflush(f);
fclose(f);
if (ret != EXIT_SUCCESS) {
unlink(ofname);
}
out:
return ret;
}
static int build_fw(void)
{
int buflen;
char *buf;
char *p;
int ret = EXIT_FAILURE;
int writelen = 0;
uint32_t crc;
struct fw_header *hdr;
struct fw_tail *tail;
buflen = 3 * sizeof(struct fw_header) +
kernel_info.file_size + rootfs_info.file_size +
3 * sizeof(struct fw_tail);
buf = malloc(buflen);
if (!buf) {
ERR("no memory for buffer\n");
goto out;
}
p = buf;
memset(p, 0, buflen);
/* fill firmware header */
hdr = (struct fw_header *) p;
hdr->magic = HOST_TO_LE32(MAGIC_FIRMWARE);
hdr->length = HOST_TO_LE32(buflen - sizeof(struct fw_header));
p += sizeof(struct fw_header);
/* fill kernel block header */
hdr = (struct fw_header *) p;
hdr->magic = HOST_TO_LE32(MAGIC_KERNEL);
hdr->length = HOST_TO_LE32(kernel_info.file_size +
sizeof(struct fw_tail));
p += sizeof(struct fw_header);
/* read kernel data */
ret = read_to_buf(&kernel_info, p);
if (ret)
goto out_free_buf;
/* fill firmware tail */
tail = (struct fw_tail *) (p + kernel_info.file_size);
tail->hw_id = HOST_TO_BE32(board->hw_id);
tail->crc = HOST_TO_BE32(cyg_crc32(p, kernel_info.file_size +
sizeof(struct fw_tail) - 4));
p += kernel_info.file_size + sizeof(struct fw_tail);
/* fill rootfs block header */
hdr = (struct fw_header *) p;
hdr->magic = HOST_TO_LE32(MAGIC_ROOTFS);
hdr->length = HOST_TO_LE32(rootfs_info.file_size +
sizeof(struct fw_tail));
p += sizeof(struct fw_header);
/* read rootfs data */
ret = read_to_buf(&rootfs_info, p);
if (ret)
goto out_free_buf;
/* fill firmware tail */
tail = (struct fw_tail *) (p + rootfs_info.file_size);
tail->hw_id = HOST_TO_BE32(board->hw_id);
tail->crc = HOST_TO_BE32(cyg_crc32(p, rootfs_info.file_size +
sizeof(struct fw_tail) - 4));
p += rootfs_info.file_size + sizeof(struct fw_tail);
/* fill firmware tail */
tail = (struct fw_tail *) p;
tail->hw_id = HOST_TO_BE32(board->hw_id);
tail->crc = HOST_TO_BE32(cyg_crc32(buf + sizeof(struct fw_header),
buflen - sizeof(struct fw_header) - 4));
ret = write_fw(buf, buflen);
if (ret)
goto out_free_buf;
ret = EXIT_SUCCESS;
out_free_buf:
free(buf);
out:
return ret;
}
int main(int argc, char *argv[])
{
int ret = EXIT_FAILURE;
int err;
FILE *outfile;
progname = basename(argv[0]);
while ( 1 ) {
int c;
c = getopt(argc, argv, "B:k:r:o:h");
if (c == -1)
break;
switch (c) {
case 'B':
board_id = optarg;
break;
case 'k':
kernel_info.file_name = optarg;
break;
case 'r':
rootfs_info.file_name = optarg;
break;
case 'o':
ofname = optarg;
break;
case 'h':
usage(EXIT_SUCCESS);
break;
default:
usage(EXIT_FAILURE);
break;
}
}
ret = check_options();
if (ret)
goto out;
ret = build_fw();
out:
return ret;
}
| 3,580 |
320 | <gh_stars>100-1000
# -*- coding: utf-8 -*-
from hachoir.wx.dialogs import file_save_dialog
import wx
class field_menu_t:
def __init__(self, parent, menu):
self.parent = parent
self.menu = menu
# forward this call because xrc doesn't allow menu
# subclassing (as of 2.6.3)
self.Bind = self.menu.Bind
def show_opts(self):
self.parent.PopupMenu(self.menu)
def ask_for_dump_file(self, title):
dump_dlog = file_save_dialog(title)
if wx.ID_OK == dump_dlog.ShowModal():
return dump_dlog.GetPath()
| 268 |
998 | <reponame>ADMTec/VoxelPlugin<gh_stars>100-1000
// Copyright 2021 Phyronnaz
#pragma once
#include "CoreMinimal.h"
#include "VoxelTools/Tools/VoxelToolBase.h"
#include "VoxelTrimTool.generated.h"
UCLASS()
class VOXEL_API UVoxelTrimTool : public UVoxelToolBase
{
GENERATED_BODY()
public:
UPROPERTY(Category = "Tool Preview Settings", EditAnywhere, BlueprintReadWrite, meta = (HideInPanel))
UMaterialInterface* ToolMaterial = nullptr;
public:
UPROPERTY(Category = "Tool Settings", EditAnywhere, BlueprintReadWrite, meta = (UIMin = "0", UIMax = "1"))
float Falloff = 0.5;
UPROPERTY(Category = "Tool Settings", EditAnywhere, BlueprintReadWrite, meta = (UIMin = "0", UIMax = "1"))
float Roughness = 0;
public:
UVoxelTrimTool();
//~ Begin UVoxelToolBase Interface
virtual void GetToolConfig(FVoxelToolBaseConfig& OutConfig) const override;
virtual void Tick() override;
virtual void UpdateRender(UMaterialInstanceDynamic* OverlayMaterialInstance, UMaterialInstanceDynamic* MeshMaterialInstance) override;
virtual FVoxelIntBoxWithValidity DoEdit() override;
//~ End UVoxelToolBase Interface
private:
FVector Position;
FVector Normal;
}; | 429 |
724 | <filename>services/metric-service/src/test/java/com/amazon/aws/partners/saasfactory/saasboost/ExampleConstants.java
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazon.aws.partners.saasfactory.saasboost;
public class ExampleConstants {
public static final int CLIENT_EXECUTION_TIMEOUT = 100000;
public static final String ATHENA_OUTPUT_BUCKET = "s3://some-test-bucket/query-results/"; //change the bucket name to match your environment
// This example demonstrates how to query a table with a CSV. For more information, see
//https://docs.aws.amazon.com/athena/latest/ug/work-with-data.html
public static final String ATHENA_SAMPLE_QUERY = "SELECT request_url,\n" +
" target_status_code,\n" +
" date_trunc('hour', (date_parse(time, '%Y-%m-%dT%H:%i:%s.%fZ'))) as time_hour,\n" +
" count(1) AS count,\n" +
" avg(target_processing_time) AS avg_time,\n" +
" max(target_processing_time) AS max_time\n" +
"FROM alb_logs\n" +
"where time > '2020-07-07T14'\n" +
" and target_status_code = '200'\n" +
"GROUP BY request_url,\n " +
" target_status_code,\n" +
" date_trunc('hour', (date_parse(time, '%Y-%m-%dT%H:%i:%s.%fZ')))\n" +
";"; //change the Query statement to match your environment
public static final String ATHENA_PATH_COUNT_QUERY = "SELECT date_trunc('hour', (date_parse(time, '%Y-%m-%dT%H:%i:%s.%fZ'))) AS time_hour,\n" +
"concat(url_extract_path(request_url), '+',request_verb) as url,\n" +
"count(1) as count\n" +
"FROM alb_logs\n" +
"WHERE target_status_code = '200'\n" +
"GROUP BY concat(url_extract_path(request_url),'+',request_verb),\n" +
"date_trunc('hour', (date_parse(time, '%Y-%m-%dT%H:%i:%s.%fZ'))) \n" +
"order by 1;";
public static final long SLEEP_AMOUNT_IN_MS = 500;
public static final String ATHENA_DEFAULT_DATABASE = "saas-boost-alb-log"; //Change the database to match your database
public static final String QUERY1 = "SELECT\n" +
"concat(url_extract_path(request_url), '+',request_verb) as url,\n" +
"count(1) as request_count\n" +
"FROM alb_logs\n" +
" where target_status_code = '200'\n" +
"and time >= '2020-07-09T23:11:33.827Z' and time <= '2020-07-10T23:11:33.827Z' \n" +
"GROUP BY concat(url_extract_path(request_url),'+',request_verb)\n" +
"order by 2 desc\n" +
"limit 10;";
}
| 1,439 |
303 | <reponame>ofZach/landlinesApp
{"id":6022,"line-1":"Province de Diego-Suarez","line-2":"Madagascar","attribution":"©2015 DigitalGlobe","url":"https://www.google.com/maps/@-12.832598,49.854798,17z/data=!3m1!1e3"} | 87 |
407 | package com.alibaba.tesla.appmanager.server.addon;
import com.alibaba.tesla.appmanager.common.enums.ComponentTypeEnum;
import com.alibaba.tesla.appmanager.domain.schema.ComponentSchema;
import com.alibaba.tesla.appmanager.server.addon.req.ApplyAddonInstanceReq;
import com.alibaba.tesla.appmanager.server.addon.req.ReleaseAddonInstanceReq;
/**
* Unified Addon description interface
*
* @author <EMAIL>
*/
public interface Addon {
/**
* Create an Addon instance (time-consuming operation)
*
* @param request creation request
* @return dataOutput data
*/
ComponentSchema applyInstance(ApplyAddonInstanceReq request);
/**
* Release an Addon instance
*
* @param request release request
*/
void releaseInstance(ReleaseAddonInstanceReq request);
/**
* Get the unique identifier of the Addon
*
* @return addonId
*/
String getAddonId();
/**
* Get the Addon version
*
* @return addonVersion
*/
String getAddonVersion();
/**
* Get the Addon label
*
* @return addonLabel
*/
String getAddonLabel();
/**
* Get the Addon description
*
* @return addonDescription
*/
String getAddonDescription();
/**
* Get the Addon type
*
* @return addonType
*/
ComponentTypeEnum getAddonType();
/**
* Get the current Addon schema definition
*
* @return AddonSchema
*/
ComponentSchema getAddonSchema();
/**
* Get the front-end component configuration from the spec of the current Addon schema
*
* @return
*/
String getAddonConfigSchema();
}
| 749 |
354 | /*-------------------------------------------------------------------------
* OpenGL Conformance Test Suite
* -----------------------------
*
* Copyright (c) 2016 Google Inc.
* Copyright (c) 2016 The Khronos Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/ /*!
* \file
* \brief CTS Android Activity.
*/ /*-------------------------------------------------------------------*/
#include "glcAndroidTestActivity.hpp"
#include "glcTestRunner.hpp"
#include "tcuAndroidAssets.hpp"
#include "tcuAndroidPlatform.hpp"
#include "tcuAndroidUtil.hpp"
#include <android/window.h>
#include <stdlib.h>
namespace glcts
{
namespace Android
{
using tcu::Android::Platform;
using tcu::Android::AssetArchive;
using tcu::Android::NativeActivity;
static const char* DEFAULT_LOG_PATH = "/sdcard";
static std::string getWaiverPath(ANativeActivity* activity)
{
return tcu::Android::getIntentStringExtra(activity, "waivers");
}
static std::string getLogPath(ANativeActivity* activity)
{
std::string path = tcu::Android::getIntentStringExtra(activity, "logdir");
return path.empty() ? std::string(DEFAULT_LOG_PATH) : path;
}
static deUint32 getFlags(ANativeActivity* activity)
{
deUint32 flags = 0;
if (tcu::Android::getIntentStringExtra(activity, "verbose") == "true")
flags |= TestRunner::VERBOSE_ALL;
else if (tcu::Android::getIntentStringExtra(activity, "summary") == "true")
flags |= TestRunner::PRINT_SUMMARY;
return flags;
}
TestThread::TestThread(NativeActivity& activity, tcu::Android::AssetArchive& archive, const std::string& waiverPath,
const std::string& logPath, glu::ApiType runType, deUint32 runFlags)
: RenderThread(activity)
, m_platform(activity)
, m_archive(archive)
, m_app(m_platform, m_archive, waiverPath.c_str(), logPath.c_str(), runType, runFlags)
, m_finished(false)
{
}
TestThread::~TestThread(void)
{
// \note m_testApp is managed by thread.
}
void TestThread::run(void)
{
RenderThread::run();
}
void TestThread::onWindowCreated(ANativeWindow* window)
{
m_platform.getWindowRegistry().addWindow(window);
}
void TestThread::onWindowDestroyed(ANativeWindow* window)
{
m_platform.getWindowRegistry().destroyWindow(window);
}
void TestThread::onWindowResized(ANativeWindow* window)
{
// \todo [2013-05-12 pyry] Handle this in some sane way.
DE_UNREF(window);
tcu::print("Warning: Native window was resized, results may be undefined");
}
bool TestThread::render(void)
{
if (!m_finished)
m_finished = !m_app.iterate();
return !m_finished;
}
// TestActivity
TestActivity::TestActivity(ANativeActivity* activity, glu::ApiType runType)
: RenderActivity(activity)
, m_archive(activity->assetManager)
, m_cmdLine(tcu::Android::getIntentStringExtra(activity, "cmdLine"))
, m_testThread(*this, m_archive, getWaiverPath(activity), getLogPath(activity), runType, getFlags(activity))
, m_started(false)
{
// Set initial orientation.
tcu::Android::setRequestedOrientation(getNativeActivity(),
tcu::Android::mapScreenRotation(m_cmdLine.getScreenRotation()));
// Set up window flags.
ANativeActivity_setWindowFlags(activity,
AWINDOW_FLAG_KEEP_SCREEN_ON | AWINDOW_FLAG_TURN_SCREEN_ON | AWINDOW_FLAG_FULLSCREEN |
AWINDOW_FLAG_SHOW_WHEN_LOCKED,
0);
}
TestActivity::~TestActivity(void)
{
}
void TestActivity::onStart(void)
{
if (!m_started)
{
setThread(&m_testThread);
m_testThread.start();
m_started = true;
}
RenderActivity::onStart();
}
void TestActivity::onDestroy(void)
{
if (m_started)
{
setThread(DE_NULL);
m_testThread.stop();
m_started = false;
}
RenderActivity::onDestroy();
// Kill this process.
tcu::print("Done, killing process");
exit(0);
}
void TestActivity::onConfigurationChanged(void)
{
RenderActivity::onConfigurationChanged();
// Update rotation.
tcu::Android::setRequestedOrientation(getNativeActivity(),
tcu::Android::mapScreenRotation(m_cmdLine.getScreenRotation()));
}
} // Android
} // glcts
| 1,518 |
411 | <reponame>elihschiff/Submitty
/* OUTPUT: ERROR: ABORT SIGNAL
* Program Terminated
* (RED) WARNING: This file should be empty (Referring to STDERR)
* Student Standard ERROR (STDERR)
* *** Error in `./a.out': free(): invalid next size (fast): 0x0000000000efa
* Child exited with status 6
* Note: Probably caused by a heap overflow - in this cases
* trying to access memory out of bounds of dynamically
* allocated array
* Message located in execute.cpp
*/
int main(void)
{
int *j = new int[10];
for (int i = 0; i < 15; ++i) {
j[i] = i;
}
delete[] j;
}
| 232 |
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser.accessibility;
import static org.chromium.content.browser.accessibility.AccessibilityContentShellTestUtils.ANP_ERROR;
import static org.chromium.content.browser.accessibility.AccessibilityContentShellTestUtils.END_OF_TEST_ERROR;
import static org.chromium.content.browser.accessibility.AccessibilityContentShellTestUtils.NODE_TIMEOUT_ERROR;
import static org.chromium.content.browser.accessibility.AccessibilityContentShellTestUtils.READY_FOR_TEST_ERROR;
import static org.chromium.content.browser.accessibility.AccessibilityContentShellTestUtils.sContentShellDelegate;
import android.annotation.SuppressLint;
import android.os.Bundle;
import android.os.Environment;
import android.view.View;
import android.view.ViewGroup;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.accessibility.AccessibilityNodeProvider;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Assert;
import org.chromium.base.test.util.Criteria;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.base.test.util.UrlUtils;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import org.chromium.content_shell_apk.ContentShellActivityTestRule;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
/**
* Custom activity test rule for any content shell tests related to accessibility.
*/
@SuppressLint("VisibleForTests")
public class AccessibilityContentShellActivityTestRule extends ContentShellActivityTestRule {
// Test output error messages.
protected static final String EVENTS_ERROR =
"Generated events and actions did not match expectations.";
protected static final String NODE_ERROR =
"Generated AccessibilityNodeInfo tree did not match expectations.";
protected static final String EXPECTATIONS_NULL =
"Test expectations were null, perhaps the file is missing?";
protected static final String RESULTS_NULL =
"Test results were null, did you add the tracker to WebContentsAccessibilityImpl?";
protected static final String MISSING_FILE_ERROR =
"Input file could not be read, perhaps the file is missing?";
// Member variables required for testing framework. Although they are the same object, we will
// instantiate an object of type |AccessibilityNodeProvider| for convenience.
protected static final String BASE_DIRECTORY = "/chromium_tests_root";
public AccessibilityNodeProvider mNodeProvider;
public WebContentsAccessibilityImpl mWcax;
// Tracker for all events and actions performed during a given test.
private AccessibilityActionAndEventTracker mTracker;
public AccessibilityContentShellActivityTestRule() {
super();
}
/**
* Helper methods for setup of a basic web contents accessibility unit test.
*
* This method replaces the usual setUp() method annotated with @Before because we wish to
* load different data with each test, but the process is the same for all tests.
*
* Leaving a commented @Before annotation on each method as a reminder/context clue.
*/
/* @Before */
protected void setupTestFromFile(String file) {
// Verify file exists before beginning the test.
verifyInputFile(file);
launchContentShellWithUrl(UrlUtils.getIsolatedTestFileUrl(file));
waitForActiveShellToBeDoneLoading();
setupTestFramework();
setAccessibilityDelegate();
sendReadyForTestSignal();
}
/**
* Helper method to set up our tests. This method replaces the @Before method.
* Leaving a commented @Before annotation on method as a reminder/context clue.
*/
/* @Before */
public void setupTestFramework() {
mWcax = getWebContentsAccessibility();
mWcax.setState(true);
mWcax.setAccessibilityEnabledForTesting();
mWcax.setBrowserAccessibilityStateForTesting();
mNodeProvider = getAccessibilityNodeProvider();
mTracker = new AccessibilityActionAndEventTracker();
mWcax.setAccessibilityTrackerForTesting(mTracker);
}
/**
* Helper method to tear down our tests so we can start the next test clean.
*/
@After
public void tearDown() {
mTracker = null;
mNodeProvider = null;
// Always reset our max events for good measure.
if (mWcax != null) {
mWcax.setMaxContentChangedEventsToFireForTesting(-1);
mWcax = null;
}
// Reset our test data.
AccessibilityContentShellTestData.resetData();
}
/**
* Returns the current |AccessibilityNodeProvider| from the WebContentsAccessibilityImpl
* instance. Use polling to ensure a non-null value before returning.
*/
private AccessibilityNodeProvider getAccessibilityNodeProvider() {
CriteriaHelper.pollUiThread(() -> mWcax.getAccessibilityNodeProvider() != null, ANP_ERROR);
return mWcax.getAccessibilityNodeProvider();
}
/**
* Helper method to call AccessibilityNodeInfo.getChildId and convert to a virtual
* view ID using reflection, since the needed methods are hidden.
*/
protected int getChildId(AccessibilityNodeInfo node, int index) {
try {
Method getChildIdMethod =
AccessibilityNodeInfo.class.getMethod("getChildId", int.class);
long childId = (long) getChildIdMethod.invoke(node, Integer.valueOf(index));
Method getVirtualDescendantIdMethod =
AccessibilityNodeInfo.class.getMethod("getVirtualDescendantId", long.class);
int virtualViewId =
(int) getVirtualDescendantIdMethod.invoke(null, Long.valueOf(childId));
return virtualViewId;
} catch (Exception ex) {
Assert.fail("Unable to call hidden AccessibilityNodeInfo method: " + ex.toString());
return 0;
}
}
/**
* Helper method to recursively search a tree of virtual views under an
* AccessibilityNodeProvider and return one whose text or contentDescription equals |text|.
* Returns the virtual view ID of the matching node, if found, and View.NO_ID if not.
*/
private <T> int findNodeMatching(int virtualViewId,
AccessibilityContentShellTestUtils.AccessibilityNodeInfoMatcher<T> matcher, T element) {
AccessibilityNodeInfo node = mNodeProvider.createAccessibilityNodeInfo(virtualViewId);
Assert.assertNotEquals(node, null);
if (matcher.matches(node, element)) return virtualViewId;
for (int i = 0; i < node.getChildCount(); i++) {
int childId = getChildId(node, i);
AccessibilityNodeInfo child = mNodeProvider.createAccessibilityNodeInfo(childId);
if (child != null) {
int result = findNodeMatching(childId, matcher, element);
if (result != View.NO_ID) return result;
}
}
return View.NO_ID;
}
/**
* Helper method to block until findNodeMatching() returns a valid node matching
* the given criteria. Returns the virtual view ID of the matching node, if found, and
* asserts if not.
*/
public <T> int waitForNodeMatching(
AccessibilityContentShellTestUtils.AccessibilityNodeInfoMatcher<T> matcher, T element) {
CriteriaHelper.pollUiThread(() -> {
Criteria.checkThat(
findNodeMatching(View.NO_ID, matcher, element), Matchers.not(View.NO_ID));
});
int virtualViewId = TestThreadUtils.runOnUiThreadBlockingNoException(
() -> findNodeMatching(View.NO_ID, matcher, element));
Assert.assertNotEquals(View.NO_ID, virtualViewId);
return virtualViewId;
}
/**
* Helper method to perform actions on the UI so we can then send accessibility events
*
* @param viewId int virtualViewId of the given node
* @param action int desired AccessibilityNodeInfo action
* @param args Bundle action bundle
* @return boolean return value of performAction
* @throws ExecutionException Error
*/
public boolean performActionOnUiThread(int viewId, int action, Bundle args)
throws ExecutionException {
return TestThreadUtils.runOnUiThreadBlocking(
() -> mNodeProvider.performAction(viewId, action, args));
}
/**
* Helper method to perform an action on the UI, then poll for a given criteria to verify
* the action was completed.
*
* @param viewId int virtualViewId of the given node
* @param action int desired AccessibilityNodeInfo action
* @param args Bundle action bundle
* @param criteria Callable<Boolean> criteria to poll against to verify completion
* @return boolean return value of performAction
* @throws ExecutionException Error
* @throws Throwable Error
*/
public boolean performActionOnUiThread(int viewId, int action, Bundle args,
Callable<Boolean> criteria) throws ExecutionException, Throwable {
boolean returnValue = performActionOnUiThread(viewId, action, args);
CriteriaHelper.pollUiThread(criteria, NODE_TIMEOUT_ERROR);
return returnValue;
}
/**
* Helper method for executing a given JS method for the current web contents.
*/
public void executeJS(String method) {
TestThreadUtils.runOnUiThreadBlocking(
() -> getWebContents().evaluateJavaScriptForTests(method, null));
}
/**
* Helper method to focus a given node.
*
* @param virtualViewId The virtualViewId of the node to focus
* @throws Throwable Error
*/
public void focusNode(int virtualViewId) throws Throwable {
// Focus given node, assert actions were performed, then poll until node is updated.
Assert.assertTrue(
performActionOnUiThread(virtualViewId, AccessibilityNodeInfo.ACTION_FOCUS, null));
Assert.assertTrue(performActionOnUiThread(
virtualViewId, AccessibilityNodeInfo.ACTION_ACCESSIBILITY_FOCUS, null));
TestThreadUtils.runOnUiThreadBlocking(
() -> mNodeProvider.createAccessibilityNodeInfo(virtualViewId));
CriteriaHelper.pollUiThread(() -> {
return mNodeProvider.createAccessibilityNodeInfo(virtualViewId)
.isAccessibilityFocused();
}, NODE_TIMEOUT_ERROR);
}
/**
* Helper method for setting standard AccessibilityDelegate. The delegate is set on the parent
* as WebContentsAccessibilityImpl sends events using the parent.
*/
public void setAccessibilityDelegate() {
((ViewGroup) getContainerView().getParent())
.setAccessibilityDelegate(sContentShellDelegate);
}
/**
* Call through the WebContentsAccessibilityImpl to send a signal that we are ready to begin
* a test (using the kEndOfTest signal for simplicity). Poll until we receive the generated
* Blink event in response, then reset the tracker.
*/
public void sendReadyForTestSignal() {
TestThreadUtils.runOnUiThreadBlocking(() -> mWcax.signalEndOfTestForTesting());
CriteriaHelper.pollUiThread(() -> mTracker.testComplete(), READY_FOR_TEST_ERROR);
TestThreadUtils.runOnUiThreadBlocking(() -> mTracker.signalReadyForTest());
}
/**
* Call through the WebContentsAccessibilityImpl to send a kEndOfTest event to signal that we
* are done with a test. Poll until we receive the generated Blink event in response.
*/
public void sendEndOfTestSignal() {
TestThreadUtils.runOnUiThreadBlocking(() -> mWcax.signalEndOfTestForTesting());
CriteriaHelper.pollUiThread(() -> mTracker.testComplete(), END_OF_TEST_ERROR);
}
/**
* Helper method to generate results from the |AccessibilityActionAndEventTracker|.
*
* @return String List of all actions and events performed during test.
*/
public String getTrackerResults() {
return mTracker.results();
}
/**
* Read the contents of a file, and return as a String.
*
* @param file File to read (including path and name)
* @return String Contents of the given file.
*/
protected String readExpectationFile(String file) {
String directory = Environment.getExternalStorageDirectory().getPath() + BASE_DIRECTORY;
try {
File expectedFile = new File(directory, "/" + file);
FileInputStream fis = new FileInputStream(expectedFile);
byte[] data = new byte[(int) expectedFile.length()];
fis.read(data);
fis.close();
return new String(data);
} catch (IOException e) {
throw new AssertionError(EXPECTATIONS_NULL, e);
}
}
/**
* Check that a given file exists on disk.
*
* @param file String - file to check, including path and name
*/
protected void verifyInputFile(String file) {
String directory = Environment.getExternalStorageDirectory().getPath() + BASE_DIRECTORY;
File expectedFile = new File(directory, "/" + file);
Assert.assertTrue(MISSING_FILE_ERROR + " could not find the directory: " + directory
+ ", and/or file: " + expectedFile.getPath(),
expectedFile.exists());
}
}
| 5,123 |
312 | /*
* Copyright 2014-2015 <NAME>. All rights reserved.
* License: https://github.com/bkaradzic/bgfx#license-bsd-2-clause
*/
#ifndef OCORNUT_IMGUI_H_HEADER_GUARD
#define OCORNUT_IMGUI_H_HEADER_GUARD
#include <ocornut-imgui/imgui.h>
namespace bx { struct AllocatorI; }
void IMGUI_create(const void* _data, uint32_t _size, float _fontSize, bx::AllocatorI* _allocator);
void IMGUI_destroy();
void IMGUI_beginFrame(int32_t _mx, int32_t _my, uint8_t _button, int32_t _scroll, int _width, int _height, char _inputChar, uint8_t _viewId);
void IMGUI_endFrame();
#endif // OCORNUT_IMGUI_H_HEADER_GUARD
| 242 |
988 | /*
* PROGRAM: Security data base manager
* MODULE: call_service.h
* DESCRIPTION: Invokes remote service manager to work with security DB.
*
* The contents of this file are subject to the Initial
* Developer's Public License Version 1.0 (the "License");
* you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
* http://www.ibphoenix.com/main.nfs?a=ibphoenix&page=ibp_idpl.
*
* Software distributed under the License is distributed AS IS,
* WITHOUT WARRANTY OF ANY KIND, either express or implied.
* See the License for the specific language governing rights
* and limitations under the License.
*
* The Original Code was created by <NAME> (<EMAIL>)
* for the Firebird Open Source RDBMS project.
*
* All Rights Reserved.
* Contributor(s): ______________________________________.
*/
#ifndef UTILITIES_GSEC_CALL_SERVICE_H
#define UTILITIES_GSEC_CALL_SERVICE_H
#include "ibase.h"
#include "../utilities/gsec/gsec.h"
#include "../common/security.h"
isc_svc_handle attachRemoteServiceManager(ISC_STATUS*, const TEXT*, const TEXT*,
bool, int, const TEXT*);
isc_svc_handle attachRemoteServiceManager(ISC_STATUS*, const TEXT*, const TEXT*, bool, const TEXT*, bool);
void callRemoteServiceManager(ISC_STATUS*, isc_svc_handle, Auth::UserData&, Firebird::IListUsers*);
void detachRemoteServiceManager(ISC_STATUS*, isc_svc_handle);
#endif // UTILITIES_GSEC_CALL_SERVICE_H
| 462 |
334 | // The MIT License(MIT)
//
// Copyright(c) 2022 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files(the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and / or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//---------------------------------------------------------------------------------
// NVIDIA Image Scaling SDK - v1.0.2
//---------------------------------------------------------------------------------
// The NVIDIA Image Scaling SDK provides a single spatial scaling and sharpening algorithm
// for cross-platform support. The scaling algorithm uses a 6-tap scaling filter combined
// with 4 directional scaling and adaptive sharpening filters, which creates nice smooth images
// and sharp edges. In addition, the SDK provides a state-of-the-art adaptive directional sharpening algorithm
// for use in applications where no scaling is required.
//
// The directional scaling and sharpening algorithm is named NVScaler while the adaptive-directional-sharpening-only
// algorithm is named NVSharpen. Both algorithms are provided as compute shaders and
// developers are free to integrate them in their applications. Note that if you integrate NVScaler, you
// should NOT integrate NVSharpen, as NVScaler already includes a sharpening pass
//
// Pipeline Placement
// ------------------
// The call into the NVIDIA Image Scaling shaders must occur during the post-processing phase after tone-mapping.
// Applying the scaling in linear HDR in-game color-space may result in a sharpening effect that is
// either not visible or too strong. Since sharpening algorithms can enhance noisy or grainy regions, it is recommended
// that certain effects such as film grain should occur after NVScaler or NVSharpen. Low-pass filters such as motion blur or
// light bloom are recommended to be applied before NVScaler or NVSharpen to avoid sharpening attenuation.
//
// Color Space and Ranges
// ----------------------
// NVIDIA Image Scaling shaders can process color textures stored as either LDR or HDR with the following
// restrictions:
// 1) LDR
// - The range of color values must be in the [0, 1] range
// - The input color texture must be in display-referred color-space after tone mapping and OETF (gamma-correction)
// has been applied
// 2) HDR PQ
// - The range of color values must be in the [0, 1] range
// - The input color texture must be in display-referred color-space after tone mapping with Rec.2020 PQ OETF applied
// 3) HDR Linear
// - The recommended range of color values is [0, 12.5], where luminance value (as per BT. 709) of
// 1.0 maps to brightness value of 80nits (sRGB peak) and 12.5 maps to 1000nits
// - The input color texture may have luminance values that are either linear and scene-referred or
// linear and display-referred (after tone mapping)
//
// If the input color texture sent to NVScaler/NVSharpen is in HDR format, set the NIS_HDR_MODE define to either
// NIS_HDR_MODE_LINEAR (1) or NIS_HDR_MODE_PQ (2).
//
// Supported Texture Formats
// -------------------------
// Input and output formats:
// Input and output formats are expected to be in the rages defined in previous section and should be
// specified using non-integer data types such as DXGI_FORMAT_R8G8B8A8_UNORM.
//
// Coefficients formats:
// The scaler coefficients and USM coefficients format should be specified using float4 type such as
// DXGI_FORMAT_R32G32B32A32_FLOAT or DXGI_FORMAT_R16G16B16A16_FLOAT.
//
// Resource States, Buffers, and Sampler:
// The game or application calling NVIDIA Image Scaling SDK shaders must ensure that the textures are in
// the correct state.
// - Input color textures must be in pixel shader read state. Shader Resource View (SRV) in DirectX
// - The output texture must be in read/write state. Unordered Access View (UAV) in DirectX
// - The coefficients texture for NVScaler must be in read state. Shader Resource View (SRV) in DirectX
// - The configuration variables must be passed as constant buffer. Constant Buffer View (CBV) in DirectX
// - The sampler for texture pixel sampling. Linear clamp SamplerState in DirectX
//
// Adding NVIDIA Image Scaling SDK to a Project
// --------------------------------------------
// Include NIS_Scaler.h directly in your application or alternative use the provided NIS_Main.hlsl shader file.
// Use NIS_Config.h to get the ideal shader dispatch values for your platform, to configure the algorithm constant
// values (NVScalerUpdateConfig, and NVSharpenUpdateConfig), and to access the algorithm coefficients (coef_scale and coef_USM).
//
// Defines:
// NIS_SCALER: default (1) NVScaler, (0) fast NVSharpen only, no upscaling
// NIS_HDR_MODE: default (0) disabled, (1) Linear, (2) PQ
// NIS_BLOCK_WIDTH: pixels per block width. Use GetOptimalBlockWidth query for your platform
// NIS_BLOCK_HEIGHT: pixels per block height. Use GetOptimalBlockHeight query for your platform
// NIS_THREAD_GROUP_SIZE: number of threads per group. Use GetOptimalThreadGroupSize query for your platform
// NIS_USE_HALF_PRECISION: default (0) disabled, (1) enable half precision computation
// NIS_HLSL: (1) enabled, (0) disabled
// NIS_HLSL_6_2: default (0) HLSL v5, (1) HLSL v6.2 forces NIS_HLSL=1
// NIS_GLSL: (1) enabled, (0) disabled
// NIS_VIEWPORT_SUPPORT: default(0) disabled, (1) enable input/output viewport support
//
// Default NVScaler shader constants:
// [NIS_BLOCK_WIDTH, NIS_BLOCK_HEIGHT, NIS_THREAD_GROUP_SIZE] = [32, 24, 256]
//
// Default NVSharpen shader constants:
// [NIS_BLOCK_WIDTH, NIS_BLOCK_HEIGHT, NIS_THREAD_GROUP_SIZE] = [32, 32, 256]
//---------------------------------------------------------------------------------
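// A minimal integration sketch (illustrative only; the values below are the defaults
// quoted above, and the shader file name is an assumption, not part of this header):
//
//   // NIS_Main.hlsl (hypothetical)
//   #define NIS_SCALER 1               // NVScaler: scaling + sharpening
//   #define NIS_HDR_MODE 0             // LDR input
//   #define NIS_BLOCK_WIDTH 32         // from GetOptimalBlockWidth()
//   #define NIS_BLOCK_HEIGHT 24        // from GetOptimalBlockHeight()
//   #define NIS_THREAD_GROUP_SIZE 256  // from GetOptimalThreadGroupSize()
//   #include "NIS_Scaler.h"
//
// On the CPU side, NVScalerUpdateConfig()/NVSharpenUpdateConfig() from NIS_Config.h are
// used to fill the constant-buffer values these shaders read whenever the input/output
// size or sharpness changes.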
// NVScaler enable by default. Set to 0 for NVSharpen only
#ifndef NIS_SCALER
#define NIS_SCALER 1
#endif
// HDR Modes
#define NIS_HDR_MODE_NONE 0
#define NIS_HDR_MODE_LINEAR 1
#define NIS_HDR_MODE_PQ 2
#ifndef NIS_HDR_MODE
#define NIS_HDR_MODE NIS_HDR_MODE_NONE
#endif
#define kHDRCompressionFactor 0.282842712f
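// (0.282842712 = 1/sqrt(12.5): with the recommended linear-HDR range [0, 12.5],
//  sqrt(Y) * kHDRCompressionFactor maps the 1000-nit peak value 12.5 to 1.0)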
// Viewport support
#ifndef NIS_VIEWPORT_SUPPORT
#define NIS_VIEWPORT_SUPPORT 0
#endif
// HLSL, GLSL
#if NIS_HLSL==0 && !defined(NIS_GLSL)
#define NIS_GLSL 1
#endif
#if NIS_HLSL_6_2 || (!NIS_GLSL && !NIS_HLSL)
#if defined(NIS_HLSL)
#undef NIS_HLSL
#endif
#define NIS_HLSL 1
#endif
#if NIS_HLSL && NIS_GLSL
#undef NIS_GLSL
#define NIS_GLSL 0
#endif
// Half precision
#ifndef NIS_USE_HALF_PRECISION
#define NIS_USE_HALF_PRECISION 0
#endif
#if NIS_HLSL
// Generic type and function aliases for HLSL
#define NVF float
#define NVF2 float2
#define NVF3 float3
#define NVF4 float4
#define NVI int
#define NVI2 int2
#define NVU uint
#define NVU2 uint2
#define NVB bool
#if NIS_USE_HALF_PRECISION
#if NIS_HLSL_6_2
#define NVH float16_t
#define NVH2 float16_t2
#define NVH3 float16_t3
#define NVH4 float16_t4
#else
#define NVH min16float
#define NVH2 min16float2
#define NVH3 min16float3
#define NVH4 min16float4
#endif // NIS_HLSL_6_2
#else // FP32 types
#define NVH NVF
#define NVH2 NVF2
#define NVH3 NVF3
#define NVH4 NVF4
#endif // NIS_USE_HALF_PRECISION
#define NVSHARED groupshared
#define NVTEX_LOAD(x, pos) x[pos]
#define NVTEX_SAMPLE(x, sampler, pos) x.SampleLevel(sampler, pos, 0)
#define NVTEX_SAMPLE_RED(x, sampler, pos) x.GatherRed(sampler, pos)
#define NVTEX_SAMPLE_GREEN(x, sampler, pos) x.GatherGreen(sampler, pos)
#define NVTEX_SAMPLE_BLUE(x, sampler, pos) x.GatherBlue(sampler, pos)
#define NVTEX_STORE(x, pos, v) x[pos] = v
#ifndef NIS_UNROLL
#define NIS_UNROLL [unroll]
#endif
#endif // NIS_HLSL
// Generic type and function aliases for GLSL
#if NIS_GLSL
#define NVF float
#define NVF2 vec2
#define NVF3 vec3
#define NVF4 vec4
#define NVI int
#define NVI2 ivec2
#define NVU uint
#define NVU2 uvec2
#define NVB bool
#if NIS_USE_HALF_PRECISION
#define NVH float16_t
#define NVH2 f16vec2
#define NVH3 f16vec3
#define NVH4 f16vec4
#else // FP32 types
#define NVH NVF
#define NVH2 NVF2
#define NVH3 NVF3
#define NVH4 NVF4
#endif // NIS_USE_HALF_PRECISION
#define NVSHARED shared
#define NVTEX_LOAD(x, pos) texelFetch(sampler2D(x, samplerLinearClamp), pos, 0)
#define NVTEX_SAMPLE(x, sampler, pos) textureLod(sampler2D(x, sampler), pos, 0)
#define NVTEX_SAMPLE_RED(x, sampler, pos) textureGather(sampler2D(x, sampler), pos, 0)
#define NVTEX_SAMPLE_GREEN(x, sampler, pos) textureGather(sampler2D(x, sampler), pos, 1)
#define NVTEX_SAMPLE_BLUE(x, sampler, pos) textureGather(sampler2D(x, sampler), pos, 2)
#define NVTEX_STORE(x, pos, v) imageStore(x, NVI2(pos), v)
#define saturate(x) clamp(x, 0, 1)
#define lerp(a, b, x) mix(a, b, x)
#define GroupMemoryBarrierWithGroupSync() groupMemoryBarrier(); barrier()
#ifndef NIS_UNROLL
#define NIS_UNROLL
#endif
#endif // NIS_GLSL
// Texture gather
#ifndef NIS_TEXTURE_GATHER
#define NIS_TEXTURE_GATHER 0
#endif
// NIS Scaling
#define NIS_SCALE_INT 1
#define NIS_SCALE_FLOAT NVF(1.f)
NVF getY(NVF3 rgba)
{
#if NIS_HDR_MODE == NIS_HDR_MODE_PQ
return NVF(0.262f) * rgba.x + NVF(0.678f) * rgba.y + NVF(0.0593f) * rgba.z;
#elif NIS_HDR_MODE == NIS_HDR_MODE_LINEAR
return sqrt(NVF(0.2126f) * rgba.x + NVF(0.7152f) * rgba.y + NVF(0.0722f) * rgba.z) * kHDRCompressionFactor;
#else
return NVF(0.2126f) * rgba.x + NVF(0.7152f) * rgba.y + NVF(0.0722f) * rgba.z;
#endif
}
NVF getYLinear(NVF3 rgba)
{
return NVF(0.2126f) * rgba.x + NVF(0.7152f) * rgba.y + NVF(0.0722f) * rgba.z;
}
#if NIS_SCALER
NVF4 GetEdgeMap(NVF p[4][4], NVI i, NVI j)
#else
NVF4 GetEdgeMap(NVF p[5][5], NVI i, NVI j)
#endif
{
const NVF g_0 = abs(p[0 + i][0 + j] + p[0 + i][1 + j] + p[0 + i][2 + j] - p[2 + i][0 + j] - p[2 + i][1 + j] - p[2 + i][2 + j]);
const NVF g_45 = abs(p[1 + i][0 + j] + p[0 + i][0 + j] + p[0 + i][1 + j] - p[2 + i][1 + j] - p[2 + i][2 + j] - p[1 + i][2 + j]);
const NVF g_90 = abs(p[0 + i][0 + j] + p[1 + i][0 + j] + p[2 + i][0 + j] - p[0 + i][2 + j] - p[1 + i][2 + j] - p[2 + i][2 + j]);
const NVF g_135 = abs(p[1 + i][0 + j] + p[2 + i][0 + j] + p[2 + i][1 + j] - p[0 + i][1 + j] - p[0 + i][2 + j] - p[1 + i][2 + j]);
const NVF g_0_90_max = max(g_0, g_90);
const NVF g_0_90_min = min(g_0, g_90);
const NVF g_45_135_max = max(g_45, g_135);
const NVF g_45_135_min = min(g_45, g_135);
NVF e_0_90 = 0;
NVF e_45_135 = 0;
if (g_0_90_max + g_45_135_max == 0)
{
return NVF4(0, 0, 0, 0);
}
e_0_90 = min(g_0_90_max / (g_0_90_max + g_45_135_max), 1.0f);
e_45_135 = 1.0f - e_0_90;
NVB c_0_90 = (g_0_90_max > (g_0_90_min * kDetectRatio)) && (g_0_90_max > kDetectThres) && (g_0_90_max > g_45_135_min);
NVB c_45_135 = (g_45_135_max > (g_45_135_min * kDetectRatio)) && (g_45_135_max > kDetectThres) && (g_45_135_max > g_0_90_min);
NVB c_g_0_90 = g_0_90_max == g_0;
NVB c_g_45_135 = g_45_135_max == g_45;
NVF f_e_0_90 = (c_0_90 && c_45_135) ? e_0_90 : 1.0f;
NVF f_e_45_135 = (c_0_90 && c_45_135) ? e_45_135 : 1.0f;
NVF weight_0 = (c_0_90 && c_g_0_90) ? f_e_0_90 : 0.0f;
NVF weight_90 = (c_0_90 && !c_g_0_90) ? f_e_0_90 : 0.0f;
NVF weight_45 = (c_45_135 && c_g_45_135) ? f_e_45_135 : 0.0f;
NVF weight_135 = (c_45_135 && !c_g_45_135) ? f_e_45_135 : 0.0f;
return NVF4(weight_0, weight_90, weight_45, weight_135);
}
#if NIS_SCALER
#ifndef NIS_BLOCK_WIDTH
#define NIS_BLOCK_WIDTH 32
#endif
#ifndef NIS_BLOCK_HEIGHT
#define NIS_BLOCK_HEIGHT 24
#endif
#ifndef NIS_THREAD_GROUP_SIZE
#define NIS_THREAD_GROUP_SIZE 256
#endif
#define kPhaseCount 64
#define kFilterSize 6
#define kSupportSize 6
#define kPadSize kSupportSize
// 'Tile' is the region of source luminance values that we load into shPixelsY.
// It is the area of source pixels covered by the destination 'Block' plus a
// 3 pixel border of support pixels.
#define kTilePitch (NIS_BLOCK_WIDTH + kPadSize)
#define kTileSize (kTilePitch * (NIS_BLOCK_HEIGHT + kPadSize))
// 'EdgeMap' is the region of source pixels for which edge map vectors are derived.
// It is the area of source pixels covered by the destination 'Block' plus a
// 1 pixel border.
#define kEdgeMapPitch (NIS_BLOCK_WIDTH + 2)
#define kEdgeMapSize (kEdgeMapPitch * (NIS_BLOCK_HEIGHT + 2))
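// With the default 32x24 block this gives kTilePitch = 38, kTileSize = 38 * 30 = 1140
// luma values and kEdgeMapPitch = 34, kEdgeMapSize = 34 * 26 = 884 edge-map entries
// for the shared-memory buffers declared below.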
NVSHARED NVF shPixelsY[kTileSize];
NVSHARED NVH shCoefScaler[kPhaseCount][kFilterSize];
NVSHARED NVH shCoefUSM[kPhaseCount][kFilterSize];
NVSHARED NVH4 shEdgeMap[kEdgeMapSize];
void LoadFilterBanksSh(NVI i0, NVI di) {
// Load up filter banks to shared memory
// The work is spread over (kPhaseCount * 2) threads
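    // Each phase packs its kFilterSize (6) coefficients into two RGBA texels:
    // texel 0 (vIdx == 0) supplies taps 0-3, texel 1 (vIdx == 1) supplies taps 4-5,
    // leaving the z/w channels of the second texel unused.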
for (NVI i = i0; i < kPhaseCount * 2; i += di)
{
NVI phase = i >> 1;
NVI vIdx = i & 1;
NVH4 v = NVH4(NVTEX_LOAD(coef_scaler, NVI2(vIdx, phase)));
NVI filterOffset = vIdx * 4;
shCoefScaler[phase][filterOffset + 0] = v.x;
shCoefScaler[phase][filterOffset + 1] = v.y;
if (vIdx == 0)
{
shCoefScaler[phase][2] = v.z;
shCoefScaler[phase][3] = v.w;
}
v = NVH4(NVTEX_LOAD(coef_usm, NVI2(vIdx, phase)));
shCoefUSM[phase][filterOffset + 0] = v.x;
shCoefUSM[phase][filterOffset + 1] = v.y;
if (vIdx == 0)
{
shCoefUSM[phase][2] = v.z;
shCoefUSM[phase][3] = v.w;
}
}
}
NVF CalcLTI(NVF p0, NVF p1, NVF p2, NVF p3, NVF p4, NVF p5, NVI phase_index)
{
const NVB selector = (phase_index <= kPhaseCount / 2);
NVF sel = selector ? p0 : p3;
const NVF a_min = min(min(p1, p2), sel);
const NVF a_max = max(max(p1, p2), sel);
sel = selector ? p2 : p5;
const NVF b_min = min(min(p3, p4), sel);
const NVF b_max = max(max(p3, p4), sel);
const NVF a_cont = a_max - a_min;
const NVF b_cont = b_max - b_min;
const NVF cont_ratio = max(a_cont, b_cont) / (min(a_cont, b_cont) + kEps);
return (1.0f - saturate((cont_ratio - kMinContrastRatio) * kRatioNorm)) * kContrastBoost;
}
NVF4 GetInterpEdgeMap(const NVF4 edge[2][2], NVF phase_frac_x, NVF phase_frac_y)
{
NVF4 h0 = lerp(edge[0][0], edge[0][1], phase_frac_x);
NVF4 h1 = lerp(edge[1][0], edge[1][1], phase_frac_x);
return lerp(h0, h1, phase_frac_y);
}
NVF EvalPoly6(const NVF pxl[6], NVI phase_int)
{
NVF y = 0.f;
{
NIS_UNROLL
for (NVI i = 0; i < 6; ++i)
{
y += shCoefScaler[phase_int][i] * pxl[i];
}
}
NVF y_usm = 0.f;
{
NIS_UNROLL
for (NVI i = 0; i < 6; ++i)
{
y_usm += shCoefUSM[phase_int][i] * pxl[i];
}
}
// let's compute a piece-wise ramp based on luma
const NVF y_scale = 1.0f - saturate((y * (1.0f / NIS_SCALE_FLOAT) - kSharpStartY) * kSharpScaleY);
// scale the ramp to sharpen as a function of luma
const NVF y_sharpness = y_scale * kSharpStrengthScale + kSharpStrengthMin;
y_usm *= y_sharpness;
// scale the ramp to limit USM as a function of luma
const NVF y_sharpness_limit = (y_scale * kSharpLimitScale + kSharpLimitMin) * y;
y_usm = min(y_sharpness_limit, max(-y_sharpness_limit, y_usm));
// reduce ringing
y_usm *= CalcLTI(pxl[0], pxl[1], pxl[2], pxl[3], pxl[4], pxl[5], phase_int);
return y + y_usm;
}
NVF FilterNormal(const NVF p[6][6], NVI phase_x_frac_int, NVI phase_y_frac_int)
{
NVF h_acc = 0.0f;
NIS_UNROLL
for (NVI j = 0; j < 6; ++j)
{
NVF v_acc = 0.0f;
NIS_UNROLL
for (NVI i = 0; i < 6; ++i)
{
v_acc += p[i][j] * shCoefScaler[phase_y_frac_int][i];
}
h_acc += v_acc * shCoefScaler[phase_x_frac_int][j];
}
// let's return the sum unpacked -> we can accumulate it later
return h_acc;
}
NVF AddDirFilters(NVF p[6][6], NVF phase_x_frac, NVF phase_y_frac, NVI phase_x_frac_int, NVI phase_y_frac_int, NVF4 w)
{
NVF f = 0;
if (w.x > 0.0f)
{
// 0 deg filter
NVF interp0Deg[6];
{
NIS_UNROLL
for (NVI i = 0; i < 6; ++i)
{
interp0Deg[i] = lerp(p[i][2], p[i][3], phase_x_frac);
}
}
f += EvalPoly6(interp0Deg, phase_y_frac_int) * w.x;
}
if (w.y > 0.0f)
{
// 90 deg filter
NVF interp90Deg[6];
{
NIS_UNROLL
for (NVI i = 0; i < 6; ++i)
{
interp90Deg[i] = lerp(p[2][i], p[3][i], phase_y_frac);
}
}
f += EvalPoly6(interp90Deg, phase_x_frac_int) * w.y;
}
if (w.z > 0.0f)
{
        // 45 deg filter
NVF pphase_b45 = 0.5f + 0.5f * (phase_x_frac - phase_y_frac);
NVF temp_interp45Deg[7];
temp_interp45Deg[1] = lerp(p[2][1], p[1][2], pphase_b45);
temp_interp45Deg[3] = lerp(p[3][2], p[2][3], pphase_b45);
temp_interp45Deg[5] = lerp(p[4][3], p[3][4], pphase_b45);
{
pphase_b45 = pphase_b45 - 0.5f;
NVF a = (pphase_b45 >= 0.f) ? p[0][2] : p[2][0];
NVF b = (pphase_b45 >= 0.f) ? p[1][3] : p[3][1];
NVF c = (pphase_b45 >= 0.f) ? p[2][4] : p[4][2];
NVF d = (pphase_b45 >= 0.f) ? p[3][5] : p[5][3];
temp_interp45Deg[0] = lerp(p[1][1], a, abs(pphase_b45));
temp_interp45Deg[2] = lerp(p[2][2], b, abs(pphase_b45));
temp_interp45Deg[4] = lerp(p[3][3], c, abs(pphase_b45));
temp_interp45Deg[6] = lerp(p[4][4], d, abs(pphase_b45));
}
NVF interp45Deg[6];
NVF pphase_p45 = phase_x_frac + phase_y_frac;
if (pphase_p45 >= 1)
{
NIS_UNROLL
for (NVI i = 0; i < 6; i++)
{
interp45Deg[i] = temp_interp45Deg[i + 1];
}
pphase_p45 = pphase_p45 - 1;
}
else
{
NIS_UNROLL
for (NVI i = 0; i < 6; i++)
{
interp45Deg[i] = temp_interp45Deg[i];
}
}
f += EvalPoly6(interp45Deg, NVI(pphase_p45 * 64)) * w.z;
}
if (w.w > 0.0f)
{
        // 135 deg filter
NVF pphase_b135 = 0.5f * (phase_x_frac + phase_y_frac);
NVF temp_interp135Deg[7];
temp_interp135Deg[1] = lerp(p[3][1], p[4][2], pphase_b135);
temp_interp135Deg[3] = lerp(p[2][2], p[3][3], pphase_b135);
temp_interp135Deg[5] = lerp(p[1][3], p[2][4], pphase_b135);
{
pphase_b135 = pphase_b135 - 0.5f;
NVF a = (pphase_b135 >= 0.f) ? p[5][2] : p[3][0];
NVF b = (pphase_b135 >= 0.f) ? p[4][3] : p[2][1];
NVF c = (pphase_b135 >= 0.f) ? p[3][4] : p[1][2];
NVF d = (pphase_b135 >= 0.f) ? p[2][5] : p[0][3];
temp_interp135Deg[0] = lerp(p[4][1], a, abs(pphase_b135));
temp_interp135Deg[2] = lerp(p[3][2], b, abs(pphase_b135));
temp_interp135Deg[4] = lerp(p[2][3], c, abs(pphase_b135));
temp_interp135Deg[6] = lerp(p[1][4], d, abs(pphase_b135));
}
NVF interp135Deg[6];
NVF pphase_p135 = 1 + (phase_x_frac - phase_y_frac);
if (pphase_p135 >= 1)
{
NIS_UNROLL
for (NVI i = 0; i < 6; ++i)
{
interp135Deg[i] = temp_interp135Deg[i + 1];
}
pphase_p135 = pphase_p135 - 1;
}
else
{
NIS_UNROLL
for (NVI i = 0; i < 6; ++i)
{
interp135Deg[i] = temp_interp135Deg[i];
}
}
f += EvalPoly6(interp135Deg, NVI(pphase_p135 * 64)) * w.w;
}
return f;
}
//-----------------------------------------------------------------------------------------------
// NVScaler
//-----------------------------------------------------------------------------------------------
void NVScaler(NVU2 blockIdx, NVU threadIdx)
{
    // Figure out the range of pixels from the input image that needs to be loaded for this thread block
NVI dstBlockX = NVI(NIS_BLOCK_WIDTH * blockIdx.x);
NVI dstBlockY = NVI(NIS_BLOCK_HEIGHT * blockIdx.y);
const NVI srcBlockStartX = NVI(floor((dstBlockX + 0.5f) * kScaleX - 0.5f));
const NVI srcBlockStartY = NVI(floor((dstBlockY + 0.5f) * kScaleY - 0.5f));
const NVI srcBlockEndX = NVI(ceil((dstBlockX + NIS_BLOCK_WIDTH + 0.5f) * kScaleX - 0.5f));
const NVI srcBlockEndY = NVI(ceil((dstBlockY + NIS_BLOCK_HEIGHT + 0.5f) * kScaleY - 0.5f));
NVI numTilePixelsX = srcBlockEndX - srcBlockStartX + kSupportSize - 1;
NVI numTilePixelsY = srcBlockEndY - srcBlockStartY + kSupportSize - 1;
// round-up load region to even size since we're loading in 2x2 batches
numTilePixelsX += numTilePixelsX & 0x1;
numTilePixelsY += numTilePixelsY & 0x1;
const NVI numTilePixels = numTilePixelsX * numTilePixelsY;
// calculate the equivalent values for the edge map
const NVI numEdgeMapPixelsX = numTilePixelsX - kSupportSize + 2;
const NVI numEdgeMapPixelsY = numTilePixelsY - kSupportSize + 2;
const NVI numEdgeMapPixels = numEdgeMapPixelsX * numEdgeMapPixelsY;
// fill in input luma tile (shPixelsY) in batches of 2x2 pixels
// we use texture gather to get extra support necessary
// to compute 2x2 edge map outputs too
{
for (NVU i = threadIdx * 2; i < NVU(numTilePixels) >> 1; i += NIS_THREAD_GROUP_SIZE * 2)
{
NVU py = (i / numTilePixelsX) * 2;
NVU px = i % numTilePixelsX;
// 0.5 to be in the center of texel
// - (kSupportSize - 1) / 2 to shift by the kernel support size
NVF kShift = 0.5f - (kSupportSize - 1) / 2;
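            // (kSupportSize - 1) / 2 is integer division (5 / 2 == 2), so kShift == -1.5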
#if NIS_VIEWPORT_SUPPORT
const NVF tx = (srcBlockStartX + px + kInputViewportOriginX + kShift) * kSrcNormX;
const NVF ty = (srcBlockStartY + py + kInputViewportOriginY + kShift) * kSrcNormY;
#else
const NVF tx = (srcBlockStartX + px + kShift) * kSrcNormX;
const NVF ty = (srcBlockStartY + py + kShift) * kSrcNormY;
#endif
NVF p[2][2];
#if NIS_TEXTURE_GATHER
{
const NVF4 sr = NVTEX_SAMPLE_RED(in_texture, samplerLinearClamp, NVF2(tx, ty));
const NVF4 sg = NVTEX_SAMPLE_GREEN(in_texture, samplerLinearClamp, NVF2(tx, ty));
const NVF4 sb = NVTEX_SAMPLE_BLUE(in_texture, samplerLinearClamp, NVF2(tx, ty));
p[0][0] = getY(NVF3(sr.w, sg.w, sb.w));
p[0][1] = getY(NVF3(sr.z, sg.z, sb.z));
p[1][0] = getY(NVF3(sr.x, sg.x, sb.x));
p[1][1] = getY(NVF3(sr.y, sg.y, sb.y));
}
#else
NIS_UNROLL
for (NVI j = 0; j < 2; j++)
{
NIS_UNROLL
for (NVI k = 0; k < 2; k++)
{
const NVF4 px = NVTEX_SAMPLE(in_texture, samplerLinearClamp, NVF2(tx + k * kSrcNormX, ty + j * kSrcNormY));
p[j][k] = getY(px.xyz);
}
}
#endif
const NVU idx = py * kTilePitch + px;
shPixelsY[idx] = NVH(p[0][0]);
shPixelsY[idx + 1] = NVH(p[0][1]);
shPixelsY[idx + kTilePitch] = NVH(p[1][0]);
shPixelsY[idx + kTilePitch + 1] = NVH(p[1][1]);
}
}
GroupMemoryBarrierWithGroupSync();
{
// fill in the edge map of 2x2 pixels
for (NVU i = threadIdx * 2; i < NVU(numEdgeMapPixels) >> 1; i += NIS_THREAD_GROUP_SIZE * 2)
{
NVU py = (i / numEdgeMapPixelsX) * 2;
NVU px = i % numEdgeMapPixelsX;
const NVU edgeMapIdx = py * kEdgeMapPitch + px;
NVU tileCornerIdx = (py+1) * kTilePitch + px + 1;
NVF p[4][4];
NIS_UNROLL
for (NVI j = 0; j < 4; j++)
{
NIS_UNROLL
for (NVI k = 0; k < 4; k++)
{
p[j][k] = shPixelsY[tileCornerIdx + j * kTilePitch + k];
}
}
shEdgeMap[edgeMapIdx] = NVH4(GetEdgeMap(p, 0, 0));
shEdgeMap[edgeMapIdx + 1] = NVH4(GetEdgeMap(p, 0, 1));
shEdgeMap[edgeMapIdx + kEdgeMapPitch] = NVH4(GetEdgeMap(p, 1, 0));
shEdgeMap[edgeMapIdx + kEdgeMapPitch + 1] = NVH4(GetEdgeMap(p, 1, 1));
}
}
LoadFilterBanksSh(NVI(threadIdx), NIS_THREAD_GROUP_SIZE);
GroupMemoryBarrierWithGroupSync();
    // output coord within the destination block
const NVI2 pos = NVI2(NVU(threadIdx) % NVU(NIS_BLOCK_WIDTH), NVU(threadIdx) / NVU(NIS_BLOCK_WIDTH));
// x coord inside the output image
const NVI dstX = dstBlockX + pos.x;
// x coord inside the input image
const NVF srcX = (0.5f + dstX) * kScaleX - 0.5f;
// nearest integer part
const NVI px = NVI(floor(srcX) - srcBlockStartX);
// fractional part
const NVF fx = srcX - floor(srcX);
// discretized phase
const NVI fx_int = NVI(fx * kPhaseCount);
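    // fx is in [0, 1), so fx_int selects one of the kPhaseCount (64) coefficient phases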
for (NVI k = 0; k < NIS_BLOCK_WIDTH * NIS_BLOCK_HEIGHT / NIS_THREAD_GROUP_SIZE; ++k)
{
// y coord inside the output image
const NVI dstY = dstBlockY + pos.y + k * (NIS_THREAD_GROUP_SIZE / NIS_BLOCK_WIDTH);
// y coord inside the input image
const NVF srcY = (0.5f + dstY) * kScaleY - 0.5f;
#if NIS_VIEWPORT_SUPPORT
if (srcX > kInputViewportWidth || srcY > kInputViewportHeight ||
dstX > kOutputViewportWidth || dstY > kOutputViewportHeight)
{
return;
}
#endif
// nearest integer part
const NVI py = NVI(floor(srcY) - srcBlockStartY);
// fractional part
const NVF fy = srcY - floor(srcY);
// discretized phase
const NVI fy_int = NVI(fy * kPhaseCount);
// generate weights for directional filters
const NVI startEdgeMapIdx = py * kEdgeMapPitch + px;
NVF4 edge[2][2];
NIS_UNROLL
for (NVI i = 0; i < 2; i++)
{
NIS_UNROLL
for (NVI j = 0; j < 2; j++)
{
// need to shift edge map sampling since it's a 2x2 centered inside 6x6 grid
edge[i][j] = shEdgeMap[startEdgeMapIdx + (i * kEdgeMapPitch) + j];
}
}
const NVF4 w = GetInterpEdgeMap(edge, fx, fy) * NIS_SCALE_INT;
// load 6x6 support to regs
const NVI startTileIdx = py * kTilePitch + px;
NVF p[6][6];
{
NIS_UNROLL
for (NVI i = 0; i < 6; ++i)
{
NIS_UNROLL
for (NVI j = 0; j < 6; ++j)
{
p[i][j] = shPixelsY[startTileIdx + i * kTilePitch + j];
}
}
}
        // weight for luma
const NVF baseWeight = NIS_SCALE_FLOAT - w.x - w.y - w.z - w.w;
        // final luma is a weighted blend of the directional & normal filters
NVF opY = 0;
// get traditional scaler filter output
opY += FilterNormal(p, fx_int, fy_int) * baseWeight;
// get directional filter bank output
opY += AddDirFilters(p, fx, fy, fx_int, fy_int, w);
// do bilinear tap for chroma upscaling
#if NIS_VIEWPORT_SUPPORT
NVF4 op = NVTEX_SAMPLE(in_texture, samplerLinearClamp, NVF2((srcX + kInputViewportOriginX + 0.5f) * kSrcNormX, (srcY + kInputViewportOriginY + 0.5f) * kSrcNormY));
#else
NVF4 op = NVTEX_SAMPLE(in_texture, samplerLinearClamp, NVF2((srcX + 0.5f) * kSrcNormX, (srcY + 0.5f) * kSrcNormY));
#endif
#if NIS_HDR_MODE == NIS_HDR_MODE_LINEAR
const NVF kEps = 1e-4f;
const NVF kNorm = 1.0f / (NIS_SCALE_FLOAT * kHDRCompressionFactor);
const NVF opYN = max(opY, 0.0f) * kNorm;
const NVF corr = (opYN * opYN + kEps) / (max(getYLinear(NVF3(op.x, op.y, op.z)), 0.0f) + kEps);
op.x *= corr;
op.y *= corr;
op.z *= corr;
#else
const NVF corr = opY * (1.0f / NIS_SCALE_FLOAT) - getY(NVF3(op.x, op.y, op.z));
op.x += corr;
op.y += corr;
op.z += corr;
#endif
#if NIS_VIEWPORT_SUPPORT
NVTEX_STORE(out_texture, NVU2(dstX + kOutputViewportOriginX, dstY + kOutputViewportOriginY), op);
#else
NVTEX_STORE(out_texture, NVU2(dstX, dstY), op);
#endif
}
}
#else
#ifndef NIS_BLOCK_WIDTH
#define NIS_BLOCK_WIDTH 32
#endif
#ifndef NIS_BLOCK_HEIGHT
#define NIS_BLOCK_HEIGHT 32
#endif
#ifndef NIS_THREAD_GROUP_SIZE
#define NIS_THREAD_GROUP_SIZE 256
#endif
#define kSupportSize 5
#define kNumPixelsX (NIS_BLOCK_WIDTH + kSupportSize + 1)
#define kNumPixelsY (NIS_BLOCK_HEIGHT + kSupportSize + 1)
NVSHARED NVF shPixelsY[kNumPixelsY][kNumPixelsX];
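// With the default 32x32 block, kNumPixelsX == kNumPixelsY == 38: the 5x5 filter
// support plus one extra row/column for the 2x2 batched loads below.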
NVF CalcLTIFast(const NVF y[5])
{
const NVF a_min = min(min(y[0], y[1]), y[2]);
const NVF a_max = max(max(y[0], y[1]), y[2]);
const NVF b_min = min(min(y[2], y[3]), y[4]);
const NVF b_max = max(max(y[2], y[3]), y[4]);
const NVF a_cont = a_max - a_min;
const NVF b_cont = b_max - b_min;
const NVF cont_ratio = max(a_cont, b_cont) / (min(a_cont, b_cont) + kEps);
return (1.0f - saturate((cont_ratio - kMinContrastRatio) * kRatioNorm)) * kContrastBoost;
}
NVF EvalUSM(const NVF pxl[5], const NVF sharpnessStrength, const NVF sharpnessLimit)
{
// USM profile
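    // the 3-tap weights (-0.6001, 1.2002, -0.6001) sum to zero, i.e. a zero-mean
    // high-pass response around the center pixel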
NVF y_usm = -0.6001f * pxl[1] + 1.2002f * pxl[2] - 0.6001f * pxl[3];
// boost USM profile
y_usm *= sharpnessStrength;
// clamp to the limit
y_usm = min(sharpnessLimit, max(-sharpnessLimit, y_usm));
// reduce ringing
y_usm *= CalcLTIFast(pxl);
return y_usm;
}
NVF4 GetDirUSM(const NVF p[5][5])
{
// sharpness boost & limit are the same for all directions
const NVF scaleY = 1.0f - saturate((p[2][2] - kSharpStartY) * kSharpScaleY);
// scale the ramp to sharpen as a function of luma
const NVF sharpnessStrength = scaleY * kSharpStrengthScale + kSharpStrengthMin;
// scale the ramp to limit USM as a function of luma
const NVF sharpnessLimit = (scaleY * kSharpLimitScale + kSharpLimitMin) * p[2][2];
NVF4 rval;
// 0 deg filter
NVF interp0Deg[5];
{
for (NVI i = 0; i < 5; ++i)
{
interp0Deg[i] = p[i][2];
}
}
rval.x = EvalUSM(interp0Deg, sharpnessStrength, sharpnessLimit);
// 90 deg filter
NVF interp90Deg[5];
{
for (NVI i = 0; i < 5; ++i)
{
interp90Deg[i] = p[2][i];
}
}
rval.y = EvalUSM(interp90Deg, sharpnessStrength, sharpnessLimit);
    // 45 deg filter
NVF interp45Deg[5];
interp45Deg[0] = p[1][1];
interp45Deg[1] = lerp(p[2][1], p[1][2], 0.5f);
interp45Deg[2] = p[2][2];
interp45Deg[3] = lerp(p[3][2], p[2][3], 0.5f);
interp45Deg[4] = p[3][3];
rval.z = EvalUSM(interp45Deg, sharpnessStrength, sharpnessLimit);
    // 135 deg filter
NVF interp135Deg[5];
interp135Deg[0] = p[3][1];
interp135Deg[1] = lerp(p[3][2], p[2][1], 0.5f);
interp135Deg[2] = p[2][2];
interp135Deg[3] = lerp(p[2][3], p[1][2], 0.5f);
interp135Deg[4] = p[1][3];
rval.w = EvalUSM(interp135Deg, sharpnessStrength, sharpnessLimit);
return rval;
}
//-----------------------------------------------------------------------------------------------
// NVSharpen
//-----------------------------------------------------------------------------------------------
void NVSharpen(NVU2 blockIdx, NVU threadIdx)
{
const NVI dstBlockX = NVI(NIS_BLOCK_WIDTH * blockIdx.x);
const NVI dstBlockY = NVI(NIS_BLOCK_HEIGHT * blockIdx.y);
    // fill in the input luma tile in batches of 2x2 pixels;
    // the extra border rows/columns give the 5x5 filters the
    // support they need around every output pixel
const NVF kShift = 0.5f - kSupportSize / 2;
for (NVI i = NVI(threadIdx) * 2; i < kNumPixelsX * kNumPixelsY / 2; i += NIS_THREAD_GROUP_SIZE * 2)
{
NVU2 pos = NVU2(NVU(i) % NVU(kNumPixelsX), NVU(i) / NVU(kNumPixelsX) * 2);
NIS_UNROLL
for (NVI dy = 0; dy < 2; dy++)
{
NIS_UNROLL
for (NVI dx = 0; dx < 2; dx++)
{
#if NIS_VIEWPORT_SUPPORT
const NVF tx = (dstBlockX + pos.x + kInputViewportOriginX + dx + kShift) * kSrcNormX;
const NVF ty = (dstBlockY + pos.y + kInputViewportOriginY + dy + kShift) * kSrcNormY;
#else
const NVF tx = (dstBlockX + pos.x + dx + kShift) * kSrcNormX;
const NVF ty = (dstBlockY + pos.y + dy + kShift) * kSrcNormY;
#endif
const NVF4 px = NVTEX_SAMPLE(in_texture, samplerLinearClamp, NVF2(tx, ty));
shPixelsY[pos.y + dy][pos.x + dx] = getY(px.xyz);
}
}
}
GroupMemoryBarrierWithGroupSync();
for (NVI k = NVI(threadIdx); k < NIS_BLOCK_WIDTH * NIS_BLOCK_HEIGHT; k += NIS_THREAD_GROUP_SIZE)
{
const NVI2 pos = NVI2(NVU(k) % NVU(NIS_BLOCK_WIDTH), NVU(k) / NVU(NIS_BLOCK_WIDTH));
// load 5x5 support to regs
NVF p[5][5];
NIS_UNROLL
for (NVI i = 0; i < 5; ++i)
{
NIS_UNROLL
for (NVI j = 0; j < 5; ++j)
{
p[i][j] = shPixelsY[pos.y + i][pos.x + j];
}
}
// get directional filter bank output
NVF4 dirUSM = GetDirUSM(p);
// generate weights for directional filters
NVF4 w = GetEdgeMap(p, kSupportSize / 2 - 1, kSupportSize / 2 - 1);
        // final USM is a weighted sum of the directional filter outputs
const NVF usmY = (dirUSM.x * w.x + dirUSM.y * w.y + dirUSM.z * w.z + dirUSM.w * w.w);
// do bilinear tap and correct rgb texel so it produces new sharpened luma
const NVI dstX = dstBlockX + pos.x;
const NVI dstY = dstBlockY + pos.y;
#if NIS_VIEWPORT_SUPPORT
if (dstX > kOutputViewportWidth || dstY > kOutputViewportHeight)
{
return;
}
#endif
#if NIS_VIEWPORT_SUPPORT
NVF4 op = NVTEX_SAMPLE(in_texture, samplerLinearClamp, NVF2((dstX + kInputViewportOriginX + 0.5f) * kSrcNormX, (dstY + kInputViewportOriginY + 0.5f) * kSrcNormY));
#else
NVF4 op = NVTEX_SAMPLE(in_texture, samplerLinearClamp, NVF2((dstX + 0.5f) * kSrcNormX, (dstY + 0.5f) * kSrcNormY));
#endif
#if NIS_HDR_MODE == NIS_HDR_MODE_LINEAR
const NVF kEps = 1e-4f * kHDRCompressionFactor * kHDRCompressionFactor;
NVF newY = p[2][2] + usmY;
newY = max(newY, 0.0f);
const NVF oldY = p[2][2];
const NVF corr = (newY * newY + kEps) / (oldY * oldY + kEps);
op.x *= corr;
op.y *= corr;
op.z *= corr;
#else
op.x += usmY;
op.y += usmY;
op.z += usmY;
#endif
#if NIS_VIEWPORT_SUPPORT
NVTEX_STORE(out_texture, NVU2(dstX + kOutputViewportOriginX, dstY + kOutputViewportOriginY), op);
#else
NVTEX_STORE(out_texture, NVU2(dstX, dstY), op);
#endif
}
}
#endif | 16,993 |
9,156 | <filename>pulsar-broker/src/test/java/org/apache/pulsar/client/impl/PartialPartitionedProducerTest.java<gh_stars>1000+
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import lombok.Cleanup;
import lombok.extern.slf4j.Slf4j;
import java.lang.reflect.Field;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;
import org.apache.pulsar.broker.BrokerTestUtil;
import org.apache.pulsar.client.api.MessageRoutingMode;
import org.apache.pulsar.client.api.ProducerAccessMode;
import org.apache.pulsar.client.api.ProducerConsumerBase;
import org.apache.pulsar.client.api.TopicMetadata;
import org.apache.pulsar.client.impl.customroute.PartialRoundRobinMessageRouterImpl;
import org.awaitility.Awaitility;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@Slf4j
@Test(groups = "broker-impl")
public class PartialPartitionedProducerTest extends ProducerConsumerBase {
@Override
@BeforeClass
public void setup() throws Exception {
super.internalSetup();
super.producerBaseSetup();
}
@Override
@AfterClass(alwaysRun = true)
public void cleanup() throws Exception {
super.internalCleanup();
}
@Test
public void testPtWithSinglePartition() throws Throwable {
final String topic = BrokerTestUtil.newUniqueName("pt-with-single-routing");
admin.topics().createPartitionedTopic(topic, 10);
@Cleanup
final PartitionedProducerImpl<byte[]> producerImpl = (PartitionedProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topic)
.enableLazyStartPartitionedProducers(true)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
for (int i = 0; i < 10; i++) {
producerImpl.newMessage().value("msg".getBytes()).send();
}
assertEquals(producerImpl.getProducers().size(), 1);
}
@Test
public void testPtWithPartialPartition() throws Throwable {
final String topic = BrokerTestUtil.newUniqueName("pt-with-partial-routing");
admin.topics().createPartitionedTopic(topic, 10);
@Cleanup
final PartitionedProducerImpl<byte[]> producerImpl = (PartitionedProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topic)
.enableLazyStartPartitionedProducers(true)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.CustomPartition)
.messageRouter(new PartialRoundRobinMessageRouterImpl(3))
.create();
for (int i = 0; i < 10; i++) {
producerImpl.newMessage().value("msg".getBytes()).send();
}
assertEquals(producerImpl.getProducers().size(), 3);
}
// AddPartitionTest
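    // With lazy start enabled only the producer of the first routed partition is
    // created when the partitioned producer is built; the remaining partition
    // producers are created on the first message routed to them.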
@Test
public void testPtLazyLoading() throws Throwable {
final String topic = BrokerTestUtil.newUniqueName("pt-lazily");
admin.topics().createPartitionedTopic(topic, 10);
@Cleanup
final PartitionedProducerImpl<byte[]> producerImpl = (PartitionedProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topic)
.enableLazyStartPartitionedProducers(true)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.RoundRobinPartition)
.create();
final Supplier<Boolean> send = () -> {
for (int i = 0; i < 10; i++) {
try {
producerImpl.newMessage().value("msg".getBytes()).send();
} catch (Throwable e) {
return false;
}
}
return true;
};
// create first producer at initialization step
assertEquals(producerImpl.getProducers().size(), 1);
assertTrue(send.get());
assertEquals(producerImpl.getProducers().size(), 10);
}
@Test
public void testPtLoadingNotSharedMode() throws Throwable {
final String topic = BrokerTestUtil.newUniqueName("pt-not-shared-mode");
admin.topics().createPartitionedTopic(topic, 10);
@Cleanup
final PartitionedProducerImpl<byte[]> producerImplExclusive = (PartitionedProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topic)
.enableLazyStartPartitionedProducers(true)
.enableBatching(false)
.accessMode(ProducerAccessMode.Exclusive)
.messageRoutingMode(MessageRoutingMode.RoundRobinPartition)
.create();
        // with a non-shared access mode all partition producers are created at initialization
assertEquals(producerImplExclusive.getProducers().size(), 10);
producerImplExclusive.close();
@Cleanup
final PartitionedProducerImpl<byte[]> producerImplWaitForExclusive = (PartitionedProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topic)
.enableLazyStartPartitionedProducers(true)
.enableBatching(false)
.accessMode(ProducerAccessMode.WaitForExclusive)
.messageRoutingMode(MessageRoutingMode.RoundRobinPartition)
.create();
assertEquals(producerImplWaitForExclusive.getProducers().size(), 10);
}
// AddPartitionAndLimitTest
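    // PartialRoundRobinMessageRouterImpl(3) routes to at most 3 partitions, so no more
    // than 3 partition producers are ever started even as the topic gains partitions;
    // autoUpdatePartitions only refreshes the metadata and does not eagerly add producers.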
@Test
public void testPtUpdateWithPartialPartition() throws Throwable {
final String topic = BrokerTestUtil.newUniqueName("pt-update-with-partial-routing");
admin.topics().createPartitionedTopic(topic, 2);
final Field field = PartitionedProducerImpl.class.getDeclaredField("topicMetadata");
field.setAccessible(true);
@Cleanup
final PartitionedProducerImpl<byte[]> producerImpl = (PartitionedProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topic)
.enableLazyStartPartitionedProducers(true)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.CustomPartition)
.messageRouter(new PartialRoundRobinMessageRouterImpl(3))
.accessMode(ProducerAccessMode.Shared)
.autoUpdatePartitions(true)
.autoUpdatePartitionsInterval(1, TimeUnit.SECONDS)
.create();
final Supplier<Boolean> send = ()-> {
for (int i = 0; i < 10; i++) {
try {
producerImpl.newMessage().value("msg".getBytes()).send();
} catch (Throwable e) {
return false;
}
}
return true;
};
// create first producer at initialization step
assertEquals(producerImpl.getProducers().size(), 1);
assertTrue(send.get());
assertEquals(producerImpl.getProducers().size(), 2);
admin.topics().updatePartitionedTopic(topic, 3);
Awaitility.await().untilAsserted(() ->
assertEquals(((TopicMetadata) field.get(producerImpl)).numPartitions(), 3));
assertEquals(producerImpl.getProducers().size(), 2);
assertTrue(send.get());
assertEquals(producerImpl.getProducers().size(), 3);
admin.topics().updatePartitionedTopic(topic, 4);
Awaitility.await().untilAsserted(() ->
assertEquals(((TopicMetadata) field.get(producerImpl)).numPartitions(), 4));
assertTrue(send.get());
assertEquals(producerImpl.getProducers().size(), 3);
}
@Test
public void testPtUpdateNotSharedMode() throws Throwable {
final String topic = BrokerTestUtil.newUniqueName("pt-update-not-shared");
admin.topics().createPartitionedTopic(topic, 2);
final Field field = PartitionedProducerImpl.class.getDeclaredField("topicMetadata");
field.setAccessible(true);
@Cleanup
final PartitionedProducerImpl<byte[]> producerImplExclusive = (PartitionedProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topic)
.enableLazyStartPartitionedProducers(true)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.RoundRobinPartition)
.accessMode(ProducerAccessMode.Exclusive)
.autoUpdatePartitions(true)
.autoUpdatePartitionsInterval(1, TimeUnit.SECONDS)
.create();
assertEquals(producerImplExclusive.getProducers().size(), 2);
admin.topics().updatePartitionedTopic(topic, 3);
Awaitility.await().untilAsserted(() ->
assertEquals(((TopicMetadata) field.get(producerImplExclusive)).numPartitions(), 3));
assertEquals(producerImplExclusive.getProducers().size(), 3);
producerImplExclusive.close();
@Cleanup
final PartitionedProducerImpl<byte[]> producerImplWaitForExclusive = (PartitionedProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topic)
.enableLazyStartPartitionedProducers(true)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.RoundRobinPartition)
.accessMode(ProducerAccessMode.WaitForExclusive)
.autoUpdatePartitions(true)
.autoUpdatePartitionsInterval(1, TimeUnit.SECONDS)
.create();
assertEquals(producerImplWaitForExclusive.getProducers().size(), 3);
admin.topics().updatePartitionedTopic(topic, 4);
Awaitility.await().untilAsserted(() ->
assertEquals(((TopicMetadata) field.get(producerImplWaitForExclusive)).numPartitions(), 4));
assertEquals(producerImplWaitForExclusive.getProducers().size(), 4);
}
}
| 4,442 |
5,459 | <reponame>minjaes01/react-native-interactable
package com.wix.interactable.physics;
import android.graphics.PointF;
/**
* Created by rotemm on 09/02/2017.
*/
public class PhysicsObject {
PointF velocity;
float mass;
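    // Defaults (set in the no-arg constructor below): zero velocity and unit mass.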
public PhysicsObject() {
this.velocity = new PointF(0,0);
this.mass = 1.0f;
}
public PhysicsObject(PointF velocity, float mass) {
this.velocity = velocity;
this.mass = mass;
}
}
| 197 |
303 | <filename>tests/35_sizeof.c
#include <stdio.h>
char a;
short b;
printf("%d %d\n", sizeof(char), sizeof(a));
printf("%d %d\n", sizeof(short), sizeof(b));
void main() {}
| 73 |
342 | #include "mainwindow.h"
#include <QApplication>
#include <QDebug>
#include <QStringList>
#include <QTranslator>
#include <QLocale>
int main(int argc, char *argv[])
{
QApplication a(argc, argv);
QString systemLang = QLocale::languageToString(QLocale::system().language());
QTranslator t;
if(systemLang == "Turkish")
{
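        // Presumably Turkish is the language the UI strings are written in,
        // so no translator needs to be installed for it.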
}
else if (systemLang == "Korean")
{
t.load(":/lang_ko.qm");
a.installTranslator(&t);
}
else
{
t.load(":/lang_en.qm");
a.installTranslator(&t);
}
MainWindow w(a.arguments());
QStringList argList = a.arguments();
QString argument = argList.join(",");
bool silent = argument.contains("-silent");
if(!silent)
{
w.show();
}
return a.exec();
}
| 354 |
1,350 | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.loadtestservice.implementation;
import com.azure.core.annotation.BodyParam;
import com.azure.core.annotation.Delete;
import com.azure.core.annotation.ExpectedResponses;
import com.azure.core.annotation.Get;
import com.azure.core.annotation.HeaderParam;
import com.azure.core.annotation.Headers;
import com.azure.core.annotation.Host;
import com.azure.core.annotation.HostParam;
import com.azure.core.annotation.Patch;
import com.azure.core.annotation.PathParam;
import com.azure.core.annotation.Put;
import com.azure.core.annotation.QueryParam;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceInterface;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.annotation.UnexpectedResponseExceptionType;
import com.azure.core.http.rest.PagedFlux;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.PagedResponse;
import com.azure.core.http.rest.PagedResponseBase;
import com.azure.core.http.rest.Response;
import com.azure.core.http.rest.RestProxy;
import com.azure.core.management.exception.ManagementException;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.FluxUtil;
import com.azure.core.util.logging.ClientLogger;
import com.azure.core.util.polling.PollerFlux;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.loadtestservice.fluent.LoadTestsClient;
import com.azure.resourcemanager.loadtestservice.fluent.models.LoadTestResourceInner;
import com.azure.resourcemanager.loadtestservice.models.LoadTestResourcePageList;
import com.azure.resourcemanager.loadtestservice.models.LoadTestResourcePatchRequestBody;
import java.nio.ByteBuffer;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
/** An instance of this class provides access to all the operations defined in LoadTestsClient. */
public final class LoadTestsClientImpl implements LoadTestsClient {
private final ClientLogger logger = new ClientLogger(LoadTestsClientImpl.class);
/** The proxy service used to perform REST calls. */
private final LoadTestsService service;
/** The service client containing this operation class. */
private final LoadTestClientImpl client;
/**
* Initializes an instance of LoadTestsClientImpl.
*
* @param client the instance of the service client containing this operation class.
*/
LoadTestsClientImpl(LoadTestClientImpl client) {
this.service =
RestProxy.create(LoadTestsService.class, client.getHttpPipeline(), client.getSerializerAdapter());
this.client = client;
}
/**
* The interface defining all the services for LoadTestClientLoadTests to be used by the proxy service to perform
* REST calls.
*/
@Host("{$host}")
@ServiceInterface(name = "LoadTestClientLoadTe")
private interface LoadTestsService {
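        // Each method below describes one ARM REST call; RestProxy.create() (see the
        // constructor above) generates the implementation of this interface from these
        // annotations at runtime.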
@Headers({"Content-Type: application/json"})
@Get("/subscriptions/{subscriptionId}/providers/Microsoft.LoadTestService/loadTests")
@ExpectedResponses({200})
@UnexpectedResponseExceptionType(ManagementException.class)
Mono<Response<LoadTestResourcePageList>> list(
@HostParam("$host") String endpoint,
@PathParam("subscriptionId") String subscriptionId,
@QueryParam("api-version") String apiVersion,
@HeaderParam("Accept") String accept,
Context context);
@Headers({"Content-Type: application/json"})
@Get(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LoadTestService"
+ "/loadTests")
@ExpectedResponses({200})
@UnexpectedResponseExceptionType(ManagementException.class)
Mono<Response<LoadTestResourcePageList>> listByResourceGroup(
@HostParam("$host") String endpoint,
@PathParam("subscriptionId") String subscriptionId,
@PathParam("resourceGroupName") String resourceGroupName,
@QueryParam("api-version") String apiVersion,
@HeaderParam("Accept") String accept,
Context context);
@Headers({"Content-Type: application/json"})
@Get(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LoadTestService"
+ "/loadTests/{loadTestName}")
@ExpectedResponses({200})
@UnexpectedResponseExceptionType(ManagementException.class)
Mono<Response<LoadTestResourceInner>> getByResourceGroup(
@HostParam("$host") String endpoint,
@PathParam("subscriptionId") String subscriptionId,
@PathParam("resourceGroupName") String resourceGroupName,
@QueryParam("api-version") String apiVersion,
@PathParam("loadTestName") String loadTestName,
@HeaderParam("Accept") String accept,
Context context);
@Headers({"Content-Type: application/json"})
@Put(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LoadTestService"
+ "/loadTests/{loadTestName}")
@ExpectedResponses({200})
@UnexpectedResponseExceptionType(ManagementException.class)
Mono<Response<LoadTestResourceInner>> createOrUpdate(
@HostParam("$host") String endpoint,
@PathParam("subscriptionId") String subscriptionId,
@PathParam("resourceGroupName") String resourceGroupName,
@QueryParam("api-version") String apiVersion,
@PathParam("loadTestName") String loadTestName,
@BodyParam("application/json") LoadTestResourceInner loadTestResource,
@HeaderParam("Accept") String accept,
Context context);
@Headers({"Content-Type: application/json"})
@Patch(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LoadTestService"
+ "/loadTests/{loadTestName}")
@ExpectedResponses({200})
@UnexpectedResponseExceptionType(ManagementException.class)
Mono<Response<LoadTestResourceInner>> update(
@HostParam("$host") String endpoint,
@PathParam("subscriptionId") String subscriptionId,
@PathParam("resourceGroupName") String resourceGroupName,
@QueryParam("api-version") String apiVersion,
@PathParam("loadTestName") String loadTestName,
@BodyParam("application/json") LoadTestResourcePatchRequestBody loadTestResourcePatchRequestBody,
@HeaderParam("Accept") String accept,
Context context);
@Headers({"Content-Type: application/json"})
@Delete(
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.LoadTestService"
+ "/loadTests/{loadTestName}")
@ExpectedResponses({200, 202, 204})
@UnexpectedResponseExceptionType(ManagementException.class)
Mono<Response<Flux<ByteBuffer>>> delete(
@HostParam("$host") String endpoint,
@PathParam("subscriptionId") String subscriptionId,
@PathParam("resourceGroupName") String resourceGroupName,
@QueryParam("api-version") String apiVersion,
@PathParam("loadTestName") String loadTestName,
@HeaderParam("Accept") String accept,
Context context);
@Headers({"Content-Type: application/json"})
@Get("{nextLink}")
@ExpectedResponses({200})
@UnexpectedResponseExceptionType(ManagementException.class)
Mono<Response<LoadTestResourcePageList>> listBySubscriptionNext(
@PathParam(value = "nextLink", encoded = true) String nextLink,
@HostParam("$host") String endpoint,
@HeaderParam("Accept") String accept,
Context context);
@Headers({"Content-Type: application/json"})
@Get("{nextLink}")
@ExpectedResponses({200})
@UnexpectedResponseExceptionType(ManagementException.class)
Mono<Response<LoadTestResourcePageList>> listByResourceGroupNext(
@PathParam(value = "nextLink", encoded = true) String nextLink,
@HostParam("$host") String endpoint,
@HeaderParam("Accept") String accept,
Context context);
}
/**
* Lists loadtests resources in a subscription.
*
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<LoadTestResourceInner>> listSinglePageAsync() {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
final String accept = "application/json";
return FluxUtil
.withContext(
context ->
service
.list(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
this.client.getApiVersion(),
accept,
context))
.<PagedResponse<LoadTestResourceInner>>map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
* Lists loadtests resources in a subscription.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<LoadTestResourceInner>> listSinglePageAsync(Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.list(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
this.client.getApiVersion(),
accept,
context)
.map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null));
}
/**
* Lists loadtests resources in a subscription.
*
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
private PagedFlux<LoadTestResourceInner> listAsync() {
return new PagedFlux<>(
() -> listSinglePageAsync(), nextLink -> listBySubscriptionNextSinglePageAsync(nextLink));
}
/**
* Lists loadtests resources in a subscription.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
private PagedFlux<LoadTestResourceInner> listAsync(Context context) {
return new PagedFlux<>(
() -> listSinglePageAsync(context), nextLink -> listBySubscriptionNextSinglePageAsync(nextLink, context));
}
/**
* Lists loadtests resources in a subscription.
*
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<LoadTestResourceInner> list() {
return new PagedIterable<>(listAsync());
}
/**
* Lists loadtests resources in a subscription.
*
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<LoadTestResourceInner> list(Context context) {
return new PagedIterable<>(listAsync(context));
}
/**
* Lists loadtest resources in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<LoadTestResourceInner>> listByResourceGroupSinglePageAsync(String resourceGroupName) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
final String accept = "application/json";
return FluxUtil
.withContext(
context ->
service
.listByResourceGroup(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
accept,
context))
.<PagedResponse<LoadTestResourceInner>>map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
* Lists loadtest resources in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<LoadTestResourceInner>> listByResourceGroupSinglePageAsync(
String resourceGroupName, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.listByResourceGroup(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
accept,
context)
.map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null));
}
/**
* Lists loadtest resources in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
private PagedFlux<LoadTestResourceInner> listByResourceGroupAsync(String resourceGroupName) {
return new PagedFlux<>(
() -> listByResourceGroupSinglePageAsync(resourceGroupName),
nextLink -> listByResourceGroupNextSinglePageAsync(nextLink));
}
/**
* Lists loadtest resources in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
private PagedFlux<LoadTestResourceInner> listByResourceGroupAsync(String resourceGroupName, Context context) {
return new PagedFlux<>(
() -> listByResourceGroupSinglePageAsync(resourceGroupName, context),
nextLink -> listByResourceGroupNextSinglePageAsync(nextLink, context));
}
/**
* Lists loadtest resources in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<LoadTestResourceInner> listByResourceGroup(String resourceGroupName) {
return new PagedIterable<>(listByResourceGroupAsync(resourceGroupName));
}
/**
* Lists loadtest resources in a resource group.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
public PagedIterable<LoadTestResourceInner> listByResourceGroup(String resourceGroupName, Context context) {
return new PagedIterable<>(listByResourceGroupAsync(resourceGroupName, context));
}
/**
* Get a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a LoadTest resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<LoadTestResourceInner>> getByResourceGroupWithResponseAsync(
String resourceGroupName, String loadTestName) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (loadTestName == null) {
return Mono.error(new IllegalArgumentException("Parameter loadTestName is required and cannot be null."));
}
final String accept = "application/json";
return FluxUtil
.withContext(
context ->
service
.getByResourceGroup(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
loadTestName,
accept,
context))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
* Get a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a LoadTest resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<LoadTestResourceInner>> getByResourceGroupWithResponseAsync(
String resourceGroupName, String loadTestName, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (loadTestName == null) {
return Mono.error(new IllegalArgumentException("Parameter loadTestName is required and cannot be null."));
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.getByResourceGroup(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
loadTestName,
accept,
context);
}
/**
* Get a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a LoadTest resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<LoadTestResourceInner> getByResourceGroupAsync(String resourceGroupName, String loadTestName) {
return getByResourceGroupWithResponseAsync(resourceGroupName, loadTestName)
.flatMap(
(Response<LoadTestResourceInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Get a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a LoadTest resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public LoadTestResourceInner getByResourceGroup(String resourceGroupName, String loadTestName) {
return getByResourceGroupAsync(resourceGroupName, loadTestName).block();
}
/**
* Get a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a LoadTest resource.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<LoadTestResourceInner> getByResourceGroupWithResponse(
String resourceGroupName, String loadTestName, Context context) {
return getByResourceGroupWithResponseAsync(resourceGroupName, loadTestName, context).block();
}
/**
* Create or update LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResource LoadTest resource data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<LoadTestResourceInner>> createOrUpdateWithResponseAsync(
String resourceGroupName, String loadTestName, LoadTestResourceInner loadTestResource) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (loadTestName == null) {
return Mono.error(new IllegalArgumentException("Parameter loadTestName is required and cannot be null."));
}
if (loadTestResource == null) {
return Mono
.error(new IllegalArgumentException("Parameter loadTestResource is required and cannot be null."));
} else {
loadTestResource.validate();
}
final String accept = "application/json";
return FluxUtil
.withContext(
context ->
service
.createOrUpdate(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
loadTestName,
loadTestResource,
accept,
context))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
* Create or update LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResource LoadTest resource data.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<LoadTestResourceInner>> createOrUpdateWithResponseAsync(
String resourceGroupName, String loadTestName, LoadTestResourceInner loadTestResource, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (loadTestName == null) {
return Mono.error(new IllegalArgumentException("Parameter loadTestName is required and cannot be null."));
}
if (loadTestResource == null) {
return Mono
.error(new IllegalArgumentException("Parameter loadTestResource is required and cannot be null."));
} else {
loadTestResource.validate();
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.createOrUpdate(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
loadTestName,
loadTestResource,
accept,
context);
}
/**
* Create or update LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResource LoadTest resource data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<LoadTestResourceInner> createOrUpdateAsync(
String resourceGroupName, String loadTestName, LoadTestResourceInner loadTestResource) {
return createOrUpdateWithResponseAsync(resourceGroupName, loadTestName, loadTestResource)
.flatMap(
(Response<LoadTestResourceInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Create or update LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResource LoadTest resource data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public LoadTestResourceInner createOrUpdate(
String resourceGroupName, String loadTestName, LoadTestResourceInner loadTestResource) {
return createOrUpdateAsync(resourceGroupName, loadTestName, loadTestResource).block();
}
/**
* Create or update LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResource LoadTest resource data.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<LoadTestResourceInner> createOrUpdateWithResponse(
String resourceGroupName, String loadTestName, LoadTestResourceInner loadTestResource, Context context) {
return createOrUpdateWithResponseAsync(resourceGroupName, loadTestName, loadTestResource, context).block();
}
/**
* Update a loadtest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResourcePatchRequestBody LoadTest resource update data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<LoadTestResourceInner>> updateWithResponseAsync(
String resourceGroupName,
String loadTestName,
LoadTestResourcePatchRequestBody loadTestResourcePatchRequestBody) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (loadTestName == null) {
return Mono.error(new IllegalArgumentException("Parameter loadTestName is required and cannot be null."));
}
if (loadTestResourcePatchRequestBody == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter loadTestResourcePatchRequestBody is required and cannot be null."));
} else {
loadTestResourcePatchRequestBody.validate();
}
final String accept = "application/json";
return FluxUtil
.withContext(
context ->
service
.update(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
loadTestName,
loadTestResourcePatchRequestBody,
accept,
context))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
* Update a loadtest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResourcePatchRequestBody LoadTest resource update data.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<LoadTestResourceInner>> updateWithResponseAsync(
String resourceGroupName,
String loadTestName,
LoadTestResourcePatchRequestBody loadTestResourcePatchRequestBody,
Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (loadTestName == null) {
return Mono.error(new IllegalArgumentException("Parameter loadTestName is required and cannot be null."));
}
if (loadTestResourcePatchRequestBody == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter loadTestResourcePatchRequestBody is required and cannot be null."));
} else {
loadTestResourcePatchRequestBody.validate();
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.update(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
loadTestName,
loadTestResourcePatchRequestBody,
accept,
context);
}
/**
* Update a loadtest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResourcePatchRequestBody LoadTest resource update data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<LoadTestResourceInner> updateAsync(
String resourceGroupName,
String loadTestName,
LoadTestResourcePatchRequestBody loadTestResourcePatchRequestBody) {
return updateWithResponseAsync(resourceGroupName, loadTestName, loadTestResourcePatchRequestBody)
.flatMap(
(Response<LoadTestResourceInner> res) -> {
if (res.getValue() != null) {
return Mono.just(res.getValue());
} else {
return Mono.empty();
}
});
}
/**
* Update a loadtest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResourcePatchRequestBody LoadTest resource update data.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public LoadTestResourceInner update(
String resourceGroupName,
String loadTestName,
LoadTestResourcePatchRequestBody loadTestResourcePatchRequestBody) {
return updateAsync(resourceGroupName, loadTestName, loadTestResourcePatchRequestBody).block();
}
/**
* Update a loadtest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param loadTestResourcePatchRequestBody LoadTest resource update data.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return loadTest details.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public Response<LoadTestResourceInner> updateWithResponse(
String resourceGroupName,
String loadTestName,
LoadTestResourcePatchRequestBody loadTestResourcePatchRequestBody,
Context context) {
return updateWithResponseAsync(resourceGroupName, loadTestName, loadTestResourcePatchRequestBody, context)
.block();
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(String resourceGroupName, String loadTestName) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (loadTestName == null) {
return Mono.error(new IllegalArgumentException("Parameter loadTestName is required and cannot be null."));
}
final String accept = "application/json";
return FluxUtil
.withContext(
context ->
service
.delete(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
loadTestName,
accept,
context))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Response<Flux<ByteBuffer>>> deleteWithResponseAsync(
String resourceGroupName, String loadTestName, Context context) {
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
if (this.client.getSubscriptionId() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getSubscriptionId() is required and cannot be null."));
}
if (resourceGroupName == null) {
return Mono
.error(new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."));
}
if (loadTestName == null) {
return Mono.error(new IllegalArgumentException("Parameter loadTestName is required and cannot be null."));
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.delete(
this.client.getEndpoint(),
this.client.getSubscriptionId(),
resourceGroupName,
this.client.getApiVersion(),
loadTestName,
accept,
context);
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
private PollerFlux<PollResult<Void>, Void> beginDeleteAsync(String resourceGroupName, String loadTestName) {
Mono<Response<Flux<ByteBuffer>>> mono = deleteWithResponseAsync(resourceGroupName, loadTestName);
return this
.client
.<Void, Void>getLroResult(
mono, this.client.getHttpPipeline(), Void.class, Void.class, this.client.getContext());
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
private PollerFlux<PollResult<Void>, Void> beginDeleteAsync(
String resourceGroupName, String loadTestName, Context context) {
context = this.client.mergeContext(context);
Mono<Response<Flux<ByteBuffer>>> mono = deleteWithResponseAsync(resourceGroupName, loadTestName, context);
return this
.client
.<Void, Void>getLroResult(mono, this.client.getHttpPipeline(), Void.class, Void.class, context);
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public SyncPoller<PollResult<Void>, Void> beginDelete(String resourceGroupName, String loadTestName) {
return beginDeleteAsync(resourceGroupName, loadTestName).getSyncPoller();
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
public SyncPoller<PollResult<Void>, Void> beginDelete(
String resourceGroupName, String loadTestName, Context context) {
return beginDeleteAsync(resourceGroupName, loadTestName, context).getSyncPoller();
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Void> deleteAsync(String resourceGroupName, String loadTestName) {
return beginDeleteAsync(resourceGroupName, loadTestName).last().flatMap(this.client::getLroFinalResultOrError);
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the completion.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<Void> deleteAsync(String resourceGroupName, String loadTestName, Context context) {
return beginDeleteAsync(resourceGroupName, loadTestName, context)
.last()
.flatMap(this.client::getLroFinalResultOrError);
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void delete(String resourceGroupName, String loadTestName) {
deleteAsync(resourceGroupName, loadTestName).block();
}
/**
* Delete a LoadTest resource.
*
* @param resourceGroupName The name of the resource group. The name is case insensitive.
* @param loadTestName Load Test name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
public void delete(String resourceGroupName, String loadTestName, Context context) {
deleteAsync(resourceGroupName, loadTestName, context).block();
}
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<LoadTestResourceInner>> listBySubscriptionNextSinglePageAsync(String nextLink) {
if (nextLink == null) {
return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
}
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
final String accept = "application/json";
return FluxUtil
.withContext(
context -> service.listBySubscriptionNext(nextLink, this.client.getEndpoint(), accept, context))
.<PagedResponse<LoadTestResourceInner>>map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<LoadTestResourceInner>> listBySubscriptionNextSinglePageAsync(
String nextLink, Context context) {
if (nextLink == null) {
return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
}
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.listBySubscriptionNext(nextLink, this.client.getEndpoint(), accept, context)
.map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null));
}
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<LoadTestResourceInner>> listByResourceGroupNextSinglePageAsync(String nextLink) {
if (nextLink == null) {
return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
}
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
final String accept = "application/json";
return FluxUtil
.withContext(
context -> service.listByResourceGroupNext(nextLink, this.client.getEndpoint(), accept, context))
.<PagedResponse<LoadTestResourceInner>>map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null))
.contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
}
/**
* Get the next page of items.
*
* @param nextLink The nextLink parameter.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of resources page result.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
private Mono<PagedResponse<LoadTestResourceInner>> listByResourceGroupNextSinglePageAsync(
String nextLink, Context context) {
if (nextLink == null) {
return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
}
if (this.client.getEndpoint() == null) {
return Mono
.error(
new IllegalArgumentException(
"Parameter this.client.getEndpoint() is required and cannot be null."));
}
final String accept = "application/json";
context = this.client.mergeContext(context);
return service
.listByResourceGroupNext(nextLink, this.client.getEndpoint(), accept, context)
.map(
res ->
new PagedResponseBase<>(
res.getRequest(),
res.getStatusCode(),
res.getHeaders(),
res.getValue().value(),
res.getValue().nextLink(),
null));
}
}
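// Illustrative call pattern (hypothetical, not part of the generated client): assumes an
// already constructed instance of this client class named "loadTests"; the resource-group and
// load-test names below are placeholders.
//
//   LoadTestResourceInner resource = loadTests.getByResourceGroup("myResourceGroup", "myLoadTest");
//   loadTests.delete("myResourceGroup", "myLoadTest");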
| 25,077 |
353 | #pragma once
#include <c10/core/Allocator.h>
#include <c10/core/DeviceType.h>
// Use of c10::hip namespace here makes hipification easier, because
// I don't have to also fix namespaces. Sorry!
namespace c10 { namespace hip {
// Takes a valid HIPAllocator (of any sort) and turns it into
// an allocator pretending to be a CUDA allocator. See
// Note [Masquerading as CUDA]
class HIPAllocatorMasqueradingAsCUDA final : public Allocator {
Allocator* allocator_;
public:
explicit HIPAllocatorMasqueradingAsCUDA(Allocator* allocator)
: allocator_(allocator) {}
DataPtr allocate(size_t size) const override {
DataPtr r = allocator_->allocate(size);
r.unsafe_set_device(Device(DeviceType::CUDA, r.device().index()));
return r;
}
DeleterFnPtr raw_deleter() const override {
return allocator_->raw_deleter();
}
};
}} // namespace c10::hip
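// Hypothetical usage sketch (illustration only; getHIPDeviceAllocator() is an assumed helper,
// not part of this header): wrap an existing HIP allocator so the DataPtrs it hands out report
// DeviceType::CUDA while still being backed by HIP memory.
//
//   c10::Allocator* hip_alloc = getHIPDeviceAllocator();
//   static c10::hip::HIPAllocatorMasqueradingAsCUDA masq(hip_alloc);
//   c10::DataPtr p = masq.allocate(1024);  // p.device().type() == c10::DeviceType::CUDA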
| 330 |
387 | <filename>SystemExplorer/ProcessPropertiesDlg.h
#pragma once
#include "resource.h"
#include "ProcessInfoEx.h"
#include "ProcessManager.h"
class CProcessPropertiesDlg :
public CDialogImpl<CProcessPropertiesDlg>,
public CDynamicDialogLayout<CProcessPropertiesDlg> {
public:
enum { IDD = IDD_PROCESS_PROPERTIES };
CProcessPropertiesDlg(const WinSys::ProcessManager& pm, ProcessInfoEx& px) : m_pm(pm), m_px(px) {}
void SetModal(bool modal) {
m_Modal = modal;
}
void OnFinalMessage(HWND) override;
BEGIN_MSG_MAP(CProcessPropertiesDlg)
MESSAGE_HANDLER(WM_GETMINMAXINFO, OnGetMinMaxInfo)
MESSAGE_HANDLER(WM_CTLCOLORDLG, OnDialogColor)
MESSAGE_HANDLER(WM_CTLCOLORSTATIC, OnDialogColor)
MESSAGE_HANDLER(WM_CTLCOLOREDIT, OnDialogColor)
MESSAGE_HANDLER(WM_CTLCOLORSCROLLBAR, OnDialogColor)
MESSAGE_HANDLER(WM_INITDIALOG, OnInitDialog)
COMMAND_ID_HANDLER(IDCANCEL, OnCloseCmd)
COMMAND_ID_HANDLER(IDC_COPY, OnCopy)
COMMAND_ID_HANDLER(IDOK, OnCloseCmd)
COMMAND_ID_HANDLER(IDC_EXPLORE, OnExplore)
COMMAND_ID_HANDLER(IDC_EXPLORE_DIR, OnExploreDirectory)
COMMAND_ID_HANDLER(IDC_TOKEN, OnShowToken)
COMMAND_ID_HANDLER(IDC_JOB, OnShowJob)
COMMAND_ID_HANDLER(IDC_ENV, OnShowEnvironment)
CHAIN_MSG_MAP(CDynamicDialogLayout<CProcessPropertiesDlg>)
END_MSG_MAP()
private:
void InitProcess();
LRESULT OnDialogColor(UINT /*uMsg*/, WPARAM /*wParam*/, LPARAM /*lParam*/, BOOL& /*bHandled*/);
LRESULT OnInitDialog(UINT /*uMsg*/, WPARAM /*wParam*/, LPARAM /*lParam*/, BOOL& /*bHandled*/);
LRESULT OnCloseCmd(WORD /*wNotifyCode*/, WORD wID, HWND /*hWndCtl*/, BOOL& /*bHandled*/);
LRESULT OnExplore(WORD /*wNotifyCode*/, WORD wID, HWND /*hWndCtl*/, BOOL& /*bHandled*/);
LRESULT OnGetMinMaxInfo(UINT /*uMsg*/, WPARAM /*wParam*/, LPARAM /*lParam*/, BOOL& /*bHandled*/);
LRESULT OnExploreDirectory(WORD /*wNotifyCode*/, WORD wID, HWND /*hWndCtl*/, BOOL& /*bHandled*/);
LRESULT OnCopy(WORD /*wNotifyCode*/, WORD wID, HWND /*hWndCtl*/, BOOL& /*bHandled*/);
LRESULT OnShowToken(WORD /*wNotifyCode*/, WORD wID, HWND /*hWndCtl*/, BOOL& /*bHandled*/);
LRESULT OnShowEnvironment(WORD /*wNotifyCode*/, WORD wID, HWND /*hWndCtl*/, BOOL& /*bHandled*/);
LRESULT OnShowJob(WORD /*wNotifyCode*/, WORD wID, HWND /*hWndCtl*/, BOOL& /*bHandled*/);
private:
ProcessInfoEx& m_px;
const WinSys::ProcessManager& m_pm;
bool m_Modal{ false };
};
| 1,045 |
1,991 | //
// DGActivityIndicatorRotatingTrigonsAnimation.h
// DGActivityIndicatorExample
//
// Created by tripleCC on 15/6/26.
// Copyright (c) 2015 <NAME>. All rights reserved.

//
#import "DGActivityIndicatorAnimation.h"
@interface DGActivityIndicatorRotatingTrigonAnimation: DGActivityIndicatorAnimation
@end
| 97 |
867 | """
Copyright 2016 Deepgram
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .subclass import get_subclasses
from .flatiter import flatten, concatenate
from .iterhelp import get_any_value, get_any_key, merge_dict, parallelize, \
partial_sum
from .importhelp import can_import
from .network import download, get_hash, prepare_multipart, prepare_json, \
UploadFile, UploadFileData
from .timer import Timer
from .critical import CriticalSection
from . import idx
from . import mergetools
from .environtools import EnvironmentalVariable
from .contexttools import redirect_stderr, DisableLogging
from .package import unpack, canonicalize, install
from .filetools import count_lines
from .audiotools import load_audio, get_audio_features
from .normalize import Normalize
from . import neighbor_sort
from .cuda import CudaContext, CudaError
### EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF.EOF
| 410 |
32,544 | <reponame>DBatOWL/tutorials<filename>core-java-modules/core-java-string-algorithms-3/src/test/java/com/baeldung/isuppercase/StringFirstCharacterUppercaseUnitTest.java
package com.baeldung.isuppercase;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInstance;
import com.google.common.base.Ascii;
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
public class StringFirstCharacterUppercaseUnitTest {
@Test
public void givenString_whenCheckingWithCharacterIsUpperCase_thenStringCapitalized() {
String example = "Katie";
Assertions.assertTrue(Character.isUpperCase(example.charAt(0)));
}
@Test
public void givenString_whenCheckingWithRegex_thenStringCapitalized() {
String example = "Katie";
String regEx = "[A-Z]\\w*";
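        // Note: String.matches() must match the entire string, so this pattern also requires
        // every character after the first to be a word character (letter, digit, or underscore).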
Assertions.assertTrue(example.matches(regEx));
}
@Test
public void givenString_whenCheckingWithGuava_thenStringCapitalized() {
String example = "Katie";
Assertions.assertTrue(Ascii.isUpperCase(example.charAt(0)));
}
}
| 421 |
2,151 | <reponame>rio-31/android_frameworks_base-1
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.systemui.statusbar.phone;
import android.graphics.Canvas;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.view.View;
import android.widget.ImageView;
/** Creates a scaled-up version of an app icon for dragging. */
class AppIconDragShadowBuilder extends View.DragShadowBuilder {
private final static int ICON_SCALE = 2;
final Drawable mDrawable;
final int mIconSize; // Height and width in device-pixels.
public AppIconDragShadowBuilder(ImageView icon) {
mDrawable = icon.getDrawable();
// The Drawable may not be the same size as the ImageView, so use the ImageView size.
// The ImageView is not square because it has additional left and right padding to create
// a wider drop target, so use the height to create a square drag shadow.
mIconSize = icon.getHeight() * ICON_SCALE;
}
@Override
public void onProvideShadowMetrics(Point size, Point touch) {
size.set(mIconSize, mIconSize);
// Shift the drag shadow up slightly because the apps are at the bottom edge of the
// screen.
touch.set(mIconSize / 2, mIconSize * 2 / 3);
}
@Override
public void onDrawShadow(Canvas canvas) {
// The Drawable's native bounds may be different than the source ImageView. Force it
// to the correct size.
Rect oldBounds = mDrawable.copyBounds();
mDrawable.setBounds(0, 0, mIconSize, mIconSize);
mDrawable.draw(canvas);
mDrawable.setBounds(oldBounds);
}
}
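// Illustrative usage (assumed caller code, not part of this class): start a drag from the app
// icon's ImageView using the scaled shadow; the ClipData content here is only a placeholder.
//
//   ClipData clip = ClipData.newPlainText("", "");
//   iconView.startDragAndDrop(clip, new AppIconDragShadowBuilder(iconView), null, 0);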
| 741 |
662 | <gh_stars>100-1000
///////////////////////////////////////////////////////////////////////////////
//
// AutobahnJava - http://crossbar.io/autobahn
//
// Copyright (c) Crossbar.io Technologies GmbH and contributors
//
// Licensed under the MIT License.
// http://www.opensource.org/licenses/mit-license.php
//
///////////////////////////////////////////////////////////////////////////////
package io.crossbar.autobahn.wamp.types;
public class ExitInfo {
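    // Process-style exit code: 0 for a clean shutdown; the boolean constructor maps
    // wasClean == false to 1, and the int constructor accepts any caller-supplied code.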
public final int code;
public ExitInfo() {
code = 0;
}
public ExitInfo(boolean wasClean) {
if (wasClean) {
code = 0;
} else {
code = 1;
}
}
public ExitInfo(int _code) {
code = _code;
}
}
| 273 |
1,056 | <reponame>timfel/netbeans
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.spi.viewmodel;
import java.util.Arrays;
import java.util.EventObject;
/**
 * Encapsulates information describing changes to a model, and is
* used to notify model listeners of the change.
*
* @author <NAME>
* @since 1.4
*/
public class ModelEvent extends EventObject {
private ModelEvent (Object source) {
super (source);
}
/**
* Used to notify that whole content of tree has been changed.
*
* @since 1.4
*/
public static class TreeChanged extends ModelEvent {
/**
* Creates a new instance of TreeChanged event.
*
         * @param source a source of the event.
*
* @since 1.4
*/
public TreeChanged (Object source) {
super (source);
}
}
/**
* Used to notify that one cell in table has been changed.
*
* @since 1.4
*/
public static class TableValueChanged extends ModelEvent {
/**
* The mask for value change.
* @since 1.42
*/
public static final int VALUE_MASK = 1;
/**
* The mask for HTML value change.
* @since 1.42
*/
public static final int HTML_VALUE_MASK = 2;
/**
* The mask for change of the read only state.
* @since 1.42
*/
public static final int IS_READ_ONLY_MASK = 4;
private Object node;
private String columnID;
private int change;
/**
* Creates a new instance of TableValueChanged event.
*
         * @param source a source of the event.
* @param node a changed node instance
* @param columnID a changed column name
*
* @since 1.4
*/
public TableValueChanged (
Object source,
Object node,
String columnID
) {
this(source, node, columnID, 0xffffffff);
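            // 0xffffffff turns on every *_MASK bit, i.e. "treat everything about this cell as changed".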
}
/**
* Creates a new instance of TableValueChanged event.
*
* @param source a source if event.
* @param node a changed node instance
* @param columnID a changed column name
* @param change one of the *_MASK constants or their aggregation.
* @since 1.42
*/
public TableValueChanged (
Object source,
Object node,
String columnID,
int change
) {
super (source);
this.node = node;
this.columnID = columnID;
this.change = change;
}
/**
* Returns changed node instance.
*
* @return changed node instance
*
* @since 1.4
*/
public Object getNode () {
return node;
}
/**
* Returns changed column name.
*
* @return changed column name
*
* @since 1.4
*/
public String getColumnID () {
return columnID;
}
/**
* Get the change mask.
*
* @return the change mask, one of the *_MASK constants or their aggregation.
* @since 1.42
*/
public int getChange() {
return change;
}
}
/**
* Used to notify that one node has been changed (icon, displayName and
* children).
*
* @since 1.4
*/
public static class NodeChanged extends ModelEvent {
/**
* The mask for display name change.
* @since 1.6
*/
public static final int DISPLAY_NAME_MASK = 1;
/**
* The mask for icon change.
* @since 1.6
*/
public static final int ICON_MASK = 2;
/**
* The mask for short description change.
* @since 1.6
*/
public static final int SHORT_DESCRIPTION_MASK = 4;
/**
* The mask for children change.
* @since 1.6
*/
public static final int CHILDREN_MASK = 8;
/**
* The mask for expansion change.
* @since 1.15
*/
public static final int EXPANSION_MASK = 16;
private Object node;
private int change;
/**
* Creates a new instance of NodeChanged event.
*
         * @param source a source of the event.
* @param node a changed node instance
*
* @since 1.4
*/
public NodeChanged (
Object source,
Object node
) {
this (source, node, 0xFFFFFFFF);
}
/**
* Creates a new instance of NodeChanged event.
*
         * @param source a source of the event.
* @param node a changed node instance.
         * @param change one of the *_MASK constants or their aggregation.
*
* @since 1.6
*/
public NodeChanged(Object source, Object node, int change) {
super (source);
this.node = node;
this.change = change;
}
/**
* Returns changed node instance.
*
* @return changed node instance
*
* @since 1.4
*/
public Object getNode () {
return node;
}
/**
* Get the change mask.
*
         * @return the change mask, one of the *_MASK constants or their aggregation.
* @since 1.6
*/
public int getChange() {
return change;
}
@Override
public String toString() {
return super.toString()+"(node = "+node+", change = "+getChangeString(change)+")";
}
private static String getChangeString(int change) {
StringBuilder sb = new StringBuilder();
if ((change & DISPLAY_NAME_MASK) != 0) {
sb.append("DISPLAY_NAME, ");
}
if ((change & ICON_MASK) != 0) {
sb.append("ICON, ");
}
if ((change & SHORT_DESCRIPTION_MASK) != 0) {
sb.append("SHORT_DESCRIPTION, ");
}
if ((change & CHILDREN_MASK) != 0) {
sb.append("CHILDREN, ");
}
if ((change & EXPANSION_MASK) != 0) {
sb.append("EXPANSION, ");
}
if (sb.length() > 0) {
sb.delete(sb.length() - 2, sb.length());
}
return sb.toString();
}
}
/**
* Event to change a selection in the tree table view.
*
* @since 1.19
*/
public static class SelectionChanged extends ModelEvent {
private Object[] nodes;
/**
* Creates a new instance of SelectionChanged event.
*
* @param source the source of the event.
* @param nodes list of selected node instances. All nodes are deselected
* when this list is empty.
*/
public SelectionChanged(Object source, Object... nodes) {
super (source);
this.nodes = nodes;
}
/**
* Returns selected node instances.
*
* @return selected node instances
*/
public Object[] getNodes() {
return nodes;
}
@Override
public String toString() {
return super.toString()+"(nodes = "+Arrays.toString(nodes)+")"; // NOI18N
}
}
}
| 3,980 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.debugger.ui.actions;
import java.awt.event.ActionEvent;
import java.beans.PropertyChangeEvent;
import java.lang.ref.WeakReference;
import java.util.Map;
import javax.swing.AbstractAction;
import javax.swing.Action;
import javax.swing.SwingUtilities;
import org.netbeans.api.debugger.ActionsManager;
import org.netbeans.api.debugger.ActionsManagerListener;
import org.netbeans.api.debugger.DebuggerEngine;
import org.netbeans.api.debugger.DebuggerManager;
import org.netbeans.api.debugger.DebuggerManagerAdapter;
import org.netbeans.spi.project.ActionProvider;
import org.netbeans.spi.project.ui.support.FileSensitiveActions;
import org.openide.util.ImageUtilities;
import org.openide.util.NbBundle;
/**
*
* @author <NAME>
*/
public class DebuggerAction extends AbstractAction {
private Object action;
private boolean nameInBundle;
private DebuggerAction (Object action) {
this(action, true);
}
private DebuggerAction (Object action, boolean nameInBundle) {
this.action = action;
this.nameInBundle = nameInBundle;
new Listener (this);
setEnabled (isEnabled (getAction ()));
}
public Object getAction () {
return action;
}
@Override
public Object getValue(String key) {
if (key == Action.NAME && nameInBundle) {
return NbBundle.getMessage (DebuggerAction.class, (String) super.getValue(key));
}
return super.getValue(key);
}
@Override
public void actionPerformed (ActionEvent evt) {
// Post the action asynchronously, since we're on AWT
getActionsManager(action).postAction(action);
}
/**
* Get the actions manager of the current engine (if any).
* @return The actions manager or <code>null</code>.
*/
private static ActionsManager getCurrentEngineActionsManager() {
DebuggerEngine engine = DebuggerManager.getDebuggerManager().getCurrentEngine();
if (engine != null) {
return engine.getActionsManager();
} else {
return null;
}
}
/**
* Test whether the given action is enabled in either the current engine's
* action manager, or the default action manager.
* We need to take the default actions into account so that actions provided
* by other debuggers are not ignored.
*/
private static boolean isEnabled(Object action) {
ActionsManager manager = getCurrentEngineActionsManager();
if (manager != null) {
if (manager.isEnabled(action)) {
return true;
}
}
return DebuggerManager.getDebuggerManager().getActionsManager().isEnabled(action);
}
/**
* Get the actions manager for which the action is enabled.
* It returns either the current engine's manager, or the default one.
     * @param action the action
* @return the actions manager
*/
private static ActionsManager getActionsManager(Object action) {
ActionsManager manager = getCurrentEngineActionsManager();
if (manager != null) {
if (manager.isEnabled(action)) {
return manager;
}
}
return DebuggerManager.getDebuggerManager().getActionsManager();
}
public static DebuggerAction createContinueAction() {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_CONTINUE);
action.putValue (Action.NAME, "CTL_Continue_action_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/Continue.gif" // NOI18N
);
return action;
}
public static DebuggerAction createFixAction() {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_FIX);
action.putValue (Action.NAME, "CTL_Fix_action_name");
action.putValue (
"iconBase",
"org/netbeans/modules/debugger/resources/actions/Fix.gif" // NOI18N
);
return action;
}
public static DebuggerAction createKillAction() {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_KILL);
action.putValue (Action.NAME, "CTL_KillAction_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/Kill.gif" // NOI18N
);
action.setEnabled (false);
return action;
}
public static DebuggerAction createMakeCalleeCurrentAction() {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_MAKE_CALLEE_CURRENT);
action.putValue (Action.NAME, "CTL_MakeCalleeCurrentAction_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/GoToCalledMethod.gif" // NOI18N
);
return action;
}
public static DebuggerAction createMakeCallerCurrentAction() {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_MAKE_CALLER_CURRENT);
action.putValue (Action.NAME, "CTL_MakeCallerCurrentAction_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/GoToCallingMethod.gif" // NOI18N
);
return action;
}
public static DebuggerAction createPauseAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_PAUSE);
action.putValue (Action.NAME, "CTL_Pause_action_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/Pause.gif" // NOI18N
);
return action;
}
public static DebuggerAction createPopTopmostCallAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_POP_TOPMOST_CALL);
action.putValue (Action.NAME, "CTL_PopTopmostCallAction_name");
return action;
}
public static DebuggerAction createRunIntoMethodAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_RUN_INTO_METHOD);
action.putValue (Action.NAME, "CTL_Run_into_method_action_name");
return action;
}
public static DebuggerAction createRunToCursorAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_RUN_TO_CURSOR);
action.putValue (Action.NAME, "CTL_Run_to_cursor_action_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/RunToCursor.gif" // NOI18N
);
return action;
}
public static DebuggerAction createStepIntoAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_STEP_INTO);
action.putValue (Action.NAME, "CTL_Step_into_action_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/StepInto.gif" // NOI18N
);
return action;
}
public static DebuggerAction createStepIntoNextMethodAction () {
DebuggerAction action = new DebuggerAction("stepIntoNextMethod"); // NOI18N [TODO] add constant
action.putValue (Action.NAME, "CTL_Step_into_next_method_action_name");
return action;
}
public static DebuggerAction createStepOutAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_STEP_OUT);
action.putValue (Action.NAME, "CTL_Step_out_action_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/StepOut.gif" // NOI18N
);
return action;
}
public static DebuggerAction createStepOverAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_STEP_OVER);
action.putValue (Action.NAME, "CTL_Step_over_action_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/StepOver.gif" // NOI18N
);
return action;
}
public static DebuggerAction createStepOperationAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_STEP_OPERATION);
action.putValue (Action.NAME, "CTL_Step_operation_action_name");
action.putValue (
"iconBase", // NOI18N
"org/netbeans/modules/debugger/resources/actions/StepOverOperation.gif" // NOI18N
);
return action;
}
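    // Editor annotation types on which the toggle-breakpoint action should act as the default
    // gesture; the array is passed below as the "supported-annotation-types" action value.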
private static final String[] BREAKPOINT_ANNOTATION_TYPES = new String[] {
"Breakpoint_broken",
"Breakpoint",
"Breakpoint_stroke",
"CondBreakpoint_broken",
"CondBreakpoint",
"CondBreakpoint_stroke",
"DisabledBreakpoint",
"DisabledCondBreakpoint",
"DisabledBreakpoint_stroke",
"CurrentExpressionLine",
"CurrentExpression",
"CurrentPC2",
"CurrentPC2LinePart",
"CurrentPC2_BP",
"CurrentPC2_DBP",
};
public static DebuggerAction createToggleBreakpointAction () {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_TOGGLE_BREAKPOINT);
action.putValue (Action.NAME, "CTL_Toggle_breakpoint");
action.putValue("default-action", true);
action.putValue("supported-annotation-types", BREAKPOINT_ANNOTATION_TYPES);
//action.putValue("default-action-excluded-annotation-types", BREAKPOINT_ANNOTATION_TYPES);
return action;
}
public static DebuggerAction createEvaluateAction() {
DebuggerAction action = new DebuggerAction(ActionsManager.ACTION_EVALUATE);
action.putValue (Action.NAME, "CTL_Evaluate"); // NOI18N
return action;
}
/**
* Use this method to register an additional debugger action.
* Register in a module layer manually as follows:
* <pre style="background-color: rgb(255, 255, 153);">
* <folder name="Actions">
* <folder name="Debug">
* <file name="ActionName.instance">
* <attr name="instanceClass" stringvalue="org.netbeans.modules.debugger.ui.actions.DebuggerAction"/>
* <attr name="instanceOf" stringvalue="javax.swing.Action"/>
* <attr name="instanceCreate" methodvalue="org.netbeans.modules.debugger.ui.actions.DebuggerAction.createAction"/>
* <attr name="action" stringvalue="actionName"/>
* <attr name="name" bundlevalue="org.netbeans.modules.debugger.general.Bundle#CTL_MyAction_Title"/>
* <attr name="iconBase" stringvalue="org/netbeans/modules/debugger/general/MyAction.png"/>
* </file>
* </folder>
* </folder></pre>
* @param params "action", "name" and optional "iconBase".
* @return The action object
*/
public static DebuggerAction createAction(Map<String,?> params) {
Object action = params.get("action");
if (action == null) {
throw new IllegalStateException("\"action\" parameter is missing.");
}
String name = (String) params.get("name");
if (name == null) {
throw new IllegalStateException("\"name\" parameter is missing.");
}
String iconBase = (String) params.get("iconBase");
DebuggerAction a = new DebuggerAction(action, false);
a.putValue(Action.NAME, name);
if (iconBase != null) {
a.putValue("iconBase", iconBase);
}
return a;
}
// Debug File Actions:
public static Action createDebugFileAction() {
Action a = FileSensitiveActions.fileCommandAction(
ActionProvider.COMMAND_DEBUG_SINGLE,
NbBundle.getMessage(DebuggerAction.class, "LBL_DebugSingleAction_Name"),
ImageUtilities.loadImageIcon("org/netbeans/modules/debugger/resources/debugSingle.png", true));
a.putValue("iconBase","org/netbeans/modules/debugger/resources/debugSingle.png"); //NOI18N
a.putValue("noIconInMenu", true); //NOI18N
return a;
}
public static Action createDebugTestFileAction() {
Action a = FileSensitiveActions.fileCommandAction(
ActionProvider.COMMAND_DEBUG_TEST_SINGLE,
NbBundle.getMessage(DebuggerAction.class, "LBL_DebugTestSingleAction_Name"),
ImageUtilities.loadImageIcon("org/netbeans/modules/debugger/resources/debugTestSingle.png", true));
a.putValue("iconBase","org/netbeans/modules/debugger/resources/debugTestSingle.png"); //NOI18N
a.putValue("noIconInMenu", true); //NOI18N
return a;
}
// innerclasses ............................................................
/**
* Listens on DebuggerManager on PROP_CURRENT_ENGINE and on current engine
     * on PROP_ACTION_STATE, and updates the state of this action instance.
*/
static class Listener extends DebuggerManagerAdapter
implements ActionsManagerListener {
private ActionsManager currentActionsManager;
private WeakReference<DebuggerAction> ref;
Listener (DebuggerAction da) {
ref = new WeakReference<DebuggerAction>(da);
DebuggerManager.getDebuggerManager ().addDebuggerListener (
DebuggerManager.PROP_CURRENT_ENGINE,
this
);
DebuggerManager.getDebuggerManager ().getActionsManager().addActionsManagerListener(
ActionsManagerListener.PROP_ACTION_STATE_CHANGED,
this
);
updateCurrentActionsManager ();
}
@Override
public void propertyChange (PropertyChangeEvent evt) {
final DebuggerAction da = getDebuggerAction ();
if (da == null) return;
updateCurrentActionsManager ();
final boolean en = DebuggerAction.isEnabled (da.getAction ());
SwingUtilities.invokeLater (new Runnable () {
@Override
public void run () {
da.setEnabled (en);
}
});
}
@Override
public void actionPerformed (Object action) {
}
@Override
public void actionStateChanged (
final Object action,
final boolean enabled
) {
final DebuggerAction da = getDebuggerAction ();
if (da == null) return;
if (!action.equals(da.getAction ())) return;
// ignore the enabled argument, check it with respect to the proper
// actions manager.
final boolean en = DebuggerAction.isEnabled (da.getAction ());
SwingUtilities.invokeLater (new Runnable () {
@Override
public void run () {
da.setEnabled (en);
}
});
}
private void updateCurrentActionsManager () {
ActionsManager newActionsManager = getCurrentEngineActionsManager ();
if (currentActionsManager == newActionsManager) return;
if (currentActionsManager != null)
currentActionsManager.removeActionsManagerListener
(ActionsManagerListener.PROP_ACTION_STATE_CHANGED, this);
if (newActionsManager != null)
newActionsManager.addActionsManagerListener
(ActionsManagerListener.PROP_ACTION_STATE_CHANGED, this);
currentActionsManager = newActionsManager;
}
private DebuggerAction getDebuggerAction () {
DebuggerAction da = ref.get ();
if (da == null) {
DebuggerManager.getDebuggerManager ().removeDebuggerListener (
DebuggerManager.PROP_CURRENT_ENGINE,
this
);
DebuggerManager.getDebuggerManager ().getActionsManager().removeActionsManagerListener(
ActionsManagerListener.PROP_ACTION_STATE_CHANGED,
this
);
if (currentActionsManager != null)
currentActionsManager.removeActionsManagerListener
(ActionsManagerListener.PROP_ACTION_STATE_CHANGED, this);
currentActionsManager = null;
return null;
}
return da;
}
}
}
| 7,277 |
456 | // SPDX-License-Identifier: BSD-3-Clause
// Copyright (c) 2004-2020 <NAME>
// All rights reserved.
#include <djvUIComponents/IOSettingsWidget.h>
#include <djvUIComponents/IOSettings.h>
#include <djvUI/FormLayout.h>
#include <djvUI/IntSlider.h>
#include <djvUI/SettingsSystem.h>
#include <djvSystem/Context.h>
#include <djvMath/NumericValueModels.h>
using namespace djv::Core;
namespace djv
{
namespace UIComponents
{
namespace Settings
{
struct IOThreadsWidget::Private
{
std::shared_ptr<UI::Numeric::IntSlider> threadCountSlider;
std::shared_ptr<UI::FormLayout> layout;
std::shared_ptr<Observer::Value<size_t> > threadCountObserver;
};
void IOThreadsWidget::_init(const std::shared_ptr<System::Context>& context)
{
IWidget::_init(context);
DJV_PRIVATE_PTR();
setClassName("djv::UIComponents::Settings::IOThreadsWidget");
p.threadCountSlider = UI::Numeric::IntSlider::create(context);
p.threadCountSlider->setRange(Math::IntRange(2, 64));
p.layout = UI::FormLayout::create(context);
p.layout->addChild(p.threadCountSlider);
addChild(p.layout);
auto weak = std::weak_ptr<IOThreadsWidget>(std::dynamic_pointer_cast<IOThreadsWidget>(shared_from_this()));
auto contextWeak = std::weak_ptr<System::Context>(context);
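                // Weak pointers are captured so the slider callback does not keep the widget or
                // the context alive; both are re-locked inside the lambdas before use.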
p.threadCountSlider->setValueCallback(
[weak, contextWeak](int value)
{
if (auto context = contextWeak.lock())
{
if (auto widget = weak.lock())
{
auto settingsSystem = context->getSystemT<UI::Settings::SettingsSystem>();
if (auto ioSettings = settingsSystem->getSettingsT<Settings::IO>())
{
ioSettings->setThreadCount(static_cast<size_t>(value));
}
}
}
});
auto settingsSystem = context->getSystemT<UI::Settings::SettingsSystem>();
if (auto ioSettings = settingsSystem->getSettingsT<Settings::IO>())
{
p.threadCountObserver = Observer::Value<size_t>::create(
ioSettings->observeThreadCount(),
[weak](size_t value)
{
if (auto widget = weak.lock())
{
widget->_p->threadCountSlider->setValue(static_cast<int>(value));
}
});
}
}
IOThreadsWidget::IOThreadsWidget() :
_p(new Private)
{}
std::shared_ptr<IOThreadsWidget> IOThreadsWidget::create(const std::shared_ptr<System::Context>& context)
{
auto out = std::shared_ptr<IOThreadsWidget>(new IOThreadsWidget);
out->_init(context);
return out;
}
std::string IOThreadsWidget::getSettingsGroup() const
{
return DJV_TEXT("settings_title_io");
}
std::string IOThreadsWidget::getSettingsSortKey() const
{
return "d";
}
void IOThreadsWidget::_initEvent(System::Event::Init& event)
{
IWidget::_initEvent(event);
DJV_PRIVATE_PTR();
if (event.getData().text)
{
p.layout->setText(p.threadCountSlider, _getText(DJV_TEXT("settings_io_thread_count")) + ":");
}
}
} // namespace Settings
} // namespace UIComponents
} // namespace djv
| 2,228 |
2,536 | package cn.pedant.SafeWebViewBridge;
import android.text.TextUtils;
import android.webkit.WebView;
import android.util.Log;
import com.google.gson.Gson;
import org.json.JSONArray;
import org.json.JSONObject;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.HashMap;
public class JsCallJava {
private final static String TAG = "JsCallJava";
private final static String RETURN_RESULT_FORMAT = "{\"code\": %d, \"result\": %s}";
private HashMap<String, Method> mMethodsMap;
private String mInjectedName;
private String mPreloadInterfaceJS;
private Gson mGson;
public JsCallJava (String injectedName, Class injectedCls) {
try {
if (TextUtils.isEmpty(injectedName)) {
throw new Exception("injected name can not be null");
}
mInjectedName = injectedName;
mMethodsMap = new HashMap<String, Method>();
            // Get all methods declared by the class itself (public, private and protected); getMethods would also return every inherited method
Method[] methods = injectedCls.getDeclaredMethods();
StringBuilder sb = new StringBuilder("javascript:(function(b){console.log(\"");
sb.append(mInjectedName);
sb.append(" initialization begin\");var a={queue:[],callback:function(){var d=Array.prototype.slice.call(arguments,0);var c=d.shift();var e=d.shift();this.queue[c].apply(this,d);if(!e){delete this.queue[c]}}};");
for (Method method : methods) {
String sign;
if (method.getModifiers() != (Modifier.PUBLIC | Modifier.STATIC) || (sign = genJavaMethodSign(method)) == null) {
continue;
}
mMethodsMap.put(sign, method);
sb.append(String.format("a.%s=", method.getName()));
}
sb.append("function(){var f=Array.prototype.slice.call(arguments,0);if(f.length<1){throw\"");
sb.append(mInjectedName);
sb.append(" call error, message:miss method name\"}var e=[];for(var h=1;h<f.length;h++){var c=f[h];var j=typeof c;e[e.length]=j;if(j==\"function\"){var d=a.queue.length;a.queue[d]=c;f[h]=d}}var g=JSON.parse(prompt(JSON.stringify({method:f.shift(),types:e,args:f})));if(g.code!=200){throw\"");
sb.append(mInjectedName);
sb.append(" call error, code:\"+g.code+\", message:\"+g.result}return g.result};Object.getOwnPropertyNames(a).forEach(function(d){var c=a[d];if(typeof c===\"function\"&&d!==\"callback\"){a[d]=function(){return c.apply(a,[d].concat(Array.prototype.slice.call(arguments,0)))}}});b.");
sb.append(mInjectedName);
sb.append("=a;console.log(\"");
sb.append(mInjectedName);
sb.append(" initialization end\")})(window);");
mPreloadInterfaceJS = sb.toString();
} catch(Exception e){
Log.e(TAG, "init js error:" + e.getMessage());
}
}
private String genJavaMethodSign (Method method) {
String sign = method.getName();
Class[] argsTypes = method.getParameterTypes();
int len = argsTypes.length;
if (len < 1 || argsTypes[0] != WebView.class) {
Log.w(TAG, "method(" + sign + ") must use webview to be first parameter, will be pass");
return null;
}
for (int k = 1; k < len; k++) {
Class cls = argsTypes[k];
if (cls == String.class) {
sign += "_S";
} else if (cls == int.class ||
cls == long.class ||
cls == float.class ||
cls == double.class) {
sign += "_N";
} else if (cls == boolean.class) {
sign += "_B";
} else if (cls == JSONObject.class) {
sign += "_O";
} else if (cls == JsCallback.class) {
sign += "_F";
} else {
sign += "_P";
}
}
return sign;
}
public String getPreloadInterfaceJS () {
return mPreloadInterfaceJS;
}
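    /*
     * Illustrative wiring sketch (not part of this class): the host app injects
     * getPreloadInterfaceJS() into the page and routes window.prompt() back into call().
     * The class below is hypothetical -- "HostApp" and "HostJsScope" are assumed names;
     * only the Android WebChromeClient / JsPromptResult APIs are real.
     *
     *   public class InjectedChromeClient extends android.webkit.WebChromeClient {
     *       private final JsCallJava mJsCallJava = new JsCallJava("HostApp", HostJsScope.class);
     *
     *       @Override
     *       public void onProgressChanged(WebView view, int newProgress) {
     *           // (re)inject the bridge JS while the page is loading
     *           view.loadUrl(mJsCallJava.getPreloadInterfaceJS());
     *           super.onProgressChanged(view, newProgress);
     *       }
     *
     *       @Override
     *       public boolean onJsPrompt(WebView view, String url, String message,
     *                                 String defaultValue, android.webkit.JsPromptResult result) {
     *           // the injected JS sends its call descriptor via prompt(); answer with the JSON result
     *           result.confirm(mJsCallJava.call(view, message));
     *           return true;
     *       }
     *   }
     */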
public String call(WebView webView, String jsonStr) {
if (!TextUtils.isEmpty(jsonStr)) {
try {
JSONObject callJson = new JSONObject(jsonStr);
String methodName = callJson.getString("method");
JSONArray argsTypes = callJson.getJSONArray("types");
JSONArray argsVals = callJson.getJSONArray("args");
String sign = methodName;
int len = argsTypes.length();
Object[] values = new Object[len + 1];
int numIndex = 0;
String currType;
values[0] = webView;
for (int k = 0; k < len; k++) {
currType = argsTypes.optString(k);
if ("string".equals(currType)) {
sign += "_S";
values[k + 1] = argsVals.isNull(k) ? null : argsVals.getString(k);
} else if ("number".equals(currType)) {
sign += "_N";
numIndex = numIndex * 10 + k + 1;
} else if ("boolean".equals(currType)) {
sign += "_B";
values[k + 1] = argsVals.getBoolean(k);
} else if ("object".equals(currType)) {
sign += "_O";
values[k + 1] = argsVals.isNull(k) ? null : argsVals.getJSONObject(k);
} else if ("function".equals(currType)) {
sign += "_F";
values[k + 1] = new JsCallback(webView, mInjectedName, argsVals.getInt(k));
} else {
sign += "_P";
}
}
Method currMethod = mMethodsMap.get(sign);
                // no matching method was found
if (currMethod == null) {
return getReturn(jsonStr, 500, "not found method(" + sign + ") with valid parameters");
}
                // refine the match for numeric parameter types
if (numIndex > 0) {
Class[] methodTypes = currMethod.getParameterTypes();
int currIndex;
Class currCls;
while (numIndex > 0) {
currIndex = numIndex - numIndex / 10 * 10;
currCls = methodTypes[currIndex];
if (currCls == int.class) {
values[currIndex] = argsVals.getInt(currIndex - 1);
} else if (currCls == long.class) {
                            // WARNING: JSONArray.getLong() can return an incorrect (too large) value here, so parse the number from its string form instead
values[currIndex] = Long.parseLong(argsVals.getString(currIndex - 1));
} else {
values[currIndex] = argsVals.getDouble(currIndex - 1);
}
numIndex /= 10;
}
}
return getReturn(jsonStr, 200, currMethod.invoke(null, values));
} catch (Exception e) {
                // prefer returning the detailed cause message
if (e.getCause() != null) {
return getReturn(jsonStr, 500, "method execute error:" + e.getCause().getMessage());
}
return getReturn(jsonStr, 500, "method execute error:" + e.getMessage());
}
} else {
return getReturn(jsonStr, 500, "call data empty");
}
}
private String getReturn (String reqJson, int stateCode, Object result) {
String insertRes;
if (result == null) {
insertRes = "null";
} else if (result instanceof String) {
result = ((String) result).replace("\"", "\\\"");
insertRes = "\"" + result + "\"";
} else if (!(result instanceof Integer)
&& !(result instanceof Long)
&& !(result instanceof Boolean)
&& !(result instanceof Float)
&& !(result instanceof Double)
&& !(result instanceof JSONObject)) { // 非数字或者非字符串的构造对象类型都要序列化后再拼接
if (mGson == null) {
mGson = new Gson();
}
insertRes = mGson.toJson(result);
        } else { // numbers are converted to strings directly
insertRes = String.valueOf(result);
}
String resStr = String.format(RETURN_RESULT_FORMAT, stateCode, insertRes);
Log.d(TAG, mInjectedName + " call json: " + reqJson + " result:" + resStr);
return resStr;
}
} | 4,587 |
2,542 | <reponame>gridgentoo/ServiceFabricAzure
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "stdafx.h"
#include "TestHeaders.h"
#include <boost/test/unit_test.hpp>
#include "Common/boost-taef.h"
namespace TransactionalReplicatorTests
{
using namespace std;
using namespace ktl;
using namespace Data::LoggingReplicator;
using namespace TxnReplicator;
using namespace TxnReplicator::TestCommon;
using namespace Data::Utilities;
using namespace Common;
StringLiteral const TraceComponent = "IOMonitorTests";
class IOMonitorTests
{
protected:
IOMonitorTests()
{
healthClient_ = TestHealthClient::Create(true);
CommonConfig config; // load config object for tracing functionality
int seed = GetTickCount();
Common::Random r(seed);
rId_ = r.Next();
pId_.CreateNew();
ConfigureSettings();
}
~IOMonitorTests()
{
}
void EndTest();
void ConfigureSettings(
__in ULONG count = 5,
__in ULONG timeThresholdInSeconds = 1,
__in ULONG healthReportTTL = 1)
{
timeThresholdSeconds_ = timeThresholdInSeconds;
txrSettings_ = { 0 };
txrSettings_.SlowLogIOCountThreshold = count;
txrSettings_.Flags |= FABRIC_TRANSACTIONAL_REPLICATOR_SLOW_LOG_IO_COUNT_THRESHOLD;
txrSettings_.SlowLogIOTimeThresholdSeconds = timeThresholdInSeconds;
txrSettings_.Flags |= FABRIC_TRANSACTIONAL_REPLICATOR_SLOW_LOG_IO_TIME_THRESHOLD_SECONDS;
txrSettings_.SlowLogIOHealthReportTTLSeconds = healthReportTTL;
txrSettings_.Flags |= FABRIC_TRANSACTIONAL_REPLICATOR_SLOW_LOG_IO_HEALTH_REPORT_TTL_SECONDS;
}
void Initialize(
__in KAllocator & allocator)
{
prId_ = PartitionedReplicaId::Create(pId_, rId_, allocator);
TransactionalReplicatorSettingsUPtr tmp;
TransactionalReplicatorSettings::FromPublicApi(txrSettings_, tmp);
config_ = TRInternalSettings::Create(
move(tmp),
make_shared<TransactionalReplicatorConfig>());
ioTracker_ = IOMonitor::Create(
*prId_,
L"Test",
Common::SystemHealthReportCode::TR_SlowIO,
config_,
healthClient_,
allocator);
};
void SlowIO(int count)
{
for (int i = 0; i < count; i++)
{
ioTracker_->OnSlowOperation();
}
}
void UpdateConfig(wstring name, wstring newvalue, ConfigSettings & settings, shared_ptr<ConfigSettingsConfigStore> & store)
{
ConfigSection section;
section.Name = L"TransactionalReplicator2";
ConfigParameter d1;
d1.Name = name;
d1.Value = newvalue;
section.Parameters[d1.Name] = d1;
settings.Sections[section.Name] = section;
store->Update(settings);
}
Awaitable<void> RandomSlowIO(int count, int minDelayMs, int maxDelayMs)
{
DWORD tSeed = GetTickCount();
Common::Random random(tSeed);
KTimer::SPtr localTimer;
NTSTATUS status = KTimer::Create(localTimer, underlyingSystem_->NonPagedAllocator(), KTL_TAG_TEST);
ASSERT_IFNOT(NT_SUCCESS(status), "Failed to initialize timer");
for (int i = 0; i < count; i++)
{
if(minDelayMs > 0 && maxDelayMs > 0)
{
co_await localTimer->StartTimerAsync(random.Next(minDelayMs, maxDelayMs), nullptr);
}
ioTracker_->OnSlowOperation();
localTimer->Reuse();
}
}
TestHealthClientSPtr healthClient_;
TxnReplicator::TRInternalSettingsSPtr config_;
TRANSACTIONAL_REPLICATOR_SETTINGS txrSettings_;
IOMonitor::SPtr ioTracker_;
ULONG timeThresholdSeconds_;
KGuid pId_;
::FABRIC_REPLICA_ID rId_;
PartitionedReplicaId::SPtr prId_;
KtlSystem * underlyingSystem_;
private:
KtlSystem * CreateKtlSystem()
{
KtlSystem* underlyingSystem;
NTSTATUS status = KtlSystem::Initialize(FALSE, &underlyingSystem);
CODING_ERROR_ASSERT(NT_SUCCESS(status));
underlyingSystem->SetStrictAllocationChecks(TRUE);
return underlyingSystem;
}
};
void IOMonitorTests::EndTest()
{
prId_.Reset();
ioTracker_.Reset();
config_.reset();
healthClient_.reset();
}
BOOST_FIXTURE_TEST_SUITE(TRHealthTrackerTestSuite, IOMonitorTests)
BOOST_AUTO_TEST_CASE(Verify_HealthReport)
{
TR_TEST_TRACE_BEGIN("Verify_HealthReport")
{
UNREFERENCED_PARAMETER(allocator);
// Count = 5, Time Threshold = 1s
Initialize(allocator);
// Report 5 slow log io ops w/no delay
// Ensures all reports are fired within 1 time threshold
SlowIO(5);
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 1, "Expected 1 health report");
// Delay one time threshold + 1
Sleep(1000 * (timeThresholdSeconds_ + 1));
// Report count - 1 slow ops
SlowIO(4);
// Confirm another health report has not been fired
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 1, "Expected 1 health report");
EndTest();
}
}
BOOST_AUTO_TEST_CASE(Verify_NoHealthReport)
{
TR_TEST_TRACE_BEGIN("Verify_NoHealthReport")
{
UNREFERENCED_PARAMETER(allocator);
// Count = 5, Time Threshold = 1s
Initialize(allocator);
// Report 3 slow log io ops w/no delay
SlowIO(3);
// Expect no health report
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 0, "Expected 0 health reports");
// Delay one time threshold + 1
Sleep(1000 * (timeThresholdSeconds_ + 1));
// Report 3 more slow operations
SlowIO(3);
// Confirm no health reports have still been fired
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 0, "Expected 0 health reports");
EndTest();
}
}
BOOST_AUTO_TEST_CASE(Verify_Null_IReplicatorHealthClient_NoHealthReport)
{
TR_TEST_TRACE_BEGIN("Verify_Null_IReplicatorHealthClient_NoHealthReport")
{
UNREFERENCED_PARAMETER(allocator);
// Count = 5, Time Threshold = 1s
// Set IReplicatorHealthClientSPtr to nullptr
healthClient_ = nullptr;
Initialize(allocator);
// Report 15 slow log io ops w/no delay
SlowIO(15);
// Assert healthClient_ == null after 15 reported 'slow' operations' with count threshold of 5
// Calling 'ReportReplicatorHealth' on nullptr is expected to AV.
// Reaching the assert statement and exiting without issue confirms the nullptr case is handled by IOMonitor
ASSERT_IFNOT(healthClient_ == nullptr, "Expected 0 health reports");
EndTest();
}
}
BOOST_AUTO_TEST_CASE(Verify_DisabledViaConfig_NoHealthReport)
{
TR_TEST_TRACE_BEGIN("Verify_DisabledViaConfig_NoHealthReport")
{
UNREFERENCED_PARAMETER(allocator);
// Count = 0, Time Threshold = 1s
ConfigureSettings(0, 1, 15);
Initialize(allocator);
// Report 15 slow log io ops w/no delay
SlowIO(15);
// Expect no health report
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 0, "Expected 0 health reports");
// Delay one time threshold + 1
Sleep(1000 * (timeThresholdSeconds_ + 1));
// Report 30 more slow operations
SlowIO(30);
// Confirm no health reports have still been fired
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 0, "Expected 0 health reports");
EndTest();
}
}
BOOST_AUTO_TEST_CASE(Verify_DynamicConfigUpdate_HealthReport)
{
TR_TEST_TRACE_BEGIN("Verify_DynamicConfigUpdate_HealthReport")
{
UNREFERENCED_PARAMETER(allocator);
ConfigSettings settings;
auto configStore = make_shared<ConfigSettingsConfigStore>(settings);
Config::SetConfigStore(configStore);
// Count = 0, Time Threshold = 1s
// Count is intentionally not set as dynamic config update will not override user settings
txrSettings_ = { 0 };
txrSettings_.SlowLogIOTimeThresholdSeconds = 1;
txrSettings_.Flags |= FABRIC_TRANSACTIONAL_REPLICATOR_SLOW_LOG_IO_TIME_THRESHOLD_SECONDS;
txrSettings_.SlowLogIOHealthReportTTLSeconds = 1;
txrSettings_.Flags |= FABRIC_TRANSACTIONAL_REPLICATOR_SLOW_LOG_IO_HEALTH_REPORT_TTL_SECONDS;
Initialize(allocator);
// Report 15 slow log io ops w/no delay
SlowIO(15);
// Expect no health report
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 0, "Expected 0 health reports");
// Dynamically update count threshold
UpdateConfig(L"SlowLogIOCountThreshold", L"20", settings, configStore);
// Delay one time threshold + 1
Sleep(1000 * (timeThresholdSeconds_ + 1));
// Report 30 more slow operations
SlowIO(30);
// Confirm 1 health report has been fired
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 1, "Expected 1 health report");
EndTest();
}
}
BOOST_AUTO_TEST_CASE(Verify_HealthReport_Stress)
{
TR_TEST_TRACE_BEGIN("Verify_HealthReport_Stress")
{
UNREFERENCED_PARAMETER(allocator);
// Count = 20, Time Threshold = 1s
ConfigureSettings(20, 1);
Initialize(allocator);
// Report 200 slow log io ops w/no delay
// Ensures all reports are fired within 1 time threshold
SlowIO(200);
ASSERT_IFNOT(healthClient_->NumberofReportsSent == 1, "Expected 1 health report");
// Delay one time threshold + 1
Sleep(1000 * (timeThresholdSeconds_ + 1));
// Report count threshold # of slow ops
SlowIO(21);
// Confirm another health report has been fired
ASSERT_IFNOT(
healthClient_->NumberofReportsSent == 2,
"Expected 1 health report. {0}",
*ioTracker_);
EndTest();
}
}
BOOST_AUTO_TEST_SUITE_END()
}
| 5,214 |
324 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.cloudstack.features;
import static org.jclouds.reflect.Reflection2.method;
import org.jclouds.cloudstack.internal.BaseCloudStackApiTest;
import org.jclouds.cloudstack.options.ListStoragePoolsOptions;
import org.jclouds.fallbacks.MapHttp4xxCodesToExceptions;
import org.jclouds.http.functions.ParseFirstJsonValueNamed;
import org.jclouds.rest.internal.GeneratedHttpRequest;
import org.testng.annotations.Test;
import com.google.common.collect.ImmutableList;
import com.google.common.reflect.Invokable;
/**
* Tests behavior of {@code GlobalStoragePoolApi}
*/
@Test(groups = "unit", testName = "GlobalStoragePoolApiTest")
public class GlobalStoragePoolApiTest extends BaseCloudStackApiTest<GlobalStoragePoolApi> {
public void testListStoragePools() throws NoSuchMethodException {
Invokable<?, ?> method = method(GlobalStoragePoolApi.class, "listStoragePools", ListStoragePoolsOptions[].class);
GeneratedHttpRequest httpRequest = processor.createRequest(method, ImmutableList.of());
assertRequestLineEquals(httpRequest,
"GET http://localhost:8080/client/api?response=json&command=listStoragePools&listAll=true HTTP/1.1");
assertNonPayloadHeadersEqual(httpRequest, "Accept: application/json\n");
assertPayloadEquals(httpRequest, null, null, false);
assertResponseParserClassEquals(method, httpRequest, ParseFirstJsonValueNamed.class);
assertSaxResponseParserClassEquals(method, null);
assertFallbackClassEquals(method, MapHttp4xxCodesToExceptions.class);
checkFilters(httpRequest);
}
public void testListStoragePoolsOptions() throws NoSuchMethodException {
Invokable<?, ?> method = method(GlobalStoragePoolApi.class, "listStoragePools", ListStoragePoolsOptions[].class);
GeneratedHttpRequest httpRequest = processor.createRequest(method, ImmutableList.<Object> of(ListStoragePoolsOptions.Builder.clusterId("3").id("4").ipAddress("192.168.42.42").keyword("fred").name("bob").path("/mnt/store42").podId("4").zoneId("5")));
assertRequestLineEquals(httpRequest,
"GET http://localhost:8080/client/api?response=json&command=listStoragePools&listAll=true&clusterid=3&id=4&ipaddress=192.168.42.42&keyword=fred&name=bob&path=/mnt/store42&podid=4&zoneid=5 HTTP/1.1");
assertNonPayloadHeadersEqual(httpRequest, "Accept: application/json\n");
assertPayloadEquals(httpRequest, null, null, false);
assertResponseParserClassEquals(method, httpRequest, ParseFirstJsonValueNamed.class);
assertSaxResponseParserClassEquals(method, null);
assertFallbackClassEquals(method, MapHttp4xxCodesToExceptions.class);
checkFilters(httpRequest);
}
}
| 1,099 |
2,978 | /*
* Copyright Strimzi authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.strimzi.systemtest.annotations;
import io.strimzi.test.k8s.KubeClusterResource;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.junit.jupiter.api.extension.ConditionEvaluationResult;
import org.junit.jupiter.api.extension.ExecutionCondition;
import org.junit.jupiter.api.extension.ExtensionContext;
import java.util.Optional;
import static org.junit.platform.commons.support.AnnotationSupport.findAnnotation;
public class RequiredMinKubeApiVersionCondition implements ExecutionCondition {
private static final Logger LOGGER = LogManager.getLogger(RequiredMinKubeApiVersionCondition.class);
@Override
public ConditionEvaluationResult evaluateExecutionCondition(ExtensionContext extensionContext) {
Optional<RequiredMinKubeApiVersion> annotation = findAnnotation(extensionContext.getElement(), RequiredMinKubeApiVersion.class);
KubeClusterResource clusterResource = KubeClusterResource.getInstance();
double version = annotation.get().version();
if (Double.parseDouble(clusterResource.client().clusterKubernetesVersion()) >= version) {
return ConditionEvaluationResult.enabled("Test is enabled");
} else {
LOGGER.info("{} is @RequiredMinKubeApiVersion with version {}, but the running on cluster with {}: Ignoring {}",
extensionContext.getDisplayName(),
version,
clusterResource.client().clusterKubernetesVersion(),
extensionContext.getDisplayName()
);
return ConditionEvaluationResult.disabled("Test is disabled");
}
}
}
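// Hypothetical usage sketch (not part of this file): a system test opts into this condition
// through the @RequiredMinKubeApiVersion annotation read above. The class, method and version
// below are assumptions; only the double-valued version() contract comes from this condition.
//
//     @ExtendWith(RequiredMinKubeApiVersionCondition.class)
//     class SomeNetworkPolicyST {
//         @Test
//         @RequiredMinKubeApiVersion(version = 1.16)
//         void testFeatureNeedingNewerApi() {
//             // executed only when the cluster reports a Kubernetes version >= 1.16
//         }
//     }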
| 639 |
555 | [
{
"outputFile": "plugins/Chart/css/chart.css",
"inputFile": "plugins/Chart/css/chart.less"
},
{
"outputFile": "plugins/Chart/css/chartform.css",
"inputFile": "plugins/Chart/css/chartform.less"
},
{
"outputFile": "plugins/Chart/css/mindatarep.css",
"inputFile": "plugins/Chart/css/mindatarep.less"
},
{
"outputFile": "plugins/Chart/css/cnllist.css",
"inputFile": "plugins/Chart/css/cnllist.less"
},
{
"outputFile": "plugins/Chart/css/selectcnls.css",
"inputFile": "plugins/Chart/css/selectcnls.less"
}
] | 242 |
852 | <reponame>ckamtsikis/cmssw
#ifndef _ConfigurableAdaptiveReconstructor_H_
#define _ConfigurableAdaptiveReconstructor_H_
#include "RecoVertex/ConfigurableVertexReco/interface/AbstractConfReconstructor.h"
/**
* Wrap any VertexFitter into the VertexReconstructor interface
*/
class ConfigurableAdaptiveReconstructor : public AbstractConfReconstructor {
public:
ConfigurableAdaptiveReconstructor();
void configure(const edm::ParameterSet&) override;
ConfigurableAdaptiveReconstructor(const ConfigurableAdaptiveReconstructor& o);
~ConfigurableAdaptiveReconstructor() override;
ConfigurableAdaptiveReconstructor* clone() const override;
std::vector<TransientVertex> vertices(const std::vector<reco::TransientTrack>& t) const override;
std::vector<TransientVertex> vertices(const std::vector<reco::TransientTrack>& t,
const reco::BeamSpot&) const override;
std::vector<TransientVertex> vertices(const std::vector<reco::TransientTrack>& prims,
const std::vector<reco::TransientTrack>& secs,
const reco::BeamSpot&) const override;
edm::ParameterSet defaults() const override;
private:
const VertexReconstructor* theRector;
};
#endif
| 487 |
369 | // Copyright (c) 2017-2021, Mudit<NAME>. All rights reserved.
// For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md
#ifndef PUREPHONE_RT1051CELLULARAUDIO_HPP
#define PUREPHONE_RT1051CELLULARAUDIO_HPP
#include "SAIAudioDevice.hpp"
#include "fsl_sai_edma.h"
#include "FreeRTOS.h"
#include "task.h"
#include "macros.h"
#include "drivers/pll/DriverPLL.hpp"
#include "drivers/dmamux/DriverDMAMux.hpp"
#include "drivers/dma/DriverDMA.hpp"
#include <vector>
namespace audio
{
void txCellularCallback(I2S_Type *base, sai_edma_handle_t *handle, status_t status, void *userData);
void rxCellularCallback(I2S_Type *base, sai_edma_handle_t *handle, status_t status, void *userData);
class RT1051CellularAudio : public SAIAudioDevice
{
public:
friend void txCellularCallback(I2S_Type *base, sai_edma_handle_t *handle, status_t status, void *userData);
friend void rxCellularCallback(I2S_Type *base, sai_edma_handle_t *handle, status_t status, void *userData);
RT1051CellularAudio();
virtual ~RT1051CellularAudio();
AudioDevice::RetCode Start() override final;
AudioDevice::RetCode Stop() override final;
AudioDevice::RetCode setOutputVolume(float vol) override final;
AudioDevice::RetCode setInputGain(float gain) override final;
auto getSupportedFormats() -> std::vector<AudioFormat> override final;
auto getTraits() const -> Traits override final;
auto getSourceFormat() -> AudioFormat override final;
private:
static constexpr auto supportedSampleRate = 16000U;
static constexpr auto supportedBitWidth = 16U;
static constexpr auto supportedChannels = 1U;
enum class State
{
Running,
Stopped
};
State state = State::Stopped;
uint32_t mclkSourceClockHz = 0;
sai_config_t config;
// M.P: It is important to destroy these drivers in specific order
std::shared_ptr<drivers::DriverPLL> pll;
std::shared_ptr<drivers::DriverDMAMux> dmamux;
std::shared_ptr<drivers::DriverDMA> dma;
std::unique_ptr<drivers::DriverDMAHandle> rxDMAHandle;
std::unique_ptr<drivers::DriverDMAHandle> txDMAHandle;
static AT_NONCACHEABLE_SECTION_INIT(sai_edma_handle_t txHandle);
static AT_NONCACHEABLE_SECTION_INIT(sai_edma_handle_t rxHandle);
void Init();
void Deinit();
void OutStart();
void InStart();
void OutStop();
void InStop();
};
} // namespace audio
#endif // PUREPHONE_RT1051CELLULARAUDIO_HPP
| 1,111 |
407 | <reponame>sld-columbia/nvdla-sw<gh_stars>100-1000
# Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import os
import re
class Test(object):
"""Single test"""
def __init__(self):
self.uid = None
self.level = None
self.name = None
self.options = []
self.targets = []
self.features = []
self.description = None
self.dependencies = None
self.runscript = None
self.status = None
def __repr__(self):
        return os.path.basename(self.runscript) + ' ' + self.name + ' ' + \
            ' '.join(self.options)
def __hash__(self):
return hash((self.name, tuple(self.options)))
def __eq__(self, other):
return (self.name, self.options) == (other.name, other.options)
def guid(self):
code = hex(hash(self) & (2 ** 64 - 1))
code = code[2:-1]
return code.upper()
def pprint(self):
print("Test:", str(self))
print("Test Description:", self.description)
print("Level:", self.level)
print("Features:", self.features)
print("Targets:", self.targets)
print("Runscript:", self.runscript)
print("Status:", self.status)
print("Hash:", hash(self))
class Testplan(object):
"""Container of tests"""
def __init__(self, name):
self.name = name
self.test_list = []
def register_tests(self):
raise NotImplementedError("This call should be overridden")
def add_test(self, test):
# TODO: improve Test design
test.uid = len(self.test_list) - 1
# workaround for the legacy code
test.level = str(test.level)
test.options = test.options or []
self.test_list.append(test)
def get_test(self, uid):
return self.test_list[uid]
def num_written(self, level, target):
# TODO: improve testplan design
tests = tuple(self.valid_tests(level, target))
return len(tests)
def num_total(self, level, target):
# TODO: improve testplan design
tests = tuple(self.valid_tests(level, target, False))
return len(tests)
def match(self, test, kwd):
"""Match by key word"""
if not kwd:
return True
kwd = kwd.upper()
return (
kwd in test.guid() or
kwd in test.runscript.upper() or
kwd in test.name.upper() or
kwd in ' '.join(test.options).upper()
)
def re_match(self, test, pattern):
"""Match by regex"""
if not pattern:
return True
m = re.search(pattern, test.name, re.IGNORECASE)
return bool(m)
def get_testlist(self, level, target, kwd=None, rex_pattern=None):
testlist = []
for test in self.valid_tests(level, target):
if self.match(test, kwd) and self.re_match(test, rex_pattern):
testlist.append(test)
return testlist
def valid_tests(self, level, target, status_check=True):
for test in self.test_list:
valid = (
level == test.level and
target in test.targets
)
if status_check:
valid = valid and test.status in ('Written', 'Staged')
if valid:
yield test
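# Illustrative sketch (not part of this module): how a concrete plan is expected to plug
# into the classes above. "MyPlan", the runscript path and the level/target values are
# assumptions; the attributes and calls used are the ones defined by Test and Testplan.
#
#     class MyPlan(Testplan):
#         def register_tests(self):
#             t = Test()
#             t.name = 'conv_sanity'
#             t.level = 0                      # add_test() stores this as the string '0'
#             t.targets = ['sim']
#             t.status = 'Written'
#             t.runscript = 'run/conv_sanity.sh'
#             self.add_test(t)
#
#     plan = MyPlan('sanity')
#     plan.register_tests()
#     for test in plan.get_testlist(level='0', target='sim', kwd='conv'):
#         print(test.guid(), test)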
| 1,954 |
2,338 | // RUN: %clang_cc1 -fblocks -fsyntax-only -Wunused-but-set-parameter -verify %s
int f0(int x,
int y, // expected-warning{{parameter 'y' set but not used}}
int z __attribute__((unused))) {
y = 0;
return x;
}
void f1(void) {
(void)^(int x,
int y, // expected-warning{{parameter 'y' set but not used}}
int z __attribute__((unused))) {
y = 0;
return x;
};
}
struct S {
int i;
};
// In C++, don't warn for a struct (following gcc).
void f3(struct S s) {
struct S t;
s = t;
}
// Also don't warn for a reference.
void f4(int &x) {
x = 0;
}
// Make sure this doesn't warn.
struct A {
int i;
A(int j) : i(j) {}
};
| 303 |
811 | <reponame>mitsuhiko/lol-html<filename>c-api/tests/src/test_comment_api.c
#include "../../include/lol_html.h"
#include "deps/picotest/picotest.h"
#include "tests.h"
#include "test_util.h"
static int EXPECTED_USER_DATA = 42;
//-------------------------------------------------------------------------
EXPECT_OUTPUT(
get_set_comment_text_output_sink,
"<!--Yo-->",
&EXPECTED_USER_DATA,
sizeof(EXPECTED_USER_DATA)
);
static lol_html_rewriter_directive_t get_set_comment_text(
lol_html_comment_t *comment,
void *user_data
) {
UNUSED(user_data);
const char *new_text = "Yo";
note("Get/set text");
lol_html_str_t text = lol_html_comment_text_get(comment);
str_eq(&text, "Hey 42");
lol_html_str_free(text);
ok(!lol_html_comment_text_set(comment, new_text, strlen(new_text)));
return LOL_HTML_CONTINUE;
}
static void test_get_set_comment_text(void *user_data) {
lol_html_rewriter_builder_t *builder = lol_html_rewriter_builder_new();
lol_html_rewriter_builder_add_document_content_handlers(
builder,
NULL,
NULL,
&get_set_comment_text,
user_data,
NULL,
NULL,
NULL,
NULL
);
run_rewriter(builder, "<!--Hey 42-->", get_set_comment_text_output_sink, user_data);
}
//-------------------------------------------------------------------------
EXPECT_OUTPUT(
insert_before_and_after_comment_output_sink,
"<div><!--Hey 42--></div>",
&EXPECTED_USER_DATA,
sizeof(EXPECTED_USER_DATA)
);
static lol_html_rewriter_directive_t insert_before_and_after_comment(
lol_html_comment_t *comment,
void *user_data
) {
UNUSED(user_data);
const char *before = "<div>";
const char *after = "</div>";
note("Insert before/after");
ok(!lol_html_comment_before(comment, before, strlen(before), true));
ok(!lol_html_comment_after(comment, after, strlen(after), false));
return LOL_HTML_CONTINUE;
}
static void test_insert_before_and_after_comment(void *user_data) {
lol_html_rewriter_builder_t *builder = lol_html_rewriter_builder_new();
lol_html_rewriter_builder_add_document_content_handlers(
builder,
NULL,
NULL,
&insert_before_and_after_comment,
user_data,
NULL,
NULL,
NULL,
NULL
);
run_rewriter(
builder,
"<!--Hey 42-->",
insert_before_and_after_comment_output_sink,
user_data
);
}
//-------------------------------------------------------------------------
EXPECT_OUTPUT(
get_set_user_data_output_sink,
"<!--33-->",
&EXPECTED_USER_DATA,
sizeof(EXPECTED_USER_DATA)
);
static lol_html_rewriter_directive_t get_set_user_data(
lol_html_comment_t *comment,
void *user_data
) {
note("Set comment user data");
lol_html_comment_user_data_set(comment, user_data);
note("User data");
ok(*(int*)user_data == EXPECTED_USER_DATA);
note("Get comment user data");
int comment_user_data = *(int*)lol_html_comment_user_data_get(comment);
ok(comment_user_data == EXPECTED_USER_DATA);
return LOL_HTML_CONTINUE;
}
static void test_get_set_user_data(void *user_data) {
lol_html_rewriter_builder_t *builder = lol_html_rewriter_builder_new();
lol_html_rewriter_builder_add_document_content_handlers(
builder,
NULL,
NULL,
&get_set_user_data,
user_data,
NULL,
NULL,
NULL,
NULL
);
run_rewriter(builder, "<!--33-->", get_set_user_data_output_sink, user_data);
}
//-------------------------------------------------------------------------
EXPECT_OUTPUT(
replace_comment_output_sink,
"<div><repl></div>",
&EXPECTED_USER_DATA,
sizeof(EXPECTED_USER_DATA)
);
static lol_html_rewriter_directive_t replace_comment(
lol_html_comment_t *comment,
void *user_data
) {
UNUSED(user_data);
const char *replacement = "<repl>";
note("Replace");
ok(!lol_html_comment_replace(comment, replacement, strlen(replacement), true));
ok(lol_html_comment_is_removed(comment));
return LOL_HTML_CONTINUE;
}
static void test_replace_comment(lol_html_selector_t *selector, void *user_data) {
lol_html_rewriter_builder_t *builder = lol_html_rewriter_builder_new();
int err = lol_html_rewriter_builder_add_element_content_handlers(
builder,
selector,
NULL,
NULL,
&replace_comment,
NULL,
NULL,
NULL
);
ok(!err);
run_rewriter(builder, "<div><!--hello--></div>", replace_comment_output_sink, user_data);
}
//-------------------------------------------------------------------------
EXPECT_OUTPUT(
insert_after_comment_output_sink,
"<div><!--hello--><after></div>",
&EXPECTED_USER_DATA,
sizeof(EXPECTED_USER_DATA)
);
static lol_html_rewriter_directive_t insert_after_comment(
lol_html_comment_t *comment,
void *user_data
) {
UNUSED(user_data);
const char *after = "<after>";
note("Insert after comment");
ok(!lol_html_comment_after(comment, after, strlen(after), true));
return LOL_HTML_CONTINUE;
}
static void test_insert_after_comment(lol_html_selector_t *selector, void *user_data) {
UNUSED(selector);
lol_html_rewriter_builder_t *builder = lol_html_rewriter_builder_new();
lol_html_rewriter_builder_add_document_content_handlers(
builder,
NULL,
NULL,
&insert_after_comment,
NULL,
NULL,
NULL,
NULL,
NULL
);
run_rewriter(
builder,
"<div><!--hello--></div>",
insert_after_comment_output_sink,
user_data
);
}
//-------------------------------------------------------------------------
EXPECT_OUTPUT(
remove_comment_output_sink,
"<>",
&EXPECTED_USER_DATA,
sizeof(EXPECTED_USER_DATA)
);
static lol_html_rewriter_directive_t remove_comment(
lol_html_comment_t *comment,
void *user_data
) {
UNUSED(user_data);
note("Removed flag");
ok(!lol_html_comment_is_removed(comment));
note("Remove");
lol_html_comment_remove(comment);
ok(lol_html_comment_is_removed(comment));
return LOL_HTML_CONTINUE;
}
static void test_remove_comment(void *user_data) {
lol_html_rewriter_builder_t *builder = lol_html_rewriter_builder_new();
lol_html_rewriter_builder_add_document_content_handlers(
builder,
NULL,
NULL,
&remove_comment,
NULL,
NULL,
NULL,
NULL,
NULL
);
run_rewriter(builder, "<<!--0_0-->>", remove_comment_output_sink, user_data);
}
//-------------------------------------------------------------------------
static lol_html_rewriter_directive_t stop_rewriting(
lol_html_comment_t *comment,
void *user_data
) {
UNUSED(comment);
UNUSED(user_data);
note("Stop rewriting");
return LOL_HTML_STOP;
}
static void test_stop(void *user_data) {
lol_html_rewriter_builder_t *builder = lol_html_rewriter_builder_new();
lol_html_rewriter_builder_add_document_content_handlers(
builder,
NULL,
NULL,
&stop_rewriting,
NULL,
NULL,
NULL,
NULL,
NULL
);
expect_stop(builder, "<!-- hey -->", user_data);
}
//-------------------------------------------------------------------------
static void test_stop_with_selector(lol_html_selector_t *selector, void *user_data) {
lol_html_rewriter_builder_t *builder = lol_html_rewriter_builder_new();
int err = lol_html_rewriter_builder_add_element_content_handlers(
builder,
selector,
NULL,
NULL,
&stop_rewriting,
NULL,
NULL,
NULL
);
ok(!err);
expect_stop(builder, "<div><!-- foo --></div>", user_data);
}
void test_comment_api() {
int user_data = 42;
const char *selector_str = "*";
lol_html_selector_t *selector = lol_html_selector_parse(
selector_str,
strlen(selector_str)
);
test_get_set_comment_text(&user_data);
test_get_set_user_data(&user_data);
test_replace_comment(selector, &user_data);
test_insert_after_comment(selector, &user_data);
test_remove_comment(&user_data);
test_insert_before_and_after_comment(&user_data);
test_stop(&user_data);
test_stop_with_selector(selector, &user_data);
lol_html_selector_free(selector);
}
| 3,549 |
2,151 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/browsing_data/counters/browsing_data_counter_utils.h"
#include <string>
#include <vector>
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "chrome/browser/browsing_data/counters/cache_counter.h"
#include "chrome/test/base/testing_browser_process.h"
#include "chrome/test/base/testing_profile.h"
#include "content/public/test/test_browser_thread_bundle.h"
#include "extensions/buildflags/buildflags.h"
#include "testing/gtest/include/gtest/gtest.h"
#if BUILDFLAG(ENABLE_EXTENSIONS)
#include "base/strings/string_split.h"
#include "chrome/browser/browsing_data/counters/hosted_apps_counter.h"
#endif
class BrowsingDataCounterUtilsTest : public testing::Test {
public:
BrowsingDataCounterUtilsTest() {}
~BrowsingDataCounterUtilsTest() override {}
TestingProfile* GetProfile() { return &profile_; }
private:
content::TestBrowserThreadBundle thread_bundle_;
TestingProfile profile_;
};
TEST_F(BrowsingDataCounterUtilsTest, CacheCounterResult) {
// This test assumes that the strings are served exactly as defined,
// i.e. that the locale is set to the default "en".
ASSERT_EQ("en", TestingBrowserProcess::GetGlobal()->GetApplicationLocale());
const int kBytesInAMegabyte = 1024 * 1024;
// Test the output for various forms of CacheResults.
const struct TestCase {
int bytes;
bool is_upper_limit;
bool is_basic_tab;
std::string expected_output;
} kTestCases[] = {
{42, false, false, "Less than 1 MB"},
{42, false, true,
"Frees up less than 1 MB. Some sites may load more slowly on your next "
"visit."},
{2.312 * kBytesInAMegabyte, false, false, "2.3 MB"},
{2.312 * kBytesInAMegabyte, false, true,
"Frees up 2.3 MB. Some sites may load more slowly on your next visit."},
{2.312 * kBytesInAMegabyte, true, false, "Less than 2.3 MB"},
{2.312 * kBytesInAMegabyte, true, true,
"Frees up less than 2.3 MB. Some sites may load more slowly on your "
"next visit."},
{500.2 * kBytesInAMegabyte, false, false, "500 MB"},
{500.2 * kBytesInAMegabyte, true, false, "Less than 500 MB"},
};
for (const TestCase& test_case : kTestCases) {
CacheCounter counter(GetProfile());
browsing_data::ClearBrowsingDataTab tab =
test_case.is_basic_tab ? browsing_data::ClearBrowsingDataTab::BASIC
: browsing_data::ClearBrowsingDataTab::ADVANCED;
counter.Init(GetProfile()->GetPrefs(), tab,
browsing_data::BrowsingDataCounter::Callback());
CacheCounter::CacheResult result(&counter, test_case.bytes,
test_case.is_upper_limit);
SCOPED_TRACE(base::StringPrintf(
"Test params: %d bytes, %d is_upper_limit, %d is_basic_tab.",
test_case.bytes, test_case.is_upper_limit, test_case.is_basic_tab));
base::string16 output =
GetChromeCounterTextFromResult(&result, GetProfile());
EXPECT_EQ(output, base::ASCIIToUTF16(test_case.expected_output));
}
}
#if BUILDFLAG(ENABLE_EXTENSIONS)
// Tests the complex output of the hosted apps counter.
TEST_F(BrowsingDataCounterUtilsTest, HostedAppsCounterResult) {
HostedAppsCounter counter(GetProfile());
// This test assumes that the strings are served exactly as defined,
// i.e. that the locale is set to the default "en".
ASSERT_EQ("en", TestingBrowserProcess::GetGlobal()->GetApplicationLocale());
// Test the output for various numbers of hosted apps.
const struct TestCase {
std::string apps_list;
std::string expected_output;
} kTestCases[] = {
{"", "None"},
{"App1", "1 app (App1)"},
{"App1, App2", "2 apps (App1, App2)"},
{"App1, App2, App3", "3 apps (App1, App2, and 1 more)"},
{"App1, App2, App3, App4", "4 apps (App1, App2, and 2 more)"},
{"App1, App2, App3, App4, App5", "5 apps (App1, App2, and 3 more)"},
};
for (const TestCase& test_case : kTestCases) {
// Split the list of installed apps by commas.
std::vector<std::string> apps = base::SplitString(
test_case.apps_list, ",",
base::TRIM_WHITESPACE, base::SPLIT_WANT_NONEMPTY);
// The first two apps in the list are used as examples.
std::vector<std::string> examples;
examples.assign(
apps.begin(), apps.begin() + (apps.size() > 2 ? 2 : apps.size()));
HostedAppsCounter::HostedAppsResult result(
&counter,
apps.size(),
examples);
base::string16 output =
GetChromeCounterTextFromResult(&result, GetProfile());
EXPECT_EQ(output, base::ASCIIToUTF16(test_case.expected_output));
}
}
#endif
| 1,835 |
852 | #include "DataFormats/CastorReco/interface/CastorCluster.h"
reco::CastorCluster::CastorCluster(const double energy,
const ROOT::Math::XYZPoint& position,
const double emEnergy,
const double hadEnergy,
const double fem,
const double width,
const double depth,
const double fhot,
const double sigmaz,
const reco::CastorTowerRefVector& usedTowers) {
position_ = position;
energy_ = energy;
emEnergy_ = emEnergy;
hadEnergy_ = hadEnergy;
fem_ = fem;
width_ = width;
depth_ = depth;
fhot_ = fhot;
sigmaz_ = sigmaz;
for (reco::CastorTowerRefVector::const_iterator towerit = usedTowers.begin(); towerit != usedTowers.end();
++towerit) {
usedTowers_.push_back((*towerit));
}
}
reco::CastorCluster::~CastorCluster() {}
| 575 |
2,342 | <reponame>cameled/GacUI
/***********************************************************************
!!!!!! DO NOT MODIFY !!!!!!
Source: Host.sln
This file is generated by Workflow compiler
https://github.com/vczh-libraries
***********************************************************************/
#ifndef VCZH_WORKFLOW_COMPILER_GENERATED_DARKSKININCLUDES
#define VCZH_WORKFLOW_COMPILER_GENERATED_DARKSKININCLUDES
#include "DarkSkin.h"
#endif
| 140 |
2,151 | <reponame>zack-braun/4607_NS
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
| 65 |
2,231 | <filename>engine/sound/src/devices/device_openal.cpp
// Copyright 2020 The Defold Foundation
// Licensed under the Defold License version 1.0 (the "License"); you may not use
// this file except in compliance with the License.
//
// You may obtain a copy of the License, together with FAQs at
// https://www.defold.com/license
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#include <stdio.h>
#include <string.h>
#include <stdint.h>
#include <dlib/array.h>
#include <dlib/log.h>
#include <dlib/math.h>
#include <dlib/time.h>
#include <dlib/profile.h>
#include "sound.h"
#if defined(__MACH__)
#include <OpenAL/al.h>
#include <OpenAL/alc.h>
#else
#include <AL/al.h>
#include <AL/alc.h>
#endif
namespace dmDeviceOpenAL
{
struct OpenALDevice
{
ALCdevice* m_Device;
ALCcontext* m_Context;
dmArray<ALuint> m_Buffers;
ALuint m_Source;
uint32_t m_MixRate;
OpenALDevice()
{
m_Device = 0;
m_Context = 0;
m_Source = 0;
m_MixRate = 0;
}
};
static void CheckAndPrintError()
{
ALenum error = alGetError();
if (error != AL_NO_ERROR)
{
dmLogError("%s", alGetString(error));
}
}
dmSound::Result DeviceOpenALOpen(const dmSound::OpenDeviceParams* params, dmSound::HDevice* device)
{
assert(params);
assert(device);
ALCdevice* al_device;
ALCcontext* al_context;
al_device = alcOpenDevice(0);
if (al_device == 0) {
dmLogError("Failed to create OpenAL device");
return dmSound::RESULT_UNKNOWN_ERROR;
}
al_context = alcCreateContext(al_device, 0);
if (al_context == 0) {
dmLogError("Failed to create OpenAL context");
alcCloseDevice(al_device);
return dmSound::RESULT_UNKNOWN_ERROR;
}
if (!alcMakeContextCurrent (al_context)) {
dmLogError("Failed to make OpenAL context current");
alcDestroyContext(al_context);
alcCloseDevice(al_device);
return dmSound::RESULT_UNKNOWN_ERROR;
}
OpenALDevice* openal = new OpenALDevice;
openal->m_Device = al_device;
openal->m_Context = al_context;
openal->m_Buffers.SetCapacity(params->m_BufferCount);
openal->m_Buffers.SetSize(params->m_BufferCount);
alGenBuffers(params->m_BufferCount, openal->m_Buffers.Begin());
CheckAndPrintError();
alGenSources(1, &openal->m_Source);
CheckAndPrintError();
openal->m_MixRate = 44100;
*device = openal;
alcMakeContextCurrent(NULL);
return dmSound::RESULT_OK;
}
void DeviceOpenALClose(dmSound::HDevice device)
{
if (!device)
{
return;
}
OpenALDevice* openal = (OpenALDevice*) device;
alcMakeContextCurrent(openal->m_Context);
int iter = 0;
while (openal->m_Buffers.Size() != openal->m_Buffers.Capacity())
{
int processed;
alGetSourcei(openal->m_Source, AL_BUFFERS_PROCESSED, &processed);
while (processed > 0) {
ALuint buffer;
alSourceUnqueueBuffers(openal->m_Source, 1, &buffer);
CheckAndPrintError();
openal->m_Buffers.Push(buffer);
--processed;
}
if ((iter + 1) % 10 == 0) {
dmLogInfo("Waiting for OpenAL device to complete");
}
++iter;
dmTime::Sleep(10 * 1000);
if (iter > 1000) {
dmLogError("Still buffers in OpenAL. Bailing.");
}
}
alSourceStop(openal->m_Source);
alDeleteSources(1, &openal->m_Source);
alDeleteBuffers(openal->m_Buffers.Size(), openal->m_Buffers.Begin());
CheckAndPrintError();
if (alcMakeContextCurrent(0)) {
alcDestroyContext(openal->m_Context);
if (!alcCloseDevice(openal->m_Device)) {
dmLogError("Failed to close OpenAL device");
}
} else {
dmLogError("Failed to make OpenAL context current");
}
delete openal;
}
dmSound::Result DeviceOpenALQueue(dmSound::HDevice device, const int16_t* samples, uint32_t sample_count)
{
assert(device);
OpenALDevice* openal = (OpenALDevice*) device;
ALCcontext* current_context = alcGetCurrentContext();
CheckAndPrintError();
if (current_context != openal->m_Context)
{
return dmSound::RESULT_INIT_ERROR;
}
ALuint buffer = openal->m_Buffers[0];
openal->m_Buffers.EraseSwap(0);
alBufferData(buffer, AL_FORMAT_STEREO16, samples, sample_count * 2 * sizeof(int16_t), openal->m_MixRate);
CheckAndPrintError();
alSourceQueueBuffers(openal->m_Source, 1, &buffer);
ALint state;
alGetSourcei(openal->m_Source, AL_SOURCE_STATE, &state);
CheckAndPrintError();
if(state != AL_PLAYING)
{
alSourcePlay(openal->m_Source);
}
return dmSound::RESULT_OK;
}
uint32_t DeviceOpenALFreeBufferSlots(dmSound::HDevice device)
{
assert(device);
OpenALDevice* openal = (OpenALDevice*) device;
int processed;
alGetSourcei(openal->m_Source, AL_BUFFERS_PROCESSED, &processed);
while (processed > 0)
{
ALuint buffer;
alSourceUnqueueBuffers(openal->m_Source, 1, &buffer);
CheckAndPrintError();
openal->m_Buffers.Push(buffer);
--processed;
}
return openal->m_Buffers.Size();
}
void DeviceOpenALDeviceInfo(dmSound::HDevice device, dmSound::DeviceInfo* info)
{
assert(device);
assert(info);
OpenALDevice* openal = (OpenALDevice*) device;
info->m_MixRate = openal->m_MixRate;
}
void DeviceOpenALStart(dmSound::HDevice device)
{
assert(device);
OpenALDevice* openal = (OpenALDevice*) device;
if (!alcMakeContextCurrent(openal->m_Context)) {
dmLogError("Failed to restart OpenAL device, could not enable context!");
}
}
void DeviceOpenALStop(dmSound::HDevice device)
{
assert(device);
if (!alcMakeContextCurrent(NULL)) {
dmLogError("Failed to stop OpenAL device, could not disable context!");
}
}
DM_DECLARE_SOUND_DEVICE(DefaultSoundDevice, "default", DeviceOpenALOpen, DeviceOpenALClose, DeviceOpenALQueue, DeviceOpenALFreeBufferSlots, DeviceOpenALDeviceInfo, DeviceOpenALStart, DeviceOpenALStop);
}
| 3,265 |
3,172 | import logging
from authlib.jose import jwt
from authlib.jose.errors import JoseError
from ..rfc6749 import InvalidClientError
ASSERTION_TYPE = 'urn:ietf:params:oauth:client-assertion-type:jwt-bearer'
log = logging.getLogger(__name__)
class JWTBearerClientAssertion(object):
"""Implementation of Using JWTs for Client Authentication, which is
defined by RFC7523.
"""
#: Value of ``client_assertion_type`` of JWTs
CLIENT_ASSERTION_TYPE = ASSERTION_TYPE
#: Name of the client authentication method
CLIENT_AUTH_METHOD = 'client_assertion_jwt'
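    # Illustrative sketch (not part of this class): the client assertion this
    # authenticator expects, mirroring create_claims_options() below -- iss and sub
    # are the client_id, aud is the token endpoint URL, plus exp and (when jti
    # validation is enabled) jti. The client_id, key and timestamps are assumptions.
    #
    #     from authlib.jose import jwt
    #
    #     header = {'alg': 'RS256'}
    #     claims = {
    #         'iss': 'my-client-id',
    #         'sub': 'my-client-id',
    #         'aud': 'https://provider.example.com/oauth/token',
    #         'exp': expires_at,           # unix timestamp in the future
    #         'jti': unique_random_value,  # replay protection, checked by validate_jti()
    #     }
    #     assertion = jwt.encode(header, claims, client_private_key)
    #     # sent as client_assertion together with
    #     # client_assertion_type=urn:ietf:params:oauth:client-assertion-type:jwt-bearer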
def __init__(self, token_url, validate_jti=True):
self.token_url = token_url
self._validate_jti = validate_jti
def __call__(self, query_client, request):
data = request.form
assertion_type = data.get('client_assertion_type')
assertion = data.get('client_assertion')
if assertion_type == ASSERTION_TYPE and assertion:
resolve_key = self.create_resolve_key_func(query_client, request)
self.process_assertion_claims(assertion, resolve_key)
return self.authenticate_client(request.client)
log.debug('Authenticate via %r failed', self.CLIENT_AUTH_METHOD)
def create_claims_options(self):
"""Create a claims_options for verify JWT payload claims. Developers
MAY overwrite this method to create a more strict options."""
# https://tools.ietf.org/html/rfc7523#section-3
# The Audience SHOULD be the URL of the Authorization Server's Token Endpoint
options = {
'iss': {'essential': True, 'validate': _validate_iss},
'sub': {'essential': True},
'aud': {'essential': True, 'value': self.token_url},
'exp': {'essential': True},
}
if self._validate_jti:
options['jti'] = {'essential': True, 'validate': self.validate_jti}
return options
def process_assertion_claims(self, assertion, resolve_key):
"""Extract JWT payload claims from request "assertion", per
`Section 3.1`_.
:param assertion: assertion string value in the request
:param resolve_key: function to resolve the sign key
:return: JWTClaims
:raise: InvalidClientError
.. _`Section 3.1`: https://tools.ietf.org/html/rfc7523#section-3.1
"""
try:
claims = jwt.decode(
assertion, resolve_key,
claims_options=self.create_claims_options()
)
claims.validate()
except JoseError as e:
log.debug('Assertion Error: %r', e)
raise InvalidClientError()
return claims
def authenticate_client(self, client):
if client.check_endpoint_auth_method(self.CLIENT_AUTH_METHOD, 'token'):
return client
raise InvalidClientError()
def create_resolve_key_func(self, query_client, request):
def resolve_key(headers, payload):
# https://tools.ietf.org/html/rfc7523#section-3
# For client authentication, the subject MUST be the
# "client_id" of the OAuth client
client_id = payload['sub']
client = query_client(client_id)
if not client:
raise InvalidClientError()
request.client = client
return self.resolve_client_public_key(client, headers)
return resolve_key
def validate_jti(self, claims, jti):
"""Validate if the given ``jti`` value is used before. Developers
MUST implement this method::
def validate_jti(self, claims, jti):
key = 'jti:{}-{}'.format(claims['sub'], jti)
if redis.get(key):
return False
redis.set(key, 1, ex=3600)
return True
"""
raise NotImplementedError()
def resolve_client_public_key(self, client, headers):
"""Resolve the client public key for verifying the JWT signature.
A client may have many public keys, in this case, we can retrieve it
via ``kid`` value in headers. Developers MUST implement this method::
def resolve_client_public_key(self, client, headers):
return client.public_key
"""
raise NotImplementedError()
def _validate_iss(claims, iss):
return claims['sub'] == iss
| 1,855 |
967 | //
// ECOGeneralLogPlugin.h
// EchoSDK
//
// Created by yxj on 2019/8/6.
//
#import "ECOBasePlugin.h"
NS_ASSUME_NONNULL_BEGIN
@interface ECOGeneralLogPlugin : ECOBasePlugin
@end
NS_ASSUME_NONNULL_END
| 93 |
1,920 | <reponame>tirkarthi/mathics-core
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pytest
from .helper import evaluate
try:
import scipy.integrate
usescipy = True
except:
usescipy = False
if usescipy:
methods = ["Automatic", "Romberg", "Internal", "NQuadrature"]
generic_tests_for_nintegrate = [
(r"NIntegrate[x^2, {x,0,1}, {method} ]", r"1/3.", ""),
(r"NIntegrate[x^2 y^(-1.+1/3.), {x,0,1},{y,0,1}, {method}]", r"1.", ""),
]
tests_for_nintegrate = sum(
[
[
(tst[0].replace("{method}", "Method->" + method), tst[1], tst[2])
for tst in generic_tests_for_nintegrate
]
for method in methods
],
[],
)
else:
tests_for_nintegrate = [
(r"NIntegrate[x^2, {x,0,1}]", r"1/3.", ""),
# FIXME: this can integrate to Infinity
# (r"NIntegrate[x^2 y^(-.5), {x,0,1},{y,0,1}]", r"1.", ""),
]
@pytest.mark.parametrize("str_expr, str_expected, msg", tests_for_nintegrate)
def test_nintegrate(str_expr: str, str_expected: str, msg: str, message=""):
result = evaluate(str_expr)
expected = evaluate(str_expected)
if msg:
assert result == expected, msg
else:
assert result == expected
| 637 |
5,169 | <reponame>Gantios/Specs
{
"name": "StompX",
"version": "1.0.1",
"summary": "Robust Websocket STOMP client for real-time messaging and events",
"swift_versions": [
"4.0",
"4.2",
"5.0"
],
"description": "Robust Websocket STOMP client for real-time messaging and events. STOMP allows for subscribing and unsubscribing to topics for real-time socket events.",
"homepage": "https://github.com/ChatKitty/chatkitty-ios-stompx",
"license": {
"type": "Apache 2.0",
"file": "LICENSE"
},
"authors": {
"ChatKitty": "<EMAIL>"
},
"source": {
"git": "https://github.com/ChatKitty/chatkitty-ios-stompx.git",
"tag": "1.0.1"
},
"platforms": {
"ios": "9.0"
},
"source_files": "StompX/Classes/**/*",
"dependencies": {
"Starscream": [
"~> 3.0.6"
]
},
"swift_version": "5.0"
}
| 368 |
617 | // Copyright (c) Open Enclave SDK contributors.
// Licensed under the MIT License.
#ifndef _OE_BITS_STDFILE_H
#define _OE_BITS_STDFILE_H
#define OE_BUFSIZ 8192
#define OE_EOF (-1)
typedef struct _OE_IO_FILE OE_FILE;
extern OE_FILE* const oe_stdin;
extern OE_FILE* const oe_stdout;
extern OE_FILE* const oe_stderr;
#if defined(OE_NEED_STDC_NAMES)
typedef OE_FILE FILE;
#define stdin oe_stdin
#define stdout oe_stdout
#define stderr oe_stderr
#endif
#endif /* _OE_BITS_STDFILE_H */
| 216 |
316 | <gh_stars>100-1000
#include <type_traits>
#include <cstdio>
#include <cmath>
#include <vector>
#include <string>
#include "../usdi/usdi.h"
using usdi::float2;
using usdi::float3;
using usdi::float4;
using usdi::quatf;
template<class T> void* ToPtr(const T& v) { return (void*)&v; }
template<class T> void* ToPtr(const T* v) { return (void*)v; }
static void AddAttribute(usdi::Schema *schema, const char *name, usdi::AttributeType type, const char* v)
{
auto *attr = usdiPrimCreateAttribute(schema, name, type);
usdi::AttributeData data;
data.data = (void*)v;
data.num_elements = 1;
usdiAttrWriteSample(attr, &data, usdiDefaultTime());
}
template<class T>
static void AddAttribute(usdi::Schema *schema, const char *name, usdi::AttributeType type, const T& v)
{
auto *attr = usdiPrimCreateAttribute(schema, name, type);
usdi::AttributeData data;
data.data = ToPtr(v);
data.num_elements = 1;
usdiAttrWriteSample(attr, &data, usdiDefaultTime());
}
template<class T, size_t N>
static void AddAttribute(usdi::Schema *schema, const char *name, usdi::AttributeType type, const T (&v)[N])
{
auto *attr = usdiPrimCreateAttribute(schema, name, type);
usdi::AttributeData data;
data.data = (void*)v;
data.num_elements = N;
for (int i = 0; i < N; ++i) {
usdi::Time t = i;
usdiAttrWriteSample(attr, &data, t);
}
}
static void TestAttributes(usdi::Schema *schema)
{
{
int v = 123;
AddAttribute(schema, "int_scalar", usdi::AttributeType::Int, v);
}
{
float v = 1.23f;
AddAttribute(schema, "float_scalar", usdi::AttributeType::Float, v);
}
{
float2 v = {1.23f, 2.34f};
AddAttribute(schema, "float2_scalar", usdi::AttributeType::Float2, v);
}
{
float3 v = { 1.23f, 2.34f, 3.45f };
AddAttribute(schema, "float3_scalar", usdi::AttributeType::Float3, v);
}
{
float4 v = { 1.23f, 2.34f, 3.45f, 4.56f };
AddAttribute(schema, "float4_scalar", usdi::AttributeType::Float4, v);
}
{
quatf v = { 1.23f, 2.34f, 3.45f, 4.56f };
AddAttribute(schema, "quatf_scalar", usdi::AttributeType::QuatF, v);
}
{
const char *v = "test_token";
AddAttribute(schema, "token_scalar", usdi::AttributeType::Token, v);
}
{
const char *v = "test_string";
AddAttribute(schema, "string_scalar", usdi::AttributeType::String, v);
}
{
int v[] = { 123, 234, 345 };
AddAttribute(schema, "int_array", usdi::AttributeType::IntArray, v);
}
{
float v[] = { 1.23f, 2.34f, 3.45f };
AddAttribute(schema, "float_array", usdi::AttributeType::FloatArray, v);
}
{
float2 v[] = { { 1.23f, 2.34f } ,{ 3.45f, 4.56f } ,{ 5.67f, 6.78f } };
AddAttribute(schema, "float2_array", usdi::AttributeType::Float2Array, v);
}
{
float3 v[] = { { 1.23f, 2.34f, 3.45f } ,{ 4.56f, 5.67f, 6.78f } ,{ 7.89f, 8.90f, 9.01f} };
AddAttribute(schema, "float3_array", usdi::AttributeType::Float3Array, v);
}
{
float4 v[] = { { 1.23f, 2.34f, 3.45f, 4.56f } ,{ 5.67f, 6.78f, 7.89f, 8.90f } ,{ 9.01f, 0.12f, 1.23f, 2.34f } };
AddAttribute(schema, "float4_array", usdi::AttributeType::Float4Array, v);
}
{
quatf v[] = { { 1.23f, 2.34f, 3.45f, 4.56f } ,{ 5.67f, 6.78f, 7.89f, 8.90f } ,{ 9.01f, 0.12f, 1.23f, 2.34f } };
AddAttribute(schema, "quatf_array", usdi::AttributeType::QuatFArray, v);
}
{
const char *v[] = { "test_token0", "test_token1", "test_token2" };
AddAttribute(schema, "token_array", usdi::AttributeType::TokenArray, v);
}
{
const char *v[] = { "test_string0", "test_string1", "test_string2" };
AddAttribute(schema, "string_array", usdi::AttributeType::StringArray, v);
}
}
void TestExport(const char *filename)
{
auto *ctx = usdiCreateContext();
usdi::ExportSettings settings;
settings.instanceable_by_default = true;
usdiSetExportSettings(ctx, &settings);
usdiCreateStage(ctx, filename);
auto *root = usdiGetRoot(ctx);
{
auto *xf = usdiCreateXform(ctx, root, "Child");
for (int i = 0; i < 5; ++i) {
usdi::Time t = i;
usdi::XformData data;
data.position.x = 0.2f * i;
usdiXformWriteSample(xf, &data, t);
}
TestAttributes(xf);
{
auto *mesh = usdiCreateMesh(ctx, xf, "TestMesh");
float3 points[] = {
{ -0.5f, -0.5f, 0.0f },
{ 0.5f, -0.5f, 0.0f },
{ 0.5f, 0.5f, 0.0f },
{ -0.5f, 0.5f, 0.0f },
};
float3 normals[] = {
{ 0.0f, 0.0f, 1.0f },
{ 0.0f, 0.0f, 1.0f },
{ 0.0f, 0.0f, 1.0f },
{ 0.0f, 0.0f, 1.0f },
};
float4 tangents[] = {
{ 0.0f, 1.0f, 0.0f },
{ 0.0f, 1.0f, 0.0f },
{ 0.0f, 1.0f, 0.0f },
{ 0.0f, 1.0f, 0.0f },
};
float2 uvs[] = {
{ 0.0f, 0.0f },
{ 1.0f, 0.0f },
{ 1.0f, 1.0f },
{ 0.0f, 1.0f },
};
int counts[] = { 4 };
int indices[] = { 0, 1, 2, 3 };
usdi::MeshData data;
data.points = points;
data.normals = normals;
data.tangents = tangents;
data.uvs = uvs;
            data.counts = counts;
            data.indices = indices;
data.num_points = std::extent<decltype(points)>::value;
data.num_counts = std::extent<decltype(counts)>::value;
data.num_indices = std::extent<decltype(indices)>::value;
for (int i = 0; i < 5; ++i) {
usdi::Time t = i;
usdiMeshWriteSample(mesh, &data, t);
}
}
}
{
auto *mesh2 = usdiCreateMesh(ctx, root, "TestRootMesh");
float3 vertices[] = {
{ -0.5f, -0.5f, 0.0f },
{ 0.5f, -0.5f, 0.0f },
{ 0.5f, 0.5f, 0.0f },
{ -0.5f, 0.5f, 0.0f },
};
float2 uv[] = {
{ 0.0f, 0.0f },
{ 1.0f, 0.0f },
{ 1.0f, 1.0f },
{ 0.0f, 1.0f },
};
int counts[] = { 3, 3 };
int indices[] = { 0, 1, 2, 0, 2, 3 };
usdi::MeshData data;
data.points = vertices;
data.uvs = uv;
data.counts = counts;
data.indices = indices;
data.num_points = std::extent<decltype(vertices)>::value;
data.num_counts = std::extent<decltype(counts)>::value;
data.num_indices = std::extent<decltype(indices)>::value;
for (int i = 0; i < 5; ++i) {
usdi::Time t = i;
usdiMeshWriteSample(mesh2, &data, t);
}
usdiMeshPreComputeNormals(mesh2, true);
AddAttribute(mesh2, "TestImageAsset", usdi::AttributeType::Asset, "USDAssets/test.exr");
AddAttribute(mesh2, "TestFBXAsset", usdi::AttributeType::Asset, "USDAssets/test.fbx");
AddAttribute(mesh2, "TestMDLAsset", usdi::AttributeType::Asset, "USDAssets/test.mdl");
}
{
auto CreateTestXformTree = [](usdi::Context *ctx, usdi::Schema *parent, std::vector<std::string> names)
{
for (auto& name : names) {
parent = usdiCreateXform(ctx, parent, name.c_str());
}
};
auto *xf = usdiCreateXform(ctx, root, "TestVariants");
usdiPrimBeginEditVariant(xf, "VariantSet0", "Variant0-0");
xf = usdiAsXform(usdiFindSchema(ctx, "/TestVariants"));
for (int i = 0; i < 5; ++i) {
usdi::Time t = i;
usdi::XformData data;
data.position.x = 0.2f * i;
usdiXformWriteSample(xf, &data, t);
}
CreateTestXformTree(ctx, xf, { "Variant0-0", "Hoge" });
usdiPrimEndEditVariant(xf);
usdiPrimBeginEditVariant(xf, "VariantSet1", "Variant1-1");
xf = usdiAsXform(usdiFindSchema(ctx, "/TestVariants"));
for (int i = 0; i < 5; ++i) {
usdi::Time t = i;
usdi::XformData data;
data.position.y = 0.4f * i;
usdiXformWriteSample(xf, &data, t);
}
CreateTestXformTree(ctx, xf, { "Variant1-1", "Hage", "Hige" });
usdiPrimEndEditVariant(xf);
}
// create internal reference
auto *ref = usdiCreateOverride(ctx, "/Ref");
usdiPrimAddReference(ref, nullptr, "/Child");
// create payload
auto *pl = usdiCreateOverride(ctx, "/TestPayload");
usdiPrimSetPayload(pl, "HelloWorld.usda", "/hello");
usdiSave(ctx);
usdiFlatten(ctx);
usdiSaveAs(ctx, "Hoge.usda");
usdiDestroyContext(ctx);
}
void TestExportReference(const char *filename, const char *flatten)
{
auto *ctx = usdiCreateContext();
usdiCreateStage(ctx, filename);
// create external reference
auto *ref = usdiCreateOverride(ctx, "/Test");
usdiPrimAddReference(ref, "TestExport.usda", "/Child");
usdiSave(ctx);
usdiFlatten(ctx);
usdiSaveAs(ctx, flatten);
usdiDestroyContext(ctx);
}
| 5,083 |
640 | <filename>src/appmake/adam.c<gh_stars>100-1000
/*
* Adam DDP generator
*/
#include "appmake.h"
static char *binname = NULL;
static char *crtfile = NULL;
static char *outfile = NULL;
static char help = 0;
/* Options that are available for this module */
option_t adam_options[] = {
{ 'h', "help", "Display this help", OPT_BOOL, &help},
{ 'b', "binfile", "Linked binary file", OPT_STR, &binname },
{ 'c', "crt0file", "crt0 file used in linking", OPT_STR, &crtfile },
{ 'o', "output", "Name of output file", OPT_STR, &outfile },
{ 0 , NULL, NULL, OPT_NONE, NULL }
};
int adam_exec(char *target)
{
char *buf;
char bootbuf[1024];
char filename[FILENAME_MAX+1];
char bootname[FILENAME_MAX+1];
FILE *fpin, *bootstrap_fp, *fpout;
long pos, bootlen;
if ( help )
return -1;
if ( binname == NULL ) {
return -1;
}
strcpy(bootname, binname);
suffix_change(bootname, "_BOOTSTRAP.bin");
if ( (bootstrap_fp=fopen_bin(bootname, crtfile) ) == NULL ) {
exit_log(1,"Can't open input file %s\n",bootname);
}
if ( fseek(bootstrap_fp,0,SEEK_END) ) {
fclose(bootstrap_fp);
        exit_log(1,"Couldn't determine size of file\n");
}
bootlen = ftell(bootstrap_fp);
fseek(bootstrap_fp,0L,SEEK_SET);
if ( bootlen > 1024 ) {
exit_log(1, "Bootstrap has length %d > 1024", bootlen);
}
memset(bootbuf, 0, sizeof(bootbuf));
if ( fread(bootbuf, 1, bootlen, bootstrap_fp) != bootlen ) {
exit_log(1, "Cannot read whole bootstrap file");
}
fclose(bootstrap_fp);
strcpy(filename, binname);
if ( ( fpin = fopen_bin(binname, crtfile) ) == NULL ) {
exit_log(1,"Cannot open binary file <%s>\n",binname);
}
if (fseek(fpin, 0, SEEK_END)) {
fclose(fpin);
exit_log(1,"Couldn't determine size of file\n");
}
pos = ftell(fpin);
fseek(fpin, 0L, SEEK_SET);
    buf = must_malloc(255 * 1024);
    memset(buf, 0, 255 * 1024);
if (pos != fread(buf, 1, pos, fpin)) { fclose(fpin); exit_log(1, "Could not read required data from <%s>\n",binname); }
fclose(fpin);
suffix_change(filename,".ddp");
if ( ( fpout = fopen(filename, "wb")) == NULL ) {
exit_log(1,"Cannot open ddp file for writing <%s>\n",filename);
}
if ( fwrite(bootbuf, sizeof(char), 1024, fpout) != 1024) {
exit_log(1,"Could not write bootstrap to ddp file <%s>\n",filename);
}
if ( fwrite(buf, sizeof(char), 255 * 1024, fpout) != 255 * 1024) {
exit_log(1,"Could not write program to ddp file <%s>\n",filename);
}
fclose(fpout);
return 0;
}
| 1,318 |
3,562 | <filename>fe/spark-dpp/src/main/java/org/apache/doris/load/loadv2/dpp/DppColumns.java
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.load.loadv2.dpp;
import com.google.common.base.Preconditions;
import java.io.Serializable;
import java.util.List;
import java.util.ArrayList;
import java.util.Date;
import java.util.Objects;
import java.util.Comparator;
// DppColumns is used to store the column values of a single row.
class DppColumns implements Comparable<DppColumns>, Serializable {
    public List<Object> columns = new ArrayList<Object>();
public DppColumns(List<Object> keys){
this.columns = keys;
}
public DppColumns(DppColumns key, List<Integer> indexes){
for (int i = 0; i < indexes.size(); ++i) {
columns.add(key.columns.get(indexes.get(i)));
}
}
@Override
public int compareTo(DppColumns other) {
Preconditions.checkState(columns.size() == other.columns.size());
int cmp = 0;
for (int i = 0; i < columns.size(); i++) {
Object columnObj = columns.get(i);
Object otherColumn = other.columns.get(i);
if (columnObj == null && otherColumn == null) {
return 0;
} else if (columnObj == null || otherColumn == null) {
if (columnObj == null) {
return -1;
} else {
return 1;
}
}
if (columns.get(i) instanceof Integer) {
cmp = ((Integer)(columns.get(i))).compareTo((Integer)(other.columns.get(i)));
} else if (columns.get(i) instanceof Long) {
cmp = ((Long)(columns.get(i))).compareTo((Long)(other.columns.get(i)));
} else if (columns.get(i) instanceof Boolean) {
cmp = ((Boolean)(columns.get(i))).compareTo((Boolean) (other.columns.get(i)));
} else if (columns.get(i) instanceof Short) {
cmp = ((Short)(columns.get(i))).compareTo((Short)(other.columns.get(i)));
} else if (columns.get(i) instanceof Float) {
cmp = ((Float)(columns.get(i))).compareTo((Float) (other.columns.get(i)));
} else if (columns.get(i) instanceof Double) {
cmp = ((Double)(columns.get(i))).compareTo((Double) (other.columns.get(i)));
} else if (columns.get(i) instanceof Date) {
cmp = ((Date)(columns.get(i))).compareTo((Date) (other.columns.get(i)));
} else if (columns.get(i) instanceof java.sql.Timestamp) {
cmp = ((java.sql.Timestamp)columns.get(i)).compareTo((java.sql.Timestamp)other.columns.get(i));
} else {
cmp = ((String)(columns.get(i))).compareTo((String) (other.columns.get(i)));
}
if (cmp != 0) {
return cmp;
}
}
return cmp;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
DppColumns dppColumns = (DppColumns) o;
return Objects.equals(columns, dppColumns.columns);
}
@Override
public int hashCode() {
return Objects.hash(columns);
}
@Override
public String toString() {
return "dppColumns{" +
"columns=" + columns +
'}';
}
}
class DppColumnsComparator implements Comparator<DppColumns> {
@Override
public int compare(DppColumns left, DppColumns right) {
return left.compareTo(right);
}
} | 1,891 |
554 | package github.tornaco.xposedmoduletest.xposed.service;
/**
* Created by guohao4 on 2017/11/1.
* Email: <EMAIL>
*/
// Both ash and ag need to be called at the same time.
@interface SinceSDK {
int value();
}
| 88 |
665 | <gh_stars>100-1000
/* legacy_event_loop.h -*-C++-*-
This file is part of MLDB.
<NAME>, July 2016
Copyright (c) 2016 mldb.ai inc. All rights reserved.
*/
#pragma once
#include <memory>
namespace MLDB {
/* Forward declarations */
struct MessageLoop;
/****************************************************************************/
/* LEGACY EVENT LOOP */
/****************************************************************************/
/* A wrapper around MessageLoop that provides an API similar to the EventLoop
* class */
struct LegacyEventLoop {
LegacyEventLoop();
~LegacyEventLoop();
/** Return the associated MessageLoop instance */
MessageLoop & loop() const;
/** Start the loop thread */
void start();
/** Shutdown the loop thread */
void shutdown();
private:
std::unique_ptr<MessageLoop> loop_;
};
} // namespace MLDB
| 353 |
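The header above only declares the wrapper, so a short, hedged usage sketch may help: start(), loop() and shutdown() are the calls declared in LegacyEventLoop itself, while the include paths and the main() scaffolding are assumptions for illustration only.

// Hypothetical usage sketch for LegacyEventLoop (illustration only; the
// include paths are assumed, not taken from the header above).
#include "mldb/io/legacy_event_loop.h"
#include "mldb/io/message_loop.h"

int main()
{
    MLDB::LegacyEventLoop eventLoop;
    eventLoop.start();                       // spin up the loop thread

    // Asynchronous sources would be registered on the wrapped MessageLoop.
    MLDB::MessageLoop & messageLoop = eventLoop.loop();
    (void) messageLoop;

    eventLoop.shutdown();                    // stop the loop thread
    return 0;
}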
2,151 | <reponame>google-ar/chromium<filename>components/drive/chromeos/remove_stale_cache_files.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/drive/chromeos/remove_stale_cache_files.h"
#include "base/logging.h"
#include "components/drive/chromeos/file_cache.h"
#include "components/drive/chromeos/resource_metadata.h"
#include "components/drive/drive.pb.h"
namespace drive {
namespace internal {
void RemoveStaleCacheFiles(FileCache* cache,
ResourceMetadata* resource_metadata) {
std::unique_ptr<ResourceMetadata::Iterator> it =
resource_metadata->GetIterator();
for (; !it->IsAtEnd(); it->Advance()) {
const ResourceEntry& entry = it->GetValue();
const FileCacheEntry& cache_state =
entry.file_specific_info().cache_state();
// Stale = not dirty but the MD5 does not match.
if (!cache_state.is_dirty() &&
cache_state.md5() != entry.file_specific_info().md5()) {
FileError error = cache->Remove(it->GetID());
LOG_IF(WARNING, error != FILE_ERROR_OK)
<< "Failed to remove a stale cache file. resource_id: "
<< it->GetID();
}
}
}
} // namespace internal
} // namespace drive
| 485 |
892 | <gh_stars>100-1000
{
"schema_version": "1.2.0",
"id": "GHSA-4f3p-g373-w46g",
"modified": "2022-05-01T07:31:20Z",
"published": "2022-05-01T07:31:20Z",
"aliases": [
"CVE-2006-5744"
],
"details": "Multiple SQL injection vulnerabilities in Highwall Enterprise and Highwall Endpoint 4.0.2.11045 management interface allow remote attackers to execute arbitrary SQL commands via an Access Point with a crafted SSID, and via unspecified vectors related to a malicious system operator.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2006-5744"
},
{
"type": "WEB",
"url": "http://www.osvdb.org/29916"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/449118/100/200/threaded"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/449739/100/100/threaded"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/20605"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 509 |
1,652 | package com.ctrip.xpipe.api.migration;
import com.ctrip.xpipe.api.lifecycle.Ordered;
import com.ctrip.xpipe.utils.ServicesUtil;
/**
* @author wenchao.meng
* <p>
* Apr 07, 2017
*/
public interface DcMapper extends Ordered{
DcMapper INSTANCE = ServicesUtil.getDcMapperService();
String getDc(String dcName);
String reverse(String otherDcName);
}
| 153 |
343 | <reponame>nzeh/syzygy
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Specialization of the instrumenter interface for instrumenters that use a
// relinker. This performs all the common bits of this kind of instrumenter:
// - Parse the shared command-line parameters.
// - Initialize the relinker.
// - Provide a default implementation of Instrument.
#ifndef SYZYGY_INSTRUMENT_INSTRUMENTERS_INSTRUMENTER_WITH_RELINKER_H_
#define SYZYGY_INSTRUMENT_INSTRUMENTERS_INSTRUMENTER_WITH_RELINKER_H_
#include <string>
#include "base/command_line.h"
#include "base/files/file_path.h"
#include "syzygy/instrument/instrumenter.h"
#include "syzygy/pe/coff_relinker.h"
#include "syzygy/pe/pe_relinker.h"
namespace instrument {
namespace instrumenters {
class InstrumenterWithRelinker : public InstrumenterInterface {
public:
typedef block_graph::BlockGraph BlockGraph;
typedef block_graph::BlockGraph::ImageFormat ImageFormat;
InstrumenterWithRelinker()
: image_format_(BlockGraph::PE_IMAGE),
allow_overwrite_(false),
debug_friendly_(false),
no_augment_pdb_(false),
no_strip_strings_(false) { }
~InstrumenterWithRelinker() { }
// @name InstrumenterInterface implementation.
// @{
bool ParseCommandLine(const base::CommandLine* command_line) final;
bool Instrument() override;
// @}
protected:
// Virtual method that determines whether or not the input object file
// format is supported by the instrumenter. The default implementation
// supports PE files, and does not support COFF files.
virtual bool ImageFormatIsSupported(ImageFormat image_format);
// Virtual method that performs quick-to-run preparation for the instrumenter,
// such as parsing config files. This function is meant to be called by the
// Instrument function before invoking the relinker. This allows early failure
// to occur, e.g., from bad config files.
virtual bool InstrumentPrepare() = 0;
// Virtual method that does the actual instrumentation with the relinker.
// This function is meant to be called by the Instrument function.
// @note The implementation should log on failure.
virtual bool InstrumentImpl() = 0;
// Pure virtual method that should return the name of the instrumentation
// mode.
virtual const char* InstrumentationMode() = 0;
// Command line parsing to be executed before all subclasses. Subclass
// overrides should call Super::DoCommandLineParse() at the beginning.
virtual bool DoCommandLineParse(const base::CommandLine* command_line);
// Performs more validation after all parsing is done. Subclass overrides
// should call Super::CheckCommandLineParse() at the end.
virtual bool CheckCommandLineParse(const base::CommandLine* command_line);
// @name Internal machinery, replaceable for testing purposes. These will
// only ever be called once per object lifetime.
// @{
virtual pe::PETransformPolicy* GetPETransformPolicy();
virtual pe::CoffTransformPolicy* GetCoffTransformPolicy();
virtual pe::PERelinker* GetPERelinker();
virtual pe::CoffRelinker* GetCoffRelinker();
// @}
// Creates and configures a relinker. This is split out for unittesting
// purposes, allowing child classes to test their InstrumentImpl functions
// in isolation.
bool CreateRelinker();
// The type of image file we are transforming.
ImageFormat image_format_;
// @name Command-line parameters.
// @{
base::FilePath input_image_path_;
base::FilePath input_pdb_path_;
base::FilePath output_image_path_;
base::FilePath output_pdb_path_;
bool allow_overwrite_;
bool debug_friendly_;
bool no_augment_pdb_;
bool no_strip_strings_;
// @}
// This is used to save a pointer to the object returned by the call to
// Get(PE|Coff)Relinker. Ownership of the object is internal in the default
// case, but may be external during tests.
pe::RelinkerInterface* relinker_;
private:
// They are used as containers for holding policy and relinker objects that
// are allocated by our default Get* implementations above.
std::unique_ptr<block_graph::TransformPolicyInterface> policy_object_;
std::unique_ptr<pe::RelinkerInterface> relinker_object_;
DISALLOW_COPY_AND_ASSIGN(InstrumenterWithRelinker);
};
} // namespace instrumenters
} // namespace instrument
#endif // SYZYGY_INSTRUMENT_INSTRUMENTERS_INSTRUMENTER_WITH_RELINKER_H_
| 1,441 |
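Since the header above is all about the extension points a concrete instrumenter must fill in, a hedged sketch of a subclass may make them concrete; the class name and mode string are hypothetical, and only the members declared above (InstrumentPrepare, InstrumentImpl, InstrumentationMode, relinker_) are assumed to exist.

// Hypothetical subclass sketch (not part of Syzygy) showing the three
// InstrumenterWithRelinker extension points declared above.
class ExampleInstrumenter
    : public instrument::instrumenters::InstrumenterWithRelinker {
 protected:
  bool InstrumentPrepare() override {
    // Quick-to-run setup (e.g. parsing a config file) before the relinker
    // is created, so bad input fails early.
    return true;
  }

  bool InstrumentImpl() override {
    // By the time this runs the base class has created relinker_; a real
    // implementation would append its BlockGraph transform to it here.
    return relinker_ != nullptr;
  }

  const char* InstrumentationMode() override { return "example"; }
};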
1,550 | <filename>leo/test/unittest/at-clean-line-number-test.c
// before @others // line 1
def spam(): // line 2
pass
def eggs(): // line 4
pass
// last line: line 6
| 64 |
14,668 | // Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef UI_OZONE_PLATFORM_WAYLAND_GPU_GL_SURFACE_EGL_READBACK_WAYLAND_H_
#define UI_OZONE_PLATFORM_WAYLAND_GPU_GL_SURFACE_EGL_READBACK_WAYLAND_H_
#include "base/containers/circular_deque.h"
#include "base/memory/shared_memory_mapping.h"
#include "ui/ozone/common/gl_surface_egl_readback.h"
#include "ui/ozone/platform/wayland/gpu/wayland_surface_gpu.h"
namespace ui {
class WaylandBufferManagerGpu;
// This is a GLSurface implementation that uses glReadPixels to populate a
// shared memory region with the contents of the surface, and then passes the
// shared memory region to Wayland for presentation.
//
// Basic control flow:
// 1. Resize() creates kMaxBuffers shared memory regions. These are added to
// available_buffers_ and registered with Wayland via CreateShmBasedBuffer().
// 2. SwapBuffersAsync() calls glReadPixels() to read the contents of the
// active GL context into the next available shared memory region. The shared
// memory region is immediately sent to Wayland via CommitBuffer().
// 3. The buffer is not available for reuse until OnSubmission() is called.
//
// Note: This class relies on the assumption that kMaxBuffers is necessary and
// sufficient. The behavior is undefined if SwapBuffersAsync() is called and no
// buffers are available.
class GLSurfaceEglReadbackWayland : public GLSurfaceEglReadback,
public WaylandSurfaceGpu {
public:
GLSurfaceEglReadbackWayland(gfx::AcceleratedWidget widget,
WaylandBufferManagerGpu* buffer_manager);
GLSurfaceEglReadbackWayland(const GLSurfaceEglReadbackWayland&) = delete;
GLSurfaceEglReadbackWayland& operator=(const GLSurfaceEglReadbackWayland&) =
delete;
// gl::GLSurface:
void Destroy() override;
bool Resize(const gfx::Size& size,
float scale_factor,
const gfx::ColorSpace& color_space,
bool has_alpha) override;
bool IsOffscreen() override;
gfx::SwapResult SwapBuffers(PresentationCallback callback) override;
bool SupportsAsyncSwap() override;
void SwapBuffersAsync(SwapCompletionCallback completion_callback,
PresentationCallback presentation_callback) override;
gfx::SurfaceOrigin GetOrigin() const override;
private:
struct PixelBuffer {
PixelBuffer(base::WritableSharedMemoryMapping shm_mapping,
uint32_t buffer_id);
~PixelBuffer();
PixelBuffer(const PixelBuffer&) = delete;
PixelBuffer& operator=(const PixelBuffer&) = delete;
// Shared memory mapping that readback pixels are written to so that Wayland
    // is able to present them on screen.
base::WritableSharedMemoryMapping shm_mapping_;
// The buffer id that corresponds to the |wl_buffer| created on the browser
// process side.
uint32_t buffer_id_ = 0;
};
~GLSurfaceEglReadbackWayland() override;
// WaylandSurfaceGpu:
void OnSubmission(uint32_t buffer_id,
const gfx::SwapResult& swap_result,
gfx::GpuFenceHandle release_fence) override;
void OnPresentation(uint32_t buffer_id,
const gfx::PresentationFeedback& feedback) override;
void DestroyBuffers();
// Widget of the window that this readback writes pixels to.
const gfx::AcceleratedWidget widget_;
WaylandBufferManagerGpu* const buffer_manager_;
// Size of the buffer.
gfx::Size size_;
float surface_scale_factor_ = 1.f;
// Available pixel buffers based on shared memory.
std::vector<std::unique_ptr<PixelBuffer>> available_buffers_;
// Displayed buffer that will become available after another buffer is
// submitted.
std::unique_ptr<PixelBuffer> displayed_buffer_;
// Submitted buffers waiting to be displayed.
base::circular_deque<std::unique_ptr<PixelBuffer>> in_flight_pixel_buffers_;
std::vector<SwapCompletionCallback> completion_callbacks_;
std::vector<PresentationCallback> presentation_callbacks_;
size_t pending_frames_ = 0;
};
} // namespace ui
#endif // UI_OZONE_PLATFORM_WAYLAND_GPU_GL_SURFACE_EGL_READBACK_WAYLAND_H_
| 1,465 |
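The "basic control flow" comment above boils down to rotating a fixed pool of buffers between available, in-flight and displayed states. The following standalone sketch (hypothetical names, no Chromium or Wayland types) is only meant to illustrate that rotation, not the real class:

// Standalone sketch of the buffer-rotation idea described above; buffer ids
// stand in for shared-memory regions and printing stands in for CommitBuffer().
#include <cstdint>
#include <deque>
#include <iostream>
#include <vector>

class ReadbackBufferPool {
 public:
  explicit ReadbackBufferPool(uint32_t max_buffers) {
    for (uint32_t id = 0; id < max_buffers; ++id)
      available_.push_back(id);
  }

  // Analogue of SwapBuffersAsync(): grab a free buffer, pretend to
  // glReadPixels into it, and submit it to the compositor.
  bool Swap() {
    if (available_.empty())
      return false;  // the real class assumes this never happens
    const uint32_t id = available_.back();
    available_.pop_back();
    in_flight_.push_back(id);
    std::cout << "submit buffer " << id << "\n";
    return true;
  }

  // Analogue of OnSubmission(): the buffer shown before this one becomes
  // reusable, and the acknowledged buffer becomes the displayed one.
  void OnSubmission() {
    if (has_displayed_)
      available_.push_back(displayed_);
    displayed_ = in_flight_.front();
    in_flight_.pop_front();
    has_displayed_ = true;
  }

 private:
  std::vector<uint32_t> available_;
  std::deque<uint32_t> in_flight_;
  uint32_t displayed_ = 0;
  bool has_displayed_ = false;
};

int main() {
  ReadbackBufferPool pool(2);
  pool.Swap();          // buffer 1 goes in flight
  pool.OnSubmission();  // buffer 1 is now displayed
  pool.Swap();          // buffer 0 goes in flight
  pool.OnSubmission();  // buffer 1 returns to the available pool
  return 0;
}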
7,353 | <reponame>sh0tCa11er/lanternEnhancement
/**
* @file NCDUdevMonitorParser.c
* @author <NAME> <<EMAIL>>
*
* @section LICENSE
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the author nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <stdlib.h>
#include <string.h>
#include <misc/string_begins_with.h>
#include <misc/balloc.h>
#include <base/BLog.h>
#include <udevmonitor/NCDUdevMonitorParser.h>
#include <generated/blog_channel_NCDUdevMonitorParser.h>
#define PROPERTY_REGEX "^([^=]+)=(.*)$"
static uint8_t * find_end (uint8_t *buf, size_t len)
{
while (len >= 2) {
if (buf[0] == '\n' && buf[1] == '\n') {
return (buf + 2);
}
buf++;
len--;
}
return NULL;
}
static int parse_property (NCDUdevMonitorParser *o, char *data)
{
ASSERT(o->ready_num_properties >= 0)
ASSERT(o->ready_num_properties <= o->max_properties)
if (o->ready_num_properties == o->max_properties) {
BLog(BLOG_ERROR, "too many properties");
return 0;
}
struct NCDUdevMonitorParser_property *prop = &o->ready_properties[o->ready_num_properties];
// execute property regex
regmatch_t matches[3];
if (regexec(&o->property_regex, data, 3, matches, 0) != 0) {
BLog(BLOG_ERROR, "failed to parse property");
return 0;
}
// extract components
prop->name = data + matches[1].rm_so;
*(data + matches[1].rm_eo) = '\0';
prop->value = data + matches[2].rm_so;
*(data + matches[2].rm_eo) = '\0';
// register property
o->ready_num_properties++;
return 1;
}
static int parse_message (NCDUdevMonitorParser *o)
{
ASSERT(!o->is_ready)
ASSERT(o->ready_len >= 2)
ASSERT(o->buf[o->ready_len - 2] == '\n')
ASSERT(o->buf[o->ready_len - 1] == '\n')
// zero terminate message (replacing the second newline)
o->buf[o->ready_len - 1] = '\0';
// start parsing
char *line = (char *)o->buf;
int first_line = 1;
// set is not ready event
o->ready_is_ready_event = 0;
// init properties
o->ready_num_properties = 0;
do {
// find end of line
char *line_end = strchr(line, '\n');
ASSERT(line_end)
// zero terminate line
*line_end = '\0';
if (o->is_info_mode) {
// ignore W: entries with missing space
if (string_begins_with(line, "W:")) {
goto nextline;
}
// parse prefix
if (strlen(line) < 3 || line[1] != ':' || line[2] != ' ') {
BLog(BLOG_ERROR, "failed to parse head");
return 0;
}
char line_type = line[0];
char *line_value = line + 3;
if (first_line) {
if (line_type != 'P') {
BLog(BLOG_ERROR, "wrong first line type");
return 0;
}
} else {
if (line_type == 'E') {
if (!parse_property(o, line_value)) {
return 0;
}
}
}
} else {
if (first_line) {
// is this the initial informational message?
if (string_begins_with(line, "monitor")) {
o->ready_is_ready_event = 1;
break;
}
// check first line
if (!string_begins_with(line, "UDEV ") && !string_begins_with(line, "KERNEL")) {
BLog(BLOG_ERROR, "failed to parse head");
return 0;
}
} else {
if (!parse_property(o, line)) {
return 0;
}
}
}
nextline:
first_line = 0;
line = line_end + 1;
} while (*line);
// set ready
o->is_ready = 1;
return 1;
}
static void process_data (NCDUdevMonitorParser *o)
{
ASSERT(!o->is_ready)
while (1) {
// look for end of event
uint8_t *c = find_end(o->buf, o->buf_used);
if (!c) {
// check for out of buffer condition
if (o->buf_used == o->buf_size) {
BLog(BLOG_ERROR, "out of buffer");
o->buf_used = 0;
}
// receive more data
StreamRecvInterface_Receiver_Recv(o->input, o->buf + o->buf_used, o->buf_size - o->buf_used);
return;
}
// remember message length
o->ready_len = c - o->buf;
// parse message
if (parse_message(o)) {
break;
}
// shift buffer
memmove(o->buf, o->buf + o->ready_len, o->buf_used - o->ready_len);
o->buf_used -= o->ready_len;
}
// call handler
o->handler(o->user);
return;
}
static void input_handler_done (NCDUdevMonitorParser *o, int data_len)
{
DebugObject_Access(&o->d_obj);
ASSERT(!o->is_ready)
ASSERT(data_len > 0)
ASSERT(data_len <= o->buf_size - o->buf_used)
// increment buffer position
o->buf_used += data_len;
// process data
process_data(o);
return;
}
static void done_job_handler (NCDUdevMonitorParser *o)
{
DebugObject_Access(&o->d_obj);
ASSERT(o->is_ready)
// shift buffer
memmove(o->buf, o->buf + o->ready_len, o->buf_used - o->ready_len);
o->buf_used -= o->ready_len;
// set not ready
o->is_ready = 0;
// process data
process_data(o);
return;
}
int NCDUdevMonitorParser_Init (NCDUdevMonitorParser *o, StreamRecvInterface *input, int buf_size, int max_properties,
int is_info_mode, BPendingGroup *pg, void *user,
NCDUdevMonitorParser_handler handler)
{
ASSERT(buf_size > 0)
ASSERT(max_properties >= 0)
ASSERT(is_info_mode == 0 || is_info_mode == 1)
// init arguments
o->input = input;
o->buf_size = buf_size;
o->max_properties = max_properties;
o->is_info_mode = is_info_mode;
o->user = user;
o->handler = handler;
// init input
StreamRecvInterface_Receiver_Init(o->input, (StreamRecvInterface_handler_done)input_handler_done, o);
// init property regex
if (regcomp(&o->property_regex, PROPERTY_REGEX, REG_EXTENDED) != 0) {
BLog(BLOG_ERROR, "regcomp failed");
goto fail1;
}
// init done job
BPending_Init(&o->done_job, pg, (BPending_handler)done_job_handler, o);
// allocate buffer
if (!(o->buf = malloc(o->buf_size))) {
BLog(BLOG_ERROR, "malloc failed");
goto fail2;
}
// set buffer position
o->buf_used = 0;
// set not ready
o->is_ready = 0;
// allocate properties
if (!(o->ready_properties = BAllocArray(o->max_properties, sizeof(o->ready_properties[0])))) {
BLog(BLOG_ERROR, "BAllocArray failed");
goto fail3;
}
// start receiving
StreamRecvInterface_Receiver_Recv(o->input, o->buf, o->buf_size);
DebugObject_Init(&o->d_obj);
return 1;
fail3:
free(o->buf);
fail2:
BPending_Free(&o->done_job);
regfree(&o->property_regex);
fail1:
return 0;
}
void NCDUdevMonitorParser_Free (NCDUdevMonitorParser *o)
{
DebugObject_Free(&o->d_obj);
// free properties
BFree(o->ready_properties);
// free buffer
free(o->buf);
// free done job
BPending_Free(&o->done_job);
// free property regex
regfree(&o->property_regex);
}
void NCDUdevMonitorParser_AssertReady (NCDUdevMonitorParser *o)
{
DebugObject_Access(&o->d_obj);
ASSERT(o->is_ready)
}
void NCDUdevMonitorParser_Done (NCDUdevMonitorParser *o)
{
DebugObject_Access(&o->d_obj);
ASSERT(o->is_ready)
// schedule done job
BPending_Set(&o->done_job);
}
int NCDUdevMonitorParser_IsReadyEvent (NCDUdevMonitorParser *o)
{
DebugObject_Access(&o->d_obj);
ASSERT(o->is_ready)
return o->ready_is_ready_event;
}
int NCDUdevMonitorParser_GetNumProperties (NCDUdevMonitorParser *o)
{
DebugObject_Access(&o->d_obj);
ASSERT(o->is_ready)
return o->ready_num_properties;
}
void NCDUdevMonitorParser_GetProperty (NCDUdevMonitorParser *o, int index, const char **name, const char **value)
{
DebugObject_Access(&o->d_obj);
ASSERT(o->is_ready)
ASSERT(index >= 0)
ASSERT(index < o->ready_num_properties)
*name = o->ready_properties[index].name;
*value = o->ready_properties[index].value;
}
| 4,682 |
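The property lines handled above are split with PROPERTY_REGEX ("^([^=]+)=(.*)$") and the same in-place zero-termination trick used by parse_property(). A tiny standalone illustration of just that split, with a made-up input line, might look like:

// Standalone illustration of the NAME=value split performed by PROPERTY_REGEX
// above (POSIX regex, hypothetical input line; none of the NCD plumbing).
#include <cstdio>
#include <regex.h>

int main() {
  const char *kPropertyRegex = "^([^=]+)=(.*)$";
  char line[] = "DEVNAME=/dev/sda1";  // made-up udev property line

  regex_t re;
  if (regcomp(&re, kPropertyRegex, REG_EXTENDED) != 0)
    return 1;

  regmatch_t m[3];
  if (regexec(&re, line, 3, m, 0) == 0) {
    // Zero-terminate the two submatches in place, as parse_property() does.
    line[m[1].rm_eo] = '\0';
    line[m[2].rm_eo] = '\0';
    std::printf("name=%s value=%s\n", line + m[1].rm_so, line + m[2].rm_so);
  }

  regfree(&re);
  return 0;
}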
1,825 | <gh_stars>1000+
package com.github.unidbg.ios.struct.sysctl;
import com.github.unidbg.pointer.UnidbgStructure;
import com.github.unidbg.unix.struct.TimeVal32;
import com.sun.jna.Pointer;
import java.util.Arrays;
import java.util.List;
public final class IfData extends UnidbgStructure {
public IfData(Pointer p) {
super(p);
}
/* generic interface information */
public byte ifi_type; /* ethernet, tokenring, etc */
public byte ifi_typelen; /* Length of frame type id */
public byte ifi_physical; /* e.g., AUI, Thinnet, 10base-T, etc */
public byte ifi_addrlen; /* media address length */
public byte ifi_hdrlen; /* media header length */
public byte ifi_recvquota; /* polling quota for receive intrs */
public byte ifi_xmitquota; /* polling quota for xmit intrs */
public byte ifi_unused1; /* for future use */
public int ifi_mtu; /* maximum transmission unit */
public int ifi_metric; /* routing metric (external only) */
public int ifi_baudrate; /* linespeed */
/* volatile statistics */
public int ifi_ipackets; /* packets received on interface */
public int ifi_ierrors; /* input errors on interface */
public int ifi_opackets; /* packets sent on interface */
public int ifi_oerrors; /* output errors on interface */
public int ifi_collisions; /* collisions on csma interfaces */
public int ifi_ibytes; /* total number of octets received */
public int ifi_obytes; /* total number of octets sent */
public int ifi_imcasts; /* packets received via multicast */
public int ifi_omcasts; /* packets sent via multicast */
public int ifi_iqdrops; /* dropped on input, this interface */
public int ifi_noproto; /* destined for unsupported protocol */
public int ifi_recvtiming; /* usec spent receiving when timing */
public int ifi_xmittiming; /* usec spent xmitting when timing */
public TimeVal32 ifi_lastchange; /* time of last administrative change */
public int ifi_unused2; /* used to be the default_proto */
public int ifi_hwassist; /* HW offload capabilities */
public int ifi_reserved1; /* for future use */
public int ifi_reserved2; /* for future use */
@Override
protected List<String> getFieldOrder() {
return Arrays.asList("ifi_type", "ifi_typelen", "ifi_physical", "ifi_addrlen", "ifi_hdrlen",
"ifi_recvquota", "ifi_xmitquota", "ifi_unused1", "ifi_mtu", "ifi_metric", "ifi_baudrate",
"ifi_ipackets", "ifi_ierrors", "ifi_opackets", "ifi_oerrors", "ifi_collisions",
"ifi_ibytes", "ifi_obytes", "ifi_imcasts", "ifi_omcasts", "ifi_iqdrops", "ifi_noproto",
"ifi_recvtiming", "ifi_xmittiming", "ifi_lastchange",
"ifi_unused2", "ifi_hwassist", "ifi_reserved1", "ifi_reserved2");
}
}
| 1,203 |
715 | class Node:
def __init__(self,vertex):
self.vertex = vertex
self.next = None
userinput = str(input("Number of nodes and edges : ")).split() #Read the first line as a string and split it on spaces into a list.
N = int(userinput[0]) #Extract Number of vertexes
M = int(userinput[1]) #Extract Number of edges
graph = [] #Create an array to hold the graph
for i in range(N):
graph.append(None)
#Requesting next M lines of inputs which are the description of edges
for i in range(M):
edges = str(input("Edge if between vertexes : ")).split() #string input line of the user splitted in to an array
fromVertex = int(edges[0]) #extracting the 1st vertex of the user input and assigning it to fromVertex
toVertex = int(edges[1])#extracting the 2nd vertex of the user input and assigning it to toVertex
#Now let us create a node to toVertex and put that it to a/the linkedlist in the postion of fromVertex in the main graph
node = Node(toVertex) #creating the node
#if there's no linkedlist already in that position, then put the node there
if(graph[fromVertex-1]== None):
graph[fromVertex-1] = node
else:
#if there's already a linkedlist append the linkedlist to the newly created node and put that in to the position of fromNode in the main graph
node.next = graph[fromVertex-1]
graph[fromVertex-1] = node
def bfs(graph,source):
color = []
distance = []
for i in range(len(graph)):
color.append("white")
distance.append(None)
color[source-1] = "gray"
distance[source-1] = 0
Queue = []
Queue.append(source)
while(len(Queue)!=0):
u = Queue.pop(0)
node = graph[u-1]
while(node!=None):
if(color[node.vertex-1]=="white"):
Queue.append(node.vertex)
color[node.vertex-1]="gray"
distance[node.vertex-1] = distance[u-1]+1
node = node.next
color[u-1] = "black"
return distance
#Now let us see how we can use this BFS
#If you want to print distances from 3 to other vertexes, call bfs function with the source vertex 3.
print(bfs(graph,3))
| 792 |
4,283 | {
"instance": {
"hazelcast": {
"security": {
"enabled": "on",
"realms": [
2,
{
"unknown": {}
},
{
"tls": {
"roleAttribute": null
}
}
]
}
}
},
"error": {
"schemaLocation": "#/definitions/Security",
"pointerToViolation": "#/hazelcast/security",
"causingExceptions": [
{
"schemaLocation": "#/definitions/Security/properties/enabled",
"pointerToViolation": "#/hazelcast/security/enabled",
"causingExceptions": [],
"keyword": "type",
"message": "expected type: Boolean, found: String"
},
{
"schemaLocation": "#/definitions/Security/properties/realms",
"pointerToViolation": "#/hazelcast/security/realms",
"causingExceptions": [
{
"schemaLocation": "#/definitions/Security/properties/realms/items",
"pointerToViolation": "#/hazelcast/security/realms/0",
"causingExceptions": [],
"keyword": "type",
"message": "expected type: JSONObject, found: Integer"
},
{
"schemaLocation": "#/definitions/Security/properties/realms/items",
"pointerToViolation": "#/hazelcast/security/realms/1",
"causingExceptions": [],
"keyword": "additionalProperties",
"message": "extraneous key [unknown] is not permitted"
},
{
"schemaLocation": "#/definitions/Security/properties/realms/items",
"pointerToViolation": "#/hazelcast/security/realms/2",
"causingExceptions": [],
"keyword": "additionalProperties",
"message": "extraneous key [tls] is not permitted"
}
],
"message": "3 schema violations found"
}
],
"message": "4 schema violations found"
}
}
| 949 |
410 | <filename>external/glm/gtc/vec1.hpp
/// @ref gtc_vec1
/// @file glm/gtc/vec1.hpp
///
/// @see core (dependence)
///
/// @defgroup gtc_vec1 GLM_GTC_vec1
/// @ingroup gtc
///
/// Include <glm/gtc/vec1.hpp> to use the features of this extension.
///
/// Add vec1, ivec1, uvec1 and bvec1 types.
#pragma once
// Dependency:
#include "../ext/vec1.hpp"
#if GLM_MESSAGES == GLM_MESSAGES_ENABLED && !defined(GLM_EXT_INCLUDED)
# pragma message("GLM: GLM_GTC_vec1 extension included")
#endif
namespace glm
{
//////////////////////////
// vec1 definition
#if(defined(GLM_PRECISION_HIGHP_BOOL))
typedef highp_bvec1 bvec1;
#elif(defined(GLM_PRECISION_MEDIUMP_BOOL))
typedef mediump_bvec1 bvec1;
#elif(defined(GLM_PRECISION_LOWP_BOOL))
typedef lowp_bvec1 bvec1;
#else
/// 1 component vector of boolean.
/// @see gtc_vec1 extension.
typedef highp_bvec1 bvec1;
#endif//GLM_PRECISION
#if(defined(GLM_PRECISION_HIGHP_FLOAT))
typedef highp_vec1 vec1;
#elif(defined(GLM_PRECISION_MEDIUMP_FLOAT))
typedef mediump_vec1 vec1;
#elif(defined(GLM_PRECISION_LOWP_FLOAT))
typedef lowp_vec1 vec1;
#else
/// 1 component vector of floating-point numbers.
/// @see gtc_vec1 extension.
typedef highp_vec1 vec1;
#endif//GLM_PRECISION
#if(defined(GLM_PRECISION_HIGHP_DOUBLE))
typedef highp_dvec1 dvec1;
#elif(defined(GLM_PRECISION_MEDIUMP_DOUBLE))
typedef mediump_dvec1 dvec1;
#elif(defined(GLM_PRECISION_LOWP_DOUBLE))
typedef lowp_dvec1 dvec1;
#else
/// 1 component vector of floating-point numbers.
/// @see gtc_vec1 extension.
typedef highp_dvec1 dvec1;
#endif//GLM_PRECISION
#if(defined(GLM_PRECISION_HIGHP_INT))
typedef highp_ivec1 ivec1;
#elif(defined(GLM_PRECISION_MEDIUMP_INT))
typedef mediump_ivec1 ivec1;
#elif(defined(GLM_PRECISION_LOWP_INT))
typedef lowp_ivec1 ivec1;
#else
/// 1 component vector of signed integer numbers.
/// @see gtc_vec1 extension.
typedef highp_ivec1 ivec1;
#endif//GLM_PRECISION
#if(defined(GLM_PRECISION_HIGHP_UINT))
typedef highp_uvec1 uvec1;
#elif(defined(GLM_PRECISION_MEDIUMP_UINT))
typedef mediump_uvec1 uvec1;
#elif(defined(GLM_PRECISION_LOWP_UINT))
typedef lowp_uvec1 uvec1;
#else
/// 1 component vector of unsigned integer numbers.
/// @see gtc_vec1 extension.
typedef highp_uvec1 uvec1;
#endif//GLM_PRECISION
}// namespace glm
#include "vec1.inl"
| 1,111 |
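A few illustrative lines using the single-component typedefs this header provides; the snippet assumes GLM is on the include path and shows nothing beyond the types documented above.

// Tiny usage sketch for the GLM_GTC_vec1 typedefs above (illustration only).
#include <glm/gtc/vec1.hpp>

int main() {
    glm::vec1  f(1.5f);                   // 1-component float vector
    glm::ivec1 i(2);                      // 1-component signed integer vector
    glm::bvec1 b(f.x > 0.0f && i.x > 0);  // 1-component boolean vector
    return b.x ? 0 : 1;
}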
615 | /**
* @file likely.h
* @author <NAME>
*
* All files in META are dual-licensed under the MIT and NCSA licenses. For more
* details, consult the file LICENSE.mit and LICENSE.ncsa in the root of the
* project.
*/
#ifndef META_UTIL_LIKELY_H_
#define META_UTIL_LIKELY_H_
#include "meta/config.h"
#if META_HAS_BUILTIN_EXPECT
#define META_LIKELY(x) __builtin_expect(!!(x), 1)
#define META_UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
#define META_LIKELY(x) x
#define META_UNLIKELY(x) x
#endif
#endif
| 217 |
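A short, hedged example of how the META_LIKELY/META_UNLIKELY hints above are typically used; the include path is inferred from the header guard and the function itself is made up.

// Hypothetical use of the branch-prediction hints from likely.h above.
#include <cstddef>
#include "meta/util/likely.h"  // assumed path, inferred from META_UTIL_LIKELY_H_

int count_nonzero(const int* data, std::size_t n) {
    int count = 0;
    for (std::size_t i = 0; i < n; ++i) {
        // In this made-up workload most entries are nonzero, so hint the
        // compiler that the branch is usually taken.
        if (META_LIKELY(data[i] != 0))
            ++count;
    }
    return count;
}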
474 | // Copyright 2019 ETH Zürich, <NAME>
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#include "camera_calibration/tools/tools.h"
#include <libvis/logging.h>
#include <libvis/statistics.h>
#include <opengv/absolute_pose/CentralAbsoluteAdapter.hpp>
#include <opengv/absolute_pose/methods.hpp>
#include <opengv/sac/Ransac.hpp>
#include <opengv/sac/SampleConsensusProblem.hpp>
#include <opengv/sac/SampleConsensus.hpp>
#include <opengv/sac_problems/point_cloud/PointCloudSacProblem.hpp>
#include <opengv/sac_problems/absolute_pose/AbsolutePoseSacProblem.hpp>
#include "camera_calibration/fitting_report.h"
#include "camera_calibration/io/calibration_io.h"
#include "camera_calibration/models/central_generic.h"
namespace vis {
int LocalizationAccuracyTest(
const char* gt_model_yaml_path,
const char* compared_model_yaml_path) {
shared_ptr<CameraModel> gt_model = LoadCameraModel(gt_model_yaml_path);
if (!gt_model) {
LOG(ERROR) << "Cannot load ground truth camera model: " << gt_model_yaml_path;
return EXIT_FAILURE;
}
shared_ptr<CameraModel> compared_model = LoadCameraModel(compared_model_yaml_path);
if (!compared_model) {
LOG(ERROR) << "Cannot load camera model to compare: " << compared_model_yaml_path;
return EXIT_FAILURE;
}
if (gt_model->width() != compared_model->width() ||
gt_model->height() != compared_model->height()) {
LOG(ERROR) << "The ground truth and compared camera models do not have the same image size.";
return EXIT_FAILURE;
}
Mean<float> error_distance_mean;
srand(time(nullptr));
constexpr int kNumTrials = 10000;
vector<float> errors(kNumTrials);
for (int trial = 0; trial < kNumTrials; ++ trial) {
// Generate some random points in the image, unproject with the generic camera
// model to random depths close to 2 meters.
constexpr int kPointCount = 15;
constexpr float kMinDistance = 1.5f;
constexpr float kMaxDistance = 2.5f;
opengv::points_t gt_points;
opengv::bearingVectors_t compared_model_bearing_vectors;
for (int p = 0; p < kPointCount; ++ p) {
// Pick random pixel position
Vec2f pixel_position = (0.5f * Vec2f::Random() + Vec2f(0.5f, 0.5f)).cwiseProduct(Vec2f(gt_model->width(), gt_model->height()));
// Unproject the position with both models
Vec3d gt_direction;
Vec3d compared_direction;
      if (!gt_model->Unproject(pixel_position.x(), pixel_position.y(), &gt_direction) ||
!compared_model->Unproject(pixel_position.x(), pixel_position.y(), &compared_direction)) {
-- p;
continue;
}
gt_direction.normalize();
// Add a 3D point with the ground truth direction and random distance
float distance = kMinDistance + ((rand() % 10000) / 10000.f) * (kMaxDistance - kMinDistance);
gt_points.push_back(gt_direction * distance);
// Store the compared direction
compared_model_bearing_vectors.push_back(compared_direction.normalized());
}
// Optimize the camera pose with the ground truth 3D points and the compared model.
opengv::absolute_pose::CentralAbsoluteAdapter adapter(
compared_model_bearing_vectors,
gt_points);
adapter.sett(Vec3d::Zero());
adapter.setR(Mat3d::Identity());
opengv::transformation_t global_tr_image_matrix = opengv::absolute_pose::optimize_nonlinear(adapter);
// Compute the camera center distance change from this process.
Vec3d translation = global_tr_image_matrix.block<3, 1>(0, 3);
float camera_error_distance = translation.norm();
// LOG(INFO) << "[" << trial << "] error [mm]: " << (1000 * camera_error_distance);
error_distance_mean.AddData(camera_error_distance);
errors[trial] = camera_error_distance;
}
std::sort(errors.begin(), errors.end());
double median_error = errors[errors.size() / 2];
// Report the average and median camera center distance change.
LOG(INFO) << "Average error [mm]: " << (1000 * error_distance_mean.ComputeArithmeticMean());
LOG(INFO) << "Median error [mm]: " << (1000 * median_error);
return EXIT_SUCCESS;
}
}
| 1,960 |